From 641888e1cbcb0f29ebe99d600c4b86c92b279d5f Mon Sep 17 00:00:00 2001 From: LiranCohen Date: Thu, 30 Nov 2023 14:53:48 -0500 Subject: [PATCH] EventsQuery interface and LevelIndex enhancements. (#625) This PR contains a new interface method, EventsQuery, which is primarily used to query for filtered events in order to enable SelectiveSync. EventLogLevel: EventLogLevel now takes an additional property indexes to its append() function. These indexes are key-value indexes in the same format that we use with the MessageStore index. IndexLevel: To enable this functionality IndexLevel has been enhanced to become a more general purpose LevelDB Index that supports sorting as well as a cursor pointer property. * dwn selective sync wip, copy-pasta from index level * first pass at POC for selective sync * eventlog query with watermark * EventsQuery interface * update after rebase * events query interfaces and handler * index immutable RecordsWrite properties of a RecordsDelete * additional indexing for event log/sync * complete tests added to handler * refactor and move tests * events get coverage * increase coverage, clean up query functions * range filter support, extract event within iterator * additional event log indexing tests * refactor indexes to use common methods, remove cidlog in favor of cidindex * simplify sync * refactor before consolidation * initial pass at abstraction for IndexStore within the sync log store * abstracting functionality for index store * refactor cleanup * continue refactor and test effort * continue refactoring effort * test index level * complete test coverage for index level * migrate functionality from MessageStoreIndex to new IndexLevel * increase test case coverage * clean up filter object * use new IndexLevel cursor functionality for MessageStore pagination * clean up, comments * remove selective sync specific tests, added to coverage and comments * clean up tests and legibility * improved code legibility and comments * update after 
rebase * add more testing for sort/cursor, finalized sorting logic * add tests for sort/cursor when inserted out of order * move events get schema into the interfaces folder * export FilteredQuery * export EventsQuery Message and Reply types, add processMessage overload * sortProperty vs sort * deletes all indexed keys * review changes * key -> item ID rename where appropriate (#571) * more item ID renames (#572) * sync tests, need to make more clear * review suggestions * scenario test and review suggestions * Disambiguating meaning of cursor (#575) * Disambiguating meaning of cursor * comment update Co-authored-by: Diane Huxley --------- Co-authored-by: Liran Cohen Co-authored-by: Diane Huxley * sort and pagination within the index level implementation * rip out property indexing * clean up index level and apply to message store and event log * clean up tests and add comments * cleanup and comments * pagination bench tests * add to scenario test * update benchmark tests * some cleanup * change benchmark filters for pagination * create sorted indexes for all indexed properties * filter query for index * support both query paths, update records query tests * clean up and add some comments * clean up, two paths * cleanup after rebase, add unimplemented missing test * remove circular deps * filter selector/augmentor * add index utils * fix circular dep * clean up * update tests to use top-level query, will add more tests * fix EventsQuery/cursor check * move filter selector into a class, renamed index utility to FilterUtility * added tests for filter utility * move FilterSelector class to util/filters, updated tests to cover level index, need to refactor tests a bit more * moved index types into their own file, fixes circular deps, cleaner placement * simplify index-level, remove external watermark using messageCid as a cursor for events * filter selector testing, needs some rules removed/changed * events query for author across message types * review suggestions * 
replace watermark with cursor where necessary * update match comments * update watermark comments * address PR comments * Remove Nested Object Indexing (#621) * only index strings, numbers, and booleans * clean up any usages of unknown in filters * clean up matchRange filter * move encoding value/numbers into IndexLevel * fix async test * FilterIndex to KeyValues type * remove flatten * refactoring (#622) * Renames (#626) * review suggestions * removed unneeded indexing, remove references to Record type in favor of KeyValues when building indexes * remove circular dep * update tests * update EventsFilter types, error handling, convertFilter/parse * added tests for nonsensical published filter * update test suite * allow indexing of empty strings * scaffold events query scenario tests * updated tests for filter reducer * fix message store benchmark * filter reducer coverage * test for one of * add testing, filter type needs work * fix circular dep * rename method * Modified code such that in-memory-paging supports empty filter and array (#632) * Modified code such that in-memory-paging supports empty filter and array * Removed the need for FilterSelector * remove author filter and tests * test filter selector * move encoding tests to index-level, add tests for convert range criterion * protocol events tests * more event filter tests * more scenario test coverage * contextId scenario testing, add comments * updated tests to isFilterConcise --------- Co-authored-by: Henry Tsai Co-authored-by: Diane Huxley --- .gitignore | 1 + benchmarks/store/index/index-level.js | 27 +- benchmarks/store/index/search-index.js | 11 + .../store/message/message-store-level.js | 203 ++- build/compile-validators.js | 6 +- .../interface-methods/events-filter.json | 112 ++ .../events-get.json | 2 +- .../interface-methods/events-query.json | 52 + package-lock.json | 2 +- package.json | 1 - src/core/auth.ts | 2 +- src/core/dwn-error.ts | 4 + src/core/protocol-authorization.ts | 2 +- src/dwn.ts | 
5 +- src/event-log/event-log-level.ts | 94 +- src/handlers/events-get.ts | 6 +- src/handlers/events-query.ts | 43 + src/handlers/permissions-grant.ts | 24 +- src/handlers/permissions-request.ts | 2 +- src/handlers/permissions-revoke.ts | 23 +- src/handlers/protocols-configure.ts | 8 +- src/handlers/records-delete.ts | 50 +- src/handlers/records-query.ts | 31 +- src/handlers/records-read.ts | 2 +- src/handlers/records-write.ts | 2 +- src/index.ts | 9 +- src/interfaces/events-get.ts | 6 +- src/interfaces/events-query.ts | 124 ++ src/interfaces/protocols-query.ts | 2 +- src/interfaces/records-query.ts | 23 +- src/interfaces/records-write.ts | 7 +- src/store/index-level.ts | 648 ++++++--- src/store/level-wrapper.ts | 10 + src/store/message-store-level.ts | 166 +-- src/types/event-log.ts | 36 +- src/types/event-types.ts | 46 +- src/types/message-store.ts | 5 +- src/types/message-types.ts | 37 +- src/types/protocols-types.ts | 2 +- src/types/query-types.ts | 52 + src/types/records-types.ts | 14 +- src/utils/filter.ts | 210 +++ src/utils/object.ts | 12 - src/utils/records.ts | 45 +- tests/event-log/event-log-level.spec.ts | 147 +- tests/event-log/event-log.spec.ts | 282 ++++ tests/handlers/events-get.spec.ts | 22 +- tests/handlers/events-query.spec.ts | 111 ++ tests/handlers/permissions-grant.spec.ts | 7 +- tests/handlers/permissions-request.spec.ts | 4 +- tests/handlers/permissions-revoke.spec.ts | 6 +- tests/handlers/protocols-configure.spec.ts | 4 +- tests/handlers/records-delete.spec.ts | 36 +- tests/handlers/records-query.spec.ts | 213 ++- tests/handlers/records-write.spec.ts | 4 +- tests/interfaces/events-get.spec.ts | 18 +- tests/interfaces/events-query.spec.ts | 162 +++ tests/interfaces/records-query.spec.ts | 18 +- tests/scenarios/events-query.spec.ts | 1126 +++++++++++++++ tests/store/index-level.spec.ts | 1254 +++++++++++++---- tests/store/message-store.spec.ts | 65 +- tests/test-suite.ts | 6 + tests/utils/filters.spec.ts | 314 +++++ 
tests/utils/test-data-generator.ts | 37 +- 64 files changed, 4892 insertions(+), 1113 deletions(-) create mode 100644 json-schemas/interface-methods/events-filter.json rename json-schemas/{events => interface-methods}/events-get.json (97%) create mode 100644 json-schemas/interface-methods/events-query.json create mode 100644 src/handlers/events-query.ts create mode 100644 src/interfaces/events-query.ts create mode 100644 src/types/query-types.ts create mode 100644 src/utils/filter.ts create mode 100644 tests/event-log/event-log.spec.ts create mode 100644 tests/handlers/events-query.spec.ts create mode 100644 tests/interfaces/events-query.spec.ts create mode 100644 tests/scenarios/events-query.spec.ts create mode 100644 tests/utils/filters.spec.ts diff --git a/.gitignore b/.gitignore index fa4e870d9..1b54ed6bc 100644 --- a/.gitignore +++ b/.gitignore @@ -24,6 +24,7 @@ INDEX # location for index specific levelDB data storage levelDB data storage for non-browser tests TEST-INDEX BENCHMARK-INDEX +BENCHMARK-BLOCK # folders used by code coverage .nyc_output/ coverage diff --git a/benchmarks/store/index/index-level.js b/benchmarks/store/index/index-level.js index cab5c449f..064a2f58a 100644 --- a/benchmarks/store/index/index-level.js +++ b/benchmarks/store/index/index-level.js @@ -1,6 +1,8 @@ import { IndexLevel } from '../../../dist/esm/src/store/index-level.js'; import { v4 as uuid } from 'uuid'; +const tenant = 'did:xyz:alice'; + // create const createStart = Date.now(); @@ -21,31 +23,40 @@ console.log('clear - before', clearBeforeEnd - clearBeforeStart); // put const putStart = Date.now(); -await Promise.all(Array(10_000).fill().map(() => index.put(uuid(), { - test : 'foo', - number : Math.random() -}))); +await Promise.all(Array(10_000).fill().map((_,i) => { + const id = uuid(); + const doc = { test: 'foo', number: Math.random() }; + return index.put(tenant, id, doc, doc, { index: i, number: Math.random(), id }); +})); const putEnd = Date.now(); console.log('put', 
putEnd - putStart); // query - equal const queryEqualStart = Date.now(); -await index.query({ +await index.query(tenant, [{ 'test': 'foo' -}); +}], { sortProperty: 'id' }); const queryEqualEnd = Date.now(); console.log('query - equal', queryEqualEnd - queryEqualStart); // query - range const queryRangeStart = Date.now(); -await index.query({ +await index.query(tenant, [{ 'number': { gte: 0.5 } -}); +}],{ sortProperty: 'id' }); const queryRangeEnd = Date.now(); console.log('query - range', queryRangeEnd - queryRangeStart); +const multipleRangeStart = Date.now(); +await index.query(tenant, [ + { 'number': { lte: 0.1 } }, + { 'number': { gte: 0.5 } } +],{ sortProperty: 'id' }); +const multipleRangeEnd = Date.now(); +console.log('query - multiple range', multipleRangeEnd - multipleRangeStart); + // clear - after const clearAfterStart = Date.now(); diff --git a/benchmarks/store/index/search-index.js b/benchmarks/store/index/search-index.js index 5ef37bee8..2e869a68c 100644 --- a/benchmarks/store/index/search-index.js +++ b/benchmarks/store/index/search-index.js @@ -46,6 +46,17 @@ await index.QUERY({ AND: [ { const queryRangeEnd = Date.now(); console.log('query - range', queryRangeEnd - queryRangeStart); +const multipleRangeStart = Date.now(); +await index.QUERY({ AND: [ { + FIELD : 'number', + VALUE : { LTE: '0.1' } +},{ + FIELD : 'number', + VALUE : { GTE: '0.5' } +} ] }); +const multipleRangeEnd = Date.now(); +console.log('query - multiple range', multipleRangeEnd - multipleRangeStart); + // clear - after const clearAfterStart = Date.now(); diff --git a/benchmarks/store/message/message-store-level.js b/benchmarks/store/message/message-store-level.js index 43e9256d9..e4b75660e 100644 --- a/benchmarks/store/message/message-store-level.js +++ b/benchmarks/store/message/message-store-level.js @@ -1,25 +1,38 @@ import { MessageStoreLevel } from '../../../dist/esm/src/store/message-store-level.js'; +import { SortDirection } from '../../../dist/esm/src/types/query-types.js'; 
import { TestDataGenerator } from '../../../dist/esm/tests/utils/test-data-generator.js'; import { Time } from '../../../dist/esm/src/utils/time.js'; const tenant = 'did:xyz:alice'; console.log('message store benchmarks'); +const items = 10_000; + // pre-generate messages -const insertMessages = Array(10_000).fill().map((_,i) => { +const insertMessages = Array(items).fill().map((_,i) => { + // random schema from 1-5 + const schemaId = Math.floor(Math.random() * 5) + 1; + const schema = `schema${schemaId}`; + + //random protocol from 1-10 + const protocolId = Math.floor(Math.random() * 9); + const protocol = `proto${protocolId}`; + + const bobId = i % 25; + const recipient = `bob${bobId + 1}`; + const author = i % 50 === 0 ? 'bob1' : 'alice'; + const published = i % 100 === 0 ? true : false; + + let year; const mod = i % 3; - let schema, year; switch (mod) { case 0: - schema = 'schema1'; year = 2022; break; case 1: - schema = 'schema2'; year = 2023; break; default: - schema = 'schema3'; year = 2024; } @@ -35,7 +48,11 @@ const insertMessages = Array(10_000).fill().map((_,i) => { const indexes = { ...message.descriptor, schema, + protocol, dateCreated, + recipient, + author, + published, }; return { message, indexes }; }); @@ -43,64 +60,190 @@ const insertMessages = Array(10_000).fill().map((_,i) => { // create const createStart = Date.now(); const messageStore = new MessageStoreLevel({ - location: 'BENCHMARK-INDEX' + blockstoreLocation : 'BENCHMARK-BLOCK', + indexLocation : 'BENCHMARK-INDEX', }); await messageStore.open(); const createEnd = Date.now(); -console.log('\tcreate\t\t\t:', createEnd - createStart); +console.log('\tcreate\t\t\t\t:', createEnd - createStart, 'ms'); // clear - before const clearBeforeStart = Date.now(); await messageStore.clear(); const clearBeforeEnd = Date.now(); -console.log('\tclear - before\t\t:', clearBeforeEnd - clearBeforeStart); +console.log('\tclear - before\t\t\t:', clearBeforeEnd - clearBeforeStart, 'ms'); // put const putStart = 
Date.now(); await Promise.all(insertMessages.map(({ message, indexes }) => messageStore.put(tenant, message, indexes))); const putEnd = Date.now(); -console.log('\tput\t\t\t:', putEnd - putStart); +console.log('\tput\t\t\t\t:', putEnd - putStart, 'ms'); + +const firstDayOf2024 = Time.createTimestamp({ year: 2024, month: 1, day: 1 }); + +// advanced query +const ascOrder = { messageTimestamp: SortDirection.Ascending }; +const descOrder = { messageTimestamp: SortDirection.Descending }; + +// paginate 10 pages of 20 results for a specific schema +// note: published: true is a smaller subset so will perform better if index optimizes for equality filter +let page = 0; +let paginationMessageCid = undefined; +let messages = []; +let results = []; +const paginationStart = Date.now(); +while (page < 10) { + page++; + ({ messages, paginationMessageCid } = await messageStore.query(tenant, [ + { published: true, schema: 'schema2', protocol: 'proto6' } + ], ascOrder, { limit: 20, paginationMessageCid } )); + results.push(...messages); + if (paginationMessageCid === undefined) { + break; + } +} +const paginationEnd = Date.now(); +console.log('\tpagination small subset\t\t:', paginationEnd - paginationStart, 'ms', 'results ', results.length); -// query - equal +// descending order +results = []; +page = 0; +paginationMessageCid = undefined; +const paginationDescStart = Date.now(); +while (page < 10) { + page++; + ({ messages, paginationMessageCid } = await messageStore.query(tenant, [ + { published: true, schema: 'schema2', protocol: 'proto6' } + ], descOrder, { limit: 20, paginationMessageCid } )); + results.push(...messages); + if (paginationMessageCid === undefined) { + break; + } +} +const paginationDescEnd = Date.now(); +console.log('\tpagination small subset des\t:', paginationDescEnd - paginationDescStart, 'ms', ' results', results.length); + +// filter for a larger result set. 
+results = []; +page = 0; +paginationMessageCid = undefined; +const paginationLargeStart = Date.now(); +while (page < 10) { + page++; + ({ messages, paginationMessageCid } = await messageStore.query(tenant, [ + { published: true, schema: 'schema2', protocol: 'proto6' }, + { published: false, schema: 'schema2', protocol: 'proto6' } + ], ascOrder, { limit: 20, paginationMessageCid } )); + results.push(...messages); + if (paginationMessageCid === undefined) { + break; + } +} +const paginationLargeEnd = Date.now(); +console.log('\tpagination large subset\t\t:', paginationLargeEnd - paginationLargeStart, 'ms', ' results', results.length); + +// ascending multiple filters. similar to non-owner query +results = []; +page = 0; +paginationMessageCid = undefined; +const paginationNonOwnerStart = Date.now(); +while (page < 10) { + page++; + ({ messages, paginationMessageCid } = await messageStore.query(tenant, [ + { schema: 'schema2', published: false, author: 'bob1', protocol: 'proto6' }, + { schema: 'schema2', published: true, protocol: 'proto6' }, + { schema: 'schema2', published: false, recipient: 'bob1', protocol: 'proto6' }, + ], ascOrder, { limit: 20, paginationMessageCid } )); + results.push(...messages); + if (paginationMessageCid === undefined) { + break; + } +} +const paginationNonOwnerEnd = Date.now(); +console.log('\tpagination non owner\t\t:', paginationNonOwnerEnd - paginationNonOwnerStart, 'ms', ' results', results.length); + +// descending multiple filters. 
similar to non-owner query +results = []; +page = 0; +paginationMessageCid = undefined; +const paginationDescNonOwnerStart = Date.now(); +while (page < 10) { + page++; + ({ messages, paginationMessageCid } = await messageStore.query(tenant, [ + { schema: 'schema2', published: false, author: 'bob1', protocol: 'proto6' }, + { schema: 'schema2', published: true, protocol: 'proto6' }, + { schema: 'schema2', published: false, recipient: 'bob1', protocol: 'proto6' }, + ], descOrder, { limit: 20, paginationMessageCid } )); + results.push(...messages); + if (paginationMessageCid === undefined) { + break; + } +} +const paginationDescNonOwnerEnd = Date.now(); +console.log('\tpagination desc non owner\t:', paginationDescNonOwnerEnd - paginationDescNonOwnerStart, 'ms', ' results', results.length); + +const smallResultSetStart = Date.now(); +({ messages } = await messageStore.query(tenant, [{ published: true, recipient: 'bob1' }])); +const smallResultSetEnd = Date.now(); +console.log('\tquery asc - small set equal\t:', smallResultSetEnd - smallResultSetStart, 'ms'); +console.log('\t\tresults count\t\t:', messages.length); + +const lastDayOf2022 = Time.createTimestamp({ year: 2022, month: 12, day: 31 }); +const lastDayOf2023 = Time.createTimestamp({ year: 2023, month: 12, day: 31 }); +const queryRangeStart = Date.now(); +({ messages } = await messageStore.query(tenant, [{ + dateCreated: { gt: lastDayOf2022, lt: lastDayOf2023 } +}])); +const queryRangeEnd = Date.now(); +console.log('\tquery - range\t\t\t:', queryRangeEnd - queryRangeStart, 'ms'); +console.log('\t\tresults count\t\t:', messages.length); + +// larger result set const queryEqualStart = Date.now(); -let { messages } = await messageStore.query(tenant, [{ schema: 'schema2' }]); +({ messages } = await messageStore.query(tenant, [{ schema: 'schema2' }])); const queryEqualEnd = Date.now(); -console.log('\tquery - equal\t\t:', queryEqualEnd - queryEqualStart); -console.log('\t\tresults count\t:', messages.length); 
+console.log('\tquery - equal\t\t\t:', queryEqualEnd - queryEqualStart, 'ms'); +console.log('\t\tresults count\t\t:', messages.length); -// query - equal multiple +// multiple queries const multipleEqualStart = Date.now(); -({ messages } = await messageStore.query(tenant, [{ schema: 'schema2' }, { schema: 'schema1' }])); +({ messages } = await messageStore.query(tenant, [{ schema: ['schema2', 'schema1'] }, { published: true }])); const multipleEqualEnd = Date.now(); -console.log('\tquery - multiple equal\t:', multipleEqualEnd - multipleEqualStart); -console.log('\t\tresults count\t:', messages.length); +console.log('\tquery - multiple equal\t\t:', multipleEqualEnd - multipleEqualStart, 'ms'); +console.log('\t\tresults count\t\t:', messages.length); -// query - range -const lastDayOf2022 = Time.createTimestamp({ year: 2022, month: 12, day: 31 }); -const queryRangeStart = Date.now(); +//range queries +// gt +const queryGTRangeStart = Date.now(); ({ messages } = await messageStore.query(tenant, [{ dateCreated: { gt: lastDayOf2022 } }])); -const queryRangeEnd = Date.now(); -console.log('\tquery - range\t\t:', queryRangeEnd - queryRangeStart); -console.log('\t\tresults count\t:', messages.length); +const queryGTRangeEnd = Date.now(); +console.log('\tquery - gt range\t\t:', queryGTRangeEnd - queryGTRangeStart, 'ms'); +console.log('\t\tresults count\t\t:', messages.length); + +// lt +const queryLTRangeStart = Date.now(); +({ messages } = await messageStore.query(tenant, [{ + dateCreated: { lt: lastDayOf2022 } +}])); +const queryLTRangeEnd = Date.now(); +console.log('\tquery - lt range\t\t:', queryLTRangeEnd - queryLTRangeStart, 'ms'); +console.log('\t\tresults count\t\t:', messages.length); // query - range multiple const multipleRangeStart = Date.now(); -const firstDayOf2024 = Time.createTimestamp({ year: 2024, month: 1, day: 1 }); -const lastDayOf2023 = Time.createTimestamp({ year: 2023, month: 12, day: 31 }); ({ messages } = await messageStore.query(tenant, [ { 
dateCreated: { gt: lastDayOf2022 } }, - { dateCreated: { lt: firstDayOf2024, gt: lastDayOf2023 } } + { dateCreated: { lt: firstDayOf2024, gt: lastDayOf2023 } }, ])); const multipleRangeEnd = Date.now(); -console.log('\tquery - multiple range\t:', multipleRangeEnd - multipleRangeStart); -console.log('\t\tresults count\t:', messages.length); - +console.log('\tquery - multiple range\t\t:', multipleRangeEnd - multipleRangeStart, 'ms'); +console.log('\t\tresults count\t\t:', messages.length); // clear - after const clearAfterStart = Date.now(); await messageStore.clear(); const clearAfterEnd = Date.now(); -console.log('\tclear - after\t\t:', clearAfterEnd - clearAfterStart); +console.log('\tclear - after\t\t\t:', clearAfterEnd - clearAfterStart, 'ms'); diff --git a/build/compile-validators.js b/build/compile-validators.js index 084368313..934266617 100644 --- a/build/compile-validators.js +++ b/build/compile-validators.js @@ -21,7 +21,9 @@ import Authorization from '../json-schemas/authorization.json' assert { type: 'j import AuthorizationDelegatedGrant from '../json-schemas/authorization-delegated-grant.json' assert { type: 'json' }; import AuthorizationOwner from '../json-schemas/authorization-owner.json' assert { type: 'json' }; import Definitions from '../json-schemas/definitions.json' assert { type: 'json' }; -import EventsGet from '../json-schemas/events/events-get.json' assert { type: 'json' }; +import EventsFilter from '../json-schemas/interface-methods/events-filter.json' assert { type: 'json' }; +import EventsGet from '../json-schemas/interface-methods/events-get.json' assert { type: 'json' }; +import EventsQuery from '../json-schemas/interface-methods/events-query.json' assert { type: 'json' }; import GeneralJwk from '../json-schemas/jwk/general-jwk.json' assert { type: 'json' }; import GeneralJws from '../json-schemas/general-jws.json' assert { type: 'json' }; import GenericSignaturePayload from 
'../json-schemas/signature-payloads/generic-signature-payload.json' assert { type: 'json' }; @@ -54,7 +56,9 @@ const schemas = { RecordsQuery, RecordsWrite, RecordsWriteUnidentified, + EventsFilter, EventsGet, + EventsQuery, Definitions, GeneralJwk, GeneralJws, diff --git a/json-schemas/interface-methods/events-filter.json b/json-schemas/interface-methods/events-filter.json new file mode 100644 index 000000000..784347781 --- /dev/null +++ b/json-schemas/interface-methods/events-filter.json @@ -0,0 +1,112 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://identity.foundation/dwn/json-schemas/events-filter.json", + "type": "object", + "additionalProperties": false, + "minProperties": 1, + "properties": { + "interface": { + "enum": [ + "Permissions", + "Protocols", + "Records" + ], + "type": "string" + }, + "method":{ + "enum": [ + "Configure", + "Delete", + "Grant", + "Revoke", + "Write" + ], + "type": "string" + }, + "protocol": { + "type": "string" + }, + "protocolPath": { + "type": "string" + }, + "recipient": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/did" + }, + "contextId": { + "type": "string" + }, + "schema": { + "type": "string" + }, + "recordId": { + "type": "string" + }, + "parentId": { + "type": "string" + }, + "dataFormat": { + "type": "string" + }, + "dataSize": { + "$ref": "https://identity.foundation/dwn/json-schemas/number-range-filter.json" + }, + "dateCreated": { + "type": "object", + "minProperties": 1, + "additionalProperties": false, + "properties": { + "from": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/date-time" + }, + "to": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/date-time" + } + } + }, + "datePublished": { + "type": "object", + "minProperties": 1, + "additionalProperties": false, + "properties": { + "from": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/date-time" 
+ }, + "to": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/date-time" + } + } + }, + "dateUpdated": { + "type": "object", + "minProperties": 1, + "additionalProperties": false, + "properties": { + "from": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/date-time" + }, + "to": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/date-time" + } + } + } + }, + "dependencies": { + "datePublished": { + "oneOf": [ + { + "properties": { + "published": { + "enum": [true] + } + }, + "required": ["published"] + }, + { + "not": { + "required": ["published"] + } + } + ] + } + } +} \ No newline at end of file diff --git a/json-schemas/events/events-get.json b/json-schemas/interface-methods/events-get.json similarity index 97% rename from json-schemas/events/events-get.json rename to json-schemas/interface-methods/events-get.json index a79b67aaa..08592c64b 100644 --- a/json-schemas/events/events-get.json +++ b/json-schemas/interface-methods/events-get.json @@ -35,7 +35,7 @@ "messageTimestamp": { "type": "string" }, - "watermark": { + "cursor": { "type": "string" } } diff --git a/json-schemas/interface-methods/events-query.json b/json-schemas/interface-methods/events-query.json new file mode 100644 index 000000000..5b2e4cfde --- /dev/null +++ b/json-schemas/interface-methods/events-query.json @@ -0,0 +1,52 @@ +{ + "$schema": "http://json-schema.org/draft-07/schema#", + "$id": "https://identity.foundation/dwn/json-schemas/events-query.json", + "type": "object", + "additionalProperties": false, + "required": [ + "authorization", + "descriptor" + ], + "properties": { + "authorization": { + "$ref": "https://identity.foundation/dwn/json-schemas/authorization.json" + }, + "descriptor": { + "type": "object", + "additionalProperties": false, + "required": [ + "interface", + "method", + "messageTimestamp", + "filters" + ], + "properties": { + "interface": { + "enum": [ + "Events" + ], + "type": 
"string" + }, + "method": { + "enum": [ + "Query" + ], + "type": "string" + }, + "messageTimestamp": { + "$ref": "https://identity.foundation/dwn/json-schemas/defs.json#/definitions/date-time" + }, + "filters": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "https://identity.foundation/dwn/json-schemas/events-filter.json" + } + }, + "cursor": { + "type": "string" + } + } + } + } +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 2b831d330..427f49bed 100644 --- a/package-lock.json +++ b/package-lock.json @@ -18,7 +18,6 @@ "blockstore-core": "4.2.0", "cross-fetch": "4.0.0", "eciesjs": "0.4.5", - "flat": "5.0.2", "interface-blockstore": "5.2.3", "interface-store": "5.1.2", "ipfs-unixfs-exporter": "13.1.5", @@ -3181,6 +3180,7 @@ "version": "5.0.2", "resolved": "https://registry.npmjs.org/flat/-/flat-5.0.2.tgz", "integrity": "sha512-b6suED+5/3rTpUBdG1gupIl8MPFCAMA0QXwmljLhvCUKcUvdE4gWky9zpuGCcXHOsz4J9wPGNWq6OKpmIzz3hQ==", + "dev": true, "bin": { "flat": "cli.js" } diff --git a/package.json b/package.json index 77fc7142c..6f6c61525 100644 --- a/package.json +++ b/package.json @@ -70,7 +70,6 @@ "blockstore-core": "4.2.0", "cross-fetch": "4.0.0", "eciesjs": "0.4.5", - "flat": "5.0.2", "interface-blockstore": "5.2.3", "interface-store": "5.1.2", "ipfs-unixfs-exporter": "13.1.5", diff --git a/src/core/auth.ts b/src/core/auth.ts index 5d06b76bd..32da64c0e 100644 --- a/src/core/auth.ts +++ b/src/core/auth.ts @@ -38,6 +38,6 @@ export async function authorize(tenant: string, incomingMessage: { author: strin if (incomingMessage.author === tenant) { return; } else { - throw new DwnError(DwnErrorCode.AuthorizationUnknownAuthor, 'message failed authorization, permission grant check not yet implemented'); + throw new DwnError(DwnErrorCode.AuthorizationUnknownAuthor, 'message failed authorization. 
Only the tenant is authorized'); } } diff --git a/src/core/dwn-error.ts b/src/core/dwn-error.ts index e53999d53..a255b2c3f 100644 --- a/src/core/dwn-error.ts +++ b/src/core/dwn-error.ts @@ -37,6 +37,8 @@ export enum DwnErrorCode { GrantAuthorizationGrantNotYetActive = 'GrantAuthorizationGrantNotYetActive', HdKeyDerivationPathInvalid = 'HdKeyDerivationPathInvalid', JwsVerifySignatureUnsupportedCrv = 'JwsVerifySignatureUnsupportedCrv', + IndexInvalidSortProperty = 'IndexInvalidSortProperty', + IndexMissingIndexableProperty = 'IndexMissingIndexableProperty', JwsDecodePlainObjectPayloadInvalid = 'JwsDecodePlainObjectPayloadInvalid', MessageGetInvalidCid = 'MessageGetInvalidCid', ParseCidCodecNotSupported = 'ParseCidCodecNotSupported', @@ -79,6 +81,8 @@ export enum DwnErrorCode { ProtocolsQueryUnauthorized = 'ProtocolsQueryUnauthorized', RecordsDecryptNoMatchingKeyEncryptedFound = 'RecordsDecryptNoMatchingKeyEncryptedFound', RecordsDeleteAuthorizationFailed = 'RecordsDeleteAuthorizationFailed', + RecordsQueryCreateFilterPublishedSortInvalid = 'RecordsQueryCreateFilterPublishedSortInvalid', + RecordsQueryParseFilterPublishedSortInvalid = 'RecordsQueryParseFilterPublishedSortInvalid', RecordsGrantAuthorizationConditionPublicationProhibited = 'RecordsGrantAuthorizationConditionPublicationProhibited', RecordsGrantAuthorizationConditionPublicationRequired = 'RecordsGrantAuthorizationConditionPublicationRequired', RecordsGrantAuthorizationScopeContextIdMismatch = 'RecordsGrantAuthorizationScopeContextIdMismatch', diff --git a/src/core/protocol-authorization.ts b/src/core/protocol-authorization.ts index deef16ea8..911c3b6a5 100644 --- a/src/core/protocol-authorization.ts +++ b/src/core/protocol-authorization.ts @@ -1,4 +1,4 @@ -import type { Filter } from '../types/message-types.js'; +import type { Filter } from '../types/query-types.js'; import type { MessageStore } from '../types/message-store.js'; import type { RecordsDelete } from '../interfaces/records-delete.js'; import 
type { RecordsQuery } from '../interfaces/records-query.js'; diff --git a/src/dwn.ts b/src/dwn.ts index b3729e3f9..494165549 100644 --- a/src/dwn.ts +++ b/src/dwn.ts @@ -6,7 +6,7 @@ import type { MethodHandler } from './types/method-handler.js'; import type { Readable } from 'readable-stream'; import type { RecordsWriteHandlerOptions } from './handlers/records-write.js'; import type { TenantGate } from './core/tenant-gate.js'; -import type { EventsGetMessage, EventsGetReply } from './types/event-types.js'; +import type { EventsGetMessage, EventsGetReply, EventsQueryMessage, EventsQueryReply } from './types/event-types.js'; import type { GenericMessageReply, UnionMessageReply } from './core/message-reply.js'; import type { MessagesGetMessage, MessagesGetReply } from './types/messages-types.js'; import type { PermissionsGrantMessage, PermissionsRequestMessage, PermissionsRevokeMessage } from './types/permissions-types.js'; @@ -16,6 +16,7 @@ import type { RecordsDeleteMessage, RecordsQueryMessage, RecordsQueryReply, Reco import { AllowAllTenantGate } from './core/tenant-gate.js'; import { DidResolver } from './did/did-resolver.js'; import { EventsGetHandler } from './handlers/events-get.js'; +import { EventsQueryHandler } from './handlers/events-query.js'; import { Message } from './core/message.js'; import { messageReplyFromError } from './core/message-reply.js'; import { MessagesGetHandler } from './handlers/messages-get.js'; @@ -47,6 +48,7 @@ export class Dwn { this.methodHandlers = { [DwnInterfaceName.Events + DwnMethodName.Get] : new EventsGetHandler(this.didResolver, this.eventLog), + [DwnInterfaceName.Events + DwnMethodName.Query] : new EventsQueryHandler(this.didResolver, this.eventLog), [DwnInterfaceName.Messages + DwnMethodName.Get] : new MessagesGetHandler(this.didResolver, this.messageStore, this.dataStore), [DwnInterfaceName.Permissions + DwnMethodName.Grant] : new PermissionsGrantHandler( this.didResolver, this.messageStore, this.eventLog), @@ -95,6 
+97,7 @@ export class Dwn { * @param tenant The tenant DID to route the given message to. */ public async processMessage(tenant: string, rawMessage: EventsGetMessage): Promise; + public async processMessage(tenant: string, rawMessage: EventsQueryMessage): Promise; public async processMessage(tenant: string, rawMessage: MessagesGetMessage): Promise; public async processMessage(tenant: string, rawMessage: ProtocolsConfigureMessage): Promise; public async processMessage(tenant: string, rawMessage: ProtocolsQueryMessage): Promise; diff --git a/src/event-log/event-log-level.ts b/src/event-log/event-log-level.ts index e20330064..1617458a4 100644 --- a/src/event-log/event-log-level.ts +++ b/src/event-log/event-log-level.ts @@ -1,9 +1,11 @@ -import type { LevelWrapperBatchOperation } from '../store/level-wrapper.js'; import type { ULIDFactory } from 'ulidx'; -import type { Event, EventLog, GetEventsOptions } from '../types/event-log.js'; +import type { EventLog, GetEventsOptions } from '../types/event-log.js'; +import type { Filter, KeyValues } from '../types/query-types.js'; +import { createLevelDatabase } from '../store/level-wrapper.js'; +import { IndexLevel } from '../store/index-level.js'; import { monotonicFactory } from 'ulidx'; -import { createLevelDatabase, LevelWrapper } from '../store/level-wrapper.js'; +import { SortDirection } from '../types/query-types.js'; type EventLogLevelConfig = { /** @@ -15,101 +17,51 @@ type EventLogLevelConfig = { createLevelDatabase?: typeof createLevelDatabase, }; -const WATERMARKS_SUBLEVEL_NAME = 'watermarks'; -const CIDS_SUBLEVEL_NAME = 'cids'; - export class EventLogLevel implements EventLog { - config: EventLogLevelConfig; - db: LevelWrapper; ulidFactory: ULIDFactory; + index: IndexLevel; constructor(config?: EventLogLevelConfig) { - this.config = { + this.index = new IndexLevel({ location: 'EVENTLOG', createLevelDatabase, ...config, - }; - - this.db = new LevelWrapper({ - location : this.config.location!, - createLevelDatabase 
: this.config.createLevelDatabase, - valueEncoding : 'utf8', }); + this.ulidFactory = monotonicFactory(); } async open(): Promise { - return this.db.open(); + return this.index.open(); } async close(): Promise { - return this.db.close(); + return this.index.close(); } async clear(): Promise { - return this.db.clear(); + return this.index.clear(); } - async append(tenant: string, messageCid: string): Promise { - const tenantEventLog = await this.db.partition(tenant); - const watermarkLog = await tenantEventLog.partition(WATERMARKS_SUBLEVEL_NAME); - const cidLog = await tenantEventLog.partition(CIDS_SUBLEVEL_NAME); - + async append(tenant: string, messageCid: string, indexes: KeyValues): Promise { const watermark = this.ulidFactory(); - - await watermarkLog.put(watermark, messageCid); - await cidLog.put(messageCid, watermark); - - return watermark; + await this.index.put(tenant, messageCid, { ...indexes, watermark }); } - async getEvents(tenant: string, options?: GetEventsOptions): Promise { - const tenantEventLog = await this.db.partition(tenant); - const watermarkLog = await tenantEventLog.partition(WATERMARKS_SUBLEVEL_NAME); - const events: Array = []; - - for await (const [key, value] of watermarkLog.iterator(options)) { - const event = { watermark: key, messageCid: value }; - events.push(event); - } - - return events; + async queryEvents(tenant: string, filters: Filter[], watermark?: string): Promise { + return await this.index.query(tenant, filters, { sortProperty: 'watermark', cursor: watermark }); } - async deleteEventsByCid(tenant: string, cids: Array): Promise { - if (cids.length === 0) { - return 0; - } - - const tenantEventLog = await this.db.partition(tenant); - const cidLog = await tenantEventLog.partition(CIDS_SUBLEVEL_NAME); - - let ops: LevelWrapperBatchOperation[] = []; - const promises: Array> = []; - - for (const cid of cids) { - ops.push({ type: 'del', key: cid }); - - const promise = cidLog.get(cid).catch(e => e); - promises.push(promise); - } - 
- await cidLog.batch(ops); - - ops = []; - let numEventsDeleted = 0; + async getEvents(tenant: string, options?: GetEventsOptions): Promise { + return await this.index.query(tenant, [], { sortProperty: 'watermark', sortDirection: SortDirection.Ascending, cursor: options?.cursor }); + } - const watermarks: Array = await Promise.all(promises); - for (const watermark of watermarks) { - if (watermark) { - ops.push({ type: 'del', key: watermark }); - numEventsDeleted += 1; - } + async deleteEventsByCid(tenant: string, messageCids: Array): Promise { + const indexDeletePromises: Promise[] = []; + for (const messageCid of messageCids) { + indexDeletePromises.push(this.index.delete(tenant, messageCid)); } - const watermarkLog = await tenantEventLog.partition('watermarks'); - await watermarkLog.batch(ops); - - return numEventsDeleted; + await Promise.all(indexDeletePromises); } } \ No newline at end of file diff --git a/src/handlers/events-get.ts b/src/handlers/events-get.ts index 83bfff16c..054f9a353 100644 --- a/src/handlers/events-get.ts +++ b/src/handlers/events-get.ts @@ -29,11 +29,11 @@ export class EventsGetHandler implements MethodHandler { return messageReplyFromError(e, 401); } - // if watermark was provided in message, get all events _after_ the watermark. + // if a cursor was provided in message, get all events _after_ the cursor. // Otherwise, get all events. 
let options: GetEventsOptions | undefined; - if (message.descriptor.watermark) { - options = { gt: message.descriptor.watermark }; + if (message.descriptor.cursor) { + options = { cursor: message.descriptor.cursor }; } const events = await this.eventLog.getEvents(tenant, options); diff --git a/src/handlers/events-query.ts b/src/handlers/events-query.ts new file mode 100644 index 000000000..6a8ce9bf5 --- /dev/null +++ b/src/handlers/events-query.ts @@ -0,0 +1,43 @@ +import type { DidResolver } from '../did/did-resolver.js'; +import type { EventLog } from '../types/event-log.js'; +import type { MethodHandler } from '../types/method-handler.js'; +import type { EventsQueryMessage, EventsQueryReply } from '../types/event-types.js'; + +import { EventsQuery } from '../interfaces/events-query.js'; +import { messageReplyFromError } from '../core/message-reply.js'; +import { authenticate, authorize } from '../core/auth.js'; + + +export class EventsQueryHandler implements MethodHandler { + + constructor(private didResolver: DidResolver, private eventLog: EventLog) { } + + public async handle({ + tenant, + message + }: {tenant: string, message: EventsQueryMessage}): Promise { + let eventsQuery: EventsQuery; + + try { + eventsQuery = await EventsQuery.parse(message); + } catch (e) { + return messageReplyFromError(e, 400); + } + + try { + await authenticate(message.authorization, this.didResolver); + await authorize(tenant, eventsQuery); + } catch (e) { + return messageReplyFromError(e, 401); + } + + const { filters, cursor } = eventsQuery.message.descriptor; + const logFilters = EventsQuery.convertFilters(filters); + const events = await this.eventLog.queryEvents(tenant, logFilters, cursor); + + return { + status: { code: 200, detail: 'OK' }, + events + }; + } +} diff --git a/src/handlers/permissions-grant.ts b/src/handlers/permissions-grant.ts index 3bcc834eb..e8c401e02 100644 --- a/src/handlers/permissions-grant.ts +++ b/src/handlers/permissions-grant.ts @@ -1,6 +1,7 @@ 
import type { DidResolver } from '../did/did-resolver.js'; import type { EventLog } from '../types//event-log.js'; import type { GenericMessageReply } from '../core/message-reply.js'; +import type { KeyValues } from '../types/query-types.js'; import type { MessageStore } from '../types//message-store.js'; import type { MethodHandler } from '../types/method-handler.js'; import type { PermissionsGrantMessage } from '../types/permissions-types.js'; @@ -9,6 +10,7 @@ import { authenticate } from '../core/auth.js'; import { Message } from '../core/message.js'; import { messageReplyFromError } from '../core/message-reply.js'; import { PermissionsGrant } from '../interfaces/permissions-grant.js'; +import { removeUndefinedProperties } from '../utils/object.js'; export class PermissionsGrantHandler implements MethodHandler { constructor(private didResolver: DidResolver, private messageStore: MessageStore, private eventLog: EventLog) { } @@ -31,22 +33,32 @@ export class PermissionsGrantHandler implements MethodHandler { return messageReplyFromError(e, 401); } - const { scope, conditions, ...propertiesToIndex } = message.descriptor; - const indexes: { [key: string]: string | boolean } = { - author: permissionsGrant.author!, - ...propertiesToIndex, - }; + const indexes = PermissionsGrantHandler.constructIndexes(permissionsGrant); // If we have not seen this message before, store it const messageCid = await Message.getCid(message); const existingMessage = await this.messageStore.get(tenant, messageCid); if (existingMessage === undefined) { await this.messageStore.put(tenant, message, indexes); - await this.eventLog.append(tenant, messageCid); + await this.eventLog.append(tenant, messageCid, indexes); } return { status: { code: 202, detail: 'Accepted' } }; } + + static constructIndexes( + permissionsGrant: PermissionsGrant, + ): KeyValues { + const message = permissionsGrant.message; + const { scope, conditions, ...propertiesToIndex } = message.descriptor; + const indexes: 
KeyValues = { + author: permissionsGrant.author!, + ...propertiesToIndex, + }; + + removeUndefinedProperties(indexes); + return indexes; + } } \ No newline at end of file diff --git a/src/handlers/permissions-request.ts b/src/handlers/permissions-request.ts index 2ec1244b0..07d9e0221 100644 --- a/src/handlers/permissions-request.ts +++ b/src/handlers/permissions-request.ts @@ -44,7 +44,7 @@ export class PermissionsRequestHandler implements MethodHandler { const existingMessage = await this.messageStore.get(tenant, messageCid); if (existingMessage === undefined) { await this.messageStore.put(tenant, message, indexes); - await this.eventLog.append(tenant, messageCid); + await this.eventLog.append(tenant, messageCid, indexes); } return { diff --git a/src/handlers/permissions-revoke.ts b/src/handlers/permissions-revoke.ts index 7ed17f9d6..83e7a73dd 100644 --- a/src/handlers/permissions-revoke.ts +++ b/src/handlers/permissions-revoke.ts @@ -1,6 +1,7 @@ import type { DidResolver } from '../did/did-resolver.js'; import type { EventLog } from '../types/event-log.js'; import type { GenericMessageReply } from '../core/message-reply.js'; +import type { KeyValues } from '../types/query-types.js'; import type { MessageStore } from '../types/message-store.js'; import type { MethodHandler } from '../types/method-handler.js'; import type { PermissionsGrantMessage, PermissionsRevokeMessage } from '../types/permissions-types.js'; @@ -84,13 +85,9 @@ export class PermissionsRevokeHandler implements MethodHandler { } // Store incoming PermissionsRevoke - const indexes: { [key: string]: string } = { - interface : DwnInterfaceName.Permissions, - method : DwnMethodName.Revoke, - permissionsGrantId : message.descriptor.permissionsGrantId, - }; + const indexes = PermissionsRevokeHandler.constructIndexes(permissionsRevoke); await this.messageStore.put(tenant, message, indexes); - await this.eventLog.append(tenant, await Message.getCid(message)); + await this.eventLog.append(tenant, await 
Message.getCid(message), indexes); // Delete existing revokes which are all newer than the incoming message const removedRevokeCids: string[] = []; @@ -118,4 +115,18 @@ export class PermissionsRevokeHandler implements MethodHandler { status: { code: 202, detail: 'Accepted' } }; } + + static constructIndexes( + permissionsRevoke: PermissionsRevoke, + ): KeyValues { + const { descriptor } = permissionsRevoke.message; + + return { + interface : DwnInterfaceName.Permissions, + method : DwnMethodName.Revoke, + author : permissionsRevoke.author!, + messageTimestamp : descriptor.messageTimestamp, + permissionsGrantId : descriptor.permissionsGrantId, + }; + } } \ No newline at end of file diff --git a/src/handlers/protocols-configure.ts b/src/handlers/protocols-configure.ts index 32864707f..e57897aac 100644 --- a/src/handlers/protocols-configure.ts +++ b/src/handlers/protocols-configure.ts @@ -56,11 +56,11 @@ export class ProtocolsConfigureHandler implements MethodHandler { // write the incoming message to DB if incoming message is newest let messageReply: GenericMessageReply; if (incomingMessageIsNewest) { - const indexes = ProtocolsConfigureHandler.constructProtocolsConfigureIndexes(protocolsConfigure); + const indexes = ProtocolsConfigureHandler.constructIndexes(protocolsConfigure); const messageCid = await Message.getCid(message); await this.messageStore.put(tenant, message, indexes); - await this.eventLog.append(tenant, messageCid); + await this.eventLog.append(tenant, messageCid, indexes); messageReply = { status: { code: 202, detail: 'Accepted' } @@ -87,7 +87,7 @@ export class ProtocolsConfigureHandler implements MethodHandler { return messageReply; }; - private static constructProtocolsConfigureIndexes(protocolsConfigure: ProtocolsConfigure): { [key: string]: string | boolean } { + static constructIndexes(protocolsConfigure: ProtocolsConfigure): { [key: string]: string | boolean } { // strip out `definition` as it is not indexable const { definition, 
...propertiesToIndex } = protocolsConfigure.message.descriptor; const { author } = protocolsConfigure; @@ -96,7 +96,7 @@ export class ProtocolsConfigureHandler implements MethodHandler { ...propertiesToIndex, author : author!, protocol : definition.protocol, // retain protocol url from `definition`, - published : definition.published + published : definition.published // retain published state from definition }; return indexes; diff --git a/src/handlers/records-delete.ts b/src/handlers/records-delete.ts index 7d22b1902..c92dba0fa 100644 --- a/src/handlers/records-delete.ts +++ b/src/handlers/records-delete.ts @@ -2,6 +2,7 @@ import type { DataStore } from '../types/data-store.js'; import type { DidResolver } from '../did/did-resolver.js'; import type { EventLog } from '../types/event-log.js'; import type { GenericMessageReply } from '../core/message-reply.js'; +import type { KeyValues } from '../types/query-types.js'; import type { MessageStore } from '../types//message-store.js'; import type { MethodHandler } from '../types/method-handler.js'; import type { RecordsDeleteMessage, RecordsWriteMessage } from '../types/records-types.js'; @@ -12,6 +13,7 @@ import { messageReplyFromError } from '../core/message-reply.js'; import { ProtocolAuthorization } from '../core/protocol-authorization.js'; import { RecordsDelete } from '../interfaces/records-delete.js'; import { RecordsWrite } from '../interfaces/records-write.js'; +import { removeUndefinedProperties } from '../utils/object.js'; import { StorageController } from '../store/storage-controller.js'; import { DwnError, DwnErrorCode } from '../core/dwn-error.js'; import { DwnInterfaceName, DwnMethodName } from '../enums/dwn-interface-method.js'; @@ -83,11 +85,12 @@ export class RecordsDeleteHandler implements MethodHandler { return messageReplyFromError(e, 401); } - const indexes = await constructIndexes(tenant, recordsDelete); + const recordsWrite = await RecordsWrite.getInitialWrite(existingMessages); + const indexes 
= RecordsDeleteHandler.constructIndexes(recordsDelete, recordsWrite); await this.messageStore.put(tenant, message, indexes); const messageCid = await Message.getCid(message); - await this.eventLog.append(tenant, messageCid); + await this.eventLog.append(tenant, messageCid, indexes); // delete all existing messages that are not newest, except for the initial write await StorageController.deleteAllOlderMessagesButKeepInitialWrite( @@ -117,21 +120,30 @@ export class RecordsDeleteHandler implements MethodHandler { ); } } -} - -export async function constructIndexes(tenant: string, recordsDelete: RecordsDelete): Promise> { - const message = recordsDelete.message; - const descriptor = { ...message.descriptor }; - - // NOTE: the "trick" not may not be apparent on how a query is able to omit deleted records: - // we intentionally not add index for `isLatestBaseState` at all, this means that upon a successful delete, - // no messages with the record ID will match any query because queries by design filter by `isLatestBaseState = true`, - // `isLatestBaseState` for the initial delete would have been toggled to `false` - const indexes: Record = { - // isLatestBaseState : "true", // intentionally showing that this index is omitted - author: recordsDelete.author, - ...descriptor - }; - return indexes; -} + /** + * Indexed properties needed for MessageStore indexing. + */ + static constructIndexes(recordsDelete: RecordsDelete, recordsWrite: RecordsWriteMessage): KeyValues { + const message = recordsDelete.message; + const descriptor = { ...message.descriptor }; + + // we add the immutable properties from the initial RecordsWrite message in order to use them when querying relevant deletes. 
+ const { protocol, protocolPath, recipient, schema, parentId, dataFormat, dateCreated } = recordsWrite.descriptor; + + // NOTE: the "trick" not may not be apparent on how a query is able to omit deleted records: + // we intentionally not add index for `isLatestBaseState` at all, this means that upon a successful delete, + // no messages with the record ID will match any query because queries by design filter by `isLatestBaseState = true`, + // `isLatestBaseState` for the initial delete would have been toggled to `false` + const indexes: { [key:string]: string | undefined } = { + // isLatestBaseState : "true", // intentionally showing that this index is omitted + protocol, protocolPath, recipient, schema, parentId, dataFormat, dateCreated, + contextId : recordsWrite.contextId, + author : recordsDelete.author, + ...descriptor + }; + removeUndefinedProperties(indexes); + + return indexes as KeyValues; + } +}; \ No newline at end of file diff --git a/src/handlers/records-query.ts b/src/handlers/records-query.ts index 9a6b417c5..9de192b90 100644 --- a/src/handlers/records-query.ts +++ b/src/handlers/records-query.ts @@ -1,8 +1,9 @@ import type { DataStore } from '../types/data-store.js'; import type { DidResolver } from '../did/did-resolver.js'; +import type { Filter } from '../types/query-types.js'; import type { MessageStore } from '../types//message-store.js'; import type { MethodHandler } from '../types/method-handler.js'; -import type { Filter, GenericMessage, MessageSort } from '../types/message-types.js'; +import type { GenericMessage, MessageSort } from '../types/message-types.js'; import type { RecordsQueryMessage, RecordsQueryReply, RecordsWriteMessageWithOptionalEncodedData } from '../types/records-types.js'; import { authenticate } from '../core/auth.js'; @@ -11,7 +12,7 @@ import { messageReplyFromError } from '../core/message-reply.js'; import { ProtocolAuthorization } from '../core/protocol-authorization.js'; import { Records } from '../utils/records.js'; 
import { RecordsQuery } from '../interfaces/records-query.js'; -import { SortOrder } from '../types/message-types.js'; +import { SortDirection } from '../types/query-types.js'; import { DwnInterfaceName, DwnMethodName } from '../enums/dwn-interface-method.js'; export class RecordsQueryHandler implements MethodHandler { @@ -77,15 +78,15 @@ export class RecordsQueryHandler implements MethodHandler { private convertDateSort(dateSort?: DateSort): MessageSort { switch (dateSort) { case DateSort.CreatedAscending: - return { dateCreated: SortOrder.Ascending }; + return { dateCreated: SortDirection.Ascending }; case DateSort.CreatedDescending: - return { dateCreated: SortOrder.Descending }; + return { dateCreated: SortDirection.Descending }; case DateSort.PublishedAscending: - return { datePublished: SortOrder.Ascending }; + return { datePublished: SortDirection.Ascending }; case DateSort.PublishedDescending: - return { datePublished: SortOrder.Descending }; + return { datePublished: SortDirection.Descending }; default: - return { dateCreated: SortOrder.Ascending }; + return { dateCreated: SortDirection.Ascending }; } } @@ -97,10 +98,9 @@ export class RecordsQueryHandler implements MethodHandler { recordsQuery: RecordsQuery ): Promise<{ messages: GenericMessage[], cursor?: string }> { const { dateSort, filter, pagination } = recordsQuery.message.descriptor; - // fetch all published records matching the query const queryFilter = { - ...Records.convertFilter(filter), + ...Records.convertFilter(filter, dateSort), interface : DwnInterfaceName.Records, method : DwnMethodName.Write, isLatestBaseState : true @@ -134,7 +134,6 @@ export class RecordsQueryHandler implements MethodHandler { ): Promise<{ messages: GenericMessage[], cursor?: string }> { const { dateSort, pagination } = recordsQuery.message.descriptor; const filters = []; - if (RecordsQueryHandler.filterIncludesPublishedRecords(recordsQuery)) { filters.push(RecordsQueryHandler.buildPublishedRecordsFilter(recordsQuery)); 
} @@ -169,9 +168,10 @@ export class RecordsQueryHandler implements MethodHandler { } private static buildPublishedRecordsFilter(recordsQuery: RecordsQuery): Filter { + const { dateSort, filter } = recordsQuery.message.descriptor; // fetch all published records matching the query return { - ...Records.convertFilter(recordsQuery.message.descriptor.filter), + ...Records.convertFilter(filter, dateSort), interface : DwnInterfaceName.Records, method : DwnMethodName.Write, published : true, @@ -183,9 +183,10 @@ export class RecordsQueryHandler implements MethodHandler { * Creates a filter for unpublished records that are intended for the query author (where `recipient` is the author). */ private static buildUnpublishedRecordsForQueryAuthorFilter(recordsQuery: RecordsQuery): Filter { + const { dateSort, filter } = recordsQuery.message.descriptor; // include records where recipient is query author return { - ...Records.convertFilter(recordsQuery.message.descriptor.filter), + ...Records.convertFilter(filter, dateSort), interface : DwnInterfaceName.Records, method : DwnMethodName.Write, recipient : recordsQuery.author!, @@ -199,8 +200,9 @@ export class RecordsQueryHandler implements MethodHandler { * Validation that `protocol` and other required protocol-related fields occurs before this method. */ private static buildUnpublishedProtocolAuthorizedRecordsFilter(recordsQuery: RecordsQuery): Filter { + const { dateSort, filter } = recordsQuery.message.descriptor; return { - ...Records.convertFilter(recordsQuery.message.descriptor.filter), + ...Records.convertFilter(filter, dateSort), interface : DwnInterfaceName.Records, method : DwnMethodName.Write, isLatestBaseState : true, @@ -212,9 +214,10 @@ export class RecordsQueryHandler implements MethodHandler { * Creates a filter for only unpublished records where the author is the same as the query author. 
*/ private static buildUnpublishedRecordsByQueryAuthorFilter(recordsQuery: RecordsQuery): Filter { + const { dateSort, filter } = recordsQuery.message.descriptor; // include records where author is the same as the query author return { - ...Records.convertFilter(recordsQuery.message.descriptor.filter), + ...Records.convertFilter(filter, dateSort), author : recordsQuery.author!, interface : DwnInterfaceName.Records, method : DwnMethodName.Write, diff --git a/src/handlers/records-read.ts b/src/handlers/records-read.ts index ee1e9b003..1f65f8c2d 100644 --- a/src/handlers/records-read.ts +++ b/src/handlers/records-read.ts @@ -1,6 +1,6 @@ import type { DataStore } from '../types/data-store.js'; import type { DidResolver } from '../did/did-resolver.js'; -import type { Filter } from '../types/message-types.js'; +import type { Filter } from '../types/query-types.js'; import type { MessageStore } from '../types//message-store.js'; import type { MethodHandler } from '../types/method-handler.js'; import type { RecordsReadMessage, RecordsReadReply, RecordsWriteMessageWithOptionalEncodedData } from '../types/records-types.js'; diff --git a/src/handlers/records-write.ts b/src/handlers/records-write.ts index be6cc6a0f..0a686f180 100644 --- a/src/handlers/records-write.ts +++ b/src/handlers/records-write.ts @@ -131,7 +131,7 @@ export class RecordsWriteHandler implements MethodHandler { } await this.messageStore.put(tenant, messageWithOptionalEncodedData, indexes); - await this.eventLog.append(tenant, await Message.getCid(message)); + await this.eventLog.append(tenant, await Message.getCid(message), indexes); const messageReply = { status: { code: 202, detail: 'Accepted' } diff --git a/src/index.ts b/src/index.ts index d7756a7ec..9e464cbb9 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,9 +1,10 @@ // export everything that we want to be consumable export type { DwnConfig } from './dwn.js'; export type { DidMethodResolver, DwnServiceEndpoint, ServiceEndpoint, DidDocument, 
DidResolutionResult, DidResolutionMetadata, DidDocumentMetadata, VerificationMethod } from './types/did-types.js'; -export type { EventLog, Event, GetEventsOptions } from './types/event-log.js'; -export type { EventsGetMessage, EventsGetReply } from './types/event-types.js'; -export type { Filter, GenericMessage, MessageSort, Pagination } from './types/message-types.js'; +export type { EventLog, GetEventsOptions } from './types/event-log.js'; +export type { EventsGetMessage, EventsGetReply, EventsQueryMessage, EventsQueryReply } from './types/event-types.js'; +export type { Filter } from './types/query-types.js'; +export type { GenericMessage, MessageSort, Pagination } from './types/message-types.js'; export type { MessagesGetMessage, MessagesGetReply } from './types/messages-types.js'; export type { PermissionConditions, PermissionScope, PermissionsGrantDescriptor } from './types/permissions-grant-descriptor.js'; export type { PermissionsGrantMessage, PermissionsRequestDescriptor, PermissionsRequestMessage, PermissionsRevokeDescriptor, PermissionsRevokeMessage } from './types/permissions-types.js'; @@ -47,7 +48,7 @@ export { RecordsDelete, RecordsDeleteOptions } from './interfaces/records-delete export { RecordsRead, RecordsReadOptions } from './interfaces/records-read.js'; export { Secp256k1 } from './utils/secp256k1.js'; export { Signer } from './types/signer.js'; -export { SortOrder } from './types/message-types.js'; +export { SortDirection } from './types/query-types.js'; export { Time } from './utils/time.js'; // store interfaces diff --git a/src/interfaces/events-get.ts b/src/interfaces/events-get.ts index 8f5342ef8..9a23d469f 100644 --- a/src/interfaces/events-get.ts +++ b/src/interfaces/events-get.ts @@ -7,7 +7,7 @@ import { Time } from '../utils/time.js'; import { DwnInterfaceName, DwnMethodName } from '../enums/dwn-interface-method.js'; export type EventsGetOptions = { - watermark?: string; + cursor?: string; signer: Signer; messageTimestamp?: string; }; 
@@ -29,8 +29,8 @@ export class EventsGet extends AbstractMessage { messageTimestamp : options.messageTimestamp ?? Time.getCurrentTimestamp(), }; - if (options.watermark) { - descriptor.watermark = options.watermark; + if (options.cursor) { + descriptor.cursor = options.cursor; } const authorization = await Message.createAuthorization({ descriptor, signer: options.signer }); diff --git a/src/interfaces/events-query.ts b/src/interfaces/events-query.ts new file mode 100644 index 000000000..52ec9688f --- /dev/null +++ b/src/interfaces/events-query.ts @@ -0,0 +1,124 @@ +import type { Filter } from '../types/query-types.js'; +import type { ProtocolsQueryFilter } from '../types/protocols-types.js'; +import type { Signer } from '../types/signer.js'; +import type { EventsMessageFilter, EventsQueryDescriptor, EventsQueryFilter, EventsQueryMessage, EventsRecordsFilter } from '../types/event-types.js'; + +import { AbstractMessage } from '../core/abstract-message.js'; +import { FilterUtility } from '../utils/filter.js'; +import { Message } from '../core/message.js'; +import { ProtocolsQuery } from '../interfaces/protocols-query.js'; +import { Records } from '../utils/records.js'; +import { removeUndefinedProperties } from '../utils/object.js'; +import { Time } from '../utils/time.js'; +import { DwnInterfaceName, DwnMethodName } from '../enums/dwn-interface-method.js'; + +export type EventsQueryOptions = { + signer: Signer; + filters: EventsQueryFilter[]; + cursor?: string; + messageTimestamp?: string; +}; + +export class EventsQuery extends AbstractMessage{ + + public static async parse(message: EventsQueryMessage): Promise { + Message.validateJsonSchema(message); + await Message.validateMessageSignatureIntegrity(message.authorization.signature, message.descriptor); + + return new EventsQuery(message); + } + + public static async create(options: EventsQueryOptions): Promise { + const descriptor: EventsQueryDescriptor = { + interface : DwnInterfaceName.Events, + method : 
DwnMethodName.Query, + filters : this.normalizeFilters(options.filters), + messageTimestamp : options.messageTimestamp ?? Time.getCurrentTimestamp(), + cursor : options.cursor, + }; + + removeUndefinedProperties(descriptor); + + const authorization = await Message.createAuthorization({ descriptor, signer: options.signer }); + const message = { descriptor, authorization }; + + Message.validateJsonSchema(message); + + return new EventsQuery(message); + } + + private static normalizeFilters(filters: EventsQueryFilter[]): EventsQueryFilter[] { + + const eventsQueryFilters: EventsQueryFilter[] = []; + + // normalize each filter individually by the type of filter it is. + for (const filter of filters) { + if (this.isMessagesFilter(filter)) { + eventsQueryFilters.push(filter); + } else if (this.isRecordsFilter(filter)) { + eventsQueryFilters.push(Records.normalizeFilter(filter)); + } else if (this.isProtocolFilter(filter)) { + const protocolFilter = ProtocolsQuery.normalizeFilter(filter); + eventsQueryFilters.push(protocolFilter!); + } + } + + return eventsQueryFilters; + } + + + /** + * Converts an incoming array of EventsFilter into an array of Filter usable by EventLog. + * + * @param filters An array of EventsFilter + * @returns {Filter[]} an array of generic Filter able to be used when querying. + */ + public static convertFilters(filters: EventsQueryFilter[]): Filter[] { + + const eventsQueryFilters: Filter[] = []; + + // normalize each filter individually by the type of filter it is. 
+ for (const filter of filters) { + if (this.isMessagesFilter(filter)) { + eventsQueryFilters.push(this.convertFilter(filter)); + } else if (this.isRecordsFilter(filter)) { + eventsQueryFilters.push(Records.convertFilter(filter)); + } else if (this.isProtocolFilter(filter)) { + eventsQueryFilters.push(filter); + } + } + + return eventsQueryFilters; + } + + private static convertFilter(filter: EventsMessageFilter): Filter { + const filterCopy = { ...filter } as Filter; + + const { dateUpdated } = filter; + const messageTimestampFilter = dateUpdated ? FilterUtility.convertRangeCriterion(dateUpdated) : undefined; + if (messageTimestampFilter) { + filterCopy.messageTimestamp = messageTimestampFilter; + delete filterCopy.dateUpdated; + } + return filterCopy as Filter; + } + + private static isMessagesFilter(filter: EventsQueryFilter): filter is EventsMessageFilter { + return 'method' in filter || 'interface' in filter || 'dateUpdated' in filter || 'author' in filter; + } + + private static isRecordsFilter(filter: EventsQueryFilter): filter is EventsRecordsFilter { + return 'dateCreated' in filter || + 'dataFormat' in filter || + 'dataSize' in filter || + 'parentId' in filter || + 'recordId' in filter || + 'schema' in filter || + ('protocolPath' in filter && 'protocol' in filter) || + 'recipient' in filter; + } + + private static isProtocolFilter(filter: EventsQueryFilter): filter is ProtocolsQueryFilter { + return 'protocol' in filter; + } +} \ No newline at end of file diff --git a/src/interfaces/protocols-query.ts b/src/interfaces/protocols-query.ts index 546279467..1aaa758bf 100644 --- a/src/interfaces/protocols-query.ts +++ b/src/interfaces/protocols-query.ts @@ -65,7 +65,7 @@ export class ProtocolsQuery extends AbstractMessage { return protocolsQuery; } - private static normalizeFilter(filter: ProtocolsQueryFilter | undefined): ProtocolsQueryFilter | undefined { + static normalizeFilter(filter: ProtocolsQueryFilter | undefined): ProtocolsQueryFilter | undefined { 
if (filter === undefined) { return undefined; } diff --git a/src/interfaces/records-query.ts b/src/interfaces/records-query.ts index 2ea600bb9..776d5cc4a 100644 --- a/src/interfaces/records-query.ts +++ b/src/interfaces/records-query.ts @@ -1,9 +1,10 @@ import type { DelegatedGrantMessage } from '../types/delegated-grant-message.js'; import type { Pagination } from '../types/message-types.js'; import type { Signer } from '../types/signer.js'; -import type { DateSort, RecordsFilter, RecordsQueryDescriptor, RecordsQueryMessage } from '../types/records-types.js'; +import type { RecordsFilter, RecordsQueryDescriptor, RecordsQueryMessage } from '../types/records-types.js'; import { AbstractMessage } from '../core/abstract-message.js'; +import { DateSort } from '../types/records-types.js'; import { Message } from '../core/message.js'; import { Records } from '../utils/records.js'; import { removeUndefinedProperties } from '../utils/object.js'; @@ -32,6 +33,16 @@ export type RecordsQueryOptions = { export class RecordsQuery extends AbstractMessage { public static async parse(message: RecordsQueryMessage): Promise { + + if (message.descriptor.filter.published === false) { + if (message.descriptor.dateSort === DateSort.PublishedAscending || message.descriptor.dateSort === DateSort.PublishedDescending) { + throw new DwnError( + DwnErrorCode.RecordsQueryParseFilterPublishedSortInvalid, + `queries must not filter for \`published:false\` and sort by ${message.descriptor.dateSort}` + ); + } + } + let signaturePayload; if (message.authorization !== undefined) { signaturePayload = await Message.validateMessageSignatureIntegrity(message.authorization.signature, message.descriptor); @@ -47,6 +58,7 @@ export class RecordsQuery extends AbstractMessage { ); } } + if (message.descriptor.filter.protocol !== undefined) { validateProtocolUrlNormalized(message.descriptor.filter.protocol); } @@ -69,6 +81,15 @@ export class RecordsQuery extends AbstractMessage { pagination : 
options.pagination, }; + if (options.filter.published === false) { + if (options.dateSort === DateSort.PublishedAscending || options.dateSort === DateSort.PublishedDescending) { + throw new DwnError( + DwnErrorCode.RecordsQueryCreateFilterPublishedSortInvalid, + `queries must not filter for \`published:false\` and sort by ${options.dateSort}` + ); + } + } + // delete all descriptor properties that are `undefined` else the code will encounter the following IPLD issue when attempting to generate CID: // Error: `undefined` is not supported by the IPLD Data Model and cannot be encoded removeUndefinedProperties(descriptor); diff --git a/src/interfaces/records-write.ts b/src/interfaces/records-write.ts index ea08bb026..42ba0b5ec 100644 --- a/src/interfaces/records-write.ts +++ b/src/interfaces/records-write.ts @@ -1,5 +1,6 @@ import type { DelegatedGrantMessage } from '../types/delegated-grant-message.js'; import type { GeneralJws } from '../types/jws-types.js'; +import type { KeyValues } from '../types/query-types.js'; import type { MessageInterface } from '../types/message-interface.js'; import type { MessageStore } from '../types/message-store.js'; import type { PublicJwk } from '../types/jose-types.js'; @@ -690,16 +691,16 @@ export class RecordsWrite implements MessageInterface { public async constructRecordsWriteIndexes( isLatestBaseState: boolean - ): Promise> { + ): Promise { const message = this.message; const descriptor = { ...message.descriptor }; delete descriptor.published; // handle `published` specifically further down - const indexes: Record = { + const indexes: KeyValues = { ...descriptor, isLatestBaseState, published : !!message.descriptor.published, - author : this.author, + author : this.author!, //author will not be undefined when indexes are constructed as it's been authorized recordId : message.recordId, entryId : await RecordsWrite.getEntryId(this.author, this.message.descriptor) }; diff --git a/src/store/index-level.ts b/src/store/index-level.ts 
index 86f79cee0..696e67e39 100644 --- a/src/store/index-level.ts +++ b/src/store/index-level.ts @@ -1,213 +1,456 @@ -import type { Filter, RangeFilter } from '../types/message-types.js'; -import type { LevelWrapperBatchOperation, LevelWrapperIteratorOptions } from './level-wrapper.js'; +import type { EqualFilter, Filter, KeyValues, QueryOptions, RangeFilter } from '../types/query-types.js'; +import type { LevelWrapperBatchOperation, LevelWrapperIteratorOptions, } from './level-wrapper.js'; -import { executeUnlessAborted } from '../utils/abort.js'; -import { flatten } from '../utils/object.js'; +import { isEmptyObject } from '../utils/object.js'; +import { lexicographicalCompare } from '../utils/string.js'; +import { SortDirection } from '../types/query-types.js'; import { createLevelDatabase, LevelWrapper } from './level-wrapper.js'; +import { DwnError, DwnErrorCode } from '../core/dwn-error.js'; +import { FilterSelector, FilterUtility } from '../utils/filter.js'; + +type IndexLevelConfig = { + location?: string, + createLevelDatabase?: typeof createLevelDatabase +}; + +type IndexedItem = { itemId: string, indexes: KeyValues }; + +const INDEX_SUBLEVEL_NAME = 'index'; export interface IndexLevelOptions { signal?: AbortSignal; } /** - * A LevelDB implementation for indexing the messages stored in the DWN. + * A LevelDB implementation for indexing the messages and events stored in the DWN. 
*/ export class IndexLevel { - config: IndexLevelConfig; - db: LevelWrapper; + config: IndexLevelConfig; constructor(config: IndexLevelConfig) { this.config = { createLevelDatabase, - ...config + ...config, }; - this.db = new LevelWrapper({ ...this.config, valueEncoding: 'utf8' }); + this.db = new LevelWrapper({ + location : this.config.location!, + createLevelDatabase : this.config.createLevelDatabase, + keyEncoding : 'utf8' + }); } async open(): Promise { - return this.db.open(); + await this.db.open(); } async close(): Promise { - return this.db.close(); + await this.db.close(); + } + + /** + * deletes everything in the underlying index db. + */ + async clear(): Promise { + await this.db.clear(); } /** - * Adds indexes for a specific data/object/content. - * @param dataId ID of the data/object/content being indexed. + * Put an item into the index using information that will allow it to be queried for. + * + * @param tenant + * @param itemId a unique ID that represents the item being indexed, this is also used as the cursor value in a query. + * @param indexes - (key-value pairs) to be included as part of indexing this item. Must include at least one indexing property. + * @param options IndexLevelOptions that include an AbortSignal. */ async put( tenant: string, - dataId: string, - indexes: { [property: string]: unknown }, + itemId: string, + indexes: KeyValues, options?: IndexLevelOptions ): Promise { - const partition = await executeUnlessAborted(this.db.partition(tenant), options?.signal); - indexes = flatten(indexes); - - const operations: LevelWrapperBatchOperation[] = [ ]; - - // create an index entry for each property in the `indexes` - for (const propertyName in indexes) { - const propertyValue = indexes[propertyName]; - - // NOTE: appending data ID after (property + value) serves two purposes: - // 1. creates a unique entry of the property-value pair per data/object - // 2. 
when we need to delete all indexes of a given data ID (`delete()`), we can reconstruct the index keys and remove the indexes efficiently - // - // example keys (\u0000 is just shown for illustration purpose because it is the delimiter used to join the string segments below): - // 'interface\u0000"Records"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' - // 'method\u0000"Write"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' - // 'schema\u0000"http://ud4kyzon6ugxn64boz7v"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' - // 'dataCid\u0000"bafkreic3ie3cxsblp46vn3ofumdnwiqqk4d5ah7uqgpcn6xps4skfvagze"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' - // 'dateCreated\u0000"2023-05-25T18:23:29.425008Z"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' - const key = this.join(propertyName, this.encodeValue(propertyValue), dataId); - operations.push({ type: 'put', key, value: dataId }); + + // ensure we have something valid to index + if (isEmptyObject(indexes)) { + throw new DwnError(DwnErrorCode.IndexMissingIndexableProperty, 'Index must include at least one valid indexable property'); } - // create a reverse lookup entry for data ID -> its indexes - // this is for indexes deletion (`delete()`): so that given the data ID, we are able to delete all its indexes - // we can consider putting this info in a different data partition if this ever becomes more complex/confusing - operations.push({ type: 'put', key: `__${dataId}__indexes`, value: JSON.stringify(indexes) }); + const indexOps: LevelWrapperBatchOperation[] = []; + + // create an index entry for each property index + // these indexes are all sortable lexicographically. + for (const indexName in indexes) { + const indexValue = indexes[indexName]; + // the key is indexValue followed by the itemId as a tie-breaker. 
+ // for example if the property is messageTimestamp the key would look like: + // '"2023-05-25T18:23:29.425008Z"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' + const key = IndexLevel.keySegmentJoin(IndexLevel.encodeValue(indexValue), itemId); + const item: IndexedItem = { itemId, indexes }; + + const partitionOperation = await this.createOperationForIndexPartition( + tenant, + indexName, + { type: 'put', key, value: JSON.stringify(item) } + ); + indexOps.push(partitionOperation); + } + + // create a reverse lookup for the sortedIndex values. This is used during deletion and cursor starting point lookup. + const partitionOperation = await this.createOperationForIndexesLookupPartition( + tenant, + { type: 'put', key: itemId, value: JSON.stringify(indexes) } + ); + indexOps.push(partitionOperation); - await partition.batch(operations, options); + const tenantPartition = await this.db.partition(tenant); + await tenantPartition.batch(indexOps, options); } /** - * Executes the given single filter query and appends the results without duplicate into `matchedIDs`. + * Deletes all of the index data associated with the item. */ - private async executeSingleFilterQuery(tenant: string, filter: Filter, matchedIDs: Set, options?: IndexLevelOptions): Promise { - // Note: We have an array of Promises in order to support OR (anyOf) matches when given a list of accepted values for a property - const propertyNameToPromises: { [key: string]: Promise[] } = {}; - - // Do a separate DB query for each property in `filter` - // We will find the union of these many individual queries later. 
- for (const propertyName in filter) { - const propertyFilter = filter[propertyName]; - - if (typeof propertyFilter === 'object') { - if (Array.isArray(propertyFilter)) { - // `propertyFilter` is a AnyOfFilter - - // Support OR matches by querying for each values separately, - // then adding them to the promises associated with `propertyName` - propertyNameToPromises[propertyName] = []; - for (const propertyValue of new Set(propertyFilter)) { - const exactMatchesPromise = this.findExactMatches(tenant, propertyName, propertyValue, options); - propertyNameToPromises[propertyName].push(exactMatchesPromise); - } - } else { - // `propertyFilter` is a `RangeFilter` - const rangeMatchesPromise = this.findRangeMatches(tenant, propertyName, propertyFilter, options); - propertyNameToPromises[propertyName] = [rangeMatchesPromise]; - } - } else { - // propertyFilter is an EqualFilter, meaning it is a non-object primitive type - const exactMatchesPromise = this.findExactMatches(tenant, propertyName, propertyFilter, options); - propertyNameToPromises[propertyName] = [exactMatchesPromise]; - } + async delete(tenant: string, itemId: string, options?: IndexLevelOptions): Promise { + const indexOps: LevelWrapperBatchOperation[] = []; + + const indexes = await this.getIndexes(tenant, itemId); + if (indexes === undefined) { + // invalid itemId + return; } - // map of ID of all data/object -> list of missing property matches - // if count of missing property matches is 0, it means the data/object fully matches the filter - const missingPropertyMatchesForId: { [dataId: string]: Set } = { }; - - // resolve promises for each property match and - // eliminate matched property from `missingPropertyMatchesForId` iteratively to work out complete matches - for (const [propertyName, promises] of Object.entries(propertyNameToPromises)) { - // acting as an OR match for the property, any of the promises returning a match will be treated as a property match - for (const promise of promises) { - // 
reminder: the promise returns a list of IDs of data satisfying a particular match - for (const dataId of await promise) { - // short circuit: if a data is already included to the final matched ID set (by a different `Filter`), - // no need to evaluate if the data satisfies this current filter being evaluated - if (matchedIDs.has(dataId)) { - continue; - } - - // if first time seeing a property matching for the data/object, record all properties needing a match to track progress - missingPropertyMatchesForId[dataId] ??= new Set([ ...Object.keys(filter) ]); - - missingPropertyMatchesForId[dataId].delete(propertyName); - if (missingPropertyMatchesForId[dataId].size === 0) { - // full filter match, add it to return list - matchedIDs.add(dataId); - } + // delete the reverse lookup + const partitionOperation = await this.createOperationForIndexesLookupPartition(tenant, { type: 'del', key: itemId }); + indexOps.push(partitionOperation); + + // delete the keys for each sortIndex + for (const indexName in indexes) { + const sortValue = indexes[indexName]; + const partitionOperation = await this.createOperationForIndexPartition( + tenant, + indexName, + { + type : 'del', + key : IndexLevel.keySegmentJoin(IndexLevel.encodeValue(sortValue), itemId) } + ); + indexOps.push(partitionOperation); + } + + const tenantPartition = await this.db.partition(tenant); + await tenantPartition.batch(indexOps, options); + } + + /** + * Wraps the given operation as an operation for the specified index partition. + */ + private async createOperationForIndexPartition(tenant: string, indexName: string, operation: LevelWrapperBatchOperation) + : Promise> { + // we write the index entry into a sublevel-partition of tenantPartition. + // putting each index entry within a sublevel allows the levelDB system to calculate a gt minKey and lt maxKey for each of the properties + // this prevents them from clashing, especially when iterating in reverse without iterating through other properties. 
+ const tenantPartition = await this.db.partition(tenant); + const indexPartitionName = IndexLevel.getIndexPartitionName(indexName); + const partitionOperation = tenantPartition.createPartitionOperation(indexPartitionName, operation); + return partitionOperation; + } + + /** + * Wraps the given operation as an operation for the itemId to indexes lookup partition. + */ + private async createOperationForIndexesLookupPartition(tenant: string, operation: LevelWrapperBatchOperation) + : Promise> { + const tenantPartition = await this.db.partition(tenant); + const partitionOperation = tenantPartition.createPartitionOperation(INDEX_SUBLEVEL_NAME, operation); + return partitionOperation; + } + + private static getIndexPartitionName(indexName: string): string { + // we create index partition names in __${indexName}__ wrapping so they do not clash with other sublevels that are created for other purposes. + return `__${indexName}__`; + } + + /** + * Gets the index partition of the given indexName. + */ + private async getIndexPartition(tenant: string, indexName: string): Promise> { + const indexPartitionName = IndexLevel.getIndexPartitionName(indexName); + return (await this.db.partition(tenant)).partition(indexPartitionName); + } + + /** + * Gets the itemId to indexes lookup partition. + */ + private async getIndexesLookupPartition(tenant: string): Promise> { + return (await this.db.partition(tenant)).partition(INDEX_SUBLEVEL_NAME); + } + + /** + * Queries the index for items that match the filters. If no filters are provided, all items are returned. + * + * @param filters Array of filters that are treated as an OR query. + * @param queryOptions query options for sort and pagination, requires at least `sortProperty`. The default sort direction is ascending. + * @param options IndexLevelOptions that include an AbortSignal. + * @returns {string[]} an array of itemIds that match the given filters. 
+ */ + async query(tenant: string, filters: Filter[], queryOptions: QueryOptions, options?: IndexLevelOptions): Promise { + + // check if we should query using in-memory paging or iterator paging + if (IndexLevel.shouldQueryWithInMemoryPaging(filters, queryOptions)) { + return this.queryWithInMemoryPaging(tenant, filters, queryOptions, options); + } + return this.queryWithIteratorPaging(tenant, filters, queryOptions, options); + } + + /** + * Queries the sort property index for items that match the filters. If no filters are provided, all items are returned. + * This query is a linear iterator over the sorted index, checking each item for a match. + * If a cursor is provided it starts the iteration from the cursor point. + */ + async queryWithIteratorPaging(tenant: string, filters: Filter[], queryOptions: QueryOptions, options?: IndexLevelOptions): Promise { + const { limit, cursor , sortProperty } = queryOptions; + + // if there is a cursor we fetch the starting key given the sort property, otherwise we start from the beginning of the index. + const startKey = cursor ? await this.getStartingKeyForCursor(tenant, cursor, sortProperty, filters) : ''; + if (startKey === undefined) { + // getStartingKeyForCursor returns undefined if an invalid cursor is provided, we return an empty result set. + return []; + } + + const matches: string[] = []; + for await ( const item of this.getIndexIterator(tenant, startKey, queryOptions, options)) { + if (limit !== undefined && matches.length === limit) { + return matches; + } + const { itemId, indexes } = item; + if (FilterUtility.matchAnyFilter(indexes, filters)) { + matches.push(itemId); } } + return matches; } - async query(tenant: string, filters: Filter[], options?: IndexLevelOptions): Promise> { - const matchedIDs: Set = new Set(); + /** + * Creates an AsyncGenerator that returns each sorted index item given a specific sortProperty. + * If a cursor is passed, the starting value (gt or lt) is derived from that. 
+ */ + private async * getIndexIterator( + tenant: string, startKey:string, queryOptions: QueryOptions, options?: IndexLevelOptions + ): AsyncGenerator { + const { sortProperty, sortDirection = SortDirection.Ascending, cursor } = queryOptions; - for (const filter of filters) { - await this.executeSingleFilterQuery(tenant, filter, matchedIDs, options); + const iteratorOptions: LevelWrapperIteratorOptions = { + gt: startKey + }; + + // if we are sorting in descending order we can iterate in reverse. + if (sortDirection === SortDirection.Descending) { + iteratorOptions.reverse = true; + + // if a cursor is provided and we are sorting in descending order, the startKey should be the upper bound. + if (cursor !== undefined) { + iteratorOptions.lt = startKey; + delete iteratorOptions.gt; + } } - return [...matchedIDs]; + const sortPartition = await this.getIndexPartition(tenant, sortProperty); + for await (const [ _, val ] of sortPartition.iterator(iteratorOptions, options)) { + const { indexes, itemId } = JSON.parse(val); + yield { indexes, itemId }; + } } - async delete(tenant: string, dataId: string, options?: IndexLevelOptions): Promise { - const partition = await executeUnlessAborted(this.db.partition(tenant), options?.signal); - const serializedIndexes = await partition.get(`__${dataId}__indexes`, options); - if (!serializedIndexes) { + /** + * Gets the starting point for a LevelDB query given an itemId as a cursor and the indexed property. + * Used as (gt) for ascending queries, or (lt) for descending queries. 
+ */ + private async getStartingKeyForCursor(tenant: string, itemId: string, property: string, filters: Filter[]): Promise { + const indexes = await this.getIndexes(tenant, itemId); + if (indexes === undefined) { + // invalid itemId return; } - const indexes = JSON.parse(serializedIndexes); + const sortValue = indexes[property]; + if (sortValue === undefined) { + // invalid sort property + return; + } - // delete all indexes associated with the data of the given ID - const ops: LevelWrapperBatchOperation[] = [ ]; - for (const propertyName in indexes) { - const propertyValue = indexes[propertyName]; - const key = this.join(propertyName, this.encodeValue(propertyValue), dataId); - ops.push({ type: 'del', key }); + // cursor indexes must match the provided filters in order to be valid. + // ie: if someone passes a valid messageCid for a cursor that's not part of the filter. + if (FilterUtility.matchAnyFilter(indexes, filters)) { + return IndexLevel.keySegmentJoin(IndexLevel.encodeValue(sortValue), itemId); } + } - ops.push({ type: 'del', key: `__${dataId}__indexes` }); + /** + * Queries the provided searchFilters asynchronously, returning results that match the matchFilters. + * + * @param filters the filters passed to the parent query. + * @param searchFilters the modified filters used for the LevelDB query to search for a subset of items to match against. + * + * @throws {DwnErrorCode.IndexLevelInMemoryInvalidSortProperty} if an invalid sort property is provided. + */ + async queryWithInMemoryPaging( + tenant: string, + filters: Filter[], + queryOptions: QueryOptions, + options?: IndexLevelOptions + ): Promise { + const { sortProperty, sortDirection = SortDirection.Ascending, cursor, limit } = queryOptions; + + // we create a matches map so that we can short-circuit matched items within the async single query below. 
+ const matches:Map = new Map(); + + // If the filter is empty, we just give it an empty filter so that we can iterate over all the items later in executeSingleFilterQuery(). + // We could do the iteration here, but it would be duplicating the same logic, so decided to just setup the data structure here. + if (filters.length === 0) { + filters = [{}]; + } - await partition.batch(ops, options); + try { + await Promise.all(filters.map(filter => { + return this.executeSingleFilterQuery(tenant, filter, sortProperty, matches, options ); + })); + } catch (error) { + if ((error as DwnError).code === DwnErrorCode.IndexInvalidSortProperty) { + // return empty results if the sort property is invalid. + return []; + } + } + + const sortedValues = [...matches.values()].sort((a,b) => this.sortItems(a,b, sortProperty, sortDirection)); + + // we find the cursor point and only return the result starting there + the limit. + // if there is no cursor index, we just start in the beginning. + const cursorIndex = cursor ? sortedValues.findIndex(match => match.itemId === cursor) : -1; + if (cursor !== undefined && cursorIndex === -1) { + // if a cursor is provided but we cannot find it, we return an empty result set + return []; + } + + const start = cursorIndex > -1 ? cursorIndex + 1 : 0; + const end = limit !== undefined ? start + limit : undefined; + + return sortedValues.slice(start, end).map(match => match.itemId); } - async clear(): Promise { - return this.db.clear(); + /** + * Execute a filtered query against a single filter and return all results. 
+ */ + private async executeSingleFilterQuery( + tenant: string, + filter: Filter, + sortProperty: string, + matches: Map, + levelOptions?: IndexLevelOptions + ): Promise { + + // Note: We have an array of Promises in order to support OR (anyOf) matches when given a list of accepted values for a property + const filterPromises: Promise[] = []; + + // If the filter is empty, then we just iterate over one of the indexes that contains all the records and return all items. + if (isEmptyObject(filter)) { + const getAllItemsPromise = this.getAllItems(tenant, sortProperty); + filterPromises.push(getAllItemsPromise); + } + + // else the filter is not empty + const searchFilter = FilterSelector.reduceFilter(filter); + for (const propertyName in searchFilter) { + const propertyFilter = searchFilter[propertyName]; + // We will find the union of these many individual queries later. + if (FilterUtility.isEqualFilter(propertyFilter)) { + // propertyFilter is an EqualFilter, meaning it is a non-object primitive type + const exactMatchesPromise = this.filterExactMatches(tenant, propertyName, propertyFilter, levelOptions); + filterPromises.push(exactMatchesPromise); + } else if (FilterUtility.isOneOfFilter(propertyFilter)) { + // `propertyFilter` is a OneOfFilter + // Support OR matches by querying for each values separately, then adding them to the promises array. 
+ for (const propertyValue of new Set(propertyFilter)) { + const exactMatchesPromise = this.filterExactMatches(tenant, propertyName, propertyValue, levelOptions); + filterPromises.push(exactMatchesPromise); + } + } else if (FilterUtility.isRangeFilter(propertyFilter)) { + // `propertyFilter` is a `RangeFilter` + const rangeMatchesPromise = this.filterRangeMatches(tenant, propertyName, propertyFilter, levelOptions); + filterPromises.push(rangeMatchesPromise); + } + } + + // acting as an OR match for the property, any of the promises returning a match will be treated as a property match + for (const promise of filterPromises) { + const indexItems = await promise; + // reminder: the promise returns a list of IndexedItem satisfying a particular property match + for (const indexedItem of indexItems) { + // short circuit: if a data is already included to the final matched key set (by a different `Filter`), + // no need to evaluate if the data satisfies this current filter being evaluated + // otherwise check that the item is a match. + if (matches.has(indexedItem.itemId) || !FilterUtility.matchFilter(indexedItem.indexes, filter)) { + continue; + } + + // ensure that each matched item has the sortProperty, otherwise fail the entire query. + if (indexedItem.indexes[sortProperty] === undefined) { + throw new DwnError(DwnErrorCode.IndexInvalidSortProperty, `invalid sort property ${sortProperty}`); + } + + matches.set(indexedItem.itemId, indexedItem); + } + } + } + + private async getAllItems(tenant: string, sortProperty: string): Promise { + const filterPartition = await this.getIndexPartition(tenant, sortProperty); + const items: IndexedItem[] = []; + for await (const [ _key, value ] of filterPartition.iterator()) { + items.push(JSON.parse(value) as IndexedItem); + } + return items; } /** - * @returns IDs of data that matches the exact property and value. + * Returns items that match the exact property and value. 
*/ - private async findExactMatches(tenant: string, propertyName: string, propertyValue: unknown, options?: IndexLevelOptions): Promise { - const partition = await executeUnlessAborted(this.db.partition(tenant), options?.signal); - const propertyValuePrefix = this.join(propertyName, this.encodeValue(propertyValue), ''); + private async filterExactMatches( + tenant:string, + propertyName: string, + propertyValue: EqualFilter, + options?: IndexLevelOptions + ): Promise { + const matchPrefix = IndexLevel.keySegmentJoin(IndexLevel.encodeValue(propertyValue)); const iteratorOptions: LevelWrapperIteratorOptions = { - gt: propertyValuePrefix + gt: matchPrefix }; - const matches: string[] = []; - for await (const [ key, dataId ] of partition.iterator(iteratorOptions, options)) { - if (!key.startsWith(propertyValuePrefix)) { + const filterPartition = await this.getIndexPartition(tenant, propertyName); + const matches: IndexedItem[] = []; + for await (const [ key, value ] of filterPartition.iterator(iteratorOptions, options)) { + // immediately stop if we arrive at an index that contains a different property value + if (!key.startsWith(matchPrefix)) { break; } - - matches.push(dataId); + matches.push(JSON.parse(value) as IndexedItem); } return matches; } /** - * @returns IDs of data that matches the range filter. + * Returns items that match the range filter. 
*/ - private async findRangeMatches(tenant: string, propertyName: string, rangeFilter: RangeFilter, options?: IndexLevelOptions): Promise { - const partition = await executeUnlessAborted(this.db.partition(tenant), options?.signal); + private async filterRangeMatches( + tenant: string, + propertyName: string, + rangeFilter: RangeFilter, + options?: IndexLevelOptions + ): Promise { const iteratorOptions: LevelWrapperIteratorOptions = {}; - for (const comparator in rangeFilter) { const comparatorName = comparator as keyof RangeFilter; - iteratorOptions[comparatorName] = this.join(propertyName, this.encodeValue(rangeFilter[comparatorName])); + iteratorOptions[comparatorName] = IndexLevel.encodeValue(rangeFilter[comparatorName]!); } // if there is no lower bound specified (`gt` or `gte`), we need to iterate from the upper bound, @@ -216,46 +459,74 @@ export class IndexLevel { iteratorOptions.reverse = true; } - const matches: string[] = []; - for await (const [ key, dataId ] of partition.iterator(iteratorOptions, options)) { + const matches: IndexedItem[] = []; + const filterPartition = await this.getIndexPartition(tenant, propertyName); + + for await (const [ key, value ] of filterPartition.iterator(iteratorOptions, options)) { // if "greater-than" is specified, skip all keys that contains the exact value given in the "greater-than" condition - if ('gt' in rangeFilter && IndexLevel.extractValueFromKey(key) === this.encodeValue(rangeFilter.gt)) { + if ('gt' in rangeFilter && this.extractIndexValueFromKey(key) === IndexLevel.encodeValue(rangeFilter.gt!)) { continue; } - - // immediately stop if we arrive at an index entry for a different property - if (!key.startsWith(propertyName)) { - break; - } - - matches.push(dataId); + matches.push(JSON.parse(value) as IndexedItem); } if ('lte' in rangeFilter) { // When `lte` is used, we must also query the exact match explicitly because the exact match will not be included in the iterator above. 
- // This is due to the extra data (CID) appended to the (property + value) key prefix, e.g. - // key = 'dateCreated\u0000"2023-05-25T11:22:33.000000Z"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' - // the value would be considered greater than { lte: `dateCreated\u0000"2023-05-25T11:22:33.000000Z"` } used in the iterator options, + // This is due to the extra data appended to the (property + value) key prefix, e.g. + // the key '"2023-05-25T11:22:33.000000Z"\u0000bayfreigu....' + // would be considered greater than `lte` value in { lte: '"2023-05-25T11:22:33.000000Z"' } iterator options, // thus would not be included in the iterator even though we'd like it to be. - for (const dataId of await this.findExactMatches(tenant, propertyName, rangeFilter.lte, options)) { - matches.push(dataId); + for (const item of await this.filterExactMatches(tenant, propertyName, rangeFilter.lte as EqualFilter, options)) { + matches.push(item); } } return matches; } - private encodeValue(value: unknown): string { - switch (typeof value) { - case 'string': - // We can't just `JSON.stringify` as that'll affect the sort order of strings. - // For example, `'\x00'` becomes `'\\u0000'`. - return `"${value}"`; - case 'number': - return IndexLevel.encodeNumberValue(value); - default: - return String(value); + /** + * Sorts Items lexicographically in ascending or descending order given a specific indexName, using the itemId as a tie breaker. + * We know the indexes include the indexName here because they have already been checked within executeSingleFilterQuery. + */ + private sortItems(itemA: IndexedItem, itemB: IndexedItem, indexName: string, direction: SortDirection): number { + const aValue = IndexLevel.encodeValue(itemA.indexes[indexName]) + itemA.itemId; + const bValue = IndexLevel.encodeValue(itemB.indexes[indexName]) + itemB.itemId; + return direction === SortDirection.Ascending ? 
+ lexicographicalCompare(aValue, bValue) : + lexicographicalCompare(bValue, aValue); + } + + /** + * Gets the indexes given an itemId. This is a reverse lookup to construct starting keys, as well as deleting indexed items. + */ + private async getIndexes(tenant: string, itemId: string): Promise { + const indexesLookupPartition = await this.getIndexesLookupPartition(tenant); + const serializedIndexes = await indexesLookupPartition.get(itemId); + if (serializedIndexes === undefined) { + // invalid itemId + return; } + + return JSON.parse(serializedIndexes) as KeyValues; + } + + /** + * Given a key from an indexed partitioned property key. + * ex: + * key: '"2023-05-25T11:22:33.000000Z"\u0000bayfreigu....' + * returns "2023-05-25T11:22:33.000000Z" + */ + private extractIndexValueFromKey(key: string): string { + const [value] = key.split(IndexLevel.delimiter); + return value; + } + + /** + * Joins the given values using the `\x00` (\u0000) character. + */ + private static delimiter = `\x00`; + private static keySegmentJoin(...values: string[]): string { + return values.join(IndexLevel.delimiter); } /** @@ -278,29 +549,52 @@ export class IndexLevel { } /** - * Extracts the value encoded within the indexed key when a record is inserted. - * - * ex. key: 'dateCreated\u0000"2023-05-25T18:23:29.425008Z"\u0000bafyreigs3em7lrclhntzhgvkrf75j2muk6e7ypq3lrw3ffgcpyazyw6pry' - * extracted value: "2023-05-25T18:23:29.425008Z" + * Encodes an indexed value to a string * - * @param key an IndexLevel db key. - * @returns the extracted encodedValue from the key. + * NOTE: we currently only use this for strings, numbers and booleans. 
*/ - static extractValueFromKey(key: string): string { - const [, value] = key.split(this.delimiter); - return value; + static encodeValue(value: string | number | boolean): string { + switch (typeof value) { + case 'number': + return this.encodeNumberValue(value); + default: + return JSON.stringify(value); + } } - /** - * Joins the given values using the `\x00` (\u0000) character. - */ - private static delimiter = `\x00`; - private join(...values: unknown[]): string { - return values.join(IndexLevel.delimiter); + private static shouldQueryWithInMemoryPaging(filters: Filter[], queryOptions: QueryOptions): boolean { + for (const filter of filters) { + if (!IndexLevel.isFilterConcise(filter, queryOptions)) { + return false; + } + } + + // only use in-memory paging if all filters are concise + return true; } -} -type IndexLevelConfig = { - location: string, - createLevelDatabase?: typeof createLevelDatabase, -}; \ No newline at end of file + + public static isFilterConcise(filter: Filter, queryOptions: QueryOptions): boolean { + // if there is a specific recordId in the filter, return true immediately. + if (filter.recordId !== undefined) { + return true; + } + + // unless a recordId is present, if there is a cursor we never use in memory paging + if (queryOptions.cursor !== undefined) { + return false; + } + // NOTE: remaining conditions will not have cursor + if ( + filter.protocolPath !== undefined || + filter.contextId !== undefined || + filter.parentId !== undefined || + filter.schema !== undefined + ) { + return true; + } + + // all else + return false; + } +} diff --git a/src/store/level-wrapper.ts b/src/store/level-wrapper.ts index eb0187b33..4da51a3eb 100644 --- a/src/store/level-wrapper.ts +++ b/src/store/level-wrapper.ts @@ -205,6 +205,16 @@ export class LevelWrapper { return executeUnlessAborted(this.db.batch(operations), options?.signal); } + /** + * Wraps the given LevelWrapperBatchOperation as an operation for the specified partition. 
+ */ + createPartitionOperation(partitionName: string, operation: LevelWrapperBatchOperation): LevelWrapperBatchOperation { + return { ...operation, sublevel: this.db.sublevel(partitionName, { + keyEncoding : 'utf8', + valueEncoding : this.config.valueEncoding + }) }; + } + private async compactUnderlyingStorage(options?: LevelWrapperOptions): Promise { options?.signal?.throwIfAborted(); diff --git a/src/store/message-store-level.ts b/src/store/message-store-level.ts index aa0f2df2a..220460ab5 100644 --- a/src/store/message-store-level.ts +++ b/src/store/message-store-level.ts @@ -1,12 +1,11 @@ -import type { RecordsWriteMessage } from '../types/records-types.js'; -import type { Filter, GenericMessage, MessageSort, Pagination } from '../types/message-types.js'; +import type { Filter, KeyValues, QueryOptions } from '../types/query-types.js'; +import type { GenericMessage, MessageSort, Pagination } from '../types/message-types.js'; import type { MessageStore, MessageStoreOptions } from '../types/message-store.js'; import * as block from 'multiformats/block'; import * as cbor from '@ipld/dag-cbor'; -import { ArrayUtility } from '../utils/array.js'; import { BlockstoreLevel } from './blockstore-level.js'; import { Cid } from '../utils/cid.js'; import { CID } from 'multiformats/cid'; @@ -15,7 +14,7 @@ import { executeUnlessAborted } from '../utils/abort.js'; import { IndexLevel } from './index-level.js'; import { Message } from '../core/message.js'; import { sha256 } from 'multiformats/hashes/sha2'; -import { SortOrder } from '../types/message-types.js'; +import { SortDirection } from '../types/query-types.js'; /** @@ -26,7 +25,6 @@ export class MessageStoreLevel implements MessageStore { config: MessageStoreLevelConfig; blockstore: BlockstoreLevel; - index: IndexLevel; /** @@ -92,143 +90,61 @@ export class MessageStoreLevel implements MessageStore { ): Promise<{ messages: GenericMessage[], cursor?: string }> { options?.signal?.throwIfAborted(); + // creates the query 
options including sorting and pagination. + // this adds 1 to the limit if provided, that way we can check to see if there are additional results and provide a return cursor. + const queryOptions = MessageStoreLevel.buildQueryOptions(messageSort, pagination); + const results = await this.index.query(tenant, filters, queryOptions, options); + const messages: GenericMessage[] = []; - // note: injecting tenant into filters to allow querying with an "empty" filter. - // if there are no other filters present it will return all the messages the tenant. - const resultIds = await this.index.query(tenant, filters.map(f => ({ ...f, tenant })), options); - - // as an optimization for large data sets, we are finding the message object which matches the cursor here. - // we can use this within the pagination function after sorting to determine the starting point of the array in a more efficient way. - let paginationMessage: GenericMessage | undefined; - for (const id of resultIds) { - const message = await this.get(tenant, id, options); + for (let i = 0; i < results.length; i++) { + const messageCid = results[i]; + const message = await this.get(tenant, messageCid, options); if (message) { messages.push(message); } - if (pagination?.cursor && pagination.cursor === id) { - paginationMessage = message; - } - } - - if (pagination?.cursor !== undefined && paginationMessage === undefined) { - return { messages: [] }; //if paginationMessage is not found, do not return any results - } - - const sortedRecords = await MessageStoreLevel.sortMessages(messages, messageSort); - return this.paginateMessages(sortedRecords, paginationMessage, pagination); - } - - private async paginateMessages( - messages: GenericMessage[], - paginationMessage?: GenericMessage, - pagination: Pagination = { } - ): Promise<{ messages: GenericMessage[], cursor?: string } > { - const { limit } = pagination; - if (paginationMessage === undefined && limit === undefined) { - return { messages }; // return all without 
pagination pointer. } - // we are passing the pagination message object for an easier lookup - // since we know this object exists within the array if passed, we can assume that it will always have a value greater than -1 - // TODO: #506 - Improve performance by modifying filters based on the pagination cursor (https://github.com/TBD54566975/dwn-sdk-js/issues/506) - const cursorIndex = paginationMessage ? messages.indexOf(paginationMessage) : undefined; - - // the first element of the returned results is always the message immediately following the cursor. - const start = cursorIndex === undefined ? 0 : cursorIndex + 1; - const end = limit === undefined ? undefined : start + limit; - const results = messages.slice(start, end); - - // we only return a cursor cursor if there are more results - const hasMoreResults = end !== undefined && end < messages.length; - let cursor: string|undefined; + // checks to see if the returned results are greater than the limit, which would indicate additional results. + const hasMoreResults = pagination?.limit !== undefined && pagination.limit < results.length; + let cursor: string | undefined; if (hasMoreResults) { - // we extract the cid of the last message in the result set. - const lastMessage = results.at(-1); + // if there are additional results, we remove the extra result we queried for. + messages.splice(-1); // remove last element + const lastMessage = messages.at(-1); // we choose the last remaining result as a cursor point. cursor = await Message.getCid(lastMessage!); } - return { messages: results, cursor }; + return { messages, cursor }; } /** - * Compares the chosen property of two messages in lexicographical order. - * When the value is the same between the two objects, `messageCid` comparison is used to tiebreak. 
- * tiebreaker always compares messageA to messageB - * - * @returns if SortOrder is Ascending: - * 1 if the chosen property of `messageA` is larger than of `messageB`; - * -1 if the chosen property `messageA` is smaller/older than of `messageB`; - * 0 otherwise - * if SortOrder is Descending: - * 1 if the chosen property of `messageB` is larger than of `messageA`; - * -1 if the chosen property `messageB` is smaller/older than of `messageA`; - * 0 otherwise + * Builds the IndexLevel QueryOptions object given MessageStore sort and pagination parameters. */ - static async lexicographicalCompare( - messageA: GenericMessage, - messageB: GenericMessage, - comparedPropertyName: string, - sortOrder: SortOrder): Promise - { - const a = (messageA.descriptor as any)[comparedPropertyName]; - const b = (messageB.descriptor as any)[comparedPropertyName]; - - if (sortOrder === SortOrder.Ascending) { - if (a > b) { - return 1; - } else if (a < b) { - return -1; - } - } else { - // descending order - if (b > a) { - return 1; - } else if (b < a) { - return -1; - } - } - - // if we reach here it means the compared properties have the same values, we need to fall back to compare the `messageCid` instead - return await Message.compareCid(messageA, messageB); - } - - /** - * This is a temporary naive sort, it will eventually be done within the underlying data store. - * - * If sorting is based on date published, records that are not published are filtered out. 
- * @param messages - Messages to be sorted if dateSort is present - * @param sort - Sorting scheme - * @returns Sorted Messages - */ - public static async sortMessages( - messages: GenericMessage[], - messageSort: MessageSort = { } - ): Promise { + static buildQueryOptions(messageSort: MessageSort = {}, pagination: Pagination = {}): QueryOptions { + let { limit, cursor } = pagination; const { dateCreated, datePublished, messageTimestamp } = messageSort; - let sortOrder = SortOrder.Ascending; // default - let messagesToSort = messages; // default - let propertyToCompare: keyof MessageSort | undefined; // `keyof MessageSort` = name of all properties of `MessageSort` + let sortDirection = SortDirection.Ascending; // default + // `keyof MessageSort` = name of all properties of `MessageSort` defaults to messageTimestamp + let sortProperty: keyof MessageSort = 'messageTimestamp'; + // set the sort property if (dateCreated !== undefined) { - propertyToCompare = 'dateCreated'; + sortProperty = 'dateCreated'; } else if (datePublished !== undefined) { - propertyToCompare = 'datePublished'; - messagesToSort = (messages as RecordsWriteMessage[]).filter(message => message.descriptor.published); + sortProperty = 'datePublished'; } else if (messageTimestamp !== undefined) { - propertyToCompare = 'messageTimestamp'; + sortProperty = 'messageTimestamp'; } - if (propertyToCompare !== undefined) { - sortOrder = messageSort[propertyToCompare]!; - } else { - propertyToCompare = 'messageTimestamp'; + if (messageSort[sortProperty] !== undefined) { + sortDirection = messageSort[sortProperty]!; } - const asyncComparer = (a: GenericMessage, b: GenericMessage): Promise => { - return MessageStoreLevel.lexicographicalCompare(a, b, propertyToCompare!, sortOrder); - }; + // we add one more to the limit to determine whether there are additional results and to return a cursor. 
+ if (limit && limit > 0) { + limit = limit + 1; + } - // NOTE: we needed to implement our own asynchronous sort method because Array.sort() does not take an async comparer - return await ArrayUtility.asyncSort(messagesToSort, asyncComparer); + return { sortDirection, sortProperty, limit, cursor }; } async delete(tenant: string, cidString: string, options?: MessageStoreOptions): Promise { @@ -244,7 +160,7 @@ export class MessageStoreLevel implements MessageStore { async put( tenant: string, message: GenericMessage, - indexes: { [key: string]: string | boolean }, + indexes: KeyValues, options?: MessageStoreOptions ): Promise { options?.signal?.throwIfAborted(); @@ -259,16 +175,10 @@ export class MessageStoreLevel implements MessageStore { const messageCidString = messageCid.toString(); - // note: leaving the additional tenant indexing to allow for querying with an "empty" filter. - // when querying, we also inject a filter for the specific tenant. - // if there are no other filters present it will return all the messages for that tenant. - const indexDocument = { - ...indexes, - tenant, - }; - await this.index.put(tenant, messageCidString, indexDocument, options); + await this.index.put(tenant, messageCidString, indexes, options); } + /** * deletes everything in the underlying blockstore and indices. 
 */ diff --git a/src/types/event-log.ts b/src/types/event-log.ts index 35fac2575..081f327c2 100644 --- a/src/types/event-log.ts +++ b/src/types/event-log.ts @@ -1,11 +1,7 @@ -export type Event = { - watermark: string, - messageCid: string -}; - +import type { Filter, KeyValues } from './query-types.js'; export type GetEventsOptions = { - gt: string + cursor: string }; export interface EventLog { @@ -23,21 +19,35 @@ export interface EventLog { * adds an event to a tenant's event log * @param tenant - the tenant's DID * @param messageCid - the CID of the message - * @returns {Promise} watermark + * @param indexes - (key-value pairs) to be included as part of indexing this event. + */ + append(tenant: string, messageCid: string, indexes: KeyValues): Promise + + /** + * Retrieves all of a tenant's events that occurred after the cursor provided. + * If no cursor is provided, all events for a given tenant will be returned. + * + * The cursor is a messageCid. + * + * Returns an array of messageCids that represent the events. */ - append(tenant: string, messageCid: string): Promise + getEvents(tenant: string, options?: GetEventsOptions): Promise /** - * retrieves all of a tenant's events that occurred after the watermark provided. - * If no watermark is provided, all events for a given tenant will be returned. + * retrieves a filtered set of events that occurred after the cursor provided, accepts multiple filters. + * + * If no cursor is provided, all events for a given tenant and filter combo will be returned. + * The cursor is a messageCid. + * + * Returns an array of messageCids that represent the events. 
*/ - getEvents(tenant: string, options?: GetEventsOptions): Promise> + queryEvents(tenant: string, filters: Filter[], cursor?: string): Promise /** - * deletes any events that have any of the cids provided + * deletes any events that have any of the messageCids provided * @returns {Promise} the number of events deleted */ - deleteEventsByCid(tenant: string, cids: Array): Promise + deleteEventsByCid(tenant: string, messageCids: Array): Promise /** * Clears the entire store. Mainly used for cleaning up in test environment. diff --git a/src/types/event-types.ts b/src/types/event-types.ts index da63e897c..4e3b1b228 100644 --- a/src/types/event-types.ts +++ b/src/types/event-types.ts @@ -1,12 +1,35 @@ -import type { Event } from './event-log.js'; import type { GenericMessageReply } from '../core/message-reply.js'; +import type { ProtocolsQueryFilter } from './protocols-types.js'; import type { AuthorizationModel, GenericMessage } from './message-types.js'; import type { DwnInterfaceName, DwnMethodName } from '../enums/dwn-interface-method.js'; +import type { RangeCriterion, RangeFilter } from './query-types.js'; + +export type EventsMessageFilter = { + interface?: string; + method?: string; + dateUpdated?: RangeCriterion; +}; + +// We only allow filtering for events by immutable properties, the omitted properties could be different per subsequent writes. 
+export type EventsRecordsFilter = { + recipient?: string; + protocol?: string; + protocolPath?: string; + contextId?: string; + schema?: string; + recordId?: string; + parentId?: string; + dataFormat?: string; + dataSize?: RangeFilter; + dateCreated?: RangeCriterion; +}; + +export type EventsQueryFilter = EventsMessageFilter | EventsRecordsFilter | ProtocolsQueryFilter; export type EventsGetDescriptor = { interface : DwnInterfaceName.Events; method: DwnMethodName.Get; - watermark?: string; + cursor?: string; messageTimestamp: string; }; @@ -16,5 +39,22 @@ export type EventsGetMessage = GenericMessage & { }; export type EventsGetReply = GenericMessageReply & { - events?: Event[]; + events?: string[]; +}; + +export type EventsQueryDescriptor = { + interface: DwnInterfaceName.Events; + method: DwnMethodName.Query; + messageTimestamp: string; + filters: EventsQueryFilter[]; + cursor?: string; +}; + +export type EventsQueryMessage = GenericMessage & { + authorization: AuthorizationModel; + descriptor: EventsQueryDescriptor; +}; + +export type EventsQueryReply = GenericMessageReply & { + events?: string[]; }; \ No newline at end of file diff --git a/src/types/message-store.ts b/src/types/message-store.ts index ec30c903b..22777095f 100644 --- a/src/types/message-store.ts +++ b/src/types/message-store.ts @@ -1,4 +1,5 @@ -import type { Filter, GenericMessage, MessageSort, Pagination } from './message-types.js'; +import type { Filter, KeyValues } from './query-types.js'; +import type { GenericMessage, MessageSort, Pagination } from './message-types.js'; export interface MessageStoreOptions { signal?: AbortSignal; @@ -22,7 +23,7 @@ export interface MessageStore { put( tenant: string, message: GenericMessage, - indexes: { [key: string]: string | boolean }, + indexes: KeyValues, options?: MessageStoreOptions ): Promise; diff --git a/src/types/message-types.ts b/src/types/message-types.ts index 545f09b90..947112c86 100644 --- a/src/types/message-types.ts +++ 
b/src/types/message-types.ts @@ -1,5 +1,6 @@ import type { DelegatedGrantMessage } from '../types/delegated-grant-message.js'; import type { GeneralJws } from './jws-types.js'; +import type { SortDirection } from './query-types.js'; /** * Intersection type for all concrete message types. @@ -68,31 +69,6 @@ export type QueryResultEntry = { encodedData?: string; }; -export type EqualFilter = string | number | boolean; - -export type OneOfFilter = EqualFilter[]; - -export type RangeValue = string | number; - -/** - * "greater than" or "greater than or equal to" range condition. `gt` and `gte` are mutually exclusive. - */ -export type GT = ({ gt: RangeValue } & { gte?: never }) | ({ gt?: never } & { gte: RangeValue }); - -/** - * "less than" or "less than or equal to" range condition. `lt`, `lte` are mutually exclusive. - */ -export type LT = ({ lt: RangeValue } & { lte?: never }) | ({ lt?: never } & { lte: RangeValue }); - -/** - * Ranger filter. 1 condition is required. - */ -export type RangeFilter = (GT | LT) & Partial & Partial; - -export type Filter = { - [property: string]: EqualFilter | OneOfFilter | RangeFilter -}; - /** * Pagination Options for querying messages. * @@ -103,13 +79,10 @@ export type Pagination = { limit?: number; }; -export enum SortOrder { - Descending = -1, - Ascending = 1 -} + export type MessageSort = { - dateCreated?: SortOrder; - datePublished?: SortOrder; - messageTimestamp?: SortOrder; + dateCreated?: SortDirection; + datePublished?: SortDirection; + messageTimestamp?: SortDirection; }; \ No newline at end of file diff --git a/src/types/protocols-types.ts b/src/types/protocols-types.ts index d3a35b15a..cda472c72 100644 --- a/src/types/protocols-types.ts +++ b/src/types/protocols-types.ts @@ -13,7 +13,7 @@ export type ProtocolsConfigureDescriptor = { export type ProtocolDefinition = { protocol: string; /** - * Denotes if this Protocol Definition can be returned by unauthenticated `ProtocolsQuery`. 
+ * Denotes if this Protocol Definition can be returned by unauthenticated or unauthorized `ProtocolsQuery`. */ published: boolean; types: ProtocolTypes; diff --git a/src/types/query-types.ts b/src/types/query-types.ts new file mode 100644 index 000000000..642327937 --- /dev/null +++ b/src/types/query-types.ts @@ -0,0 +1,52 @@ +export type QueryOptions = { + sortProperty: string; + sortDirection?: SortDirection; + limit?: number; + cursor?: string; +}; + +export enum SortDirection { + Descending = -1, + Ascending = 1 +} + +export type KeyValues = { [key:string]: string | number | boolean }; + +export type EqualFilter = string | number | boolean; + +export type OneOfFilter = EqualFilter[]; + +export type RangeValue = string | number; + +/** + * "greater than" or "greater than or equal to" range condition. `gt` and `gte` are mutually exclusive. + */ +export type GT = ({ gt: RangeValue } & { gte?: never }) | ({ gt?: never } & { gte: RangeValue }); + +/** + * "less than" or "less than or equal to" range condition. `lt`, `lte` are mutually exclusive. + */ +export type LT = ({ lt: RangeValue } & { lte?: never }) | ({ lt?: never } & { lte: RangeValue }); + +/** + * Range filter. 1 condition is required. + */ +export type RangeFilter = (GT | LT) & Partial & Partial; + +export type FilterValue = EqualFilter | OneOfFilter | RangeFilter; + +export type Filter = { + [property: string]: FilterValue; +}; + +export type RangeCriterion = { + /** + * Inclusive starting date-time. + */ + from?: string; + + /** + * Inclusive end date-time. 
+ */ + to?: string; +}; \ No newline at end of file diff --git a/src/types/records-types.ts b/src/types/records-types.ts index f3587aef7..19d375ce8 100644 --- a/src/types/records-types.ts +++ b/src/types/records-types.ts @@ -3,10 +3,10 @@ import type { GeneralJws } from './jws-types.js'; import type { GenericMessageReply } from '../core/message-reply.js'; import type { KeyDerivationScheme } from '../utils/hd-key.js'; import type { PublicJwk } from './jose-types.js'; -import type { RangeFilter } from './message-types.js'; import type { Readable } from 'readable-stream'; import type { AuthorizationModel, GenericMessage, GenericSignaturePayload, Pagination } from './message-types.js'; import type { DwnInterfaceName, DwnMethodName } from '../enums/dwn-interface-method.js'; +import type { RangeCriterion, RangeFilter } from './query-types.js'; export enum DateSort { CreatedAscending = 'createdAscending', @@ -122,18 +122,6 @@ export type RecordsFilter = { dateUpdated?: RangeCriterion; }; -export type RangeCriterion = { - /** - * Inclusive starting date-time. - */ - from?: string; - - /** - * Inclusive end date-time. - */ - to?: string; -}; - export type RecordsWriteAttestationPayload = { descriptorCid: string; }; diff --git a/src/utils/filter.ts b/src/utils/filter.ts new file mode 100644 index 000000000..8b39567ff --- /dev/null +++ b/src/utils/filter.ts @@ -0,0 +1,210 @@ +import type { EqualFilter, Filter, FilterValue, KeyValues, OneOfFilter, RangeCriterion, RangeFilter, RangeValue } from '../types/query-types.js'; + +/** + * A Utility class to help match indexes against filters. + */ +export class FilterUtility { + /** + * Matches the given key values against an array of filters, if any of the filters match, returns true. + * + * @returns true if any of the filters match. 
+ */ + static matchAnyFilter(keyValues: KeyValues, orFilters: Filter[]): boolean { + if (orFilters.length === 0) { + return true; + } + + for (const filter of orFilters) { + // if any of the filters match the indexed values, we return true as it's a match + if (this.matchFilter(keyValues, filter)) { + return true; + } + } + + return false; + } + + /** + * Evaluates the given filter against the indexed values. + * + * @param indexedValues the indexed values for an item. + * @param filter + * @returns true if all of the filter properties match. + */ + public static matchFilter(indexedValues: KeyValues, filter: Filter): boolean { + // set of unique query properties. + // if count of missing property matches is 0, it means the data/object fully matches the filter + const missingPropertyMatches: Set = new Set([ ...Object.keys(filter) ]); + + for (const filterProperty in filter) { + const filterValue = filter[filterProperty]; + const indexValue = indexedValues[filterProperty]; + if (indexValue === undefined) { + return false; + } + + if (typeof filterValue === 'object') { + if (Array.isArray(filterValue)) { + // if `filterValue` is an array, it is a OneOfFilter + // Support OR matches by querying for each value separately, + if (!this.matchOneOf(filterValue, indexValue)) { + return false; + } + missingPropertyMatches.delete(filterProperty); + continue; + } else { + // `filterValue` is a `RangeFilter` + // range filters cannot range over booleans + if (!this.matchRange(filterValue, indexValue as RangeValue)) { + return false; + } + missingPropertyMatches.delete(filterProperty); + continue; + } + } else { + // filterValue is an EqualFilter, meaning it is a non-object primitive type + if (indexValue !== filterValue) { + return false; + } + missingPropertyMatches.delete(filterProperty); + continue; + } + } + return missingPropertyMatches.size === 0; + } + + /** + * Evaluates a OneOfFilter given an indexedValue extracted from the index. 
+ * + * @param filter An array of EqualFilters. Treated as an OR. + * @param indexedValue the indexed value being compared. + * @returns true if any of the given filters match the indexedValue + */ + private static matchOneOf(filter: OneOfFilter, indexedValue: string | number | boolean): boolean { + for (const orFilterValue of filter) { + if (indexedValue === orFilterValue) { + return true; + } + } + return false; + } + + /** + * Evaluates if the given indexedValue is within the range given by the RangeFilter. + * + * @returns true if all of the range filter conditions are met. + */ + private static matchRange(rangeFilter: RangeFilter, indexedValue: string | number): boolean { + if (rangeFilter.lt !== undefined && indexedValue >= rangeFilter.lt) { + return false; + } + if (rangeFilter.lte !== undefined && indexedValue > rangeFilter.lte) { + return false; + } + if (rangeFilter.gt !== undefined && indexedValue <= rangeFilter.gt) { + return false; + } + if (rangeFilter.gte !== undefined && indexedValue < rangeFilter.gte) { + return false; + } + return true; + } + + static isEqualFilter(filter: FilterValue): filter is EqualFilter { + if (typeof filter !== 'object') { + return true; + } + return false; + } + + static isRangeFilter(filter: FilterValue): filter is RangeFilter { + if (typeof filter === 'object' && !Array.isArray(filter)) { + return 'gt' in filter || 'lt' in filter || 'lte' in filter || 'gte' in filter; + }; + return false; + } + + static isOneOfFilter(filter: FilterValue): filter is OneOfFilter { + if (typeof filter === 'object' && Array.isArray(filter)) { + return true; + }; + return false; + } + + static convertRangeCriterion(inputFilter: RangeCriterion): RangeFilter | undefined { + let rangeFilter: RangeFilter | undefined; + if (inputFilter.to !== undefined && inputFilter.from !== undefined) { + rangeFilter = { + gte : inputFilter.from, + lt : inputFilter.to, + }; + } else if (inputFilter.to !== undefined) { + rangeFilter = { + lt: inputFilter.to, + }; 
+ } else if (inputFilter.from !== undefined) { + rangeFilter = { + gte: inputFilter.from, + }; + } + return rangeFilter; + } + +} + +export class FilterSelector { + + /** + * Reduce Filter so that it is a filter that can be quickly executed against the DB. + */ + static reduceFilter(filter: Filter): Filter { + // if there is only one or no property, we have no way to reduce it further + const filterProperties = Object.keys(filter); + if (filterProperties.length <= 1) { + return filter; + } + + // else there is are least 2 filter properties, since zero property is not allowed + + const { recordId, attester, parentId, recipient, contextId, author, protocolPath, schema, protocol, ...remainingProperties } = filter; + + if (recordId !== undefined) { + return { recordId }; + } + + if (attester !== undefined) { + return { attester }; + } + + if (parentId !== undefined) { + return { parentId }; + } + + if (recipient !== undefined) { + return { recipient }; + } + + if (contextId !== undefined) { + return { contextId }; + } + + if (protocolPath !== undefined) { + return { protocolPath }; + } + + if (schema !== undefined) { + return { schema }; + } + + if (protocol !== undefined) { + return { protocol }; + } + + // else just return whatever property, we can optimize further later + const remainingPropertyNames = Object.keys(remainingProperties); + const firstRemainingProperty = remainingPropertyNames[0]; + const singlePropertyFilter: Filter = {}; + singlePropertyFilter[firstRemainingProperty] = filter[firstRemainingProperty]; + return singlePropertyFilter; + } +} \ No newline at end of file diff --git a/src/utils/object.ts b/src/utils/object.ts index bb6c1ee4c..ac2139950 100644 --- a/src/utils/object.ts +++ b/src/utils/object.ts @@ -1,15 +1,3 @@ -import flat from 'flat'; - -/** - * Flattens the given object. - * e.g. 
`{ a: { b: { c: 42 } } }` becomes `{ 'a.b.c': 42 }` - */ -export function flatten(obj: unknown): Record { - const flattened = flat.flatten>(obj); - removeEmptyObjects(flattened); - return flattened; -} - /** * Checks whether the given object has any properties. */ diff --git a/src/utils/records.ts b/src/utils/records.ts index 24a1b1c42..f998f0d55 100644 --- a/src/utils/records.ts +++ b/src/utils/records.ts @@ -1,12 +1,16 @@ import type { DerivedPrivateJwk } from './hd-key.js'; +import type { Filter } from '../types/query-types.js'; +import type { GenericSignaturePayload } from '../types/message-types.js'; import type { Readable } from 'readable-stream'; -import type { Filter, GenericSignaturePayload, RangeFilter } from '../types/message-types.js'; -import type { RangeCriterion, RecordsDeleteMessage, RecordsFilter, RecordsQueryMessage, RecordsReadMessage, RecordsWriteDescriptor, RecordsWriteMessage } from '../types/records-types.js'; +import type { RecordsDeleteMessage, RecordsFilter, RecordsQueryMessage, RecordsReadMessage, RecordsWriteDescriptor, RecordsWriteMessage } from '../types/records-types.js'; +import { DateSort } from '../types/records-types.js'; import { Encoder } from './encoder.js'; import { Encryption } from './encryption.js'; +import { FilterUtility } from './filter.js'; import { KeyDerivationScheme } from './hd-key.js'; import { Message } from '../core/message.js'; +import { removeUndefinedProperties } from './object.js'; import { Secp256k1 } from './secp256k1.js'; import { DwnError, DwnErrorCode } from '../core/dwn-error.js'; import { normalizeProtocolUrl, normalizeSchemaUrl } from './url.js'; @@ -234,11 +238,14 @@ export class Records { schema = normalizeSchemaUrl(filter.schema); } - return { + const filterCopy = { ...filter, protocol, schema, }; + + removeUndefinedProperties(filterCopy); + return filterCopy; } /** @@ -247,23 +254,28 @@ export class Records { * @param filter A RecordsFilter * @returns {Filter} a generic Filter able to be used with 
MessageStore. */ - public static convertFilter(filter: RecordsFilter): Filter { + public static convertFilter(filter: RecordsFilter, dateSort?: DateSort): Filter { const filterCopy = { ...filter } as Filter; const { dateCreated, datePublished, dateUpdated } = filter; - const dateCreatedFilter = dateCreated ? this.convertRangeCriterion(dateCreated) : undefined; + const dateCreatedFilter = dateCreated ? FilterUtility.convertRangeCriterion(dateCreated) : undefined; if (dateCreatedFilter) { filterCopy.dateCreated = dateCreatedFilter; } - const datePublishedFilter = datePublished ? this.convertRangeCriterion(datePublished): undefined; + const datePublishedFilter = datePublished ? FilterUtility.convertRangeCriterion(datePublished): undefined; if (datePublishedFilter) { // only return published records when filtering with a datePublished range. filterCopy.published = true; filterCopy.datePublished = datePublishedFilter; } - const messageTimestampFilter = dateUpdated ? this.convertRangeCriterion(dateUpdated) : undefined; + // if we sort by `PublishedAscending` or `PublishedDescending` we must filter for only published records. + if (filterCopy.published !== true && (dateSort === DateSort.PublishedAscending || dateSort === DateSort.PublishedDescending)) { + filterCopy.published = true; + } + + const messageTimestampFilter = dateUpdated ? 
FilterUtility.convertRangeCriterion(dateUpdated) : undefined; if (messageTimestampFilter) { filterCopy.messageTimestamp = messageTimestampFilter; delete filterCopy.dateUpdated; @@ -271,25 +283,6 @@ export class Records { return filterCopy as Filter; } - private static convertRangeCriterion(inputFilter: RangeCriterion): RangeFilter | undefined { - let rangeFilter: RangeFilter | undefined; - if (inputFilter.to !== undefined && inputFilter.from !== undefined) { - rangeFilter = { - gte : inputFilter.from, - lt : inputFilter.to, - }; - } else if (inputFilter.to !== undefined) { - rangeFilter = { - lt: inputFilter.to, - }; - } else if (inputFilter.from !== undefined) { - rangeFilter = { - gte: inputFilter.from, - }; - } - return rangeFilter; - } - /** * Validates the referential integrity regarding delegated grant. * @param signaturePayload Decoded payload of the signature of the message. `undefined` if message is not signed. diff --git a/tests/event-log/event-log-level.spec.ts b/tests/event-log/event-log-level.spec.ts index 1a820d48f..8051f5025 100644 --- a/tests/event-log/event-log-level.spec.ts +++ b/tests/event-log/event-log-level.spec.ts @@ -1,9 +1,10 @@ -import type { Event } from '../../src/types/event-log.js'; - -import chaiAsPromised from 'chai-as-promised'; +import { ArrayUtility } from '../../src/utils/array.js'; import { EventLogLevel } from '../../src/event-log/event-log-level.js'; import { Message } from '../../src/core/message.js'; import { TestDataGenerator } from '../utils/test-data-generator.js'; + +import chaiAsPromised from 'chai-as-promised'; +import sinon from 'sinon'; import chai, { expect } from 'chai'; chai.use(chaiAsPromised); @@ -24,142 +25,20 @@ describe('EventLogLevel Tests', () => { await eventLog.close(); }); - it('separates events by tenant', async () => { - const { author, message } = await TestDataGenerator.generateRecordsWrite(); - const messageCid = await Message.getCid(message); - const watermark = await eventLog.append(author.did, 
messageCid); - - const { author: author2, message: message2 } = await TestDataGenerator.generateRecordsWrite(); - const messageCid2 = await Message.getCid(message2); - const watermark2 = await eventLog.append(author2.did, messageCid2); - - let events = await eventLog.getEvents(author.did); - expect(events.length).to.equal(1); - expect(events[0].watermark).to.equal(watermark); - expect(events[0].messageCid).to.equal(messageCid); - - events = await eventLog.getEvents(author2.did); - expect(events.length).to.equal(1); - expect(events[0].watermark).to.equal(watermark2); - expect(events[0].messageCid).to.equal(messageCid2); - }); - - it('returns events in the order that they were appended', async () => { - const expectedEvents: Array = []; - - const { author, message } = await TestDataGenerator.generateRecordsWrite(); - const messageCid = await Message.getCid(message); - const watermark = await eventLog.append(author.did, messageCid); - - expectedEvents.push({ watermark, messageCid }); - - for (let i = 0; i < 9; i += 1) { - const { message } = await TestDataGenerator.generateRecordsWrite({ author }); - const messageCid = await Message.getCid(message); - const watermark = await eventLog.append(author.did, messageCid); - - expectedEvents.push({ watermark, messageCid }); - } - - const events = await eventLog.getEvents(author.did); - expect(events.length).to.equal(expectedEvents.length); - - for (let i = 0; i < 10; i += 1) { - expect(events[i].watermark).to.equal(expectedEvents[i].watermark); - expect(events[i].messageCid).to.equal(expectedEvents[i].messageCid); - } - }); - - describe('getEventsAfter', () => { - it('gets all events for a tenant if watermark is not provided', async () => { - const expectedEvents: Event[] = []; - - const { author, message } = await TestDataGenerator.generateRecordsWrite(); - const messageCid = await Message.getCid(message); - - const watermark = await eventLog.append(author.did, messageCid); - expectedEvents.push({ messageCid, watermark }); - 
- for (let i = 0; i < 9; i += 1) { - const { message } = await TestDataGenerator.generateRecordsWrite({ author }); - const messageCid = await Message.getCid(message); - - const watermark = await eventLog.append(author.did, messageCid); - expectedEvents.push({ messageCid, watermark }); - } - - const events = await eventLog.getEvents(author.did); - expect(events.length).to.equal(10); - - for (let i = 0; i < events.length; i += 1) { - expect(events[i].messageCid).to.equal(expectedEvents[i].messageCid); - expect(events[i].watermark).to.equal(expectedEvents[i].watermark); - } - }); - - it('gets all events that occured after the watermark provided', async () => { - const { author, message } = await TestDataGenerator.generateRecordsWrite(); - const messageCid = await Message.getCid(message); - - await eventLog.append(author.did, messageCid); - - const messageCids: string[] = []; - let testWatermark = ''; - - for (let i = 0; i < 9; i += 1) { - const { message } = await TestDataGenerator.generateRecordsWrite({ author }); - const messageCid = await Message.getCid(message); - - const watermark = await eventLog.append(author.did, messageCid); - - if (i === 4) { - testWatermark = watermark; - } - - if (i > 4) { - messageCids.push(messageCid); - } - } - - const events = await eventLog.getEvents(author.did, { gt: testWatermark }); - expect(events.length).to.equal(4); - - for (let i = 0; i < events.length; i += 1) { - expect(events[i].messageCid).to.equal(messageCids[i], `${i}`); - } - }); - }); - describe('deleteEventsByCid', () => { - it('finds and deletes events that whose values match the cids provided', async () => { - const cids: string[] = []; - const { author, message } = await TestDataGenerator.generateRecordsWrite(); + it('deletes all index related data', async () => { + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite(); const messageCid = await Message.getCid(message); + const index = await 
recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, index); - await eventLog.append(author.did, messageCid); - - for (let i = 0; i < 9; i += 1) { - const { message } = await TestDataGenerator.generateRecordsWrite({ author }); - const messageCid = await Message.getCid(message); - - await eventLog.append(author.did, messageCid); - if (i % 2 === 0) { - cids.push(messageCid); - } - } - - const numEventsDeleted = await eventLog.deleteEventsByCid(author.did, cids); - expect(numEventsDeleted).to.equal(cids.length); + const indexLevelDeleteSpy = sinon.spy(eventLog.index, 'delete'); - const remainingEvents = await eventLog.getEvents(author.did); - expect(remainingEvents.length).to.equal(10 - cids.length); + await eventLog.deleteEventsByCid(author.did, [ messageCid ]); + expect(indexLevelDeleteSpy.callCount).to.equal(1); - const cidSet = new Set(cids); - for (const event of remainingEvents) { - if (cidSet.has(event.messageCid)) { - expect.fail(`${event.messageCid} should not exist`); - } - } + const keysAfterDelete = await ArrayUtility.fromAsyncGenerator(eventLog.index.db.keys()); + expect(keysAfterDelete.length).to.equal(0); }); }); }); \ No newline at end of file diff --git a/tests/event-log/event-log.spec.ts b/tests/event-log/event-log.spec.ts new file mode 100644 index 000000000..b486f2f5f --- /dev/null +++ b/tests/event-log/event-log.spec.ts @@ -0,0 +1,282 @@ +import type { EventLog } from '../../src/types/event-log.js'; + +import { Message } from '../../src/core/message.js'; +import { normalizeSchemaUrl } from '../../src/utils/url.js'; +import { TestDataGenerator } from '../utils/test-data-generator.js'; +import { TestStores } from '../test-stores.js'; + +import chaiAsPromised from 'chai-as-promised'; +import chai, { expect } from 'chai'; + +chai.use(chaiAsPromised); +export function testEventLog(): void { + describe('EventLog Tests', () => { + let eventLog: EventLog; + + before(async () => { + const stores = 
TestStores.get(); + eventLog = stores.eventLog; + await eventLog.open(); + }); + + beforeEach(async () => { + await eventLog.clear(); + }); + + after(async () => { + await eventLog.close(); + }); + + it('separates events by tenant', async () => { + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite(); + const message1Index = await recordsWrite.constructRecordsWriteIndexes(true); + const messageCid = await Message.getCid(message); + await eventLog.append(author.did, messageCid, message1Index); + + const { author: author2, message: message2, recordsWrite: recordsWrite2 } = await TestDataGenerator.generateRecordsWrite(); + const message2Index = await recordsWrite2.constructRecordsWriteIndexes(true); + const messageCid2 = await Message.getCid(message2); + await eventLog.append(author2.did, messageCid2, message2Index); + + let events = await eventLog.getEvents(author.did); + expect(events.length).to.equal(1); + expect(events[0]).to.equal(messageCid); + + events = await eventLog.getEvents(author2.did); + expect(events.length).to.equal(1); + expect(events[0]).to.equal(messageCid2); + }); + + it('returns events in the order that they were appended', async () => { + const expectedMessages: Array = []; + + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite(); + const messageCid = await Message.getCid(message); + const messageIndex = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, messageIndex); + + expectedMessages.push(messageCid); + + for (let i = 0; i < 9; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author }); + const messageCid = await Message.getCid(message); + const index = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, index); + + expectedMessages.push(messageCid); + } + + const events = await eventLog.getEvents(author.did); + 
expect(events.length).to.equal(expectedMessages.length); + + for (let i = 0; i < 10; i += 1) { + expect(events[i]).to.equal(expectedMessages[i]); + } + }); + + describe('getEventsAfter', () => { + it('gets all events for a tenant if a cursor is not provided', async () => { + const expectedMessages: string[] = []; + + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite(); + const messageCid = await Message.getCid(message); + const messageIndex = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, messageIndex); + expectedMessages.push(messageCid); + + for (let i = 0; i < 9; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author }); + const messageCid = await Message.getCid(message); + const index = await recordsWrite.constructRecordsWriteIndexes(true); + + await eventLog.append(author.did, messageCid, index); + expectedMessages.push(messageCid); + } + + const events = await eventLog.getEvents(author.did); + expect(events.length).to.equal(10); + + for (let i = 0; i < events.length; i += 1) { + expect(events[i]).to.equal(expectedMessages[i]); + } + }); + + it('gets all events that occurred after the cursor provided', async () => { + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite(); + const messageCid = await Message.getCid(message); + const index = await recordsWrite.constructRecordsWriteIndexes(true); + + await eventLog.append(author.did, messageCid, index); + + const expectedMessages: string[] = []; + let cursor = ''; + + for (let i = 0; i < 9; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author }); + const messageCid = await Message.getCid(message); + const index = await recordsWrite.constructRecordsWriteIndexes(true); + + await eventLog.append(author.did, messageCid, index); + if (i === 4) { + cursor = messageCid; + } + if (i > 4) { + 
expectedMessages.push(messageCid); + } + } + + const events = await eventLog.getEvents(author.did, { cursor: cursor }); + expect(events.length).to.equal(4); + + for (let i = 0; i < events.length; i += 1) { + expect(events[i]).to.equal(expectedMessages[i], `${i}`); + } + }); + }); + + describe('deleteEventsByCid', () => { + it('finds and deletes events that whose values match the cids provided', async () => { + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite(); + const messageCid = await Message.getCid(message); + const index = await recordsWrite.constructRecordsWriteIndexes(true); + + await eventLog.append(author.did, messageCid, index); + + const deleteMessages: string[] = []; + for (let i = 0; i < 9; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author }); + const messageCid = await Message.getCid(message); + const index = await recordsWrite.constructRecordsWriteIndexes(true); + + await eventLog.append(author.did, messageCid, index); + if (i % 2 === 0) { + deleteMessages.push(messageCid); + } + } + + await eventLog.deleteEventsByCid(author.did, deleteMessages); + const remainingEvents = await eventLog.getEvents(author.did); + expect(remainingEvents.length).to.equal(10 - deleteMessages.length); + expect(remainingEvents).to.not.include.members(deleteMessages); + }); + + it('skips if cid is invalid', async () => { + const cids: string[] = []; + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite(); + const messageCid = await Message.getCid(message); + const index = await recordsWrite.constructRecordsWriteIndexes(true); + + await eventLog.append(author.did, messageCid, index); + cids.push(messageCid); + + for (let i = 0; i < 3; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author }); + const messageCid = await Message.getCid(message); + const index = await 
recordsWrite.constructRecordsWriteIndexes(true); + + await eventLog.append(author.did, messageCid, index); + cids.push(messageCid); + } + + // does not error and deletes all messages + await eventLog.deleteEventsByCid(author.did, [...cids, 'someInvalidCid' ]); + + const remainingEvents = await eventLog.getEvents(author.did); + expect(remainingEvents.length).to.equal(0); + }); + }); + + describe('query', () => { + it('returns filtered events in the order that they were appended', async () => { + const expectedMessages: Array = []; + + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ schema: 'schema1' }); + const messageCid = await Message.getCid(message); + const indexes = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, indexes); + + expectedMessages.push(messageCid); + + for (let i = 0; i < 5; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author, schema: 'schema1' }); + const messageCid = await Message.getCid(message); + const indexes = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, indexes); + + expectedMessages.push(messageCid); + } + + // insert a record that will not show up in the filtered query. + // not inserted into expected events. 
+ const { message: message2, recordsWrite: recordsWrite2 } = await TestDataGenerator.generateRecordsWrite({ author }); + const message2Cid = await Message.getCid(message2); + const message2Indexes = await recordsWrite2.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, message2Cid, message2Indexes); + + for (let i = 0; i < 5; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author, schema: 'schema1' }); + const messageCid = await Message.getCid(message); + const indexes = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, indexes); + + expectedMessages.push(messageCid); + } + + const events = await eventLog.queryEvents(author.did, [{ schema: normalizeSchemaUrl('schema1') }]); + expect(events.length).to.equal(expectedMessages.length); + + for (let i = 0; i < expectedMessages.length; i += 1) { + expect(events[i]).to.equal(expectedMessages[i]); + } + }); + + it('returns filtered events after cursor', async () => { + const expectedEvents: Array = []; + let testCursor; + + const { author, message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ schema: 'schema1' }); + const messageCid = await Message.getCid(message); + const indexes = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, indexes); + + for (let i = 0; i < 5; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author, schema: 'schema1' }); + const messageCid = await Message.getCid(message); + const indexes = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, indexes); + + if (i === 3) { + testCursor = messageCid; + } + + if (i > 3) { + expectedEvents.push(messageCid); + } + } + + // insert a record that will not show up in the filtered query. + // not inserted into expected events because it's not a part of the schema. 
+ const { message: message2, recordsWrite: recordsWrite2 } = await TestDataGenerator.generateRecordsWrite({ author }); + const message2Cid = await Message.getCid(message2); + const message2Indexes = await recordsWrite2.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, message2Cid, message2Indexes); + + for (let i = 0; i < 5; i += 1) { + const { message, recordsWrite } = await TestDataGenerator.generateRecordsWrite({ author, schema: 'schema1' }); + const messageCid = await Message.getCid(message); + const indexes = await recordsWrite.constructRecordsWriteIndexes(true); + await eventLog.append(author.did, messageCid, indexes); + + expectedEvents.push(messageCid); + } + + const events = await eventLog.queryEvents(author.did, [{ schema: normalizeSchemaUrl('schema1') }], testCursor); + expect(events.length).to.equal(expectedEvents.length); + + for (let i = 0; i < expectedEvents.length; i += 1) { + expect(events[i]).to.equal(expectedEvents[i]); + } + }); + }); + }); +} \ No newline at end of file diff --git a/tests/handlers/events-get.spec.ts b/tests/handlers/events-get.spec.ts index 68ef3b149..673e539f3 100644 --- a/tests/handlers/events-get.spec.ts +++ b/tests/handlers/events-get.spec.ts @@ -5,6 +5,7 @@ import type { MessageStore } from '../../src/index.js'; +import { EventsGetHandler } from '../../src/handlers/events-get.js'; import { expect } from 'chai'; import { TestDataGenerator } from '../utils/test-data-generator.js'; import { @@ -53,9 +54,11 @@ export function testEventsGetHandler(): void { const bob = await DidKeyResolver.generate(); const { message } = await TestDataGenerator.generateEventsGet({ author: alice }); - const reply = await dwn.processMessage(bob.did, message); + const eventsGetHandler = new EventsGetHandler(didResolver, eventLog); + const reply = await eventsGetHandler.handle({ tenant: bob.did, message }); expect(reply.status.code).to.equal(401); + expect(reply.events).to.not.exist; }); it('returns a 400 if message is 
invalid', async () => { @@ -63,13 +66,14 @@ export function testEventsGetHandler(): void { const { message } = await TestDataGenerator.generateEventsGet({ author: alice }); (message['descriptor'] as any)['troll'] = 'hehe'; - - const reply = await dwn.processMessage(alice.did, message); + const eventsGetHandler = new EventsGetHandler(didResolver, eventLog); + const reply = await eventsGetHandler.handle({ tenant: alice.did, message }); expect(reply.status.code).to.equal(400); + expect(reply.events).to.not.exist; }); - it('returns all events for a tenant if watermark is not provided', async () => { + it('returns all events for a tenant if cursor is not provided', async () => { const alice = await DidKeyResolver.generate(); const expectedCids: string[] = []; @@ -91,11 +95,11 @@ export function testEventsGetHandler(): void { expect(reply.events?.length).to.equal(expectedCids.length); for (let i = 0; i < reply.events!.length; i += 1) { - expect(reply.events![i].messageCid).to.equal(expectedCids[i]); + expect(reply.events![i]).to.equal(expectedCids[i]); } }); - it('returns all events after watermark if watermark is provided', async () => { + it('returns all events after cursor if provided', async () => { const alice = await DidKeyResolver.generate(); for (let i = 0; i < 5; i += 1) { @@ -110,7 +114,7 @@ export function testEventsGetHandler(): void { expect(reply.status.code).to.equal(200); - const watermark = reply.events![reply.events!.length - 1].watermark; + const cursor = reply.events![reply.events!.length - 1]; const expectedCids: string[] = []; for (let i = 0; i < 3; i += 1) { @@ -122,7 +126,7 @@ export function testEventsGetHandler(): void { expectedCids.push(messageCid); } - const { message: m } = await TestDataGenerator.generateEventsGet({ author: alice, watermark }); + const { message: m } = await TestDataGenerator.generateEventsGet({ author: alice, cursor }); reply = await dwn.processMessage(alice.did, m); expect(reply.status.code).to.equal(200); @@ -130,7 
+134,7 @@ export function testEventsGetHandler(): void { expect(reply.events!.length).to.equal(expectedCids.length); for (let i = 0; i < reply.events!.length; i += 1) { - expect(reply.events![i].messageCid).to.equal(expectedCids[i]); + expect(reply.events![i]).to.equal(expectedCids[i]); } }); }); diff --git a/tests/handlers/events-query.spec.ts b/tests/handlers/events-query.spec.ts new file mode 100644 index 000000000..ddcb017db --- /dev/null +++ b/tests/handlers/events-query.spec.ts @@ -0,0 +1,111 @@ +import type { + DataStore, + EventLog, + MessageStore +} from '../../src/index.js'; + +import { EventsQueryHandler } from '../../src/handlers/events-query.js'; +import { expect } from 'chai'; +import { TestDataGenerator } from '../utils/test-data-generator.js'; +import { TestStores } from '../test-stores.js'; +import { + DidKeyResolver, + DidResolver, + Dwn, +} from '../../src/index.js'; + + +export function testEventsQueryHandler(): void { + describe('EventsQueryHandler.handle()', () => { + let didResolver: DidResolver; + let messageStore: MessageStore; + let dataStore: DataStore; + let eventLog: EventLog; + let dwn: Dwn; + + // important to follow the `before` and `after` pattern to initialize and clean the stores in tests + // so that different test suites can reuse the same backend store for testing + before(async () => { + didResolver = new DidResolver([new DidKeyResolver()]); + + const stores = TestStores.get(); + messageStore = stores.messageStore; + dataStore = stores.dataStore; + eventLog = stores.eventLog; + + dwn = await Dwn.create({ didResolver, messageStore, dataStore, eventLog }); + }); + + beforeEach(async () => { + // clean up before each test rather than after so that a test does not depend on other tests to do the clean up + await messageStore.clear(); + await dataStore.clear(); + await eventLog.clear(); + }); + + after(async () => { + await dwn.close(); + }); + + it('returns a 401 if tenant is not author', async () => { + const alice = await 
DidKeyResolver.generate(); + const bob = await DidKeyResolver.generate(); + + const { message } = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema1' }] + }); + const eventsQueryHandler = new EventsQueryHandler(didResolver, eventLog); + const reply = await eventsQueryHandler.handle({ tenant: bob.did, message }); + + expect(reply.status.code).to.equal(401); + expect(reply.events).to.not.exist; + }); + + it('returns a 400 if message is invalid', async () => { + const alice = await DidKeyResolver.generate(); + + const { message } = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema1' }] + }); + (message['descriptor'] as any)['troll'] = 'hehe'; + + const eventsQueryHandler = new EventsQueryHandler(didResolver, eventLog); + const reply = await eventsQueryHandler.handle({ tenant: alice.did, message }); + + expect(reply.status.code).to.equal(400); + expect(reply.events).to.not.exist; + }); + + it('returns 400 if no filters are provided', async () => { + const alice = await DidKeyResolver.generate(); + + const { message } = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema1' }], + }); // create with filter to prevent failure on .create() + message.descriptor.filters = []; // remove filters + const eventsQueryHandler = new EventsQueryHandler(didResolver, eventLog); + const reply = await eventsQueryHandler.handle({ tenant: alice.did, message }); + + expect(reply.status.code).to.equal(400); + expect(reply.events).to.not.exist; + }); + + it('returns 400 if an empty filter without properties is provided', async () => { + const alice = await DidKeyResolver.generate(); + + const { message } = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema1' }], + }); // create with filter to prevent failure on .create() + message.descriptor.filters = [{}]; // empty out filter properties + const eventsQueryHandler = 
new EventsQueryHandler(didResolver, eventLog); + const reply = await eventsQueryHandler.handle({ tenant: alice.did, message }); + + expect(reply.status.code).to.equal(400); + expect(reply.events).to.not.exist; + }); + }); +} diff --git a/tests/handlers/permissions-grant.spec.ts b/tests/handlers/permissions-grant.spec.ts index 93292a712..d312157e8 100644 --- a/tests/handlers/permissions-grant.spec.ts +++ b/tests/handlers/permissions-grant.spec.ts @@ -11,13 +11,14 @@ import { DidResolver } from '../../src/did/did-resolver.js'; import { Dwn } from '../../src/dwn.js'; import { DwnErrorCode } from '../../src/core/dwn-error.js'; import { expect } from 'chai'; +import { Jws } from '../../src/index.js'; import { Message } from '../../src/core/message.js'; import { PermissionsGrant } from '../../src/interfaces/permissions-grant.js'; import { PermissionsGrantHandler } from '../../src/handlers/permissions-grant.js'; import { TestDataGenerator } from '../utils/test-data-generator.js'; import { TestStores } from '../test-stores.js'; import { Time } from '../../src/utils/time.js'; -import { DwnInterfaceName, DwnMethodName, Jws } from '../../src/index.js'; +import { DwnInterfaceName, DwnMethodName } from '../../src/enums/dwn-interface-method.js'; export function testPermissionsGrantHandler(): void { describe('PermissionsGrantHandler.handle()', () => { @@ -262,7 +263,7 @@ export function testPermissionsGrantHandler(): void { expect(events.length).to.equal(1); const messageCid = await Message.getCid(message); - expect(events[0].messageCid).to.equal(messageCid); + expect(events[0]).to.equal(messageCid); }); it('should not add a new event if we have already stored this PermissionsRequest', async () => { @@ -282,7 +283,7 @@ export function testPermissionsGrantHandler(): void { expect(events.length).to.equal(1); const messageCid = await Message.getCid(message); - expect(events[0].messageCid).to.equal(messageCid); + expect(events[0]).to.equal(messageCid); }); }); }); diff --git 
a/tests/handlers/permissions-request.spec.ts b/tests/handlers/permissions-request.spec.ts index 78b1a3790..1ff897841 100644 --- a/tests/handlers/permissions-request.spec.ts +++ b/tests/handlers/permissions-request.spec.ts @@ -121,7 +121,7 @@ export function testPermissionsRequestHandler(): void { expect(events.length).to.equal(1); const messageCid = await Message.getCid(message); - expect(events[0].messageCid).to.equal(messageCid); + expect(events[0]).to.equal(messageCid); }); it('should not add a new event if we have already stored this PermissionsRequest', async () => { @@ -144,7 +144,7 @@ export function testPermissionsRequestHandler(): void { expect(events.length).to.equal(1); const messageCid = await Message.getCid(message); - expect(events[0].messageCid).to.equal(messageCid); + expect(events[0]).to.equal(messageCid); }); }); }); diff --git a/tests/handlers/permissions-revoke.spec.ts b/tests/handlers/permissions-revoke.spec.ts index 5a38590bf..8b45a3965 100644 --- a/tests/handlers/permissions-revoke.spec.ts +++ b/tests/handlers/permissions-revoke.spec.ts @@ -298,7 +298,7 @@ describe('PermissionsRevokeHandler.handle()', () => { // The revoke should be the second event const messageCid = await Message.getCid(permissionsRevoke.message); - expect(events[1].messageCid).to.equal(messageCid); + expect(events[1]).to.equal(messageCid); }); it('should remove events for existing PermissionsRevoke messages with timestamp after the incoming message', async () => { @@ -337,7 +337,7 @@ describe('PermissionsRevokeHandler.handle()', () => { const permissionsRevokeCid2 = await Message.getCid(permissionsRevoke2.message); events = await eventLog.getEvents(alice.did); expect(events.length).to.equal(2); - expect(events[1].messageCid).to.equal(permissionsRevokeCid2); + expect(events[1]).to.equal(permissionsRevokeCid2); // Process the pre-created Revoke const permissionsRevokeReply1 = await dwn.processMessage(alice.did, permissionsRevoke1.message); @@ -347,7 +347,7 @@ 
describe('PermissionsRevokeHandler.handle()', () => { const permissionsRevokeCid1 = await Message.getCid(permissionsRevoke1.message); events = await eventLog.getEvents(alice.did); expect(events.length).to.equal(2); - expect(events[1].messageCid).to.equal(permissionsRevokeCid1); + expect(events[1]).to.equal(permissionsRevokeCid1); }); }); }); diff --git a/tests/handlers/protocols-configure.spec.ts b/tests/handlers/protocols-configure.spec.ts index 707a9da0a..d55bd9be0 100644 --- a/tests/handlers/protocols-configure.spec.ts +++ b/tests/handlers/protocols-configure.spec.ts @@ -293,7 +293,7 @@ export function testProtocolsConfigureHandler(): void { expect(events.length).to.equal(1); const messageCid = await Message.getCid(message); - expect(events[0].messageCid).to.equal(messageCid); + expect(events[0]).to.equal(messageCid); }); it('should delete older ProtocolsConfigure events when one is overwritten', async () => { @@ -312,7 +312,7 @@ export function testProtocolsConfigureHandler(): void { expect(events.length).to.equal(1); const newestMessageCid = await Message.getCid(newestWrite.message); - expect(events[0].messageCid).to.equal(newestMessageCid); + expect(events[0]).to.equal(newestMessageCid); }); }); }); diff --git a/tests/handlers/records-delete.spec.ts b/tests/handlers/records-delete.spec.ts index 5a0fd19a7..863a63d07 100644 --- a/tests/handlers/records-delete.spec.ts +++ b/tests/handlers/records-delete.spec.ts @@ -20,7 +20,9 @@ import threadRoleProtocolDefinition from '../vectors/protocol-definitions/thread import { ArrayUtility } from '../../src/utils/array.js'; import { DidKeyResolver } from '../../src/did/did-key-resolver.js'; import { DwnErrorCode } from '../../src/index.js'; +import { DwnMethodName } from '../../src/enums/dwn-interface-method.js'; import { Message } from '../../src/core/message.js'; +import { normalizeSchemaUrl } from '../../src/utils/url.js'; import { RecordsDeleteHandler } from '../../src/handlers/records-delete.js'; import { 
stubInterface } from 'ts-sinon'; import { TestDataGenerator } from '../utils/test-data-generator.js'; @@ -648,6 +650,36 @@ export function testRecordsDeleteHandler(): void { expect(recordsDeleteReply.status.detail).to.contain(DwnErrorCode.RecordsDeleteAuthorizationFailed); }); + it('should index additional properties from the RecordsWrite being deleted', async () => { + const alice = await DidKeyResolver.generate(); + + // initial write + const initialWriteData = await TestDataGenerator.generateRecordsWrite({ author: alice, schema: 'testSchema' }); + const initialWriteReply = await dwn.processMessage(alice.did, initialWriteData.message, initialWriteData.dataStream); + expect(initialWriteReply.status.code).to.equal(202); + + // generate subsequent write and delete with the delete having an earlier timestamp + // NOTE: creating RecordsDelete first ensures it has an earlier `messageTimestamp` time + const recordsDelete = await RecordsDelete.create({ + recordId : initialWriteData.message.recordId, + signer : Jws.createSigner(alice) + }); + const deleteMessageCid = await Message.getCid(recordsDelete.message); + + const deleteReply = await dwn.processMessage(alice.did, recordsDelete.message); + expect(deleteReply.status.code).to.equal(202); + + // message store + const { messages } = await messageStore.query(alice.did, [{ schema: normalizeSchemaUrl('testSchema'), method: DwnMethodName.Delete }]); + expect(messages.length).to.equal(1); + expect(await Message.getCid(messages[0])).to.equal(deleteMessageCid); + + // event log + const events = await eventLog.queryEvents(alice.did, [{ schema: normalizeSchemaUrl('testSchema'), method: DwnMethodName.Delete }]); + expect(events.length).to.equal(1); + expect(events[0]).to.equal(deleteMessageCid); + }); + describe('event log', () => { it('should include RecordsDelete event and keep initial RecordsWrite event', async () => { const alice = await DidKeyResolver.generate(); @@ -671,7 +703,7 @@ export function 
testRecordsDeleteHandler(): void { const deleteMessageCid = await Message.getCid(recordsDelete.message); const expectedMessageCids = new Set([writeMessageCid, deleteMessageCid]); - for (const { messageCid } of events) { + for (const messageCid of events) { expectedMessageCids.delete(messageCid); } @@ -707,7 +739,7 @@ export function testRecordsDeleteHandler(): void { const deletedMessageCid = await Message.getCid(newWrite.message); - for (const { messageCid } of events) { + for (const messageCid of events) { if (messageCid === deletedMessageCid ) { expect.fail(`${messageCid} should not exist`); } diff --git a/tests/handlers/records-query.spec.ts b/tests/handlers/records-query.spec.ts index a70204c05..7632da719 100644 --- a/tests/handlers/records-query.spec.ts +++ b/tests/handlers/records-query.spec.ts @@ -14,19 +14,18 @@ import { ArrayUtility } from '../../src/utils/array.js'; import { DateSort } from '../../src/types/records-types.js'; import { DidKeyResolver } from '../../src/did/did-key-resolver.js'; import { DwnConstant } from '../../src/core/dwn-constant.js'; +import { DwnErrorCode } from '../../src/index.js'; import { Encoder } from '../../src/utils/encoder.js'; import { Jws } from '../../src/utils/jws.js'; import { Message } from '../../src/core/message.js'; import { RecordsQuery } from '../../src/interfaces/records-query.js'; import { RecordsQueryHandler } from '../../src/handlers/records-query.js'; import { RecordsWriteHandler } from '../../src/handlers/records-write.js'; -import { SortOrder } from '../../src/types/message-types.js'; import { stubInterface } from 'ts-sinon'; import { TestDataGenerator } from '../utils/test-data-generator.js'; import { TestStores } from '../test-stores.js'; import { TestStubGenerator } from '../utils/test-stub-generator.js'; import { DidResolver, Dwn, RecordsWrite, Time } from '../../src/index.js'; -import { DwnErrorCode, MessageStoreLevel } from '../../src/index.js'; chai.use(chaiAsPromised); @@ -65,6 +64,29 @@ export 
function testRecordsQueryHandler(): void { await dwn.close(); }); + it('should reject when published is set to false with a dateSort set to sorting by `PublishedAscending` or `PublishedDescending`', async () => { + const alice = await TestDataGenerator.generatePersona(); + TestStubGenerator.stubDidResolver(didResolver, [alice]); + + const query = await TestDataGenerator.generateRecordsQuery({ author: alice, filter: { published: false } }); + + //control + let reply = await dwn.processMessage(alice.did, query.message); + expect(reply.status.code).to.equal(200); + + // modify dateSort to publishedAscending + query.message.descriptor.dateSort = DateSort.PublishedAscending; + reply = await dwn.processMessage(alice.did, query.message); + expect(reply.status.code).to.equal(400); + expect(reply.status.detail).to.include('queries must not filter for `published:false` and sort'); + + // modify dateSort to publishedDescending + query.message.descriptor.dateSort = DateSort.PublishedDescending; + reply = await dwn.processMessage(alice.did, query.message); + expect(reply.status.code).to.equal(400); + expect(reply.status.detail).to.include('queries must not filter for `published:false` and sort'); + }); + it('should return recordId, descriptor, authorization and attestation', async () => { const alice = await TestDataGenerator.generatePersona(); const bob = await TestDataGenerator.generatePersona(); @@ -559,7 +581,7 @@ export function testRecordsQueryHandler(): void { }); it('should be able to range query by `dateCreated`', async () => { - // scenario: 3 records authored by alice, created on first of 2021, 2022, and 2023 respectively, + // scenario: 3 records authored by alice, created on first of 2021, 2022, and 2023 respectively // only the first 2 records share the same schema const firstDayOf2021 = Time.createTimestamp({ year: 2021, month: 1, day: 1 }); const firstDayOf2022 = Time.createTimestamp({ year: 2022, month: 1, day: 1 }); @@ -947,9 +969,8 @@ export function 
testRecordsQueryHandler(): void { }); it('should be able use range and exact match queries at the same time', async () => { - // scenario: 3 records authored by alice, created on first of 2021, 2022, and 2023 respectively, + // scenario: 3 records authored by alice, created on first of 2021, 2022, and 2023 respectively // only the first 2 records share the same schema - const firstDayOf2021 = Time.createTimestamp({ year: 2021, month: 1, day: 1 }); const firstDayOf2022 = Time.createTimestamp({ year: 2022, month: 1, day: 1 }); const firstDayOf2023 = Time.createTimestamp({ year: 2023, month: 1, day: 1 }); @@ -1054,36 +1075,36 @@ export function testRecordsQueryHandler(): void { expect(publishedWriteReply.status.code).to.equal(202); expect(unpublishedWriteReply.status.code).to.equal(202); - // test published date ascending sort does not include any records that is not published + // test published date ascending sort does not include any records that are not published const publishedAscendingQueryData = await TestDataGenerator.generateRecordsQuery({ author : alice, dateSort : DateSort.PublishedAscending, filter : { schema } }); const publishedAscendingQueryReply = await dwn.processMessage(alice.did, publishedAscendingQueryData.message); - expect(publishedAscendingQueryReply.entries?.length).to.equal(1); - expect(publishedAscendingQueryReply.entries![0].descriptor['datePublished']).to.equal(publishedWriteData.message.descriptor.datePublished); + expect(publishedAscendingQueryReply.entries![0].recordId).to.equal(publishedWriteData.message.recordId); - // test published date scending sort does not include any records that is not published + // test published date scending sort does not include any records that are not published const publishedDescendingQueryData = await TestDataGenerator.generateRecordsQuery({ author : alice, dateSort : DateSort.PublishedDescending, filter : { schema } }); const publishedDescendingQueryReply = await dwn.processMessage(alice.did, 
publishedDescendingQueryData.message); - expect(publishedDescendingQueryReply.entries?.length).to.equal(1); - expect(publishedDescendingQueryReply.entries![0].descriptor['datePublished']).to.equal(publishedWriteData.message.descriptor.datePublished); + expect(publishedDescendingQueryReply.entries![0].recordId).to.equal(publishedWriteData.message.recordId); }); - it('should sort records if `dateSort` is specified', async () => { - // insert three messages into DB + it('should sort records if `dateSort` is specified with and without a cursor', async () => { + // insert three messages into DB const alice = await TestDataGenerator.generatePersona(); const schema = 'aSchema'; const published = true; const write1Data = await TestDataGenerator.generateRecordsWrite({ author: alice, schema, published }); + await Time.minimalSleep(); const write2Data = await TestDataGenerator.generateRecordsWrite({ author: alice, schema, published }); + await Time.minimalSleep(); const write3Data = await TestDataGenerator.generateRecordsWrite({ author: alice, schema, published }); // setting up a stub method resolver @@ -1105,10 +1126,21 @@ export function testRecordsQueryHandler(): void { filter : { schema } }); const createdAscendingQueryReply = await dwn.processMessage(alice.did, createdAscendingQueryData.message); + expect(createdAscendingQueryReply.entries!.length).to.equal(3); + expect(createdAscendingQueryReply.entries?.[0].recordId).to.equal(write1Data.message.recordId); + expect(createdAscendingQueryReply.entries?.[1].recordId).to.equal(write2Data.message.recordId); + expect(createdAscendingQueryReply.entries?.[2].recordId).to.equal(write3Data.message.recordId); - expect(createdAscendingQueryReply.entries?.[0].descriptor['dateCreated']).to.equal(write1Data.message.descriptor.dateCreated); - expect(createdAscendingQueryReply.entries?.[1].descriptor['dateCreated']).to.equal(write2Data.message.descriptor.dateCreated); - 
expect(createdAscendingQueryReply.entries?.[2].descriptor['dateCreated']).to.equal(write3Data.message.descriptor.dateCreated); + const createdAscendingWithCursor = await TestDataGenerator.generateRecordsQuery({ + author : alice, + dateSort : DateSort.CreatedAscending, + filter : { schema }, + pagination : { cursor: await Message.getCid(write1Data.message) } + }); + const createdAscendingWithCursorReply = await dwn.processMessage(alice.did, createdAscendingWithCursor.message); + expect(createdAscendingWithCursorReply.entries!.length).to.equal(2); + expect(createdAscendingWithCursorReply.entries![0].recordId).to.equal(write2Data.message.recordId); + expect(createdAscendingWithCursorReply.entries![1].recordId).to.equal(write3Data.message.recordId); // createdDescending test const createdDescendingQueryData = await TestDataGenerator.generateRecordsQuery({ @@ -1117,10 +1149,20 @@ export function testRecordsQueryHandler(): void { filter : { schema } }); const createdDescendingQueryReply = await dwn.processMessage(alice.did, createdDescendingQueryData.message); + expect(createdDescendingQueryReply.entries!.length).to.equal(3); + expect(createdDescendingQueryReply.entries?.[0].recordId).to.equal(write3Data.message.recordId); + expect(createdDescendingQueryReply.entries?.[1].recordId).to.equal(write2Data.message.recordId); + expect(createdDescendingQueryReply.entries?.[2].recordId).to.equal(write1Data.message.recordId); - expect(createdDescendingQueryReply.entries?.[0].descriptor['dateCreated']).to.equal(write3Data.message.descriptor.dateCreated); - expect(createdDescendingQueryReply.entries?.[1].descriptor['dateCreated']).to.equal(write2Data.message.descriptor.dateCreated); - expect(createdDescendingQueryReply.entries?.[2].descriptor['dateCreated']).to.equal(write1Data.message.descriptor.dateCreated); + const createdDescendingWithCursor = await TestDataGenerator.generateRecordsQuery({ + author : alice, + dateSort : DateSort.CreatedDescending, + filter : { schema }, + 
pagination : { cursor: await Message.getCid(write2Data.message) } + }); + const createdDescendingWithCursorReply = await dwn.processMessage(alice.did, createdDescendingWithCursor.message); + expect(createdDescendingWithCursorReply.entries!.length).to.equal(1); + expect(createdDescendingWithCursorReply.entries![0].recordId).to.equal(write1Data.message.recordId); // publishedAscending test const publishedAscendingQueryData = await TestDataGenerator.generateRecordsQuery({ @@ -1129,10 +1171,21 @@ export function testRecordsQueryHandler(): void { filter : { schema } }); const publishedAscendingQueryReply = await dwn.processMessage(alice.did, publishedAscendingQueryData.message); + expect(publishedAscendingQueryReply.entries!.length).to.equal(3); + expect(publishedAscendingQueryReply.entries?.[0].recordId).to.equal(write1Data.message.recordId); + expect(publishedAscendingQueryReply.entries?.[1].recordId).to.equal(write2Data.message.recordId); + expect(publishedAscendingQueryReply.entries?.[2].recordId).to.equal(write3Data.message.recordId); - expect(publishedAscendingQueryReply.entries?.[0].descriptor['datePublished']).to.equal(write1Data.message.descriptor.datePublished); - expect(publishedAscendingQueryReply.entries?.[1].descriptor['datePublished']).to.equal(write2Data.message.descriptor.datePublished); - expect(publishedAscendingQueryReply.entries?.[2].descriptor['datePublished']).to.equal(write3Data.message.descriptor.datePublished); + const publishedAscendingWithCursor = await TestDataGenerator.generateRecordsQuery({ + author : alice, + dateSort : DateSort.PublishedAscending, + filter : { schema }, + pagination : { cursor: await Message.getCid(write1Data.message) } + }); + const publishedAscendingWithCursorReply = await dwn.processMessage(alice.did, publishedAscendingWithCursor.message); + expect(publishedAscendingWithCursorReply.entries!.length).to.equal(2); + expect(publishedAscendingWithCursorReply.entries![0].recordId).to.equal(write2Data.message.recordId); + 
expect(publishedAscendingWithCursorReply.entries![1].recordId).to.equal(write3Data.message.recordId); // publishedDescending test const publishedDescendingQueryData = await TestDataGenerator.generateRecordsQuery({ @@ -1141,10 +1194,20 @@ export function testRecordsQueryHandler(): void { filter : { schema } }); const publishedDescendingQueryReply = await dwn.processMessage(alice.did, publishedDescendingQueryData.message); + expect(publishedDescendingQueryReply.entries!.length).to.equal(3); + expect(publishedDescendingQueryReply.entries?.[0].recordId).to.equal(write3Data.message.recordId); + expect(publishedDescendingQueryReply.entries?.[1].recordId).to.equal(write2Data.message.recordId); + expect(publishedDescendingQueryReply.entries?.[2].recordId).to.equal(write1Data.message.recordId); - expect(publishedDescendingQueryReply.entries?.[0].descriptor['datePublished']).to.equal(write3Data.message.descriptor.datePublished); - expect(publishedDescendingQueryReply.entries?.[1].descriptor['datePublished']).to.equal(write2Data.message.descriptor.datePublished); - expect(publishedDescendingQueryReply.entries?.[2].descriptor['datePublished']).to.equal(write1Data.message.descriptor.datePublished); + const publishedDescendingWithCursor = await TestDataGenerator.generateRecordsQuery({ + author : alice, + dateSort : DateSort.CreatedDescending, + filter : { schema }, + pagination : { cursor: await Message.getCid(write2Data.message) } + }); + const publishedDescendingWithCursorReply = await dwn.processMessage(alice.did, publishedDescendingWithCursor.message); + expect(publishedDescendingWithCursorReply.entries!.length).to.equal(1); + expect(publishedDescendingWithCursorReply.entries![0].recordId).to.equal(write1Data.message.recordId); }); it('should tiebreak using `messageCid` when sorting encounters identical values', async () => { @@ -1182,12 +1245,64 @@ export function testRecordsQueryHandler(): void { // verify that messages returned are sorted/tiebreak by `messageCid` 
expect(queryReply.status.code).to.equal(200); expect(queryReply.entries?.length).to.equal(3); - expect(queryReply.entries![0].recordId).to.equal(oldestWrite.message.recordId); - expect(queryReply.entries![1].recordId).to.equal(middleWrite.message.recordId); - expect(queryReply.entries![2].recordId).to.equal(newestWrite.message.recordId); + expect((queryReply.entries![0]).recordId).to.equal(oldestWrite.message.recordId); + expect((queryReply.entries![1]).recordId).to.equal(middleWrite.message.recordId); + expect((queryReply.entries![2]).recordId).to.equal(newestWrite.message.recordId); + + // sort descending should be reversed + const queryMessageDescending = await TestDataGenerator.generateRecordsQuery({ + author : alice, + filter : { schema }, + dateSort : DateSort.CreatedDescending + }); + const descendingReply = await dwn.processMessage(alice.did, queryMessageDescending.message); + expect((descendingReply.entries![0]).recordId).to.equal(newestWrite.message.recordId); + expect((descendingReply.entries![1]).recordId).to.equal(middleWrite.message.recordId); + expect((descendingReply.entries![2]).recordId).to.equal(oldestWrite.message.recordId); + }); + + it('should paginate all records in ascending order', async () => { + const alice = await DidKeyResolver.generate(); + + const messages = await Promise.all(Array(12).fill({}).map(_ => TestDataGenerator.generateRecordsWrite({ + author : alice, + schema : 'https://schema' + }))); + for (const message of messages) { + const result = await dwn.processMessage(alice.did, message.message, message.dataStream); + expect(result.status.code).to.equal(202); + } + + const limit = 5; + const results: RecordsQueryReplyEntry[] = []; + let cursor; + while (true) { + const pageQuery = await TestDataGenerator.generateRecordsQuery({ + author : alice, + filter : { + schema: 'https://schema' + }, + pagination: { + limit: limit, + cursor, + }, + dateSort: DateSort.CreatedAscending + }); + + const pageReply = await 
dwn.processMessage(alice.did, pageQuery.message); + expect(pageReply.status.code).to.equal(200); + cursor = pageReply.cursor; + expect(pageReply.entries?.length).to.be.lte(limit); + results.push(...pageReply.entries!); + if (cursor === undefined) { + break; + } + } + expect(results.length).to.equal(messages.length); + expect(messages.every(({ message }) => results.map(e => (e as RecordsWriteMessage).recordId).includes(message.recordId))); }); - it('should paginate records if pagination is provided', async () => { + it('should paginate all records in descending order', async () => { const alice = await DidKeyResolver.generate(); const messages = await Promise.all(Array(12).fill({}).map(_ => TestDataGenerator.generateRecordsWrite({ @@ -1212,6 +1327,7 @@ export function testRecordsQueryHandler(): void { limit: limit, cursor, }, + dateSort: DateSort.CreatedDescending, }); const pageReply = await dwn.processMessage(alice.did, pageQuery.message); @@ -1338,30 +1454,30 @@ export function testRecordsQueryHandler(): void { // directly inserting data to datastore so that we don't have to setup to grant Bob permission to write to Alice's DWN const recordsWriteHandler = new RecordsWriteHandler(didResolver, messageStore, dataStore, eventLog); - const additionalIndexes1 = await record1Data.recordsWrite.constructRecordsWriteIndexes(true); + const recordIndexes1 = await record1Data.recordsWrite.constructRecordsWriteIndexes(true); record1Data.message = await recordsWriteHandler.processEncodedData(record1Data.message, record1Data.dataStream); - await messageStore.put(alice.did, record1Data.message, additionalIndexes1); - await eventLog.append(alice.did, await Message.getCid(record1Data.message)); + await messageStore.put(alice.did, record1Data.message, recordIndexes1); + await eventLog.append(alice.did, await Message.getCid(record1Data.message), recordIndexes1); - const additionalIndexes2 = await record2Data.recordsWrite.constructRecordsWriteIndexes(true); + const recordIndexes2 = 
await record2Data.recordsWrite.constructRecordsWriteIndexes(true); record2Data.message = await recordsWriteHandler.processEncodedData(record2Data.message, record2Data.dataStream); - await messageStore.put(alice.did, record2Data.message, additionalIndexes2); - await eventLog.append(alice.did, await Message.getCid(record2Data.message)); + await messageStore.put(alice.did, record2Data.message, recordIndexes2); + await eventLog.append(alice.did, await Message.getCid(record2Data.message), recordIndexes2); - const additionalIndexes3 = await record3Data.recordsWrite.constructRecordsWriteIndexes(true); + const recordIndexes3 = await record3Data.recordsWrite.constructRecordsWriteIndexes(true); record3Data.message = await recordsWriteHandler.processEncodedData(record3Data.message, record3Data.dataStream); - await messageStore.put(alice.did, record3Data.message, additionalIndexes3); - await eventLog.append(alice.did, await Message.getCid(record3Data.message)); + await messageStore.put(alice.did, record3Data.message, recordIndexes3); + await eventLog.append(alice.did, await Message.getCid(record3Data.message), recordIndexes3); - const additionalIndexes4 = await record4Data.recordsWrite.constructRecordsWriteIndexes(true); + const recordIndexes4 = await record4Data.recordsWrite.constructRecordsWriteIndexes(true); record4Data.message = await recordsWriteHandler.processEncodedData(record4Data.message, record4Data.dataStream); - await messageStore.put(alice.did, record4Data.message, additionalIndexes4); - await eventLog.append(alice.did, await Message.getCid(record4Data.message)); + await messageStore.put(alice.did, record4Data.message, recordIndexes4); + await eventLog.append(alice.did, await Message.getCid(record4Data.message), recordIndexes4); - const additionalIndexes5 = await record5Data.recordsWrite.constructRecordsWriteIndexes(true); + const recordIndexes5 = await record5Data.recordsWrite.constructRecordsWriteIndexes(true); record5Data.message = await 
recordsWriteHandler.processEncodedData(record5Data.message, record5Data.dataStream); - await messageStore.put(alice.did, record5Data.message, additionalIndexes5); - await eventLog.append(alice.did, await Message.getCid(record5Data.message)); + await messageStore.put(alice.did, record5Data.message, recordIndexes5); + await eventLog.append(alice.did, await Message.getCid(record5Data.message), recordIndexes5); // test correctness for Bob's query const bobQueryMessageData = await TestDataGenerator.generateRecordsQuery({ @@ -1468,10 +1584,14 @@ export function testRecordsQueryHandler(): void { const indexes = await recordsWrite.constructRecordsWriteIndexes(true); const processedMessage = await recordsWriteHandler.processEncodedData(message, dataStream); await messageStore.put(alice.did, processedMessage, indexes); - await eventLog.append(alice.did, await Message.getCid(processedMessage)); + await eventLog.append(alice.did, await Message.getCid(processedMessage), indexes); messages.push(processedMessage); } + const sortedMessages = await ArrayUtility.asyncSort( + messages as RecordsWriteMessage[], + async (a,b) => Message.compareMessageTimestamp(a,b) + ); // fetch all from alice for sanity, alice should get all of the records // page1 alice @@ -1482,7 +1602,6 @@ export function testRecordsQueryHandler(): void { pagination : { limit: 10 }, }); - const sortedMessages = await MessageStoreLevel.sortMessages(messages, { dateCreated: SortOrder.Ascending }); let results = await dwn.processMessage(alice.did, aliceQueryMessageDataPage1.message) ; expect(results.status.code).to.equal(200); expect(results.entries?.length).to.equal(10, 'alice page 1'); @@ -1534,7 +1653,7 @@ export function testRecordsQueryHandler(): void { expect(results.status.code).to.equal(200); expect(results.entries?.length).to.equal(10, 'bob page 1'); const page1BobPaginationLastMessage = await Message.getCid(bobSorted.at(9)!); - expect(results.cursor).to.equal(page1BobPaginationLastMessage, 'bob page 1'); + 
expect(results.cursor).to.equal(page1BobPaginationLastMessage, 'bob page last message 1'); bobRetrieved.push(...results.entries!); const bobQueryMessagePage2 = await TestDataGenerator.generateRecordsQuery({ diff --git a/tests/handlers/records-write.spec.ts b/tests/handlers/records-write.spec.ts index 8270b48fd..48b29d0c5 100644 --- a/tests/handlers/records-write.spec.ts +++ b/tests/handlers/records-write.spec.ts @@ -985,7 +985,7 @@ export function testRecordsWriteHandler(): void { expect(events.length).to.equal(1); const messageCid = await Message.getCid(message); - expect(events[0].messageCid).to.equal(messageCid); + expect(events[0]).to.equal(messageCid); }); it('should only keep first write and latest write when subsequent writes happen', async () => { @@ -1018,7 +1018,7 @@ export function testRecordsWriteHandler(): void { const deletedMessageCid = await Message.getCid(newWrite.message); - for (const { messageCid } of events) { + for (const messageCid of events) { if (messageCid === deletedMessageCid ) { expect.fail(`${messageCid} should not exist`); } diff --git a/tests/interfaces/events-get.spec.ts b/tests/interfaces/events-get.spec.ts index 1d15ebfd9..0427b6d8a 100644 --- a/tests/interfaces/events-get.spec.ts +++ b/tests/interfaces/events-get.spec.ts @@ -10,17 +10,17 @@ describe('EventsGet Message', () => { it('creates an EventsGet message', async () => { const alice = await TestDataGenerator.generatePersona(); const eventsGet = await EventsGet.create({ - watermark : 'yolo', - signer : await Jws.createSigner(alice) + cursor : 'yolo', + signer : await Jws.createSigner(alice) }); const { message } = eventsGet; expect(message.descriptor).to.exist; - expect(message.descriptor.watermark).to.equal('yolo'); + expect(message.descriptor.cursor).to.equal('yolo'); expect(message.authorization).to.exist; }); - it('does not require a watermark', async () => { + it('does not require a cursor', async () => { const alice = await TestDataGenerator.generatePersona(); const 
eventsGet = await EventsGet.create({ signer: await Jws.createSigner(alice) @@ -28,7 +28,7 @@ describe('EventsGet Message', () => { const message = eventsGet.message; expect(message.descriptor).to.exist; - expect(message.descriptor.watermark).to.not.exist; + expect(message.descriptor.cursor).to.not.exist; expect(message.authorization).to.exist; }); }); @@ -37,8 +37,8 @@ describe('EventsGet Message', () => { it('parses a message into an EventsGet instance', async () => { const alice = await TestDataGenerator.generatePersona(); const eventsGet = await EventsGet.create({ - watermark : 'yolo', - signer : await Jws.createSigner(alice) + cursor : 'yolo', + signer : await Jws.createSigner(alice) }); const parsed = await EventsGet.parse(eventsGet.message); @@ -53,8 +53,8 @@ describe('EventsGet Message', () => { it('throws an exception if message is not a valid EventsGet message', async () => { const alice = await TestDataGenerator.generatePersona(); const eventsGet = await EventsGet.create({ - watermark : 'yolo', - signer : await Jws.createSigner(alice) + cursor : 'yolo', + signer : await Jws.createSigner(alice) }); const { message } = eventsGet; diff --git a/tests/interfaces/events-query.spec.ts b/tests/interfaces/events-query.spec.ts new file mode 100644 index 000000000..00039eb4d --- /dev/null +++ b/tests/interfaces/events-query.spec.ts @@ -0,0 +1,162 @@ +import type { EventsQueryMessage } from '../../src/types/event-types.js'; +import type { ProtocolsQueryFilter } from '../../src/types/protocols-types.js'; +import type { RecordsFilter } from '../../src/types/records-types.js'; + +import { EventsQuery } from '../../src/interfaces/events-query.js'; +import { Jws } from '../../src/utils/jws.js'; +import { Message } from '../../src/core/message.js'; +import { TestDataGenerator } from '../utils/test-data-generator.js'; +import { Time } from '../../src/utils/time.js'; + +import chaiAsPromised from 'chai-as-promised'; +import chai, { expect } from 'chai'; + 
+chai.use(chaiAsPromised); + +describe('EventsQuery Message', () => { + describe('create()', () => { + it('should use `messageTimestamp` as is if given', async () => { + const alice = await TestDataGenerator.generatePersona(); + + const currentTime = Time.getCurrentTimestamp(); + const eventsQuery = await EventsQuery.create({ + filters : [{ schema: 'anything' }], + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + + expect(eventsQuery.message.descriptor.messageTimestamp).to.equal(currentTime); + }); + + it('should auto-normalize protocol URL', async () => { + const alice = await TestDataGenerator.generatePersona(); + + const options = { + recipient : alice.did, + signer : Jws.createSigner(alice), + filters : [{ protocol: 'example.com/' }], + }; + const eventsQuery = await EventsQuery.create(options); + + const message = eventsQuery.message as EventsQueryMessage; + expect(message.descriptor.filters.length).to.equal(1); + expect((message.descriptor.filters[0] as ProtocolsQueryFilter).protocol).to.eq('http://example.com'); + }); + + it('should auto-normalize schema URL', async () => { + const alice = await TestDataGenerator.generatePersona(); + + const options = { + recipient : alice.did, + signer : Jws.createSigner(alice), + filters : [{ schema: 'example.com/' }], + }; + const eventsQuery = await EventsQuery.create(options); + + const message = eventsQuery.message as EventsQueryMessage; + + expect(message.descriptor.filters.length).to.equal(1); + expect((message.descriptor.filters[0] as RecordsFilter).schema).to.eq('http://example.com'); + }); + + it('throws an exception if message has no filters', async () => { + const alice = await TestDataGenerator.generatePersona(); + const currentTime = Time.getCurrentTimestamp(); + const eventsQueryPromise = EventsQuery.create({ + filters : [], + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + await expect(eventsQueryPromise).to.eventually.be.rejectedWith('fewer than 1 
items'); + }); + + it('removes empty filters', async () => { + const alice = await TestDataGenerator.generatePersona(); + const currentTime = Time.getCurrentTimestamp(); + + // single empty filter fails + const eventsQueryPromise = EventsQuery.create({ + filters : [{}], + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + await expect(eventsQueryPromise).to.eventually.be.rejectedWith('fewer than 1 items'); + + // empty filter gets removed, valid filter remains + const eventsQuery = await EventsQuery.create({ + filters : [{ schema: 'schema' },{ }], // one empty filter + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + expect(eventsQuery.message.descriptor.filters.length).to.equal(1); + }); + }); + + describe('parse', () => { + it('parses a message into an EventsQuery instance', async () => { + const alice = await TestDataGenerator.generatePersona(); + + const currentTime = Time.getCurrentTimestamp(); + + const eventsQuery = await EventsQuery.create({ + filters : [{ schema: 'anything' }], + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + + const parsed = await EventsQuery.parse(eventsQuery.message); + expect(parsed).to.be.instanceof(EventsQuery); + + const expectedMessageCid = await Message.getCid(eventsQuery.message); + const messageCid = await Message.getCid(parsed.message); + + expect(messageCid).to.equal(expectedMessageCid); + }); + + it('throws an exception if message is not a valid EventsQuery message', async () => { + const alice = await TestDataGenerator.generatePersona(); + const currentTime = Time.getCurrentTimestamp(); + const eventsQuery = await EventsQuery.create({ + filters : [{ schema: 'anything' }], + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + + const { message } = eventsQuery; + (message.descriptor as any)['bad_property'] = 'property'; + const eventsQueryPromise = EventsQuery.parse(message); + await 
expect(eventsQueryPromise).to.eventually.be.rejectedWith('must NOT have additional properties'); + }); + + it('throws an exception if message has no filters', async () => { + const alice = await TestDataGenerator.generatePersona(); + const currentTime = Time.getCurrentTimestamp(); + const eventsQuery = await EventsQuery.create({ + filters : [{ schema: 'anything' }], + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + + const { message } = eventsQuery; + message.descriptor.filters = []; //empty out the filters + + const eventsQueryPromise = EventsQuery.parse(message); + await expect(eventsQueryPromise).to.eventually.be.rejectedWith('fewer than 1 items'); + }); + + it('throws an exception if message has an empty filter', async () => { + const alice = await TestDataGenerator.generatePersona(); + const currentTime = Time.getCurrentTimestamp(); + const eventsQuery = await EventsQuery.create({ + filters : [{ schema: 'anything' }], + messageTimestamp : currentTime, + signer : Jws.createSigner(alice), + }); + + const { message } = eventsQuery; + message.descriptor.filters.push({ }); // add an empty filter + const eventsQueryPromise = EventsQuery.parse(message); + await expect(eventsQueryPromise).to.eventually.be.rejectedWith('must NOT have fewer than 1 properties'); + }); + }); +}); diff --git a/tests/interfaces/records-query.spec.ts b/tests/interfaces/records-query.spec.ts index 43bdfaf3a..f6ef3b9d8 100644 --- a/tests/interfaces/records-query.spec.ts +++ b/tests/interfaces/records-query.spec.ts @@ -2,10 +2,10 @@ import chaiAsPromised from 'chai-as-promised'; import chai, { expect } from 'chai'; import dexProtocolDefinition from '../vectors/protocol-definitions/dex.json' assert { type: 'json' }; -import { Jws } from '../../src/index.js'; import { RecordsQuery } from '../../src/interfaces/records-query.js'; import { TestDataGenerator } from '../utils/test-data-generator.js'; import { Time } from '../../src/utils/time.js'; +import { DateSort, 
DwnErrorCode, Jws } from '../../src/index.js'; chai.use(chaiAsPromised); @@ -26,6 +26,22 @@ describe('RecordsQuery', () => { await expect(recordQueryRejected).to.eventually.be.rejectedWith('descriptor/filter/published: must be equal to one of the allowed values'); }); + it('should not allow published to be set to false with a dateSort set to sorting by `PublishedAscending` or `PublishedDescending`', async () => { + // test control + const recordQueryControl = TestDataGenerator.generateRecordsQuery({ + filter : { published: true }, + dateSort : DateSort.PublishedAscending, + }); + + await expect(recordQueryControl).to.eventually.not.be.rejected; + + const recordQueryRejected = TestDataGenerator.generateRecordsQuery({ + filter : { published: false }, + dateSort : DateSort.PublishedAscending, + }); + await expect(recordQueryRejected).to.eventually.be.rejectedWith(DwnErrorCode.RecordsQueryCreateFilterPublishedSortInvalid); + }); + it('should use `messageTimestamp` as is if given', async () => { const alice = await TestDataGenerator.generatePersona(); diff --git a/tests/scenarios/events-query.spec.ts b/tests/scenarios/events-query.spec.ts new file mode 100644 index 000000000..a4689dde0 --- /dev/null +++ b/tests/scenarios/events-query.spec.ts @@ -0,0 +1,1126 @@ +import type { + DataStore, + EventLog, + MessageStore +} from '../../src/index.js'; + +import freeForAll from '../vectors/protocol-definitions/free-for-all.json' assert { type: 'json' }; +import threadProtocol from '../vectors/protocol-definitions/thread-role.json' assert { type: 'json' }; + +import { TestStores } from '../test-stores.js'; +import { DidKeyResolver, DidResolver, Dwn, DwnConstant, DwnInterfaceName, DwnMethodName, Message, Time } from '../../src/index.js'; + +import { expect } from 'chai'; +import { TestDataGenerator } from '../utils/test-data-generator.js'; + +export function testEventsQueryScenarios(): void { + describe('events query tests', () => { + let didResolver: DidResolver; + let 
messageStore: MessageStore; + let dataStore: DataStore; + let eventLog: EventLog; + let dwn: Dwn; + + // important to follow the `before` and `after` pattern to initialize and clean the stores in tests + // so that different test suites can reuse the same backend store for testing + before(async () => { + didResolver = new DidResolver([new DidKeyResolver()]); + + const stores = TestStores.get(); + messageStore = stores.messageStore; + dataStore = stores.dataStore; + eventLog = stores.eventLog; + + dwn = await Dwn.create({ didResolver, messageStore, dataStore, eventLog }); + }); + + beforeEach(async () => { + // clean up before each test rather than after so that a test does not depend on other tests to do the clean up + await messageStore.clear(); + await dataStore.clear(); + await eventLog.clear(); + }); + + after(async () => { + await dwn.close(); + }); + + it('supports multiple filter types', async () => { + const alice = await DidKeyResolver.generate(); + const record = await TestDataGenerator.generateRecordsWrite({ author: alice }); + const grant = await TestDataGenerator.generatePermissionsGrant({ author: alice }); + const protocol = await TestDataGenerator.generateProtocolsConfigure({ author: alice }); + + // insert data + const recordReply = await dwn.processMessage(alice.did, record.message, record.dataStream); + const grantReply = await dwn.processMessage(alice.did, grant.message); + const protocolReply = await dwn.processMessage(alice.did, protocol.message); + expect(recordReply.status.code).to.equal(202, 'RecordsWrite'); + expect(grantReply.status.code).to.equal(202, 'PermissionsGrant'); + expect(protocolReply.status.code).to.equal(202, 'ProtocolConfigure'); + + const eventsQueryRecords = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [ + { interface: DwnInterfaceName.Permissions }, // PermissionsGrant + { recordId: record.message.recordId }, // RecordsWrite + { protocol: protocol.message.descriptor.definition.protocol } // 
ProtocolConfigure + ], + }); + const recordEventsReply = await dwn.processMessage(alice.did, eventsQueryRecords.message); + expect(recordEventsReply.status.code).to.equal(200); + expect(recordEventsReply.events?.length).to.equal(3); + expect(recordEventsReply.events).to.have.members([ + await Message.getCid(record.message), + await Message.getCid(grant.message), + await Message.getCid(protocol.message), + ]); + }); + + it('filters by interface type', async () => { + // scenario: + // alice creates 3 different types of messages (RecordsWrite, PermissionsGrant, ProtocolsConfigure) + // alice queries for messages from each interface respectively (Records, Permissions, Protocols) + // alice creates 2 additional messages (RecordsDelete, ProtocolsRevoke) + // alice queries for messages for each interface respectively providing a cursor. + + const alice = await DidKeyResolver.generate(); + const record = await TestDataGenerator.generateRecordsWrite({ author: alice }); + const grant = await TestDataGenerator.generatePermissionsGrant({ author: alice }); + const protocol = await TestDataGenerator.generateProtocolsConfigure({ author: alice }); + + // insert data + const recordReply = await dwn.processMessage(alice.did, record.message, record.dataStream); + const grantReply = await dwn.processMessage(alice.did, grant.message); + const protocolReply = await dwn.processMessage(alice.did, protocol.message); + expect(recordReply.status.code).to.equal(202, 'RecordsWrite'); + expect(grantReply.status.code).to.equal(202, 'PermissionsGrant'); + expect(protocolReply.status.code).to.equal(202, 'ProtocolConfigure'); + + let eventsQueryRecords = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ interface: DwnInterfaceName.Records }], + }); + const recordEventsReply = await dwn.processMessage(alice.did, eventsQueryRecords.message); + expect(recordEventsReply.status.code).to.equal(200); + expect(recordEventsReply.events?.length).to.equal(1); + 
expect(recordEventsReply.events![0]).to.equal(await Message.getCid(record.message!)); + + let eventsQueryGrants = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ interface: DwnInterfaceName.Permissions }], + }); + const grantEventsReply = await dwn.processMessage(alice.did, eventsQueryGrants.message); + expect(grantEventsReply.status.code).to.equal(200); + expect(grantEventsReply.events?.length).to.equal(1); + expect(grantEventsReply.events![0]).to.equal(await Message.getCid(grant.message!)); + + let eventsQueryProtocols = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ interface: DwnInterfaceName.Protocols }], + }); + const protocolEventsReply = await dwn.processMessage(alice.did, eventsQueryProtocols.message); + expect(protocolEventsReply.status.code).to.equal(200); + expect(protocolEventsReply.events?.length).to.equal(1); + expect(protocolEventsReply.events![0]).to.equal(await Message.getCid(protocol.message!)); + + + // insert additional data to query beyond a cursor + const recordDelete = await TestDataGenerator.generateRecordsDelete({ author: alice, recordId: record.message.recordId }); + const revokeGrant = await TestDataGenerator.generatePermissionsRevoke({ + author: alice, permissionsGrantId: await Message.getCid(grant.message) + }); + const recordDeleteReply = await dwn.processMessage(alice.did, recordDelete.message); + const revokeGrantReply = await dwn.processMessage(alice.did, revokeGrant.message); + expect(recordDeleteReply.status.code).to.equal(202, 'RecordsDelete'); + expect(revokeGrantReply.status.code).to.equal(202, 'PermissionsRevoke'); + + // query after cursor + eventsQueryRecords = await TestDataGenerator.generateEventsQuery({ + cursor : recordEventsReply.events![0], // the message returned from prior query + author : alice, + filters : [{ interface: DwnInterfaceName.Records }], + }); + const recordEventsReplyAfterCursor = await dwn.processMessage(alice.did, 
eventsQueryRecords.message); + expect(recordEventsReplyAfterCursor.status.code).to.equal(200); + expect(recordEventsReplyAfterCursor.events?.length).to.equal(1); + expect(recordEventsReplyAfterCursor.events![0]).to.equal(await Message.getCid(recordDelete.message!)); + + eventsQueryGrants = await TestDataGenerator.generateEventsQuery({ + cursor : grantEventsReply.events![0], // the message returned from prior query + author : alice, + filters : [{ interface: DwnInterfaceName.Permissions }], + }); + const grantEventsReplyAfterCursor = await dwn.processMessage(alice.did, eventsQueryGrants.message); + expect(grantEventsReplyAfterCursor.status.code).to.equal(200); + expect(grantEventsReplyAfterCursor.events?.length).to.equal(1); + expect(grantEventsReplyAfterCursor.events![0]).to.equal(await Message.getCid(revokeGrant.message!)); + + eventsQueryProtocols = await TestDataGenerator.generateEventsQuery({ + cursor : protocolEventsReply.events![0], // the message returned from prior query + author : alice, + filters : [{ interface: DwnInterfaceName.Protocols }], + }); + const protocolEventsReplyAfterCursor = await dwn.processMessage(alice.did, eventsQueryProtocols.message); + expect(protocolEventsReplyAfterCursor.status.code).to.equal(200); + expect(protocolEventsReplyAfterCursor.events?.length).to.equal(0); // no new messages + }); + + it('filters by method type', async () => { + // scenario: + // alice creates a variety of Messages (RecordsWrite, RecordsDelete, ProtocolConfigure, PermissionsGrant) + // alice queries for only RecordsWrite messages + // alice creates more messages to query beyond a cursor + + const alice = await DidKeyResolver.generate(); + + // write 1 + const record1 = await TestDataGenerator.generateRecordsWrite({ author: alice }); + const record1Reply = await dwn.processMessage(alice.did, record1.message, record1.dataStream); + expect(record1Reply.status.code).to.equal(202, 'RecordsWrite'); + + // other messages + const grant = await 
TestDataGenerator.generatePermissionsGrant({ author: alice }); + const grantReply = await dwn.processMessage(alice.did, grant.message); + expect(grantReply.status.code).to.equal(202, 'PermissionsGrant'); + const protocol = await TestDataGenerator.generateProtocolsConfigure({ author: alice }); + const protocolReply = await dwn.processMessage(alice.did, protocol.message); + expect(protocolReply.status.code).to.equal(202, 'ProtocolConfigure'); + + // write 2 + const record2 = await TestDataGenerator.generateRecordsWrite({ author: alice }); + const record2Reply = await dwn.processMessage(alice.did, record2.message, record2.dataStream); + expect(record2Reply.status.code).to.equal(202, 'RecordsWrite'); + + // delete write 1 + const delete1 = await TestDataGenerator.generateRecordsDelete({ author: alice, recordId: record1.message.recordId }); + const delete1Reply = await dwn.processMessage(alice.did, delete1.message); + expect(delete1Reply.status.code).to.equal(202, 'RecordsDelete'); + + + let recordsWriteEvents = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ interface: DwnInterfaceName.Records, method: DwnMethodName.Write }] + }); + + const recordsWriteEventsReply = await dwn.processMessage(alice.did, recordsWriteEvents.message); + expect(recordsWriteEventsReply.status.code).to.equal(200); + expect(recordsWriteEventsReply.events?.length).to.equal(2); + expect(recordsWriteEventsReply.events![0]).to.equal(await Message.getCid(record1.message)); + expect(recordsWriteEventsReply.events![1]).to.equal(await Message.getCid(record2.message)); + + // additional messages + const record2Update = await TestDataGenerator.generateFromRecordsWrite({ author: alice, existingWrite: record2.recordsWrite }); + const revokeGrant = await TestDataGenerator.generatePermissionsRevoke({ + author: alice, permissionsGrantId: await Message.getCid(grant.message) + }); + const record2UpdateReply = await dwn.processMessage(alice.did, record2Update.message, 
record2Update.dataStream); + const revokeGrantReply = await dwn.processMessage(alice.did, revokeGrant.message); + expect(record2UpdateReply.status.code).to.equal(202, 'RecordsWrite'); + expect(revokeGrantReply.status.code).to.equal(202, 'PermissionsRevoke'); + + recordsWriteEvents = await TestDataGenerator.generateEventsQuery({ + cursor : recordsWriteEventsReply.events![1], + author : alice, + filters : [{ interface: DwnInterfaceName.Records, method: DwnMethodName.Write }] + }); + + const recordsWriteEventsReplyAfterCursor = await dwn.processMessage(alice.did, recordsWriteEvents.message); + expect(recordsWriteEventsReplyAfterCursor.status.code).to.equal(200); + expect(recordsWriteEventsReplyAfterCursor.events?.length).to.equal(1); + expect(recordsWriteEventsReplyAfterCursor.events![0]).to.equal(await Message.getCid(record2Update.message)); + }); + + it('filters by a dateUpdated (messageTimestamp) range across different message types', async () => { + // scenario: + // alice creates (3) messages (RecordsWrite, PermissionsGrant and ProtocolsConfigure), + // each message on the first day of the year (2021, 2022 and 2023 respectively). + // alice queries for all records beyond the last day of 2021 and should return 2 of the 3 messages (Grant and ProtocolConfigure) + // alice then creates a RecordsDelete message for the original RecordsWrite + // alice queries once again however supplying a cursor of the last message from the prior query, returning the RecordsDelete message. 
+ const firstDayOf2021 = Time.createTimestamp({ year: 2021, month: 1, day: 1 }); + const firstDayOf2022 = Time.createTimestamp({ year: 2022, month: 1, day: 1 }); + const firstDayOf2023 = Time.createTimestamp({ year: 2023, month: 1, day: 1 }); + + const alice = await DidKeyResolver.generate(); + const write = await TestDataGenerator.generateRecordsWrite({ author: alice, dateCreated: firstDayOf2021, messageTimestamp: firstDayOf2021 }); + const grant = await TestDataGenerator.generatePermissionsGrant({ author: alice, messageTimestamp: firstDayOf2022 }); + const protocol = await TestDataGenerator.generateProtocolsConfigure({ author: alice, messageTimestamp: firstDayOf2023 }); + + // insert data + const writeReply = await dwn.processMessage(alice.did, write.message, write.dataStream); + const grantReply = await dwn.processMessage(alice.did, grant.message); + const protocolReply = await dwn.processMessage(alice.did, protocol.message); + expect(writeReply.status.code).to.equal(202, 'RecordsWrite'); + expect(grantReply.status.code).to.equal(202, 'PermissionsGrant'); + expect(protocolReply.status.code).to.equal(202, 'ProtocolConfigure'); + + // query from last day of 2021 + const lastDayOf2021 = Time.createTimestamp({ year: 2021, month: 12, day: 31 }); + let eventsQuery1 = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ dateUpdated: { from: lastDayOf2021 } }], + }); + let reply1 = await dwn.processMessage(alice.did, eventsQuery1.message); + expect(reply1.status.code).to.equal(200); + expect(reply1.events?.length).to.equal(2); + expect(reply1.events![0]).to.equal(await Message.getCid(grant.message!)); + expect(reply1.events![1]).to.equal(await Message.getCid(protocol.message!)); + + + // delete the RecordsWrite + const delete1 = await TestDataGenerator.generateRecordsDelete({ author: alice, recordId: write.message.recordId }); + const delete1Reply = await dwn.processMessage(alice.did, delete1.message); + 
expect(delete1Reply.status.code).to.equal(202); + + eventsQuery1 = await TestDataGenerator.generateEventsQuery({ + cursor : reply1.events![1], // use the last messageCid from the prior query as a cursor + author : alice, + filters : [{ dateUpdated: { from: lastDayOf2021 } }], + }); + reply1 = await dwn.processMessage(alice.did, eventsQuery1.message); + expect(reply1.status.code).to.equal(200); + expect(reply1.events?.length).to.equal(1); + expect(reply1.events![0]).to.equal(await Message.getCid(delete1.message!)); + }); + + it('filters by dateCreated', async () => { + // scenario: 4 records, created on first of 2021, 2022, 2023, 2024 respectively, only the first 2 records + const firstDayOf2021 = Time.createTimestamp({ year: 2021, month: 1, day: 1 }); + const firstDayOf2022 = Time.createTimestamp({ year: 2022, month: 1, day: 1 }); + const firstDayOf2023 = Time.createTimestamp({ year: 2023, month: 1, day: 1 }); + const firstDayOf2024 = Time.createTimestamp({ year: 2024, month: 1, day: 1 }); + + const alice = await DidKeyResolver.generate(); + const write1 = await TestDataGenerator.generateRecordsWrite({ author: alice, dateCreated: firstDayOf2021, messageTimestamp: firstDayOf2021 }); + const write2 = await TestDataGenerator.generateRecordsWrite({ author: alice, dateCreated: firstDayOf2022, messageTimestamp: firstDayOf2022 }); + const write3 = await TestDataGenerator.generateRecordsWrite({ author: alice, dateCreated: firstDayOf2023, messageTimestamp: firstDayOf2023 }); + const write4 = await TestDataGenerator.generateRecordsWrite({ author: alice, dateCreated: firstDayOf2024, messageTimestamp: firstDayOf2024 }); + + // insert data + const writeReply1 = await dwn.processMessage(alice.did, write1.message, write1.dataStream); + const writeReply2 = await dwn.processMessage(alice.did, write2.message, write2.dataStream); + const writeReply3 = await dwn.processMessage(alice.did, write3.message, write3.dataStream); + const writeReply4 = await dwn.processMessage(alice.did, 
write4.message, write4.dataStream); + expect(writeReply1.status.code).to.equal(202); + expect(writeReply2.status.code).to.equal(202); + expect(writeReply3.status.code).to.equal(202); + expect(writeReply4.status.code).to.equal(202); + + // testing `from` range + const lastDayOf2021 = Time.createTimestamp({ year: 2021, month: 12, day: 31 }); + let eventsQuery1 = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ dateCreated: { from: lastDayOf2021 } }], + }); + let reply1 = await dwn.processMessage(alice.did, eventsQuery1.message); + expect(reply1.status.code).to.equal(200); + expect(reply1.events?.length).to.equal(3); + expect(reply1.events![0]).to.equal(await Message.getCid(write2.message!)); + expect(reply1.events![1]).to.equal(await Message.getCid(write3.message!)); + expect(reply1.events![2]).to.equal(await Message.getCid(write4.message!)); + + // using the cursor of the first message + eventsQuery1 = await TestDataGenerator.generateEventsQuery({ + cursor : reply1.events![0], + author : alice, + filters : [{ dateCreated: { from: lastDayOf2021 } }], + }); + reply1 = await dwn.processMessage(alice.did, eventsQuery1.message); + expect(reply1.status.code).to.equal(200); + expect(reply1.events?.length).to.equal(2); + expect(reply1.events![0]).to.equal(await Message.getCid(write3.message!)); + expect(reply1.events![1]).to.equal(await Message.getCid(write4.message!)); + + // testing `to` range + const lastDayOf2022 = Time.createTimestamp({ year: 2022, month: 12, day: 31 }); + let eventsQuery2 = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ dateCreated: { to: lastDayOf2022 } }], + }); + let reply2 = await dwn.processMessage(alice.did, eventsQuery2.message); + expect(reply2.status.code).to.equal(200); + expect(reply2.events?.length).to.equal(2); + expect(reply2.events![0]).to.equal(await Message.getCid(write1.message!)); + expect(reply2.events![1]).to.equal(await Message.getCid(write2.message!)); + + // using 
the cursor of the first message + eventsQuery2 = await TestDataGenerator.generateEventsQuery({ + cursor : reply2.events![0], + author : alice, + filters : [{ dateCreated: { to: lastDayOf2022 } }], + }); + reply2 = await dwn.processMessage(alice.did, eventsQuery2.message); + expect(reply2.status.code).to.equal(200); + expect(reply2.events?.length).to.equal(1); + expect(reply2.events![0]).to.equal(await Message.getCid(write2.message!)); + + // testing `from` and `to` range + const lastDayOf2023 = Time.createTimestamp({ year: 2023, month: 12, day: 31 }); + let eventsQuery3 = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ dateCreated: { from: lastDayOf2022, to: lastDayOf2023 } }], + }); + let reply3 = await dwn.processMessage(alice.did, eventsQuery3.message); + expect(reply3.status.code).to.equal(200); + expect(reply3.events?.length).to.equal(1); + expect(reply3.events![0]).to.equal(await Message.getCid(write3.message!)); + + // using the cursor of the only message, should not return any results + eventsQuery3 = await TestDataGenerator.generateEventsQuery({ + cursor : reply3.events![0], + author : alice, + filters : [{ dateCreated: { from: lastDayOf2022, to: lastDayOf2023 } }], + }); + reply3 = await dwn.processMessage(alice.did, eventsQuery3.message); + expect(reply3.status.code).to.equal(200); + expect(reply3.events?.length).to.equal(0); + + // testing edge case where value equals `from` and `to` + let eventsQuery4 = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ dateCreated: { from: firstDayOf2022, to: firstDayOf2023 } }], + }); + let reply4 = await dwn.processMessage(alice.did, eventsQuery4.message); + expect(reply4.status.code).to.equal(200); + expect(reply4.events?.length).to.equal(1); + expect(reply4.events![0]).to.equal(await Message.getCid(write2.message!)); + + // testing edge case where value equals `from` and `to` + eventsQuery4 = await TestDataGenerator.generateEventsQuery({ + cursor : 
reply4.events![0], + author : alice, + filters : [{ dateCreated: { from: firstDayOf2022, to: firstDayOf2023 } }], + }); + reply4 = await dwn.processMessage(alice.did, eventsQuery4.message); + expect(reply4.status.code).to.equal(200); + expect(reply4.events?.length).to.equal(0); + }); + + it('filters by a protocol across different message types', async () => { + // scenario: + // alice creates (3) different message types all related to "proto1" (Configure, RecordsWrite, RecordsDelete) + // alice creates (3) different message types all related to "proto2" (Configure, RecordsWrite, RecordsDelete) + // when issuing an EventsQuery for the specific protocol, only Events related to it should be returned. + // alice then creates an additional messages to query after a cursor + + const alice = await DidKeyResolver.generate(); + + // create a proto1 + const protoConf1 = await TestDataGenerator.generateProtocolsConfigure({ + author : alice, + protocolDefinition : { ...freeForAll, protocol: 'proto1' } + }); + + const postProperties = { + protocolPath : 'post', + schema : freeForAll.types.post.schema, + dataFormat : freeForAll.types.post.dataFormats[0], + }; + + const proto1 = protoConf1.message.descriptor.definition.protocol; + const protoConf1Response = await dwn.processMessage(alice.did, protoConf1.message); + expect(protoConf1Response.status.code).equals(202); + + // create a proto2 + const protoConf2 = await TestDataGenerator.generateProtocolsConfigure({ + author : alice, + protocolDefinition : { ...freeForAll, protocol: 'proto2' } + }); + const proto2 = protoConf2.message.descriptor.definition.protocol; + const protoConf2Response = await dwn.processMessage(alice.did, protoConf2.message); + expect(protoConf2Response.status.code).equals(202); + + // create a record for proto1 + const write1proto1 = await TestDataGenerator.generateRecordsWrite({ author: alice, protocol: proto1, ...postProperties }); + const write1Response = await dwn.processMessage(alice.did, 
write1proto1.message, write1proto1.dataStream); + expect(write1Response.status.code).equals(202); + + // create a record for proto2 + const write1proto2 = await TestDataGenerator.generateRecordsWrite({ author: alice, protocol: proto2, ...postProperties }); + const write1Proto2Response = await dwn.processMessage(alice.did, write1proto2.message, write1proto2.dataStream); + expect(write1Proto2Response.status.code).equals(202); + + // filter for proto1 + let proto1EventsQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ protocol: proto1 }] + }); + let proto1EventsReply = await dwn.processMessage(alice.did, proto1EventsQuery.message); + expect(proto1EventsReply.status.code).equals(200); + expect(proto1EventsReply.events?.length).equals(2); + + // check order of events returned. + expect(proto1EventsReply.events![0]).to.equal(await Message.getCid(protoConf1.message)); + expect(proto1EventsReply.events![1]).to.equal(await Message.getCid(write1proto1.message)); + + // filter for proto2 + let proto2EventsQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ protocol: proto2 }] + }); + let proto2EventsReply = await dwn.processMessage(alice.did, proto2EventsQuery.message); + expect(proto2EventsReply.status.code).equals(200); + expect(proto2EventsReply.events?.length).equals(2); + + // check order of events returned. 
+ expect(proto2EventsReply.events![0]).to.equal(await Message.getCid(protoConf2.message)); + expect(proto2EventsReply.events![1]).to.equal(await Message.getCid(write1proto2.message)); + + // get cursor of the last event and add more events to query afterwards + const proto1Cursor = proto1EventsReply.events![1]; + const proto2Cursor = proto2EventsReply.events![1]; + + // delete proto1 message + const deleteProto1Message = await TestDataGenerator.generateRecordsDelete({ author: alice, recordId: write1proto1.message.recordId }); + const deleteProto1MessageReply = await dwn.processMessage(alice.did, deleteProto1Message.message); + expect(deleteProto1MessageReply.status.code).to.equal(202); + + // delete proto2 message + const deleteProto2Message = await TestDataGenerator.generateRecordsDelete({ author: alice, recordId: write1proto2.message.recordId }); + const deleteProto2MessageReply = await dwn.processMessage(alice.did, deleteProto2Message.message); + expect(deleteProto2MessageReply.status.code).to.equal(202); + + //query messages beyond the cursor + proto1EventsQuery = await TestDataGenerator.generateEventsQuery({ + cursor : proto1Cursor, + author : alice, + filters : [{ protocol: proto1 }], + }); + proto1EventsReply = await dwn.processMessage(alice.did, proto1EventsQuery.message); + expect(proto1EventsReply.status.code).equals(200); + expect(proto1EventsReply.events?.length).equals(1); + expect(proto1EventsReply.events![0]).to.equal(await Message.getCid(deleteProto1Message.message)); + + //query messages beyond the cursor + proto2EventsQuery = await TestDataGenerator.generateEventsQuery({ + cursor : proto2Cursor, + author : alice, + filters : [{ protocol: proto2 }], + }); + proto2EventsReply = await dwn.processMessage(alice.did, proto2EventsQuery.message); + expect(proto2EventsReply.status.code).equals(200); + expect(proto2EventsReply.events?.length).equals(1); + expect(proto2EventsReply.events![0]).to.equal(await Message.getCid(deleteProto2Message.message)); + }); 
+ + it('filters by protocol, protocolPath & parentId', async () => { + // scenario: get all messages across a protocol & protocolPath combo + // alice installs a protocol and creates a thread + // alice adds bob and carol as participants + // alice, bob, and carol all create messages + // alice filters for 'thread', 'thread/participant' and 'thread/chat' + // alice deletes carol participant message + // alice filters for 'thread/participant' after a cursor + + const alice = await DidKeyResolver.generate(); + const bob = await DidKeyResolver.generate(); + const carol = await DidKeyResolver.generate(); + + // create protocol + const protocolConfigure = await TestDataGenerator.generateProtocolsConfigure({ + author : alice, + protocolDefinition : { ...threadProtocol } + }); + const protocolConfigureReply = await dwn.processMessage(alice.did, protocolConfigure.message); + expect(protocolConfigureReply.status.code).to.equal(202); + const protocol = protocolConfigure.message.descriptor.definition.protocol; + + // alice creates thread + const thread = await TestDataGenerator.generateRecordsWrite({ + author : alice, + protocol : protocol, + protocolPath : 'thread' + }); + const threadReply = await dwn.processMessage(alice.did, thread.message, thread.dataStream); + expect(threadReply.status.code).to.equal(202); + + // add bob as participant + const bobParticipant = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : bob.did, + parentId : thread.message.recordId, + contextId : thread.message.contextId, + protocol : protocol, + protocolPath : 'thread/participant' + }); + const bobParticipantReply = await dwn.processMessage(alice.did, bobParticipant.message, bobParticipant.dataStream); + expect(bobParticipantReply.status.code).to.equal(202); + + // add carol as participant + const carolParticipant = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : carol.did, + parentId : thread.message.recordId, + contextId : 
thread.message.contextId, + protocol : protocol, + protocolPath : 'thread/participant' + }); + const carolParticipantReply = await dwn.processMessage(alice.did, carolParticipant.message, carolParticipant.dataStream); + expect(carolParticipantReply.status.code).to.equal(202); + + // add a message to protocol1 + const message1 = await TestDataGenerator.generateRecordsWrite({ + author : bob, + recipient : alice.did, + parentId : thread.message.recordId, + contextId : thread.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + protocolRole : 'thread/participant', + }); + const message1Reply = await dwn.processMessage(alice.did, message1.message, message1.dataStream); + expect(message1Reply.status.code).to.equal(202); + + const message2 = await TestDataGenerator.generateRecordsWrite({ + author : bob, + recipient : alice.did, + parentId : thread.message.recordId, + contextId : thread.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + protocolRole : 'thread/participant', + }); + const message2Reply = await dwn.processMessage(alice.did, message2.message, message2.dataStream); + expect(message2Reply.status.code).to.equal(202); + + const message3 = await TestDataGenerator.generateRecordsWrite({ + author : carol, + recipient : alice.did, + parentId : thread.message.recordId, + contextId : thread.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + protocolRole : 'thread/participant', + }); + const message3Reply = await dwn.processMessage(alice.did, message3.message, message3.dataStream); + expect(message3Reply.status.code).to.equal(202); + + // query for thread + const threadQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ protocol: protocol, protocolPath: 'thread' }], + }); + const threadQueryReply = await dwn.processMessage(alice.did, threadQuery.message); + expect(threadQueryReply.status.code).to.equal(200); + expect(threadQueryReply.events?.length).to.equal(1); + 
expect(threadQueryReply.events![0]).to.equal(await Message.getCid(thread.message)); + + // query for participants + const participantsQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ protocol: protocol, protocolPath: 'thread/participant', parentId: thread.message.recordId }], + }); + const participantsQueryReply = await dwn.processMessage(alice.did, participantsQuery.message); + expect(participantsQueryReply.status.code).to.equal(200); + expect(participantsQueryReply.events?.length).to.equal(2); + expect(participantsQueryReply.events![0]).to.equal(await Message.getCid(bobParticipant.message)); + expect(participantsQueryReply.events![1]).to.equal(await Message.getCid(carolParticipant.message)); + + // query for chats + const chatQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ protocol: protocol, protocolPath: 'thread/chat', parentId: thread.message.recordId }], + }); + const chatQueryReply = await dwn.processMessage(alice.did, chatQuery.message); + expect(chatQueryReply.status.code).to.equal(200); + expect(chatQueryReply.events?.length).to.equal(3); + expect(chatQueryReply.events![0]).to.equal(await Message.getCid(message1.message)); + expect(chatQueryReply.events![1]).to.equal(await Message.getCid(message2.message)); + expect(chatQueryReply.events![2]).to.equal(await Message.getCid(message3.message)); + }); + + it('filters by recipient', async () => { + // scenario: alice installs a free-for-all protocol and makes posts with both bob and carol as recipients + // carol and bob also make posts with alice as a recipient + // alice queries for events meant for specific recipients + // alice then makes another message to query for using the pervious as a cursor + + const alice = await DidKeyResolver.generate(); + const bob = await DidKeyResolver.generate(); + const carol = await DidKeyResolver.generate(); + + const protocolConfigure = await TestDataGenerator.generateProtocolsConfigure({ + 
author : alice, + protocolDefinition : { ...freeForAll } + }); + const protocolConfigureReply = await dwn.processMessage(alice.did, protocolConfigure.message); + expect(protocolConfigureReply.status.code).to.equal(202); + const protocol = protocolConfigure.message.descriptor.definition.protocol; + + const postProperties = { + protocol : protocol, + protocolPath : 'post', + schema : freeForAll.types.post.schema, + dataFormat : freeForAll.types.post.dataFormats[0], + }; + + const messageFromAliceToBob = await TestDataGenerator.generateRecordsWrite({ + ...postProperties, + author : alice, + recipient : bob.did, + }); + const messageFromAliceToBobReply = await dwn.processMessage(alice.did, messageFromAliceToBob.message, messageFromAliceToBob.dataStream); + expect(messageFromAliceToBobReply.status.code).to.equal(202); + + const messageFromAliceToCarol = await TestDataGenerator.generateRecordsWrite({ + ...postProperties, + author : alice, + recipient : carol.did, + }); + const messageFromAliceToCarolReply = await dwn.processMessage(alice.did, messageFromAliceToCarol.message, messageFromAliceToCarol.dataStream); + expect(messageFromAliceToCarolReply.status.code).to.equal(202); + + const messageFromBobToAlice = await TestDataGenerator.generateRecordsWrite({ + ...postProperties, + author : bob, + recipient : alice.did, + }); + const messageFromBobToAliceReply = await dwn.processMessage(alice.did, messageFromBobToAlice.message, messageFromBobToAlice.dataStream); + expect(messageFromBobToAliceReply.status.code).to.equal(202); + + const messageFromCarolToAlice = await TestDataGenerator.generateRecordsWrite({ + ...postProperties, + author : carol, + recipient : alice.did, + }); + const messageFromCarolToAliceReply = await dwn.processMessage(alice.did, messageFromCarolToAlice.message, messageFromCarolToAlice.dataStream); + expect(messageFromCarolToAliceReply.status.code).to.equal(202); + + let authorQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + 
filters : [{ recipient: alice.did }] + }); + let authorQueryReply = await dwn.processMessage(alice.did, authorQuery.message); + expect(authorQueryReply.status.code).to.equal(200); + expect(authorQueryReply.events?.length).to.equal(2); + expect(authorQueryReply.events![0]).to.equal(await Message.getCid(messageFromBobToAlice.message)); + expect(authorQueryReply.events![1]).to.equal(await Message.getCid(messageFromCarolToAlice.message)); + + authorQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ recipient: bob.did }] + }); + authorQueryReply = await dwn.processMessage(alice.did, authorQuery.message); + expect(authorQueryReply.status.code).to.equal(200); + expect(authorQueryReply.events?.length).to.equal(1); + expect(authorQueryReply.events![0]).to.equal(await Message.getCid(messageFromAliceToBob.message)); + + + // add another message + const messageFromAliceToBob2 = await TestDataGenerator.generateRecordsWrite({ + ...postProperties, + author : alice, + recipient : bob.did, + }); + const messageFromAliceToBob2Reply = await dwn.processMessage(alice.did, messageFromAliceToBob2.message, messageFromAliceToBob2.dataStream); + expect(messageFromAliceToBob2Reply.status.code).to.equal(202); + + authorQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ recipient: bob.did }], + cursor : authorQueryReply.events![0] + }); + + authorQueryReply = await dwn.processMessage(alice.did, authorQuery.message); + expect(authorQueryReply.status.code).to.equal(200); + expect(authorQueryReply.events?.length).to.equal(1); + expect(authorQueryReply.events![0]).to.equal(await Message.getCid(messageFromAliceToBob2.message)); + }); + + it('filters by schema', async () => { + const alice = await DidKeyResolver.generate(); + + const schema1Message1 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + schema : 'schema1' + }); + const schema1Message1Reply = await dwn.processMessage(alice.did, 
schema1Message1.message, schema1Message1.dataStream); + expect(schema1Message1Reply.status.code).to.equal(202); + + const schema2Message1 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + schema : 'schema2' + }); + const schema2Message1Reply = await dwn.processMessage(alice.did, schema2Message1.message, schema2Message1.dataStream); + expect(schema2Message1Reply.status.code).to.equal(202); + + const schema2Message2 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + schema : 'schema2' + }); + const schema2Message2Reply = await dwn.processMessage(alice.did, schema2Message2.message, schema2Message2.dataStream); + expect(schema2Message2Reply.status.code).to.equal(202); + + let schema1Query = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema1' }], + }); + let schema1QueryReply = await dwn.processMessage(alice.did, schema1Query.message); + expect(schema1QueryReply.status.code).to.equal(200); + expect(schema1QueryReply.events?.length).to.equal(1); + expect(schema1QueryReply.events![0]).to.equal(await Message.getCid(schema1Message1.message)); + + let schema2Query = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema2' }], + }); + let schema2QueryReply = await dwn.processMessage(alice.did, schema2Query.message); + expect(schema2QueryReply.status.code).to.equal(200); + expect(schema2QueryReply.events?.length).to.equal(2); + expect(schema2QueryReply.events![0]).to.equal(await Message.getCid(schema2Message1.message)); + expect(schema2QueryReply.events![1]).to.equal(await Message.getCid(schema2Message2.message)); + + const schema1Message2 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + schema : 'schema1' + }); + const schema1Message2Reply = await dwn.processMessage(alice.did, schema1Message2.message, schema1Message2.dataStream); + expect(schema1Message2Reply.status.code).to.equal(202); + + schema1Query = await 
TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema1' }], + cursor : schema1QueryReply.events![0] + }); + schema1QueryReply = await dwn.processMessage(alice.did, schema1Query.message); + expect(schema1QueryReply.status.code).to.equal(200); + expect(schema1QueryReply.events?.length).to.equal(1); + expect(schema1QueryReply.events![0]).to.equal(await Message.getCid(schema1Message2.message)); + + schema2Query = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ schema: 'schema2' }], + cursor : schema2QueryReply.events![1] + }); + schema2QueryReply = await dwn.processMessage(alice.did, schema2Query.message); + expect(schema2QueryReply.status.code).to.equal(200); + expect(schema2QueryReply.events?.length).to.equal(0); + }); + + xit('filters by recordId', async () => { + const alice = await DidKeyResolver.generate(); + + // a write as a control, will not show up in query + const controlWrite = await TestDataGenerator.generateRecordsWrite({ + author : alice, + schema : 'schema1' + }); + const write2Reply = await dwn.processMessage(alice.did, controlWrite.message, controlWrite.dataStream); + expect(write2Reply.status.code).to.equal(202); + + const write = await TestDataGenerator.generateRecordsWrite({ + author : alice, + schema : 'schema1' + }); + const write1Reply = await dwn.processMessage(alice.did, write.message, write.dataStream); + expect(write1Reply.status.code).to.equal(202); + + const update = await TestDataGenerator.generateFromRecordsWrite({ + author : alice, + existingWrite : write.recordsWrite, + }); + const updateReply = await dwn.processMessage(alice.did, update.message, update.dataStream); + expect(updateReply.status.code).to.equal(202); + + let recordQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ recordId: write.message.recordId }], + }); + let recordQueryReply = await dwn.processMessage(alice.did, recordQuery.message); + 
expect(recordQueryReply.status.code).to.equal(200); + expect(recordQueryReply.events?.length).to.equal(2); + expect(recordQueryReply.events![0]).to.equal(await Message.getCid(write.message)); + expect(recordQueryReply.events![1]).to.equal(await Message.getCid(update.message)); + + const deleteRecord = await TestDataGenerator.generateRecordsDelete({ + author : alice, + recordId : write.message.recordId, + }); + const deleteRecordReply = await dwn.processMessage(alice.did, deleteRecord.message); + expect(deleteRecordReply.status.code).to.equal(202); + + recordQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ recordId: write.message.recordId }], + cursor : recordQueryReply.events![1] + }); + recordQueryReply = await dwn.processMessage(alice.did, recordQuery.message); + expect(recordQueryReply.status.code).to.equal(200); + expect(recordQueryReply.events?.length).to.equal(1); + expect(recordQueryReply.events![0]).to.equal(await Message.getCid(deleteRecord.message)); + }); + + it('filters by dataFormat', async () => { + // scenario: alice stores different file types and needs events relating to `image/jpeg` + // alice creates 3 files, one of them `image/jpeg` + // alice queries for `image/jpeg` retrieving the one message + // alice adds another image to query for using the prior image as a cursor + + const alice = await DidKeyResolver.generate(); + + const textFile = await TestDataGenerator.generateRecordsWrite({ + author : alice, + dataFormat : 'application/text' + }); + const textFileReply = await dwn.processMessage(alice.did, textFile.message, textFile.dataStream); + expect(textFileReply.status.code).to.equal(202); + + const jsonData = await TestDataGenerator.generateRecordsWrite({ + author : alice, + dataFormat : 'application/json' + }); + const jsonDataReply = await dwn.processMessage(alice.did, jsonData.message, jsonData.dataStream); + expect(jsonDataReply.status.code).to.equal(202); + + const imageData = await 
TestDataGenerator.generateRecordsWrite({ + author : alice, + dataFormat : 'image/jpeg' + }); + const imageDataReply = await dwn.processMessage(alice.did, imageData.message, imageData.dataStream); + expect(imageDataReply.status.code).to.equal(202); + + //get image data + let imageQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ + dataFormat: 'image/jpeg' + }] + }); + let imageQueryReply = await dwn.processMessage(alice.did, imageQuery.message); + expect(imageQueryReply.status.code).to.equal(200); + expect(imageQueryReply.events?.length).to.equal(1); + expect(imageQueryReply.events![0]).to.equal(await Message.getCid(imageData.message)); + + // add another image + const imageData2 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + dataFormat : 'image/jpeg' + }); + const imageData2Reply = await dwn.processMessage(alice.did, imageData2.message, imageData2.dataStream); + expect(imageData2Reply.status.code).to.equal(202); + + imageQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ + dataFormat: 'image/jpeg' + }], + cursor: imageQueryReply.events![0] + }); + imageQueryReply = await dwn.processMessage(alice.did, imageQuery.message); + expect(imageQueryReply.status.code).to.equal(200); + expect(imageQueryReply.events?.length).to.equal(1); + expect(imageQueryReply.events![0]).to.equal(await Message.getCid(imageData2.message)); + });; + + it('filters by dataSize', async () => { + // scenario: + // alice inserts both small and large data + // alice requests events for messages with data size under a threshold + + const alice = await DidKeyResolver.generate(); + + const smallSize1 = await TestDataGenerator.generateRecordsWrite({ + author: alice, + }); + const smallSize1Reply = await dwn.processMessage(alice.did, smallSize1.message, smallSize1.dataStream); + expect(smallSize1Reply.status.code).to.equal(202); + + const largeSize = await TestDataGenerator.generateRecordsWrite({ + author : 
alice, + data : TestDataGenerator.randomBytes(DwnConstant.maxDataSizeAllowedToBeEncoded + 1) + }); + const largeSizeReply = await dwn.processMessage(alice.did, largeSize.message, largeSize.dataStream); + expect(largeSizeReply.status.code).to.equal(202); + + const smallSize2 = await TestDataGenerator.generateRecordsWrite({ + author: alice, + }); + const smallSize2Reply = await dwn.processMessage(alice.did, smallSize2.message, smallSize2.dataStream); + expect(smallSize2Reply.status.code).to.equal(202); + + //get large sizes + let largeSizeQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ + dataSize: { gte: DwnConstant.maxDataSizeAllowedToBeEncoded + 1 } + }] + }); + let largeSizeQueryReply = await dwn.processMessage(alice.did, largeSizeQuery.message); + expect(largeSizeQueryReply.status.code).to.equal(200); + expect(largeSizeQueryReply.events?.length).to.equal(1); + expect(largeSizeQueryReply.events![0]).to.equal(await Message.getCid(largeSize.message)); + + const largeSize2 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + data : TestDataGenerator.randomBytes(DwnConstant.maxDataSizeAllowedToBeEncoded + 1) + }); + const largeSize2Reply = await dwn.processMessage(alice.did, largeSize2.message, largeSize2.dataStream); + expect(largeSize2Reply.status.code).to.equal(202); + + largeSizeQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ + dataSize: { gte: DwnConstant.maxDataSizeAllowedToBeEncoded + 1 } + }], + cursor: largeSizeQueryReply.events![0] + }); + largeSizeQueryReply = await dwn.processMessage(alice.did, largeSizeQuery.message); + expect(largeSizeQueryReply.status.code).to.equal(200); + expect(largeSizeQueryReply.events?.length).to.equal(1); + expect(largeSizeQueryReply.events![0]).to.equal(await Message.getCid(largeSize2.message)); + }); + + it('filters by contextId', async () => { + // scenario: + // alice configures a chat protocols and creates 2 chat threads + // alice 
invites bob as participant in thread1 and carol in thread2 + // alice writes messages to both bob and carol in their respective threads + // alice queries for events related to thread1 (gets the configure, bob participant, and chats to bob) + // alice writes more messages to both bob and carol in their respective threads + // alice queries for events beyond the latest from the last query, retrieving the additional messages to bob + + const alice = await DidKeyResolver.generate(); + const bob = await DidKeyResolver.generate(); + const carol = await DidKeyResolver.generate(); + + const protocolConfigure = await TestDataGenerator.generateProtocolsConfigure({ + author : alice, + protocolDefinition : { ...threadProtocol } + }); + const protocolConfigureReply = await dwn.processMessage(alice.did, protocolConfigure.message); + expect(protocolConfigureReply.status.code).to.equal(202); + const protocol = protocolConfigure.message.descriptor.definition.protocol; + + // alice creates 2 threads + const thread1 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + protocol : protocol, + protocolPath : 'thread', + }); + const thread1Reply = await dwn.processMessage(alice.did, thread1.message, thread1.dataStream); + expect(thread1Reply.status.code).to.equal(202); + + const thread2 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + protocol : protocol, + protocolPath : 'thread', + }); + const thread2Reply = await dwn.processMessage(alice.did, thread2.message, thread2.dataStream); + expect(thread2Reply.status.code).to.equal(202); + + // alice adds bob as a participant to thread 1 + const bobParticipant = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : bob.did, + parentId : thread1.message.recordId, + contextId : thread1.message.contextId, + protocol : protocol, + protocolPath : 'thread/participant' + }); + const bobParticipantReply = await dwn.processMessage(alice.did, bobParticipant.message, 
bobParticipant.dataStream); + expect(bobParticipantReply.status.code).to.equal(202); + + // alice adds carol as a participant to thread 1 + const carolParticipant = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : carol.did, + parentId : thread2.message.recordId, + contextId : thread2.message.contextId, + protocol : protocol, + protocolPath : 'thread/participant' + }); + const carolParticipantReply = await dwn.processMessage(alice.did, carolParticipant.message, carolParticipant.dataStream); + expect(carolParticipantReply.status.code).to.equal(202); + + // alice writes a message to bob on thread 1 + const thread1Chat1 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : bob.did, + parentId : thread1.message.recordId, + contextId : thread1.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + }); + const thread1Chat1Reply = await dwn.processMessage(alice.did, thread1Chat1.message, thread1Chat1.dataStream); + expect(thread1Chat1Reply.status.code).to.equal(202); + + // alice writes a message to carol on thread 2 + const thread2Chat1 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : carol.did, + parentId : thread2.message.recordId, + contextId : thread2.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + }); + const thread2Chat1Reply = await dwn.processMessage(alice.did, thread2Chat1.message, thread2Chat1.dataStream); + expect(thread2Chat1Reply.status.code).to.equal(202); + + // alice writes another message to bob on thread 1 + const thread1Chat2 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : bob.did, + parentId : thread1.message.recordId, + contextId : thread1.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + }); + const chatMessage2Reply = await dwn.processMessage(alice.did, thread1Chat2.message, thread1Chat2.dataStream); + expect(chatMessage2Reply.status.code).to.equal(202); 
+ + // alice queries events for thread1 + let threadContextQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ + protocol : protocol, + contextId : thread1.message.contextId, + }], + }); + let threadContextQueryReply = await dwn.processMessage(alice.did, threadContextQuery.message); + expect(threadContextQueryReply.status.code).to.equal(200); + expect(threadContextQueryReply.events?.length).to.equal(4); + expect(threadContextQueryReply.events![0]).to.equal(await Message.getCid(thread1.message)); + expect(threadContextQueryReply.events![1]).to.equal(await Message.getCid(bobParticipant.message)); + expect(threadContextQueryReply.events![2]).to.equal(await Message.getCid(thread1Chat1.message)); + expect(threadContextQueryReply.events![3]).to.equal(await Message.getCid(thread1Chat2.message)); + + // alice adds more chats to both threads + const thread1Chat3 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : bob.did, + parentId : thread1.message.recordId, + contextId : thread1.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + }); + const thread1Chat3Reply = await dwn.processMessage(alice.did, thread1Chat3.message, thread1Chat3.dataStream); + expect(thread1Chat3Reply.status.code).to.equal(202); + + const thread2Chat2 = await TestDataGenerator.generateRecordsWrite({ + author : alice, + recipient : carol.did, + parentId : thread2.message.recordId, + contextId : thread2.message.contextId, + protocol : protocol, + protocolPath : 'thread/chat', + }); + const thread2Chat2Reply = await dwn.processMessage(alice.did, thread2Chat2.message, thread2Chat2.dataStream); + expect(thread2Chat2Reply.status.code).to.equal(202); + + // query beyond a cursor + threadContextQuery = await TestDataGenerator.generateEventsQuery({ + author : alice, + filters : [{ + protocol : protocol, + contextId : thread1.message.contextId, + }], + cursor: threadContextQueryReply.events![3] + }); + threadContextQueryReply 
= await dwn.processMessage(alice.did, threadContextQuery.message); + expect(threadContextQueryReply.status.code).to.equal(200); + expect(threadContextQueryReply.events?.length).to.equal(1); + expect(threadContextQueryReply.events![0]).to.equal(await Message.getCid(thread1Chat3.message)); + }); + }); +}; \ No newline at end of file diff --git a/tests/store/index-level.spec.ts b/tests/store/index-level.spec.ts index 872590b35..725be4807 100644 --- a/tests/store/index-level.spec.ts +++ b/tests/store/index-level.spec.ts @@ -1,85 +1,84 @@ -import type { LevelWrapper } from '../../src/store/level-wrapper.js'; - -import chaiAsPromised from 'chai-as-promised'; -import chai, { expect } from 'chai'; +import type { Filter } from '../../src/types/query-types.js'; import { ArrayUtility } from '../../src/utils/array.js'; +import { createLevelDatabase } from '../../src/store/level-wrapper.js'; +import { DwnErrorCode } from '../../src/index.js'; import { IndexLevel } from '../../src/store/index-level.js'; import { lexicographicalCompare } from '../../src/utils/string.js'; +import { SortDirection } from '../../src/types/query-types.js'; +import { Temporal } from '@js-temporal/polyfill'; import { TestDataGenerator } from '../utils/test-data-generator.js'; -import { Time } from '../../src/index.js'; import { v4 as uuid } from 'uuid'; -chai.use(chaiAsPromised); +import chaiAsPromised from 'chai-as-promised'; +import chai, { expect } from 'chai'; +chai.use(chaiAsPromised); -describe('Index Level', () => { +describe('IndexLevel', () => { + let testIndex: IndexLevel; + const tenant = 'did:alice:index-test'; describe('put', () => { - let index: IndexLevel; - let testPartition: LevelWrapper; - const tenant = 'did:alice:index'; before(async () => { - index = new IndexLevel({ location: 'TEST-INDEX' }); - await index.open(); - testPartition = await index.db.partition(tenant); + testIndex = new IndexLevel({ + createLevelDatabase, + location: 'TEST-INDEX', + }); + await testIndex.open(); }); 
beforeEach(async () => { - await index.clear(); + await testIndex.clear(); }); after(async () => { - await index.close(); + await testIndex.close(); }); - it('adds 1 key per property aside from id', async () => { - await index.put(tenant, uuid(), { - dateCreated : new Date().toISOString(), - 'a' : 'b', - 'c' : 'd' + describe('fails to index with no indexable properties', () => { + it('fails on empty indexes', async () => { + const id = uuid(); + const failedIndexPromise = testIndex.put(tenant, id, {}); + await expect(failedIndexPromise).to.eventually.be.rejectedWith(DwnErrorCode.IndexMissingIndexableProperty); }); - - const keys = await ArrayUtility.fromAsyncGenerator(testPartition.keys()); - expect(keys.length).to.equal(4); }); - it('flattens nested records', async () => { + it('successfully indexes', async () => { const id = uuid(); - const doc = { - some: { - nested: { - object: true - } - } - }; - await index.put(tenant, id, doc); - - const key = await testPartition.get(index['join']('some.nested.object', true, id)); - expect(key).to.equal(id); + const successfulIndex = testIndex.put(tenant, id, { + id, + foo: 'foo', + }); + await expect(successfulIndex).to.eventually.not.be.rejected; + const results = await testIndex.query(tenant, [{ id: id }], { sortProperty: 'id' }); + expect(results[0]).to.equal(id); }); - it('removes empty objects', async () => { + it('adds one index key per property, aside from id', async () => { const id = uuid(); - const doc = { - empty: { nested: { } } - }; - await index.put(tenant, id, doc); + const dateCreated = new Date().toISOString(); - await expect(testPartition.get(index['join']('empty', '[object Object]', id))).to.eventually.be.undefined; - await expect(testPartition.get(index['join']('empty.nested', '[object Object]', id))).to.eventually.be.undefined; - }); + await testIndex.put(tenant, id, { + 'a' : 'b', // 1 key + 'c' : 'd', // 1 key + dateCreated, // 1 key + }); - it('removes empty arrays', async () => { - const id = 
uuid(); - const doc = { - empty: [ [ ] ] - }; - await index.put(tenant, id, doc); + let keys = await ArrayUtility.fromAsyncGenerator(testIndex.db.keys()); + expect(keys.length).to.equal(4); + + await testIndex.clear(); - await expect(testPartition.get(index['join']('empty', '', id))).to.eventually.be.undefined; - await expect(testPartition.get(index['join']('empty.0', '', id))).to.eventually.be.undefined; + await testIndex.put(tenant, id, { + 'a' : 'b', // 1 key + 'c' : 'd', // 1 ke + 'e' : 'f', // 1 key + dateCreated, // 1 key + }); + keys = await ArrayUtility.fromAsyncGenerator(testIndex.db.keys()); + expect(keys.length).to.equal(5); }); it('should not put anything if aborted beforehand', async () => { @@ -87,390 +86,983 @@ describe('Index Level', () => { controller.abort('reason'); const id = uuid(); - const doc = { + const index = { + id, foo: 'bar' }; - try { - await index.put(tenant, id, doc, { signal: controller.signal }); - } catch (e) { - expect(e).to.equal('reason'); - } + const indexPromise = testIndex.put(tenant, id, index, { signal: controller.signal }); + await expect(indexPromise).to.eventually.rejectedWith('reason'); - const result = await index.query(tenant, [{ foo: 'bar' }]); + const result = await testIndex.query(tenant, [{ foo: 'bar' }], { sortProperty: 'id' }); expect(result.length).to.equal(0); }); - - it('should extract value from key', async () => { - const testValue = 'testValue'; - await index.put(tenant, uuid(), { - dateCreated : new Date().toISOString(), - 'testKey' : testValue, - }); - - const keys = await ArrayUtility.fromAsyncGenerator(testPartition.keys()); - // encoded string values are surrounded by quotes. 
- expect(keys.filter( k => IndexLevel.extractValueFromKey(k) === `"${testValue}"`).length).to.equal(1); - }); }); describe('query', () => { - let index: IndexLevel; - const tenant = 'did:alice:index'; before(async () => { - index = new IndexLevel({ location: 'TEST-INDEX' }); - await index.open(); + testIndex = new IndexLevel({ + createLevelDatabase, + location: 'TEST-INDEX', + }); + await testIndex.open(); }); beforeEach(async () => { - await index.clear(); + await testIndex.clear(); }); after(async () => { - await index.close(); + await testIndex.close(); }); - it('works', async () => { + it('works', async () =>{ const id1 = uuid(); const doc1 = { + id : id1, 'a' : 'b', 'c' : 'd' }; const id2 = uuid(); const doc2 = { + id : id2, 'a' : 'c', 'c' : 'd' }; const id3 = uuid(); const doc3 = { + id : id3, 'a' : 'b', 'c' : 'e' }; - await index.put(tenant, id1, doc1); - await index.put(tenant, id2, doc2); - await index.put(tenant, id3, doc3); + await testIndex.put(tenant, id1, doc1); + await testIndex.put(tenant, id2, doc2); + await testIndex.put(tenant, id3, doc3); - const result = await index.query(tenant, [{ + const result = await testIndex.query(tenant, [{ 'a' : 'b', 'c' : 'e' - }]); + }], { sortProperty: 'id' }); expect(result.length).to.equal(1); expect(result[0]).to.equal(id3); }); - it('should not match values prefixed with the query', async () => { - const id = uuid(); - const doc = { - value: 'foobar' - }; - - await index.put(tenant, id, doc); + it('should return all records if an empty filter array is passed', async () => { + const items = [ 'b', 'a', 'd', 'c' ]; + for (const item of items) { + await testIndex.put(tenant, item, { letter: item, index: items.indexOf(item) }); + } - const resp = await index.query(tenant, [{ - value: 'foo' - }]); + // empty array + let allResults = await testIndex.query(tenant, [],{ sortProperty: 'letter' }); + expect(allResults).to.eql(['a', 'b', 'c', 'd']); - expect(resp.length).to.equal(0); + // empty filter + allResults = await 
testIndex.query(tenant, [{}],{ sortProperty: 'letter' }); + expect(allResults).to.eql(['a', 'b', 'c', 'd']); }); - it('supports OR queries', async () => { - const id1 = uuid(); - const doc1 = { - 'a': 'a' - }; + describe('queryWithIteratorPaging()', () => { + it('invalid sort property returns no results', async () => { + const testVals = ['b', 'd', 'c', 'a']; + for (const val of testVals) { + await testIndex.put(tenant, val, { val, schema: 'schema' }); + } - const id2 = uuid(); - const doc2 = { - 'a': 'b' - }; + const filters = [{ schema: 'schema' }]; - const id3 = uuid(); - const doc3 = { - 'a': 'c' - }; + // control test: return all results + let validResults = await testIndex.queryWithIteratorPaging(tenant, filters, { sortProperty: 'val' }); + expect(validResults.length).to.equal(4); - await index.put(tenant, id1, doc1); - await index.put(tenant, id2, doc2); - await index.put(tenant, id3, doc3); + // sort by invalid property returns no results + let invalidResults = await testIndex.queryWithIteratorPaging(tenant, filters, { sortProperty: 'invalid' }); + expect(invalidResults.length).to.equal(0); - const resp = await index.query(tenant, [{ - a: [ 'a', 'b' ] - }]); + // control test: returns after cursor + validResults = await testIndex.queryWithIteratorPaging(tenant, filters, { sortProperty: 'val', cursor: 'a' }); + expect(validResults.length).to.equal(3); - expect(resp.length).to.equal(2); - expect(resp).to.include(id1); - expect(resp).to.include(id2); - }); + // invalid sort property with a valid cursor value + invalidResults = await testIndex.queryWithIteratorPaging(tenant, filters, { sortProperty: 'invalid', cursor: 'a' }); + expect(invalidResults.length).to.equal(0); + }); - it('supports range queries', async () => { - for (let i = -5; i < 5; ++i) { - const id = uuid(); - const doc = { - dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 15 + i }) - }; + it('invalid cursor returns no results', async () => { + const testVals = ['e', 'b', 'd', 
'c']; + for (const val of testVals) { + await testIndex.put(tenant, val, { val, schema: 'schema' }); + } - await index.put(tenant, id, doc); - } + //insert 'a' as a valid cursor, but not valid match by assigning it to schema2 + await testIndex.put(tenant, 'a', { val: 'a', schema: 'schema2' }); - const resp = await index.query(tenant, [{ - dateCreated: { - gte: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) - } - }]); + const filters = [{ schema: 'schema' }]; - expect(resp.length).to.equal(5); - }); + // control test: return all results + const validResults = await testIndex.queryWithIteratorPaging(tenant, filters, { sortProperty: 'val', cursor: 'b' }); + expect(validResults.length).to.equal(3); + expect(validResults).to.eql([ 'c', 'd', 'e' ]); - it('supports prefixed range queries', async () => { - const id = uuid(); - const doc = { - value: 'foobar' - }; + // pass invalid cursor returns no results + let invalidResults = await testIndex.queryWithIteratorPaging(tenant, filters, { sortProperty: 'val', cursor: 'invalid' }); + expect(invalidResults.length).to.equal(0); - await index.put(tenant, id, doc); + // pass valid cursor that isn't part of the query/matches + invalidResults = await testIndex.queryWithIteratorPaging(tenant, filters, { sortProperty: 'val', cursor: 'a' }); + expect(invalidResults.length).to.equal(0); + }); + }); - const resp = await index.query(tenant, [{ - value: { - gte: 'foo' + describe('queryWithInMemoryPaging()', () => { + it('invalid sort property returns no results', async () => { + const testVals = ['b', 'd', 'c', 'a']; + for (const val of testVals) { + await testIndex.put(tenant, val, { val, schema: 'schema' }); } - }]); - expect(resp.length).to.equal(1); - expect(resp).to.include(id); - }); + const filters = [{ schema: 'schema' }]; - it('supports suffixed range queries', async () => { - const id1 = uuid(); - const doc1 = { - foo: 'bar' - }; + // control test: return all results + let validResults = await 
testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'val' }); + expect(validResults.length).to.equal(4); - const id2 = uuid(); - const doc2 = { - foo: 'barbaz' - }; + // sort by invalid property returns no results + let invalidResults = await testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'invalid' }); + expect(invalidResults.length).to.equal(0); - await index.put(tenant, id1, doc1); - await index.put(tenant, id2, doc2); + // control test: returns after cursor + validResults = await testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'val', cursor: 'a' }); + expect(validResults.length).to.equal(3); + + // invalid sort property with a valid cursor value + invalidResults = await testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'invalid', cursor: 'a' }); + expect(invalidResults.length).to.equal(0); + }); - const resp = await index.query(tenant, [{ - foo: { - lte: 'bar' + it('invalid cursor returns no results', async () => { + const testVals = ['e', 'b', 'd', 'c']; + for (const val of testVals) { + await testIndex.put(tenant, val, { val, schema: 'schema' }); } - }]); - expect(resp.length).to.equal(1); - expect(resp).to.include(id1); - }); + //insert 'a' as a valid cursor, but not valid match by assigning it to schema2 + await testIndex.put(tenant, 'a', { val: 'a', schema: 'schema2' }); - it('treats strings differently', async () => { - const id1 = uuid(); - const doc1 = { - foo: true - }; + const filters = [{ schema: 'schema' }]; - const id2 = uuid(); - const doc2 = { - foo: 'true' - }; + // control test: return all results + const validResults = await testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'val', cursor: 'b' }); + expect(validResults.length).to.equal(3); + expect(validResults).to.eql([ 'c', 'd', 'e' ]); + + // pass invalid cursor returns no results + let invalidResults = await testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'val', cursor: 'invalid' }); + 
expect(invalidResults.length).to.equal(0); - await index.put(tenant, id1, doc1); - await index.put(tenant, id2, doc2); + // pass valid cursor that isn't part of the query/matches + invalidResults = await testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'val', cursor: 'a' }); + expect(invalidResults.length).to.equal(0); + }); - const resp = await index.query(tenant, [{ - foo: true - }]); + it('supports range queries', async () => { + const id = uuid(); + const doc1 = { + id, + value: 'foo' + }; + await testIndex.put(tenant, id, doc1); - expect(resp.length).to.equal(1); - expect(resp).to.include(id1); - }); + const id2 = uuid(); + const doc2 = { + id : id2, + value : 'foobar' + }; + await testIndex.put(tenant, id2, doc2); - describe('numbers', () => { + const id3 = uuid(); + const doc3 = { + id : id3, + value : 'foobaz' + }; + await testIndex.put(tenant, id3, doc3); - const positiveDigits = Array(10).fill({}).map( _ => TestDataGenerator.randomInt(0, Number.MAX_SAFE_INTEGER)).sort((a,b) => a - b); - const negativeDigits = - Array(10).fill({}).map( _ => TestDataGenerator.randomInt(0, Number.MAX_SAFE_INTEGER) * -1).sort((a,b) => a - b); - const testNumbers = Array.from(new Set([...positiveDigits, ...negativeDigits])); // unique numbers + const filters = [{ + value: { + gt : 'foo', + lte : 'foobaz' + } + }]; - it('should return records that match provided number equality filter', async () => { - const testIndex = Math.floor(Math.random() * testNumbers.length); + const resp = await testIndex.queryWithInMemoryPaging(tenant, filters, { sortProperty: 'id' }); - for (const digit of testNumbers) { - await index.put(tenant, digit.toString(), { digit }); - } - const resp = await index.query(tenant, [{ - digit: testNumbers.at(testIndex)! 
- }]); + expect(resp.length).to.equal(2); + expect(resp).to.have.members([id2, id3]); + + // only upper bounds + const lteFilter = [{ + value: { + lte: 'foobaz' + } + }]; + const lteReply = await testIndex.queryWithInMemoryPaging(tenant, lteFilter, { sortProperty: 'id' }); + + expect(lteReply.length).to.equal(3); + expect(lteReply).to.have.members([id, id2, id3]); - expect(resp.length).to.equal(1); - expect(resp.at(0)).to.equal(testNumbers.at(testIndex)!.toString()); }); + }); - it ('should not return records that do not match provided number equality filter', async() => { - // remove the potential (but unlikely) negative test result - for (const digit of testNumbers.filter(n => n !== 1)) { - await index.put(tenant, digit.toString(), { digit }); - } - const resp = await index.query(tenant, [{ - digit: 1 - }]); + describe('query()', () => { + it('should not match values prefixed with the query', async () => { + const id = uuid(); + const doc = { + id, + value: 'foobar' + }; + + await testIndex.put(tenant, id, doc); + const filters = [{ value: 'foo' }]; + const resp = await testIndex.query(tenant, filters, { sortProperty: 'id' }); expect(resp.length).to.equal(0); + }); - it('supports range queries with positive numbers inclusive', async () => { - for (const digit of testNumbers) { - await index.put(tenant, digit.toString(), { digit }); - } + it('supports OR queries', async () => { + const id1 = uuid(); + const doc1 = { + id : id1, + 'a' : 'a' + }; - const upperBound = positiveDigits.at(positiveDigits.length - 3)!; - const lowerBound = positiveDigits.at(2)!; - const resp = await index.query(tenant, [{ - digit: { - gte : lowerBound, - lte : upperBound - } - }]); + const id2 = uuid(); + const doc2 = { + id : id2, + 'a' : 'b' + }; + + const id3 = uuid(); + const doc3 = { + id : id3, + 'a' : 'c' + }; + + await testIndex.put(tenant, id1, doc1); + await testIndex.put(tenant, id2, doc2); + await testIndex.put(tenant, id3, doc3); + + const filters = [{ + a: [ 'a', 'b' ] + }]; 
+ + const resp = await testIndex.query(tenant, filters , { sortProperty: 'id' }); - const testResults = testNumbers.filter( n => n >= lowerBound && n <= upperBound).map(n => n.toString()); - expect(resp.sort()).to.eql(testResults.sort()); + expect(resp.length).to.equal(2); + expect(resp).to.include(id1); + expect(resp).to.include(id2); }); - it('supports range queries with negative numbers inclusive', async () => { - for (const digit of testNumbers) { - await index.put(tenant, digit.toString(), { digit }); + it('supports range queries', async () => { + for (let i = -5; i < 5; ++i) { + const id = uuid(); + const doc = { + id, + dateCreated: Temporal.PlainDateTime.from({ year: 2023, month: 1, day: 15 + i }).toString({ smallestUnit: 'microseconds' }) + }; + + await testIndex.put(tenant, id, doc); } - const upperBound = negativeDigits.at(negativeDigits.length - 2)!; - const lowerBound = negativeDigits.at(2)!; - const resp = await index.query(tenant, [{ - digit: { - gte : lowerBound, - lte : upperBound + const filters = [{ + dateCreated: { + gte: Temporal.PlainDateTime.from({ year: 2023, month: 1, day: 15 }).toString({ smallestUnit: 'microseconds' }) } - }]); + }]; + const resp = await testIndex.query(tenant, filters, { sortProperty: 'id' }); - const testResults = testNumbers.filter( n => n >= lowerBound && n <= upperBound).map(n => n.toString()); - expect(resp.sort()).to.eql(testResults.sort()); + expect(resp.length).to.equal(5); }); - it('should return numbers gt a negative digit', async () => { - for (const digit of testNumbers) { - await index.put(tenant, digit.toString(), { digit }); - } + it('supports prefixed range queries', async () => { + const id = uuid(); + const doc = { + id, + value: 'foobar' + }; - const lowerBound = negativeDigits.at(4)!; + await testIndex.put(tenant, id, doc); - const resp = await index.query(tenant, [{ - digit: { - gt: lowerBound, + const filters = [{ + value: { + gte: 'foo' } - }]); + }]; + + const resp = await testIndex.query(tenant, 
filters, { sortProperty: 'id' }); - const testResults = testNumbers.filter( n => n > lowerBound).map(n => n.toString()); - expect(resp.sort()).to.eql(testResults.sort()); + expect(resp.length).to.equal(1); + expect(resp).to.include(id); }); - it('should return numbers gt a digit', async () => { - for (const digit of testNumbers) { - await index.put(tenant, digit.toString(), { digit }); - } + it('supports suffixed range queries', async () => { + const id1 = uuid(); + const doc1 = { + id : id1, + foo : 'bar' + }; + + const id2 = uuid(); + const doc2 = { + id : id2, + foo : 'barbaz' + }; - const lowerBound = positiveDigits.at(4)!; + await testIndex.put(tenant, id1, doc1); + await testIndex.put(tenant, id2, doc2); - const resp = await index.query(tenant, [{ - digit: { - gt: lowerBound, + const filters = [{ + foo: { + lte: 'bar' } - }]); + }]; + + const resp = await testIndex.query(tenant, filters, { sortProperty: 'id' }); - const testResults = testNumbers.filter( n => n > lowerBound).map(n => n.toString()); - expect(resp.sort()).to.eql(testResults.sort()); + expect(resp.length).to.equal(1); + expect(resp).to.include(id1); }); - it('should return numbers lt a negative digit', async () => { - for (const digit of testNumbers) { - await index.put(tenant, digit.toString(), { digit }); - } + it('treats strings differently', async () => { + const id1 = uuid(); + const doc1 = { + id : id1, + foo : true + }; - const upperBound = negativeDigits.at(4)!; + const id2 = uuid(); + const doc2 = { + id : id2, + foo : 'true' + }; + + await testIndex.put(tenant, id1, doc1); + await testIndex.put(tenant, id2, doc2); + + const filters = [{ + foo: true + }]; + + const resp = await testIndex.query(tenant, filters, { sortProperty: 'id' }); + + expect(resp.length).to.equal(1); + expect(resp).to.include(id1); + }); + + describe('numbers', () => { + + const positiveDigits = Array(10).fill({}).map( _ => TestDataGenerator.randomInt(0, Number.MAX_SAFE_INTEGER)).sort((a,b) => a - b); + const 
negativeDigits = + Array(10).fill({}).map( _ => TestDataGenerator.randomInt(0, Number.MAX_SAFE_INTEGER) * -1).sort((a,b) => a - b); + const testNumbers = Array.from(new Set([...negativeDigits, ...positiveDigits])); // unique numbers + + it('should return records that match provided number equality filter', async () => { + const index = Math.floor(Math.random() * testNumbers.length); + + for (const digit of testNumbers) { + await testIndex.put(tenant, digit.toString(), { digit }); + } + + const filters = [{ + digit: testNumbers.at(index)! + }]; + + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + + expect(resp.length).to.equal(1); + expect(resp.at(0)).to.equal(testNumbers.at(index)!.toString()); + }); + + it ('should not return records that do not match provided number equality filter', async() => { + // remove the potential (but unlikely) negative test result + for (const digit of testNumbers.filter(n => n !== 1)) { + await testIndex.put(tenant, digit.toString(), { digit }); + } + + const filters = [{ digit: 1 }]; + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + + expect(resp.length).to.equal(0); + }); + + it('supports range queries with positive numbers inclusive', async () => { + for (const digit of testNumbers) { + await testIndex.put(tenant, digit.toString(), { digit }); + } + + const upperBound = positiveDigits.at(positiveDigits.length - 3)!; + const lowerBound = positiveDigits.at(2)!; + const filters = [{ + digit: { + gte : lowerBound, + lte : upperBound + } + }]; + + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + + const testResults = testNumbers.filter( n => n >= lowerBound && n <= upperBound).map(n => n.toString()); + expect(resp).to.eql(testResults); + }); - const resp = await index.query(tenant, [{ - digit: { - lt: upperBound, + it('supports range queries with negative numbers inclusive', async () => { + for (const digit of testNumbers) { + await 
testIndex.put(tenant, digit.toString(), { digit }); } - }]); - const testResults = testNumbers.filter( n => n < upperBound).map(n => n.toString()); - expect(resp.sort()).to.eql(testResults.sort()); + const upperBound = negativeDigits.at(negativeDigits.length - 2)!; + const lowerBound = negativeDigits.at(2)!; + + const filters = [{ + digit: { + gte : lowerBound, + lte : upperBound + } + }]; + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + + const testResults = testNumbers.filter( n => n >= lowerBound && n <= upperBound).map(n => n.toString()); + expect(resp).to.eql(testResults); + }); + + it('should return numbers gt a negative digit', async () => { + for (const digit of testNumbers) { + await testIndex.put(tenant, digit.toString(), { digit }); + } + + const lowerBound = negativeDigits.at(4)!; + const filters = [{ + digit: { + gt: lowerBound, + } + }]; + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + + const testResults = testNumbers.filter( n => n > lowerBound).map(n => n.toString()); + expect(resp).to.eql(testResults); + }); + + it('should return numbers gt a digit', async () => { + for (const digit of testNumbers) { + await testIndex.put(tenant,digit.toString(), { digit }); + } + + const lowerBound = positiveDigits.at(4)!; + + const filters = [{ + digit: { + gt: lowerBound, + } + }]; + + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + const testResults = testNumbers.filter( n => n > lowerBound).map(n => n.toString()); + expect(resp).to.eql(testResults); + }); + + it('should return numbers lt a negative digit', async () => { + for (const digit of testNumbers) { + await testIndex.put(tenant,digit.toString(), { digit }); + } + + const upperBound = negativeDigits.at(4)!; + + const filters = [{ + digit: { + lt: upperBound, + } + }]; + + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + + const testResults = testNumbers.filter( n => n 
< upperBound).map(n => n.toString()); + expect(resp).to.eql(testResults); + }); + + it('should return numbers lt a digit', async () => { + for (const digit of testNumbers) { + await testIndex.put(tenant,digit.toString(), { digit }); + } + + const upperBound = positiveDigits.at(4)!; + + const filters = [{ + digit: { + lt: upperBound, + } + }]; + + const resp = await testIndex.query(tenant, filters, { sortProperty: 'digit' }); + + const testResults = testNumbers.filter( n => n < upperBound).map(n => n.toString()); + expect(resp).to.eql(testResults); + }); }); - it('should return numbers lt a digit', async () => { - for (const digit of testNumbers) { - await index.put(tenant, digit.toString(), { digit }); - } + describe('booleans', () => { + it('should return records that match provided boolean equality filter', async () => { + const itemTrueId = uuid(); + const boolTrueItem = { + id : itemTrueId, + schema : 'schema', + published : true, + }; + await testIndex.put(tenant, itemTrueId, boolTrueItem); + + const itemFalseId = uuid(); + const boolFalseItem = { + id : itemFalseId, + schema : 'schema', + published : false, + }; + await testIndex.put(tenant, itemFalseId, boolFalseItem); + + const bothFilter = [{ schema: 'schema' }]; + // control + const resp = await testIndex.query(tenant, bothFilter, { sortProperty: 'id' }); + expect(resp.length).to.equal(2); + expect(resp).to.have.members([ itemTrueId, itemFalseId ]); + + const trueFilter = [{ published: true, schema: 'schema' }]; + // equality true + const respTrue = await testIndex.query(tenant, trueFilter, { sortProperty: 'id' }); + expect(respTrue.length).to.equal(1); + expect(respTrue).to.have.members([ itemTrueId ]); + + const falseFilter = [{ published: false, schema: 'schema' }]; + // equality false + const respFalse = await testIndex.query(tenant, falseFilter, { sortProperty: 'id' }); + expect(respFalse.length).to.equal(1); + expect(respFalse).to.have.members([ itemFalseId ]); + }); + }); + + describe('sort, limit 
and cursor', () => { + it('only returns the number of results specified by the limit property', async () => { + const testVals = [ 'b', 'a', 'd', 'c']; + for (const val of testVals) { + await testIndex.put(tenant, val, { val, schema: 'schema' }); + } + + const filters = [{ schema: 'schema' }]; + + // limit results without cursor + let ascResults = await testIndex.query(tenant, filters, { sortProperty: 'val', limit: 2 }); + expect(ascResults.length).to.equal(2); + expect(ascResults).to.eql(['a', 'b']); + + // limit results with a cursor + ascResults = await testIndex.query(tenant, filters, { sortProperty: 'val', limit: 2, cursor: 'b' }); + expect(ascResults.length).to.equal(2); + expect(ascResults).to.eql(['c', 'd']); + }); + + it('can sort by any indexed property', async () => { + const testVals = ['b', 'd', 'c', 'a']; + for (const val of testVals) { + await testIndex.put(tenant, val, { val, schema: 'schema', index: testVals.indexOf(val) }); + } + + const filters = [{ schema: 'schema' }]; + + // sort by value ascending + const ascResults = await testIndex.query(tenant, filters, { sortProperty: 'val' }); + expect(ascResults.length).to.equal(testVals.length); + expect(ascResults).to.eql(['a', 'b', 'c', 'd']); + + // sort by index ascending + const ascIndexResults = await testIndex.query(tenant, filters, { sortProperty: 'index' }); + expect(ascIndexResults.length).to.equal(testVals.length); + expect(ascIndexResults).eql(testVals); + + // sort by value descending + const descResults = await testIndex.query(tenant, filters, { sortProperty: 'val', sortDirection: SortDirection.Descending }); + expect(descResults.length).to.equal(testVals.length); + expect(descResults).to.eql(['d', 'c', 'b', 'a']); + + // sort by index descending + const descIndexResults = await testIndex.query(tenant, filters, { sortProperty: 'index', sortDirection: SortDirection.Descending }); + expect(descIndexResults.length).to.equal(testVals.length); + 
expect(descIndexResults).eql([...testVals].reverse()); + }); + + it('sorts lexicographic with and without a cursor', async () => { + const testVals = [ 'b', 'a', 'd', 'c']; + for (const val of testVals) { + await testIndex.put(tenant, val, { val, schema: 'schema' }); + } + const filters = [{ schema: 'schema' }]; + // sort ascending without a cursor + const ascResults = await testIndex.query(tenant, filters, { sortProperty: 'val' }); + expect(ascResults.length).to.equal(4); + expect(ascResults).to.eql(['a', 'b', 'c', 'd']); + + // sort ascending with cursor + const ascResultsCursor = await testIndex.query(tenant, filters, { sortProperty: 'val', cursor: 'b' }); + expect(ascResultsCursor.length).to.equal(2); + expect(ascResultsCursor).to.eql(['c', 'd']); + + // sort descending without a cursor + const descResults = await testIndex.query(tenant, filters, { sortProperty: 'val', sortDirection: SortDirection.Descending }); + expect(descResults.length).to.equal(4); + expect(descResults).to.eql(['d', 'c', 'b', 'a']); + + // sort descending with cursor + const descResultsCursor = await testIndex.query(tenant, filters, { sortProperty: 'val', sortDirection: SortDirection.Descending, cursor: 'b' }); + expect(descResultsCursor.length).to.equal(1); + expect(descResultsCursor).to.eql(['a']); + }); + + it('sorts numeric with and without a cursor', async () => { + const testVals = [ -2, -1, 0, 1, 2 , 3 , 4 ]; + for (const val of testVals) { + await testIndex.put(tenant, val.toString(), { val, schema: 'schema' }); + } + + const filters = [{ schema: 'schema' }]; + // sort ascending without a cursor + const ascResults = await testIndex.query(tenant, filters, { sortProperty: 'val' }); + expect(ascResults.length).to.equal(testVals.length); + expect(ascResults).to.eql(['-2', '-1', '0', '1', '2' , '3' , '4']); + + // sort ascending with a cursor + const ascResultsCursor = await testIndex.query(tenant, filters, { sortProperty: 'val', cursor: '2' }); + 
expect(ascResultsCursor.length).to.equal(2); + expect(ascResultsCursor).to.eql(['3', '4']); + + // sort descending without a cursor + const descResults = await testIndex.query(tenant, filters, { sortProperty: 'val', sortDirection: SortDirection.Descending }); + expect(descResults.length).to.eql(testVals.length); + expect(descResults).to.eql(['4', '3', '2', '1', '0' , '-1' , '-2']); + + // sort descending with a cursor + const descResultsCursor = await testIndex.query(tenant, filters, { sortProperty: 'val', sortDirection: SortDirection.Descending, cursor: '2' }); + expect(descResultsCursor.length).to.equal(4); + expect(descResultsCursor).to.eql(['1', '0', '-1', '-2']); + }); + + it('sorts range queries with or without a cursor', async () => { + + const testItems = [ 'a', 'b', 'c', 'd', 'e', 'f', 'g', 'h' ]; - const upperBound = positiveDigits.at(4)!; + for (const item of testItems) { + await testIndex.put(tenant, item, { letter: item }); + } + + // test both upper and lower bounds + const lowerBound = 'b'; + const upperBound = 'g'; + + const bothBoundsFilters = [{ + letter: { + gte : lowerBound, + lte : upperBound + }, + }]; + + // ascending without a cursor + let response = await testIndex.query(tenant, bothBoundsFilters, { sortProperty: 'letter' }); + expect(response).to.eql(['b', 'c', 'd', 'e', 'f', 'g']); + + // descending without a cursor + response = await testIndex.query(tenant, bothBoundsFilters, { sortProperty: 'letter', sortDirection: SortDirection.Descending }); + expect(response).to.eql(['g', 'f', 'e', 'd', 'c', 'b']); + + // ascending with a cursor + response = await testIndex.query(tenant, bothBoundsFilters, { sortProperty: 'letter', cursor: 'e' }); + expect(response).to.eql([ 'f', 'g' ]); // should only return greater than e + + // descending with a cursor + response = await testIndex.query(tenant, bothBoundsFilters, { sortProperty: 'letter', sortDirection: SortDirection.Descending, cursor: 'e' }); + expect(response).to.eql([ 'd', 'c', 'b' ]); // 
should only return less than e + + + // test only upper bounds + const upperBoundsFilters = [{ + letter: { + lte: upperBound + }, + }]; + + // ascending without a cursor + response = await testIndex.query(tenant, upperBoundsFilters, { sortProperty: 'letter' }); + expect(response).to.eql(['a', 'b', 'c', 'd', 'e', 'f', 'g']); + + // descending without a cursor + response = await testIndex.query(tenant, upperBoundsFilters, { sortProperty: 'letter', sortDirection: SortDirection.Descending }); + expect(response).to.eql(['g', 'f', 'e', 'd', 'c', 'b', 'a']); + + // ascending with a cursor + response = await testIndex.query(tenant, upperBoundsFilters, { sortProperty: 'letter', cursor: 'e' }); + expect(response).to.eql([ 'f', 'g' ]); // should only return items greater than e + + // descending with a cursor + response = await testIndex.query(tenant, upperBoundsFilters, { sortProperty: 'letter', sortDirection: SortDirection.Descending, cursor: 'e' }); + expect(response).to.eql([ 'd', 'c', 'b', 'a' ]); // should only return items less than e + + // test only lower bounds + const lowerBoundsFilters = [{ + letter: { + gte: lowerBound + }, + }]; + + // ascending without a cursor + response = await testIndex.query(tenant, lowerBoundsFilters, { sortProperty: 'letter' }); + expect(response).to.eql(['b', 'c', 'd', 'e', 'f', 'g', 'h']); + + // descending without a cursor + response = await testIndex.query(tenant, lowerBoundsFilters, { sortProperty: 'letter', sortDirection: SortDirection.Descending }); + expect(response).to.eql(['h', 'g', 'f', 'e', 'd', 'c', 'b']); + + // ascending with a cursor + response = await testIndex.query(tenant, lowerBoundsFilters, { sortProperty: 'letter', cursor: 'e' }); + expect(response).to.eql([ 'f', 'g', 'h' ]); // should only return items greater than e + + // descending with a cursor + response = await testIndex.query(tenant, lowerBoundsFilters, { sortProperty: 'letter', sortDirection: SortDirection.Descending, cursor: 'e' }); + 
expect(response).to.eql([ 'd', 'c', 'b' ]); // should only return items less than e + }); + + it('sorts range queries negative integers with or without a cursor', async () => { + const testNumbers = [ -5, -4, -3 , -2, -1, 0, 1, 2, 3, 4, 5 ]; + for (const digit of testNumbers) { + await testIndex.put(tenant,digit.toString(), { digit }); + } + + const upperBound = 3; + const lowerBound = -2; + + const filters = [{ + digit: { + gte : lowerBound, + lte : upperBound + } + }]; + + let results = await testIndex.query(tenant,filters , { sortProperty: 'digit' }); + expect(results).to.eql([ '-2', '-1', '0', '1', '2', '3' ]); + + results = await testIndex.query(tenant, filters, { sortProperty: 'digit', cursor: '-2' }); + expect(results).to.eql(['-1', '0', '1', '2', '3']); + }); + + it('sorts range queries with remaining results in lte after cursor', async () => { + // create an array with unique IDs but multiple items representing the same digit. + const testItems = [{ + id : 'a', + digit : 1, + },{ + id : 'b', + digit : 2, + }, { + id : 'c', + digit : 3, + }, { + id : 'd', + digit : 4, + }, { + id : 'e', + digit : 4, + },{ + id : 'f', + digit : 4, + },{ + id : 'g', + digit : 4, + },{ + id : 'h', + digit : 5, + }]; + + for (const item of testItems) { + await testIndex.put(tenant, item.id, item); + } + + const lowerBound = 2; + const upperBound = 4; + + // with both lower and upper bounds + // ascending with a cursor + // this cursor should ony return results from the 'lte' part of the filter + let response = await testIndex.query(tenant, [{ + digit: { + gte : lowerBound, + lte : upperBound + }, + }], { sortProperty: 'id', cursor: 'd' }); + + expect(response).to.eql([ 'e', 'f', 'g' ]); + + // with no lower bounds + // ascending with a cursor + // this cursor should ony return results from the 'lte' part of the filter + response = await testIndex.query(tenant, [{ + digit: { + lte: upperBound + }, + }], { sortProperty: 'id', cursor: 'd' }); + + expect(response).to.eql([ 'e', 
'f', 'g']); // should only return two matching items + }); + + it('sorts OR queries with or without a cursor', async () => { + const testValsSchema1 = ['a1', 'b1', 'c1', 'd1']; + for (const val of testValsSchema1) { + await testIndex.put(tenant, val, { val, schema: 'schema1' }); + } + + const testValsSchema2 = ['a2', 'b2', 'c2', 'd2']; + for (const val of testValsSchema2) { + await testIndex.put(tenant, val, { val, schema: 'schema2' }); + } + + const filters = [{ + schema: ['schema1', 'schema2'] + }]; + + // sort ascending without cursor + let results = await testIndex.query(tenant, filters, { sortProperty: 'val' }); + expect(results).to.eql(['a1', 'a2', 'b1', 'b2', 'c1', 'c2', 'd1', 'd2']); + + // sort ascending from b2 onwards + results = await testIndex.query(tenant, filters, { sortProperty: 'val', cursor: 'b2' }); + expect(results).to.eql(['c1', 'c2', 'd1', 'd2']); + }); + + it('supports multiple filtered queries', async () => { + const items:Array<{ val: string, digit: number, property: boolean }> = []; + + const lowerBounds = -2; + const upperBounds = 3; + + // create 30 records with random digits between 1-9 + // every 3rd record should be a negative number + // every 5th record a property should be set to true + // every property not set to true should be set to false + + // we artificially use index #4 to be within the bounds of our query to be used as a cursor point. + for (let i = 0; i < 30; i++) { - const resp = await index.query(tenant, [{ - digit: { - lt: upperBound, + const digit = i === 4 ? TestDataGenerator.randomInt(lowerBounds, upperBounds) : + i % 3 === 0 ? + TestDataGenerator.randomInt(1,9) * -1: + TestDataGenerator.randomInt(1,9); + + const property = i % 5 === 0 ? 
true : false; + + const item = { val: IndexLevel.encodeNumberValue(i), digit, property }; + await testIndex.put(tenant, item.val, item); + items.push(item); } - }]); - const testResults = testNumbers.filter( n => n < upperBound).map(n => n.toString()); - expect(resp.sort()).to.eql(testResults.sort()); + const cursor = items.at(4)!.val; + // create the expected results; + const compareResults = new Set([ + ...items.filter(i => i.digit >= lowerBounds && i.digit <= upperBounds), + ...items.filter(i => i.property === true), + ].sort((a,b) => lexicographicalCompare(a.val, b.val)).map(i => i.val)); + + + const filters:Filter[] = [ + { digit: { gte: lowerBounds, lte: upperBounds } }, + { property: true } + ]; + + // query in ascending order. + const results = await testIndex.query(tenant, filters, { sortProperty: 'val' }); + expect(results).to.eql([...compareResults], 'results ascending'); + + const compareResultsAfterCursor = new Set([ + ...items.slice(5).filter(i => i.digit >= lowerBounds && i.digit <= upperBounds), + ...items.slice(5).filter(i => i.property === true), + ].sort((a,b) => lexicographicalCompare(a.val, b.val)) + .map(i => i.val)); + + // query in ascending order with cursor. 
+ const resultsWithCursor = await testIndex.query(tenant, filters, { sortProperty: 'val', cursor: cursor }); + expect(resultsWithCursor).to.eql([...compareResultsAfterCursor], 'results after cursor ascending'); + + const descResults = await testIndex.query(tenant, filters, { sortProperty: 'val', sortDirection: SortDirection.Descending }); + expect(descResults).to.eql([...compareResults].reverse(), 'results descending'); + + const descResultsAfterCursor = await testIndex.query(tenant, filters, { sortProperty: 'val', sortDirection: SortDirection.Descending, cursor: cursor }); + + const compareResultsAfterCursorDesc = new Set([ + ...items.slice(0, 4).filter(i => i.digit >= lowerBounds && i.digit <= upperBounds), + ...items.slice(0, 4).filter(i => i.property === true), + ].sort((a,b) => lexicographicalCompare(b.val, a.val)) + .map(i => i.val)); + expect(descResultsAfterCursor).to.eql([...compareResultsAfterCursorDesc], 'results after cursor descending'); + }); }); }); }); describe('delete', () => { - let index: IndexLevel; - const tenant = 'did:alice:index'; before(async () => { - index = new IndexLevel({ location: 'TEST-INDEX' }); - await index.open(); + testIndex = new IndexLevel({ + createLevelDatabase, + location: 'TEST-INDEX', + }); + await testIndex.open(); }); beforeEach(async () => { - await index.clear(); + await testIndex.clear(); }); after(async () => { - await index.close(); + await testIndex.close(); }); - it('works', async () => { + it('purges indexes', async () => { const id1 = uuid(); const doc1 = { + id : id1, 'a' : 'b', 'c' : 'd' }; const id2 = uuid(); const doc2 = { + id : id2, 'a' : 'b', 'c' : 'd' }; - await index.put(tenant, id1, doc1); - await index.put(tenant, id2, doc2); + await testIndex.put(tenant, id1, doc1); + await testIndex.put(tenant, id2, doc2); - let result = await index.query(tenant, [{ 'a': 'b', 'c': 'd' }]); + let result = await testIndex.query(tenant, [{ 'a': 'b', 'c': 'd' }], { sortProperty: 'id' }); 
expect(result.length).to.equal(2); expect(result).to.contain(id1); - await index.delete(tenant, id1); - + await testIndex.delete(tenant, id1); - result = await index.query(tenant, [{ 'a': 'b', 'c': 'd' }]); + result = await testIndex.query(tenant, [{ 'a': 'b', 'c': 'd' }], { sortProperty: 'id' }); expect(result.length).to.equal(1); + + await testIndex.delete(tenant, id2); + + const allKeys = await ArrayUtility.fromAsyncGenerator(testIndex.db.keys()); + expect(allKeys.length).to.equal(0); }); it('should not delete anything if aborted beforehand', async () => { @@ -479,21 +1071,57 @@ describe('Index Level', () => { const id = uuid(); const doc = { - foo: 'bar' + id : id, + foo : 'bar' }; - await index.put(tenant, id, doc); + await testIndex.put(tenant, id, doc); try { - await index.delete(tenant, id, { signal: controller.signal }); + await testIndex.delete(tenant, id, { signal: controller.signal }); } catch (e) { expect(e).to.equal('reason'); } - const result = await index.query(tenant, [{ foo: 'bar' }]); + const result = await testIndex.query(tenant, [{ foo: 'bar' }], { sortProperty: 'id' }); expect(result.length).to.equal(1); expect(result).to.contain(id); }); + + it('does nothing when attempting to purge key that does not exist', async () => { + const controller = new AbortController(); + controller.abort('reason'); + + const id = uuid(); + const doc = { + id : id, + foo : 'bar' + }; + + await testIndex.put(tenant, id, doc); + + // attempt purge an invalid id + await testIndex.delete(tenant, 'invalid-id'); + + const result = await testIndex.query(tenant, [{ foo: 'bar' }], { sortProperty: 'id' }); + expect(result.length).to.equal(1); + expect(result).to.contain(id); + }); + }); + + describe('encodeValue', () => { + it('should wrap string in quotes', async () => { + expect(IndexLevel.encodeValue('test')).to.equal(`"test"`); + }); + + it('should return string encoded number using encodeNumberValue()', async () => { + 
expect(IndexLevel.encodeValue(10)).to.equal(IndexLevel.encodeNumberValue(10)); + }); + + it('should return stringified boolean', () => { + expect(IndexLevel.encodeValue(true)).to.equal('true'); + expect(IndexLevel.encodeValue(false)).to.equal('false'); + }); }); describe('encodeNumberValue', () => { @@ -503,6 +1131,7 @@ describe('Index Level', () => { expect(encoded.length).to.equal(expectedLength); expect(encoded).to.equal('0000000000000100'); }); + it('should encode negative digits as an offset with a prefix', () => { const expectedPrefix = '!'; // expected length is maximum padding + the prefix. @@ -512,6 +1141,7 @@ describe('Index Level', () => { expect(encoded.length).to.equal(expectedLength); expect(encoded).to.equal('!9007199254740891'); }); + it('should encode digits to sort using lexicographical comparison', () => { const digits = [ -1000, -100, -10, 10, 100, 1000 ].sort((a,b) => a - b); const encodedDigits = digits.map(d => IndexLevel.encodeNumberValue(d)) @@ -520,4 +1150,50 @@ describe('Index Level', () => { digits.forEach((n,i) => expect(encodedDigits.at(i)).to.equal(IndexLevel.encodeNumberValue(n))); }); }); + + describe('isFilterConcise', () => { + const queryOptionsWithCursor = { sortProperty: 'sort', cursor: 'cursor' }; + const queryOptionsWithoutCursor = { sortProperty: 'sort' }; + + it('recordId is always concise', async () => { + expect(IndexLevel.isFilterConcise({ recordId: 'record-id' }, queryOptionsWithCursor)).to.equal(true); + expect(IndexLevel.isFilterConcise({ recordId: 'record-id' }, queryOptionsWithoutCursor)).to.equal(true); + }); + + it('other than if `recordId` exists, if a cursor exists it is never concise', async () => { + expect(IndexLevel.isFilterConcise({ schema: 'schema', contextId: 'contextId', parentId: 'parentId' }, queryOptionsWithCursor)).to.equal(false); + + // control + expect(IndexLevel.isFilterConcise({ schema: 'schema', contextId: 'contextId', parentId: 'parentId' }, queryOptionsWithoutCursor)).to.equal(true); + 
expect(IndexLevel.isFilterConcise({ recordId: 'record-id' }, queryOptionsWithCursor)).to.equal(true); + }); + + it('if there is no cursor - protocolPath, contextId, parentId, or schema return a concise filter', async () => { + expect(IndexLevel.isFilterConcise({ protocolPath: 'protocolPath' }, queryOptionsWithoutCursor)).to.equal(true); + expect(IndexLevel.isFilterConcise({ protocolPath: 'protocolPath' }, queryOptionsWithCursor)).to.equal(false); // control + + expect(IndexLevel.isFilterConcise({ contextId: 'contextId' }, queryOptionsWithoutCursor)).to.equal(true); + expect(IndexLevel.isFilterConcise({ contextId: 'contextId' }, queryOptionsWithCursor)).to.equal(false); // control + + expect(IndexLevel.isFilterConcise({ contextId: 'parentId' }, queryOptionsWithoutCursor)).to.equal(true); + expect(IndexLevel.isFilterConcise({ contextId: 'parentId' }, queryOptionsWithCursor)).to.equal(false); // control + + expect(IndexLevel.isFilterConcise({ contextId: 'schema' }, queryOptionsWithoutCursor)).to.equal(true); + expect(IndexLevel.isFilterConcise({ contextId: 'schema' }, queryOptionsWithCursor)).to.equal(false); // control + }); + + it('if there is no cursor, and it is not one of the conditions, return not concise', async () => { + expect(IndexLevel.isFilterConcise({ dataSize: { gt: 123 } }, queryOptionsWithoutCursor)).to.equal(false); + + // control + expect(IndexLevel.isFilterConcise({ schema: 'schema', contextId: 'contextId', parentId: 'parentId' }, queryOptionsWithoutCursor)).to.equal(true); + }); + + it('if protocol filter exists by itself it is not a concise filter', async () => { + expect(IndexLevel.isFilterConcise({ protocol: 'protocol' }, queryOptionsWithoutCursor)).to.equal(false); + + // control + expect(IndexLevel.isFilterConcise({ protocol: 'protocol', protocolPath: 'path/to' }, queryOptionsWithoutCursor)).to.equal(true); + }); + }); }); \ No newline at end of file diff --git a/tests/store/message-store.spec.ts b/tests/store/message-store.spec.ts index 
50b55fe80..20b3b022c 100644 --- a/tests/store/message-store.spec.ts +++ b/tests/store/message-store.spec.ts @@ -6,7 +6,7 @@ import { expect } from 'chai'; import { DidKeyResolver } from '../../src/index.js'; import { lexicographicalCompare } from '../../src/utils/string.js'; import { Message } from '../../src/core/message.js'; -import { SortOrder } from '../../src/types/message-types.js'; +import { SortDirection } from '../../src/types/query-types.js'; import { TestDataGenerator } from '../utils/test-data-generator.js'; import { TestStores } from '../test-stores.js'; @@ -36,8 +36,9 @@ export function testMessageStore(): void { const alice = await DidKeyResolver.generate(); const { message } = await TestDataGenerator.generatePermissionsRequest(); + const { messageTimestamp } = message.descriptor; - await messageStore.put(alice.did, message, {}); + await messageStore.put(alice.did, message, { messageTimestamp }); const expectedCid = await Message.getCid(message); @@ -52,9 +53,10 @@ export function testMessageStore(): void { const alice = await DidKeyResolver.generate(); const { message } = await TestDataGenerator.generateRecordsWrite(); + const { messageTimestamp } = message.descriptor; // inserting the message indicating it is the 'latest' in the index - await messageStore.put(alice.did, message, { latest: 'true' }); + await messageStore.put(alice.did, message, { latest: 'true', messageTimestamp }); const { messages: results1 } = await messageStore.query(alice.did, [{ latest: 'true' }]); expect(results1.length).to.equal(1); @@ -65,7 +67,7 @@ export function testMessageStore(): void { // deleting the existing indexes and replacing it indicating it is no longer the 'latest' const cid = await Message.getCid(message); await messageStore.delete(alice.did, cid); - await messageStore.put(alice.did, message, { latest: 'false' }); + await messageStore.put(alice.did, message, { latest: 'false', messageTimestamp }); const { messages: results3 } = await 
messageStore.query(alice.did, [{ latest: 'true' }]); expect(results3.length).to.equal(0); @@ -79,8 +81,9 @@ export function testMessageStore(): void { const schema = 'http://my-awesome-schema/awesomeness_schema'; const { message } = await TestDataGenerator.generateRecordsWrite({ schema }); + const { messageTimestamp } = message.descriptor; - await messageStore.put(alice.did, message, { schema }); + await messageStore.put(alice.did, message, { schema, messageTimestamp }); const { messages: results } = await messageStore.query(alice.did, [{ schema }]); expect((results[0] as RecordsWriteMessage).descriptor.schema).to.equal(schema); @@ -90,13 +93,14 @@ export function testMessageStore(): void { const alice = await DidKeyResolver.generate(); const { message } = await TestDataGenerator.generateRecordsWrite(); + const { messageTimestamp } = message.descriptor; const controller = new AbortController(); controller.signal.throwIfAborted = (): void => { }; // simulate aborting happening async controller.abort('reason'); try { - await messageStore.put(alice.did, message, {}, { signal: controller.signal }); + await messageStore.put(alice.did, message, { messageTimestamp }, { signal: controller.signal }); } catch (e) { expect(e).to.equal('reason'); } @@ -112,6 +116,7 @@ export function testMessageStore(): void { const schema = 'http://my-awesome-schema/awesomeness_schema#awesome-1?id=awesome_1'; const { message } = await TestDataGenerator.generateRecordsWrite({ schema }); + const { messageTimestamp } = message.descriptor; const controller = new AbortController(); queueMicrotask(() => { @@ -119,7 +124,7 @@ export function testMessageStore(): void { }); try { - await messageStore.put(alice.did, message, { schema }, { signal: controller.signal }); + await messageStore.put(alice.did, message, { schema, messageTimestamp }, { signal: controller.signal }); } catch (e) { expect(e).to.equal('reason'); } @@ -134,11 +139,34 @@ export function testMessageStore(): void { 
expect(fetchedMessage).to.be.undefined; }); + it('should not store anything if aborted beforehand', async () => { + const alice = await DidKeyResolver.generate(); + + const { message } = await TestDataGenerator.generateRecordsWrite(); + const { messageTimestamp } = message.descriptor; + + const controller = new AbortController(); + controller.signal.throwIfAborted = (): void => { }; // simulate aborting happening async + controller.abort('reason'); + + try { + await messageStore.put(alice.did, message, { messageTimestamp }, { signal: controller.signal }); + } catch (e) { + expect(e).to.equal('reason'); + } + + const expectedCid = await Message.getCid(message); + + const jsonMessage = await messageStore.get(alice.did, expectedCid); + expect(jsonMessage).to.equal(undefined); + }); + it('should not delete if aborted', async () => { const alice = await DidKeyResolver.generate(); const { message } = await TestDataGenerator.generateRecordsWrite(); - await messageStore.put(alice.did, message, { latest: 'true' }); + const { messageTimestamp } = message.descriptor; + await messageStore.put(alice.did, message, { latest: 'true', messageTimestamp }); const messageCid = await Message.getCid(message); const resultsAlice1 = await messageStore.get(alice.did, messageCid); @@ -158,8 +186,9 @@ export function testMessageStore(): void { const bob = await DidKeyResolver.generate(); const { message } = await TestDataGenerator.generateRecordsWrite(); - await messageStore.put(alice.did, message, { latest: 'true' }); - await messageStore.put(bob.did, message, { latest: 'true' }); + const { messageTimestamp } = message.descriptor; + await messageStore.put(alice.did, message, { latest: 'true', messageTimestamp }); + await messageStore.put(bob.did, message, { latest: 'true', messageTimestamp }); const messageCid = await Message.getCid(message); const resultsAlice1 = await messageStore.get(alice.did, messageCid); @@ -182,8 +211,10 @@ export function testMessageStore(): void { const bob = await 
DidKeyResolver.generate(); const { message } = await TestDataGenerator.generateRecordsWrite(); - await messageStore.put(alice.did, message, { latest: 'true' }); - await messageStore.put(bob.did, message, { latest: 'true' }); + const { messageTimestamp } = message.descriptor; + + await messageStore.put(alice.did, message, { latest: 'true', messageTimestamp }); + await messageStore.put(bob.did, message, { latest: 'true', messageTimestamp }); const messageCid = await Message.getCid(message); const resultsAlice1 = await messageStore.query(alice.did, [{ latest: 'true' }]); @@ -250,7 +281,7 @@ export function testMessageStore(): void { for (const message of messages) { await messageStore.put(alice.did, message.message, await message.recordsWrite.constructRecordsWriteIndexes(true)); } - const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { messageTimestamp: SortOrder.Ascending }); + const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { messageTimestamp: SortDirection.Ascending }); expect(messageQuery.length).to.equal(messages.length); const sortedRecords = messages.sort((a,b) => @@ -269,7 +300,7 @@ export function testMessageStore(): void { await messageStore.put(alice.did, message.message, await message.recordsWrite.constructRecordsWriteIndexes(true)); } - const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { dateCreated: SortOrder.Ascending }); + const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { dateCreated: SortDirection.Ascending }); expect(messageQuery.length).to.equal(messages.length); const sortedRecords = messages.sort((a,b) => @@ -289,7 +320,7 @@ export function testMessageStore(): void { await messageStore.put(alice.did, message.message, await message.recordsWrite.constructRecordsWriteIndexes(true)); } - const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { dateCreated: SortOrder.Descending }); + const { messages: messageQuery } = 
await messageStore.query(alice.did, [{}], { dateCreated: SortDirection.Descending }); expect(messageQuery.length).to.equal(messages.length); const sortedRecords = messages.sort((a,b) => @@ -310,7 +341,7 @@ export function testMessageStore(): void { await messageStore.put(alice.did, message.message, await message.recordsWrite.constructRecordsWriteIndexes(true)); } - const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { datePublished: SortOrder.Ascending }); + const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { datePublished: SortDirection.Ascending }); expect(messageQuery.length).to.equal(messages.length); const sortedRecords = messages.sort((a,b) => @@ -331,7 +362,7 @@ export function testMessageStore(): void { await messageStore.put(alice.did, message.message, await message.recordsWrite.constructRecordsWriteIndexes(true)); } - const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { datePublished: SortOrder.Descending }); + const { messages: messageQuery } = await messageStore.query(alice.did, [{}], { datePublished: SortDirection.Descending }); expect(messageQuery.length).to.equal(messages.length); const sortedRecords = messages.sort((a,b) => diff --git a/tests/test-suite.ts b/tests/test-suite.ts index 960c164a9..21c373709 100644 --- a/tests/test-suite.ts +++ b/tests/test-suite.ts @@ -3,7 +3,10 @@ import type { DataStore, EventLog, MessageStore } from '../src/index.js'; import { testDelegatedGrantScenarios } from './scenarios/delegated-grant.spec.js'; import { testDwnClass } from './dwn.spec.js'; import { testEndToEndScenarios } from './scenarios/end-to-end-tests.spec.js'; +import { testEventLog } from './event-log/event-log.spec.js'; import { testEventsGetHandler } from './handlers/events-get.spec.js'; +import { testEventsQueryHandler } from './handlers/events-query.spec.js'; +import { testEventsQueryScenarios } from './scenarios/events-query.spec.js'; import { testMessagesGetHandler } from 
'./handlers/messages-get.spec.js'; import { testMessageStore } from './store/message-store.spec.js'; import { testPermissionsGrantHandler } from './handlers/permissions-grant.spec.js'; @@ -33,9 +36,11 @@ export class TestSuite { testDwnClass(); testMessageStore(); + testEventLog(); // handler tests testEventsGetHandler(); + testEventsQueryHandler(); testMessagesGetHandler(); testPermissionsGrantHandler(); testPermissionsRequestHandler(); @@ -49,5 +54,6 @@ export class TestSuite { // scenario tests testDelegatedGrantScenarios(); testEndToEndScenarios(); + testEventsQueryScenarios(); } } \ No newline at end of file diff --git a/tests/utils/filters.spec.ts b/tests/utils/filters.spec.ts new file mode 100644 index 000000000..97ef4ff65 --- /dev/null +++ b/tests/utils/filters.spec.ts @@ -0,0 +1,314 @@ +import type { Filter } from '../../src/types/query-types.js'; + +import { Time } from '../../src/utils/time.js'; +import { FilterSelector, FilterUtility } from '../../src/utils/filter.js'; + +import chaiAsPromised from 'chai-as-promised'; +import chai, { expect } from 'chai'; + + +chai.use(chaiAsPromised); + +describe('filters util', () => { + describe('FilterUtility', () => { + describe ('filter type', () => { + const filter: Filter = { + equal : 'to', + oneOf : [ 'these', 'items' ], + range : { gte: 10, lte: 20 }, + rangeGT : { gt: 10 }, + rangeGTE : { gte: 10 }, + rangeLT : { lt: 20 }, + rangeLTE : { lte: 20 }, + }; + + it('isEqualFilter', async () => { + const { equal, oneOf, range } = filter; + expect(FilterUtility.isEqualFilter(equal)).to.be.true; + expect(FilterUtility.isEqualFilter(oneOf)).to.be.false; + expect(FilterUtility.isEqualFilter(range)).to.be.false; + });; + + it('isRangeFilter', async () => { + const { equal, oneOf, range, rangeGT, rangeGTE, rangeLT, rangeLTE } = filter; + expect(FilterUtility.isRangeFilter(range)).to.be.true; + expect(FilterUtility.isRangeFilter(rangeGT)).to.be.true; + expect(FilterUtility.isRangeFilter(rangeGTE)).to.be.true; + 
expect(FilterUtility.isRangeFilter(rangeLT)).to.be.true; + expect(FilterUtility.isRangeFilter(rangeLTE)).to.be.true; + expect(FilterUtility.isRangeFilter(oneOf)).to.be.false; + expect(FilterUtility.isRangeFilter(equal)).to.be.false; + }); + + it('isOneOfFilter', async () => { + const { equal, oneOf, range } = filter; + expect(FilterUtility.isOneOfFilter(oneOf)).to.be.true; + expect(FilterUtility.isOneOfFilter(equal)).to.be.false; + expect(FilterUtility.isOneOfFilter(range)).to.be.false; + }); + }); + + describe('matchFilter', () => { + it('should match with EqualFilter', async () => { + const filters = [{ foo: 'bar' }]; + expect(FilterUtility.matchAnyFilter({ foo: 'bar' }, filters)).to.be.true; + expect(FilterUtility.matchAnyFilter({ foo: 'bar', bar: 'baz' }, filters)).to.be.true; + expect(FilterUtility.matchAnyFilter({ bar: 'baz' }, filters)).to.be.false; + }); + + it('should not match partial values with an EqualFilter', async () => { + const filters = [{ foo: 'bar' }]; + expect(FilterUtility.matchAnyFilter({ foo: 'barbaz' }, filters)).to.be.false; + }); + + it('should match with OneOfFilter', async () => { + const filters = [{ + a: [ 'a', 'b' ] + }]; + + expect(FilterUtility.matchAnyFilter({ 'a': 'a' }, filters)).to.be.true; + expect(FilterUtility.matchAnyFilter({ 'a': 'b' }, filters)).to.be.true; + expect(FilterUtility.matchAnyFilter({ 'a': 'c' }, filters)).to.be.false; + }); + + it('should match string within a RangeFilter', async () => { + const gteFilter = [{ + dateCreated: { + gte: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + } + }]; + + // test the equal to the desired range. + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + }, gteFilter)).to.be.true; + + // test greater than the desired range. + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 16 }) + }, gteFilter)).to.be.true; + + // test less than desired range. 
+ expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 10 }) + }, gteFilter)).to.be.false; + + const gtFilter = [{ + dateCreated: { + gt: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + } + }]; + // test the equal to + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + }, gtFilter)).to.be.false; + + // test greater than. + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 16 }) + }, gtFilter)).to.be.true; + + const lteFilter = [{ + dateCreated: { + lte: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + } + }]; + + // test the equal to the desired range. + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + }, lteFilter)).to.be.true; + + // test less than desired range. + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 13 }) + }, lteFilter)).to.be.true; + + // test greater than desired range. 
+ expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 16 }) + }, lteFilter)).to.be.false; + + const ltFilter = [{ + dateCreated: { + lt: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + } + }]; + + // checks less than + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 14 }) + }, ltFilter)).to.be.true; + + // checks equal to + expect(FilterUtility.matchAnyFilter({ + dateCreated: Time.createTimestamp({ year: 2023, month: 1, day: 15 }) + }, ltFilter)).to.be.false; + }); + + it('should match prefixed RangeFilter', async () => { + const filters = [{ + value: { + gte: 'foo' + } + }]; + + expect(FilterUtility.matchAnyFilter({ value: 'foobar' }, filters)).to.be.true; + }); + + it('should match suffixed RangeFilter', async () => { + const filters = [{ + foo: { + lte: 'bar' + } + }]; + + expect(FilterUtility.matchAnyFilter({ foo: 'bar' }, filters)).to.be.true; + expect(FilterUtility.matchAnyFilter({ foo: 'barbaz' }, filters)).to.be.false; + }); + + it('should match multiple properties', async () => { + const filters = [{ + foo : 'bar', + bar : 'baz' + }]; + expect(FilterUtility.matchAnyFilter({ foo: 'bar', bar: 'baz' }, filters)).to.be.true; + expect(FilterUtility.matchAnyFilter({ foo: 'baz', bar: 'baz' }, filters)).to.be.false; + }); + + it('should match with multiple filters', async () => { + const filters:Filter[] = [{ + foo : 'bar', + bar : 'baz' + },{ + foobar: 'baz' + }]; + + // match first filter + expect(FilterUtility.matchAnyFilter({ foo: 'bar', bar: 'baz' }, filters)).to.be.true; + // match second filter + expect(FilterUtility.matchAnyFilter({ foobar: 'baz', foo: 'bar' }, filters)).to.be.true; + // control no match + expect(FilterUtility.matchAnyFilter({ foo: 'bar' }, filters)).to.be.false; + }); + + it('should match anything if an empty array or empty filters are provided', async () => { + expect(FilterUtility.matchAnyFilter({ foo: 'bar', bar: 'baz' }, 
[])).to.be.true; + expect(FilterUtility.matchAnyFilter({ foobar: 'baz', foo: 'bar' }, [{}])).to.be.true; + }); + + describe('booleans', () => { + it('treats strings and boolean EqualFilter differently', async () => { + + const filters = [{ + foo: true + }]; + + expect(FilterUtility.matchAnyFilter({ foo: true }, filters)).to.be.true; + expect(FilterUtility.matchAnyFilter({ foo: 'true' }, filters)).to.be.false; + }); + + it('should return records that match provided boolean equality filter', async () => { + const boolTrueItem = { + schema : 'schema', + published : true, + }; + + const boolFalseItem = { + schema : 'schema', + published : false, + }; + + // control + expect(FilterUtility.matchAnyFilter(boolTrueItem, [{ published: true }])).to.be.true; + expect(FilterUtility.matchAnyFilter(boolTrueItem, [{ published: false }])).to.be.false; + expect(FilterUtility.matchAnyFilter(boolFalseItem, [{ published: false }])).to.be.true; + expect(FilterUtility.matchAnyFilter(boolFalseItem, [{ published: true }])).to.be.false; + }); + }); + + describe('numbers', () => { + }); + }); + + describe('convertRangeCriterion',() => { + it('converts `from` to `gte`', async () => { + const inputFilter = { + from: 'from-value', + }; + expect(FilterUtility.convertRangeCriterion(inputFilter)).to.deep.equal({ gte: 'from-value' }); + }); + + it('converts `to` to `lt` ', async () => { + const inputFilter = { + to: 'to-value', + }; + expect(FilterUtility.convertRangeCriterion(inputFilter)).to.deep.equal({ lt: 'to-value' }); + }); + + it('converts `from` and `to` to `gte` and `lt`, respectively', async () => { + const inputFilter = { + from : 'from-value', + to : 'to-value' + }; + expect(FilterUtility.convertRangeCriterion(inputFilter)).to.deep.equal({ gte: 'from-value', lt: 'to-value' }); + }); + }); + + describe('reduceFilter', () => { + it('returns incoming filter if it only has one or no properties', async () => { + expect(FilterSelector.reduceFilter({ some: 'property' })).to.deep.equal({ 
some: 'property' }); + expect(FilterSelector.reduceFilter({})).to.deep.equal({}); + }); + + it('prioritizes known properties', async () => { + // recordId + const inputFilter:Filter = { + recordId : 'some-record-id', + attester : 'some-attester', + parentId : 'some-parent-id', + recipient : 'some-recipient', + contextId : 'some-context-id', + protocolPath : 'some-protocol-path', + schema : 'some-schema', + protocol : 'some-protocol', + some : 'property' + }; + + // go through in order of priority deleting the property after checking for it + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ recordId: 'some-record-id' }); + delete inputFilter.recordId; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ attester: 'some-attester' }); + delete inputFilter.attester; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ parentId: 'some-parent-id' }); + delete inputFilter.parentId; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ recipient: 'some-recipient' }); + delete inputFilter.recipient; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ contextId: 'some-context-id' }); + delete inputFilter.contextId; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ protocolPath: 'some-protocol-path' }); + delete inputFilter.protocolPath; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ schema: 'some-schema' }); + delete inputFilter.schema; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ protocol: 'some-protocol' }); + }); + + it('returns first filter if no known filters exist', async () => { + const inputFilter = { + foo : 'bar', + bar : 'baz', + baz : 'buzz' + }; + + expect(FilterSelector.reduceFilter(inputFilter)).to.deep.equal({ foo: 'bar' }); + }); + }); + }); +}); \ No newline at end of file diff --git a/tests/utils/test-data-generator.ts b/tests/utils/test-data-generator.ts index e0af88aff..2c81cee0e 100644 --- 
a/tests/utils/test-data-generator.ts +++ b/tests/utils/test-data-generator.ts @@ -1,8 +1,10 @@ import type { DidResolutionResult } from '../../src/types/did-types.js'; +import type { EventsQueryOptions } from '../../src/interfaces/events-query.js'; import type { GeneralJws } from '../../src/types/jws-types.js'; import type { Readable } from 'readable-stream'; import type { RecordsFilter } from '../../src/types/records-types.js'; import type { AuthorizationModel, Pagination } from '../../src/types/message-types.js'; +import type { EventsQueryFilter, EventsQueryMessage } from '../../src/types/event-types.js'; import type { CreateFromOptions, @@ -39,6 +41,7 @@ import type { PrivateJwk, PublicJwk } from '../../src/types/jose-types.js'; import * as cbor from '@ipld/dag-cbor'; import { CID } from 'multiformats/cid'; import { DataStream } from '../../src/utils/data-stream.js'; +import { EventsQuery } from '../../src/interfaces/events-query.js'; import { PermissionsGrant } from '../../src/interfaces/permissions-grant.js'; import { PermissionsRequest } from '../../src/interfaces/permissions-request.js'; import { PermissionsRevoke } from '../../src/interfaces/permissions-revoke.js'; @@ -236,7 +239,7 @@ export type GeneratePermissionsRevokeOutput = { export type GenerateEventsGetInput = { author?: Persona; - watermark?: string; + cursor?: string; }; export type GenerateEventsGetOutput = { @@ -245,6 +248,18 @@ export type GenerateEventsGetOutput = { message: EventsGetMessage; }; +export type GenerateEventsQueryInput = { + author?: Persona; + filters: EventsQueryFilter[]; + cursor?: string; +}; + +export type GenerateEventsQueryOutput = { + author: Persona; + eventsQuery: EventsQuery; + message: EventsQueryMessage; +}; + export type GenerateMessagesGetInput = { author?: Persona; messageCids: string[] @@ -730,8 +745,8 @@ export class TestDataGenerator { const signer = Jws.createSigner(author); const options: EventsGetOptions = { signer }; - if (input?.watermark) { - 
options.watermark = input.watermark; + if (input?.cursor) { + options.cursor = input.cursor; } const eventsGet = await EventsGet.create(options); @@ -743,6 +758,22 @@ export class TestDataGenerator { }; } + public static async generateEventsQuery(input: GenerateEventsQueryInput): Promise { + const { filters, cursor } = input; + const author = input.author ?? await TestDataGenerator.generatePersona(); + const signer = Jws.createSigner(author); + + const options: EventsQueryOptions = { signer, filters, cursor }; + + const eventsQuery = await EventsQuery.create(options); + + return { + author, + eventsQuery, + message: eventsQuery.message + }; + } + public static async generateMessagesGet(input: GenerateMessagesGetInput): Promise { const author = input?.author ?? await TestDataGenerator.generatePersona(); const signer = Jws.createSigner(author);