diff --git a/.vscode/extensions.json b/.vscode/extensions.json
index dcc97be..5f44864 100644
--- a/.vscode/extensions.json
+++ b/.vscode/extensions.json
@@ -1,5 +1,3 @@
 {
-    "recommendations": [
-        "biomejs.biome"
-    ]
+    "recommendations": ["biomejs.biome"]
 }
diff --git a/codegen.ts b/codegen.ts
index 3db02a1..4ce7bce 100644
--- a/codegen.ts
+++ b/codegen.ts
@@ -1,43 +1,43 @@
 import { CodegenConfig } from "@graphql-codegen/cli";
 
 const config: CodegenConfig = {
-  schema: "./schema.graphql", // change to endpoint url after release
-  documents: "./src/graphql/*.graphql",
-  ignoreNoDocuments: true, // for better experience with the watcher
-  generates: {
-    "./src/generated/": {
-      preset: "client",
-      plugins: [],
-      config: {
-        scalars: {
-          ID: {
-            input: "string | number",
-            output: "string",
-          },
-          Address: {
-            input: "string",
-            output: "string",
-          },
-          BigInt: {
-            input: "string",
-            output: "string",
-          },
-          TxID: {
-            input: "string",
-            output: "string",
-          },
-          Long: {
-            input: "number",
-            output: "number",
-          },
-          Guid: {
-            input: "string",
-            output: "string",
-          },
+    schema: "./schema.graphql", // change to endpoint url after release
+    documents: "./src/graphql/*.graphql",
+    ignoreNoDocuments: true, // for better experience with the watcher
+    generates: {
+        "./src/generated/": {
+            preset: "client",
+            plugins: [],
+            config: {
+                scalars: {
+                    ID: {
+                        input: "string | number",
+                        output: "string",
+                    },
+                    Address: {
+                        input: "string",
+                        output: "string",
+                    },
+                    BigInt: {
+                        input: "string",
+                        output: "string",
+                    },
+                    TxID: {
+                        input: "string",
+                        output: "string",
+                    },
+                    Long: {
+                        input: "number",
+                        output: "number",
+                    },
+                    Guid: {
+                        input: "string",
+                        output: "string",
+                    },
+                },
+            },
         },
-      },
     },
-  },
 };
 
 export default config;
diff --git a/jest.config.js b/jest.config.js
index b413e10..b0b26c2 100644
--- a/jest.config.js
+++ b/jest.config.js
@@ -1,5 +1,5 @@
 /** @type {import('ts-jest').JestConfigWithTsJest} */
 module.exports = {
-  preset: 'ts-jest',
-  testEnvironment: 'node',
-};
\ No newline at end of file
+    preset: "ts-jest",
+    testEnvironment: "node",
+};
diff --git a/src/events/assets-transferred.ts b/src/events/assets-transferred.ts
index 6cc587f..836b1a6 100644
--- a/src/events/assets-transferred.ts
+++ b/src/events/assets-transferred.ts
@@ -3,7 +3,10 @@
 import { IHeadlessGraphQLClient } from "../headless-graphql-client";
 import { AssetTransferredEvent } from "../types/asset-transferred-event";
 import { TransactionLocation } from "../types/transaction-location";
 
-export type ValidatedAssetTransferredEvent = Omit<AssetTransferredEvent, "memo"> & {
+export type ValidatedAssetTransferredEvent = Omit<
+    AssetTransferredEvent,
+    "memo"
+> & {
     targetAddress: Address;
 };
diff --git a/src/events/garage-unload.ts b/src/events/garage-unload.ts
index 4ee7e09..8d15f2a 100644
--- a/src/events/garage-unload.ts
+++ b/src/events/garage-unload.ts
@@ -44,7 +44,7 @@ export async function getGarageUnloadEvents(
         try {
             if (event.memo === null) {
                 console.error(
-                    `Skip ${event.txId} because event.memo field is required but null.`
+                    `Skip ${event.txId} because event.memo field is required but null.`,
                 );
                 continue;
             }
diff --git a/src/headless-graphql-client.ts b/src/headless-graphql-client.ts
index 42c41a2..e611fda 100644
--- a/src/headless-graphql-client.ts
+++ b/src/headless-graphql-client.ts
@@ -1,5 +1,11 @@
 import { Address } from "@planetarium/account";
-import { BencodexDictionary, Dictionary, Value, decode, isDictionary } from "@planetarium/bencodex";
+import {
+    BencodexDictionary,
+    Dictionary,
+    Value,
+    decode,
+    isDictionary,
+} from "@planetarium/bencodex";
 import { Currency, FungibleAssetValue } from "@planetarium/tx";
 import { Client, fetchExchange, mapExchange } from "@urql/core";
 import { retryExchange } from "@urql/exchange-retry";
@@ -42,7 +48,7 @@ export interface IHeadlessGraphQLClient {
 }
 
 function isArray<T>(obj: unknown): obj is T[] {
-    return Array.isArray(obj)
+    return Array.isArray(obj);
 }
 
 export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
@@ -123,13 +129,15 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
 
         return data.transaction.ncTransactions
             .map((tx) => {
-                if (tx === null || tx.actions.length > 1 || tx.actions[0] === null) {
+                if (
+                    tx === null ||
+                    tx.actions.length > 1 ||
+                    tx.actions[0] === null
+                ) {
                     return null;
                 }
 
-                const action = decode(
-                    Buffer.from(tx.actions[0].raw, "hex"),
-                );
+                const action = decode(Buffer.from(tx.actions[0].raw, "hex"));
                 if (!isDictionary(action)) {
                     return null;
                 }
@@ -144,7 +152,8 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
                     return null;
                 }
 
-                if (!(payload[0] instanceof Uint8Array) ||
+                if (
+                    !(payload[0] instanceof Uint8Array) ||
                     !(isArray(payload[1]) || payload[1] === null) ||
                     !(isArray(payload[2]) || payload[2] === null) ||
                     !(typeof payload[3] === "string" || payload[3] === null)
@@ -152,14 +161,26 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
                     return null;
                 }
 
-                function isValidFungibleAssetValuePayload(x: Value): x is [Uint8Array, [BencodexDictionary, bigint]] {
-                    return isArray(x) && x[0] instanceof Uint8Array &&
-                        isArray(x[1]) && isDictionary(x[1][0]) && typeof x[1][1] === "bigint";
+                function isValidFungibleAssetValuePayload(
+                    x: Value,
+                ): x is [Uint8Array, [BencodexDictionary, bigint]] {
+                    return (
+                        isArray(x) &&
+                        x[0] instanceof Uint8Array &&
+                        isArray(x[1]) &&
+                        isDictionary(x[1][0]) &&
+                        typeof x[1][1] === "bigint"
+                    );
                 }
 
-                function isValidFungibleItemPayload(x: Value): x is [Uint8Array, bigint] {
-                    return isArray(x) && x[0] instanceof Uint8Array &&
-                        typeof x[1] === "bigint";
+                function isValidFungibleItemPayload(
+                    x: Value,
+                ): x is [Uint8Array, bigint] {
+                    return (
+                        isArray(x) &&
+                        x[0] instanceof Uint8Array &&
+                        typeof x[1] === "bigint"
+                    );
                 }
 
                 const recipientAvatarAddress = Address.fromBytes(payload[0]);
@@ -208,7 +229,9 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
     }
 
     async getBlockIndex(blockHash: BlockHash): Promise<number> {
-        const response = await this._client.query(GetBlockIndexDocument, { hash: blockHash });
+        const response = await this._client.query(GetBlockIndexDocument, {
+            hash: blockHash,
+        });
         const block = response.data?.chainQuery.blockQuery?.block;
         if (!block) {
             throw new Error("Failed to fetch data through GraphQL.");
@@ -231,9 +254,9 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
         const response = await this._client.query(GetBlockHashDocument, {
             index,
         });
-          const block = response.data?.chainQuery.blockQuery?.block;
+        const block = response.data?.chainQuery.blockQuery?.block;
         if (!block) {
-              throw new Error("Failed to fetch data through GraphQL.");
+            throw new Error("Failed to fetch data through GraphQL.");
         }
 
         return block.hash;
@@ -247,17 +270,17 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
             blockIndex,
         });
 
-
        const transactions = response.data?.transaction.ncTransactions;
         if (!transactions) {
             throw new Error("Invalid operation.");
         }
 
-        return transactions.filter(x => x !== null)
-            .filter(x => x.actions.every(v => v !== null))
+        return transactions
+            .filter((x) => x !== null)
+            .filter((x) => x.actions.every((v) => v !== null))
             .map((tx) => {
                 const txId = tx.id;
-                const actions = tx.actions.filter(x => x !== null);
+                const actions = tx.actions.filter((x) => x !== null);
                 const action = decode(
                     Buffer.from(actions[0].raw, "hex"),
                 ) as Dictionary;
@@ -294,7 +317,9 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
     }
 
     async getNextTxNonce(address: string): Promise<number> {
-        const response = await this._client.query(GetNextTxNonceDocument, { address });
+        const response = await this._client.query(GetNextTxNonceDocument, {
+            address,
+        });
         if (!response.data) {
             throw new Error("Failed to fetch data through GraphQL.");
         }
@@ -318,19 +343,24 @@ export class HeadlessGraphQLClient implements IHeadlessGraphQLClient {
     }
 
     async stageTransaction(payload: string): Promise<TxId> {
-        const response = await this._client.mutation(StageTransactionDocument, { payload });
+        const response = await this._client.mutation(StageTransactionDocument, {
+            payload,
+        });
         const txid = response.data?.stageTransaction;
         if (!txid) {
-              throw new Error("Failed to stage transaction.");
+            throw new Error("Failed to stage transaction.");
         }
 
         return txid;
     }
 
     async getTransactionResult(txId: TxId): Promise<TransactionResult> {
-        const response = await this._client.query(GetTransactionResultDocument, { txId });
+        const response = await this._client.query(
+            GetTransactionResultDocument,
+            { txId },
+        );
         if (!response.data) {
-              throw new Error("Failed to fetch data through GraphQL.");
+            throw new Error("Failed to fetch data through GraphQL.");
         }
 
         return response.data.transaction.transactionResult;
diff --git a/src/slack/messages/bridge-event.ts b/src/slack/messages/bridge-event.ts
index 767e7d7..4c59f96 100644
--- a/src/slack/messages/bridge-event.ts
+++ b/src/slack/messages/bridge-event.ts
@@ -1,6 +1,6 @@
 import { ISlackMessage, SlackMessageSymbol } from ".";
 import { TxIdWithNetwork, ncscanTxLinkGetter } from "./utils";
-import { ResponseType } from "@prisma/client"
+import { ResponseType } from "@prisma/client";
 
 export class BridgeEvent implements ISlackMessage {
     [SlackMessageSymbol] = true as const;
diff --git a/src/sync/upstream/index.ts b/src/sync/upstream/index.ts
index a0b570c..9d8c09a 100644
--- a/src/sync/upstream/index.ts
+++ b/src/sync/upstream/index.ts
@@ -33,178 +33,165 @@ export async function processUpstreamEvents(
         await downstreamGQLClient.getGenesisHash(),
         "hex",
     );
-    const [isSkipped, responseTransactions] =
-        await client.$transaction(
-            async (tx) => {
-                const nextBlockIndex = await getNextBlockIndex(
-                    tx,
-                    upstreamNetworkId,
-                    defaultStartBlockIndex,
-                );
-
-                const tipIndex = await upstreamGQLClient.getTipIndex();
+    const [isSkipped, responseTransactions] = await client.$transaction(
+        async (tx) => {
+            const nextBlockIndex = await getNextBlockIndex(
+                tx,
+                upstreamNetworkId,
+                defaultStartBlockIndex,
+            );
 
-                if (nextBlockIndex >= tipIndex) {
-                    console.debug(
-                        "[sync][upstream] skip. nextBlockIndex / tipIndex",
-                        nextBlockIndex,
-                        tipIndex,
-                    );
-                    return [true, [], []];
-                }
+            const tipIndex = await upstreamGQLClient.getTipIndex();
+            if (nextBlockIndex >= tipIndex) {
                 console.debug(
-                    "[sync][upstream] nextBlockIndex",
+                    "[sync][upstream] skip. nextBlockIndex / tipIndex",
                     nextBlockIndex,
-                );
-                console.log(
-                    "[sync][upstream] networkId",
-                    upstreamGQLClient.getPlanetID(),
-                );
+                    tipIndex,
+                );
+                return [true, [], []];
+            }
+
+            console.debug("[sync][upstream] nextBlockIndex", nextBlockIndex);
+            console.log(
+                "[sync][upstream] networkId",
+                upstreamGQLClient.getPlanetID(),
+            );
+
+            await tx.block.create({
+                data: {
+                    networkId: upstreamGQLClient.getPlanetID(),
+                    index: nextBlockIndex,
+                },
+            });
+
+            console.debug("[sync][upstream] block row created.");
+
+            const unloadGarageEvents = await getGarageUnloadEvents(
+                upstreamGQLClient,
+                agentAddress,
+                avatarAddress,
+                Number(nextBlockIndex),
+            );
+            const transferAssetEvents = await getAssetTransferredEvents(
+                upstreamGQLClient,
+                agentAddress,
+                Number(nextBlockIndex),
+            );
+
+            console.debug(
+                "[sync][upstream] unloadGarageEvents.length",
+                unloadGarageEvents.length,
+            );
+            console.debug(
+                "[sync][upstream] unloadGarageEvents",
+                unloadGarageEvents,
+            );
+            console.debug(
+                "[sync][upstream] transferAssetEvents.length",
+                transferAssetEvents.length,
+            );
+            console.debug(
+                "[sync][upstream] transferAssetEvents",
+                transferAssetEvents,
+            );
+
+            await tx.requestTransaction.createMany({
+                data: [
+                    ...unloadGarageEvents.map((ev) => {
+                        return {
+                            blockIndex: nextBlockIndex,
+                            networkId: upstreamNetworkId,
+                            sender: Address.fromHex(ev.signer, true).toString(),
+                            type: RequestType.UNLOAD_FROM_MY_GARAGES,
+                            category: RequestCategory.PROCESS,
+                            id: ev.txId,
+                        };
+                    }),
+                    ...transferAssetEvents.map((ev) => {
+                        return {
+                            blockIndex: nextBlockIndex,
+                            networkId: upstreamNetworkId,
+                            type: RequestType.TRANSFER_ASSET,
+                            sender: ev.sender.toString(),
+                            category: RequestCategory.PROCESS,
+                            id: ev.txId,
+                        };
+                    }),
+                ],
+            });
 
-                await tx.block.create({
-                    data: {
-                        networkId: upstreamGQLClient.getPlanetID(),
-                        index: nextBlockIndex,
-                    },
-                });
+            console.debug("[sync][upstream] request transaction rows created.");
 
-                console.debug("[sync][upstream] block row created.");
+            const downstreamNextTxNonce = await getNextTxNonce(
+                tx,
+                downstreamGQLClient,
+                downstreamAccount,
+            );
 
-                const unloadGarageEvents = await getGarageUnloadEvents(
-                    upstreamGQLClient,
-                    agentAddress,
-                    avatarAddress,
-                    Number(nextBlockIndex),
-                );
-                const transferAssetEvents = await getAssetTransferredEvents(
-                    upstreamGQLClient,
-                    agentAddress,
-                    Number(nextBlockIndex),
-                );
+            console.debug(
+                "[sync][upstream] downstreamNextTxNonce",
+                downstreamNextTxNonce,
+            );
 
-                console.debug(
-                    "[sync][upstream] unloadGarageEvents.length",
-                    unloadGarageEvents.length,
-                );
-                console.debug(
-                    "[sync][upstream] unloadGarageEvents",
+            const responseTransactions = [
+                ...(await responseTransactionsFromGarageEvents(
                     unloadGarageEvents,
-                );
-                console.debug(
-                    "[sync][upstream] transferAssetEvents.length",
-                    transferAssetEvents.length,
-                );
-                console.debug(
-                    "[sync][upstream] transferAssetEvents",
-                    transferAssetEvents,
-                );
-
-                await tx.requestTransaction.createMany({
-                    data: [
-                        ...unloadGarageEvents.map((ev) => {
-                            return {
-                                blockIndex: nextBlockIndex,
-                                networkId: upstreamNetworkId,
-                                sender: Address.fromHex(
-                                    ev.signer,
-                                    true,
-                                ).toString(),
-                                type: RequestType.UNLOAD_FROM_MY_GARAGES,
-                                category: RequestCategory.PROCESS,
-                                id: ev.txId,
-                            };
-                        }),
-                        ...transferAssetEvents.map((ev) => {
-                            return {
-                                blockIndex: nextBlockIndex,
-                                networkId: upstreamNetworkId,
-                                type: RequestType.TRANSFER_ASSET,
-                                sender: ev.sender.toString(),
-                                category: RequestCategory.PROCESS,
-                                id: ev.txId,
-                            };
-                        }),
-                    ],
-                });
-
-                console.debug(
-                    "[sync][upstream] request transaction rows created.",
-                );
-
-                const downstreamNextTxNonce = await getNextTxNonce(
-                    tx,
-                    downstreamGQLClient,
                     downstreamAccount,
-                );
-
-                console.debug(
-                    "[sync][upstream] downstreamNextTxNonce",
+                    downstreamNetworkId,
+                    downstreamGenesisHash,
                     downstreamNextTxNonce,
-                );
-
-                const responseTransactions = [
-                    ...(await responseTransactionsFromGarageEvents(
-                        unloadGarageEvents,
-                        downstreamAccount,
-                        downstreamNetworkId,
-                        downstreamGenesisHash,
-                        downstreamNextTxNonce,
-                        {
-                            agentAddress,
-                            avatarAddress,
-                        },
-                        )),
-                    ...(await responseTransactionsFromTransferEvents(
-                        transferAssetEvents,
-                        downstreamAccount,
-                        downstreamNetworkId,
-                        downstreamGenesisHash,
-                        downstreamNextTxNonce +
-                            BigInt(unloadGarageEvents.length),
-                    )),
-                ];
-
-                console.debug(
-                    "[sync][upstream] responseTransactions.length",
-                    responseTransactions.length,
-                );
-                console.debug(
-                    "[sync][upstream] responseTransactions",
-                    responseTransactions,
-                );
-
-                const responseTransactionsDBPayload = responseTransactions.map(
-                    ({ signedTx, requestTxId, networkId, type }) => {
-                        const serializedTx = encode(encodeSignedTx(signedTx));
-                        const txid = getTxId(serializedTx);
-                        return {
-                            id: txid,
-                            nonce: signedTx.nonce,
-                            raw: Buffer.from(serializedTx),
-                            type,
-                            networkId: networkId,
-                            requestTransactionId: requestTxId,
-                        };
+                    {
+                        agentAddress,
+                        avatarAddress,
                     },
-                );
-                await tx.responseTransaction.createMany({
-                    data: responseTransactionsDBPayload,
-                });
-
-                console.debug(
-                    "[sync][upstream] response transaction rows created.",
-                );
-
-                return [
-                    false,
-                    responseTransactionsDBPayload,
-                ];
-            },
-            {
-                timeout: 60 * 1000,
-            },
-        );
+                )),
+                ...(await responseTransactionsFromTransferEvents(
+                    transferAssetEvents,
+                    downstreamAccount,
+                    downstreamNetworkId,
+                    downstreamGenesisHash,
+                    downstreamNextTxNonce + BigInt(unloadGarageEvents.length),
+                )),
+            ];
+
+            console.debug(
+                "[sync][upstream] responseTransactions.length",
+                responseTransactions.length,
+            );
+            console.debug(
+                "[sync][upstream] responseTransactions",
+                responseTransactions,
+            );
+
+            const responseTransactionsDBPayload = responseTransactions.map(
+                ({ signedTx, requestTxId, networkId, type }) => {
+                    const serializedTx = encode(encodeSignedTx(signedTx));
+                    const txid = getTxId(serializedTx);
+                    return {
+                        id: txid,
+                        nonce: signedTx.nonce,
+                        raw: Buffer.from(serializedTx),
+                        type,
+                        networkId: networkId,
+                        requestTransactionId: requestTxId,
+                    };
+                },
+            );
+            await tx.responseTransaction.createMany({
+                data: responseTransactionsDBPayload,
+            });
+
+            console.debug(
+                "[sync][upstream] response transaction rows created.",
+            );
+
+            return [false, responseTransactionsDBPayload];
+        },
+        {
+            timeout: 60 * 1000,
+        },
+    );
 
     if (isSkipped) {
         await delay(1000);