diff --git a/src/consensus/mina-consensus.ts b/src/consensus/mina-consensus.ts index ebe66ed..8e1be47 100644 --- a/src/consensus/mina-consensus.ts +++ b/src/consensus/mina-consensus.ts @@ -1,16 +1,6 @@ import { blake2bHex } from 'blakejs'; import { type BlockInfo } from 'src/models/types'; -export { select, findAllIndexes, getAllPredicate, filterBestTip }; - -function findAllIndexes(arr: T[], target: T): number[] { - const indexes: number[] = []; - arr.forEach((element, index) => { - if (element === target) { - indexes.push(index); - } - }); - return indexes; -} +export { select, getAllPredicate, filterBestTip }; function getAllPredicate(array: T[], predicate: (arg: T) => boolean) { const data: T[] = []; diff --git a/src/db/archive-node-adapter/actions-service.ts b/src/db/archive-node-adapter/actions-service.ts index e375c57..5e83fba 100644 --- a/src/db/archive-node-adapter/actions-service.ts +++ b/src/db/archive-node-adapter/actions-service.ts @@ -3,9 +3,10 @@ import { BlockStatusFilter, BlocksWithTransactionsMap, DEFAULT_TOKEN_ID, - FieldElementIdWithValueMap, + FieldIdValuePairs, Action, Actions, + Transaction, } from '../../models/types'; import { ActionFilterOptionsInput } from 'src/resolvers-types'; import { TracingService } from 'src/tracing/tracing'; @@ -31,25 +32,28 @@ class ActionsService { } async getActions(input: ActionFilterOptionsInput): Promise { - const actionsData = await this.getActionData(input); - return actionsData ?? []; + return (await this.getActionData(input)) ?? []; } async getActionData(input: ActionFilterOptionsInput): Promise { + // Request action zkApp info from the Archive Node Database this.tracingService.startSpan('Actions SQL'); const rows = await this.executeActionsQuery(input); this.tracingService.endSpan(); this.tracingService.startSpan('Actions Processing'); + // Partition the rows into a map where the keys are element ids and the values are field values. const elementIdFieldValues = getElementIdFieldValues(rows); + // Partition the rows into a map where the keys are block hashes and the values are maps of transaction hashes to array of rows. const blocksWithTransactions = partitionBlocks(rows); - const actionsData = this.deriveActionsFromBlocks( + // Map the rows into Action instances. + const actionsData = this.blocksToActions( blocksWithTransactions, elementIdFieldValues ); this.tracingService.endSpan(); - const sortedActionsData = sortAndFilterBlocks(actionsData); - return sortedActionsData; + // Sort and filter the actions. + return sortAndFilterBlocks(actionsData); } async executeActionsQuery(input: ActionFilterOptionsInput) { @@ -72,15 +76,26 @@ class ActionsService { ); } - deriveActionsFromBlocks( + /** + * This function is used to map the rows into Action instances. + * The rows are partitioned into a map where the keys are block hashes and the values are maps of transaction hashes to array of rows. 
+ * We iterate over each block entry, use the first transaction row to build the block info and action state, and hand the block's transactions to {@link transactionToActions} to be mapped into Action instances. + * + * @param blocksWithTransactions A map of block hashes to maps of transaction hashes to arrays of rows. + * @param elementIdFieldValues A map of element ids to the field values they reference. + * @returns A list of Actions, one entry per block. + */ + blocksToActions( blocksWithTransactions: BlocksWithTransactionsMap, - elementIdFieldValues: FieldElementIdWithValueMap + elementIdFieldValues: FieldIdValuePairs ) { const actions: Actions = []; - const blockMapEntries = Array.from(blocksWithTransactions.entries()); - for (let i = 0; i < blockMapEntries.length; i++) { - const transactions = blockMapEntries[i][1]; - const transaction = transactions.values().next().value[0]; + const blockTransactionEntries = Array.from( + blocksWithTransactions.entries() + ); + for (let i = 0; i < blockTransactionEntries.length; i++) { + const transactions = blockTransactionEntries[i][1]; + const transaction = transactions.values().next().value[0]; // Get the first transaction row in the block. const blockInfo = createBlockInfo(transaction); const { action_state_value1, @@ -90,19 +105,13 @@ class ActionsService { action_state_value5, } = transaction; - const actionsData: Action[][] = []; - for (const [, transaction] of transactions) { - const filteredBlocks = removeRedundantEmittedFields(transaction); - const actionData = mapActionOrEvent( - 'action', - filteredBlocks, - elementIdFieldValues - ) as Action[]; - actionsData.push(actionData); - } + const actionsData = this.transactionToActions( + elementIdFieldValues, + transactions + ); actions.push({ blockInfo, - actionData: actionsData.flat(), + actionData: actionsData, actionState: { /* eslint-disable */ actionStateOne: action_state_value1!, @@ -116,4 +125,22 @@ class ActionsService { } return actions; } + + transactionToActions( + elementIdFieldValues: FieldIdValuePairs, + transactions: Transaction + ) { + const actions: Action[][] = []; + + for (const [, transactionRow] of transactions) { + const filteredTransactions = removeRedundantEmittedFields(transactionRow); + const actionData = mapActionOrEvent( + 'action', + filteredTransactions, + elementIdFieldValues + ) as Action[]; + actions.push(actionData); + } + return actions.flat(); + } } diff --git a/src/db/archive-node-adapter/events-service.ts b/src/db/archive-node-adapter/events-service.ts index 27c0d1b..7c0157b 100644 --- a/src/db/archive-node-adapter/events-service.ts +++ b/src/db/archive-node-adapter/events-service.ts @@ -3,7 +3,7 @@ import { BlockStatusFilter, BlocksWithTransactionsMap, DEFAULT_TOKEN_ID, - FieldElementIdWithValueMap, + FieldIdValuePairs, Events, Event, } from '../../models/types'; @@ -34,8 +34,7 @@ class EventsService { } async getEvents(input: EventFilterOptionsInput): Promise { - const eventsData = await this.getEventData(input); - return eventsData ?? []; + return (await this.getEventData(input)) ?? 
[]; } async getEventData(input: EventFilterOptionsInput): Promise { @@ -51,7 +50,7 @@ class EventsService { // print out map // console.log(util.inspect(blocksWithTransactions, false, null, true)); - const eventsData = this.deriveEventsFromBlocks( + const eventsData = this.blocksToEvents( blocksWithTransactions, elementIdFieldValues ); @@ -80,9 +79,9 @@ class EventsService { ); } - deriveEventsFromBlocks( + blocksToEvents( blocksWithTransactions: BlocksWithTransactionsMap, - elementIdFieldValues: FieldElementIdWithValueMap + elementIdFieldValues: FieldIdValuePairs ) { const events: Events = []; const blockMapEntries = Array.from(blocksWithTransactions.entries()); diff --git a/src/db/archive-node-adapter/queries.ts b/src/db/archive-node-adapter/queries.ts index 5d04b85..104d828 100644 --- a/src/db/archive-node-adapter/queries.ts +++ b/src/db/archive-node-adapter/queries.ts @@ -73,9 +73,7 @@ function blocksAccessedCTE( SELECT requesting_zkapp_account_identifier_id, block_id, - account_identifier_id, zkapp_id, - id AS account_access_id, state_hash, parent_hash, height, @@ -110,14 +108,11 @@ function emittedZkAppCommandsCTE(db_client: postgres.Sql) { SELECT blocks_accessed.*, zkcu.id AS zkapp_account_update_id, - zkapp_fee_payer_body_id, zkapp_account_updates_ids, authorization_kind, status, memo, hash, - body_id, - events_id, actions_id FROM blocks_accessed diff --git a/src/db/archive-node-adapter/utils.ts b/src/db/archive-node-adapter/utils.ts index 9169e49..ca2c1a7 100644 --- a/src/db/archive-node-adapter/utils.ts +++ b/src/db/archive-node-adapter/utils.ts @@ -4,7 +4,7 @@ import { Event, ArchiveNodeDatabaseRow, BlocksWithTransactionsMap, - FieldElementIdWithValueMap, + FieldIdValuePairs, BlockInfo, } from 'src/models/types'; import { @@ -12,7 +12,7 @@ import { createEvent, createAction, } from '../../models/utils'; -import { filterBestTip, findAllIndexes } from '../../consensus/mina-consensus'; +import { filterBestTip } from '../../consensus/mina-consensus'; export { partitionBlocks, @@ -22,73 +22,64 @@ export { sortAndFilterBlocks, }; +/** + * @fileoverview + * This file contains utility functions used by the {@link ArchiveNodeAdapter} class. When we query for zkApp data from the Mina Archive Node Database, we get back an array of rows. + * Each row contains a block hash, a transaction hash, and an array of element ids. The element ids are used to look up the corresponding field values in the Archive Node. The field values are then used to create Action or Event instances. + * A field value is referenced by an element id. For example, the element id '1' might correspond to the field value '1' and the element id '2' might correspond to the field value '2'. These element ids are what's used to look up the field values. + * Because many rows can represent the same block, a fair amount of processing is needed to get the data into a usable format. + * The functions in this file perform this processing. They are described in more detail below. + */ + /** * Partitions the given rows into a map where the keys are block hashes * and the values are maps of transaction hashes to array of rows. * - * For each row, if a block with the same block hash exists in the map, - * it adds the row to the corresponding transaction in the map. - * If the block does not exist, it creates a new map entry with the block hash. - * - * If the transaction does not exist within a block, it creates a new transaction entry with the transaction hash. 
- * * @param rows The array of rows to be partitioned. * @returns A map where the keys are block hashes and the values are maps of transaction hashes to array of rows. */ -function partitionBlocks(rows: postgres.RowList) { +function partitionBlocks( + rows: ArchiveNodeDatabaseRow[] +): BlocksWithTransactionsMap { const blocks: BlocksWithTransactionsMap = new Map(); if (rows.length === 0) return blocks; for (let i = 0; i < rows.length; i++) { - const { state_hash: blockHash, hash: transactionHash } = rows[i]; - const blockData = blocks.get(blockHash); + const currentRow = rows[i]; + const { state_hash: blockHash, hash: transactionHash } = currentRow; + const existingBlock = blocks.get(blockHash); - if (blockData === undefined) { - const firstEntry = new Map(); - firstEntry.set(transactionHash, [rows[i]]); - blocks.set(blockHash, firstEntry); + if (existingBlock === undefined) { + // If the block does not exist in the map, create a new entry + const newTransactionEntry = new Map([[transactionHash, [currentRow]]]); + blocks.set(blockHash, newTransactionEntry); } else { - const blockDataRows = blockData.get(transactionHash); - if (blockDataRows) { - blockDataRows.push(rows[i]); + const existingTransactionRows = existingBlock.get(transactionHash); + if (existingTransactionRows) { + // If the transaction exists in the block, add the row to the transaction + existingTransactionRows.push(currentRow); } else { - blockData.set(transactionHash, [rows[i]]); + // If the transaction does not exist in the block, create a new entry + existingBlock.set(transactionHash, [currentRow]); } } } - return sortParitionedBlocks(blocks); -} - -function sortParitionedBlocks( - blocks: BlocksWithTransactionsMap -): BlocksWithTransactionsMap { - const sortedBlocks: BlocksWithTransactionsMap = new Map(); - for (const [blockHash, transactions] of blocks) { - const sortedTransactions = new Map( - [...transactions.entries()].sort((a, b) => { - const aHeight = a[1][0].height; - const bHeight = b[1][0].height; - return Number(bHeight) - Number(aHeight); - }) - ); - sortedBlocks.set(blockHash, sortedTransactions); - } - return sortedBlocks; + return blocks; } /** - * Extracts 'id' and 'field' properties from each row in the provided array and maps them together. + * Extracts 'id' and 'field' properties from each {@link ArchiveNodeDatabaseRow} in the provided array and maps them together. * - * This function iterates over each row in the given array, and for each row, it retrieves - * the 'id' and 'field' properties. It then stores these values in a Map where the key is - * the stringified 'id' and the value is the 'field'. + * Each id represents a specific field value in the Archive Node. + * Since the id field is what's returned in the SQL output, we need to use it to look up the corresponding field value it references. + * Thus, we create a map of id to field value so that we can easily look up the field value for a given id. * * @param rows An array of ArchiveNodeDatabaseRow objects to process. * @returns A Map where the keys are the stringified 'id' properties from the input rows and * the values are the corresponding 'field' properties. 
*/ function getElementIdFieldValues(rows: ArchiveNodeDatabaseRow[]) { - const elementIdValues: FieldElementIdWithValueMap = new Map(); + const elementIdValues: FieldIdValuePairs = new Map(); for (let i = 0; i < rows.length; i++) { const { id, field } = rows[i]; elementIdValues.set(id.toString(), field); @@ -97,36 +88,31 @@ function getElementIdFieldValues(rows: ArchiveNodeDatabaseRow[]) { } /** - * Removes redundant fields from an array of blocks based on unique event and account update identifiers. + * Removes redundant fields from an array of rows based on unique event and account update identifiers. * - * This function iterates over each block in the input array, and for each block, - * it retrieves the event array id, event element ids, account update id, and account updates ids. - * - * It then constructs a unique identifier for each block by joining the account update id and - * event array id with a comma. If this unique identifier has not been seen before, the function - * continues processing the block and adding it to a new array of blocks. - * - * If any inconsistencies are detected during the processing of a block, such as a missing account - * update for a given id, the function will throw an error. - * - * @param blocks An array of ArchiveNodeDatabaseRow objects to process. + * @param archiveNodeRow An array of ArchiveNodeDatabaseRow objects to process. * @returns A new array of ArchiveNodeDatabaseRow objects where redundant fields have been removed. * @throws {Error} If no matching account update is found for a given account update id and event array id. */ -function removeRedundantEmittedFields(blocks: ArchiveNodeDatabaseRow[]) { - const newBlocks: ArchiveNodeDatabaseRow[][] = []; +function removeRedundantEmittedFields( + archiveNodeRow: ArchiveNodeDatabaseRow[] +) { + const newRows: ArchiveNodeDatabaseRow[][] = []; const seenEventIds = new Set(); - for (let i = 0; i < blocks.length; i++) { - const block = blocks[i]; + for (let i = 0; i < archiveNodeRow.length; i++) { + const currentRow = archiveNodeRow[i]; + console.log(currentRow); const { - zkapp_event_array_id, + zkapp_event_array_id, // This is the unique event identifier zkapp_event_element_ids, zkapp_account_update_id, zkapp_account_updates_ids, - } = block; - const uniqueId = [zkapp_account_update_id, zkapp_event_array_id].join(','); - if (!seenEventIds.has(uniqueId)) { + } = currentRow; + const uniqueEventId = [zkapp_account_update_id, zkapp_event_array_id].join( + ',' + ); + if (!seenEventIds.has(uniqueEventId)) { const accountUpdateIndexes = findAllIndexes( zkapp_account_updates_ids, zkapp_account_update_id @@ -136,23 +122,25 @@ function removeRedundantEmittedFields(blocks: ArchiveNodeDatabaseRow[]) { `No matching account update found for the given account update ID (${zkapp_account_update_id}) and event array ID (${zkapp_event_array_id}).` ); } + // AccountUpdate Ids are always unique so we can assume it will return an array with one element const accountUpdateIdIndex = accountUpdateIndexes[0]; + const eventIndexes = findAllIndexes( zkapp_event_element_ids, zkapp_event_array_id ); eventIndexes.forEach((index) => { - if (newBlocks[accountUpdateIdIndex] === undefined) { - newBlocks[accountUpdateIdIndex] = []; + if (newRows[accountUpdateIdIndex] === undefined) { + newRows[accountUpdateIdIndex] = []; } - newBlocks[accountUpdateIdIndex][index] = block; + newRows[accountUpdateIdIndex][index] = currentRow; }); - seenEventIds.add(uniqueId); + seenEventIds.add(uniqueEventId); } } - return newBlocks.flat(); + return 
newRows.flat(); } /** @@ -178,7 +166,7 @@ function removeRedundantEmittedFields(blocks: ArchiveNodeDatabaseRow[]) { function mapActionOrEvent( kind: 'action' | 'event', rows: ArchiveNodeDatabaseRow[], - elementIdFieldValues: FieldElementIdWithValueMap + elementIdFieldValues: FieldIdValuePairs ) { const data: (Event | Action)[] = []; @@ -216,3 +204,13 @@ function sortAndFilterBlocks( filterBestTip(data); return data; } + +function findAllIndexes(arr: T[], target: T): number[] { + const indexes: number[] = []; + arr.forEach((element, index) => { + if (element === target) { + indexes.push(index); + } + }); + return indexes; +} diff --git a/src/models/types.ts b/src/models/types.ts index 0019b07..47b4142 100644 --- a/src/models/types.ts +++ b/src/models/types.ts @@ -1,12 +1,11 @@ export const DEFAULT_TOKEN_ID = 'wSHV2S4qX9jFsLjQo8r1BsMLH2ZRKsZx6EJd1sbozGPieEC4Jf' as const; -export type BlocksWithTransactionsMap = Map< - string, - Map ->; +export type Transaction = Map; -export type FieldElementIdWithValueMap = Map; +export type BlocksWithTransactionsMap = Map; + +export type FieldIdValuePairs = Map; export enum BlockStatusFilter { all = 'ALL', @@ -66,42 +65,102 @@ export type Actions = { blockInfo: BlockInfo; }[]; +/** + * Type representing a database row with detailed information related to the archive node. + * This includes fields such as block-related hashes, account information, action states, and more. + */ export type ArchiveNodeDatabaseRow = { + // Unique identifier for the zkapp account update. zkapp_account_update_id: number; + + // Identifier for the requesting zkapp account. requesting_zkapp_account_identifier_id: number; + + // Unique block identifier. block_id: number; - account_identifier_id: number; + + // zkapp identifier. zkapp_id: number; - account_access_id: number; + + // Hash representing the state of the block. state_hash: string; + + // Hash representing the parent of the block. parent_hash: string; + + // Numeric representation of the block's height in the chain. height: string; + + // Slot count since genesis. global_slot_since_genesis: string; + + // Slot count since the last hard fork. global_slot_since_hard_fork: string; + + // Type of authorization used for the block. authorization_kind: string; + + // Timestamp when the block was created. timestamp: string; + + // Current status of the block within the chain. chain_status: string; + + // Hash representing the ledger state. ledger_hash: string; + + // Distance from the block with the maximum height. distance_from_max_block_height: string; - zkapp_fee_payer_body_id: number; + + // List of identifiers for zkapp account updates. zkapp_account_updates_ids: number[]; + + // Status of the transaction. status: string; + + // Optional memo field for additional details. memo: string; + + // Unique hash identifier. hash: string; - body_id: number; - events_id: number; - actions_id: number; + + // Unique id that references a field value. Each field value in the Archive Node has its own unique id. id: number; + + // Element ids represent a list of ids that map to specific field values. + // These are used to identify which field values are used in a zkApp transaction and construct the data returned to the user. element_ids: number[]; + + // Field value information. field: string; + + // List of zkapp event element identifiers. zkapp_event_element_ids: number[]; + + // zkapp event array identifier. zkapp_event_array_id: number; + + // Output of the last VRF (Verifiable Random Function). 
last_vrf_output: string; + + // Minimum window density value. min_window_density: string; + + // List of densities for each sub-window. sub_window_densities: string[]; + + // (Optional) Action state value 1. action_state_value1?: string; + + // (Optional) Action state value 2. action_state_value2?: string; + + // (Optional) Action state value 3. action_state_value3?: string; + + // (Optional) Action state value 4. action_state_value4?: string; + + // (Optional) Action state value 5. action_state_value5?: string; }; diff --git a/tsconfig.json b/tsconfig.json index 526b1be..90a806e 100644 --- a/tsconfig.json +++ b/tsconfig.json @@ -100,5 +100,5 @@ // "skipDefaultLibCheck": true, /* Skip type checking .d.ts files that are included with TypeScript. */ "skipLibCheck": true /* Skip type checking all .d.ts files. */ }, - "include": ["src/**/*", "package.json"] + "include": ["src/**/*", "package.json", "scripts/run-compare.ts"] }
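
The refactor above revolves around two intermediate data structures: the `BlocksWithTransactionsMap` that `partitionBlocks` builds (block hash → transaction hash → rows) and the `FieldIdValuePairs` lookup that `getElementIdFieldValues` builds (element id → field value), which `blocksToActions`/`blocksToEvents` then consume. The sketch below is illustrative only and is not part of the patch: it uses a simplified stand-in for `ArchiveNodeDatabaseRow` (only the four fields the sketch needs) and made-up hashes and ids, and it mirrors rather than reproduces the real implementations in `utils.ts` and `actions-service.ts`.

```ts
// Simplified stand-in for ArchiveNodeDatabaseRow: only the fields this sketch needs.
type Row = {
  state_hash: string; // block hash
  hash: string; // transaction hash
  id: number; // element id that references a field value
  field: string; // the field value itself
};

type Transaction = Map<string, Row[]>; // transaction hash -> rows
type BlocksWithTransactionsMap = Map<string, Transaction>; // block hash -> transactions
type FieldIdValuePairs = Map<string, string>; // element id -> field value

// Same shape as partitionBlocks in utils.ts: group rows by block hash, then by transaction hash.
function partitionBlocks(rows: Row[]): BlocksWithTransactionsMap {
  const blocks: BlocksWithTransactionsMap = new Map();
  for (const row of rows) {
    const transactions = blocks.get(row.state_hash) ?? new Map<string, Row[]>();
    const txRows = transactions.get(row.hash) ?? [];
    txRows.push(row);
    transactions.set(row.hash, txRows);
    blocks.set(row.state_hash, transactions);
  }
  return blocks;
}

// Same shape as getElementIdFieldValues: build the element id -> field value lookup.
function getElementIdFieldValues(rows: Row[]): FieldIdValuePairs {
  const pairs: FieldIdValuePairs = new Map();
  for (const row of rows) pairs.set(row.id.toString(), row.field);
  return pairs;
}

// Two rows from the same block and transaction, each referencing one field value.
const rows: Row[] = [
  { state_hash: 'blockA', hash: 'tx1', id: 1, field: '42' },
  { state_hash: 'blockA', hash: 'tx1', id: 2, field: '7' },
];

const blocks = partitionBlocks(rows);
const fieldValues = getElementIdFieldValues(rows);

// blocksToActions-style consumption: walk each block, then each transaction,
// and resolve the field values that the rows' element ids point at.
for (const [blockHash, transactions] of blocks) {
  for (const [txHash, txRows] of transactions) {
    const resolved = txRows.map((row) => fieldValues.get(row.id.toString()));
    console.log(blockHash, txHash, resolved); // blockA tx1 [ '42', '7' ]
  }
}
```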
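`removeRedundantEmittedFields` leans on two ideas that the new comments only touch on: a `(zkapp_account_update_id, zkapp_event_array_id)` pair acts as a uniqueness key so each emitted event array is processed once, and `findAllIndexes` (now private to `utils.ts`) locates where an account update and its event arrays sit inside a row's id lists. The following is a minimal, self-contained illustration with made-up ids, not the actual implementation.

```ts
// Helper with the same shape as findAllIndexes in utils.ts: every index where `target` occurs.
function findAllIndexes<T>(arr: T[], target: T): number[] {
  const indexes: number[] = [];
  arr.forEach((element, index) => {
    if (element === target) indexes.push(index);
  });
  return indexes;
}

// Hypothetical id lists for one row: the account updates in the transaction,
// and the event array emitted at each account-update slot.
const zkapp_account_updates_ids = [10, 11, 12];
const zkapp_event_element_ids = [7, 8, 7];

// Account update ids are unique within a transaction, so this yields a single index.
console.log(findAllIndexes(zkapp_account_updates_ids, 11)); // [ 1 ]

// An event array id can appear in several slots.
console.log(findAllIndexes(zkapp_event_element_ids, 7)); // [ 0, 2 ]

// Deduplication key: a row is only processed the first time a given
// (account update id, event array id) pair is seen.
const seenEventIds = new Set<string>();
const uniqueEventId = [11, 7].join(',');
if (!seenEventIds.has(uniqueEventId)) {
  seenEventIds.add(uniqueEventId);
}
console.log(seenEventIds.has('11,7')); // true
```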