From 2181df1c47e5ebb2e018df30c3b8e7751db5a2ed Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Fri, 18 Oct 2024 17:14:52 +0500 Subject: [PATCH 01/22] some cases with logging to file --- packages/telemetry/src/context-aware-slog.js | 247 +++++++++++++++++++ 1 file changed, 247 insertions(+) create mode 100644 packages/telemetry/src/context-aware-slog.js diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js new file mode 100644 index 00000000000..40b43691786 --- /dev/null +++ b/packages/telemetry/src/context-aware-slog.js @@ -0,0 +1,247 @@ +import { makeFsStreamWriter } from '@agoric/internal/src/node/fs-stream.js'; + +/** + * @typedef {Partial<{ + * block: Partial<{ + * height: number; + * time: number; + * }>; + * crank: Partial<{ + * num: bigint; + * type: string; + * }>; + * init: boolean; + * replay: boolean; + * run: Partial<{ + * id: string; + * trigger: Partial<{ + * blockHeight: number; + * msgIdx: number; + * sender: string; + * source: string; + * time: number; + * txHash: string; + * type: string; + * }> + * }>; + * }> + * } Context + */ + +const FILE_PATH = 'slogs-temp.log'; + +const SLOG_TYPES = { + CRANK: { + RESULT: 'crank-result', + START: 'crank-start', + }, + KERNEL: { + INIT: { + FINISH: 'kernel-init-finish', + START: 'kernel-init-start', + }, + }, + REPLAY: { + FINISH: 'finish-replay', + START: 'start-replay', + }, + SWINGSET: { + AFTER_COMMIT_STATS: 'cosmic-swingset-after-commit-stats', + BEGIN_BLOCK: 'cosmic-swingset-begin-block', + BOOTSTRAP_BLOCK: { + FINISH: 'cosmic-swingset-bootstrap-block-finish', + START: 'cosmic-swingset-bootstrap-block-start', + }, + BRIDGE_INBOUND: 'cosmic-swingset-bridge-inbound', + COMMIT: { + FINISH: 'cosmic-swingset-commit-finish', + START: 'cosmic-swingset-commit-start', + }, + DELIVER_INBOUND: 'cosmic-swingset-deliver-inbound', + END_BLOCK: { + FINISH: 'cosmic-swingset-end-block-finish', + START: 'cosmic-swingset-end-block-start', + }, + }, +}; + +const stringify = data => + JSON.stringify(data, (_, value) => + typeof value === 'bigint' ? 
Number(value) : value, + ); + +/** + * + * @param {{env: typeof process.env}} options + */ +export const makeSlogSender = async ({ env: _ }) => { + const stream = await makeFsStreamWriter(FILE_PATH); + + if (!stream) { + return undefined; + } + + /** @type Array */ + let [blockContext, crankContext, initContext, replayContext, triggerContext] = + [null, null, null, null, null]; + + /** + * + * @param {{ + * blockHeight?: number; + * blockTime?: number; + * crankNum?: bigint; + * crankType?: string; + * inboundNum?: string; + * monotime: number; + * sender?: string; + * source?: string; + * time: number; + * type: string; + * }} slog + */ + const slogSender = async ({ + monotime, + time: timestamp, + type: slogType, + ...body + }) => { + await Promise.resolve(); + + let [afterProcessed, beforeProcessed] = [true, true]; + + /** + * Add any before report operations here + * like setting context data + */ + switch (slogType) { + case SLOG_TYPES.CRANK.START: { + crankContext = { + crank: { num: body.crankNum, type: body.crankType }, + }; + break; + } + case SLOG_TYPES.KERNEL.INIT.START: { + initContext = { init: true }; + break; + } + case SLOG_TYPES.REPLAY.START: { + replayContext = { replay: true }; + break; + } + case SLOG_TYPES.SWINGSET.BEGIN_BLOCK: { + blockContext = { + block: { + height: body.blockHeight, + time: body.blockTime, + }, + }; + break; + } + case SLOG_TYPES.SWINGSET.BOOTSTRAP_BLOCK.START: { + blockContext = { + block: { + height: body.blockHeight || 0, + time: body.blockTime, + }, + }; + triggerContext = { + run: { + id: `bootstrap-${body.blockTime}`, + trigger: { + time: body.blockTime, + type: 'bootstrap', + }, + }, + }; + break; + } + case SLOG_TYPES.SWINGSET.BRIDGE_INBOUND: + case SLOG_TYPES.SWINGSET.DELIVER_INBOUND: { + const [blockHeight, txHash, msgIdx] = (body.inboundNum || '').split(''); + const triggerType = slogType.split('-')[2]; + + triggerContext = { + run: { + id: `${triggerType}-${body.inboundNum}`, + trigger: { + blockHeight: Number(blockHeight), + msgIdx: Number(msgIdx), + sender: body.sender, + source: body.source, + time: body.blockTime, + txHash, + type: triggerType, + }, + }, + }; + break; + } + case SLOG_TYPES.SWINGSET.COMMIT.FINISH: + case SLOG_TYPES.SWINGSET.COMMIT.START: + case SLOG_TYPES.SWINGSET.END_BLOCK.FINISH: { + assert(!!blockContext); + break; + } + case SLOG_TYPES.SWINGSET.END_BLOCK.START: { + assert(!!blockContext); + break; + } + default: + beforeProcessed = false; + } + + const finalSlog = { + body, + context: { + ...blockContext, + ...crankContext, + ...initContext, + ...replayContext, + ...triggerContext, + }, + process: { uptime: monotime }, + timestamp, + type: slogType, + }; + + /** + * Add any after report operations here + * like resetting context data + */ + switch (slogType) { + case SLOG_TYPES.CRANK.RESULT: { + crankContext = null; + break; + } + case SLOG_TYPES.KERNEL.INIT.FINISH: { + initContext = null; + break; + } + case SLOG_TYPES.REPLAY.START: { + replayContext = null; + break; + } + case SLOG_TYPES.SWINGSET.AFTER_COMMIT_STATS: { + blockContext = null; + break; + } + case SLOG_TYPES.SWINGSET.BOOTSTRAP_BLOCK.FINISH: { + blockContext = null; + break; + } + case SLOG_TYPES.SWINGSET.END_BLOCK.START: { + // TODO: restore the trigger context here + break; + } + default: + afterProcessed = false; + } + + if (afterProcessed || beforeProcessed) + await stream.write(`${stringify(finalSlog)}\n`); + else console.log(`Unexpected slog type: ${slogType}`); + }; + + return slogSender; +}; From aba689a14f5c817c3edc988d8eb027a3a27b9f3c Mon 
Sep 17 00:00:00 2001 From: usmanmani1122 Date: Sun, 20 Oct 2024 17:51:43 +0500 Subject: [PATCH 02/22] cases handled, remaining persistence and reporting to otel libraries --- packages/telemetry/src/context-aware-slog.js | 229 +++++++++++++------ 1 file changed, 156 insertions(+), 73 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 40b43691786..eeef2f3d76a 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -1,40 +1,64 @@ +/* eslint-disable no-restricted-syntax */ import { makeFsStreamWriter } from '@agoric/internal/src/node/fs-stream.js'; /** + * @typedef {{ + * blockHeight?: number; + * blockTime?: number; + * crankNum?: bigint; + * crankType?: string; + * deliveryNum?: bigint; + * inboundNum?: string; + * monotime: number; + * replay?: boolean; + * runNum?: number; + * sender?: string; + * source?: string; + * syscallNum?: number; + * time: number; + * type: string; + * vatID?: string; + * }} Slog + * * @typedef {Partial<{ - * block: Partial<{ - * height: number; - * time: number; - * }>; - * crank: Partial<{ - * num: bigint; - * type: string; - * }>; + * 'block.height': Slog['blockHeight']; + * 'block.time': Slog['blockTime']; + * 'crank.deliveryNum': Slog['deliveryNum']; + * 'crank.num': Slog['crankNum']; + * 'crank.type': Slog['crankType']; + * 'crank.vatID': Slog['vatID']; * init: boolean; * replay: boolean; - * run: Partial<{ - * id: string; - * trigger: Partial<{ - * blockHeight: number; - * msgIdx: number; - * sender: string; - * source: string; - * time: number; - * txHash: string; - * type: string; - * }> - * }>; + * 'run.id': string; + * 'run.num': string; + * 'run.trigger.blockHeight': Slog['blockHeight']; + * 'run.trigger.msgIdx': number; + * 'run.trigger.sender': Slog['sender']; + * 'run.trigger.source': Slog['source']; + * 'run.trigger.time': Slog['blockTime']; + * 'run.trigger.txHash': string; + * 'run.trigger.type': string; * }> * } Context + * + * @typedef {{ + * 'crank.syscallNum'?: Slog['syscallNum']; + * 'process.uptime': Slog['monotime']; + * timestamp: Slog['time']; + * } & Context & Partial} ReportedSlog */ const FILE_PATH = 'slogs-temp.log'; const SLOG_TYPES = { + CLIST: 'clist', + CONSOLE: 'console', CRANK: { RESULT: 'crank-result', START: 'crank-start', }, + DELIVER: 'deliver', + DELIVER_RESULT: 'deliver-result', KERNEL: { INIT: { FINISH: 'kernel-init-finish', @@ -45,6 +69,10 @@ const SLOG_TYPES = { FINISH: 'finish-replay', START: 'start-replay', }, + RUN: { + FINISH: 'cosmic-swingset-run-finish', + START: 'cosmic-swingset-run-start', + }, SWINGSET: { AFTER_COMMIT_STATS: 'cosmic-swingset-after-commit-stats', BEGIN_BLOCK: 'cosmic-swingset-begin-block', @@ -63,6 +91,8 @@ const SLOG_TYPES = { START: 'cosmic-swingset-end-block-start', }, }, + SYSCALL: 'syscall', + SYSCALL_RESULT: 'syscall-result', }; const stringify = data => @@ -86,19 +116,7 @@ export const makeSlogSender = async ({ env: _ }) => { [null, null, null, null, null]; /** - * - * @param {{ - * blockHeight?: number; - * blockTime?: number; - * crankNum?: bigint; - * crankType?: string; - * inboundNum?: string; - * monotime: number; - * sender?: string; - * source?: string; - * time: number; - * type: string; - * }} slog + * @param {Slog} slog */ const slogSender = async ({ monotime, @@ -110,17 +128,61 @@ export const makeSlogSender = async ({ env: _ }) => { let [afterProcessed, beforeProcessed] = [true, true]; + /** @type ReportedSlog */ + const extractedFields = { 
'process.uptime': monotime, timestamp }; + const finalBody = { ...body }; + /** * Add any before report operations here * like setting context data */ switch (slogType) { + case SLOG_TYPES.CONSOLE: { + delete finalBody.crankNum; + delete finalBody.deliveryNum; + + break; + } + case SLOG_TYPES.CLIST: { + assert(!!crankContext); + crankContext['crank.vatID'] = finalBody.vatID; + break; + } case SLOG_TYPES.CRANK.START: { crankContext = { - crank: { num: body.crankNum, type: body.crankType }, + 'crank.num': finalBody.crankNum, + 'crank.type': finalBody.crankType, }; break; } + case SLOG_TYPES.DELIVER: { + if (replayContext) { + assert(finalBody.replay); + replayContext = { + ...replayContext, + 'crank.deliveryNum': finalBody.deliveryNum, + 'crank.vatID': finalBody.vatID, + }; + } else { + assert(!!crankContext); + crankContext = { + ...crankContext, + 'crank.deliveryNum': finalBody.deliveryNum, + 'crank.vatID': finalBody.vatID, + }; + } + + delete finalBody.deliveryNum; + delete finalBody.replay; + + break; + } + case SLOG_TYPES.DELIVER_RESULT: { + delete finalBody.deliveryNum; + delete finalBody.replay; + + break; + } case SLOG_TYPES.KERNEL.INIT.START: { initContext = { init: true }; break; @@ -129,52 +191,61 @@ export const makeSlogSender = async ({ env: _ }) => { replayContext = { replay: true }; break; } + case SLOG_TYPES.RUN.START: { + if (!(triggerContext || finalBody.runNum === 0)) + // TBD: add explicit slog events of both timer poll and install bundle + triggerContext = { + 'run.id': `timer-${finalBody.blockHeight}`, + 'run.trigger.time': finalBody.blockTime, + 'run.trigger.type': 'timer', + }; + // TODO: Persist this context + + if (!triggerContext) triggerContext = {}; + triggerContext = { + ...triggerContext, + 'run.num': `${finalBody.runNum}`, + }; + + break; + } case SLOG_TYPES.SWINGSET.BEGIN_BLOCK: { blockContext = { - block: { - height: body.blockHeight, - time: body.blockTime, - }, + 'block.height': finalBody.blockHeight, + 'block.time': finalBody.blockTime, }; break; } case SLOG_TYPES.SWINGSET.BOOTSTRAP_BLOCK.START: { blockContext = { - block: { - height: body.blockHeight || 0, - time: body.blockTime, - }, + 'block.height': finalBody.blockHeight || 0, + 'block.time': finalBody.blockTime, }; triggerContext = { - run: { - id: `bootstrap-${body.blockTime}`, - trigger: { - time: body.blockTime, - type: 'bootstrap', - }, - }, + 'run.id': `bootstrap-${finalBody.blockTime}`, + 'run.trigger.time': finalBody.blockTime, + 'run.trigger.type': 'bootstrap', }; break; } case SLOG_TYPES.SWINGSET.BRIDGE_INBOUND: case SLOG_TYPES.SWINGSET.DELIVER_INBOUND: { - const [blockHeight, txHash, msgIdx] = (body.inboundNum || '').split(''); + const [blockHeight, txHash, msgIdx] = ( + finalBody.inboundNum || '' + ).split('-'); const triggerType = slogType.split('-')[2]; triggerContext = { - run: { - id: `${triggerType}-${body.inboundNum}`, - trigger: { - blockHeight: Number(blockHeight), - msgIdx: Number(msgIdx), - sender: body.sender, - source: body.source, - time: body.blockTime, - txHash, - type: triggerType, - }, - }, + 'run.id': `${triggerType}-${finalBody.inboundNum}`, + 'run.trigger.blockHeight': Number(blockHeight), + 'run.trigger.msgIdx': Number(msgIdx), + 'run.trigger.sender': finalBody.sender, + 'run.trigger.source': finalBody.source, + 'run.trigger.time': finalBody.blockTime, + 'run.trigger.txHash': txHash, + 'run.trigger.type': triggerType, }; + // TODO: Persist this context break; } case SLOG_TYPES.SWINGSET.COMMIT.FINISH: @@ -187,21 +258,29 @@ export const makeSlogSender = async ({ env: _ 
}) => { assert(!!blockContext); break; } + case SLOG_TYPES.SYSCALL: + case SLOG_TYPES.SYSCALL_RESULT: { + extractedFields['crank.syscallNum'] = finalBody.syscallNum; + + delete finalBody.deliveryNum; + delete finalBody.replay; + delete finalBody.syscallNum; + + break; + } default: beforeProcessed = false; } + /** @type ReportedSlog */ const finalSlog = { - body, - context: { - ...blockContext, - ...crankContext, - ...initContext, - ...replayContext, - ...triggerContext, - }, - process: { uptime: monotime }, - timestamp, + ...blockContext, + ...crankContext, + ...extractedFields, + ...finalBody, + ...initContext, + ...replayContext, + ...triggerContext, type: slogType, }; @@ -218,10 +297,14 @@ export const makeSlogSender = async ({ env: _ }) => { initContext = null; break; } - case SLOG_TYPES.REPLAY.START: { + case SLOG_TYPES.REPLAY.FINISH: { replayContext = null; break; } + case SLOG_TYPES.RUN.FINISH: { + triggerContext = null; + break; + } case SLOG_TYPES.SWINGSET.AFTER_COMMIT_STATS: { blockContext = null; break; From 36eeddf6512178045be89a8d19c976c5194fcbd9 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Mon, 21 Oct 2024 17:52:40 +0500 Subject: [PATCH 03/22] persistence --- packages/telemetry/src/context-aware-slog.js | 117 ++++++++++++++++++- 1 file changed, 111 insertions(+), 6 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index eeef2f3d76a..13c66c41f1a 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -1,5 +1,8 @@ +/* globals process */ /* eslint-disable no-restricted-syntax */ import { makeFsStreamWriter } from '@agoric/internal/src/node/fs-stream.js'; +import sqlite from 'better-sqlite3'; +import { closeSync, existsSync, openSync } from 'fs'; /** * @typedef {{ @@ -49,6 +52,27 @@ import { makeFsStreamWriter } from '@agoric/internal/src/node/fs-stream.js'; */ const FILE_PATH = 'slogs-temp.log'; +const DATABASE_FILE_PATH = + process.env.SLOG_CONTEXT_DATABASE_FILE_PATH || '/state/slog-db.sqlite3'; +const DATABASE_TABLE_NAME = 'context'; + +const DATABASE_TYPES = { + INTEGER: 'INTEGER', + TEXT: 'TEXT', +}; + +const DATABASE_SCHEMA = { + key: `${DATABASE_TYPES.TEXT} PRIMARY KEY`, + 'run.id': DATABASE_TYPES.TEXT, + 'run.num': DATABASE_TYPES.TEXT, + 'run.trigger.blockHeight': DATABASE_TYPES.INTEGER, + 'run.trigger.msgIdx': DATABASE_TYPES.INTEGER, + 'run.trigger.sender': DATABASE_TYPES.TEXT, + 'run.trigger.source': DATABASE_TYPES.TEXT, + 'run.trigger.time': DATABASE_TYPES.INTEGER, + 'run.trigger.txHash': DATABASE_TYPES.TEXT, + 'run.trigger.type': DATABASE_TYPES.TEXT, +}; const SLOG_TYPES = { CLIST: 'clist', @@ -95,6 +119,82 @@ const SLOG_TYPES = { SYSCALL_RESULT: 'syscall-result', }; +const getDatabaseInstance = async () => { + if (!existsSync(DATABASE_FILE_PATH)) + closeSync(openSync(DATABASE_FILE_PATH, 'w')); + + const databaseInstance = sqlite(DATABASE_FILE_PATH, { fileMustExist: true }); + databaseInstance.exec( + ` + CREATE TABLE IF NOT EXISTS '${DATABASE_TABLE_NAME}'( + ${Object.entries(DATABASE_SCHEMA) + .map(([name, type]) => `[${name}] ${type}`) + .join(',\n')} + ) + `, + ); + + /** + * @type {Array<{ cid: number; name: string; type: string; notnull: number; dflt_value: null; pk: number; }>} + */ + // @ts-expect-error + const rows = databaseInstance + .prepare(`PRAGMA table_info('${DATABASE_TABLE_NAME}')`) + .all(); + + const existingColumns = rows.map(row => row.name); + + Object.entries(DATABASE_SCHEMA).map( + ([name, type]) => + 
!existingColumns.includes(name) && + databaseInstance + .prepare(`ALTER TABLE '${DATABASE_TABLE_NAME}' ADD [${name}] ${type}`) + .run(), + ); + + return { + /** + * + * @param {Context} context + */ + persistContext: context => { + /** @type {Array} */ + const keys = []; + const values = []; + + for (const [key, value] of Object.entries(context)) { + keys.push(`[${key}]`); + if (value === undefined) values.push(null); + else + values.push( + DATABASE_SCHEMA[key].startsWith(DATABASE_TYPES.TEXT) + ? `'${value}'` + : value, + ); + } + + databaseInstance + .prepare( + `INSERT OR REPLACE INTO '${DATABASE_TABLE_NAME}' (key, ${keys.join(', ')}) VALUES ('trigger-context', ${values.join(', ')})`, + ) + .run(); + }, + + restoreContext: () => { + /** + * @type {Context | undefined} + */ + // @ts-expect-error + const row = databaseInstance + .prepare( + `SELECT * FROM '${DATABASE_TABLE_NAME}' WHERE key = 'trigger-context'`, + ) + .get(); + return row || null; + }, + }; +}; + const stringify = data => JSON.stringify(data, (_, value) => typeof value === 'bigint' ? Number(value) : value, @@ -111,6 +211,8 @@ export const makeSlogSender = async ({ env: _ }) => { return undefined; } + const db = await getDatabaseInstance(); + /** @type Array */ let [blockContext, crankContext, initContext, replayContext, triggerContext] = [null, null, null, null, null]; @@ -192,14 +294,18 @@ export const makeSlogSender = async ({ env: _ }) => { break; } case SLOG_TYPES.RUN.START: { - if (!(triggerContext || finalBody.runNum === 0)) + if (!(triggerContext || finalBody.runNum === 0)) { + const blockTime = finalBody.blockTime || blockContext?.['block.time']; + + assert(blockTime); // TBD: add explicit slog events of both timer poll and install bundle triggerContext = { 'run.id': `timer-${finalBody.blockHeight}`, - 'run.trigger.time': finalBody.blockTime, + 'run.trigger.time': blockTime, 'run.trigger.type': 'timer', }; - // TODO: Persist this context + db.persistContext(triggerContext); + } if (!triggerContext) triggerContext = {}; triggerContext = { @@ -245,7 +351,7 @@ export const makeSlogSender = async ({ env: _ }) => { 'run.trigger.txHash': txHash, 'run.trigger.type': triggerType, }; - // TODO: Persist this context + db.persistContext(triggerContext); break; } case SLOG_TYPES.SWINGSET.COMMIT.FINISH: @@ -272,7 +378,6 @@ export const makeSlogSender = async ({ env: _ }) => { beforeProcessed = false; } - /** @type ReportedSlog */ const finalSlog = { ...blockContext, ...crankContext, @@ -314,7 +419,7 @@ export const makeSlogSender = async ({ env: _ }) => { break; } case SLOG_TYPES.SWINGSET.END_BLOCK.START: { - // TODO: restore the trigger context here + triggerContext = db.restoreContext(); break; } default: From 99cbfa77505eccaca17560223e60a61707a1c4f8 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Mon, 21 Oct 2024 22:03:38 +0500 Subject: [PATCH 04/22] otel reporting --- packages/telemetry/package.json | 3 + packages/telemetry/src/context-aware-slog.js | 55 ++++++++----- yarn.lock | 87 +++++++++++++++++++- 3 files changed, 124 insertions(+), 21 deletions(-) diff --git a/packages/telemetry/package.json b/packages/telemetry/package.json index 32333995d33..9162cf01bd8 100644 --- a/packages/telemetry/package.json +++ b/packages/telemetry/package.json @@ -29,9 +29,12 @@ "@endo/marshal": "^1.5.4", "@endo/stream": "^1.2.6", "@opentelemetry/api": "~1.3.0", + "@opentelemetry/api-logs": "0.53.0", "@opentelemetry/exporter-prometheus": "~0.35.0", + "@opentelemetry/exporter-logs-otlp-http": "0.53.0", 
"@opentelemetry/exporter-trace-otlp-http": "~0.35.0", "@opentelemetry/resources": "~1.9.0", + "@opentelemetry/sdk-logs": "0.53.0", "@opentelemetry/sdk-metrics": "~1.9.0", "@opentelemetry/sdk-trace-base": "~1.9.0", "@opentelemetry/semantic-conventions": "~1.9.0", diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 13c66c41f1a..96c533115b6 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -1,8 +1,14 @@ /* globals process */ -/* eslint-disable no-restricted-syntax */ -import { makeFsStreamWriter } from '@agoric/internal/src/node/fs-stream.js'; +import { logs, SeverityNumber } from '@opentelemetry/api-logs'; +import { Resource } from '@opentelemetry/resources'; +import { + LoggerProvider, + SimpleLogRecordProcessor, +} from '@opentelemetry/sdk-logs'; +import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; import sqlite from 'better-sqlite3'; import { closeSync, existsSync, openSync } from 'fs'; +import { getResourceAttributes } from './index.js'; /** * @typedef {{ @@ -51,7 +57,6 @@ import { closeSync, existsSync, openSync } from 'fs'; * } & Context & Partial} ReportedSlog */ -const FILE_PATH = 'slogs-temp.log'; const DATABASE_FILE_PATH = process.env.SLOG_CONTEXT_DATABASE_FILE_PATH || '/state/slog-db.sqlite3'; const DATABASE_TABLE_NAME = 'context'; @@ -93,6 +98,7 @@ const SLOG_TYPES = { FINISH: 'finish-replay', START: 'start-replay', }, + // eslint-disable-next-line no-restricted-syntax RUN: { FINISH: 'cosmic-swingset-run-finish', START: 'cosmic-swingset-run-start', @@ -204,12 +210,22 @@ const stringify = data => * * @param {{env: typeof process.env}} options */ -export const makeSlogSender = async ({ env: _ }) => { - const stream = await makeFsStreamWriter(FILE_PATH); +export const makeSlogSender = async options => { + const { OTEL_EXPORTER_OTLP_ENDPOINT } = options.env; + if (!OTEL_EXPORTER_OTLP_ENDPOINT) + return console.warn( + 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" envrionment variable', + ); + + const loggerProvider = new LoggerProvider({ + resource: new Resource(getResourceAttributes(options)), + }); + loggerProvider.addLogRecordProcessor( + new SimpleLogRecordProcessor(new OTLPLogExporter({ keepAlive: true })), + ); - if (!stream) { - return undefined; - } + logs.setGlobalLoggerProvider(loggerProvider); + const logger = logs.getLogger('default'); const db = await getDatabaseInstance(); @@ -220,12 +236,7 @@ export const makeSlogSender = async ({ env: _ }) => { /** * @param {Slog} slog */ - const slogSender = async ({ - monotime, - time: timestamp, - type: slogType, - ...body - }) => { + const slogSender = async ({ monotime, time: timestamp, ...body }) => { await Promise.resolve(); let [afterProcessed, beforeProcessed] = [true, true]; @@ -238,7 +249,7 @@ export const makeSlogSender = async ({ env: _ }) => { * Add any before report operations here * like setting context data */ - switch (slogType) { + switch (body.type) { case SLOG_TYPES.CONSOLE: { delete finalBody.crankNum; delete finalBody.deliveryNum; @@ -293,6 +304,7 @@ export const makeSlogSender = async ({ env: _ }) => { replayContext = { replay: true }; break; } + // eslint-disable-next-line no-restricted-syntax case SLOG_TYPES.RUN.START: { if (!(triggerContext || finalBody.runNum === 0)) { const blockTime = finalBody.blockTime || blockContext?.['block.time']; @@ -339,7 +351,7 @@ export const makeSlogSender = async ({ env: _ }) 
=> { const [blockHeight, txHash, msgIdx] = ( finalBody.inboundNum || '' ).split('-'); - const triggerType = slogType.split('-')[2]; + const triggerType = body.type.split('-')[2]; triggerContext = { 'run.id': `${triggerType}-${finalBody.inboundNum}`, @@ -386,14 +398,13 @@ export const makeSlogSender = async ({ env: _ }) => { ...initContext, ...replayContext, ...triggerContext, - type: slogType, }; /** * Add any after report operations here * like resetting context data */ - switch (slogType) { + switch (body.type) { case SLOG_TYPES.CRANK.RESULT: { crankContext = null; break; @@ -406,6 +417,7 @@ export const makeSlogSender = async ({ env: _ }) => { replayContext = null; break; } + // eslint-disable-next-line no-restricted-syntax case SLOG_TYPES.RUN.FINISH: { triggerContext = null; break; @@ -427,8 +439,11 @@ export const makeSlogSender = async ({ env: _ }) => { } if (afterProcessed || beforeProcessed) - await stream.write(`${stringify(finalSlog)}\n`); - else console.log(`Unexpected slog type: ${slogType}`); + logger.emit({ + body: JSON.parse(stringify(finalSlog)), + severityNumber: SeverityNumber.INFO, + }); + else console.log(`Unexpected slog type: ${body.type}`); }; return slogSender; diff --git a/yarn.lock b/yarn.lock index 4e7094d28f3..7b079ed23a8 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3181,6 +3181,13 @@ dependencies: "@octokit/openapi-types" "^18.0.0" +"@opentelemetry/api-logs@0.53.0": + version "0.53.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/api-logs/-/api-logs-0.53.0.tgz#c478cbd8120ec2547b64edfa03a552cfe42170be" + integrity sha512-8HArjKx+RaAI8uEIgcORbZIPklyh1YLjPSBus8hjRmvLi6DeFzgOcdZ7KwPabKj8mXF8dX0hyfAyGfycz0DbFw== + dependencies: + "@opentelemetry/api" "^1.0.0" + "@opentelemetry/api@^1.0.0": version "1.4.1" resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.4.1.tgz#ff22eb2e5d476fbc2450a196e40dd243cc20c28f" @@ -3191,6 +3198,13 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/api/-/api-1.3.0.tgz#27c6f776ac3c1c616651e506a89f438a0ed6a055" integrity sha512-YveTnGNsFFixTKJz09Oi4zYkiLT5af3WpZDu4aIUM7xX+2bHAkOJayFTVQd6zB8kkWPpbua4Ha6Ql00grdLlJQ== +"@opentelemetry/core@1.26.0": + version "1.26.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.26.0.tgz#7d84265aaa850ed0ca5813f97d831155be42b328" + integrity sha512-1iKxXXE8415Cdv0yjG3G6hQnB5eVEsJce3QaawX8SjDn0mAS0ZM8fAbZZJD4ajvhC15cePvosSCut404KrIIvQ== + dependencies: + "@opentelemetry/semantic-conventions" "1.27.0" + "@opentelemetry/core@1.9.1": version "1.9.1" resolved "https://registry.yarnpkg.com/@opentelemetry/core/-/core-1.9.1.tgz#e343337e1a7bf30e9a6aef3ef659b9b76379762a" @@ -3205,6 +3219,17 @@ dependencies: "@opentelemetry/semantic-conventions" "1.15.2" +"@opentelemetry/exporter-logs-otlp-http@0.53.0": + version "0.53.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-logs-otlp-http/-/exporter-logs-otlp-http-0.53.0.tgz#1b4a152ea427ec4581532880fd0d620cc559cb11" + integrity sha512-cSRKgD/n8rb+Yd+Cif6EnHEL/VZg1o8lEcEwFji1lwene6BdH51Zh3feAD9p2TyVoBKrl6Q9Zm2WltSp2k9gWQ== + dependencies: + "@opentelemetry/api-logs" "0.53.0" + "@opentelemetry/core" "1.26.0" + "@opentelemetry/otlp-exporter-base" "0.53.0" + "@opentelemetry/otlp-transformer" "0.53.0" + "@opentelemetry/sdk-logs" "0.53.0" + "@opentelemetry/exporter-prometheus@~0.35.0": version "0.35.1" resolved "https://registry.yarnpkg.com/@opentelemetry/exporter-prometheus/-/exporter-prometheus-0.35.1.tgz#c2fd5fcd17dac8106b33e5d354d98c47ebaa8804" @@ -3232,6 +3257,14 @@ dependencies: "@opentelemetry/core" "1.9.1" 
+"@opentelemetry/otlp-exporter-base@0.53.0": + version "0.53.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-exporter-base/-/otlp-exporter-base-0.53.0.tgz#dfe51874b869c687c3cb463b70cddda7de282762" + integrity sha512-UCWPreGQEhD6FjBaeDuXhiMf6kkBODF0ZQzrk/tuQcaVDJ+dDQ/xhJp192H9yWnKxVpEjFrSSLnpqmX4VwX+eA== + dependencies: + "@opentelemetry/core" "1.26.0" + "@opentelemetry/otlp-transformer" "0.53.0" + "@opentelemetry/otlp-transformer@0.35.1": version "0.35.1" resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-transformer/-/otlp-transformer-0.35.1.tgz#d4333b71324b83dbb1b0b3a4cfd769b3e214c6f9" @@ -3242,6 +3275,27 @@ "@opentelemetry/sdk-metrics" "1.9.1" "@opentelemetry/sdk-trace-base" "1.9.1" +"@opentelemetry/otlp-transformer@0.53.0": + version "0.53.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/otlp-transformer/-/otlp-transformer-0.53.0.tgz#55d435db5ed5cf56b99c010827294dd4921c45c2" + integrity sha512-rM0sDA9HD8dluwuBxLetUmoqGJKSAbWenwD65KY9iZhUxdBHRLrIdrABfNDP7aiTjcgK8XFyTn5fhDz7N+W6DA== + dependencies: + "@opentelemetry/api-logs" "0.53.0" + "@opentelemetry/core" "1.26.0" + "@opentelemetry/resources" "1.26.0" + "@opentelemetry/sdk-logs" "0.53.0" + "@opentelemetry/sdk-metrics" "1.26.0" + "@opentelemetry/sdk-trace-base" "1.26.0" + protobufjs "^7.3.0" + +"@opentelemetry/resources@1.26.0": + version "1.26.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.26.0.tgz#da4c7366018bd8add1f3aa9c91c6ac59fd503cef" + integrity sha512-CPNYchBE7MBecCSVy0HKpUISEeJOniWqcHaAHpmasZ3j9o6V3AyBzhRc90jdmemq0HOxDr6ylhUbDhBqqPpeNw== + dependencies: + "@opentelemetry/core" "1.26.0" + "@opentelemetry/semantic-conventions" "1.27.0" + "@opentelemetry/resources@1.9.1", "@opentelemetry/resources@~1.9.0": version "1.9.1" resolved "https://registry.yarnpkg.com/@opentelemetry/resources/-/resources-1.9.1.tgz#5ad3d80ba968a3a0e56498ce4bc82a6a01f2682f" @@ -3250,6 +3304,23 @@ "@opentelemetry/core" "1.9.1" "@opentelemetry/semantic-conventions" "1.9.1" +"@opentelemetry/sdk-logs@0.53.0": + version "0.53.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-logs/-/sdk-logs-0.53.0.tgz#ec8b69278c4e683c13c58ed4285a47c27f5799c6" + integrity sha512-dhSisnEgIj/vJZXZV6f6KcTnyLDx/VuQ6l3ejuZpMpPlh9S1qMHiZU9NMmOkVkwwHkMy3G6mEBwdP23vUZVr4g== + dependencies: + "@opentelemetry/api-logs" "0.53.0" + "@opentelemetry/core" "1.26.0" + "@opentelemetry/resources" "1.26.0" + +"@opentelemetry/sdk-metrics@1.26.0": + version "1.26.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-metrics/-/sdk-metrics-1.26.0.tgz#37bb0afb1d4447f50aab9cdd05db6f2d8b86103e" + integrity sha512-0SvDXmou/JjzSDOjUmetAAvcKQW6ZrvosU0rkbDGpXvvZN+pQF6JbK/Kd4hNdK4q/22yeruqvukXEJyySTzyTQ== + dependencies: + "@opentelemetry/core" "1.26.0" + "@opentelemetry/resources" "1.26.0" + "@opentelemetry/sdk-metrics@1.9.1", "@opentelemetry/sdk-metrics@~1.9.0": version "1.9.1" resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-metrics/-/sdk-metrics-1.9.1.tgz#babc162a81df9884c16b1e67c2dd26ab478f3080" @@ -3259,6 +3330,15 @@ "@opentelemetry/resources" "1.9.1" lodash.merge "4.6.2" +"@opentelemetry/sdk-trace-base@1.26.0": + version "1.26.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.26.0.tgz#0c913bc6d2cfafd901de330e4540952269ae579c" + integrity sha512-olWQldtvbK4v22ymrKLbIcBi9L2SpMO84sCPY54IVsJhP9fRsxJT194C/AVaAuJzLE30EdhhM1VmvVYR7az+cw== + dependencies: + "@opentelemetry/core" "1.26.0" + "@opentelemetry/resources" "1.26.0" + "@opentelemetry/semantic-conventions" 
"1.27.0" + "@opentelemetry/sdk-trace-base@1.9.1", "@opentelemetry/sdk-trace-base@~1.9.0": version "1.9.1" resolved "https://registry.yarnpkg.com/@opentelemetry/sdk-trace-base/-/sdk-trace-base-1.9.1.tgz#c349491b432a7e0ae7316f0b48b2d454d79d2b84" @@ -3273,6 +3353,11 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.15.2.tgz#3bafb5de3e20e841dff6cb3c66f4d6e9694c4241" integrity sha512-CjbOKwk2s+3xPIMcd5UNYQzsf+v94RczbdNix9/kQh38WiQkM90sUOi3if8eyHFgiBjBjhwXrA7W3ydiSQP9mw== +"@opentelemetry/semantic-conventions@1.27.0": + version "1.27.0" + resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz#1a857dcc95a5ab30122e04417148211e6f945e6c" + integrity sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg== + "@opentelemetry/semantic-conventions@1.9.1", "@opentelemetry/semantic-conventions@~1.9.0": version "1.9.1" resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.9.1.tgz#ad3367684a57879392513479e0a436cb2ac46dad" @@ -10175,7 +10260,7 @@ proto-list@~1.2.1: resolved "https://registry.yarnpkg.com/proto-list/-/proto-list-1.2.4.tgz#212d5bfe1318306a420f6402b8e26ff39647a849" integrity sha1-IS1b/hMYMGpCD2QCuOJv85ZHqEk= -protobufjs@^6.8.8, protobufjs@^7.2.4, protobufjs@^7.2.6: +protobufjs@^6.8.8, protobufjs@^7.2.4, protobufjs@^7.2.6, protobufjs@^7.3.0: version "7.4.0" resolved "https://registry.yarnpkg.com/protobufjs/-/protobufjs-7.4.0.tgz#7efe324ce9b3b61c82aae5de810d287bc08a248a" integrity sha512-mRUWCc3KUU4w1jU8sGxICXH/gNS94DvI1gxqDvBzhj1JpcsimQkYiOJfwsPUykUI5ZaspFbSgmBLER8IrQ3tqw== From f258b97086bdde610d2e6d609f4bf66d2de89e3b Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Tue, 22 Oct 2024 13:46:45 +0500 Subject: [PATCH 05/22] minor final slog changes and incorporate scaling considerations --- packages/telemetry/src/context-aware-slog.js | 48 +++++++++++++++++--- 1 file changed, 41 insertions(+), 7 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 96c533115b6..2cca7c36d6f 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -188,7 +188,7 @@ const getDatabaseInstance = async () => { restoreContext: () => { /** - * @type {Context | undefined} + * @type {Context & {key?: string} | undefined} */ // @ts-expect-error const row = databaseInstance @@ -196,6 +196,9 @@ const getDatabaseInstance = async () => { `SELECT * FROM '${DATABASE_TABLE_NAME}' WHERE key = 'trigger-context'`, ) .get(); + + delete row?.key; + return row || null; }, }; @@ -211,7 +214,7 @@ const stringify = data => * @param {{env: typeof process.env}} options */ export const makeSlogSender = async options => { - const { OTEL_EXPORTER_OTLP_ENDPOINT } = options.env; + const { CHAIN_ID, OTEL_EXPORTER_OTLP_ENDPOINT } = options.env; if (!OTEL_EXPORTER_OTLP_ENDPOINT) return console.warn( 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" envrionment variable', @@ -230,8 +233,27 @@ export const makeSlogSender = async options => { const db = await getDatabaseInstance(); /** @type Array */ - let [blockContext, crankContext, initContext, replayContext, triggerContext] = - [null, null, null, null, null]; + let [ + blockContext, + crankContext, + initContext, + lastPersistedTriggerContext, + replayContext, + triggerContext, + ] = [null, null, null, null, null, null]; + + /** + * 
@param {Context} context + */ + const persistContext = context => { + lastPersistedTriggerContext = context; + db.persistContext(context); + }; + + const restoreContext = () => { + if (lastPersistedTriggerContext) return lastPersistedTriggerContext; + return db.restoreContext(); + }; /** * @param {Slog} slog @@ -316,7 +338,7 @@ export const makeSlogSender = async options => { 'run.trigger.time': blockTime, 'run.trigger.type': 'timer', }; - db.persistContext(triggerContext); + persistContext(triggerContext); } if (!triggerContext) triggerContext = {}; @@ -363,7 +385,7 @@ export const makeSlogSender = async options => { 'run.trigger.txHash': txHash, 'run.trigger.type': triggerType, }; - db.persistContext(triggerContext); + persistContext(triggerContext); break; } case SLOG_TYPES.SWINGSET.COMMIT.FINISH: @@ -400,6 +422,17 @@ export const makeSlogSender = async options => { ...triggerContext, }; + if (finalSlog['block.height']) delete finalSlog.blockHeight; + if (finalSlog['block.time']) delete finalSlog.blockTime; + if (finalSlog['crank.deliveryNum']) delete finalSlog.deliveryNum; + if (finalSlog['crank.num']) delete finalSlog.crankNum; + if (finalSlog['crank.syscallNum']) delete finalSlog.syscallNum; + if (finalSlog['crank.type']) delete finalSlog.crankType; + if (finalSlog['crank.vatID']) delete finalSlog.vatID; + if (finalSlog['run.num']) delete finalSlog.runNum; + if (finalSlog['run.trigger.sender']) delete finalSlog.sender; + if (finalSlog['run.trigger.source']) delete finalSlog.source; + /** * Add any after report operations here * like resetting context data @@ -431,7 +464,7 @@ export const makeSlogSender = async options => { break; } case SLOG_TYPES.SWINGSET.END_BLOCK.START: { - triggerContext = db.restoreContext(); + triggerContext = restoreContext(); break; } default: @@ -440,6 +473,7 @@ export const makeSlogSender = async options => { if (afterProcessed || beforeProcessed) logger.emit({ + attributes: { 'chain-id': CHAIN_ID }, body: JSON.parse(stringify(finalSlog)), severityNumber: SeverityNumber.INFO, }); From 1d40420c459644cbf4b48b1c6678731c867ec760 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Tue, 22 Oct 2024 14:33:53 +0500 Subject: [PATCH 06/22] cleanup --- packages/telemetry/src/context-aware-slog.js | 55 ++++++++------------ 1 file changed, 21 insertions(+), 34 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 2cca7c36d6f..7c869bf0f3f 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -60,6 +60,7 @@ import { getResourceAttributes } from './index.js'; const DATABASE_FILE_PATH = process.env.SLOG_CONTEXT_DATABASE_FILE_PATH || '/state/slog-db.sqlite3'; const DATABASE_TABLE_NAME = 'context'; +const DATABASE_TRIGGER_CONTEXT_KEY_NAME = 'trigger-context'; const DATABASE_TYPES = { INTEGER: 'INTEGER', @@ -181,7 +182,8 @@ const getDatabaseInstance = async () => { databaseInstance .prepare( - `INSERT OR REPLACE INTO '${DATABASE_TABLE_NAME}' (key, ${keys.join(', ')}) VALUES ('trigger-context', ${values.join(', ')})`, + `INSERT OR REPLACE INTO '${DATABASE_TABLE_NAME}' (key, ${keys.join(', ')}) + VALUES ('${DATABASE_TRIGGER_CONTEXT_KEY_NAME}', ${values.join(', ')})`, ) .run(); }, @@ -193,7 +195,7 @@ const getDatabaseInstance = async () => { // @ts-expect-error const row = databaseInstance .prepare( - `SELECT * FROM '${DATABASE_TABLE_NAME}' WHERE key = 'trigger-context'`, + `SELECT * FROM '${DATABASE_TABLE_NAME}' WHERE key = 
'${DATABASE_TRIGGER_CONTEXT_KEY_NAME}'`, ) .get(); @@ -204,11 +206,6 @@ const getDatabaseInstance = async () => { }; }; -const stringify = data => - JSON.stringify(data, (_, value) => - typeof value === 'bigint' ? Number(value) : value, - ); - /** * * @param {{env: typeof process.env}} options @@ -261,11 +258,11 @@ export const makeSlogSender = async options => { const slogSender = async ({ monotime, time: timestamp, ...body }) => { await Promise.resolve(); - let [afterProcessed, beforeProcessed] = [true, true]; - - /** @type ReportedSlog */ - const extractedFields = { 'process.uptime': monotime, timestamp }; - const finalBody = { ...body }; + const finalBody = { + ...body, + 'process.uptime': monotime, + timestamp, + }; /** * Add any before report operations here @@ -400,7 +397,7 @@ export const makeSlogSender = async options => { } case SLOG_TYPES.SYSCALL: case SLOG_TYPES.SYSCALL_RESULT: { - extractedFields['crank.syscallNum'] = finalBody.syscallNum; + finalBody['crank.syscallNum'] = finalBody.syscallNum; delete finalBody.deliveryNum; delete finalBody.replay; @@ -409,29 +406,27 @@ export const makeSlogSender = async options => { break; } default: - beforeProcessed = false; + break; } const finalSlog = { ...blockContext, ...crankContext, - ...extractedFields, ...finalBody, ...initContext, ...replayContext, ...triggerContext, }; - if (finalSlog['block.height']) delete finalSlog.blockHeight; - if (finalSlog['block.time']) delete finalSlog.blockTime; - if (finalSlog['crank.deliveryNum']) delete finalSlog.deliveryNum; - if (finalSlog['crank.num']) delete finalSlog.crankNum; - if (finalSlog['crank.syscallNum']) delete finalSlog.syscallNum; - if (finalSlog['crank.type']) delete finalSlog.crankType; - if (finalSlog['crank.vatID']) delete finalSlog.vatID; - if (finalSlog['run.num']) delete finalSlog.runNum; - if (finalSlog['run.trigger.sender']) delete finalSlog.sender; - if (finalSlog['run.trigger.source']) delete finalSlog.source; + logger.emit({ + attributes: { 'chain-id': CHAIN_ID }, + body: JSON.parse( + JSON.stringify(finalSlog, (_, value) => + typeof value === 'bigint' ? 
Number(value) : value, + ), + ), + severityNumber: SeverityNumber.INFO, + }); /** * Add any after report operations here @@ -468,16 +463,8 @@ export const makeSlogSender = async options => { break; } default: - afterProcessed = false; + break; } - - if (afterProcessed || beforeProcessed) - logger.emit({ - attributes: { 'chain-id': CHAIN_ID }, - body: JSON.parse(stringify(finalSlog)), - severityNumber: SeverityNumber.INFO, - }); - else console.log(`Unexpected slog type: ${body.type}`); }; return slogSender; From 7d4a9b4d0fcf516d25af02447610b48538ec4a7c Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Tue, 22 Oct 2024 16:02:59 +0500 Subject: [PATCH 07/22] remove unnecessary async usage --- packages/telemetry/src/context-aware-slog.js | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 7c869bf0f3f..1bd200111fd 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -255,9 +255,7 @@ export const makeSlogSender = async options => { /** * @param {Slog} slog */ - const slogSender = async ({ monotime, time: timestamp, ...body }) => { - await Promise.resolve(); - + const slogSender = ({ monotime, time: timestamp, ...body }) => { const finalBody = { ...body, 'process.uptime': monotime, From 696d9370516dfe39e3f155d6aa4252d605dbaa6e Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Tue, 22 Oct 2024 20:30:23 +0500 Subject: [PATCH 08/22] move context data to labels --- packages/telemetry/src/context-aware-slog.js | 30 ++++++++++++-------- 1 file changed, 18 insertions(+), 12 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 1bd200111fd..be91aa9c7cd 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -51,10 +51,11 @@ import { getResourceAttributes } from './index.js'; * } Context * * @typedef {{ + * 'chain-id': string; * 'crank.syscallNum'?: Slog['syscallNum']; * 'process.uptime': Slog['monotime']; * timestamp: Slog['time']; - * } & Context & Partial} ReportedSlog + * } & Context} LogAttributes */ const DATABASE_FILE_PATH = @@ -206,6 +207,11 @@ const getDatabaseInstance = async () => { }; }; +const stringify = data => + JSON.stringify(data, (_, value) => + typeof value === 'bigint' ? 
Number(value) : value, + ); + /** * * @param {{env: typeof process.env}} options @@ -256,8 +262,11 @@ export const makeSlogSender = async options => { * @param {Slog} slog */ const slogSender = ({ monotime, time: timestamp, ...body }) => { - const finalBody = { - ...body, + const finalBody = { ...body }; + + /** @type {LogAttributes} */ + let logAttributes = { + 'chain-id': String(CHAIN_ID), 'process.uptime': monotime, timestamp, }; @@ -395,7 +404,7 @@ export const makeSlogSender = async options => { } case SLOG_TYPES.SYSCALL: case SLOG_TYPES.SYSCALL_RESULT: { - finalBody['crank.syscallNum'] = finalBody.syscallNum; + logAttributes['crank.syscallNum'] = finalBody.syscallNum; delete finalBody.deliveryNum; delete finalBody.replay; @@ -407,22 +416,19 @@ export const makeSlogSender = async options => { break; } - const finalSlog = { + /** @type {LogAttributes} */ + logAttributes = { ...blockContext, ...crankContext, - ...finalBody, ...initContext, + ...logAttributes, ...replayContext, ...triggerContext, }; logger.emit({ - attributes: { 'chain-id': CHAIN_ID }, - body: JSON.parse( - JSON.stringify(finalSlog, (_, value) => - typeof value === 'bigint' ? Number(value) : value, - ), - ), + attributes: JSON.parse(stringify(logAttributes)), + body: JSON.parse(stringify(finalBody)), severityNumber: SeverityNumber.INFO, }); From 43fec2d7beaf79b377b5a9a4032899aa79691894 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Wed, 23 Oct 2024 16:13:51 +0500 Subject: [PATCH 09/22] address mathieu comments --- packages/telemetry/src/context-aware-slog.js | 427 +++++++++---------- 1 file changed, 205 insertions(+), 222 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index be91aa9c7cd..aaf48b33e7a 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -1,34 +1,16 @@ -/* globals process */ +/* eslint-env node */ import { logs, SeverityNumber } from '@opentelemetry/api-logs'; +import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; import { Resource } from '@opentelemetry/resources'; import { LoggerProvider, SimpleLogRecordProcessor, } from '@opentelemetry/sdk-logs'; -import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; -import sqlite from 'better-sqlite3'; -import { closeSync, existsSync, openSync } from 'fs'; +import { readFile, writeFile } from 'fs'; +import { dirSync } from 'tmp'; import { getResourceAttributes } from './index.js'; /** - * @typedef {{ - * blockHeight?: number; - * blockTime?: number; - * crankNum?: bigint; - * crankType?: string; - * deliveryNum?: bigint; - * inboundNum?: string; - * monotime: number; - * replay?: boolean; - * runNum?: number; - * sender?: string; - * source?: string; - * syscallNum?: number; - * time: number; - * type: string; - * vatID?: string; - * }} Slog - * * @typedef {Partial<{ * 'block.height': Slog['blockHeight']; * 'block.time': Slog['blockTime']; @@ -39,7 +21,7 @@ import { getResourceAttributes } from './index.js'; * init: boolean; * replay: boolean; * 'run.id': string; - * 'run.num': string; + * 'run.num': string | null; * 'run.trigger.blockHeight': Slog['blockHeight']; * 'run.trigger.msgIdx': number; * 'run.trigger.sender': Slog['sender']; @@ -54,58 +36,35 @@ import { getResourceAttributes } from './index.js'; * 'chain-id': string; * 'crank.syscallNum'?: Slog['syscallNum']; * 'process.uptime': Slog['monotime']; - * timestamp: Slog['time']; * } & Context} LogAttributes + * + * @typedef {{env: typeof 
process.env; stateDir: string;}} Options + * + * @typedef {{ + * blockHeight?: number; + * blockTime?: number; + * crankNum?: bigint; + * crankType?: string; + * deliveryNum?: bigint; + * inboundNum?: string; + * monotime: number; + * replay?: boolean; + * runNum?: number; + * sender?: string; + * source?: string; + * syscallNum?: number; + * time: number; + * type: string; + * vatID?: string; + * }} Slog */ -const DATABASE_FILE_PATH = - process.env.SLOG_CONTEXT_DATABASE_FILE_PATH || '/state/slog-db.sqlite3'; -const DATABASE_TABLE_NAME = 'context'; -const DATABASE_TRIGGER_CONTEXT_KEY_NAME = 'trigger-context'; - -const DATABASE_TYPES = { - INTEGER: 'INTEGER', - TEXT: 'TEXT', -}; - -const DATABASE_SCHEMA = { - key: `${DATABASE_TYPES.TEXT} PRIMARY KEY`, - 'run.id': DATABASE_TYPES.TEXT, - 'run.num': DATABASE_TYPES.TEXT, - 'run.trigger.blockHeight': DATABASE_TYPES.INTEGER, - 'run.trigger.msgIdx': DATABASE_TYPES.INTEGER, - 'run.trigger.sender': DATABASE_TYPES.TEXT, - 'run.trigger.source': DATABASE_TYPES.TEXT, - 'run.trigger.time': DATABASE_TYPES.INTEGER, - 'run.trigger.txHash': DATABASE_TYPES.TEXT, - 'run.trigger.type': DATABASE_TYPES.TEXT, -}; +const FILE_ENCODING = 'utf8'; const SLOG_TYPES = { CLIST: 'clist', CONSOLE: 'console', - CRANK: { - RESULT: 'crank-result', - START: 'crank-start', - }, - DELIVER: 'deliver', - DELIVER_RESULT: 'deliver-result', - KERNEL: { - INIT: { - FINISH: 'kernel-init-finish', - START: 'kernel-init-start', - }, - }, - REPLAY: { - FINISH: 'finish-replay', - START: 'start-replay', - }, - // eslint-disable-next-line no-restricted-syntax - RUN: { - FINISH: 'cosmic-swingset-run-finish', - START: 'cosmic-swingset-run-start', - }, - SWINGSET: { + COSMIC_SWINGSET: { AFTER_COMMIT_STATS: 'cosmic-swingset-after-commit-stats', BEGIN_BLOCK: 'cosmic-swingset-begin-block', BOOTSTRAP_BLOCK: { @@ -122,88 +81,71 @@ const SLOG_TYPES = { FINISH: 'cosmic-swingset-end-block-finish', START: 'cosmic-swingset-end-block-start', }, + // eslint-disable-next-line no-restricted-syntax + RUN: { + FINISH: 'cosmic-swingset-run-finish', + START: 'cosmic-swingset-run-start', + }, + }, + CRANK: { + RESULT: 'crank-result', + START: 'crank-start', + }, + DELIVER: 'deliver', + DELIVER_RESULT: 'deliver-result', + KERNEL: { + INIT: { + FINISH: 'kernel-init-finish', + START: 'kernel-init-start', + }, + }, + REPLAY: { + FINISH: 'finish-replay', + START: 'start-replay', }, SYSCALL: 'syscall', SYSCALL_RESULT: 'syscall-result', }; -const getDatabaseInstance = async () => { - if (!existsSync(DATABASE_FILE_PATH)) - closeSync(openSync(DATABASE_FILE_PATH, 'w')); - - const databaseInstance = sqlite(DATABASE_FILE_PATH, { fileMustExist: true }); - databaseInstance.exec( - ` - CREATE TABLE IF NOT EXISTS '${DATABASE_TABLE_NAME}'( - ${Object.entries(DATABASE_SCHEMA) - .map(([name, type]) => `[${name}] ${type}`) - .join(',\n')} - ) - `, - ); - - /** - * @type {Array<{ cid: number; name: string; type: string; notnull: number; dflt_value: null; pk: number; }>} - */ - // @ts-expect-error - const rows = databaseInstance - .prepare(`PRAGMA table_info('${DATABASE_TABLE_NAME}')`) - .all(); - - const existingColumns = rows.map(row => row.name); - - Object.entries(DATABASE_SCHEMA).map( - ([name, type]) => - !existingColumns.includes(name) && - databaseInstance - .prepare(`ALTER TABLE '${DATABASE_TABLE_NAME}' ADD [${name}] ${type}`) - .run(), - ); +/** + * @param {string} filePath + */ +export const getContextFilePersistenceUtils = async filePath => { + console.log(`Using file ${filePath} for slogger`); return { /** - * * @param 
{Context} context + * @returns {Promise} */ - persistContext: context => { - /** @type {Array} */ - const keys = []; - const values = []; - - for (const [key, value] of Object.entries(context)) { - keys.push(`[${key}]`); - if (value === undefined) values.push(null); - else - values.push( - DATABASE_SCHEMA[key].startsWith(DATABASE_TYPES.TEXT) - ? `'${value}'` - : value, - ); - } + persistContext: context => + new Promise(resolve => + writeFile(filePath, JSON.stringify(context), FILE_ENCODING, err => { + if (err) console.warn('Error writing context to file: ', err); + resolve(null); + }), + ), - databaseInstance - .prepare( - `INSERT OR REPLACE INTO '${DATABASE_TABLE_NAME}' (key, ${keys.join(', ')}) - VALUES ('${DATABASE_TRIGGER_CONTEXT_KEY_NAME}', ${values.join(', ')})`, - ) - .run(); - }, - - restoreContext: () => { - /** - * @type {Context & {key?: string} | undefined} - */ - // @ts-expect-error - const row = databaseInstance - .prepare( - `SELECT * FROM '${DATABASE_TABLE_NAME}' WHERE key = '${DATABASE_TRIGGER_CONTEXT_KEY_NAME}'`, - ) - .get(); - - delete row?.key; - - return row || null; - }, + /** + * @returns {Promise} + */ + restoreContext: () => + new Promise(resolve => + readFile(filePath, FILE_ENCODING, (err, data) => { + if (!err) { + try { + resolve(JSON.parse(data)); + } catch (parseErr) { + err = parseErr; + } + } + + if (err) { + console.warn('Error reading context from file: ', err); + return resolve(null); + } + }), + ), }; }; @@ -213,27 +155,12 @@ const stringify = data => ); /** - * - * @param {{env: typeof process.env}} options + * @param {(log: import('@opentelemetry/api-logs').LogRecord) => void} emitLog + * @param {Options} options + * @param {Partial<{ persistContext: (context: Context) => Promise; restoreContext: () => Promise; }>} persistenceUtils */ -export const makeSlogSender = async options => { - const { CHAIN_ID, OTEL_EXPORTER_OTLP_ENDPOINT } = options.env; - if (!OTEL_EXPORTER_OTLP_ENDPOINT) - return console.warn( - 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" envrionment variable', - ); - - const loggerProvider = new LoggerProvider({ - resource: new Resource(getResourceAttributes(options)), - }); - loggerProvider.addLogRecordProcessor( - new SimpleLogRecordProcessor(new OTLPLogExporter({ keepAlive: true })), - ); - - logs.setGlobalLoggerProvider(loggerProvider); - const logger = logs.getLogger('default'); - - const db = await getDatabaseInstance(); +export const logCreator = (emitLog, options, persistenceUtils) => { + const { CHAIN_ID } = options.env; /** @type Array */ let [ @@ -250,26 +177,28 @@ export const makeSlogSender = async options => { */ const persistContext = context => { lastPersistedTriggerContext = context; - db.persistContext(context); + return persistenceUtils.persistContext?.(context); }; - const restoreContext = () => { - if (lastPersistedTriggerContext) return lastPersistedTriggerContext; - return db.restoreContext(); + const restoreContext = async () => { + await Promise.resolve(); + + if (!lastPersistedTriggerContext) + lastPersistedTriggerContext = + (await persistenceUtils.restoreContext?.()) || null; + return lastPersistedTriggerContext; }; /** * @param {Slog} slog */ - const slogSender = ({ monotime, time: timestamp, ...body }) => { + const slogSender = async ({ monotime, time: timestamp, ...body }) => { + await Promise.resolve(); + const finalBody = { ...body }; - /** @type {LogAttributes} */ - let logAttributes = { - 'chain-id': String(CHAIN_ID), - 'process.uptime': 
monotime, - timestamp, - }; + /** @type {{'crank.syscallNum'?: Slog['syscallNum']}} */ + const eventLogAttributes = {}; /** * Add any before report operations here @@ -303,7 +232,7 @@ export const makeSlogSender = async options => { 'crank.vatID': finalBody.vatID, }; } else { - assert(!!crankContext); + assert(!!crankContext && !finalBody.replay); crankContext = { ...crankContext, 'crank.deliveryNum': finalBody.deliveryNum, @@ -330,81 +259,87 @@ export const makeSlogSender = async options => { replayContext = { replay: true }; break; } - // eslint-disable-next-line no-restricted-syntax - case SLOG_TYPES.RUN.START: { - if (!(triggerContext || finalBody.runNum === 0)) { - const blockTime = finalBody.blockTime || blockContext?.['block.time']; - - assert(blockTime); - // TBD: add explicit slog events of both timer poll and install bundle - triggerContext = { - 'run.id': `timer-${finalBody.blockHeight}`, - 'run.trigger.time': blockTime, - 'run.trigger.type': 'timer', - }; - persistContext(triggerContext); - } - - if (!triggerContext) triggerContext = {}; - triggerContext = { - ...triggerContext, - 'run.num': `${finalBody.runNum}`, - }; - + case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { + assert(!!blockContext); break; } - case SLOG_TYPES.SWINGSET.BEGIN_BLOCK: { + case SLOG_TYPES.COSMIC_SWINGSET.BEGIN_BLOCK: { blockContext = { 'block.height': finalBody.blockHeight, 'block.time': finalBody.blockTime, }; break; } - case SLOG_TYPES.SWINGSET.BOOTSTRAP_BLOCK.START: { + case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: { + assert(!!blockContext); + break; + } + case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.START: { blockContext = { 'block.height': finalBody.blockHeight || 0, 'block.time': finalBody.blockTime, }; triggerContext = { + 'run.num': null, 'run.id': `bootstrap-${finalBody.blockTime}`, - 'run.trigger.time': finalBody.blockTime, 'run.trigger.type': 'bootstrap', + 'run.trigger.time': finalBody.blockTime, }; break; } - case SLOG_TYPES.SWINGSET.BRIDGE_INBOUND: - case SLOG_TYPES.SWINGSET.DELIVER_INBOUND: { + case SLOG_TYPES.COSMIC_SWINGSET.BRIDGE_INBOUND: + case SLOG_TYPES.COSMIC_SWINGSET.DELIVER_INBOUND: { const [blockHeight, txHash, msgIdx] = ( finalBody.inboundNum || '' ).split('-'); - const triggerType = body.type.split('-')[2]; + const [, triggerType] = + /cosmic-swingset-([^-]+)-inbound/.exec(body.type) || []; triggerContext = { + 'run.num': null, 'run.id': `${triggerType}-${finalBody.inboundNum}`, - 'run.trigger.blockHeight': Number(blockHeight), - 'run.trigger.msgIdx': Number(msgIdx), - 'run.trigger.sender': finalBody.sender, + 'run.trigger.type': triggerType, 'run.trigger.source': finalBody.source, - 'run.trigger.time': finalBody.blockTime, + 'run.trigger.sender': finalBody.sender, + 'run.trigger.blockHeight': Number(blockHeight), 'run.trigger.txHash': txHash, - 'run.trigger.type': triggerType, + 'run.trigger.msgIdx': Number(msgIdx), }; - persistContext(triggerContext); + await persistContext(triggerContext); break; } - case SLOG_TYPES.SWINGSET.COMMIT.FINISH: - case SLOG_TYPES.SWINGSET.COMMIT.START: - case SLOG_TYPES.SWINGSET.END_BLOCK.FINISH: { + case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.FINISH: + case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.START: + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.FINISH: { assert(!!blockContext); break; } - case SLOG_TYPES.SWINGSET.END_BLOCK.START: { + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { assert(!!blockContext); break; } + // eslint-disable-next-line no-restricted-syntax + case SLOG_TYPES.COSMIC_SWINGSET.RUN.START: { + if (!triggerContext 
&& finalBody.runNum !== 0) { + assert(!!blockContext); + // TBD: add explicit slog events of both timer poll and install bundle + triggerContext = { + 'run.num': null, + 'run.id': `timer-${finalBody.blockHeight}`, + 'run.trigger.type': 'timer', + 'run.trigger.time': blockContext['block.time'], + }; + await persistContext(triggerContext); + } + + if (!triggerContext) triggerContext = {}; + triggerContext['run.num'] = `${finalBody.runNum}`; + + break; + } case SLOG_TYPES.SYSCALL: case SLOG_TYPES.SYSCALL_RESULT: { - logAttributes['crank.syscallNum'] = finalBody.syscallNum; + eventLogAttributes['crank.syscallNum'] = finalBody.syscallNum; delete finalBody.deliveryNum; delete finalBody.replay; @@ -413,22 +348,30 @@ export const makeSlogSender = async options => { break; } default: + // All other log types are logged as is (using existing contexts) without + // any change to the slogs or any contributions to the contexts. This also + // means that any unexpected slog type will pass through. To fix that, add + // all remaining cases of expected slog types above with a simple break + // statement and log a warning here break; } /** @type {LogAttributes} */ - logAttributes = { - ...blockContext, - ...crankContext, - ...initContext, - ...logAttributes, - ...replayContext, - ...triggerContext, + const logAttributes = { + 'chain-id': String(CHAIN_ID), + 'process.uptime': monotime, + ...initContext, // Optional prelude + ...blockContext, // Block is the first level of execution nesting + ...triggerContext, // run and trigger info is nested next + ...crankContext, // Finally cranks are the last level of nesting + ...replayContext, // Replay is a substitute for crank context during vat page in + ...eventLogAttributes, }; - logger.emit({ + emitLog({ attributes: JSON.parse(stringify(logAttributes)), body: JSON.parse(stringify(finalBody)), + observedTimestamp: timestamp, severityNumber: SeverityNumber.INFO, }); @@ -450,20 +393,20 @@ export const makeSlogSender = async options => { break; } // eslint-disable-next-line no-restricted-syntax - case SLOG_TYPES.RUN.FINISH: { + case SLOG_TYPES.COSMIC_SWINGSET.RUN.FINISH: { triggerContext = null; break; } - case SLOG_TYPES.SWINGSET.AFTER_COMMIT_STATS: { + case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { blockContext = null; break; } - case SLOG_TYPES.SWINGSET.BOOTSTRAP_BLOCK.FINISH: { + case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: { blockContext = null; break; } - case SLOG_TYPES.SWINGSET.END_BLOCK.START: { - triggerContext = restoreContext(); + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { + triggerContext = await restoreContext(); break; } default: @@ -473,3 +416,43 @@ export const makeSlogSender = async options => { return slogSender; }; + +/** + * @param {Options} options + */ +export const makeSlogSender = async options => { + const { OTEL_EXPORTER_OTLP_ENDPOINT } = options.env; + if (!OTEL_EXPORTER_OTLP_ENDPOINT) + return console.warn( + 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" envrionment variable', + ); + + const loggerProvider = new LoggerProvider({ + resource: new Resource(getResourceAttributes(options)), + }); + const logRecordProcessor = new SimpleLogRecordProcessor( + new OTLPLogExporter({ keepAlive: true }), + ); + + loggerProvider.addLogRecordProcessor(logRecordProcessor); + + logs.setGlobalLoggerProvider(loggerProvider); + const logger = logs.getLogger('default'); + + const persistenceUtils = await getContextFilePersistenceUtils( + 
process.env.SLOG_CONTEXT_FILE_PATH || + `${options.stateDir || dirSync({ prefix: 'slog-context' }).name}/slog-context.json`, + ); + + const slogSender = logCreator( + logRecord => logger.emit(logRecord), + options, + persistenceUtils, + ); + + return Object.assign(slogSender, { + forceFlush: () => logRecordProcessor.forceFlush(), + shutdown: () => + logRecordProcessor.forceFlush().then(logRecordProcessor.shutdown), + }); +}; From 00bc5e45fc487857669ee4a11b03062c1c8fb514 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Wed, 23 Oct 2024 16:39:56 +0500 Subject: [PATCH 10/22] yarn --- packages/telemetry/src/context-aware-slog.js | 3 ++- yarn.lock | 11 +---------- 2 files changed, 3 insertions(+), 11 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index aaf48b33e7a..4433496e8d7 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -36,6 +36,7 @@ import { getResourceAttributes } from './index.js'; * 'chain-id': string; * 'crank.syscallNum'?: Slog['syscallNum']; * 'process.uptime': Slog['monotime']; + * timestamp: Slog['time']; * } & Context} LogAttributes * * @typedef {{env: typeof process.env; stateDir: string;}} Options @@ -366,12 +367,12 @@ export const logCreator = (emitLog, options, persistenceUtils) => { ...crankContext, // Finally cranks are the last level of nesting ...replayContext, // Replay is a substitute for crank context during vat page in ...eventLogAttributes, + timestamp, }; emitLog({ attributes: JSON.parse(stringify(logAttributes)), body: JSON.parse(stringify(finalBody)), - observedTimestamp: timestamp, severityNumber: SeverityNumber.INFO, }); diff --git a/yarn.lock b/yarn.lock index 2991d4322ab..e53046ad0be 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3214,25 +3214,16 @@ resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.15.2.tgz#3bafb5de3e20e841dff6cb3c66f4d6e9694c4241" integrity sha512-CjbOKwk2s+3xPIMcd5UNYQzsf+v94RczbdNix9/kQh38WiQkM90sUOi3if8eyHFgiBjBjhwXrA7W3ydiSQP9mw== -<<<<<<< HEAD -"@opentelemetry/semantic-conventions@1.27.0": +"@opentelemetry/semantic-conventions@1.27.0", "@opentelemetry/semantic-conventions@~1.27.0": version "1.27.0" resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz#1a857dcc95a5ab30122e04417148211e6f945e6c" integrity sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg== -"@opentelemetry/semantic-conventions@1.9.1", "@opentelemetry/semantic-conventions@~1.9.0": -======= "@opentelemetry/semantic-conventions@1.9.1": ->>>>>>> master version "1.9.1" resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.9.1.tgz#ad3367684a57879392513479e0a436cb2ac46dad" integrity sha512-oPQdbFDmZvjXk5ZDoBGXG8B4tSB/qW5vQunJWQMFUBp7Xe8O1ByPANueJ+Jzg58esEBegyyxZ7LRmfJr7kFcFg== -"@opentelemetry/semantic-conventions@~1.27.0": - version "1.27.0" - resolved "https://registry.yarnpkg.com/@opentelemetry/semantic-conventions/-/semantic-conventions-1.27.0.tgz#1a857dcc95a5ab30122e04417148211e6f945e6c" - integrity sha512-sAay1RrB+ONOem0OZanAR1ZI/k7yDpnOQSQmTMuGImUQb2y8EbSaCJ94FQluM74xoU03vlb2d2U90hZluL6nQg== - "@parcel/watcher@2.0.4": version "2.0.4" resolved "https://registry.yarnpkg.com/@parcel/watcher/-/watcher-2.0.4.tgz#f300fef4cc38008ff4b8c29d92588eced3ce014b" From 14deff754f15e1af8f063741df2f9053f924b6cc Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Wed, 23 
Oct 2024 18:10:17 +0500 Subject: [PATCH 11/22] remove async usage --- packages/telemetry/src/context-aware-slog.js | 69 ++++++++------------ 1 file changed, 27 insertions(+), 42 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 4433496e8d7..7445235a3ff 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -6,7 +6,7 @@ import { LoggerProvider, SimpleLogRecordProcessor, } from '@opentelemetry/sdk-logs'; -import { readFile, writeFile } from 'fs'; +import { readFileSync, writeFileSync } from 'fs'; import { dirSync } from 'tmp'; import { getResourceAttributes } from './index.js'; @@ -111,42 +111,32 @@ const SLOG_TYPES = { /** * @param {string} filePath */ -export const getContextFilePersistenceUtils = async filePath => { +export const getContextFilePersistenceUtils = filePath => { console.log(`Using file ${filePath} for slogger`); return { /** * @param {Context} context - * @returns {Promise} */ - persistContext: context => - new Promise(resolve => - writeFile(filePath, JSON.stringify(context), FILE_ENCODING, err => { - if (err) console.warn('Error writing context to file: ', err); - resolve(null); - }), - ), + persistContext: context => { + try { + writeFileSync(filePath, JSON.stringify(context), FILE_ENCODING); + } catch (err) { + console.warn('Error writing context to file: ', err); + } + }, /** - * @returns {Promise} + * @returns {Context | null} */ - restoreContext: () => - new Promise(resolve => - readFile(filePath, FILE_ENCODING, (err, data) => { - if (!err) { - try { - resolve(JSON.parse(data)); - } catch (parseErr) { - err = parseErr; - } - } - - if (err) { - console.warn('Error reading context from file: ', err); - return resolve(null); - } - }), - ), + restoreContext: () => { + try { + return JSON.parse(readFileSync(filePath, FILE_ENCODING)); + } catch (parseErr) { + console.warn('Error reading context from file: ', parseErr); + return null; + } + }, }; }; @@ -158,7 +148,7 @@ const stringify = data => /** * @param {(log: import('@opentelemetry/api-logs').LogRecord) => void} emitLog * @param {Options} options - * @param {Partial<{ persistContext: (context: Context) => Promise; restoreContext: () => Promise; }>} persistenceUtils + * @param {Partial<{ persistContext: (context: Context) => void; restoreContext: () => Context | null; }>} persistenceUtils */ export const logCreator = (emitLog, options, persistenceUtils) => { const { CHAIN_ID } = options.env; @@ -181,21 +171,16 @@ export const logCreator = (emitLog, options, persistenceUtils) => { return persistenceUtils.persistContext?.(context); }; - const restoreContext = async () => { - await Promise.resolve(); - + const restoreContext = () => { if (!lastPersistedTriggerContext) - lastPersistedTriggerContext = - (await persistenceUtils.restoreContext?.()) || null; + lastPersistedTriggerContext = persistenceUtils.restoreContext?.() || null; return lastPersistedTriggerContext; }; /** * @param {Slog} slog */ - const slogSender = async ({ monotime, time: timestamp, ...body }) => { - await Promise.resolve(); - + const slogSender = ({ monotime, time: timestamp, ...body }) => { const finalBody = { ...body }; /** @type {{'crank.syscallNum'?: Slog['syscallNum']}} */ @@ -306,7 +291,7 @@ export const logCreator = (emitLog, options, persistenceUtils) => { 'run.trigger.txHash': txHash, 'run.trigger.msgIdx': Number(msgIdx), }; - await persistContext(triggerContext); + persistContext(triggerContext); break; } case 
SLOG_TYPES.COSMIC_SWINGSET.COMMIT.FINISH: @@ -330,7 +315,7 @@ export const logCreator = (emitLog, options, persistenceUtils) => { 'run.trigger.type': 'timer', 'run.trigger.time': blockContext['block.time'], }; - await persistContext(triggerContext); + persistContext(triggerContext); } if (!triggerContext) triggerContext = {}; @@ -407,7 +392,7 @@ export const logCreator = (emitLog, options, persistenceUtils) => { break; } case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { - triggerContext = await restoreContext(); + triggerContext = restoreContext(); break; } default: @@ -421,7 +406,7 @@ export const logCreator = (emitLog, options, persistenceUtils) => { /** * @param {Options} options */ -export const makeSlogSender = async options => { +export const makeSlogSender = options => { const { OTEL_EXPORTER_OTLP_ENDPOINT } = options.env; if (!OTEL_EXPORTER_OTLP_ENDPOINT) return console.warn( @@ -440,7 +425,7 @@ export const makeSlogSender = async options => { logs.setGlobalLoggerProvider(loggerProvider); const logger = logs.getLogger('default'); - const persistenceUtils = await getContextFilePersistenceUtils( + const persistenceUtils = getContextFilePersistenceUtils( process.env.SLOG_CONTEXT_FILE_PATH || `${options.stateDir || dirSync({ prefix: 'slog-context' }).name}/slog-context.json`, ); From 638d5363849736328efb6a31e89e3ef87ed9c517 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Thu, 24 Oct 2024 14:04:45 +0500 Subject: [PATCH 12/22] address mathieu comments 2.0 --- packages/telemetry/src/context-aware-slog.js | 271 ++++++------------ .../telemetry/src/otel-context-aware-slog.js | 99 +++++++ 2 files changed, 187 insertions(+), 183 deletions(-) create mode 100644 packages/telemetry/src/otel-context-aware-slog.js diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 7445235a3ff..7e52cb4b6e1 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -1,14 +1,4 @@ /* eslint-env node */ -import { logs, SeverityNumber } from '@opentelemetry/api-logs'; -import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; -import { Resource } from '@opentelemetry/resources'; -import { - LoggerProvider, - SimpleLogRecordProcessor, -} from '@opentelemetry/sdk-logs'; -import { readFileSync, writeFileSync } from 'fs'; -import { dirSync } from 'tmp'; -import { getResourceAttributes } from './index.js'; /** * @typedef {Partial<{ @@ -39,8 +29,6 @@ import { getResourceAttributes } from './index.js'; * timestamp: Slog['time']; * } & Context} LogAttributes * - * @typedef {{env: typeof process.env; stateDir: string;}} Options - * * @typedef {{ * blockHeight?: number; * blockTime?: number; @@ -60,8 +48,6 @@ import { getResourceAttributes } from './index.js'; * }} Slog */ -const FILE_ENCODING = 'utf8'; - const SLOG_TYPES = { CLIST: 'clist', CONSOLE: 'console', @@ -109,49 +95,11 @@ const SLOG_TYPES = { }; /** - * @param {string} filePath + * @param {(log: { attributes: LogAttributes, body: Partial }) => void} emitLog + * @param {Partial<{ persistContext: (context: Context) => void; restoreContext: () => Context | null; }>?} persistenceUtils */ -export const getContextFilePersistenceUtils = filePath => { - console.log(`Using file ${filePath} for slogger`); - - return { - /** - * @param {Context} context - */ - persistContext: context => { - try { - writeFileSync(filePath, JSON.stringify(context), FILE_ENCODING); - } catch (err) { - console.warn('Error writing context to file: ', err); - } - }, - - 
/** - * @returns {Context | null} - */ - restoreContext: () => { - try { - return JSON.parse(readFileSync(filePath, FILE_ENCODING)); - } catch (parseErr) { - console.warn('Error reading context from file: ', parseErr); - return null; - } - }, - }; -}; - -const stringify = data => - JSON.stringify(data, (_, value) => - typeof value === 'bigint' ? Number(value) : value, - ); - -/** - * @param {(log: import('@opentelemetry/api-logs').LogRecord) => void} emitLog - * @param {Options} options - * @param {Partial<{ persistContext: (context: Context) => void; restoreContext: () => Context | null; }>} persistenceUtils - */ -export const logCreator = (emitLog, options, persistenceUtils) => { - const { CHAIN_ID } = options.env; +export const logCreator = (emitLog, persistenceUtils = {}) => { + const { CHAIN_ID } = process.env; /** @type Array */ let [ @@ -168,19 +116,20 @@ export const logCreator = (emitLog, options, persistenceUtils) => { */ const persistContext = context => { lastPersistedTriggerContext = context; - return persistenceUtils.persistContext?.(context); + return persistenceUtils?.persistContext?.(context); }; const restoreContext = () => { if (!lastPersistedTriggerContext) - lastPersistedTriggerContext = persistenceUtils.restoreContext?.() || null; + lastPersistedTriggerContext = + persistenceUtils?.restoreContext?.() || null; return lastPersistedTriggerContext; }; /** * @param {Slog} slog */ - const slogSender = ({ monotime, time: timestamp, ...body }) => { + const slogProcessor = ({ monotime, time: timestamp, ...body }) => { const finalBody = { ...body }; /** @type {{'crank.syscallNum'?: Slog['syscallNum']}} */ @@ -191,64 +140,10 @@ export const logCreator = (emitLog, options, persistenceUtils) => { * like setting context data */ switch (body.type) { - case SLOG_TYPES.CONSOLE: { - delete finalBody.crankNum; - delete finalBody.deliveryNum; - - break; - } - case SLOG_TYPES.CLIST: { - assert(!!crankContext); - crankContext['crank.vatID'] = finalBody.vatID; - break; - } - case SLOG_TYPES.CRANK.START: { - crankContext = { - 'crank.num': finalBody.crankNum, - 'crank.type': finalBody.crankType, - }; - break; - } - case SLOG_TYPES.DELIVER: { - if (replayContext) { - assert(finalBody.replay); - replayContext = { - ...replayContext, - 'crank.deliveryNum': finalBody.deliveryNum, - 'crank.vatID': finalBody.vatID, - }; - } else { - assert(!!crankContext && !finalBody.replay); - crankContext = { - ...crankContext, - 'crank.deliveryNum': finalBody.deliveryNum, - 'crank.vatID': finalBody.vatID, - }; - } - - delete finalBody.deliveryNum; - delete finalBody.replay; - - break; - } - case SLOG_TYPES.DELIVER_RESULT: { - delete finalBody.deliveryNum; - delete finalBody.replay; - - break; - } case SLOG_TYPES.KERNEL.INIT.START: { initContext = { init: true }; break; } - case SLOG_TYPES.REPLAY.START: { - replayContext = { replay: true }; - break; - } - case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { - assert(!!blockContext); - break; - } case SLOG_TYPES.COSMIC_SWINGSET.BEGIN_BLOCK: { blockContext = { 'block.height': finalBody.blockHeight, @@ -256,10 +151,20 @@ export const logCreator = (emitLog, options, persistenceUtils) => { }; break; } - case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: { + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { + assert(!!blockContext); + break; + } + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.FINISH: + case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.START: + case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.FINISH: { assert(!!blockContext); break; } + case 
SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { + assert(!!blockContext && !triggerContext); + break; + } case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.START: { blockContext = { 'block.height': finalBody.blockHeight || 0, @@ -273,6 +178,10 @@ export const logCreator = (emitLog, options, persistenceUtils) => { }; break; } + case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: { + assert(!!blockContext && !triggerContext); + break; + } case SLOG_TYPES.COSMIC_SWINGSET.BRIDGE_INBOUND: case SLOG_TYPES.COSMIC_SWINGSET.DELIVER_INBOUND: { const [blockHeight, txHash, msgIdx] = ( @@ -294,16 +203,6 @@ export const logCreator = (emitLog, options, persistenceUtils) => { persistContext(triggerContext); break; } - case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.FINISH: - case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.START: - case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.FINISH: { - assert(!!blockContext); - break; - } - case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { - assert(!!blockContext); - break; - } // eslint-disable-next-line no-restricted-syntax case SLOG_TYPES.COSMIC_SWINGSET.RUN.START: { if (!triggerContext && finalBody.runNum !== 0) { @@ -323,6 +222,50 @@ export const logCreator = (emitLog, options, persistenceUtils) => { break; } + case SLOG_TYPES.CRANK.START: { + crankContext = { + 'crank.num': finalBody.crankNum, + 'crank.type': finalBody.crankType, + }; + break; + } + case SLOG_TYPES.CLIST: { + assert(!!crankContext); + crankContext['crank.vatID'] = finalBody.vatID; + break; + } + case SLOG_TYPES.REPLAY.START: { + replayContext = { replay: true }; + break; + } + case SLOG_TYPES.DELIVER: { + if (replayContext) { + assert(finalBody.replay); + replayContext = { + ...replayContext, + 'crank.deliveryNum': finalBody.deliveryNum, + 'crank.vatID': finalBody.vatID, + }; + } else { + assert(!!crankContext && !finalBody.replay); + crankContext = { + ...crankContext, + 'crank.deliveryNum': finalBody.deliveryNum, + 'crank.vatID': finalBody.vatID, + }; + } + + delete finalBody.deliveryNum; + delete finalBody.replay; + + break; + } + case SLOG_TYPES.DELIVER_RESULT: { + delete finalBody.deliveryNum; + delete finalBody.replay; + + break; + } case SLOG_TYPES.SYSCALL: case SLOG_TYPES.SYSCALL_RESULT: { eventLogAttributes['crank.syscallNum'] = finalBody.syscallNum; @@ -333,6 +276,12 @@ export const logCreator = (emitLog, options, persistenceUtils) => { break; } + case SLOG_TYPES.CONSOLE: { + delete finalBody.crankNum; + delete finalBody.deliveryNum; + + break; + } default: // All other log types are logged as is (using existing contexts) without // any change to the slogs or any contributions to the contexts. 
This also @@ -355,32 +304,19 @@ export const logCreator = (emitLog, options, persistenceUtils) => { timestamp, }; - emitLog({ - attributes: JSON.parse(stringify(logAttributes)), - body: JSON.parse(stringify(finalBody)), - severityNumber: SeverityNumber.INFO, - }); + emitLog({ attributes: logAttributes, body: finalBody }); /** * Add any after report operations here * like resetting context data */ switch (body.type) { - case SLOG_TYPES.CRANK.RESULT: { - crankContext = null; - break; - } case SLOG_TYPES.KERNEL.INIT.FINISH: { initContext = null; break; } - case SLOG_TYPES.REPLAY.FINISH: { - replayContext = null; - break; - } - // eslint-disable-next-line no-restricted-syntax - case SLOG_TYPES.COSMIC_SWINGSET.RUN.FINISH: { - triggerContext = null; + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { + triggerContext = restoreContext(); break; } case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { @@ -391,8 +327,17 @@ export const logCreator = (emitLog, options, persistenceUtils) => { blockContext = null; break; } - case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { - triggerContext = restoreContext(); + // eslint-disable-next-line no-restricted-syntax + case SLOG_TYPES.COSMIC_SWINGSET.RUN.FINISH: { + triggerContext = null; + break; + } + case SLOG_TYPES.CRANK.RESULT: { + crankContext = null; + break; + } + case SLOG_TYPES.REPLAY.FINISH: { + replayContext = null; break; } default: @@ -400,45 +345,5 @@ export const logCreator = (emitLog, options, persistenceUtils) => { } }; - return slogSender; -}; - -/** - * @param {Options} options - */ -export const makeSlogSender = options => { - const { OTEL_EXPORTER_OTLP_ENDPOINT } = options.env; - if (!OTEL_EXPORTER_OTLP_ENDPOINT) - return console.warn( - 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" envrionment variable', - ); - - const loggerProvider = new LoggerProvider({ - resource: new Resource(getResourceAttributes(options)), - }); - const logRecordProcessor = new SimpleLogRecordProcessor( - new OTLPLogExporter({ keepAlive: true }), - ); - - loggerProvider.addLogRecordProcessor(logRecordProcessor); - - logs.setGlobalLoggerProvider(loggerProvider); - const logger = logs.getLogger('default'); - - const persistenceUtils = getContextFilePersistenceUtils( - process.env.SLOG_CONTEXT_FILE_PATH || - `${options.stateDir || dirSync({ prefix: 'slog-context' }).name}/slog-context.json`, - ); - - const slogSender = logCreator( - logRecord => logger.emit(logRecord), - options, - persistenceUtils, - ); - - return Object.assign(slogSender, { - forceFlush: () => logRecordProcessor.forceFlush(), - shutdown: () => - logRecordProcessor.forceFlush().then(logRecordProcessor.shutdown), - }); + return slogProcessor; }; diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js new file mode 100644 index 00000000000..4f03fe31326 --- /dev/null +++ b/packages/telemetry/src/otel-context-aware-slog.js @@ -0,0 +1,99 @@ +/* eslint-env node */ +import { logs, SeverityNumber } from '@opentelemetry/api-logs'; +import { OTLPLogExporter } from '@opentelemetry/exporter-logs-otlp-http'; +import { Resource } from '@opentelemetry/resources'; +import { + LoggerProvider, + SimpleLogRecordProcessor, +} from '@opentelemetry/sdk-logs'; +import { readFileSync, writeFileSync } from 'fs'; +import { getResourceAttributes } from './index.js'; +import { logCreator } from './context-aware-slog.js'; + +/** + * @typedef {import('./index.js').MakeSlogSenderOptions} Options + */ + +const 
DEFAULT_CONTEXT_FILE = 'slog-context.json'; +const FILE_ENCODING = 'utf8'; + +const stringify = data => + JSON.stringify(data, (_, value) => + // eslint-disable-next-line valid-typeof + typeof value === BigInt.name.toLowerCase() ? Number(value) : value, + ); + +/** + * @param {string} filePath + */ +export const getContextFilePersistenceUtils = filePath => { + console.log(`Using file ${filePath} for slogger context`); + + return { + /** + * @param {import('./context-aware-slog.js').Context} context + */ + persistContext: context => { + try { + writeFileSync(filePath, stringify(context), FILE_ENCODING); + } catch (err) { + console.warn('Error writing context to file: ', err); + } + }, + + /** + * @returns {import('./context-aware-slog.js').Context | null} + */ + restoreContext: () => { + try { + return JSON.parse(readFileSync(filePath, FILE_ENCODING)); + } catch (parseErr) { + console.warn('Error reading context from file: ', parseErr); + return null; + } + }, + }; +}; + +/** + * @param {Options} options + */ +export const makeSlogSender = async options => { + const { OTEL_EXPORTER_OTLP_ENDPOINT } = options.env || {}; + if (!(OTEL_EXPORTER_OTLP_ENDPOINT && options.stateDir)) + return console.warn( + 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" and "stateDir"', + ); + + const loggerProvider = new LoggerProvider({ + resource: new Resource(getResourceAttributes(options)), + }); + const logRecordProcessor = new SimpleLogRecordProcessor( + new OTLPLogExporter({ keepAlive: true }), + ); + + loggerProvider.addLogRecordProcessor(logRecordProcessor); + + logs.setGlobalLoggerProvider(loggerProvider); + const logger = logs.getLogger('default'); + + const persistenceUtils = getContextFilePersistenceUtils( + process.env.SLOG_CONTEXT_FILE_PATH || + `${options.stateDir}/${DEFAULT_CONTEXT_FILE}`, + ); + + const slogSender = logCreator( + logRecord => + logger.emit({ + ...JSON.parse(stringify(logRecord)), + severityNumber: SeverityNumber.INFO, + }), + persistenceUtils, + ); + + return Object.assign(slogSender, { + forceFlush: () => logRecordProcessor.forceFlush(), + shutdown: () => + logRecordProcessor.forceFlush().then(logRecordProcessor.shutdown), + }); +}; From 67d577acc2918d03fc3381638ddbc068019596d6 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Thu, 24 Oct 2024 16:55:35 +0500 Subject: [PATCH 13/22] local runner script --- .../telemetry/src/otel-context-aware-slog.js | 13 ++--- packages/telemetry/src/temp.js | 48 +++++++++++++++++++ 2 files changed, 52 insertions(+), 9 deletions(-) create mode 100644 packages/telemetry/src/temp.js diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js index 4f03fe31326..123f8ec74d0 100644 --- a/packages/telemetry/src/otel-context-aware-slog.js +++ b/packages/telemetry/src/otel-context-aware-slog.js @@ -7,8 +7,9 @@ import { SimpleLogRecordProcessor, } from '@opentelemetry/sdk-logs'; import { readFileSync, writeFileSync } from 'fs'; -import { getResourceAttributes } from './index.js'; import { logCreator } from './context-aware-slog.js'; +import { getResourceAttributes } from './index.js'; +import { serializeSlogObj } from './serialize-slog-obj.js'; /** * @typedef {import('./index.js').MakeSlogSenderOptions} Options @@ -17,12 +18,6 @@ import { logCreator } from './context-aware-slog.js'; const DEFAULT_CONTEXT_FILE = 'slog-context.json'; const FILE_ENCODING = 'utf8'; -const stringify = data => - JSON.stringify(data, (_, value) => - // 
eslint-disable-next-line valid-typeof - typeof value === BigInt.name.toLowerCase() ? Number(value) : value, - ); - /** * @param {string} filePath */ @@ -35,7 +30,7 @@ export const getContextFilePersistenceUtils = filePath => { */ persistContext: context => { try { - writeFileSync(filePath, stringify(context), FILE_ENCODING); + writeFileSync(filePath, serializeSlogObj(context), FILE_ENCODING); } catch (err) { console.warn('Error writing context to file: ', err); } @@ -85,7 +80,7 @@ export const makeSlogSender = async options => { const slogSender = logCreator( logRecord => logger.emit({ - ...JSON.parse(stringify(logRecord)), + ...JSON.parse(serializeSlogObj(logRecord)), severityNumber: SeverityNumber.INFO, }), persistenceUtils, diff --git a/packages/telemetry/src/temp.js b/packages/telemetry/src/temp.js new file mode 100644 index 00000000000..1e892c9fda4 --- /dev/null +++ b/packages/telemetry/src/temp.js @@ -0,0 +1,48 @@ +/* eslint-env node */ +import { createReadStream, existsSync } from 'fs'; +import { createInterface } from 'readline'; +import { logCreator } from './context-aware-slog.js'; +import { serializeSlogObj } from './serialize-slog-obj.js'; + +const main = async () => { + const [, , slogFilePath] = process.argv; + + if (!(slogFilePath && existsSync(slogFilePath))) + throw Error(`Filepath "${slogFilePath}" not valid`); + + const inputFileStream = createReadStream(slogFilePath); + + const fileReader = createInterface({ input: inputFileStream }); + + const logger = logCreator(log => + process.stdout.write( + serializeSlogObj(log), + err => + err && + console.error( + `Unable to write line "${serializeSlogObj(log)}" due to error: `, + err, + ), + ), + ); + + for await (const line of fileReader) { + try { + logger(JSON.parse(line)); + } catch (err) { + console.error(`Unable to parse line "${line}" due to error: `, err); + } + } +}; + +// @ts-expect-error +global.assert = x => { + if (!x) throw Error(`value "${x}" is not truthy`); +}; + +console.log(`Sample run: CHAIN_ID= node temp.js `); + +main().catch(err => { + console.error('Caught error: ', err); + process.exit(1); +}); From bc8baa16f29f1d86935d596274d44ba231a01fc7 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Thu, 24 Oct 2024 16:56:29 +0500 Subject: [PATCH 14/22] remove local runner script --- packages/telemetry/src/temp.js | 48 ---------------------------------- 1 file changed, 48 deletions(-) delete mode 100644 packages/telemetry/src/temp.js diff --git a/packages/telemetry/src/temp.js b/packages/telemetry/src/temp.js deleted file mode 100644 index 1e892c9fda4..00000000000 --- a/packages/telemetry/src/temp.js +++ /dev/null @@ -1,48 +0,0 @@ -/* eslint-env node */ -import { createReadStream, existsSync } from 'fs'; -import { createInterface } from 'readline'; -import { logCreator } from './context-aware-slog.js'; -import { serializeSlogObj } from './serialize-slog-obj.js'; - -const main = async () => { - const [, , slogFilePath] = process.argv; - - if (!(slogFilePath && existsSync(slogFilePath))) - throw Error(`Filepath "${slogFilePath}" not valid`); - - const inputFileStream = createReadStream(slogFilePath); - - const fileReader = createInterface({ input: inputFileStream }); - - const logger = logCreator(log => - process.stdout.write( - serializeSlogObj(log), - err => - err && - console.error( - `Unable to write line "${serializeSlogObj(log)}" due to error: `, - err, - ), - ), - ); - - for await (const line of fileReader) { - try { - logger(JSON.parse(line)); - } catch (err) { - console.error(`Unable to parse line 
"${line}" due to error: `, err); - } - } -}; - -// @ts-expect-error -global.assert = x => { - if (!x) throw Error(`value "${x}" is not truthy`); -}; - -console.log(`Sample run: CHAIN_ID= node temp.js `); - -main().catch(err => { - console.error('Caught error: ', err); - process.exit(1); -}); From 5f7c7b8ac4f12461d30611a76ee1ccc827352684 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Fri, 25 Oct 2024 14:43:20 +0500 Subject: [PATCH 15/22] address mathieu comments 3.0 + fix timestamp issue --- packages/telemetry/src/context-aware-slog.js | 63 +++++++++---------- .../telemetry/src/otel-context-aware-slog.js | 24 ++++--- 2 files changed, 47 insertions(+), 40 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 7e52cb4b6e1..85d2c644c19 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -23,10 +23,8 @@ * } Context * * @typedef {{ - * 'chain-id': string; * 'crank.syscallNum'?: Slog['syscallNum']; * 'process.uptime': Slog['monotime']; - * timestamp: Slog['time']; * } & Context} LogAttributes * * @typedef {{ @@ -95,12 +93,14 @@ const SLOG_TYPES = { }; /** - * @param {(log: { attributes: LogAttributes, body: Partial }) => void} emitLog - * @param {Partial<{ persistContext: (context: Context) => void; restoreContext: () => Context | null; }>?} persistenceUtils + * @template {Record} [T={}] + * @param {T} [staticContext] + * @param {Partial<{ persistContext: (context: Context) => void; restoreContext: () => Context | null; }>} [persistenceUtils] */ -export const logCreator = (emitLog, persistenceUtils = {}) => { - const { CHAIN_ID } = process.env; - +export const makeContextualSlogProcessor = ( + staticContext, + persistenceUtils = {}, +) => { /** @type Array */ let [ blockContext, @@ -128,6 +128,7 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { /** * @param {Slog} slog + * @returns {{ attributes: T & LogAttributes, body: Partial; timestamp: Slog['time'] }} */ const slogProcessor = ({ monotime, time: timestamp, ...body }) => { const finalBody = { ...body }; @@ -151,20 +152,6 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { }; break; } - case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { - assert(!!blockContext); - break; - } - case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.FINISH: - case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.START: - case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.FINISH: { - assert(!!blockContext); - break; - } - case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { - assert(!!blockContext && !triggerContext); - break; - } case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.START: { blockContext = { 'block.height': finalBody.blockHeight || 0, @@ -178,7 +165,15 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { }; break; } - case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: { + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { + assert(!!blockContext); + break; + } + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.FINISH: + case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: + case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.START: + case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.FINISH: + case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { assert(!!blockContext && !triggerContext); break; } @@ -207,12 +202,14 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { case SLOG_TYPES.COSMIC_SWINGSET.RUN.START: { if (!triggerContext && finalBody.runNum !== 0) { assert(!!blockContext); - // TBD: add explicit slog events of 
both timer poll and install bundle + // TODO: add explicit slog events of both timer poll and install bundle + // https://github.com/Agoric/agoric-sdk/issues/10332 triggerContext = { 'run.num': null, - 'run.id': `timer-${finalBody.blockHeight}`, - 'run.trigger.type': 'timer', + 'run.id': `unknown-${finalBody.blockHeight}-${finalBody.runNum}`, + 'run.trigger.type': 'unknown', 'run.trigger.time': blockContext['block.time'], + 'run.trigger.blockHeight': finalBody.blockHeight, }; persistContext(triggerContext); } @@ -243,15 +240,15 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { assert(finalBody.replay); replayContext = { ...replayContext, - 'crank.deliveryNum': finalBody.deliveryNum, 'crank.vatID': finalBody.vatID, + 'crank.deliveryNum': finalBody.deliveryNum, }; } else { assert(!!crankContext && !finalBody.replay); crankContext = { ...crankContext, - 'crank.deliveryNum': finalBody.deliveryNum, 'crank.vatID': finalBody.vatID, + 'crank.deliveryNum': finalBody.deliveryNum, }; } @@ -291,9 +288,7 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { break; } - /** @type {LogAttributes} */ const logAttributes = { - 'chain-id': String(CHAIN_ID), 'process.uptime': monotime, ...initContext, // Optional prelude ...blockContext, // Block is the first level of execution nesting @@ -301,11 +296,9 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { ...crankContext, // Finally cranks are the last level of nesting ...replayContext, // Replay is a substitute for crank context during vat page in ...eventLogAttributes, - timestamp, + ...staticContext, }; - emitLog({ attributes: logAttributes, body: finalBody }); - /** * Add any after report operations here * like resetting context data @@ -343,6 +336,12 @@ export const logCreator = (emitLog, persistenceUtils = {}) => { default: break; } + + return { + attributes: /** @type {T & LogAttributes} */ (logAttributes), + body: finalBody, + timestamp, + }; }; return slogProcessor; diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js index 123f8ec74d0..6c8e78781c3 100644 --- a/packages/telemetry/src/otel-context-aware-slog.js +++ b/packages/telemetry/src/otel-context-aware-slog.js @@ -7,7 +7,7 @@ import { SimpleLogRecordProcessor, } from '@opentelemetry/sdk-logs'; import { readFileSync, writeFileSync } from 'fs'; -import { logCreator } from './context-aware-slog.js'; +import { makeContextualSlogProcessor } from './context-aware-slog.js'; import { getResourceAttributes } from './index.js'; import { serializeSlogObj } from './serialize-slog-obj.js'; @@ -54,7 +54,7 @@ export const getContextFilePersistenceUtils = filePath => { * @param {Options} options */ export const makeSlogSender = async options => { - const { OTEL_EXPORTER_OTLP_ENDPOINT } = options.env || {}; + const { CHAIN_ID, OTEL_EXPORTER_OTLP_ENDPOINT } = options.env || {}; if (!(OTEL_EXPORTER_OTLP_ENDPOINT && options.stateDir)) return console.warn( 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" and "stateDir"', @@ -77,15 +77,23 @@ export const makeSlogSender = async options => { `${options.stateDir}/${DEFAULT_CONTEXT_FILE}`, ); - const slogSender = logCreator( - logRecord => - logger.emit({ - ...JSON.parse(serializeSlogObj(logRecord)), - severityNumber: SeverityNumber.INFO, - }), + const contextualSlogProcessor = makeContextualSlogProcessor( + { 'chain-id': CHAIN_ID }, persistenceUtils, ); + /** + * @param 
{import('./context-aware-slog.js').Slog} slog + */ + const slogSender = slog => { + const { timestamp, ...logRecord } = contextualSlogProcessor(slog); + return logger.emit({ + ...JSON.parse(serializeSlogObj(logRecord)), + severityNumber: SeverityNumber.INFO, + timestamp: timestamp * 1000, + }); + }; + return Object.assign(slogSender, { forceFlush: () => logRecordProcessor.forceFlush(), shutdown: () => From 4cb1b6fc1adbc627816fcf4780b0e5f71b85001a Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Fri, 25 Oct 2024 16:09:40 +0500 Subject: [PATCH 16/22] fix timestamp reporting --- packages/telemetry/src/otel-context-aware-slog.js | 10 +++++++++- 1 file changed, 9 insertions(+), 1 deletion(-) diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js index 6c8e78781c3..ca14c87efca 100644 --- a/packages/telemetry/src/otel-context-aware-slog.js +++ b/packages/telemetry/src/otel-context-aware-slog.js @@ -87,10 +87,18 @@ export const makeSlogSender = async options => { */ const slogSender = slog => { const { timestamp, ...logRecord } = contextualSlogProcessor(slog); + + const [secondsStr, fractionStr] = String(timestamp).split('.'); + const seconds = parseInt(secondsStr, 10); + const nanoSeconds = parseInt( + (fractionStr || String(0)).padEnd(9, String(0)), + 10, + ); + return logger.emit({ ...JSON.parse(serializeSlogObj(logRecord)), severityNumber: SeverityNumber.INFO, - timestamp: timestamp * 1000, + timestamp: [seconds, nanoSeconds], }); }; From f5d57228073948b86ba3513d86f5c1e13031a0e2 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Sat, 26 Oct 2024 14:25:30 +0500 Subject: [PATCH 17/22] address mathieu comments 4.0 --- packages/telemetry/src/context-aware-slog.js | 52 ++++++++++---------- packages/telemetry/src/temp.js | 49 ++++++++++++++++++ 2 files changed, 74 insertions(+), 27 deletions(-) create mode 100644 packages/telemetry/src/temp.js diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 85d2c644c19..9073f458fea 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -35,6 +35,7 @@ * deliveryNum?: bigint; * inboundNum?: string; * monotime: number; + * remainingBeans?: bigint; * replay?: boolean; * runNum?: number; * sender?: string; @@ -73,7 +74,7 @@ const SLOG_TYPES = { }, }, CRANK: { - RESULT: 'crank-result', + FINISH: 'crank-finish', START: 'crank-start', }, DELIVER: 'deliver', @@ -157,18 +158,9 @@ export const makeContextualSlogProcessor = ( 'block.height': finalBody.blockHeight || 0, 'block.time': finalBody.blockTime, }; - triggerContext = { - 'run.num': null, - 'run.id': `bootstrap-${finalBody.blockTime}`, - 'run.trigger.type': 'bootstrap', - 'run.trigger.time': finalBody.blockTime, - }; - break; - } - case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { - assert(!!blockContext); break; } + case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.FINISH: case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: case SLOG_TYPES.COSMIC_SWINGSET.COMMIT.START: @@ -186,7 +178,7 @@ export const makeContextualSlogProcessor = ( /cosmic-swingset-([^-]+)-inbound/.exec(body.type) || []; triggerContext = { - 'run.num': null, + 'run.num': undefined, 'run.id': `${triggerType}-${finalBody.inboundNum}`, 'run.trigger.type': triggerType, 'run.trigger.source': finalBody.source, @@ -195,23 +187,23 @@ export const makeContextualSlogProcessor = ( 'run.trigger.txHash': txHash, 'run.trigger.msgIdx': 
Number(msgIdx), }; - persistContext(triggerContext); break; } // eslint-disable-next-line no-restricted-syntax case SLOG_TYPES.COSMIC_SWINGSET.RUN.START: { - if (!triggerContext && finalBody.runNum !== 0) { + if (!finalBody.runNum) { + assert(!triggerContext); + triggerContext = restoreContext(); // Restore persisted context if any + } else if (!triggerContext) { assert(!!blockContext); // TODO: add explicit slog events of both timer poll and install bundle // https://github.com/Agoric/agoric-sdk/issues/10332 triggerContext = { - 'run.num': null, + 'run.num': undefined, 'run.id': `unknown-${finalBody.blockHeight}-${finalBody.runNum}`, 'run.trigger.type': 'unknown', - 'run.trigger.time': blockContext['block.time'], 'run.trigger.blockHeight': finalBody.blockHeight, }; - persistContext(triggerContext); } if (!triggerContext) triggerContext = {}; @@ -231,8 +223,9 @@ export const makeContextualSlogProcessor = ( crankContext['crank.vatID'] = finalBody.vatID; break; } - case SLOG_TYPES.REPLAY.START: { - replayContext = { replay: true }; + case SLOG_TYPES.REPLAY.START: + case SLOG_TYPES.REPLAY.FINISH: { + replayContext = { replay: true, 'crank.vatID': finalBody.vatID }; break; } case SLOG_TYPES.DELIVER: { @@ -289,6 +282,7 @@ export const makeContextualSlogProcessor = ( } const logAttributes = { + ...staticContext, 'process.uptime': monotime, ...initContext, // Optional prelude ...blockContext, // Block is the first level of execution nesting @@ -296,7 +290,6 @@ export const makeContextualSlogProcessor = ( ...crankContext, // Finally cranks are the last level of nesting ...replayContext, // Replay is a substitute for crank context during vat page in ...eventLogAttributes, - ...staticContext, }; /** @@ -308,24 +301,29 @@ export const makeContextualSlogProcessor = ( initContext = null; break; } - case SLOG_TYPES.COSMIC_SWINGSET.END_BLOCK.START: { - triggerContext = restoreContext(); - break; - } - case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: { - blockContext = null; + case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.START: { + triggerContext = { + 'run.num': undefined, + 'run.id': `bootstrap-${finalBody.blockTime}`, + 'run.trigger.type': 'bootstrap', + 'run.trigger.time': finalBody.blockTime, + }; break; } + case SLOG_TYPES.COSMIC_SWINGSET.AFTER_COMMIT_STATS: case SLOG_TYPES.COSMIC_SWINGSET.BOOTSTRAP_BLOCK.FINISH: { blockContext = null; break; } // eslint-disable-next-line no-restricted-syntax case SLOG_TYPES.COSMIC_SWINGSET.RUN.FINISH: { + persistContext( + finalBody.remainingBeans || !triggerContext ? 
{} : triggerContext, + ); triggerContext = null; break; } - case SLOG_TYPES.CRANK.RESULT: { + case SLOG_TYPES.CRANK.FINISH: { crankContext = null; break; } diff --git a/packages/telemetry/src/temp.js b/packages/telemetry/src/temp.js new file mode 100644 index 00000000000..2a821f194fa --- /dev/null +++ b/packages/telemetry/src/temp.js @@ -0,0 +1,49 @@ +/* eslint-env node */ +import { createReadStream, existsSync } from 'fs'; +import { createInterface } from 'readline'; +import { makeContextualSlogProcessor } from './context-aware-slog.js'; +import { serializeSlogObj } from './serialize-slog-obj.js'; + +const main = async () => { + const [, , slogFilePath] = process.argv; + + if (!(slogFilePath && existsSync(slogFilePath))) + throw Error(`Filepath "${slogFilePath}" not valid`); + + const inputFileStream = createReadStream(slogFilePath); + + const fileReader = createInterface({ input: inputFileStream }); + + const logger = makeContextualSlogProcessor({ + 'chain-id': process.env.CHAIN_ID, + }); + + for await (const line of fileReader) { + try { + const logRecord = logger(JSON.parse(line)); + process.stdout.write( + serializeSlogObj(logRecord), + err => + err && + console.error( + `Unable to write line "${serializeSlogObj(logRecord)}" due to error: `, + err, + ), + ); + } catch (err) { + console.error(`Unable to parse line "${line}" due to error: `, err); + } + } +}; + +// @ts-expect-error +global.assert = x => { + if (!x) throw Error(`value "${x}" is not truthy`); +}; + +console.log(`Sample run: CHAIN_ID= node temp.js `); + +main().catch(err => { + console.error('Caught error: ', err); + process.exit(1); +}); From 6b6927fa86cf1929abf874a12c2590aeba21a624 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Sat, 26 Oct 2024 14:26:10 +0500 Subject: [PATCH 18/22] revert temp file --- packages/telemetry/src/temp.js | 49 ---------------------------------- 1 file changed, 49 deletions(-) delete mode 100644 packages/telemetry/src/temp.js diff --git a/packages/telemetry/src/temp.js b/packages/telemetry/src/temp.js deleted file mode 100644 index 2a821f194fa..00000000000 --- a/packages/telemetry/src/temp.js +++ /dev/null @@ -1,49 +0,0 @@ -/* eslint-env node */ -import { createReadStream, existsSync } from 'fs'; -import { createInterface } from 'readline'; -import { makeContextualSlogProcessor } from './context-aware-slog.js'; -import { serializeSlogObj } from './serialize-slog-obj.js'; - -const main = async () => { - const [, , slogFilePath] = process.argv; - - if (!(slogFilePath && existsSync(slogFilePath))) - throw Error(`Filepath "${slogFilePath}" not valid`); - - const inputFileStream = createReadStream(slogFilePath); - - const fileReader = createInterface({ input: inputFileStream }); - - const logger = makeContextualSlogProcessor({ - 'chain-id': process.env.CHAIN_ID, - }); - - for await (const line of fileReader) { - try { - const logRecord = logger(JSON.parse(line)); - process.stdout.write( - serializeSlogObj(logRecord), - err => - err && - console.error( - `Unable to write line "${serializeSlogObj(logRecord)}" due to error: `, - err, - ), - ); - } catch (err) { - console.error(`Unable to parse line "${line}" due to error: `, err); - } - } -}; - -// @ts-expect-error -global.assert = x => { - if (!x) throw Error(`value "${x}" is not truthy`); -}; - -console.log(`Sample run: CHAIN_ID= node temp.js `); - -main().catch(err => { - console.error('Caught error: ', err); - process.exit(1); -}); From f47d99cdfca61a8d59e430e0b2e042b5d96fd6a6 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Sun, 27 Oct 2024 
12:27:54 +0500 Subject: [PATCH 19/22] address mathieu comments 4.1 --- packages/telemetry/src/context-aware-slog.js | 5 ++--- packages/telemetry/src/otel-context-aware-slog.js | 2 +- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog.js b/packages/telemetry/src/context-aware-slog.js index 9073f458fea..34bb4867422 100644 --- a/packages/telemetry/src/context-aware-slog.js +++ b/packages/telemetry/src/context-aware-slog.js @@ -317,9 +317,8 @@ export const makeContextualSlogProcessor = ( } // eslint-disable-next-line no-restricted-syntax case SLOG_TYPES.COSMIC_SWINGSET.RUN.FINISH: { - persistContext( - finalBody.remainingBeans || !triggerContext ? {} : triggerContext, - ); + assert(!!triggerContext); + persistContext(finalBody.remainingBeans ? {} : triggerContext); triggerContext = null; break; } diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js index ca14c87efca..542a71b499b 100644 --- a/packages/telemetry/src/otel-context-aware-slog.js +++ b/packages/telemetry/src/otel-context-aware-slog.js @@ -91,7 +91,7 @@ export const makeSlogSender = async options => { const [secondsStr, fractionStr] = String(timestamp).split('.'); const seconds = parseInt(secondsStr, 10); const nanoSeconds = parseInt( - (fractionStr || String(0)).padEnd(9, String(0)), + (fractionStr || String(0)).padEnd(9, String(0)).slice(0, 9), 10, ); From d10fccd91faf32e9d9da37a26212388daea99ecf Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Mon, 28 Oct 2024 14:28:36 +0500 Subject: [PATCH 20/22] address mathieu comments 4.0 --- .../telemetry/src/context-aware-slog-file.js | 38 +++++++++++++++++++ .../telemetry/src/otel-context-aware-slog.js | 14 +++---- 2 files changed, 43 insertions(+), 9 deletions(-) create mode 100644 packages/telemetry/src/context-aware-slog-file.js diff --git a/packages/telemetry/src/context-aware-slog-file.js b/packages/telemetry/src/context-aware-slog-file.js new file mode 100644 index 00000000000..8d862b6fc5a --- /dev/null +++ b/packages/telemetry/src/context-aware-slog-file.js @@ -0,0 +1,38 @@ +/* eslint-env node */ + +import { makeFsStreamWriter } from '@agoric/internal/src/node/fs-stream.js'; +import { makeContextualSlogProcessor } from './context-aware-slog.js'; +import { serializeSlogObj } from './serialize-slog-obj.js'; + +/** + * @param {import('./index.js').MakeSlogSenderOptions} options + */ +export const makeSlogSender = async options => { + const { CHAIN_ID, CONTEXTUAL_SLOGFILE } = options.env || {}; + if (!CONTEXTUAL_SLOGFILE) + return console.warn( + 'Ignoring invocation of slogger "context-aware-slog-file" without the presence of "CONTEXTUAL_SLOGFILE"', + ); + + const stream = await makeFsStreamWriter(CONTEXTUAL_SLOGFILE); + + if (!stream) + return console.error( + `Couldn't create a write stream on file "${CONTEXTUAL_SLOGFILE}"`, + ); + + const contextualSlogProcessor = makeContextualSlogProcessor({ + 'chain-id': CHAIN_ID, + }); + + /** + * @param {import('./context-aware-slog.js').Slog} slog + */ + const slogSender = slog => + stream.write(`${serializeSlogObj(contextualSlogProcessor(slog))}\n`); + + return Object.assign(slogSender, { + forceFlush: () => stream.flush(), + shutdown: () => stream.close(), + }); +}; diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js index 542a71b499b..e98027ed2fb 100644 --- a/packages/telemetry/src/otel-context-aware-slog.js +++ b/packages/telemetry/src/otel-context-aware-slog.js 
@@ -11,10 +11,6 @@ import { makeContextualSlogProcessor } from './context-aware-slog.js'; import { getResourceAttributes } from './index.js'; import { serializeSlogObj } from './serialize-slog-obj.js'; -/** - * @typedef {import('./index.js').MakeSlogSenderOptions} Options - */ - const DEFAULT_CONTEXT_FILE = 'slog-context.json'; const FILE_ENCODING = 'utf8'; @@ -22,7 +18,7 @@ const FILE_ENCODING = 'utf8'; * @param {string} filePath */ export const getContextFilePersistenceUtils = filePath => { - console.log(`Using file ${filePath} for slogger context`); + console.warn(`Using file ${filePath} for slogger context`); return { /** @@ -32,7 +28,7 @@ export const getContextFilePersistenceUtils = filePath => { try { writeFileSync(filePath, serializeSlogObj(context), FILE_ENCODING); } catch (err) { - console.warn('Error writing context to file: ', err); + console.error('Error writing context to file: ', err); } }, @@ -43,7 +39,7 @@ export const getContextFilePersistenceUtils = filePath => { try { return JSON.parse(readFileSync(filePath, FILE_ENCODING)); } catch (parseErr) { - console.warn('Error reading context from file: ', parseErr); + console.error('Error reading context from file: ', parseErr); return null; } }, @@ -51,12 +47,12 @@ export const getContextFilePersistenceUtils = filePath => { }; /** - * @param {Options} options + * @param {import('./index.js').MakeSlogSenderOptions} options */ export const makeSlogSender = async options => { const { CHAIN_ID, OTEL_EXPORTER_OTLP_ENDPOINT } = options.env || {}; if (!(OTEL_EXPORTER_OTLP_ENDPOINT && options.stateDir)) - return console.warn( + return console.error( 'Ignoring invocation of slogger "context-aware-slog" without the presence of "OTEL_EXPORTER_OTLP_ENDPOINT" and "stateDir"', ); From 4392faa17d89adeb4b08af00ea4a7d45601dbaf8 Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Tue, 29 Oct 2024 01:01:20 +0500 Subject: [PATCH 21/22] address mathien comments 6.0 --- .../telemetry/src/context-aware-slog-file.js | 8 +++++-- .../telemetry/src/otel-context-aware-slog.js | 24 ++++++++++++++----- 2 files changed, 24 insertions(+), 8 deletions(-) diff --git a/packages/telemetry/src/context-aware-slog-file.js b/packages/telemetry/src/context-aware-slog-file.js index 8d862b6fc5a..003680fc2bd 100644 --- a/packages/telemetry/src/context-aware-slog-file.js +++ b/packages/telemetry/src/context-aware-slog-file.js @@ -28,8 +28,12 @@ export const makeSlogSender = async options => { /** * @param {import('./context-aware-slog.js').Slog} slog */ - const slogSender = slog => - stream.write(`${serializeSlogObj(contextualSlogProcessor(slog))}\n`); + const slogSender = slog => { + const contextualizedSlog = contextualSlogProcessor(slog); + + // eslint-disable-next-line prefer-template + stream.write(serializeSlogObj(contextualizedSlog) + '\n').catch(() => {}); + }; return Object.assign(slogSender, { forceFlush: () => stream.flush(), diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js index e98027ed2fb..a66d5e11a38 100644 --- a/packages/telemetry/src/otel-context-aware-slog.js +++ b/packages/telemetry/src/otel-context-aware-slog.js @@ -59,9 +59,9 @@ export const makeSlogSender = async options => { const loggerProvider = new LoggerProvider({ resource: new Resource(getResourceAttributes(options)), }); - const logRecordProcessor = new SimpleLogRecordProcessor( - new OTLPLogExporter({ keepAlive: true }), - ); + + const otelLogExporter = new OTLPLogExporter({ keepAlive: true }); + const logRecordProcessor = new 
SimpleLogRecordProcessor(otelLogExporter); loggerProvider.addLogRecordProcessor(logRecordProcessor); @@ -91,7 +91,7 @@ export const makeSlogSender = async options => { 10, ); - return logger.emit({ + logger.emit({ ...JSON.parse(serializeSlogObj(logRecord)), severityNumber: SeverityNumber.INFO, timestamp: [seconds, nanoSeconds], @@ -99,8 +99,20 @@ export const makeSlogSender = async options => { }; return Object.assign(slogSender, { - forceFlush: () => logRecordProcessor.forceFlush(), + forceFlush: otelLogExporter.forceFlush, shutdown: () => - logRecordProcessor.forceFlush().then(logRecordProcessor.shutdown), + logRecordProcessor + .shutdown() + .then(otelLogExporter.forceFlush, shutdownError => + otelLogExporter.forceFlush().then( + () => Promise.reject(shutdownError), + flushError => + Promise.reject(AggregateError([shutdownError, flushError])), + ), + ) + .then( + () => {}, + flushError => Promise.reject(flushError), + ), }); }; From 3ddcc6da704f39281223b501dc2bac16bef5e33c Mon Sep 17 00:00:00 2001 From: usmanmani1122 Date: Tue, 29 Oct 2024 02:00:36 +0500 Subject: [PATCH 22/22] bound methods --- .../telemetry/src/otel-context-aware-slog.js | 43 ++++++++++++------- 1 file changed, 28 insertions(+), 15 deletions(-) diff --git a/packages/telemetry/src/otel-context-aware-slog.js b/packages/telemetry/src/otel-context-aware-slog.js index a66d5e11a38..51891460eba 100644 --- a/packages/telemetry/src/otel-context-aware-slog.js +++ b/packages/telemetry/src/otel-context-aware-slog.js @@ -98,21 +98,34 @@ export const makeSlogSender = async options => { }); }; + const shutdown = async () => { + await Promise.resolve(); + const errors = []; + + try { + await logRecordProcessor.shutdown(); + } catch (err) { + errors.push(err); + } + + try { + await otelLogExporter.forceFlush(); + } catch (err) { + errors.push(err); + } + + switch (errors.length) { + case 0: + return; + case 1: + throw errors[0]; + default: + throw AggregateError(errors); + } + }; + return Object.assign(slogSender, { - forceFlush: otelLogExporter.forceFlush, - shutdown: () => - logRecordProcessor - .shutdown() - .then(otelLogExporter.forceFlush, shutdownError => - otelLogExporter.forceFlush().then( - () => Promise.reject(shutdownError), - flushError => - Promise.reject(AggregateError([shutdownError, flushError])), - ), - ) - .then( - () => {}, - flushError => Promise.reject(flushError), - ), + forceFlush: () => otelLogExporter.forceFlush(), + shutdown, }); };
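
For readers following the series, here is a minimal usage sketch of the resulting `makeContextualSlogProcessor` export. It wires the processor to a trivial in-memory persistence stub and feeds it two hand-written slog records. The record field values, the stub, the import path, and the `global.assert` shim (borrowed from the temporary runner script that was added and then reverted above) are illustrative assumptions, not something the patches themselves define.

/* eslint-env node */
// Usage sketch only: exercise makeContextualSlogProcessor with an in-memory
// persistence stub and two hand-written slog records. Paths assume the sketch
// sits next to the patched sources in packages/telemetry/src.
import { makeContextualSlogProcessor } from './context-aware-slog.js';

// The processor calls a global `assert` (normally provided by the Agoric
// runtime); shim it the same way the temporary runner script did.
global.assert = x => {
  if (!x) throw Error(`value "${x}" is not truthy`);
};

let savedContext = null;
const processor = makeContextualSlogProcessor(
  { 'chain-id': 'agoriclocal' }, // static context, e.g. taken from the env
  {
    persistContext: context => {
      savedContext = context;
    },
    restoreContext: () => savedContext,
  },
);

// Hand-written records with the fields the processor reads; values are made up.
const records = [
  {
    type: 'cosmic-swingset-begin-block',
    blockHeight: 100,
    blockTime: 1729770000,
    monotime: 1.0,
    time: 1729770000.1,
  },
  {
    type: 'cosmic-swingset-deliver-inbound',
    inboundNum: '100-abc123-0', // parsed as blockHeight-txHash-msgIdx
    sender: 'agoric1sender',
    source: 'mailbox',
    blockTime: 1729770000,
    monotime: 1.1,
    time: 1729770000.2,
  },
];

for (const record of records) {
  // Each call returns the contextualized attributes, the trimmed body, and the
  // original slog timestamp.
  const { attributes, body, timestamp } = processor(record);
  console.log(timestamp, body.type, attributes);
}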
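
The OTel sender converts the slog's fractional-second `time` into the `[seconds, nanoseconds]` pair it hands to the logger (introduced in PATCH 16 and tightened in PATCH 19). A small worked example of that conversion, with an arbitrary sample value:

// Mirror of the conversion in otel-context-aware-slog.js: split a fractional
// Unix timestamp (seconds) into whole seconds and a 9-digit nanosecond part.
const toHrTime = timestamp => {
  const [secondsStr, fractionStr] = String(timestamp).split('.');
  const seconds = parseInt(secondsStr, 10);
  const nanoSeconds = parseInt(
    (fractionStr || '0').padEnd(9, '0').slice(0, 9),
    10,
  );
  return [seconds, nanoSeconds];
};

// 1729770000.5 -> [1729770000, 500000000]
console.log(toHrTime(1729770000.5));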
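
And a sketch of driving the file-based sender added in PATCH 20: `makeSlogSender` from context-aware-slog-file.js only activates when `CONTEXTUAL_SLOGFILE` is present, so the optional chaining below covers the disabled case. The env values and output path are illustrative assumptions.

/* eslint-env node */
import { makeSlogSender } from './context-aware-slog-file.js';

// Returns undefined (after a warning) when CONTEXTUAL_SLOGFILE is unset.
const slogSender = await makeSlogSender({
  env: {
    CHAIN_ID: 'agoriclocal',
    CONTEXTUAL_SLOGFILE: '/tmp/contextual-slogs.jsonl', // illustrative path
  },
});

// Write one contextualized record and flush the stream.
slogSender?.({
  type: 'cosmic-swingset-begin-block',
  blockHeight: 100,
  blockTime: 1729770000,
  monotime: 1.0,
  time: 1729770000.1,
});
await slogSender?.forceFlush();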