diff --git a/benchmark/metaLogger.js b/benchmark/metaLogger.js index 2af1f599c9..b89ad71066 100644 --- a/benchmark/metaLogger.js +++ b/benchmark/metaLogger.js @@ -2,30 +2,30 @@ const logger = require('../src/logger'); -logger.setLogLevel(Number.POSITIVE_INFINITY); +logger.setLogLevel('random'); const debug = (...args) => { - logger.setLogLevel(logger.levelDebug); + logger.setLogLevel('debug'); logger.debug(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; const info = (...args) => { - logger.setLogLevel(logger.levelInfo); + logger.setLogLevel('info'); logger.info(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; const warn = (...args) => { - logger.setLogLevel(logger.levelWarn); + logger.setLogLevel('warn'); logger.warn(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; const error = (...args) => { - logger.setLogLevel(logger.levelError); + logger.setLogLevel('error'); logger.error(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; module.exports = { diff --git a/package-lock.json b/package-lock.json index 1e6b2c7341..066dac2c88 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ "@koa/router": "^12.0.0", "@ndhoule/extend": "^2.0.0", "@pyroscope/nodejs": "^0.2.9", - "@rudderstack/integrations-lib": "^0.2.8", + "@rudderstack/integrations-lib": "^0.2.10", "@rudderstack/json-template-engine": "^0.13.2", "@rudderstack/workflow-engine": "^0.8.2", "@shopify/jest-koa-mocks": "^5.1.1", @@ -143,6 +143,49 @@ "mocha": "2.1.0" } }, + "../rudder-integrations-lib": { + "name": "@rudderstack/integrations-lib", + "version": "0.2.9", + "extraneous": true, + "license": "MIT", + "dependencies": { + "axios": "^1.4.0", + "axios-mock-adapter": "^1.22.0", + "crypto": "^1.0.1", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-config-airbnb-typescript": "^17.1.0", + "get-value": "^3.0.1", + 
"handlebars": "^4.7.8", + "lodash": "^4.17.21", + "moment": "^2.29.4", + "moment-timezone": "^0.5.43", + "set-value": "^4.1.0", + "sha256": "^0.2.0", + "tslib": "^2.4.0", + "winston": "^3.11.0" + }, + "devDependencies": { + "@commitlint/config-conventional": "^18.5.0", + "@types/get-value": "^3.0.3", + "@types/jest": "^29.5.4", + "@types/lodash": "^4.14.195", + "@types/node": "^20.3.3", + "@types/set-value": "^4.0.1", + "@types/sha256": "^0.2.0", + "@typescript-eslint/eslint-plugin": "^6.20.0", + "@typescript-eslint/parser": "^6.20.0", + "commitlint": "^18.6.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "husky": "^8.0.0", + "jest": "^29.4.3", + "pre-commit": "^1.2.2", + "prettier": "^2.8.4", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.1", + "typescript": "^5.1.6" + } + }, "node_modules/@aashutoshrathi/word-wrap": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", @@ -4462,9 +4505,9 @@ } }, "node_modules/@rudderstack/integrations-lib": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.8.tgz", - "integrity": "sha512-5CJoFFCRDhG7busCGVktKqEEXO0DbFqJ56TOT+jyDdoTf8sZ7SsSJ4NCZYmSplZrbQGj2R+aArnQnpxA4hPGmA==", + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.10.tgz", + "integrity": "sha512-PVlRIxO9PVYpR+UNm1qQt85wo0wO9oX0PvoC9XqzYO+C0PfRvkMqac8ghA5ytqeCYNfSIye7DtidaII5ZoCQCA==", "dependencies": { "axios": "^1.4.0", "axios-mock-adapter": "^1.22.0", diff --git a/package.json b/package.json index 304ed36d13..4c558430d9 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "@koa/router": "^12.0.0", "@ndhoule/extend": "^2.0.0", "@pyroscope/nodejs": "^0.2.9", - "@rudderstack/integrations-lib": "^0.2.8", + "@rudderstack/integrations-lib": "^0.2.10", "@rudderstack/json-template-engine": "^0.13.2", "@rudderstack/workflow-engine": "^0.8.2", 
"@shopify/jest-koa-mocks": "^5.1.1", diff --git a/src/adapters/network.js b/src/adapters/network.js index 0720638d12..850ba649c8 100644 --- a/src/adapters/network.js +++ b/src/adapters/network.js @@ -45,14 +45,20 @@ const networkClientConfigs = { httpsAgent: new https.Agent({ keepAlive: true }), }; -const fireHTTPStats = (clientResponse, startTime, statTags) => { - const destType = statTags.destType ? statTags.destType : ''; - const feature = statTags.feature ? statTags.feature : ''; - const endpointPath = statTags.endpointPath ? statTags.endpointPath : ''; - const requestMethod = statTags.requestMethod ? statTags.requestMethod : ''; - const module = statTags.module ? statTags.module : ''; - const statusCode = clientResponse.success ? clientResponse.response.status : ''; +const fireOutgoingReqStats = ({ + destType, + feature, + endpointPath, + requestMethod, + module, + metadata = {}, + startTime, + statusCode, + clientResponse, +}) => { + const logMetaInfo = log.getLogMetadata(metadata); stats.timing('outgoing_request_latency', startTime, { + ...logMetaInfo, feature, destType, endpointPath, @@ -60,6 +66,7 @@ const fireHTTPStats = (clientResponse, startTime, statTags) => { module, }); stats.counter('outgoing_request_count', 1, { + ...logMetaInfo, feature, destType, endpointPath, @@ -70,6 +77,36 @@ const fireHTTPStats = (clientResponse, startTime, statTags) => { }); }; +const fireHTTPStats = (clientResponse, startTime, statTags) => { + const destType = statTags.destType ? statTags.destType : ''; + const feature = statTags.feature ? statTags.feature : ''; + const endpointPath = statTags.endpointPath ? statTags.endpointPath : ''; + const requestMethod = statTags.requestMethod ? statTags.requestMethod : ''; + const module = statTags.module ? statTags.module : ''; + const statusCode = clientResponse.success ? 
clientResponse.response.status : ''; + const defArgs = { + destType, + endpointPath, + feature, + module, + requestMethod, + statusCode, + startTime, + clientResponse, + }; + if (statTags?.metadata) { + const metadata = !Array.isArray(statTags?.metadata) ? [statTags.metadata] : statTags.metadata; + metadata?.forEach((m) => { + fireOutgoingReqStats({ + ...defArgs, + metadata: m, + }); + }); + return; + } + fireOutgoingReqStats(defArgs); +}; + const enhanceRequestOptions = (options) => { const requestOptions = { ...networkClientConfigs, @@ -322,25 +359,6 @@ const prepareProxyRequest = (request) => { return removeUndefinedValues({ endpoint, data, params, headers, method, config }); }; -/** - * depricating: handles proxying requests to destinations from server, expects requsts in "defaultRequestConfig" - * note: needed for test api - * @param {*} request - * @returns - */ -const proxyRequest = async (request, destType) => { - const { endpoint, data, method, params, headers } = prepareProxyRequest(request); - const requestOptions = { - url: endpoint, - data, - params, - headers, - method, - }; - const response = await httpSend(requestOptions, { feature: 'proxy', destType }); - return response; -}; - /** * handles http request and sends the response in a simple format that is followed in transformer * @@ -392,6 +410,38 @@ const handleHttpRequest = async (requestType = 'post', ...httpArgs) => { return { httpResponse, processedResponse }; }; +/** + * depricating: handles proxying requests to destinations from server, expects requsts in "defaultRequestConfig" + * note: needed for test api + * @param {*} request + * @returns + */ +const proxyRequest = async (request, destType) => { + const { metadata } = request; + const { endpoint, data, method, params, headers } = prepareProxyRequest(request); + const requestOptions = { + url: endpoint, + data, + params, + headers, + method, + }; + log.requestLog(`[${destType.toUpperCase()}] delivering data`, { + metadata, + 
requestDetails: { + body: data, + url: endpoint, + method, + }, + }); + const response = await httpSend(requestOptions, { + feature: 'proxy', + destType, + metadata, + }); + return response; +}; + module.exports = { httpSend, httpGET, diff --git a/src/adapters/utils/networkUtils.js b/src/adapters/utils/networkUtils.js index 0dcb9931e9..4b8dd4fc39 100644 --- a/src/adapters/utils/networkUtils.js +++ b/src/adapters/utils/networkUtils.js @@ -144,10 +144,11 @@ const processAxiosResponse = (clientResponse) => { } // non 2xx status handling for axios response if (response) { - const { data, status } = response; + const { data, status, headers } = response; return { response: data || '', status: status || 500, + ...(isDefinedAndNotNullAndNotEmpty(headers) ? { headers } : {}), }; } // (edge case) response and code is not present @@ -157,10 +158,11 @@ const processAxiosResponse = (clientResponse) => { }; } // success(2xx) axios response - const { data, status } = clientResponse.response; + const { data, status, headers } = clientResponse.response; return { response: data || '', status: status || 500, + ...(isDefinedAndNotNullAndNotEmpty(headers) ? { headers } : {}), }; }; diff --git a/src/cdk/v2/destinations/gladly/procWorkflow.yaml b/src/cdk/v2/destinations/gladly/procWorkflow.yaml index a53a0ca8f5..dcefc9d774 100644 --- a/src/cdk/v2/destinations/gladly/procWorkflow.yaml +++ b/src/cdk/v2/destinations/gladly/procWorkflow.yaml @@ -61,7 +61,8 @@ steps: headers: $.getHeaders(.destination) } const endpoint = $.getEndpoint(.destination) + "?" 
+ $.getQueryParams($.context.payload); - const rawResponse = await $.httpGET(endpoint,requestOptions) + const reqStats = {metadata:.metadata, module: 'router',feature: "transformation", destType:"gladly",requestMethod:"get",endpointPath:"/api/v1/customer-profiles"} + const rawResponse = await $.httpGET(endpoint,requestOptions, reqStats) const processedResponse = $.processAxiosResponse(rawResponse) processedResponse diff --git a/src/cdk/v2/handler.ts b/src/cdk/v2/handler.ts index c437247f74..74ebb716e6 100644 --- a/src/cdk/v2/handler.ts +++ b/src/cdk/v2/handler.ts @@ -17,6 +17,8 @@ import { isCdkV2Destination, } from './utils'; +import logger from '../../logger'; + const defTags = { [tags.TAG_NAMES.IMPLEMENTATION]: tags.IMPLEMENTATIONS.CDK_V2, }; @@ -82,12 +84,12 @@ export async function processCdkV2Workflow( destType: string, parsedEvent: FixMe, feature: string, - logger: FixMe, requestMetadata: NonNullable = {}, bindings: Record = {}, ) { try { - logger.debug(`Processing cdkV2 workflow`); + logger.debug(`Processing cdkV2 workflow`, { destType }); + const workflowEngine = await getCachedWorkflowEngine(destType, feature, bindings); return await executeWorkflow(workflowEngine, parsedEvent, requestMetadata); } catch (error) { diff --git a/src/controllers/bulkUpload.ts b/src/controllers/bulkUpload.ts index 28556dd5df..cb0bcfed3c 100644 --- a/src/controllers/bulkUpload.ts +++ b/src/controllers/bulkUpload.ts @@ -1,5 +1,4 @@ /* eslint-disable global-require, import/no-dynamic-require, @typescript-eslint/no-unused-vars */ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { client as errNotificationClient } from '../util/errorNotifier'; import { getDestFileUploadHandler, @@ -7,6 +6,7 @@ import { getPollStatusHandler, } from '../util/fetchDestinationHandlers'; import { CatchErr, ContextBodySimple } from '../util/types'; +import logger from '../logger'; // TODO: To be refactored and redisgned const ERROR_MESSAGE_PROCESSOR_STRING = 'Error 
occurred while processing payload.'; diff --git a/src/controllers/delivery.ts b/src/controllers/delivery.ts index 0dc27553cb..9e06b23f4d 100644 --- a/src/controllers/delivery.ts +++ b/src/controllers/delivery.ts @@ -1,9 +1,6 @@ /* eslint-disable prefer-destructuring */ /* eslint-disable sonarjs/no-duplicate-string */ -import { - isDefinedAndNotNullAndNotEmpty, - structuredLogger as logger, -} from '@rudderstack/integrations-lib'; +import { isDefinedAndNotNullAndNotEmpty } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { ServiceSelector } from '../helpers/serviceSelector'; import { DeliveryTestService } from '../services/delivertTest/deliveryTest'; @@ -19,12 +16,13 @@ import { import { FixMe } from '../util/types'; import tags from '../v0/util/tags'; import { ControllerUtility } from './util'; +import logger from '../logger'; const NON_DETERMINABLE = 'Non-determinable'; export class DeliveryController { public static async deliverToDestination(ctx: Context) { - logger.debug('Native(Delivery):: Request to transformer::', ctx.request.body); + logger.debug('Native(Delivery):: Request to transformer for delivery::', ctx.request.body); let deliveryResponse: DeliveryV0Response; const requestMetadata = MiscService.getRequestMetadata(ctx); const deliveryRequest = ctx.request.body as ProxyV0Request; @@ -54,12 +52,12 @@ export class DeliveryController { ctx.body = { output: deliveryResponse }; ControllerUtility.deliveryPostProcess(ctx, deliveryResponse.status); - logger.debug('Native(Delivery):: Response from transformer::', ctx.body); + logger.debug('Native(Delivery):: Response from transformer after delivery::', ctx.body); return ctx; } public static async deliverToDestinationV1(ctx: Context) { - logger.debug('Native(Delivery):: Request to transformer::', ctx.request.body); + logger.debug('Native(Delivery):: Request to transformer for delivery::', ctx.request.body); let deliveryResponse: DeliveryV1Response; const requestMetadata = 
MiscService.getRequestMetadata(ctx); const deliveryRequest = ctx.request.body as ProxyV1Request; @@ -116,7 +114,7 @@ export class DeliveryController { ); ctx.body = { output: response }; ControllerUtility.postProcess(ctx); - logger.debug('Native(Delivery-Test):: Response from transformer::', ctx.body); + logger.debug('Native(Delivery-Test):: Response from transformer after delivery::', ctx.body); return ctx; } } diff --git a/src/controllers/destination.ts b/src/controllers/destination.ts index 92ef4b4c19..998cab67bb 100644 --- a/src/controllers/destination.ts +++ b/src/controllers/destination.ts @@ -1,11 +1,9 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { ServiceSelector } from '../helpers/serviceSelector'; import { DestinationPostTransformationService } from '../services/destination/postTransformation'; import { DestinationPreTransformationService } from '../services/destination/preTransformation'; import { MiscService } from '../services/misc'; import { - ErrorDetailer, ProcessorTransformationRequest, ProcessorTransformationResponse, RouterTransformationRequest, @@ -17,6 +15,7 @@ import { getIntegrationVersion } from '../util/utils'; import { checkInvalidRtTfEvents } from '../v0/util'; import tags from '../v0/util/tags'; import { ControllerUtility } from './util'; +import logger from '../logger'; export class DestinationController { public static async destinationTransformAtProcessor(ctx: Context) { @@ -33,9 +32,6 @@ export class DestinationController { ...metaTags, }); const integrationService = ServiceSelector.getDestinationService(events); - const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), - }); try { integrationService.init(); events = DestinationPreTransformationService.preProcess( @@ -51,7 +47,6 @@ export class DestinationController { destination, version, requestMetadata, - loggerWithCtx, ); } catch (error: any) { 
resplist = events.map((ev) => { @@ -71,7 +66,6 @@ export class DestinationController { } ctx.body = resplist; ControllerUtility.postProcess(ctx); - loggerWithCtx.debug('Native(Process-Transform):: Response from transformer::', ctx.body); stats.histogram('dest_transform_output_events', resplist.length, { destination, version, @@ -113,9 +107,6 @@ export class DestinationController { return ctx; } const metaTags = MiscService.getMetaTags(events[0].metadata); - const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), - }); stats.histogram('dest_transform_input_events', events.length, { destination, version: 'v0', @@ -132,7 +123,6 @@ export class DestinationController { destination, getIntegrationVersion(), requestMetadata, - loggerWithCtx, ); } catch (error: any) { const metaTO = integrationService.getTags( @@ -155,7 +145,6 @@ export class DestinationController { version: 'v0', ...metaTags, }); - loggerWithCtx.debug('Native(Router-Transform):: Response from transformer::', ctx.body); stats.timing('dest_transform_request_latency', startTime, { destination, version: 'v0', @@ -172,9 +161,6 @@ export class DestinationController { const routerRequest = ctx.request.body as RouterTransformationRequest; const destination = routerRequest.destType; let events = routerRequest.input; - const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), - }); const integrationService = ServiceSelector.getDestinationService(events); try { events = DestinationPreTransformationService.preProcess(events, ctx); @@ -184,7 +170,6 @@ export class DestinationController { destination, getIntegrationVersion(), requestMetadata, - loggerWithCtx, ); ctx.body = resplist; } catch (error: any) { @@ -202,7 +187,6 @@ export class DestinationController { ctx.body = [errResp]; } ControllerUtility.postProcess(ctx); - loggerWithCtx.debug('Native(Process-Transform-Batch):: Response from 
transformer::', ctx.body); stats.timing('dest_transform_request_latency', startTime, { destination, feature: tags.FEATURES.BATCH, diff --git a/src/controllers/regulation.ts b/src/controllers/regulation.ts index 4b8f87e3fa..2d40c518f4 100644 --- a/src/controllers/regulation.ts +++ b/src/controllers/regulation.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { ServiceSelector } from '../helpers/serviceSelector'; import { DestinationPostTransformationService } from '../services/destination/postTransformation'; @@ -7,6 +6,7 @@ import stats from '../util/stats'; import tags from '../v0/util/tags'; // eslint-disable-next-line @typescript-eslint/no-unused-vars import { CatchErr } from '../util/types'; +import logger from '../logger'; export class RegulationController { public static async deleteUsers(ctx: Context) { diff --git a/src/controllers/source.ts b/src/controllers/source.ts index e1a4931371..bc4b77bd3d 100644 --- a/src/controllers/source.ts +++ b/src/controllers/source.ts @@ -1,9 +1,9 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { ServiceSelector } from '../helpers/serviceSelector'; import { MiscService } from '../services/misc'; import { SourcePostTransformationService } from '../services/source/postTransformation'; import { ControllerUtility } from './util'; +import logger from '../logger'; export class SourceController { public static async sourceTransform(ctx: Context) { @@ -12,7 +12,6 @@ export class SourceController { const events = ctx.request.body as object[]; const { version, source }: { version: string; source: string } = ctx.params; const integrationService = ServiceSelector.getNativeSourceService(); - const loggerWithCtx = logger.child({ version, source }); try { const { implementationVersion, input } = ControllerUtility.adaptInputToVersion( source, @@ -24,7 +23,6 @@ export class SourceController 
{ source, implementationVersion, requestMetadata, - loggerWithCtx, ); ctx.body = resplist; } catch (err: any) { @@ -33,7 +31,11 @@ export class SourceController { ctx.body = [resp]; } ControllerUtility.postProcess(ctx); - loggerWithCtx.debug('Native(Source-Transform):: Response from transformer::', ctx.body); + logger.debug('Native(Source-Transform):: Response from transformer::', { + srcResponse: ctx.body, + version, + source, + }); return ctx; } } diff --git a/src/controllers/userTransform.ts b/src/controllers/userTransform.ts index 0e288c6f04..c81fb1dcb6 100644 --- a/src/controllers/userTransform.ts +++ b/src/controllers/userTransform.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { UserTransformService } from '../services/userTransform'; import { ProcessorTransformationRequest, UserTransformationServiceResponse } from '../types/index'; @@ -8,6 +7,7 @@ import { validateCode, } from '../util/customTransformer'; import { ControllerUtility } from './util'; +import logger from '../logger'; export class UserTransformController { public static async transform(ctx: Context) { diff --git a/src/index.ts b/src/index.ts index 5557994b2e..c5de26c776 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import dotenv from 'dotenv'; import gracefulShutdown from 'http-graceful-shutdown'; import Koa from 'koa'; @@ -11,6 +10,10 @@ import { RedisDB } from './util/redis/redisConnector'; import { logProcessInfo } from './util/utils'; dotenv.config(); + +// eslint-disable-next-line import/first +import logger from './logger'; + const clusterEnabled = process.env.CLUSTER_ENABLED !== 'false'; const port = parseInt(process.env.PORT ?? 
'9090', 10); const metricsPort = parseInt(process.env.METRICS_PORT || '9091', 10); @@ -32,7 +35,6 @@ app.use( addRequestSizeMiddleware(app); addSwaggerRoutes(app); -logger.info('Using new routes'); applicationRoutes(app); function finalFunction() { diff --git a/src/interfaces/DestinationService.ts b/src/interfaces/DestinationService.ts index 5d7596dac5..b45d9a427c 100644 --- a/src/interfaces/DestinationService.ts +++ b/src/interfaces/DestinationService.ts @@ -28,7 +28,6 @@ export interface DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: NonNullable, ): Promise; doRouterTransformation( @@ -36,7 +35,6 @@ export interface DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: NonNullable, ): Promise; doBatchTransformation( @@ -44,7 +42,6 @@ export interface DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: NonNullable, ): RouterTransformationResponse[]; deliver( diff --git a/src/interfaces/SourceService.ts b/src/interfaces/SourceService.ts index fab6490264..c7de8cfe8b 100644 --- a/src/interfaces/SourceService.ts +++ b/src/interfaces/SourceService.ts @@ -1,5 +1,4 @@ import { MetaTransferObject, SourceTransformationResponse } from '../types/index'; -import { FixMe } from '../util/types'; export interface SourceService { getTags(): MetaTransferObject; @@ -9,6 +8,5 @@ export interface SourceService { sourceType: string, version: string, requestMetadata: NonNullable, - logger: FixMe, ): Promise; } diff --git a/src/logger.js b/src/logger.js index 0685df3387..6daff56c67 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,38 +1,162 @@ /* istanbul ignore file */ +const { LOGLEVELS, structuredLogger } = require('@rudderstack/integrations-lib'); -const levelDebug = 0; // Most verbose logging level -const levelInfo = 1; // Logs about state of the application -const levelWarn = 2; // Logs about warnings which dont immediately 
halt the application -const levelError = 3; // Logs about errors which dont immediately halt the application -// any value greater than levelError will work as levelNone +// LOGGER_IMPL can be `console` or `winston` +const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; -let logLevel = process.env.LOG_LEVEL ? parseInt(process.env.LOG_LEVEL, 10) : levelInfo; +let logLevel = process.env.LOG_LEVEL ?? 'error'; + +const logger = structuredLogger({ + level: logLevel, + fillExcept: [ + 'destinationId', + 'sourceId', + 'destinationType', + 'workspaceId', + 'module', + 'implementation', + 'feature', + 'destType', + ], +}); + +const getLogger = () => { + switch (loggerImpl) { + case 'winston': + return logger; + case 'console': + return console; + } +}; const setLogLevel = (level) => { + const logger = getLogger(); logLevel = level || logLevel; + logger?.setLogLevel(logLevel); +}; + +/** + * obtains the metadata for logging + * + * @param {*} metadata + * @returns { destinationId:string, sourceId:string, workspaceId: string, destType:string, module:string, implementation:string, feature:string } + */ +const getLogMetadata = (metadata) => { + let reqMeta = metadata; + if (Array.isArray(metadata)) { + [reqMeta] = metadata; + } + const destType = reqMeta?.destType || reqMeta?.destinationType; + return { + ...(reqMeta?.destinationId && { destinationId: reqMeta.destinationId }), + ...(reqMeta?.sourceId && { sourceId: reqMeta.sourceId }), + ...(reqMeta?.workspaceId && { workspaceId: reqMeta.workspaceId }), + ...(destType && { destType }), + ...(reqMeta?.module && { module: reqMeta.module }), + ...(reqMeta?.implementation && { implementation: reqMeta.implementation }), + ...(reqMeta?.feature && { feature: reqMeta.feature }), + }; +}; + +const formLogArgs = (args) => { + let msg = ''; + let otherArgs = []; + args.forEach((arg) => { + if (typeof arg !== 'object') { + msg += ' ' + arg; + return; + } + otherArgs.push(arg); + }); + return [msg, ...otherArgs]; +}; + +/** + * Perform 
logging operation on logMethod passed + * + * **Good practices**: + * - Do not have more than one array args in logger + * @param {*} logMethod + * - instance method reference + * - The logger should implement all of debug/info/warn/error methods + * @param {*} logArgs + * - the arguments that needs to be passed to logger instance method + */ +const log = (logMethod, logArgs) => { + const [message, ...args] = formLogArgs(logArgs); + const [logInfo, ...otherArgs] = args; + if (logInfo) { + const { metadata, ...otherLogInfoArgs } = logInfo; + if (Array.isArray(metadata)) { + metadata.forEach((m) => { + logMethod( + message, + { + ...getLogMetadata(m), + ...otherLogInfoArgs, + }, + ...otherArgs, + ); + }); + return; + } + logMethod( + message, + { + ...getLogMetadata(metadata), + ...otherLogInfoArgs, + }, + ...otherArgs, + ); + return; + } + logMethod(message); }; const debug = (...args) => { - if (levelDebug >= logLevel) { - console.debug(...args); + const logger = getLogger(); + if (LOGLEVELS.debug <= LOGLEVELS[logLevel]) { + log(logger.debug, args); } }; const info = (...args) => { - if (levelInfo >= logLevel) { - console.info(...args); + const logger = getLogger(); + if (LOGLEVELS.info <= LOGLEVELS[logLevel]) { + log(logger.info, args); } }; const warn = (...args) => { - if (levelWarn >= logLevel) { - console.warn(...args); + const logger = getLogger(); + if (LOGLEVELS.warn <= LOGLEVELS[logLevel]) { + log(logger.warn, args); } }; const error = (...args) => { - if (levelError >= logLevel) { - console.error(...args); + const logger = getLogger(); + if (LOGLEVELS.error <= LOGLEVELS[logLevel]) { + log(logger.error, args); + } +}; + +const requestLog = (identifierMsg, { metadata, requestDetails: { url, body, method } }) => { + const logger = getLogger(); + if (LOGLEVELS[logLevel] === LOGLEVELS.warn) { + const reqLogArgs = [identifierMsg, { metadata, url, body, method }]; + log(logger.warn, reqLogArgs); + } +}; + +const responseLog = ( + identifierMsg, + { metadata, 
responseDetails: { response: body, status, headers } }, +) => { + const logger = getLogger(); + if (LOGLEVELS[logLevel] === LOGLEVELS.warn) { + const resLogArgs = [identifierMsg, { metadata, body, status, headers }]; + log(logger.warn, resLogArgs); } }; @@ -42,8 +166,11 @@ module.exports = { warn, error, setLogLevel, - levelDebug, - levelInfo, - levelWarn, - levelError, + // levelDebug, + // levelInfo, + // levelWarn, + // levelError, + responseLog, + getLogMetadata, + requestLog, }; diff --git a/src/services/comparator.ts b/src/services/comparator.ts index 511436dfd1..0e28339797 100644 --- a/src/services/comparator.ts +++ b/src/services/comparator.ts @@ -1,5 +1,4 @@ /* eslint-disable class-methods-use-this */ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { DestinationService } from '../interfaces/DestinationService'; import { DeliveryV0Response, @@ -18,6 +17,7 @@ import { import { CommonUtils } from '../util/common'; import stats from '../util/stats'; import tags from '../v0/util/tags'; +import logger from '../logger'; const NS_PER_SEC = 1e9; @@ -204,7 +204,6 @@ export class ComparatorService implements DestinationService { destinationType, version, requestMetadata, - logger, ); const primaryTimeDiff = process.hrtime(primaryStartTime); const primaryTime = primaryTimeDiff[0] * NS_PER_SEC + primaryTimeDiff[1]; @@ -263,7 +262,6 @@ export class ComparatorService implements DestinationService { destinationType, version, requestMetadata, - logger, ); const primaryTimeDiff = process.hrtime(primaryStartTime); const primaryTime = primaryTimeDiff[0] * NS_PER_SEC + primaryTimeDiff[1]; @@ -322,7 +320,6 @@ export class ComparatorService implements DestinationService { destinationType, version, requestMetadata, - {}, ); const primaryTimeDiff = process.hrtime(primaryStartTime); const primaryTime = primaryTimeDiff[0] * NS_PER_SEC + primaryTimeDiff[1]; diff --git a/src/services/destination/__tests__/nativeIntegration.test.ts 
b/src/services/destination/__tests__/nativeIntegration.test.ts index 85d099d292..3ec3222b9d 100644 --- a/src/services/destination/__tests__/nativeIntegration.test.ts +++ b/src/services/destination/__tests__/nativeIntegration.test.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { FetchHandler } from '../../../helpers/fetchHandlers'; import { ProcessorTransformationOutput, @@ -48,7 +47,6 @@ describe('NativeIntegration Service', () => { destType, version, requestMetadata, - logger, ); expect(resp).toEqual(tresponse); @@ -79,7 +77,6 @@ describe('NativeIntegration Service', () => { destType, version, requestMetadata, - logger, ); const expected = [ diff --git a/src/services/destination/__tests__/postTransformation.test.ts b/src/services/destination/__tests__/postTransformation.test.ts index f961dcbce7..050ae57b7b 100644 --- a/src/services/destination/__tests__/postTransformation.test.ts +++ b/src/services/destination/__tests__/postTransformation.test.ts @@ -1,4 +1,4 @@ -import { MetaTransferObject, ProcessorTransformationRequest } from '../../../types/index'; +import { MetaTransferObject } from '../../../types/index'; import { DestinationPostTransformationService } from '../postTransformation'; import { ProcessorTransformationResponse } from '../../../types'; diff --git a/src/services/destination/cdkV2Integration.ts b/src/services/destination/cdkV2Integration.ts index a649da9154..a91bc5674b 100644 --- a/src/services/destination/cdkV2Integration.ts +++ b/src/services/destination/cdkV2Integration.ts @@ -19,9 +19,8 @@ import { UserDeletionResponse, } from '../../types/index'; import stats from '../../util/stats'; -import { CatchErr, FixMe } from '../../util/types'; +import { CatchErr } from '../../util/types'; import tags from '../../v0/util/tags'; -import { MiscService } from '../misc'; import { DestinationPostTransformationService } from './postTransformation'; export class CDKV2DestinationService implements 
DestinationService { @@ -56,7 +55,6 @@ export class CDKV2DestinationService implements DestinationService { destinationType: string, _version: string, requestMetadata: NonNullable, - logger: any, ): Promise { // TODO: Change the promise type const respList: ProcessorTransformationResponse[][] = await Promise.all( @@ -68,7 +66,6 @@ export class CDKV2DestinationService implements DestinationService { tags.FEATURES.PROCESSOR, ); metaTo.metadata = event.metadata; - const loggerWithCtx = logger.child({ ...MiscService.getLoggableData(metaTo.errorDetails) }); try { const transformedPayloads: | ProcessorTransformationOutput @@ -76,7 +73,6 @@ export class CDKV2DestinationService implements DestinationService { destinationType, event, tags.FEATURES.PROCESSOR, - loggerWithCtx, requestMetadata, ); stats.increment('event_transform_success', { @@ -115,7 +111,6 @@ export class CDKV2DestinationService implements DestinationService { destinationType: string, _version: string, requestMetadata: NonNullable, - logger: FixMe, ): Promise { const allDestEvents: object = groupBy( events, @@ -131,16 +126,12 @@ export class CDKV2DestinationService implements DestinationService { tags.FEATURES.ROUTER, ); metaTo.metadata = destInputArray[0].metadata; - const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(metaTo.errorDetails), - }); try { const doRouterTransformationResponse: RouterTransformationResponse[] = await processCdkV2Workflow( destinationType, destInputArray, tags.FEATURES.ROUTER, - loggerWithCtx, requestMetadata, ); return DestinationPostTransformationService.handleRouterTransformSuccessEvents( diff --git a/src/services/destination/nativeIntegration.ts b/src/services/destination/nativeIntegration.ts index 8fd0f09857..38a27ea71d 100644 --- a/src/services/destination/nativeIntegration.ts +++ b/src/services/destination/nativeIntegration.ts @@ -25,7 +25,6 @@ import { } from '../../types/index'; import stats from '../../util/stats'; import tags from '../../v0/util/tags'; 
-import { MiscService } from '../misc'; import { DestinationPostTransformationService } from './postTransformation'; export class NativeIntegrationDestinationService implements DestinationService { @@ -60,7 +59,6 @@ export class NativeIntegrationDestinationService implements DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: any, ): Promise { const destHandler = FetchHandler.getDestHandler(destinationType, version); const respList: ProcessorTransformationResponse[][] = await Promise.all( @@ -72,15 +70,10 @@ export class NativeIntegrationDestinationService implements DestinationService { tags.FEATURES.PROCESSOR, ); metaTO.metadata = event.metadata; - const loggerWithCtx = logger.child({ ...MiscService.getLoggableData(metaTO.errorDetails) }); try { const transformedPayloads: | ProcessorTransformationOutput - | ProcessorTransformationOutput[] = await destHandler.process( - event, - requestMetadata, - loggerWithCtx, - ); + | ProcessorTransformationOutput[] = await destHandler.process(event, requestMetadata); return DestinationPostTransformationService.handleProcessorTransformSucessEvents( event, transformedPayloads, @@ -104,7 +97,6 @@ export class NativeIntegrationDestinationService implements DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: any, ): Promise { const destHandler = FetchHandler.getDestHandler(destinationType, version); const allDestEvents: NonNullable = groupBy( @@ -120,16 +112,9 @@ export class NativeIntegrationDestinationService implements DestinationService { destInputArray[0].metadata?.workspaceId, tags.FEATURES.ROUTER, ); - const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(metaTO.errorDetails), - }); try { const doRouterTransformationResponse: RouterTransformationResponse[] = - await destHandler.processRouterDest( - cloneDeep(destInputArray), - requestMetadata, - loggerWithCtx, - ); + await 
destHandler.processRouterDest(cloneDeep(destInputArray), requestMetadata); metaTO.metadata = destInputArray[0].metadata; return DestinationPostTransformationService.handleRouterTransformSuccessEvents( doRouterTransformationResponse, @@ -156,7 +141,6 @@ export class NativeIntegrationDestinationService implements DestinationService { destinationType: string, version: any, requestMetadata: NonNullable, - logger: any, ): RouterTransformationResponse[] { const destHandler = FetchHandler.getDestHandler(destinationType, version); if (!destHandler.batch) { @@ -175,14 +159,10 @@ export class NativeIntegrationDestinationService implements DestinationService { tags.FEATURES.BATCH, ); metaTO.metadatas = events.map((event) => event.metadata); - const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(metaTO.errorDetails), - }); try { const destBatchedRequests: RouterTransformationResponse[] = destHandler.batch( destEvents, requestMetadata, - loggerWithCtx, ); return destBatchedRequests; } catch (error: any) { diff --git a/src/services/destination/postTransformation.ts b/src/services/destination/postTransformation.ts index 40cee61e66..7ab0d96af8 100644 --- a/src/services/destination/postTransformation.ts +++ b/src/services/destination/postTransformation.ts @@ -19,7 +19,7 @@ import { FixMe } from '../../util/types'; import { generateErrorObject } from '../../v0/util'; import tags from '../../v0/util/tags'; import { ErrorReportingService } from '../errorReporting'; -import { MiscService } from '../misc'; +import logger from '../../logger'; const defaultErrorMessages = { router: '[Router Transform] Error occurred while processing the payload.', @@ -68,7 +68,7 @@ export class DestinationPostTransformationService { error: errObj.message || '[Processor Transform] Error occurred while processing the payload.', statTags: errObj.statTags, } as ProcessorTransformationResponse; - MiscService.logError( + logger.error( errObj.message || '[Processor Transform] Error occurred 
while processing the payload.', metaTo.errorDetails, ); @@ -109,7 +109,7 @@ export class DestinationPostTransformationService { ...resp.statTags, ...metaTo.errorDetails, }; - MiscService.logError(resp.error || defaultErrorMessages.router, metaTo.errorDetails); + logger.error(resp.error || defaultErrorMessages.router, metaTo.errorDetails); stats.increment('event_transform_failure', metaTo.errorDetails); } else { stats.increment('event_transform_success', { @@ -138,7 +138,7 @@ export class DestinationPostTransformationService { error: errObj.message || defaultErrorMessages.router, statTags: errObj.statTags, } as RouterTransformationResponse; - MiscService.logError(errObj.message || defaultErrorMessages.router, metaTo.errorDetails); + logger.error(errObj.message || defaultErrorMessages.router, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); stats.increment('event_transform_failure', metaTo.errorDetails); return resp; @@ -156,7 +156,7 @@ export class DestinationPostTransformationService { error: errObj.message || defaultErrorMessages.delivery, statTags: errObj.statTags, } as RouterTransformationResponse; - MiscService.logError(error as string, metaTo.errorDetails); + logger.error(error as string, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); return resp; } @@ -187,10 +187,7 @@ export class DestinationPostTransformationService { const errObj = generateErrorObject(error, metaTo.errorDetails, false); const metadataArray = metaTo.metadatas; if (!Array.isArray(metadataArray)) { - MiscService.logError( - 'Proxy v1 endpoint error : metadataArray is not an array', - metaTo.errorDetails, - ); + logger.error('Proxy v1 endpoint error : metadataArray is not an array', metaTo.errorDetails); // Panic throw new PlatformError('Proxy v1 endpoint error : metadataArray is not an array'); } @@ -215,7 +212,7 @@ export class DestinationPostTransformationService { authErrorCategory: 
errObj.authErrorCategory, }), } as DeliveryV1Response; - MiscService.logError(errObj.message, metaTo.errorDetails); + logger.error(errObj.message, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); return resp; } @@ -233,7 +230,7 @@ export class DestinationPostTransformationService { authErrorCategory: errObj.authErrorCategory, }), } as UserDeletionResponse; - MiscService.logError(errObj.message, metaTo.errorDetails); + logger.error(errObj.message, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); return resp; } diff --git a/src/services/misc.ts b/src/services/misc.ts index 3df1196c1d..09051edeec 100644 --- a/src/services/misc.ts +++ b/src/services/misc.ts @@ -1,11 +1,10 @@ /* eslint-disable global-require, import/no-dynamic-require */ -import { LoggableExtraData, structuredLogger as logger } from '@rudderstack/integrations-lib'; import fs from 'fs'; import { Context } from 'koa'; import path from 'path'; import { DestHandlerMap } from '../constants/destinationCanonicalNames'; import { getCPUProfile, getHeapProfile } from '../middleware'; -import { ErrorDetailer, Metadata } from '../types'; +import { Metadata } from '../types'; export class MiscService { public static getDestHandler(dest: string, version: string) { @@ -75,21 +74,4 @@ export class MiscService { public static async getHeapProfile() { return getHeapProfile(); } - - public static getLoggableData(errorDetailer: ErrorDetailer): Partial { - return { - ...(errorDetailer?.destinationId && { destinationId: errorDetailer.destinationId }), - ...(errorDetailer?.sourceId && { sourceId: errorDetailer.sourceId }), - ...(errorDetailer?.workspaceId && { workspaceId: errorDetailer.workspaceId }), - ...(errorDetailer?.destType && { destType: errorDetailer.destType }), - ...(errorDetailer?.module && { module: errorDetailer.module }), - ...(errorDetailer?.implementation && { implementation: errorDetailer.implementation }), - 
...(errorDetailer?.feature && { feature: errorDetailer.feature }), - }; - } - - public static logError(message: string, errorDetailer: ErrorDetailer) { - const loggableExtraData: Partial = this.getLoggableData(errorDetailer); - logger.errorw(message || '', loggableExtraData); - } } diff --git a/src/services/source/__tests__/nativeIntegration.test.ts b/src/services/source/__tests__/nativeIntegration.test.ts index 77e355fd1a..a2a5af041e 100644 --- a/src/services/source/__tests__/nativeIntegration.test.ts +++ b/src/services/source/__tests__/nativeIntegration.test.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { FetchHandler } from '../../../helpers/fetchHandlers'; import { RudderMessage, SourceTransformationResponse } from '../../../types/index'; import stats from '../../../util/stats'; @@ -44,13 +43,7 @@ describe('NativeIntegration Source Service', () => { }); const service = new NativeIntegrationSourceService(); - const resp = await service.sourceTransformRoutine( - events, - sourceType, - version, - requestMetadata, - logger, - ); + const resp = await service.sourceTransformRoutine(events, sourceType, version, requestMetadata); expect(resp).toEqual(tresponse); @@ -87,13 +80,7 @@ describe('NativeIntegration Source Service', () => { jest.spyOn(stats, 'increment').mockImplementation(() => {}); const service = new NativeIntegrationSourceService(); - const resp = await service.sourceTransformRoutine( - events, - sourceType, - version, - requestMetadata, - logger, - ); + const resp = await service.sourceTransformRoutine(events, sourceType, version, requestMetadata); expect(resp).toEqual(tresponse); diff --git a/src/services/source/nativeIntegration.ts b/src/services/source/nativeIntegration.ts index 2ecfc30066..a4f26d068a 100644 --- a/src/services/source/nativeIntegration.ts +++ b/src/services/source/nativeIntegration.ts @@ -9,8 +9,8 @@ import { import stats from '../../util/stats'; import { FixMe } from 
'../../util/types'; import tags from '../../v0/util/tags'; -import { MiscService } from '../misc'; import { SourcePostTransformationService } from './postTransformation'; +import logger from '../../logger'; export class NativeIntegrationSourceService implements SourceService { public getTags(): MetaTransferObject { @@ -32,23 +32,23 @@ export class NativeIntegrationSourceService implements SourceService { version: string, // eslint-disable-next-line @typescript-eslint/no-unused-vars _requestMetadata: NonNullable, - logger: FixMe, ): Promise { const sourceHandler = FetchHandler.getSourceHandler(sourceType, version); const metaTO = this.getTags(); - const loggerWithCtx = logger.child({ ...MiscService.getLoggableData(metaTO.errorDetails) }); const respList: SourceTransformationResponse[] = await Promise.all( sourceEvents.map(async (sourceEvent) => { try { const respEvents: RudderMessage | RudderMessage[] | SourceTransformationResponse = - await sourceHandler.process(sourceEvent, loggerWithCtx); + await sourceHandler.process(sourceEvent); return SourcePostTransformationService.handleSuccessEventsSource(respEvents); } catch (error: FixMe) { stats.increment('source_transform_errors', { source: sourceType, version, }); - logger.debug('Error during source Transform', error); + logger.debug(`Error during source Transform: ${error}`, { + ...logger.getLogMetadata(metaTO.errorDetails), + }); return SourcePostTransformationService.handleFailureEventsSource(error, metaTO); } }), diff --git a/src/util/errorNotifier/bugsnag.js b/src/util/errorNotifier/bugsnag.js index ef01c58730..a6a22655ad 100644 --- a/src/util/errorNotifier/bugsnag.js +++ b/src/util/errorNotifier/bugsnag.js @@ -22,7 +22,7 @@ const { NetworkInstrumentationError, } = require('@rudderstack/integrations-lib'); const { FilteredEventsError } = require('../../v0/util/errorTypes'); -const { logger } = require('../../logger'); +const logger = require('../../logger'); const pkg = require('../../../package.json'); const { 
diff --git a/src/util/errorNotifier/default.js b/src/util/errorNotifier/default.js index 28557a22f2..18f04f055d 100644 --- a/src/util/errorNotifier/default.js +++ b/src/util/errorNotifier/default.js @@ -3,7 +3,7 @@ const logger = require('../../logger'); function init() {} function notify(err, context, metadata) { - logger.error(err, context, metadata); + logger.error(err, { context, metadata }); } module.exports = { diff --git a/src/util/prometheus.js b/src/util/prometheus.js index bc4c6f2eb9..72f424d39a 100644 --- a/src/util/prometheus.js +++ b/src/util/prometheus.js @@ -567,6 +567,11 @@ class Prometheus { 'statusCode', 'requestMethod', 'module', + 'workspaceId', + 'destinationId', + 'module', + 'implementation', + 'sourceId', ], }, @@ -631,7 +636,18 @@ class Prometheus { name: 'outgoing_request_latency', help: 'Outgoing HTTP requests duration in seconds', type: 'histogram', - labelNames: ['feature', 'destType', 'endpointPath', 'requestMethod', 'module'], + labelNames: [ + 'feature', + 'destType', + 'endpointPath', + 'requestMethod', + 'module', + 'workspaceId', + 'destinationId', + 'module', + 'implementation', + 'sourceId', + ], }, { name: 'http_request_duration', diff --git a/src/util/redis/redisConnector.js b/src/util/redis/redisConnector.js index 84d578d3b3..7dc20a305d 100644 --- a/src/util/redis/redisConnector.js +++ b/src/util/redis/redisConnector.js @@ -1,7 +1,7 @@ const Redis = require('ioredis'); const { RedisError } = require('@rudderstack/integrations-lib'); -const log = require('../../logger'); const stats = require('../stats'); +const logger = require('../../logger'); const timeoutPromise = () => new Promise((_, reject) => { @@ -29,13 +29,13 @@ const RedisDB = { stats.increment('redis_error', { operation: 'redis_down', }); - log.error(`Redis is down at ${this.host}:${this.port}`); + logger.error(`Redis is down at ${this.host}:${this.port}`); return false; // stop retrying }, tls: {}, }); this.client.on('ready', () => { - log.info(`Connected to redis 
at ${this.host}:${this.port}`); + logger.info(`Connected to redis at ${this.host}:${this.port}`); }); } }, @@ -89,7 +89,7 @@ const RedisDB = { stats.increment('redis_error', { operation: 'get', }); - log.error(`Error getting value from Redis: ${e}`); + logger.error(`Error getting value from Redis: ${e}`); throw new RedisError(`Error getting value from Redis: ${e}`); } }, @@ -124,13 +124,13 @@ const RedisDB = { stats.increment('redis_error', { operation: 'set', }); - log.error(`Error setting value in Redis due ${e}`); + logger.error(`Error setting value in Redis due ${e}`); throw new RedisError(`Error setting value in Redis due ${e}`); } }, async disconnect() { if (process.env.USE_REDIS_DB && process.env.USE_REDIS_DB !== 'false') { - log.info(`Disconnecting from redis at ${this.host}:${this.port}`); + logger.info(`Disconnecting from redis at ${this.host}:${this.port}`); this.client.disconnect(); } }, diff --git a/src/util/redis/redisConnector.test.js b/src/util/redis/redisConnector.test.js index 7cf2ccbbcf..659b32925b 100644 --- a/src/util/redis/redisConnector.test.js +++ b/src/util/redis/redisConnector.test.js @@ -2,8 +2,8 @@ const fs = require('fs'); const path = require('path'); const version = 'v0'; const { RedisDB } = require('./redisConnector'); -const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); jest.mock('ioredis', () => require('../../../test/__mocks__/redis')); + const sourcesList = ['shopify']; process.env.USE_REDIS_DB = 'true'; @@ -55,7 +55,7 @@ describe(`Source Tests`, () => { data.forEach((dataPoint, index) => { it(`${index}. 
${source} - ${dataPoint.description}`, async () => { try { - const output = await transformer.process(dataPoint.input, logger); + const output = await transformer.process(dataPoint.input); expect(output).toEqual(dataPoint.output); } catch (error) { expect(error.message).toEqual(dataPoint.output.error); diff --git a/src/util/utils.js b/src/util/utils.js index d74603dd7a..1ac70b9541 100644 --- a/src/util/utils.js +++ b/src/util/utils.js @@ -170,7 +170,8 @@ function processInfo() { } function logProcessInfo() { - logger.error(`Process info: `, util.inspect(processInfo(), false, null, true)); + const inspectedInfo = util.inspect(processInfo(), false, Infinity, true); + logger.error(`Process info: ${inspectedInfo}`); } // stringLiterals expected to be an array of strings. A line in trace should contain diff --git a/src/v0/destinations/braze/util.js b/src/v0/destinations/braze/util.js index f131c40f5f..4253619d33 100644 --- a/src/v0/destinations/braze/util.js +++ b/src/v0/destinations/braze/util.js @@ -1,7 +1,7 @@ /* eslint-disable */ const _ = require('lodash'); const get = require('get-value'); -const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const logger = require('../../../logger'); const stats = require('../../../util/stats'); const { handleHttpRequest } = require('../../../adapters/network'); const { diff --git a/src/v0/destinations/campaign_manager/transform.js b/src/v0/destinations/campaign_manager/transform.js index ef208df5d1..7f007ef633 100644 --- a/src/v0/destinations/campaign_manager/transform.js +++ b/src/v0/destinations/campaign_manager/transform.js @@ -256,8 +256,7 @@ const batchEvents = (eventChunksArray) => { return batchedResponseList; }; -const processRouterDest = async (inputs, reqMetadata, logger) => { - logger.debug(`Transformation router request received with size ${inputs.length}`); +const processRouterDest = async (inputs, reqMetadata) => { const batchErrorRespList = []; const eventChunksArray = []; const { 
destination } = inputs[0]; diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/config.js b/src/v0/destinations/google_adwords_enhanced_conversions/config.js index 8d194655f7..e8f486fb7a 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/config.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/config.js @@ -16,4 +16,5 @@ module.exports = { BASE_ENDPOINT, hashAttributes, CONVERSION_ACTION_ID_CACHE_TTL, + destType: 'google_adwords_enhanced_conversions', }; diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js index f7ac660f53..d82349c04d 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js @@ -4,8 +4,9 @@ const { NetworkError, NetworkInstrumentationError } = require('@rudderstack/inte const SqlString = require('sqlstring'); const { prepareProxyRequest, handleHttpRequest } = require('../../../adapters/network'); const { isHttpStatusSuccess, getAuthErrCategoryFromStCode } = require('../../util/index'); -const { CONVERSION_ACTION_ID_CACHE_TTL } = require('./config'); +const { CONVERSION_ACTION_ID_CACHE_TTL, destType } = require('./config'); const Cache = require('../../util/cache'); +const logger = require('../../../logger'); const conversionActionIdCache = new Cache(CONVERSION_ACTION_ID_CACHE_TTL); @@ -27,7 +28,7 @@ const ERROR_MSG_PATH = 'response[0].error.message'; * @returns */ -const getConversionActionId = async (method, headers, params) => { +const getConversionActionId = async ({ method, headers, params, metadata }) => { const conversionActionIdKey = sha256(params.event + params.customerId).toString(); return conversionActionIdCache.get(conversionActionIdKey, async () => { const queryString = SqlString.format( @@ -37,8 +38,13 @@ const getConversionActionId = async (method, headers, params) => { 
const data = { query: queryString, }; + const searchStreamEndpoint = `${BASE_ENDPOINT}/${params.customerId}/googleAds:searchStream`; + logger.requestLog(`[${destType.toUpperCase()}] get conversion action id request`, { + metadata, + requestDetails: { url: searchStreamEndpoint, body: data, method }, + }); const requestBody = { - url: `${BASE_ENDPOINT}/${params.customerId}/googleAds:searchStream`, + url: searchStreamEndpoint, data, headers, method, @@ -52,21 +58,31 @@ const getConversionActionId = async (method, headers, params) => { endpointPath: `/googleAds:searchStream`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); - if (!isHttpStatusSuccess(gaecConversionActionIdResponse.status)) { + const { status, response, headers: responseHeaders } = gaecConversionActionIdResponse; + logger.responseLog(`[${destType.toUpperCase()}] get conversion action id response`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, + }); + if (!isHttpStatusSuccess(status)) { throw new NetworkError( `"${JSON.stringify( get(gaecConversionActionIdResponse, ERROR_MSG_PATH, '') ? 
get(gaecConversionActionIdResponse, ERROR_MSG_PATH, '') - : gaecConversionActionIdResponse.response, + : response, )} during Google_adwords_enhanced_conversions response transformation"`, - gaecConversionActionIdResponse.status, + status, { - [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(gaecConversionActionIdResponse.status), + [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(status), }, - gaecConversionActionIdResponse.response, - getAuthErrCategoryFromStCode(gaecConversionActionIdResponse.status), + response, + getAuthErrCategoryFromStCode(status), ); } const conversionActionId = get( @@ -90,23 +106,41 @@ const getConversionActionId = async (method, headers, params) => { * @returns */ const ProxyRequest = async (request) => { - const { body, method, endpoint, params } = request; + const { body, method, endpoint, params, metadata } = request; const { headers } = request; - const conversionActionId = await getConversionActionId(method, headers, params); + const conversionActionId = await getConversionActionId({ method, headers, params, metadata }); set( body.JSON, 'conversionAdjustments[0].conversionAction', `customers/${params.customerId}/conversionActions/${conversionActionId}`, ); + logger.requestLog(`[${destType.toUpperCase()}] conversion enhancement request`, { + metadata, + requestDetails: { url: endpoint, body: body.JSON, method }, + }); const requestBody = { url: endpoint, data: body.JSON, headers, method }; - const { httpResponse: response } = await handleHttpRequest('constructor', requestBody, { - destType: 'google_adwords_enhanced_conversions', - feature: 'proxy', - endpointPath: `/googleAds:uploadOfflineUserData`, - requestMethod: 'POST', - module: 'dataDelivery', + const { httpResponse: response, processedResponse } = await handleHttpRequest( + 'constructor', + requestBody, + { + destType: 'google_adwords_enhanced_conversions', + feature: 'proxy', + endpointPath: `/googleAds:uploadOfflineUserData`, + requestMethod: 'POST', + module: 'dataDelivery', 
+ metadata, + }, + ); + const { response: processedResp, status, headers: responseHeaders } = processedResponse; + logger.responseLog(`[${destType.toUpperCase()}] conversion enhancement response`, { + metadata, + responseDetails: { + response: processedResp, + status, + headers: responseHeaders, + }, }); return response; }; diff --git a/src/v0/destinations/google_adwords_offline_conversions/config.js b/src/v0/destinations/google_adwords_offline_conversions/config.js index f065be946c..6eec1068a6 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/config.js +++ b/src/v0/destinations/google_adwords_offline_conversions/config.js @@ -48,6 +48,7 @@ const consentConfigMap = { }; module.exports = { + destType: 'google_adwords_offline_conversions', trackClickConversionsMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.TRACK_CLICK_CONVERSIONS_CONFIG.name], trackCallConversionsMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.TRACK_CALL_CONVERSIONS_CONFIG.name], diff --git a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js index 5541fd6e1e..51bc57d176 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js @@ -6,7 +6,7 @@ const { NetworkInstrumentationError, NetworkError, } = require('@rudderstack/integrations-lib'); -const { prepareProxyRequest, httpSend, httpPOST } = require('../../../adapters/network'); +const { prepareProxyRequest, httpPOST, handleHttpRequest } = require('../../../adapters/network'); const { isHttpStatusSuccess, getHashFromArray, @@ -15,31 +15,49 @@ const { } = require('../../util'); const { getConversionActionId } = require('./utils'); const Cache = require('../../util/cache'); -const { CONVERSION_CUSTOM_VARIABLE_CACHE_TTL, SEARCH_STREAM } = require('./config'); +const { CONVERSION_CUSTOM_VARIABLE_CACHE_TTL, SEARCH_STREAM, destType } = 
require('./config'); const { processAxiosResponse, getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); const tags = require('../../util/tags'); +const logger = require('../../../logger'); const conversionCustomVariableCache = new Cache(CONVERSION_CUSTOM_VARIABLE_CACHE_TTL); -const createJob = async (endpoint, headers, payload) => { +const createJob = async ({ endpoint, headers, payload, metadata }) => { const endPoint = `${endpoint}:create`; + logger.requestLog(`[${destType.toUpperCase()}] job creation request`, { + metadata, + requestDetails: { + url: endpoint, + body: payload, + method: 'post', + }, + }); let createJobResponse = await httpPOST( endPoint, payload, { headers }, { - destType: 'google_adwords_offline_conversions', + destType, feature: 'proxy', endpointPath: `/create`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); createJobResponse = processAxiosResponse(createJobResponse); - const { response, status } = createJobResponse; + const { response, status, headers: responseHeaders } = createJobResponse; + logger.responseLog(`[${destType.toUpperCase()}] create job`, { + metadata, + responseDetails: { + headers: responseHeaders, + status, + response, + }, + }); if (!isHttpStatusSuccess(status)) { throw new AbortedError( `[Google Ads Offline Conversions]:: ${response?.error?.message} during google_ads_offline_store_conversions Job Creation`, @@ -51,8 +69,16 @@ const createJob = async (endpoint, headers, payload) => { return response.resourceName.split('/')[3]; }; -const addConversionToJob = async (endpoint, headers, jobId, payload) => { +const addConversionToJob = async ({ endpoint, headers, jobId, payload, metadata }) => { const endPoint = `${endpoint}/${jobId}:addOperations`; + logger.requestLog(`[${destType.toUpperCase()}] add conversion to job request`, { + metadata, + requestDetails: { + url: endpoint, + body: payload, + method: 'post', + }, + }); let addConversionToJobResponse = await httpPOST( endPoint, payload, 
@@ -63,23 +89,42 @@ const addConversionToJob = async (endpoint, headers, jobId, payload) => { endpointPath: `/addOperations`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); addConversionToJobResponse = processAxiosResponse(addConversionToJobResponse); - if (!isHttpStatusSuccess(addConversionToJobResponse.status)) { + const { response, status, headers: responseHeaders } = addConversionToJobResponse; + logger.responseLog(`[${destType.toUpperCase()}] add conversion to job`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, + }); + if (!isHttpStatusSuccess(status)) { throw new AbortedError( - `[Google Ads Offline Conversions]:: ${addConversionToJobResponse.response?.error?.message} during google_ads_offline_store_conversions Add Conversion`, - addConversionToJobResponse.status, - addConversionToJobResponse.response, + `[Google Ads Offline Conversions]:: ${response?.error?.message} during google_ads_offline_store_conversions Add Conversion`, + status, + response, getAuthErrCategoryFromStCode(get(addConversionToJobResponse, 'status')), ); } return true; }; -const runTheJob = async (endpoint, headers, payload, jobId) => { +const runTheJob = async ({ endpoint, headers, payload, jobId, metadata }) => { const endPoint = `${endpoint}/${jobId}:run`; - const executeJobResponse = await httpPOST( + logger.requestLog(`[${destType.toUpperCase()}] run job request`, { + metadata, + requestDetails: { + body: payload, + method: 'POST', + url: endPoint, + }, + }); + const { httpResponse: executeJobResponse, processedResponse } = await handleHttpRequest( + 'post', endPoint, payload, { headers }, @@ -89,8 +134,18 @@ const runTheJob = async (endpoint, headers, payload, jobId) => { endpointPath: `/run`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); + const { headers: responseHeaders, response, status } = processedResponse; + logger.responseLog(`[${destType.toUpperCase()}] run job`, { + metadata, + responseDetails: 
{ + response, + status, + headers: responseHeaders, + }, + }); return executeJobResponse; }; @@ -102,7 +157,7 @@ const runTheJob = async (endpoint, headers, payload, jobId) => { * @param {*} headers * @returns */ -const getConversionCustomVariable = async (headers, params) => { +const getConversionCustomVariable = async ({ headers, params, metadata }) => { const conversionCustomVariableKey = sha256(params.customerId).toString(); return conversionCustomVariableCache.get(conversionCustomVariableKey, async () => { const data = { @@ -112,23 +167,41 @@ const getConversionCustomVariable = async (headers, params) => { const requestOptions = { headers, }; + logger.requestLog(`[${destType.toUpperCase()}] get conversion custom variable request`, { + metadata, + requestDetails: { + url: endpoint, + body: data, + method: 'post', + }, + }); let searchStreamResponse = await httpPOST(endpoint, data, requestOptions, { destType: 'google_adwords_offline_conversions', feature: 'proxy', endpointPath: `/searchStream`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }); searchStreamResponse = processAxiosResponse(searchStreamResponse); - if (!isHttpStatusSuccess(searchStreamResponse.status)) { + const { response, status, headers: responseHeaders } = searchStreamResponse; + logger.responseLog(`[${destType.toUpperCase()}] get conversion custom variable`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, + }); + if (!isHttpStatusSuccess(status)) { throw new NetworkError( - `[Google Ads Offline Conversions]:: ${searchStreamResponse?.response?.[0]?.error?.message} during google_ads_offline_conversions response transformation`, - searchStreamResponse.status, + `[Google Ads Offline Conversions]:: ${response?.[0]?.error?.message} during google_ads_offline_conversions response transformation`, + status, { - [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(searchStreamResponse.status), + [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(status), }, -
searchStreamResponse?.response || searchStreamResponse, - getAuthErrCategoryFromStCode(searchStreamResponse.status), + response || searchStreamResponse, + getAuthErrCategoryFromStCode(status), ); } const conversionCustomVariable = get(searchStreamResponse, 'response.0.results'); @@ -195,37 +268,53 @@ const isValidCustomVariables = (customVariables) => { * @returns */ const ProxyRequest = async (request) => { - const { method, endpoint, headers, params, body } = request; + const { method, endpoint, headers, params, body, metadata } = request; if (body.JSON?.isStoreConversion) { - const firstResponse = await createJob(endpoint, headers, body.JSON.createJobPayload); + const firstResponse = await createJob({ + endpoint, + headers, + payload: body.JSON.createJobPayload, + metadata, + }); const addPayload = body.JSON.addConversionPayload; // Mapping Conversion Action - const conversionId = await getConversionActionId(headers, params); + const conversionId = await getConversionActionId({ headers, params, metadata }); if (Array.isArray(addPayload.operations)) { addPayload.operations.forEach((operation) => { set(operation, 'create.transaction_attribute.conversion_action', conversionId); }); } - await addConversionToJob(endpoint, headers, firstResponse, addPayload); - const thirdResponse = await runTheJob( + await addConversionToJob({ endpoint, headers, - body.JSON.executeJobPayload, - firstResponse, - ); + jobId: firstResponse, + payload: addPayload, + metadata, + }); + const thirdResponse = await runTheJob({ + endpoint, + headers, + payload: body.JSON.executeJobPayload, + jobId: firstResponse, + metadata, + }); return thirdResponse; } // fetch conversionAction // httpPOST -> myAxios.post() if (params?.event) { - const conversionActionId = await getConversionActionId(headers, params); + const conversionActionId = await getConversionActionId({ headers, params, metadata }); set(body.JSON, 'conversions.0.conversionAction', conversionActionId); } // customVariables would be 
undefined in case of Store Conversions if (isValidCustomVariables(params.customVariables)) { // fetch all conversion custom variable in google ads - let conversionCustomVariable = await getConversionCustomVariable(headers, params); + let conversionCustomVariable = await getConversionCustomVariable({ + headers, + params, + metadata, + }); // convert it into hashMap conversionCustomVariable = getConversionCustomVariableHashMap(conversionCustomVariable); @@ -249,16 +338,30 @@ const ProxyRequest = async (request) => { set(body.JSON, 'conversions.0.customVariables', resultantCustomVariables); } } const requestBody = { url: endpoint, data: body.JSON, headers, method }; - const response = await httpSend(requestBody, { + logger.requestLog(`[${destType.toUpperCase()}] offline conversion creation request`, { + metadata, + requestDetails: { url: requestBody.url, body: requestBody.data, method }, + }); + const { httpResponse, processedResponse } = await handleHttpRequest('constructor', requestBody, { feature: 'proxy', destType: 'gogole_adwords_offline_conversions', endpointPath: `/proxy`, requestMethod: 'POST', module: 'dataDelivery', + metadata, + }); + const { headers: responseHeaders, status, response } = processedResponse; + logger.responseLog(`[${destType.toUpperCase()}] deliver event to destination`, { + metadata, + responseDetails: { + response, + headers: responseHeaders, + status, + }, }); - return response; + return httpResponse; }; const responseHandler = (responseParams) => { diff --git a/src/v0/destinations/google_adwords_offline_conversions/utils.js b/src/v0/destinations/google_adwords_offline_conversions/utils.js index dfa892a769..bf1773d450 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/utils.js +++ b/src/v0/destinations/google_adwords_offline_conversions/utils.js @@ -30,11 +30,13 @@ const { CLICK_CONVERSION, trackCallConversionsMapping, consentConfigMap, + destType, } = require('./config'); const { processAxiosResponse } =
require('../../../adapters/utils/networkUtils'); const Cache = require('../../util/cache'); const helper = require('./helper'); const { finaliseConsent } = require('../../util/googleUtils'); +const logger = require('../../../logger'); const conversionActionIdCache = new Cache(CONVERSION_ACTION_ID_CACHE_TTL); @@ -55,7 +57,7 @@ const validateDestinationConfig = ({ Config }) => { * @param {*} headers * @returns */ -const getConversionActionId = async (headers, params) => { +const getConversionActionId = async ({ headers, params, metadata }) => { const conversionActionIdKey = sha256(params.event + params.customerId).toString(); return conversionActionIdCache.get(conversionActionIdKey, async () => { const queryString = SqlString.format( @@ -69,21 +71,39 @@ const getConversionActionId = async (headers, params) => { const requestOptions = { headers, }; + logger.requestLog(`[${destType.toUpperCase()}] get conversion action id request`, { + metadata, + requestDetails: { + url: endpoint, + body: data, + method: 'post', + }, + }); let searchStreamResponse = await httpPOST(endpoint, data, requestOptions, { destType: 'google_adwords_offline_conversions', feature: 'transformation', endpointPath: `/googleAds:searchStream`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }); searchStreamResponse = processAxiosResponse(searchStreamResponse); - if (!isHttpStatusSuccess(searchStreamResponse.status)) { + const { response, status, headers: responseHeaders } = searchStreamResponse; + logger.responseLog(`[${destType.toUpperCase()}] get conversion action id response`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, + }); + if (!isHttpStatusSuccess(status)) { throw new AbortedError( `[Google Ads Offline Conversions]:: ${JSON.stringify( - searchStreamResponse.response, + response, )} during google_ads_offline_conversions response transformation`, - searchStreamResponse.status, - searchStreamResponse.response, + status, + response, 
getAuthErrCategoryFromStCode(get(searchStreamResponse, 'status')), ); } diff --git a/src/v0/destinations/google_adwords_remarketing_lists/config.js b/src/v0/destinations/google_adwords_remarketing_lists/config.js index 0f08b3866d..f8983699c6 100644 --- a/src/v0/destinations/google_adwords_remarketing_lists/config.js +++ b/src/v0/destinations/google_adwords_remarketing_lists/config.js @@ -29,4 +29,5 @@ module.exports = { offlineDataJobsMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.AUDIENCE_LIST.name], addressInfoMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.ADDRESSINFO.name], consentConfigMap, + destType: 'google_adwords_remarketing_lists', }; diff --git a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js index 3045c1713f..82fb62b74e 100644 --- a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js +++ b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js @@ -1,12 +1,14 @@ const { NetworkError } = require('@rudderstack/integrations-lib'); -const { httpSend, prepareProxyRequest } = require('../../../adapters/network'); +const { prepareProxyRequest, handleHttpRequest } = require('../../../adapters/network'); const { isHttpStatusSuccess, getAuthErrCategoryFromStCode } = require('../../util/index'); +const logger = require('../../../logger'); const { processAxiosResponse, getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); const tags = require('../../util/tags'); +const { destType } = require('./config'); /** * This function helps to create a offlineUserDataJobs * @param endpoint @@ -18,7 +20,7 @@ const tags = require('../../util/tags'); * ref: https://developers.google.com/google-ads/api/rest/reference/rest/v15/CustomerMatchUserListMetadata */ -const createJob = async (endpoint, headers, method, params) => { +const createJob = async ({ endpoint, headers, method, params, metadata }) => { const jobCreatingUrl = 
`${endpoint}:create`; const customerMatchUserListMetadata = { userList: `customers/${params.customerId}/userLists/${params.listId}`, @@ -37,14 +39,31 @@ const createJob = async (endpoint, headers, method, params) => { headers, method, }; - const response = await httpSend(jobCreatingRequest, { - destType: 'google_adwords_remarketing_lists', - feature: 'proxy', - endpointPath: '/customers/create', - requestMethod: 'POST', - module: 'dataDelivery', + logger.requestLog(`[${destType.toUpperCase()}] job creation request`, { + metadata, + requestDetails: { + url: jobCreatingRequest.url, + body: jobCreatingRequest.data, + method: jobCreatingRequest.method, + }, }); - return response; + const { httpResponse, processedResponse } = await handleHttpRequest( + 'constructor', + jobCreatingRequest, + { + destType: 'google_adwords_remarketing_lists', + feature: 'proxy', + endpointPath: '/customers/create', + requestMethod: 'POST', + module: 'dataDelivery', + metadata, + }, + ); + logger.responseLog(`[${destType.toUpperCase()}] job creation response`, { + metadata, + responseDetails: processedResponse, + }); + return httpResponse; }; /** * This function helps to put user details in a offlineUserDataJobs @@ -55,7 +74,7 @@ const createJob = async (endpoint, headers, method, params) => { * @param body */ -const addUserToJob = async (endpoint, headers, method, jobId, body) => { +const addUserToJob = async ({ endpoint, headers, method, jobId, body, metadata }) => { const jobAddingUrl = `${endpoint}/${jobId}:addOperations`; const secondRequest = { url: jobAddingUrl, @@ -63,12 +82,29 @@ const addUserToJob = async (endpoint, headers, method, jobId, body) => { headers, method, }; - const response = await httpSend(secondRequest, { - destType: 'google_adwords_remarketing_lists', - feature: 'proxy', - endpointPath: '/addOperations', - requestMethod: 'POST', - module: 'dataDelivery', + logger.requestLog(`[${destType.toUpperCase()}] add user to job request`, { + metadata, + requestDetails: { + 
url: secondRequest.url, + body: secondRequest.data, + method: secondRequest.method, + }, + }); + const { httpResponse: response, processedResponse } = await handleHttpRequest( + 'constructor', + secondRequest, + { + destType: 'google_adwords_remarketing_lists', + feature: 'proxy', + endpointPath: '/addOperations', + requestMethod: 'POST', + module: 'dataDelivery', + metadata, + }, + ); + logger.responseLog(`[${destType.toUpperCase()}] add user to job response`, { + metadata, + responseDetails: processedResponse, }); return response; }; @@ -80,19 +116,36 @@ const addUserToJob = async (endpoint, headers, method, jobId, body) => { * @param method * @param jobId */ -const runTheJob = async (endpoint, headers, method, jobId) => { +const runTheJob = async ({ endpoint, headers, method, jobId, metadata }) => { const jobRunningUrl = `${endpoint}/${jobId}:run`; const thirdRequest = { url: jobRunningUrl, headers, method, }; - const response = await httpSend(thirdRequest, { - destType: 'google_adwords_remarketing_lists', - feature: 'proxy', - endpointPath: '/run', - requestMethod: 'POST', - module: 'dataDelivery', + logger.requestLog(`[${destType.toUpperCase()}] run job request`, { + metadata, + requestDetails: { + url: thirdRequest.url, + body: thirdRequest.data, + method: thirdRequest.method, + }, + }); + const { httpResponse: response, processedResponse } = await handleHttpRequest( + 'constructor', + thirdRequest, + { + destType: 'google_adwords_remarketing_lists', + feature: 'proxy', + endpointPath: '/run', + requestMethod: 'POST', + module: 'dataDelivery', + metadata, + }, + ); + logger.responseLog(`[${destType.toUpperCase()}] run job response`, { + metadata, + responseDetails: processedResponse, }); return response; }; @@ -104,12 +156,12 @@ const runTheJob = async (endpoint, headers, method, jobId) => { * @param {*} body * @returns */ const gaAudienceProxyRequest = async (request) => { - const { body, method, params, endpoint } =
request; const { headers } = request; // step1: offlineUserDataJobs creation - const firstResponse = await createJob(endpoint, headers, method, params); + const firstResponse = await createJob({ endpoint, headers, method, params, metadata }); if (!firstResponse.success && !isHttpStatusSuccess(firstResponse?.response?.status)) { return firstResponse; } @@ -126,7 +178,7 @@ const gaAudienceProxyRequest = async (request) => { if (firstResponse?.response?.data?.resourceName) // eslint-disable-next-line prefer-destructuring jobId = firstResponse.response.data.resourceName.split('/')[3]; - const secondResponse = await addUserToJob(endpoint, headers, method, jobId, body); + const secondResponse = await addUserToJob({ endpoint, headers, method, jobId, body, metadata }); if (!secondResponse.success && !isHttpStatusSuccess(secondResponse?.response?.status)) { return secondResponse; } @@ -139,7 +191,7 @@ const gaAudienceProxyRequest = async (request) => { } // step3: running the job - const thirdResponse = await runTheJob(endpoint, headers, method, jobId); + const thirdResponse = await runTheJob({ endpoint, headers, method, jobId, metadata }); return thirdResponse; }; diff --git a/src/v0/destinations/klaviyo/config.js b/src/v0/destinations/klaviyo/config.js index 5c15804e14..d8583ab9cb 100644 --- a/src/v0/destinations/klaviyo/config.js +++ b/src/v0/destinations/klaviyo/config.js @@ -57,6 +57,7 @@ const LIST_CONF = { }; const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname); +const destType = 'klaviyo'; module.exports = { BASE_ENDPOINT, @@ -68,4 +69,5 @@ module.exports = { ecomEvents, eventNameMapping, jsonNameMapping, + destType, }; diff --git a/src/v0/destinations/klaviyo/transform.js b/src/v0/destinations/klaviyo/transform.js index a0fe3e81a7..09e75919f9 100644 --- a/src/v0/destinations/klaviyo/transform.js +++ b/src/v0/destinations/klaviyo/transform.js @@ -51,7 +51,10 @@ const { JSON_MIME_TYPE, HTTP_STATUS_CODES } = require('../../util/constant'); * @param 
{*} reqMetadata * @returns */ -const identifyRequestHandler = async (message, category, destination, reqMetadata) => { +const identifyRequestHandler = async ( + { message, category, destination, metadata }, + reqMetadata, +) => { // If listId property is present try to subscribe/member user in list const { privateApiKey, enforceEmailAsPrimary, listId, flattenProperties } = destination.Config; const mappedToDestination = get(message, MappedToDestinationKey); @@ -109,11 +112,12 @@ const identifyRequestHandler = async (message, category, destination, reqMetadat }, }; - const { profileId, response, statusCode } = await getIdFromNewOrExistingProfile( + const { profileId, response, statusCode } = await getIdFromNewOrExistingProfile({ endpoint, payload, requestOptions, - ); + metadata, + }); const responseMap = { profileUpdateResponse: profileUpdateResponseBuilder( @@ -271,7 +275,8 @@ const groupRequestHandler = (message, category, destination) => { }; // Main event processor using specific handler funcs -const processEvent = async (message, destination, reqMetadata) => { +const processEvent = async (event, reqMetadata) => { + const { message, destination, metadata } = event; if (!message.type) { throw new InstrumentationError('Event type is required'); } @@ -285,7 +290,10 @@ const processEvent = async (message, destination, reqMetadata) => { switch (messageType) { case EventType.IDENTIFY: category = CONFIG_CATEGORIES.IDENTIFY; - response = await identifyRequestHandler(message, category, destination, reqMetadata); + response = await identifyRequestHandler( + { message, category, destination, metadata }, + reqMetadata, + ); break; case EventType.SCREEN: case EventType.TRACK: @@ -303,7 +311,7 @@ const processEvent = async (message, destination, reqMetadata) => { }; const process = async (event, reqMetadata) => { - const result = await processEvent(event.message, event.destination, reqMetadata); + const result = await processEvent(event, reqMetadata); return result; }; diff 
--git a/src/v0/destinations/klaviyo/util.js b/src/v0/destinations/klaviyo/util.js index df2dbb4712..4db59cfb05 100644 --- a/src/v0/destinations/klaviyo/util.js +++ b/src/v0/destinations/klaviyo/util.js @@ -2,6 +2,7 @@ const { defaultRequestConfig } = require('rudder-transformer-cdk/build/utils'); const lodash = require('lodash'); const { NetworkError, InstrumentationError } = require('@rudderstack/integrations-lib'); const { WhiteListedTraits } = require('../../../constants'); +const logger = require('../../../logger'); const { constructPayload, @@ -17,7 +18,13 @@ const tags = require('../../util/tags'); const { handleHttpRequest } = require('../../../adapters/network'); const { JSON_MIME_TYPE, HTTP_STATUS_CODES } = require('../../util/constant'); const { getDynamicErrorType } = require('../../../adapters/utils/networkUtils'); -const { BASE_ENDPOINT, MAPPING_CONFIG, CONFIG_CATEGORIES, MAX_BATCH_SIZE } = require('./config'); +const { + BASE_ENDPOINT, + MAPPING_CONFIG, + CONFIG_CATEGORIES, + MAX_BATCH_SIZE, + destType, +} = require('./config'); const REVISION_CONSTANT = '2023-02-22'; @@ -32,10 +39,18 @@ const REVISION_CONSTANT = '2023-02-22'; * @param {*} requestOptions * @returns */ -const getIdFromNewOrExistingProfile = async (endpoint, payload, requestOptions) => { +const getIdFromNewOrExistingProfile = async ({ endpoint, payload, requestOptions, metadata }) => { let response; let profileId; const endpointPath = '/api/profiles'; + logger.requestLog(`[${destType.toUpperCase()}] get id from profile request`, { + metadata, + requestDetails: { + url: endpoint, + body: payload, + method: 'post', + }, + }); const { processedResponse: resp } = await handleHttpRequest( 'post', endpoint, @@ -49,6 +64,10 @@ const getIdFromNewOrExistingProfile = async (endpoint, payload, requestOptions) module: 'router', }, ); + logger.responseLog(`[${destType.toUpperCase()}] get id from profile response`, { + metadata, + responseDetails: resp, + }); /** * 201 - profile is created with 
updated payload no need to update it again (suppress event with 299 status code) diff --git a/src/v0/destinations/mailchimp/utils.js b/src/v0/destinations/mailchimp/utils.js index a726f23a39..f678742f2d 100644 --- a/src/v0/destinations/mailchimp/utils.js +++ b/src/v0/destinations/mailchimp/utils.js @@ -1,10 +1,6 @@ const get = require('get-value'); const md5 = require('md5'); -const { - InstrumentationError, - NetworkError, - structuredLogger: logger, -} = require('@rudderstack/integrations-lib'); +const { InstrumentationError, NetworkError } = require('@rudderstack/integrations-lib'); const myAxios = require('../../../util/myAxios'); const { MappedToDestinationKey } = require('../../../constants'); const { @@ -19,6 +15,7 @@ const { defaultBatchRequestConfig, constructPayload, } = require('../../util'); +const logger = require('../../../logger'); const { MERGE_CONFIG, MERGE_ADDRESS, SUBSCRIPTION_STATUS, VALID_STATUSES } = require('./config'); const { getDynamicErrorType } = require('../../../adapters/utils/networkUtils'); const tags = require('../../util/tags'); diff --git a/src/v0/sources/adjust/transform.js b/src/v0/sources/adjust/transform.js index 8568622aeb..9da90751b7 100644 --- a/src/v0/sources/adjust/transform.js +++ b/src/v0/sources/adjust/transform.js @@ -1,7 +1,8 @@ const lodash = require('lodash'); const path = require('path'); const fs = require('fs'); -const { TransformationError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { TransformationError } = require('@rudderstack/integrations-lib'); +const logger = require('../../../logger'); const Message = require('../message'); const { CommonUtils } = require('../../../util/common'); const { excludedFieldList } = require('./config'); diff --git a/src/v0/sources/canny/transform.js b/src/v0/sources/canny/transform.js index 38ed5e137e..aad5a881c1 100644 --- a/src/v0/sources/canny/transform.js +++ b/src/v0/sources/canny/transform.js @@ -2,6 +2,7 @@ const sha256 = 
require('sha256'); const { TransformationError } = require('@rudderstack/integrations-lib'); const Message = require('../message'); const { voterMapping, authorMapping, checkForRequiredFields } = require('./util'); +const logger = require('../../../logger'); const CannyOperation = { VOTE_CREATED: 'vote.created', @@ -14,7 +15,7 @@ const CannyOperation = { * @param {*} event * @param {*} typeOfUser */ -function settingIds(message, event, typeOfUser, logger) { +function settingIds(message, event, typeOfUser) { const clonedMessage = { ...message }; try { // setting up userId @@ -47,7 +48,7 @@ function settingIds(message, event, typeOfUser, logger) { * @param {*} typeOfUser * @returns message */ -function createMessage(event, typeOfUser, logger) { +function createMessage(event, typeOfUser) { const message = new Message(`Canny`); message.setEventType('track'); @@ -60,7 +61,7 @@ function createMessage(event, typeOfUser, logger) { message.context.integration.version = '1.0.0'; - const finalMessage = settingIds(message, event, typeOfUser, logger); + const finalMessage = settingIds(message, event, typeOfUser); checkForRequiredFields(finalMessage); @@ -72,7 +73,7 @@ function createMessage(event, typeOfUser, logger) { return finalMessage; } -function process(event, logger) { +function process(event) { let typeOfUser; switch (event.type) { @@ -85,6 +86,6 @@ function process(event, logger) { typeOfUser = 'author'; } - return createMessage(event, typeOfUser, logger); + return createMessage(event, typeOfUser); } module.exports = { process }; diff --git a/src/v0/sources/shopify/transform.js b/src/v0/sources/shopify/transform.js index 4886fb3df1..bc2135d215 100644 --- a/src/v0/sources/shopify/transform.js +++ b/src/v0/sources/shopify/transform.js @@ -13,6 +13,7 @@ const { getHashLineItems, getDataFromRedis, } = require('./util'); +const logger = require('../../../logger'); const { RedisDB } = require('../../../util/redis/redisConnector'); const { removeUndefinedAndNullValues, 
isDefinedAndNotNull } = require('../../util'); const Message = require('../message'); @@ -205,7 +206,7 @@ const processEvent = async (inputEvent, metricMetadata) => { }; const isIdentifierEvent = (event) => ['rudderIdentifier', 'rudderSessionIdentifier'].includes(event?.event); -const processIdentifierEvent = async (event, metricMetadata, logger) => { +const processIdentifierEvent = async (event, metricMetadata) => { if (useRedisDatabase) { let value; let field; @@ -243,7 +244,11 @@ const processIdentifierEvent = async (event, metricMetadata, logger) => { }); await RedisDB.setVal(`${event.cartToken}`, value); } catch (e) { - logger.debug(`{{SHOPIFY::}} cartToken map set call Failed due redis error ${e}`); + logger.debug(`{{SHOPIFY::}} cartToken map set call Failed due redis error ${e}`, { + type: 'set', + source: metricMetadata.source, + writeKey: metricMetadata.writeKey, + }); stats.increment('shopify_redis_failures', { type: 'set', source: metricMetadata.source, @@ -255,13 +260,13 @@ const processIdentifierEvent = async (event, metricMetadata, logger) => { } return NO_OPERATION_SUCCESS; }; -const process = async (event, logger) => { +const process = async (event) => { const metricMetadata = { writeKey: event.query_parameters?.writeKey?.[0], source: 'SHOPIFY', }; if (isIdentifierEvent(event)) { - return processIdentifierEvent(event, metricMetadata, logger); + return processIdentifierEvent(event, metricMetadata); } const response = await processEvent(event, metricMetadata); return response; diff --git a/src/v0/sources/shopify/util.js b/src/v0/sources/shopify/util.js index 3dc54cc434..6aea0d19bd 100644 --- a/src/v0/sources/shopify/util.js +++ b/src/v0/sources/shopify/util.js @@ -2,7 +2,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ const { v5 } = require('uuid'); const sha256 = require('sha256'); -const { TransformationError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { TransformationError } = 
require('@rudderstack/integrations-lib'); const stats = require('../../../util/stats'); const { constructPayload, @@ -22,6 +22,7 @@ const { useRedisDatabase, maxTimeToIdentifyRSGeneratedCall, } = require('./config'); +const logger = require('../../../logger'); const getDataFromRedis = async (key, metricMetadata) => { try { diff --git a/src/v1/destinations/campaign_manager/networkHandler.js b/src/v1/destinations/campaign_manager/networkHandler.js index 300b5f9676..eee3869fb5 100644 --- a/src/v1/destinations/campaign_manager/networkHandler.js +++ b/src/v1/destinations/campaign_manager/networkHandler.js @@ -9,6 +9,7 @@ const { getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); const tags = require('../../../v0/util/tags'); +const logger = require('../../../logger'); function isEventAbortableAndExtractErrMsg(element, proxyOutputObj) { let isAbortable = false; @@ -38,7 +39,16 @@ const responseHandler = (responseParams) => { const { destinationResponse, rudderJobMetadata } = responseParams; const message = `[CAMPAIGN_MANAGER Response V1 Handler] - Request Processed Successfully`; const responseWithIndividualEvents = []; - const { response, status } = destinationResponse; + const { response, status, headers } = destinationResponse; + + logger.responseLog('[campaign_manager] response handling', { + metadata: rudderJobMetadata, + responseDetails: { + headers, + response, + status, + }, + }); if (isHttpStatusSuccess(status)) { // check for Partial Event failures and Successes diff --git a/src/v1/destinations/monday/networkHandler.js b/src/v1/destinations/monday/networkHandler.js index 28a7f1abc0..5a0313a27b 100644 --- a/src/v1/destinations/monday/networkHandler.js +++ b/src/v1/destinations/monday/networkHandler.js @@ -6,6 +6,7 @@ const { } = require('../../../adapters/utils/networkUtils'); const { isHttpStatusSuccess } = require('../../../v0/util/index'); const tags = require('../../../v0/util/tags'); +const logger = require('../../../logger'); const 
checkIfUpdationOfStatusRequired = (response) => { let errorMsg = ''; @@ -41,8 +42,16 @@ const responseHandler = (responseParams) => { const message = '[MONDAY Response V1 Handler] - Request Processed Successfully'; const responseWithIndividualEvents = []; - const { response, status } = destinationResponse; + const { response, status, headers } = destinationResponse; + logger.responseLog('[monday] proxy response', { + metadata: rudderJobMetadata, + responseDetails: { + headers, + response, + status, + }, + }); // batching not supported if (isHttpStatusSuccess(status)) { const proxyOutput = { diff --git a/test/__tests__/pinterestConversion-cdk.test.ts b/test/__tests__/pinterestConversion-cdk.test.ts index 6aaa710ed7..2afde331d5 100644 --- a/test/__tests__/pinterestConversion-cdk.test.ts +++ b/test/__tests__/pinterestConversion-cdk.test.ts @@ -1,8 +1,8 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import fs from 'fs'; import path from 'path'; import { executeWorkflow, getWorkflowEngine, processCdkV2Workflow } from '../../src/cdk/v2/handler'; import tags from '../../src/v0/util/tags'; +import logger from '../../src/logger'; const integration = 'pinterest_tag'; const name = 'Pinterest Conversion API'; diff --git a/test/integrations/destinations/am/dataDelivery/data.ts b/test/integrations/destinations/am/dataDelivery/data.ts index a4faa7e60c..19baca02c3 100644 --- a/test/integrations/destinations/am/dataDelivery/data.ts +++ b/test/integrations/destinations/am/dataDelivery/data.ts @@ -68,6 +68,15 @@ export const data = [ message: '[Generic Response Handler] Request for destination: am Processed Successfully', destinationResponse: { + headers: { + 'access-control-allow-methods': 'GET, POST', + 'access-control-allow-origin': '*', + connection: 'keep-alive', + 'content-length': '93', + 'content-type': 'application/json', + date: 'Sat, 11 Dec 2021 15:08:22 GMT', + 'strict-transport-security': 'max-age=15768000', + }, response: { code: 200, 
server_upload_time: 1639235302252, @@ -145,6 +154,15 @@ export const data = [ message: '[Generic Response Handler] Request failed for destination am with status: 400', destinationResponse: { + headers: { + 'access-control-allow-methods': 'GET, POST', + 'access-control-allow-origin': '*', + connection: 'keep-alive', + 'content-length': '93', + 'content-type': 'application/json', + date: 'Sat, 11 Dec 2021 15:08:22 GMT', + 'strict-transport-security': 'max-age=15768000', + }, response: { code: 400, server_upload_time: 1639235302252,