diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md index cfcb1fc0d8..c52962467d 100644 --- a/.github/pull_request_template.md +++ b/.github/pull_request_template.md @@ -46,7 +46,7 @@ N/A - [ ] Is the PR limited to one linear task? -- [ ] Are relevant unit and component test-cases added? +- [ ] Are relevant unit and component test-cases added in **new readability format**? ### Reviewer checklist diff --git a/.github/workflows/build-push-docker-image.yml b/.github/workflows/build-push-docker-image.yml index 7ddae0a3ae..0d3494e8d1 100644 --- a/.github/workflows/build-push-docker-image.yml +++ b/.github/workflows/build-push-docker-image.yml @@ -38,6 +38,7 @@ jobs: - name: Checkout uses: actions/checkout@v4.1.1 with: + ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 1 - name: Setup Docker Buildx @@ -88,6 +89,7 @@ jobs: - name: Checkout uses: actions/checkout@v4.1.1 with: + ref: ${{ github.event.pull_request.head.sha }} fetch-depth: 1 - name: Setup Docker Buildx diff --git a/CHANGELOG.md b/CHANGELOG.md index 87ca4738ec..d1f97b8b5f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,113 @@ All notable changes to this project will be documented in this file. See [standard-version](https://github.com/conventional-changelog/standard-version) for commit guidelines. +### [1.68.1](https://github.com/rudderlabs/rudder-transformer/compare/v1.68.0...v1.68.1) (2024-05-29) + + +### Bug Fixes + +* tiktok_v2 assigning value to undefined properties ([#3426](https://github.com/rudderlabs/rudder-transformer/issues/3426)) ([323396b](https://github.com/rudderlabs/rudder-transformer/commit/323396b09fd6b7fda3cce53cc4f1cc443d7a78c1)) + +## [1.68.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.67.0...v1.68.0) (2024-05-27) + + +### Features + +* add json-data type support in redis ([#3336](https://github.com/rudderlabs/rudder-transformer/issues/3336)) ([0196f20](https://github.com/rudderlabs/rudder-transformer/commit/0196f20cc79e1f470d96a649dd9404c3c9284329)) +* facebook custom audience app secret support ([#3357](https://github.com/rudderlabs/rudder-transformer/issues/3357)) ([fce4ef9](https://github.com/rudderlabs/rudder-transformer/commit/fce4ef973500411c7ad812e7949bb1b73dabc3ba)) +* filtering unknown events in awin ([#3392](https://github.com/rudderlabs/rudder-transformer/issues/3392)) ([d842da8](https://github.com/rudderlabs/rudder-transformer/commit/d842da87a34cb63023eba288e0c5258e29997dcf)) +* **ga4:** component test refactor ([#3220](https://github.com/rudderlabs/rudder-transformer/issues/3220)) ([3ff9a5e](https://github.com/rudderlabs/rudder-transformer/commit/3ff9a5e8e955b929a1b04a89dcf0ccbc49e18648)) +* **integrations/auth0:** include Auth0 event type in Rudderstack message ([#3370](https://github.com/rudderlabs/rudder-transformer/issues/3370)) ([e9409fd](https://github.com/rudderlabs/rudder-transformer/commit/e9409fde6063d7eaa8558396b85b5fdf99f964e1)) +* onboard koddi destination ([#3359](https://github.com/rudderlabs/rudder-transformer/issues/3359)) ([f74c4a0](https://github.com/rudderlabs/rudder-transformer/commit/f74c4a0bc92ae6ccb0c00ac5b21745e496a015bc)) +* onboarding adjust source ([#3395](https://github.com/rudderlabs/rudder-transformer/issues/3395)) ([668d331](https://github.com/rudderlabs/rudder-transformer/commit/668d3311aadacbb92b1873bf43919db7d341afbb)) + + +### Bug Fixes + +* fb custom audience html response ([#3402](https://github.com/rudderlabs/rudder-transformer/issues/3402)) ([d1a2bd6](https://github.com/rudder +* standardise 
hashing for all CAPI integrations ([#3379](https://github.com/rudderlabs/rudder-transformer/issues/3379)) ([c249a69](https://github.com/rudderlabs/rudder-transformer/commit/c249a694d735f6d241a35b6e21f493c54890ac84)) +* tiktok_v2 remove default value for content-type for custom events ([#3383](https://github.com/rudderlabs/rudder-transformer/issues/3383)) ([6e7b5a0](https://github.com/rudderlabs/rudder-transformer/commit/6e7b5a0d8bf2c859dfb15b9cad7ed6070bd0892b)) +* added step for reconciling openfaas functions for python transformations ([#3420](https://github.com/rudderlabs/rudder-transformer/issues/3420)) ([7a2ab63](https://github.com/rudderlabs/rudder-transformer/commit/7a2ab63674d40870af4d16f0673a2a2594c899e9)) + +## [1.67.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.66.1...v1.67.0) (2024-05-23) + + +### Features + +* sre 456 ut move high cardinality histogram metrics to summaries cp ([#3409](https://github.com/rudderlabs/rudder-transformer/issues/3409)) ([be20dc2](https://github.com/rudderlabs/rudder-transformer/commit/be20dc26ade2fa0212dc91126cf42087a84a07c9)) + +### [1.66.1](https://github.com/rudderlabs/rudder-transformer/compare/v1.66.0...v1.66.1) (2024-05-20) + + +### Bug Fixes + +* add validation for null/undefined traits in slack ([#3382](https://github.com/rudderlabs/rudder-transformer/issues/3382)) ([755073c](https://github.com/rudderlabs/rudder-transformer/commit/755073c4341a454785050d835021d9f17e0b9d3f)) +* gaoc store sales batching transform contract ([#3384](https://github.com/rudderlabs/rudder-transformer/issues/3384)) ([e7678cb](https://github.com/rudderlabs/rudder-transformer/commit/e7678cbdae4c06449ea9352ce3db390d2a29da14)) +* move af_currency outside properties in eventValue ([#3316](https://github.com/rudderlabs/rudder-transformer/issues/3316)) ([71c3d46](https://github.com/rudderlabs/rudder-transformer/commit/71c3d46236fff9209625cfb0737c21db2d275345)) +* remove default traits from ortto ([#3389](https://github.com/rudderlabs/rudder-transformer/issues/3389)) ([fbb0811](https://github.com/rudderlabs/rudder-transformer/commit/fbb0811aa0e417b0cffcea4ecc103979afccfe74)) +* update validation of event duration ([#3376](https://github.com/rudderlabs/rudder-transformer/issues/3376)) ([3ad7850](https://github.com/rudderlabs/rudder-transformer/commit/3ad78506446915ada8bdc5f5594dc2710e6b0646)) + +## [1.66.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.65.1...v1.66.0) (2024-05-13) + + +### Features + +* add slack source ([#3148](https://github.com/rudderlabs/rudder-transformer/issues/3148)) +* onboard monday to proxy ([#3347](https://github.com/rudderlabs/rudder-transformer/issues/3347)) +* onboard emarsys destination ([#3369](https://github.com/rudderlabs/rudder-transformer/issues/3369)) + + +### Bug Fixes + +* ninetailed: modify parameter requirements and add default values ([#3364](https://github.com/rudderlabs/rudder-transformer/issues/3364)) + +### [1.65.1](https://github.com/rudderlabs/rudder-transformer/compare/v1.65.0...v1.65.1) (2024-05-10) + + +### Bug Fixes + +* update regex which was discarding firstname and lastname ([#3360](https://github.com/rudderlabs/rudder-transformer/issues/3360)) ([cb10aa7](https://github.com/rudderlabs/rudder-transformer/commit/cb10aa7707518b52edcf7fb1081c6969bcb5f8f8)) + +## [1.65.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.64.0...v1.65.0) (2024-05-06) + + +### Features + +* generate anonymousId and move to component testcases 
([447f85f](https://github.com/rudderlabs/rudder-transformer/commit/447f85faf6ccca2179ab33b7fe43e281fc4f5897)) + + +### Bug Fixes + +* auth0 error handling ([2e22075](https://github.com/rudderlabs/rudder-transformer/commit/2e22075ddf792c573642fd09c5f9d31d8132525b)) +* auth0 error handling for missing userId ([#3334](https://github.com/rudderlabs/rudder-transformer/issues/3334)) ([d2cce77](https://github.com/rudderlabs/rudder-transformer/commit/d2cce772d6f0485ff8ee51af261006eb2066c3a2)) +* braze dedup for non-billable attributes ([#3320](https://github.com/rudderlabs/rudder-transformer/issues/3320)) ([ac59fdc](https://github.com/rudderlabs/rudder-transformer/commit/ac59fdc37d6fe08847d79d249d166de7cc358fd6)) +* multiple event mappings in ortto ([#3341](https://github.com/rudderlabs/rudder-transformer/issues/3341)) ([0a9a2e9](https://github.com/rudderlabs/rudder-transformer/commit/0a9a2e9e2a3029ad7b4834820ba2132d3f57ce98)) + +## [1.64.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.63.0...v1.64.0) (2024-04-29) + + +### Features + +* adding custom properties support to bluecore ([#3282](https://github.com/rudderlabs/rudder-transformer/issues/3282)) ([8592e66](https://github.com/rudderlabs/rudder-transformer/commit/8592e664eb568e70a00261e275ab2faed8f6f618)) +* onboard Yandex Metrica Offline Events Destination ([#3232](https://github.com/rudderlabs/rudder-transformer/issues/3232)) ([8f79f53](https://github.com/rudderlabs/rudder-transformer/commit/8f79f53d30326e07fc92dd624e799015ff9f87c2)) +* transactional custom property support for awin ([#3325](https://github.com/rudderlabs/rudder-transformer/issues/3325)) ([fdecaf3](https://github.com/rudderlabs/rudder-transformer/commit/fdecaf36d91db7540d6f68a013e4f7fb2a36ebaa)) + + +### Bug Fixes + +* algolia enhancement ( adding currency, price, subType and objectData support ) ([#3290](https://github.com/rudderlabs/rudder-transformer/issues/3290)) ([f06ebde](https://github.com/rudderlabs/rudder-transformer/commit/f06ebde110693fe32f8e450dc395f1f4019defab)) +* **delighted:** replace myAxios utility with handleHttpRequest utility ([#3237](https://github.com/rudderlabs/rudder-transformer/issues/3237)) ([bac3cc5](https://github.com/rudderlabs/rudder-transformer/commit/bac3cc5670c149454a6063a55a4b901043b0ff02)) +* handle empty userId ([5402b21](https://github.com/rudderlabs/rudder-transformer/commit/5402b219ccdeaafb710c8c2828e983e9864a415f)) +* handle empty userId (movable ink, bloomreach) ([#3300](https://github.com/rudderlabs/rudder-transformer/issues/3300)) ([038c1aa](https://github.com/rudderlabs/rudder-transformer/commit/038c1aa04049aaa1caa1bf82cf6c69611b5d3fd9)) +* send content_ids as a string if there is only one value ([#3317](https://github.com/rudderlabs/rudder-transformer/issues/3317)) ([54eca32](https://github.com/rudderlabs/rudder-transformer/commit/54eca3220ea48fae64c655813fe4430dd704639e)) + +## [1.63.0](https://github.com/rudderlabs/rudder-transformer/compare/v1.62.2...v1.63.0) (2024-04-25) + + +### Features + +* remove redundant data from traits in hubspot ([#3310](https://github.com/rudderlabs/rudder-transformer/issues/3310)) ([4b21f13](https://github.com/rudderlabs/rudder-transformer/commit/4b21f1353d3d9a431a0d5446d019f66a543b977b)) + ### [1.62.2](https://github.com/rudderlabs/rudder-transformer/compare/v1.62.1...v1.62.2) (2024-04-18) diff --git a/package-lock.json b/package-lock.json index b5b413f936..ded836ef59 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,12 +1,12 @@ { "name": "rudder-transformer", - "version": 
"1.62.2", + "version": "1.68.1", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "rudder-transformer", - "version": "1.62.2", + "version": "1.68.1", "license": "ISC", "dependencies": { "@amplitude/ua-parser-js": "0.7.24", diff --git a/package.json b/package.json index a291bbab90..73cac39767 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "rudder-transformer", - "version": "1.62.2", + "version": "1.68.1", "description": "", "homepage": "https://github.com/rudderlabs/rudder-transformer#readme", "bugs": { diff --git a/src/cdk/v2/destinations/algolia/procWorkflow.yaml b/src/cdk/v2/destinations/algolia/procWorkflow.yaml index f9ac8e3ae6..402b48dabd 100644 --- a/src/cdk/v2/destinations/algolia/procWorkflow.yaml +++ b/src/cdk/v2/destinations/algolia/procWorkflow.yaml @@ -5,7 +5,10 @@ bindings: - path: ../../../../v0/destinations/algolia/config - name: removeUndefinedAndNullValues path: ../../../../v0/util + - name: isDefinedAndNotNull + path: ../../../../v0/util - path: ../../bindings/jsontemplate + - path: '@rudderstack/integrations-lib' steps: - name: validateInput @@ -24,6 +27,7 @@ steps: let eventTypeMap = $.eventTypeMapping(.destination.Config); let event = .message.event.trim().toLowerCase(); let eventType = .message.properties.eventType ?? eventTypeMap[event]; + let eventSubType = .message.properties.eventSubtype && eventType === 'conversion' && (.message.properties.eventSubtype in $.ALLOWED_EVENT_SUBTYPES) ? .message.properties.eventSubtype; $.assert(eventType, "eventType is mandatory for track call"); let payload = .message.().({ index: .properties.index, @@ -32,12 +36,28 @@ steps: filters: .properties.filters, objectIDs: .properties.objectIds, positions: .properties.positions, + value: $.isDefinedAndNotNull(.properties.currency) ? .properties.value, + currency: .properties.currency, userToken: {{{{$.getGenericPaths("userId", "||")}}}}, eventName: event, - eventType: eventType + eventType: eventType, + eventSubtype: eventSubType }); $.context.payload = $.genericpayloadValidator(payload); + - name: prepareObjectDataBlock + condition: $.context.payload.eventType === "conversion" && $.isDefinedAndNotNull(^.message.properties.products) && Array.isArray(^.message.properties.products) + description: | + Populate list of objectData + template: | + const products = ^.message.properties.products + products.($.removeUndefinedAndNullValues({ + "queryID" : $.isDefinedAndNotNull(.queryID) ? String(.queryID) : null, + "price": $.isDefinedAndNotNull(.price) && $.isDefinedAndNotNull(^.message.properties.currency) ? String(.price) : null, + "quantity": $.isDefinedAndNotNull(.quantity)? Number(.quantity) : null, + "discount": $.isDefinedAndNotNull(.discount) ? String(.discount) : null + }))[] + - name: populateProductsData condition: | .message.properties.products && @@ -55,11 +75,14 @@ steps: const products = .message.properties.products; const objectIDs = ~r products.objectId; $.context.payload.objectIDs = Array.isArray(objectIDs) ? 
objectIDs[:20]:$.context.payload.objectIDs; + $.context.payload.objectData = $.outputs.prepareObjectDataBlock + $.validatePayload($.context.payload) - name: validateDestPayload template: | const filters = $.context.payload.filters; const objectIDs = $.context.payload.objectIDs; + const objectData = $.context.payload.objectData; $.assert(!(filters && objectIDs), "event can't have both objectIds and filters at the same time."); $.assert(filters.length || objectIDs.length, "Either filters or objectIds is required and must be non empty."); diff --git a/src/cdk/v2/destinations/bloomreach/procWorkflow.yaml b/src/cdk/v2/destinations/bloomreach/procWorkflow.yaml index f092d90382..5a9dcaa18d 100644 --- a/src/cdk/v2/destinations/bloomreach/procWorkflow.yaml +++ b/src/cdk/v2/destinations/bloomreach/procWorkflow.yaml @@ -39,7 +39,7 @@ steps: const userId = .message.().( {{{{$.getGenericPaths("userIdOnly")}}}}; ); - $.assert(userId ?? .message.anonymousId, "Either one of userId or anonymousId is required. Aborting"); + $.assert(userId || .message.anonymousId, "Either one of userId or anonymousId is required. Aborting"); - name: prepareIdentifyPayload condition: $.context.messageType === {{$.EventType.IDENTIFY}} @@ -64,7 +64,7 @@ steps: - name: pageEventName condition: $.context.messageType === {{$.EventType.PAGE}} template: | - const category = .message.category ?? .message.properties.category; + const category = .message.category || .message.properties.category; const name = .message.name || .message.properties.name; const eventNameArray = ["Viewed"]; category ? eventNameArray.push(category); @@ -74,7 +74,7 @@ steps: - name: screenEventName condition: $.context.messageType === {{$.EventType.SCREEN}} template: | - const category = .message.category ?? .message.properties.category; + const category = .message.category || .message.properties.category; const name = .message.name || .message.properties.name; const eventNameArray = ["Viewed"]; category ? 
eventNameArray.push(category); diff --git a/src/cdk/v2/destinations/emarsys/config.js b/src/cdk/v2/destinations/emarsys/config.js new file mode 100644 index 0000000000..83067c3cd3 --- /dev/null +++ b/src/cdk/v2/destinations/emarsys/config.js @@ -0,0 +1,24 @@ +const ALLOWED_OPT_IN_VALUES = ['1', '2', '']; +const groupedSuccessfulPayload = { + identify: { + method: 'PUT', + batches: [], + }, + group: { + method: 'POST', + batches: [], + }, + track: { + method: 'POST', + batches: [], + }, +}; + +module.exports = { + MAX_BATCH_SIZE: 1000, + EMAIL_FIELD_ID: 3, + OPT_IN_FILED_ID: 31, + ALLOWED_OPT_IN_VALUES, + MAX_BATCH_SIZE_BYTES: 8000000, // 8 MB, + groupedSuccessfulPayload, +}; diff --git a/src/cdk/v2/destinations/emarsys/procWorkflow.yaml b/src/cdk/v2/destinations/emarsys/procWorkflow.yaml new file mode 100644 index 0000000000..a5c0b33f38 --- /dev/null +++ b/src/cdk/v2/destinations/emarsys/procWorkflow.yaml @@ -0,0 +1,88 @@ +bindings: + - name: EventType + path: ../../../../constants + - path: ../../bindings/jsontemplate + exportAll: true + - name: removeUndefinedValues + path: ../../../../v0/util + - name: removeUndefinedAndNullValues + path: ../../../../v0/util + - name: defaultRequestConfig + path: ../../../../v0/util + - name: getIntegrationsObj + path: ../../../../v0/util + - name: getFieldValueFromMessage + path: ../../../../v0/util + - name: CommonUtils + path: ../../../../util/common + - path: ./utils + - path: ./config + - path: lodash + name: cloneDeep + +steps: + - name: checkIfProcessed + condition: .message.statusCode + template: | + $.batchMode ? .message.body.JSON : .message + onComplete: return + - name: messageType + template: | + .message.type.toLowerCase() + - name: validateInput + template: | + let messageType = $.outputs.messageType; + $.assert(messageType, "Message type is not present. Aborting message."); + $.assert(messageType in {{$.EventType.([.TRACK, .IDENTIFY, .GROUP])}}, + "message type " + messageType + " is not supported") + $.assertConfig(.destination.Config.emersysUsername, "Emersys user name is not configured. Aborting"); + $.assertConfig(.destination.Config.emersysUserSecret, "Emersys user secret is not configured. Aborting"); + + - name: validateInputForTrack + description: Additional validation for Track events + condition: $.outputs.messageType === {{$.EventType.TRACK}} + template: | + $.assert(.message.event, "event could not be mapped to conversion rule. Aborting.") + - name: preparePayloadForIdentify + description: | + Builds identify payload. ref: https://dev.emarsys.com/docs/core-api-reference/f8ljhut3ac2i1-update-contacts + condition: $.outputs.messageType === {{$.EventType.IDENTIFY}} + template: | + $.context.payload = $.buildIdentifyPayload(.message, .destination.Config,); + - name: preparePayloadForGroup + description: | + Builds group payload. ref: https://dev.emarsys.com/docs/core-api-reference/1m0m70hy3tuov-add-contacts-to-a-contact-list + condition: $.outputs.messageType === {{$.EventType.GROUP}} + template: | + $.context.payload = $.buildGroupPayload(.message, .destination.Config,); + - name: preparePayloadForTrack + description: | + Builds track payload. 
ref: https://dev.emarsys.com/docs/core-api-reference/fl0xx6rwfbwqb-trigger-an-external-event + condition: $.outputs.messageType === {{$.EventType.TRACK}} + template: | + const properties = ^.message.properties; + const integrationObject = $.getIntegrationsObj(^.message, 'emarsys'); + const emersysIdentifierId = $.deduceCustomIdentifier(integrationObject, ^.destination.Config.emersysCustomIdentifier); + const payload = { + key_id: emersysIdentifierId, + external_id: $.deduceExternalIdValue(^.message,emersysIdentifierId,.destination.Config.fieldMapping), + trigger_id: integrationObject.trigger_id, + data: properties.data, + attachment:$.CommonUtils.toArray(properties.attachment), + event_time: $.getFieldValueFromMessage(^.message, 'timestamp'), + }; + $.context.payload = { + eventType: ^.message.type, + destinationPayload: { + payload: $.removeUndefinedAndNullValues(payload), + eventId: $.deduceEventId(^.message,.destination.Config), + }, + }; + - name: buildResponse + template: | + const response = $.defaultRequestConfig(); + response.body.JSON = $.context.payload; + response.endpoint = $.deduceEndPoint($.context.payload,.destination.Config); + response.method = "POST"; + response.headers = $.buildHeader(.destination.Config) + response diff --git a/src/cdk/v2/destinations/emarsys/rtWorkflow.yaml b/src/cdk/v2/destinations/emarsys/rtWorkflow.yaml new file mode 100644 index 0000000000..0e7132ccad --- /dev/null +++ b/src/cdk/v2/destinations/emarsys/rtWorkflow.yaml @@ -0,0 +1,38 @@ +bindings: + - path: ./utils + - name: handleRtTfSingleEventError + path: ../../../../v0/util/index + +steps: + - name: validateInput + template: | + $.assert(Array.isArray(^) && ^.length > 0, "Invalid event array") + + - name: transform + externalWorkflow: + path: ./procWorkflow.yaml + bindings: + - name: batchMode + value: true + loopOverInput: true + - name: successfulEvents + template: | + $.outputs.transform#idx.output.({ + "message": .[], + "destination": ^ [idx].destination, + "metadata": ^ [idx].metadata + })[] + - name: failedEvents + template: | + $.outputs.transform#idx.error.( + $.handleRtTfSingleEventError(^[idx], .originalError ?? 
., {}) + )[] + + - name: batchSuccessfulEvents + description: Batches the successfulEvents + template: | + $.context.batchedPayload = $.batchResponseBuilder($.outputs.successfulEvents); + + - name: finalPayload + template: | + [...$.outputs.failedEvents, ...$.context.batchedPayload] diff --git a/src/cdk/v2/destinations/emarsys/utils.js b/src/cdk/v2/destinations/emarsys/utils.js new file mode 100644 index 0000000000..e093064fc5 --- /dev/null +++ b/src/cdk/v2/destinations/emarsys/utils.js @@ -0,0 +1,411 @@ +const lodash = require('lodash'); +const crypto = require('crypto'); +const { + InstrumentationError, + ConfigurationError, + isDefinedAndNotNullAndNotEmpty, + removeUndefinedAndNullAndEmptyValues, + removeUndefinedAndNullValues, + isDefinedAndNotNull, +} = require('@rudderstack/integrations-lib'); +const { + getIntegrationsObj, + validateEventName, + getValueFromMessage, + getHashFromArray, +} = require('../../../../v0/util'); +const { + EMAIL_FIELD_ID, + MAX_BATCH_SIZE, + OPT_IN_FILED_ID, + ALLOWED_OPT_IN_VALUES, + MAX_BATCH_SIZE_BYTES, + groupedSuccessfulPayload, +} = require('./config'); +const { EventType } = require('../../../../constants'); + +const base64Sha = (str) => { + const hexDigest = crypto.createHash('sha1').update(str).digest('hex'); + return Buffer.from(hexDigest).toString('base64'); +}; + +const getWsseHeader = (user, secret) => { + const nonce = crypto.randomBytes(16).toString('hex'); + const timestamp = new Date().toISOString(); + + const digest = base64Sha(nonce + timestamp + secret); + return `UsernameToken Username="${user}", PasswordDigest="${digest}", Nonce="${nonce}", Created="${timestamp}"`; +}; + +const buildHeader = (destConfig) => { + const { emersysUsername, emersysUserSecret } = destConfig; + if ( + !isDefinedAndNotNullAndNotEmpty(emersysUsername) || + !isDefinedAndNotNullAndNotEmpty(emersysUserSecret) + ) { + throw new ConfigurationError('Either Emarsys user name or user secret is missing. Aborting'); + } + return { + 'Content-Type': 'application/json', + Accept: 'application/json', + 'X-WSSE': getWsseHeader(emersysUsername, emersysUserSecret), + }; +}; + +const deduceCustomIdentifier = (integrationObject, emersysCustomIdentifier) => + integrationObject?.customIdentifierId || emersysCustomIdentifier || EMAIL_FIELD_ID; + +const buildIdentifyPayload = (message, destConfig) => { + let destinationPayload; + const { fieldMapping, emersysCustomIdentifier, discardEmptyProperties, defaultContactList } = + destConfig; + const payload = {}; + + const integrationObject = getIntegrationsObj(message, 'emarsys'); + const finalContactList = integrationObject?.contactListId || defaultContactList; + if (!finalContactList || !isDefinedAndNotNullAndNotEmpty(String(finalContactList))) { + throw new InstrumentationError( + 'Cannot a find a specific contact list either through configuration or via integrations object', + ); + } + if (fieldMapping) { + fieldMapping.forEach((trait) => { + const { rudderProperty, emersysProperty } = trait; + const value = getValueFromMessage(message, [ + `traits.${rudderProperty}`, + `context.traits.${rudderProperty}`, + ]); + if (value) { + payload[emersysProperty] = value; + } + }); + } + const emersysIdentifier = deduceCustomIdentifier(integrationObject, emersysCustomIdentifier); + const finalPayload = + discardEmptyProperties === true + ? 
removeUndefinedAndNullAndEmptyValues(payload) // empty property value has a significance in emersys
+      : removeUndefinedAndNullValues(payload);
+  if (
+    isDefinedAndNotNull(finalPayload[OPT_IN_FILED_ID]) &&
+    !ALLOWED_OPT_IN_VALUES.includes(String(finalPayload[OPT_IN_FILED_ID]))
+  ) {
+    throw new InstrumentationError(
+      `Only ${ALLOWED_OPT_IN_VALUES} values are allowed for optin field`,
+    );
+  }
+
+  if (isDefinedAndNotNullAndNotEmpty(payload[emersysIdentifier])) {
+    destinationPayload = {
+      key_id: emersysIdentifier,
+      contacts: [finalPayload],
+      contact_list_id: finalContactList,
+    };
+  } else {
+    throw new InstrumentationError(
+      'Either configured custom contact identifier value or default identifier email value is missing',
+    );
+  }
+  return { eventType: message.type, destinationPayload };
+};
+
+const findRudderPropertyByEmersysProperty = (emersysProperty, fieldMapping) => {
+  // find the object where the emersysProperty matches the input
+  const item = lodash.find(fieldMapping, { emersysProperty: String(emersysProperty) });
+  // Return the rudderProperty if the object is found, otherwise fall back to 'email'
+  return item ? item.rudderProperty : 'email';
+};
+
+const deduceExternalIdValue = (message, emersysIdentifier, fieldMapping) => {
+  const configuredPayloadProperty = findRudderPropertyByEmersysProperty(
+    emersysIdentifier,
+    fieldMapping,
+  );
+  const externalIdValue = getValueFromMessage(message, [
+    `traits.${configuredPayloadProperty}`,
+    `context.traits.${configuredPayloadProperty}`,
+  ]);
+
+  if (!isDefinedAndNotNull(externalIdValue)) {
+    throw new InstrumentationError(
+      `Could not find value for externalId required in ${message.type} call. Aborting.`,
+    );
+  }
+
+  return externalIdValue;
+};
+
+const buildGroupPayload = (message, destConfig) => {
+  const { emersysCustomIdentifier, defaultContactList, fieldMapping } = destConfig;
+  const integrationObject = getIntegrationsObj(message, 'emarsys');
+  const emersysIdentifier = deduceCustomIdentifier(integrationObject, emersysCustomIdentifier);
+  const externalIdValue = deduceExternalIdValue(message, emersysIdentifier, fieldMapping);
+  if (!isDefinedAndNotNull(externalIdValue)) {
+    throw new InstrumentationError(
+      `No value found in payload for contact custom identifier of id ${emersysIdentifier}`,
+    );
+  }
+  const payload = {
+    key_id: emersysIdentifier,
+    external_ids: [externalIdValue],
+  };
+  return {
+    eventType: message.type,
+    destinationPayload: {
+      payload,
+      contactListId: message.groupId || defaultContactList,
+    },
+  };
+};
+
+const deduceEventId = (message, destConfig) => {
+  let eventId;
+  const { eventsMapping } = destConfig;
+  const { event } = message;
+  validateEventName(event);
+  if (Array.isArray(eventsMapping) && eventsMapping.length > 0) {
+    const keyMap = getHashFromArray(eventsMapping, 'from', 'to', false);
+    eventId = keyMap[event];
+  }
+  if (!eventId) {
+    throw new ConfigurationError(`${event} is not mapped to any Emersys external event. 
Aborting`); + } + return eventId; +}; + +const deduceEndPoint = (finalPayload) => { + let endPoint; + let eventId; + let contactListId; + const { eventType, destinationPayload } = finalPayload; + switch (eventType) { + case EventType.IDENTIFY: + endPoint = 'https://api.emarsys.net/api/v2/contact/?create_if_not_exists=1'; + break; + case EventType.GROUP: + contactListId = destinationPayload.contactListId; + endPoint = `https://api.emarsys.net/api/v2/contactlist/${contactListId}/add`; + break; + case EventType.TRACK: + eventId = destinationPayload.eventId; + endPoint = `https://api.emarsys.net/api/v2/event/${eventId}/trigger`; + break; + default: + break; + } + return endPoint; +}; + +const estimateJsonSize = (obj) => new Blob([JSON.stringify(obj)]).size; + +const createSingleIdentifyPayload = (keyId, contacts, contactListId) => ({ + key_id: keyId, + contacts, + contact_list_id: contactListId, +}); + +const ensureSizeConstraints = (contacts) => { + const chunks = []; + let currentBatch = []; + + contacts.forEach((contact) => { + // Start a new batch if adding the next contact exceeds size limits + if ( + currentBatch.length === 0 || + estimateJsonSize([...currentBatch, contact]) < MAX_BATCH_SIZE_BYTES + ) { + currentBatch.push(contact); + } else { + chunks.push(currentBatch); + currentBatch = [contact]; + } + }); + + // Add the remaining batch if not empty + if (currentBatch.length > 0) { + chunks.push(currentBatch); + } + + return chunks; +}; + +const createIdentifyBatches = (events) => { + const groupedIdentifyPayload = lodash.groupBy( + events, + (item) => + `${item.message[0].body.JSON.destinationPayload.key_id}-${item.message[0].body.JSON.destinationPayload.contact_list_id}`, + ); + return lodash.flatMap(groupedIdentifyPayload, (group) => { + const firstItem = group[0].message[0].body.JSON.destinationPayload; + // eslint-disable-next-line @typescript-eslint/naming-convention + const { key_id, contact_list_id } = firstItem; + + const allContacts = lodash.flatMap( + group, + (item) => item.message[0].body.JSON.destinationPayload.contacts, + ); + const initialChunks = lodash.chunk(allContacts, MAX_BATCH_SIZE); + const finalChunks = lodash.flatMap(initialChunks, ensureSizeConstraints); + + // Include metadata for each chunk + return finalChunks.map((contacts) => ({ + payload: createSingleIdentifyPayload(key_id, contacts, contact_list_id), + metadata: group.map((g) => g.metadata), + })); + }); +}; + +const createGroupBatches = (events) => { + const grouped = lodash.groupBy( + events, + (item) => + `${item.message[0].body.JSON.destinationPayload.payload.key_id}-${item.message[0].body.JSON.destinationPayload.contactListId}`, + ); + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + return Object.entries(grouped).flatMap(([key, group]) => { + const keyId = group[0].message[0].body.JSON.destinationPayload.payload.key_id; + const { contactListId } = group[0].message[0].body.JSON.destinationPayload; + const combinedExternalIds = group.reduce((acc, item) => { + acc.push(...item.message[0].body.JSON.destinationPayload.payload.external_ids); + return acc; + }, []); + + const idChunks = lodash.chunk(combinedExternalIds, MAX_BATCH_SIZE); + + return idChunks.map((chunk) => ({ + endpoint: `https://api.emarsys.net/api/v2/contactlist/${contactListId}/add`, + payload: { + key_id: keyId, + external_ids: chunk, + }, + metadata: group.map((g) => g.metadata), + })); + }); +}; + +const createTrackBatches = (events) => [ + { + endpoint: events[0].message[0].endpoint, + payload: 
events[0].message[0].body.JSON.destinationPayload.payload, + metadata: [events[0].metadata], + }, +]; +const formatIdentifyPayloadsWithEndpoint = (combinedPayloads, endpointUrl = '') => + combinedPayloads.map((singleCombinedPayload) => ({ + endpoint: endpointUrl, + payload: singleCombinedPayload.payload, + metadata: singleCombinedPayload.metadata, + })); + +const buildBatchedRequest = (batches, method, constants, batchedStatus = true) => + batches.map((batch) => ({ + batchedRequest: { + body: { + JSON: batch.payload, + JSON_ARRAY: {}, + XML: {}, + FORM: {}, + }, + version: constants.version, + type: constants.type, + method, + endpoint: batch.endpoint, + headers: constants.headers, + params: {}, + files: {}, + }, + metadata: batch.metadata, + batched: batchedStatus, + statusCode: 200, + destination: constants.destination, + })); + +// Helper to initialize the constants used across batch processing +function initializeConstants(successfulEvents) { + if (successfulEvents.length === 0) return null; + return { + version: successfulEvents[0].message[0].version, + type: successfulEvents[0].message[0].type, + headers: successfulEvents[0].message[0].headers, + destination: successfulEvents[0].destination, + }; +} + +// Helper to append requests based on batched events and constants +function appendRequestsToOutput(groupPayload, output, constants, batched = true) { + if (groupPayload.batches) { + const requests = buildBatchedRequest( + groupPayload.batches, + groupPayload.method, + constants, + batched, + ); + output.push(...requests); + } +} + +// Process batches based on event types +function processEventBatches(typedEventGroups, constants) { + let batchesOfIdentifyEvents; + const finalOutput = []; + + // Process each event group based on type + Object.keys(typedEventGroups).forEach((eventType) => { + switch (eventType) { + case EventType.IDENTIFY: + batchesOfIdentifyEvents = createIdentifyBatches(typedEventGroups[eventType]); + groupedSuccessfulPayload.identify.batches = formatIdentifyPayloadsWithEndpoint( + batchesOfIdentifyEvents, + 'https://api.emarsys.net/api/v2/contact/?create_if_not_exists=1', + ); + break; + case EventType.GROUP: + groupedSuccessfulPayload.group.batches = createGroupBatches(typedEventGroups[eventType]); + break; + case EventType.TRACK: + groupedSuccessfulPayload.track.batches = createTrackBatches(typedEventGroups[eventType]); + break; + default: + break; + } + }); + + // Convert batches into requests for each event type and push to final output + appendRequestsToOutput(groupedSuccessfulPayload.identify, finalOutput, constants); + appendRequestsToOutput(groupedSuccessfulPayload.group, finalOutput, constants); + appendRequestsToOutput(groupedSuccessfulPayload.track, finalOutput, constants, false); + + return finalOutput; +} + +// Entry function to create batches from successful events +function batchResponseBuilder(successfulEvents) { + const constants = initializeConstants(successfulEvents); + if (!constants) return []; + + const typedEventGroups = lodash.groupBy( + successfulEvents, + (event) => event.message[0].body.JSON.eventType, + ); + + return processEventBatches(typedEventGroups, constants); +} + +module.exports = { + buildIdentifyPayload, + buildGroupPayload, + buildHeader, + deduceEndPoint, + batchResponseBuilder, + base64Sha, + getWsseHeader, + findRudderPropertyByEmersysProperty, + formatIdentifyPayloadsWithEndpoint, + createSingleIdentifyPayload, + createIdentifyBatches, + ensureSizeConstraints, + createGroupBatches, + deduceExternalIdValue, + deduceEventId, + 
deduceCustomIdentifier, +}; diff --git a/src/cdk/v2/destinations/emarsys/utils.test.js b/src/cdk/v2/destinations/emarsys/utils.test.js new file mode 100644 index 0000000000..3802567ecb --- /dev/null +++ b/src/cdk/v2/destinations/emarsys/utils.test.js @@ -0,0 +1,543 @@ +const { EVENT_TYPE } = require('rudder-transformer-cdk/build/constants'); +const { + buildIdentifyPayload, + buildGroupPayload, + base64Sha, + getWsseHeader, + findRudderPropertyByEmersysProperty, + createGroupBatches, + deduceEventId, +} = require('./utils'); +const { + checkIfEventIsAbortableAndExtractErrorMessage, +} = require('../../../../v1/destinations/emarsys/networkHandler'); +const crypto = require('crypto'); +const { InstrumentationError, ConfigurationError } = require('@rudderstack/integrations-lib'); +const { responses } = require('../../../../../test/testHelper'); + +describe('Emarsys utils', () => { + describe('base64Sha', () => { + it('should return a base64 encoded SHA1 hash of the input string', () => { + const input = 'test'; + const expected = 'YTk0YThmZTVjY2IxOWJhNjFjNGMwODczZDM5MWU5ODc5ODJmYmJkMw=='; + const result = base64Sha(input); + expect(result).toEqual(expected); + }); + + it('should return an empty string when input is empty', () => { + const input = ''; + const expected = 'ZGEzOWEzZWU1ZTZiNGIwZDMyNTViZmVmOTU2MDE4OTBhZmQ4MDcwOQ=='; + const result = base64Sha(input); + expect(result).toEqual(expected); + }); + }); + + describe('getWsseHeader', () => { + beforeEach(() => { + jest + .spyOn(crypto, 'randomBytes') + .mockReturnValue(Buffer.from('abcdef1234567890abcdef1234567890', 'hex')); + jest.spyOn(Date.prototype, 'toISOString').mockReturnValue('2024-04-28T12:34:56.789Z'); + }); + + afterEach(() => { + jest.restoreAllMocks(); + }); + + it('should generate a correct WSSE header', () => { + const user = 'testUser'; + const secret = 'testSecret'; + const expectedNonce = 'abcdef1234567890abcdef1234567890'; + const expectedTimestamp = '2024-04-28T12:34:56.789Z'; + const expectedDigest = base64Sha(expectedNonce + expectedTimestamp + secret); + const expectedHeader = `UsernameToken Username="${user}", PasswordDigest="${expectedDigest}", Nonce="${expectedNonce}", Created="${expectedTimestamp}"`; + const result = getWsseHeader(user, secret); + + expect(result).toBe(expectedHeader); + }); + }); + + describe('buildIdentifyPayload', () => { + it('should correctly build payload with field mapping', () => { + const message = { + type: 'identify', + traits: { + firstName: 'John', + lastName: 'Doe', + email: 'john.doe@example.com', + optin: 1, + }, + }; + const destination = { + fieldMapping: [ + { rudderProperty: 'firstName', emersysProperty: '1' }, + { rudderProperty: 'lastName', emersysProperty: '2' }, + { rudderProperty: 'email', emersysProperty: '3' }, + { rudderProperty: 'optin', emersysProperty: '31' }, + ], + defaultContactList: 'dummyContactList', + }; + const expectedPayload = { + contact_list_id: 'dummyContactList', + contacts: [ + { + 1: 'John', + 2: 'Doe', + 3: 'john.doe@example.com', + 31: 1, + }, + ], + key_id: 3, + }; + + const result = buildIdentifyPayload(message, destination); + + expect(result.eventType).toBe(EVENT_TYPE.IDENTIFY); + expect(result.destinationPayload).toEqual(expectedPayload); + }); + + it('should throw error when opt-in field value is not allowed', () => { + const message = { + type: 'identify', + traits: { + firstName: 'John', + lastName: 'Doe', + email: 'john.doe@example.com', + optin: 3, + }, + }; + const destination = { + fieldMapping: [ + { rudderProperty: 'firstName', 
emersysProperty: '1' }, + { rudderProperty: 'lastName', emersysProperty: '2' }, + { rudderProperty: 'email', emersysProperty: '3' }, + { rudderProperty: 'optin', emersysProperty: '31' }, + ], + defaultContactList: 'dummyList', + }; + expect(() => { + buildIdentifyPayload(message, destination); + }).toThrow('Only 1,2, values are allowed for optin field'); + }); + + it('should throw error when no contact list can be assigned field value is not allowed', () => { + const message = { + type: 'identify', + traits: { + firstName: 'John', + lastName: 'Doe', + email: 'john.doe@example.com', + optin: 1, + }, + }; + const destination = { + fieldMapping: [ + { rudderProperty: 'firstName', emersysProperty: '1' }, + { rudderProperty: 'lastName', emersysProperty: '2' }, + { rudderProperty: 'email', emersysProperty: '3' }, + { rudderProperty: 'optin', emersysProperty: '31' }, + ], + }; + expect(() => { + buildIdentifyPayload(message, destination); + }).toThrow( + 'Cannot a find a specific contact list either through configuration or via integrations object', + ); + }); + + it('should correctly build payload with field mapping present in integrations object', () => { + const message = { + type: 'identify', + traits: { + firstName: 'John', + lastName: 'Doe', + email: 'john.doe@example.com', + optin: 1, + }, + integrations: { + EMARSYS: { + customIdentifierId: 1, + contactListId: 'objectListId', + }, + }, + }; + const destination = { + fieldMapping: [ + { rudderProperty: 'firstName', emersysProperty: '1' }, + { rudderProperty: 'lastName', emersysProperty: '2' }, + { rudderProperty: 'email', emersysProperty: '3' }, + { rudderProperty: 'optin', emersysProperty: '31' }, + ], + defaultContactList: 'dummyContactList', + }; + const expectedPayload = { + contact_list_id: 'objectListId', + contacts: [ + { + 1: 'John', + 2: 'Doe', + 3: 'john.doe@example.com', + 31: 1, + }, + ], + key_id: 1, + }; + + const result = buildIdentifyPayload(message, destination); + + expect(result.eventType).toBe(EVENT_TYPE.IDENTIFY); + expect(result.destinationPayload).toEqual(expectedPayload); + }); + }); + + describe('buildGroupPayload', () => { + // Returns an object with eventType and destinationPayload keys when given valid message and destination inputs + it('should return an object with eventType and destinationPayload keys when given valid message and destination inputs with default externalId', () => { + const message = { + type: 'group', + groupId: 'group123', + context: { + traits: { + email: 'test@example.com', + }, + }, + }; + const destination = { + Config: { + emersysCustomIdentifier: '3', + defaultContactList: 'list123', + fieldMapping: [ + { emersysProperty: '100', rudderProperty: 'customId' }, + { emersysProperty: '3', rudderProperty: 'email' }, + ], + }, + }; + const result = buildGroupPayload(message, destination); + expect(result).toEqual({ + eventType: 'group', + destinationPayload: { + payload: { + key_id: 3, + external_ids: ['test@example.com'], + }, + contactListId: 'group123', + }, + }); + }); + + it('should return an object with eventType and destinationPayload keys when given valid message and destination inputs with configured externalId', () => { + const message = { + type: 'group', + groupId: 'group123', + context: { + traits: { + email: 'test@example.com', + customId: '123', + }, + }, + }; + const destination = { + emersysCustomIdentifier: '100', + defaultContactList: 'list123', + fieldMapping: [ + { emersysProperty: '100', rudderProperty: 'customId' }, + { emersysProperty: '3', rudderProperty: 'email' }, + 
], + }; + const result = buildGroupPayload(message, destination); + expect(result).toEqual({ + eventType: 'group', + destinationPayload: { + payload: { + key_id: '100', + external_ids: ['123'], + }, + contactListId: 'group123', + }, + }); + }); + + it('should throw an InstrumentationError if emersysCustomIdentifier value is not present in payload', () => { + const message = { + type: 'group', + groupId: 'group123', + context: { + traits: { + email: 'test@example.com', + }, + }, + }; + const destination = { + emersysCustomIdentifier: 'customId', + defaultContactList: 'list123', + fieldMapping: [ + { emersysProperty: 'customId', rudderProperty: 'customId' }, + { emersysProperty: 'email', rudderProperty: 'email' }, + ], + }; + expect(() => { + buildGroupPayload(message, destination); + }).toThrow(InstrumentationError); + }); + }); + + describe('createGroupBatches', () => { + // Should group events by key_id and contactListId + it('should group events by key_id and contactListId when events are provided', () => { + // Arrange + const events = [ + { + message: [ + { + body: { + JSON: { + destinationPayload: { + payload: { + key_id: 'key1', + external_ids: ['id1', 'id2'], + }, + contactListId: 'list1', + }, + }, + }, + }, + ], + metadata: { jobId: 1, userId: 'u1' }, + }, + { + message: [ + { + body: { + JSON: { + destinationPayload: { + payload: { + key_id: 'key2', + external_ids: ['id3', 'id4'], + }, + contactListId: 'list2', + }, + }, + }, + }, + ], + metadata: { jobId: 2, userId: 'u2' }, + }, + { + message: [ + { + body: { + JSON: { + destinationPayload: { + payload: { + key_id: 'key1', + external_ids: ['id5', 'id6'], + }, + contactListId: 'list1', + }, + }, + }, + }, + ], + metadata: { jobId: 3, userId: 'u3' }, + }, + ]; + + // Act + const result = createGroupBatches(events); + + // Assert + expect(result).toEqual([ + { + endpoint: 'https://api.emarsys.net/api/v2/contactlist/list1/add', + payload: { + key_id: 'key1', + external_ids: ['id1', 'id2', 'id5', 'id6'], + }, + metadata: [ + { jobId: 1, userId: 'u1' }, + { jobId: 3, userId: 'u3' }, + ], + }, + { + endpoint: 'https://api.emarsys.net/api/v2/contactlist/list2/add', + payload: { + key_id: 'key2', + external_ids: ['id3', 'id4'], + }, + metadata: [{ jobId: 2, userId: 'u2' }], + }, + ]); + }); + + // Should return an empty array if no events are provided + it('should return an empty array when no events are provided', () => { + // Arrange + const events = []; + + // Act + const result = createGroupBatches(events); + + // Assert + expect(result).toEqual([]); + }); + }); + + describe('findRudderPropertyByEmersysProperty', () => { + // Returns the correct rudderProperty when given a valid emersysProperty and fieldMapping + it('should return the correct rudderProperty when given a valid emersysProperty and fieldMapping', () => { + const emersysProperty = 'firstName'; + const fieldMapping = [ + { emersysProperty: 'email', rudderProperty: 'email' }, + { emersysProperty: 'firstName', rudderProperty: 'firstName' }, + { emersysProperty: 'lastName', rudderProperty: 'lastName' }, + ]; + + const result = findRudderPropertyByEmersysProperty(emersysProperty, fieldMapping); + + expect(result).toBe('firstName'); + }); + + // Returns null when given an empty fieldMapping + it('should return null when given an empty fieldMapping', () => { + const emersysProperty = 'email'; + const fieldMapping = []; + + const result = findRudderPropertyByEmersysProperty(emersysProperty, fieldMapping); + + expect(result).toBe('email'); + }); + }); + + 
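For reference, the X-WSSE PasswordDigest exercised by the getWsseHeader tests earlier in this file is just the hex-encoded SHA-1 of nonce + timestamp + secret, base64-encoded. A minimal standalone sketch follows, using only Node's built-in crypto; it is an illustration of what base64Sha/getWsseHeader compute, not code added by this diff, and the wsseDigest name is hypothetical.

// Illustrative sketch only: recomputes the WSSE PasswordDigest the same way
// base64Sha/getWsseHeader do in src/cdk/v2/destinations/emarsys/utils.js.
const crypto = require('crypto');

const wsseDigest = (nonce, timestamp, secret) => {
  // hex-encoded SHA-1 of nonce + timestamp + secret, then base64-encoded
  const hexDigest = crypto.createHash('sha1').update(nonce + timestamp + secret).digest('hex');
  return Buffer.from(hexDigest).toString('base64');
};

// wsseDigest('abcdef1234567890abcdef1234567890', '2024-04-28T12:34:56.789Z', 'testSecret')
// reproduces the expectedDigest asserted in the getWsseHeader test above.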
describe('checkIfEventIsAbortableAndExtractErrorMessage', () => { + // Returns {isAbortable: false, errorMsg: ''} if event is neither a string nor an object with keyId. + it('should return {isAbortable: false, errorMsg: ""} when event is neither a string nor an object with keyId', () => { + const event = 123; + const destinationResponse = { + response: { + data: { + errors: { + errorKey: { + errorCode: 'errorMessage', + }, + }, + }, + }, + }; + const keyId = 'keyId'; + + const result = checkIfEventIsAbortableAndExtractErrorMessage( + event, + destinationResponse, + keyId, + ); + + expect(result).toEqual({ isAbortable: false, errorMsg: '' }); + }); + + // Returns {isAbortable: false, errorMsg: ''} if errors object is empty. + it('should return {isAbortable: false, errorMsg: ""} when errors object is empty', () => { + const event = 'event'; + const destinationResponse = { + response: { + data: { + errors: {}, + }, + }, + }; + const keyId = 'keyId'; + + const result = checkIfEventIsAbortableAndExtractErrorMessage( + event, + destinationResponse, + keyId, + ); + + expect(result).toEqual({ isAbortable: false, errorMsg: '' }); + }); + + // Returns {isAbortable: true, errorMsg} if event is a string and has a corresponding error in the errors object. + it('should return {isAbortable: true, errorMsg} when event is a string and has a corresponding error in the errors object', () => { + const event = 'event'; + const destinationResponse = { + response: { + data: { + errors: { + event: { + errorCode: 'errorMessage', + }, + }, + }, + }, + }; + const keyId = 'keyId'; + + const result = checkIfEventIsAbortableAndExtractErrorMessage( + event, + destinationResponse, + keyId, + ); + + expect(result).toEqual({ isAbortable: true, errorMsg: '{"errorCode":"errorMessage"}' }); + }); + + // Returns {isAbortable: true, errorMsg} if event is an object with keyId and has a corresponding error in the errors object. + it('should return {isAbortable: true, errorMsg} when event is an object with keyId and has a corresponding error in the errors object', () => { + const event = { + keyId: 'event', + }; + const destinationResponse = { + response: { + data: { + errors: { + event: { + errorCode: 'errorMessage', + }, + }, + }, + }, + }; + const keyId = 'keyId'; + + const result = checkIfEventIsAbortableAndExtractErrorMessage( + event, + destinationResponse, + keyId, + ); + + expect(result).toEqual({ isAbortable: true, errorMsg: '{"errorCode":"errorMessage"}' }); + }); + }); + + describe('deduceEventId', () => { + // When a valid event name is provided and there is a mapping for it, the function should return the corresponding eventId. + it('should return the corresponding eventId when a valid event name is provided and there is a mapping for it', () => { + const message = { event: 'validEvent' }; + const destConfig = { eventsMapping: [{ from: 'validEvent', to: 'eventId' }] }; + const result = deduceEventId(message, destConfig); + expect(result).toBe('eventId'); + }); + + // When an invalid event name is provided, the function should throw a ConfigurationError. + it('should throw a ConfigurationError when an invalid event name is provided', () => { + const message = { event: 'invalidEvent' }; + const destConfig = { eventsMapping: [{ from: 'validEvent', to: 'eventId' }] }; + expect(() => deduceEventId(message, destConfig)).toThrow(ConfigurationError); + }); + + // When a valid event name is provided and there is no mapping for it, the function should throw a ConfigurationError. 
+ it('should throw a ConfigurationError when a valid event name is provided and there is no mapping for it', () => { + const message = { event: 'validEvent' }; + const destConfig = { eventsMapping: [] }; + expect(() => deduceEventId(message, destConfig)).toThrow(ConfigurationError); + }); + + // When eventsMapping is not an array, the function should throw a TypeError. + it('should throw a TypeError when eventsMapping is not an array', () => { + const message = { event: 'validEvent' }; + const destConfig = { eventsMapping: 'notAnArray' }; + expect(() => deduceEventId(message, destConfig)).toThrow( + 'validEvent is not mapped to any Emersys external event. Aborting', + ); + }); + }); +}); diff --git a/src/cdk/v2/destinations/koddi/config.js b/src/cdk/v2/destinations/koddi/config.js new file mode 100644 index 0000000000..927e1858fc --- /dev/null +++ b/src/cdk/v2/destinations/koddi/config.js @@ -0,0 +1,39 @@ +const { getMappingConfig } = require('../../../../v0/util'); + +/** + * ref :- https://developers.koddi.com/reference/winning-ads + * impressions - https://developers.koddi.com/reference/impressions-1 + * clicks - https://developers.koddi.com/reference/clicks-1 + * conversions - https://developers.koddi.com/reference/conversions-1 + */ +const EVENT_TYPES = { + IMPRESSIONS: 'impressions', + CLICKS: 'clicks', + CONVERSIONS: 'conversions', +}; + +const CONFIG_CATEGORIES = { + IMPRESSIONS: { + type: 'track', + name: 'ImpressionsConfig', + }, + CLICKS: { + type: 'track', + name: 'ClicksConfig', + }, + CONVERSIONS: { + type: 'track', + name: 'ConversionsConfig', + }, +}; + +const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname); + +module.exports = { + EVENT_TYPES, + CONFIG_CATEGORIES, + MAPPING_CONFIG, + IMPRESSIONS_CONFIG: MAPPING_CONFIG[CONFIG_CATEGORIES.IMPRESSIONS.name], + CLICKS_CONFIG: MAPPING_CONFIG[CONFIG_CATEGORIES.CLICKS.name], + CONVERSIONS_CONFIG: MAPPING_CONFIG[CONFIG_CATEGORIES.CONVERSIONS.name], +}; diff --git a/src/cdk/v2/destinations/koddi/data/ClicksConfig.json b/src/cdk/v2/destinations/koddi/data/ClicksConfig.json new file mode 100644 index 0000000000..96ab27b2ae --- /dev/null +++ b/src/cdk/v2/destinations/koddi/data/ClicksConfig.json @@ -0,0 +1,35 @@ +[ + { + "sourceKeys": "properties.tracking_data", + "required": true, + "destKey": "trackingData" + }, + { + "sourceKeys": "properties.rank", + "required": true, + "destKey": "rank" + }, + { + "sourceKeys": "properties.beacon_issued", + "required": true, + "destKey": "beaconIssued" + }, + { + "sourceKeys": "userId", + "sourceFromGenericMap": true, + "required": true, + "destKey": "userGuid" + }, + { + "sourceKeys": "properties.test_version_override", + "destKey": "testVersionOverride" + }, + { + "sourceKeys": "properties.destination_url", + "destKey": "destinationUrl" + }, + { + "sourceKeys": "properties.overrides", + "destKey": "overrides" + } +] diff --git a/src/cdk/v2/destinations/koddi/data/ConversionsConfig.json b/src/cdk/v2/destinations/koddi/data/ConversionsConfig.json new file mode 100644 index 0000000000..495574f198 --- /dev/null +++ b/src/cdk/v2/destinations/koddi/data/ConversionsConfig.json @@ -0,0 +1,53 @@ +[ + { + "sourceKeys": "context.page.referring_domain", + "destKey": "domain" + }, + { + "sourceKeys": "context.locale", + "required": true, + "destKey": "culture" + }, + { + "sourceKeys": "properties.currency", + "required": true, + "destKey": "currency" + }, + { + "sourceKeys": ["context.ip", "request_ip"], + "destKey": "user_ip" + }, + { + "sourceKeys": "context.userAgent", + "destKey": 
"user_agent" + }, + { + "sourceKeys": "userId", + "sourceFromGenericMap": true, + "required": true, + "destKey": "user_guid" + }, + { + "sourceKeys": "context.device.type", + "destKey": "device_type" + }, + { + "sourceKeys": ["properties.order_id", "properties.transaction_id"], + "required": true, + "destKey": "transaction_id" + }, + { + "sourceKeys": "properties.conversion_source", + "destKey": "conversion_source" + }, + { + "sourceKeys": "timestamp", + "sourceFromGenericMap": true, + "destKey": "unixtime" + }, + { + "sourceKeys": "properties.bidders", + "required": true, + "destKey": "bidders" + } +] diff --git a/src/cdk/v2/destinations/koddi/data/ImpressionsConfig.json b/src/cdk/v2/destinations/koddi/data/ImpressionsConfig.json new file mode 100644 index 0000000000..de53703b32 --- /dev/null +++ b/src/cdk/v2/destinations/koddi/data/ImpressionsConfig.json @@ -0,0 +1,22 @@ +[ + { + "sourceKeys": "properties.tracking_data", + "required": true, + "destKey": "trackingData" + }, + { + "sourceKeys": "properties.rank", + "required": true, + "destKey": "rank" + }, + { + "sourceKeys": "properties.beacon_issued", + "required": true, + "destKey": "beaconIssued" + }, + { + "sourceKeys": "timestamp", + "sourceFromGenericMap": true, + "destKey": "ts" + } +] diff --git a/src/cdk/v2/destinations/koddi/procWorkflow.yaml b/src/cdk/v2/destinations/koddi/procWorkflow.yaml new file mode 100644 index 0000000000..cc3f0166dc --- /dev/null +++ b/src/cdk/v2/destinations/koddi/procWorkflow.yaml @@ -0,0 +1,33 @@ +bindings: + - name: EventType + path: ../../../../constants + - path: ../../bindings/jsontemplate + - name: removeUndefinedAndNullValues + path: ../../../../v0/util + - path: ./utils + - path: ./config + +steps: + - name: messageType + template: | + .message.type.toLowerCase(); + - name: eventType + template: | + .message.integrations.koddi.eventType.toLowerCase(); + - name: validateInput + template: | + let messageType = $.outputs.messageType; + let eventType = $.outputs.eventType; + $.assert(messageType, "message Type is not present. Aborting message."); + $.assert(messageType in {{$.EventType.([.TRACK])}}, "message type " + messageType + " is not supported"); + $.assert(eventType in {{$.EVENT_TYPES.([.IMPRESSIONS, .CLICKS, .CONVERSIONS])}}, "event type " + eventType + " is not supported"); + $.assertConfig(.destination.Config.apiBaseUrl, "API Base URL is not present. Aborting"); + $.assertConfig(.destination.Config.clientName, "Client Name is not present. 
Aborting"); + - name: preparePayload + template: | + const payload = $.constructFullPayload($.outputs.eventType, .message, .destination.Config); + $.context.payload = $.removeUndefinedAndNullValues(payload); + - name: buildResponse + template: | + const response = $.constructResponse($.outputs.eventType, .destination.Config, $.context.payload); + response diff --git a/src/cdk/v2/destinations/koddi/rtWorkflow.yaml b/src/cdk/v2/destinations/koddi/rtWorkflow.yaml new file mode 100644 index 0000000000..dd438a911c --- /dev/null +++ b/src/cdk/v2/destinations/koddi/rtWorkflow.yaml @@ -0,0 +1,31 @@ +bindings: + - name: handleRtTfSingleEventError + path: ../../../../v0/util/index + +steps: + - name: validateInput + template: | + $.assert(Array.isArray(^) && ^.length > 0, "Invalid event array") + + - name: transform + externalWorkflow: + path: ./procWorkflow.yaml + loopOverInput: true + + - name: successfulEvents + template: | + $.outputs.transform#idx.output.({ + "batchedRequest": ., + "batched": false, + "destination": ^[idx].destination, + "metadata": ^[idx].metadata[], + "statusCode": 200 + })[] + - name: failedEvents + template: | + $.outputs.transform#idx.error.( + $.handleRtTfSingleEventError(^[idx], .originalError ?? ., {}) + )[] + - name: finalPayload + template: | + [...$.outputs.successfulEvents, ...$.outputs.failedEvents] diff --git a/src/cdk/v2/destinations/koddi/utils.js b/src/cdk/v2/destinations/koddi/utils.js new file mode 100644 index 0000000000..13014e2e7c --- /dev/null +++ b/src/cdk/v2/destinations/koddi/utils.js @@ -0,0 +1,116 @@ +const { InstrumentationError } = require('@rudderstack/integrations-lib'); +const { EVENT_TYPES, IMPRESSIONS_CONFIG, CLICKS_CONFIG, CONVERSIONS_CONFIG } = require('./config'); +const { + constructPayload, + defaultRequestConfig, + toUnixTimestamp, + stripTrailingSlash, +} = require('../../../../v0/util'); + +const validateBidders = (bidders) => { + if (!Array.isArray(bidders)) { + throw new InstrumentationError('properties.bidders should be an array of objects. Aborting.'); + } + if (bidders.length === 0) { + throw new InstrumentationError( + 'properties.bidders should contains at least one bidder. Aborting.', + ); + } + bidders.forEach((bidder) => { + if (!(bidder.bidder || bidder.alternate_bidder)) { + throw new InstrumentationError('bidder or alternate_bidder is not present. Aborting.'); + } + if (!bidder.count) { + throw new InstrumentationError('count is not present. Aborting.'); + } + if (!bidder.base_price) { + throw new InstrumentationError('base_price is not present. Aborting.'); + } + }); +}; + +/** + * This function constructs payloads based upon mappingConfig for all calls. 
+ * @param {*} eventType + * @param {*} message + * @param {*} Config + * @returns + */ +const constructFullPayload = (eventType, message, Config) => { + let payload; + switch (eventType) { + case EVENT_TYPES.IMPRESSIONS: + payload = constructPayload(message, IMPRESSIONS_CONFIG); + payload.clientName = Config.clientName; + break; + case EVENT_TYPES.CLICKS: + payload = constructPayload(message, CLICKS_CONFIG); + payload.clientName = Config.clientName; + if (!Config.testVersionOverride) { + payload.testVersionOverride = null; + } + if (!Config.overrides) { + payload.overrides = null; + } + break; + case EVENT_TYPES.CONVERSIONS: + payload = constructPayload(message, CONVERSIONS_CONFIG); + payload.client_name = Config.clientName; + payload.unixtime = toUnixTimestamp(payload.unixtime); + validateBidders(payload.bidders); + break; + default: + throw new InstrumentationError(`event type ${eventType} is not supported.`); + } + return payload; +}; + +const getEndpoint = (eventType, Config) => { + let endpoint = stripTrailingSlash(Config.apiBaseUrl); + switch (eventType) { + case EVENT_TYPES.IMPRESSIONS: + endpoint += '?action=impression'; + break; + case EVENT_TYPES.CLICKS: + endpoint += '?action=click'; + break; + case EVENT_TYPES.CONVERSIONS: + endpoint += '/conversion'; + break; + default: + throw new InstrumentationError(`event type ${eventType} is not supported.`); + } + return endpoint; +}; + +/** + * This function constructs response based upon event. + * @param {*} eventType + * @param {*} Config + * @param {*} payload + * @returns + */ +const constructResponse = (eventType, Config, payload) => { + if (!Object.values(EVENT_TYPES).includes(eventType)) { + throw new InstrumentationError(`event type ${eventType} is not supported.`); + } + const response = defaultRequestConfig(); + response.endpoint = getEndpoint(eventType, Config); + response.headers = { + accept: 'application/json', + }; + if (eventType === EVENT_TYPES.CONVERSIONS) { + response.body.JSON = payload; + response.method = 'POST'; + response.headers = { + ...response.headers, + 'content-type': 'application/json', + }; + } else { + response.params = payload; + response.method = 'GET'; + } + return response; +}; + +module.exports = { getEndpoint, validateBidders, constructFullPayload, constructResponse }; diff --git a/src/cdk/v2/destinations/koddi/utils.test.js b/src/cdk/v2/destinations/koddi/utils.test.js new file mode 100644 index 0000000000..2c1f660f70 --- /dev/null +++ b/src/cdk/v2/destinations/koddi/utils.test.js @@ -0,0 +1,421 @@ +const { + getEndpoint, + validateBidders, + constructFullPayload, + constructResponse, +} = require('./utils'); +const { InstrumentationError } = require('@rudderstack/integrations-lib'); + +describe('getEndpoint', () => { + it('returns the correct endpoint for IMPRESSIONS event', () => { + const eventType = 'impressions'; + const Config = { + apiBaseUrl: 'https://www.test-client.com/', + clientName: 'test-client', + }; + const result = getEndpoint(eventType, Config); + expect(result).toEqual('https://www.test-client.com?action=impression'); + }); + + it('returns the correct endpoint for CLICKS event', () => { + const eventType = 'clicks'; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + const result = getEndpoint(eventType, Config); + expect(result).toEqual('https://www.test-client.com?action=click'); + }); + + it('returns the correct endpoint for CONVERSIONS event', () => { + const eventType = 'conversions'; + const Config = { + apiBaseUrl:
'https://www.test-client.com', + clientName: 'test-client', + }; + const result = getEndpoint(eventType, Config); + expect(result).toEqual('https://www.test-client.com/conversion'); + }); + + it('should throw error for unsupported event', () => { + const eventType = 'test'; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + expect(() => getEndpoint(eventType, Config)).toThrow(InstrumentationError); + expect(() => getEndpoint(eventType, Config)).toThrow('event type test is not supported.'); + }); +}); + +describe('validateBidders', () => { + it('should throw error if bidders is not an array', () => { + const bidders = {}; + expect(() => validateBidders(bidders)).toThrow(InstrumentationError); + expect(() => validateBidders(bidders)).toThrow( + 'properties.bidders should be an array of objects. Aborting.', + ); + }); + + it('should throw error if bidders is an empty array', () => { + const bidders = []; + expect(() => validateBidders(bidders)).toThrow(InstrumentationError); + expect(() => validateBidders(bidders)).toThrow( + 'properties.bidders should contains at least one bidder. Aborting.', + ); + }); + + it('should throw error if bidder or alternate_bidder is not present', () => { + const bidders = [ + { count: 1, base_price: 100 }, + { bidder: 'bidder1', count: 2, base_price: 200 }, + { alternate_bidder: 'alternate1', count: 3, base_price: 300 }, + ]; + expect(() => validateBidders(bidders)).toThrow(InstrumentationError); + expect(() => validateBidders(bidders)).toThrow( + 'bidder or alternate_bidder is not present. Aborting.', + ); + }); + + it('should throw error if count is not present', () => { + const bidders = [{ bidder: 'bidder1', alternate_bidder: 'alternate1', base_price: 100 }]; + expect(() => validateBidders(bidders)).toThrow(InstrumentationError); + expect(() => validateBidders(bidders)).toThrow('count is not present. Aborting.'); + }); + + it('should throw error if base_price is not present', () => { + const bidders = [{ bidder: 'bidder1', alternate_bidder: 'alternate1', count: 1 }]; + expect(() => validateBidders(bidders)).toThrow(InstrumentationError); + expect(() => validateBidders(bidders)).toThrow('base_price is not present. 
Aborting.'); + }); + + it('should not throw error if all required fields are present for all bidders', () => { + const bidders = [ + { bidder: 'bidder1', alternate_bidder: 'alternate1', count: 1, base_price: 100 }, + { bidder: 'bidder2', alternate_bidder: 'alternate2', count: 2, base_price: 200 }, + ]; + expect(() => validateBidders(bidders)).not.toThrow(); + }); +}); + +describe('constructFullPayload', () => { + it('should construct payload for IMPRESSIONS event', () => { + const eventType = 'impressions'; + const message = { + type: 'track', + event: 'Impressions Event', + properties: { + tracking_data: 'dummy-tracking-data', + rank: 1, + beacon_issued: '2024-03-04T15:32:56.409Z', + }, + timestamp: '2024-03-03T00:29:12.117+05:30', + }; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + const expectedPayload = { + beaconIssued: '2024-03-04T15:32:56.409Z', + clientName: 'test-client', + rank: 1, + trackingData: 'dummy-tracking-data', + ts: '2024-03-03T00:29:12.117+05:30', + }; + const payload = constructFullPayload(eventType, message, Config); + expect(payload).toEqual(expectedPayload); + }); + it('should throw error if required value is missing for IMPRESSIONS event', () => { + const eventType = 'impressions'; + const message = { + type: 'track', + event: 'Impressions Event', + properties: { + tracking_data: '', + rank: 1, + beacon_issued: '2024-03-04T15:32:56.409Z', + }, + timestamp: '2024-03-03T00:29:12.117+05:30', + }; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + try { + const payload = constructFullPayload(eventType, message, Config); + } catch (error) { + expect(error.message).toEqual('Missing required value from "properties.tracking_data"'); + } + }); + + it('should construct payload for CLICKS event', () => { + const eventType = 'clicks'; + const message = { + type: 'track', + event: 'Clicks Event', + properties: { + tracking_data: 'dummy-tracking-data', + rank: 1, + beacon_issued: '2024-03-04T15:32:56.409Z', + }, + anonymousId: '1234', + }; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + const expectedPayload = { + beaconIssued: '2024-03-04T15:32:56.409Z', + clientName: 'test-client', + rank: 1, + trackingData: 'dummy-tracking-data', + userGuid: '1234', + overrides: null, + testVersionOverride: null, + }; + const payload = constructFullPayload(eventType, message, Config); + expect(payload).toEqual(expectedPayload); + }); + it('should construct payload with non-null value if overrides and testVersionOverride are enable and values for these are provided for CLICKS event ', () => { + const eventType = 'clicks'; + const message = { + type: 'track', + event: 'Clicks Event', + properties: { + tracking_data: 'dummy-tracking-data', + rank: 1, + beacon_issued: '2024-03-04T15:32:56.409Z', + overrides: 'overridden-value', + testVersionOverride: 1, + }, + anonymousId: '1234', + }; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + overrides: true, + testVersionOverride: false, + }; + const expectedPayload = { + beaconIssued: '2024-03-04T15:32:56.409Z', + clientName: 'test-client', + rank: 1, + trackingData: 'dummy-tracking-data', + userGuid: '1234', + overrides: 'overridden-value', + testVersionOverride: null, + }; + const payload = constructFullPayload(eventType, message, Config); + expect(payload).toEqual(expectedPayload); + }); + it('should throw error if required value is missing for CLICKS 
event', () => { + const eventType = 'clicks'; + const message = { + type: 'track', + event: 'Clicks Event', + properties: { + tracking_data: 'dummy-tracking-data', + rank: 1, + beacon_issued: '2024-03-04T15:32:56.409Z', + }, + }; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + try { + const payload = constructFullPayload(eventType, message, Config); + } catch (error) { + expect(error.message).toEqual('Missing required value from "userId"'); + } + }); + + it('should construct payload for CONVERSIONS event', () => { + const eventType = 'conversions'; + const message = { + type: 'track', + event: 'Conversions Event', + properties: { + currency: 'USD', + order_id: '123', + bidders: [ + { + bidder: 'dummy-bidder-id', + count: 1, + base_price: 100.1, + }, + ], + }, + context: { + locale: 'en-US', + ip: '127.0.0.1', + }, + timestamp: '2024-03-03T00:29:12.117+05:30', + anonymousId: '1234', + }; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + const expectedPayload = { + client_name: 'test-client', + culture: 'en-US', + currency: 'USD', + transaction_id: '123', + unixtime: 1709405952, + user_guid: '1234', + user_ip: '127.0.0.1', + bidders: [ + { + bidder: 'dummy-bidder-id', + count: 1, + base_price: 100.1, + }, + ], + }; + const payload = constructFullPayload(eventType, message, Config); + expect(payload).toEqual(expectedPayload); + }); + it('should throw error if required value is missing for CONVERSIONS event', () => { + const eventType = 'conversions'; + const message = { + type: 'track', + event: 'Conversions Event', + properties: { + currency: 'USD', + order_id: '123', + bidders: [ + { + bidder: 'dummy-bidder-id', + count: 1, + base_price: 100.1, + }, + ], + }, + context: { + ip: '127.0.0.1', + }, + timestamp: '2024-03-03T00:29:12.117+05:30', + anonymousId: '1234', + }; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + try { + const payload = constructFullPayload(eventType, message, Config); + } catch (error) { + expect(error.message).toEqual('Missing required value from "context.locale"'); + } + }); + + it('should throw error for unsupported event', () => { + const eventType = 'test'; + const message = {}; + const Config = {}; + expect(() => constructFullPayload(eventType, message, Config)).toThrow(InstrumentationError); + expect(() => constructFullPayload(eventType, message, Config)).toThrow( + 'event type test is not supported.', + ); + }); +}); + +describe('constructResponse', () => { + it('should construct response for IMPRESSIONS event', () => { + const eventType = 'impressions'; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + const payload = { + beaconIssued: '2024-03-04T15:32:56.409Z', + clientName: 'test-client', + rank: 1, + trackingData: 'dummy-tracking-data', + ts: '2024-03-03T00:29:12.117+05:30', + }; + const expectedResponse = { + endpoint: 'https://www.test-client.com?action=impression', + headers: { + accept: 'application/json', + }, + method: 'GET', + params: payload, + }; + const response = constructResponse(eventType, Config, payload); + expect(response).toMatchObject(expectedResponse); + }); + + it('should construct response for CLICKS event', () => { + const eventType = 'clicks'; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + const payload = { + beaconIssued: '2024-03-04T15:32:56.409Z', + clientName: 'test-client', + 
rank: 1, + trackingData: 'dummy-tracking-data', + userGuid: '1234', + }; + const expectedResponse = { + endpoint: 'https://www.test-client.com?action=click', + headers: { + accept: 'application/json', + }, + method: 'GET', + params: payload, + }; + const response = constructResponse(eventType, Config, payload); + expect(response).toMatchObject(expectedResponse); + }); + + it('should construct response for CONVERSIONS event', () => { + const eventType = 'conversions'; + const Config = { + apiBaseUrl: 'https://www.test-client.com', + clientName: 'test-client', + }; + const payload = { + client_name: 'test-client', + culture: 'en-US', + currency: 'USD', + transaction_id: '123', + unixtime: 1709405952, + userGuid: '1234', + user_ip: '127.0.0.1', + bidders: [ + { + bidder: 'dummy-bidder-id', + count: 1, + base_price: 100.1, + }, + ], + }; + + const expectedResponse = { + endpoint: 'https://www.test-client.com/conversion', + headers: { + accept: 'application/json', + 'content-type': 'application/json', + }, + method: 'POST', + body: { + JSON: payload, + }, + }; + const response = constructResponse(eventType, Config, payload); + expect(response).toMatchObject(expectedResponse); + }); + + it('should throw error for unsupported event', () => { + const eventType = 'test'; + const Config = {}; + const payload = {}; + expect(() => constructResponse(eventType, Config, payload)).toThrow(InstrumentationError); + expect(() => constructResponse(eventType, Config, payload)).toThrow( + 'event type test is not supported.', + ); + }); +}); diff --git a/src/cdk/v2/destinations/movable_ink/procWorkflow.yaml b/src/cdk/v2/destinations/movable_ink/procWorkflow.yaml index 394190049b..fef11124b3 100644 --- a/src/cdk/v2/destinations/movable_ink/procWorkflow.yaml +++ b/src/cdk/v2/destinations/movable_ink/procWorkflow.yaml @@ -33,7 +33,7 @@ steps: {{{{$.getGenericPaths("email")}}}}; ); - $.assert(userId ?? email ?? .message.anonymousId, "Either one of userId or email or anonymousId is required. Aborting"); + $.assert(userId || email || .message.anonymousId, "Either one of userId or email or anonymousId is required. Aborting"); $.validateEventPayload(.message); - name: preparePayload @@ -50,7 +50,7 @@ steps: )); $.context.payload = { ...(.message), - userId: userId ?? 
email, + userId: userId || email, timestamp: timestampInUnix, anonymousId: .message.anonymousId } diff --git a/src/cdk/v2/destinations/ninetailed/config.js b/src/cdk/v2/destinations/ninetailed/config.js index a59b2a1671..efb1a8908e 100644 --- a/src/cdk/v2/destinations/ninetailed/config.js +++ b/src/cdk/v2/destinations/ninetailed/config.js @@ -19,7 +19,6 @@ const ConfigCategories = { }, }; -// MAX_BATCH_SIZE : // Maximum number of events to send in a single batch const mappingConfig = getMappingConfig(ConfigCategories, __dirname); const batchEndpoint = 'https://experience.ninetailed.co/v2/organizations/{{organisationId}}/environments/{{environment}}/events'; diff --git a/src/cdk/v2/destinations/ninetailed/data/contextMapping.json b/src/cdk/v2/destinations/ninetailed/data/contextMapping.json index f2373b61c1..ad301c8150 100644 --- a/src/cdk/v2/destinations/ninetailed/data/contextMapping.json +++ b/src/cdk/v2/destinations/ninetailed/data/contextMapping.json @@ -1,12 +1,10 @@ [ { "sourceKeys": "app.name", - "required": true, "destKey": "app.name" }, { "sourceKeys": "app.version", - "required": true, "destKey": "app.version" }, { @@ -15,12 +13,16 @@ }, { "sourceKeys": "library.name", - "required": true, - "destKey": "library.name" + "destKey": "library.name", + "metadata": { + "defaultValue": "Rudderstack Ninetailed Destination" + } }, { "sourceKeys": "library.version", - "required": true, + "metadata": { + "defaultValue": "1" + }, "destKey": "library.version" }, { @@ -37,7 +39,6 @@ }, { "sourceKeys": "location", - "required": false, "metadata": { "defaultValue": {} }, diff --git a/src/cdk/v2/destinations/ninetailed/data/generalPayloadMapping.json b/src/cdk/v2/destinations/ninetailed/data/generalPayloadMapping.json index 3ab72d1b9f..22fc637728 100644 --- a/src/cdk/v2/destinations/ninetailed/data/generalPayloadMapping.json +++ b/src/cdk/v2/destinations/ninetailed/data/generalPayloadMapping.json @@ -11,7 +11,9 @@ }, { "sourceKeys": "channel", - "required": true, + "metadata": { + "defaultValue": "server" + }, "destKey": "channel" }, { diff --git a/src/cdk/v2/destinations/ninetailed/data/identifyMapping.json b/src/cdk/v2/destinations/ninetailed/data/identifyMapping.json index e8d3f7797d..b1a340bd98 100644 --- a/src/cdk/v2/destinations/ninetailed/data/identifyMapping.json +++ b/src/cdk/v2/destinations/ninetailed/data/identifyMapping.json @@ -2,13 +2,14 @@ { "sourceKeys": "traits", "sourceFromGenericMap": true, - "required": true, + "metadata": { + "defaultValue": {} + }, "destKey": "traits" }, { "sourceKeys": "userIdOnly", "sourceFromGenericMap": true, - "required": true, "destKey": "userId" } ] diff --git a/src/cdk/v2/destinations/ninetailed/data/trackMapping.json b/src/cdk/v2/destinations/ninetailed/data/trackMapping.json index 44af6dd1a3..5a13f5bba2 100644 --- a/src/cdk/v2/destinations/ninetailed/data/trackMapping.json +++ b/src/cdk/v2/destinations/ninetailed/data/trackMapping.json @@ -1,7 +1,9 @@ [ { "sourceKeys": "properties", - "required": true, + "metadata": { + "defaultValue": {} + }, "destKey": "properties" }, { diff --git a/src/cdk/v2/destinations/ninetailed/procWorkflow.yaml b/src/cdk/v2/destinations/ninetailed/procWorkflow.yaml index 383b850a4d..e31912386a 100644 --- a/src/cdk/v2/destinations/ninetailed/procWorkflow.yaml +++ b/src/cdk/v2/destinations/ninetailed/procWorkflow.yaml @@ -23,7 +23,6 @@ steps: template: | const payload = $.constructFullPayload(.message); $.context.payload = $.removeUndefinedAndNullValues(payload); - - name: buildResponse template: | const response = 
$.defaultRequestConfig(); diff --git a/src/cdk/v2/destinations/ninetailed/utils.js b/src/cdk/v2/destinations/ninetailed/utils.js index 47b27b3b9d..fec1271561 100644 --- a/src/cdk/v2/destinations/ninetailed/utils.js +++ b/src/cdk/v2/destinations/ninetailed/utils.js @@ -30,6 +30,7 @@ const constructFullPayload = (message) => { message, config.mappingConfig[config.ConfigCategories.IDENTIFY.name], ); + typeSpecifcPayload.userId = typeSpecifcPayload.userId || ''; break; default: break; diff --git a/src/cdk/v2/destinations/ortto/procWorkflow.yaml b/src/cdk/v2/destinations/ortto/procWorkflow.yaml index dfd7118c41..27c3749cc8 100644 --- a/src/cdk/v2/destinations/ortto/procWorkflow.yaml +++ b/src/cdk/v2/destinations/ortto/procWorkflow.yaml @@ -48,9 +48,9 @@ steps: "str::ei": {{{{$.getGenericPaths("userId")}}}}, "str::language": .context.traits.language || .context.locale, "phn::phone": phone ? {"n": phone}, - "bol::gdpr": .context.traits.gdpr ?? true, - "bol::p": .context.traits.emailConsent || false, - "bol::sp": .context.traits.smsConsent || false, + "bol::gdpr": .context.traits.gdpr, + "bol::p": .context.traits.emailConsent, + "bol::sp": .context.traits.smsConsent, }, "location": {"source_ip": .context.ip} }); @@ -80,11 +80,8 @@ steps: steps: - name: getTrimmedEvent template: | - let customEvent = ""; const event = .message.event; - .destination.Config.orttoEventsMapping@order.( - customEvent = event === .rsEventName ? .orttoEventName : null; - ) + const customEvent = .destination.Config.orttoEventsMapping.{.rsEventName === event}.orttoEventName; $.assert(customEvent, "Event names is not mapped"); "act:cm:"+customEvent.trim().toLowerCase().replace(new RegExp('\\s+', 'g'),'-'); diff --git a/src/cdk/v2/destinations/reddit/procWorkflow.yaml b/src/cdk/v2/destinations/reddit/procWorkflow.yaml index 7b989f15e4..06d2c95f25 100644 --- a/src/cdk/v2/destinations/reddit/procWorkflow.yaml +++ b/src/cdk/v2/destinations/reddit/procWorkflow.yaml @@ -36,13 +36,13 @@ steps: const os = (.message.context.os.name)? .message.context.os.name.toLowerCase(): null; const hashData = .destination.Config.hashData; let user = .message.().({ - "email": hashData ? $.SHA256({{{{$.getGenericPaths("email")}}}}) : ({{{{$.getGenericPaths("email")}}}}), - "external_id": hashData ? $.SHA256({{{{$.getGenericPaths("userId")}}}}) : ({{{{$.getGenericPaths("userId")}}}}), - "ip_address": hashData? $.SHA256(.context.ip || .request_ip) : (.context.ip || .request_ip), + "email": hashData ? $.SHA256({{{{$.getGenericPaths("email")}}}}.trim()) : ({{{{$.getGenericPaths("email")}}}}), + "external_id": hashData ? $.SHA256({{{{$.getGenericPaths("userId")}}}}.trim()) : ({{{{$.getGenericPaths("userId")}}}}), + "ip_address": hashData? $.SHA256(.context.ip.trim() || .request_ip.trim()) : (.context.ip || .request_ip), "uuid": .properties.uuid, "user_agent": .context.userAgent, - "idfa": $.isAppleFamily(os)? (hashData? $.SHA256(.context.device.advertisingId): .context.device.advertisingId): null, - "aaid": os === "android" && .context.device ? (hashData? $.SHA256(.context.device.advertisingId): .context.device.advertisingId): null, + "idfa": $.isAppleFamily(os)? (hashData? $.SHA256(.context.device.advertisingId.trim()): .context.device.advertisingId): null, + "aaid": os === "android" && .context.device ? (hashData? 
$.SHA256(.context.device.advertisingId.trim()): .context.device.advertisingId): null, "opt_out": .properties.optOut, "screen_dimensions": {"width": .context.screen.width, "height": .context.screen.height}, }); diff --git a/src/cdk/v2/destinations/yandex_metrica_offline_events/config.js b/src/cdk/v2/destinations/yandex_metrica_offline_events/config.js new file mode 100644 index 0000000000..83513c3856 --- /dev/null +++ b/src/cdk/v2/destinations/yandex_metrica_offline_events/config.js @@ -0,0 +1,5 @@ +const YANDEX_METRICA_OFFLINE_EVENTS = 'yandex_metrica_offline_events'; + +module.exports = { + YANDEX_METRICA_OFFLINE_EVENTS, +}; diff --git a/src/cdk/v2/destinations/yandex_metrica_offline_events/procWorkflow.yaml b/src/cdk/v2/destinations/yandex_metrica_offline_events/procWorkflow.yaml new file mode 100644 index 0000000000..690bc399ee --- /dev/null +++ b/src/cdk/v2/destinations/yandex_metrica_offline_events/procWorkflow.yaml @@ -0,0 +1,36 @@ +bindings: + - name: EventType + path: ../../../../constants + - path: ../../bindings/jsontemplate + exportAll: true + - path: ./config + - name: removeUndefinedAndNullValues + path: ../../../../v0/util + - name: defaultRequestConfig + path: ../../../../v0/util + - path: ./utils + +steps: + - name: validateInput + template: | + let messageType = .message.type; + $.assert(messageType, "message Type is not present. Aborting message."); + $.assert(.message.type.toLowerCase() ==='identify', "Event type " + .message.type.toLowerCase() + " is not supported. Aborting message."); + $.assert(.message.traits || .message.properties, "Message traits/properties not present. Aborting message."); + + - name: prepareData + template: | + let data = .message.traits + let identifierType = .message.context.externalId[0].identifierType; + let identifierValue = .message.context.externalId[0].id; + identifierValue = String(identifierValue); + data = $.setIdentifier(data, identifierType, identifierValue) + data = $.validateData(data) + data + + - name: buildResponseForProcessTransformation + description: build response + template: | + const response = $.defaultRequestConfig(); + response.body.JSON = $.outputs.prepareData + response diff --git a/src/cdk/v2/destinations/yandex_metrica_offline_events/utils.js b/src/cdk/v2/destinations/yandex_metrica_offline_events/utils.js new file mode 100644 index 0000000000..032b0b636d --- /dev/null +++ b/src/cdk/v2/destinations/yandex_metrica_offline_events/utils.js @@ -0,0 +1,51 @@ +/* eslint-disable no-param-reassign */ +const { InstrumentationError, isDefinedNotNullNotEmpty } = require('@rudderstack/integrations-lib'); +const moment = require('moment'); + +const setIdentifier = (data, identifierType, identifierValue) => { + const updatedData = data; + if (identifierType === 'ClientId') { + updatedData.ClientId = identifierValue; + } else if (identifierType === 'YCLID') { + updatedData.Yclid = identifierValue; + } else if (identifierType === 'UserId') { + updatedData.UserId = identifierValue; + } else { + throw new InstrumentationError( + 'Invalid identifier type passed in external Id. Valid types are ClientId, YCLID, UserId. Aborting!', + ); + } + return updatedData; +}; + +function isUnixTimestamp(datetime) { + if (moment.unix(datetime).isValid()) { + return datetime; + } + const unixTimestamp = moment(datetime).unix(); + if (moment.unix(unixTimestamp).isValid()) { + return unixTimestamp; + } + throw new InstrumentationError('Invalid timestamp. 
Aborting!'); +} + +const validateData = (data) => { + const { Price, DateTime } = data; + if (!isDefinedNotNullNotEmpty(data)) { + throw new InstrumentationError('No traits found in the payload. Aborting!'); + } + if (Price && typeof Price !== 'number') { + throw new InstrumentationError('Price can only be a numerical value. Aborting!'); + } + if (!isDefinedNotNullNotEmpty(DateTime)) { + throw new InstrumentationError('DateTime cannot be empty. Aborting!'); + } + data.DateTime = String(isUnixTimestamp(DateTime)); + return data; +}; + +module.exports = { + setIdentifier, + validateData, + isUnixTimestamp, +}; diff --git a/src/constants/destinationCanonicalNames.js b/src/constants/destinationCanonicalNames.js index ee4f4f0b33..19136eab59 100644 --- a/src/constants/destinationCanonicalNames.js +++ b/src/constants/destinationCanonicalNames.js @@ -166,6 +166,7 @@ const DestCanonicalNames = { ], koala: ['Koala', 'koala', 'KOALA'], bloomreach: ['Bloomreach', 'bloomreach', 'BLOOMREACH'], + emarsys: ['EMARSYS', 'Emarsys', 'emarsys'], }; module.exports = { DestHandlerMap, DestCanonicalNames }; diff --git a/src/features.json b/src/features.json index 6d2cac9340..58af795a77 100644 --- a/src/features.json +++ b/src/features.json @@ -70,7 +70,9 @@ "KOALA": true, "LINKEDIN_ADS": true, "BLOOMREACH": true, - "MOVABLE_INK": true + "MOVABLE_INK": true, + "EMARSYS": true, + "KODDI": true }, "regulations": [ "BRAZE", @@ -84,7 +86,8 @@ "ENGAGE", "CUSTIFY", "SENDGRID", - "SPRIG" + "SPRIG", + "EMARSYS" ], "supportSourceTransformV1": true, "supportTransformerProxyV1": true diff --git a/src/legacy/router.js b/src/legacy/router.js index 9dd83b5988..afc8c1a797 100644 --- a/src/legacy/router.js +++ b/src/legacy/router.js @@ -649,6 +649,9 @@ if (startDestTransformer) { stats.timing('user_transform_request_latency', startTime, { processSessions, }); + stats.timingSummary('user_transform_request_latency_summary', startTime, { + processSessions, + }); stats.increment('user_transform_requests', { processSessions }); stats.histogram('user_transform_output_events', transformedEvents.length, { processSessions, diff --git a/src/services/destination/nativeIntegration.ts b/src/services/destination/nativeIntegration.ts index 0bc9308fcd..8fd0f09857 100644 --- a/src/services/destination/nativeIntegration.ts +++ b/src/services/destination/nativeIntegration.ts @@ -221,6 +221,7 @@ export class NativeIntegrationDestinationService implements DestinationService { destinationResponse: processedProxyResponse, rudderJobMetadata, destType: destinationType, + destinationRequest: deliveryRequest, }; let responseProxy = networkHandler.responseHandler(responseParams); // Adaption Logic for V0 to V1 diff --git a/src/services/userTransform.ts b/src/services/userTransform.ts index 18c47ddc83..62980a935a 100644 --- a/src/services/userTransform.ts +++ b/src/services/userTransform.ts @@ -173,7 +173,17 @@ export class UserTransformService { ...getTransformationMetadata(eventsToProcess[0]?.metadata), }); - stats.histogram('user_transform_batch_size', requestSize, { + stats.timing('user_transform_batch_size', requestSize, { + ...metaTags, + ...getTransformationMetadata(eventsToProcess[0]?.metadata), + }); + + stats.timingSummary('user_transform_request_latency_summary', userFuncStartTime, { + ...metaTags, + ...getTransformationMetadata(eventsToProcess[0]?.metadata), + }); + + stats.timingSummary('user_transform_batch_size_summary', requestSize, { ...metaTags, ...getTransformationMetadata(eventsToProcess[0]?.metadata), }); diff --git a/src/types/index.ts 
b/src/types/index.ts index 68dfe3870d..b3d45c062e 100644 --- a/src/types/index.ts +++ b/src/types/index.ts @@ -131,6 +131,8 @@ type Destination = { WorkspaceID: string; Transformations: UserTransformationInput[]; RevisionID?: string; + IsProcessorEnabled?: boolean; + IsConnectionEnabled?: boolean; }; type UserTransformationLibrary = { diff --git a/src/util/customTransformer-faas.js b/src/util/customTransformer-faas.js index 2c0bbfd8c0..9ac9804097 100644 --- a/src/util/customTransformer-faas.js +++ b/src/util/customTransformer-faas.js @@ -11,9 +11,10 @@ const { } = require('./openfaas'); const { getLibraryCodeV1 } = require('./customTransforrmationsStore-v1'); +const HASH_SECRET = process.env.OPENFAAS_FN_HASH_SECRET || ''; const libVersionIdsCache = new NodeCache(); -function generateFunctionName(userTransformation, libraryVersionIds, testMode) { +function generateFunctionName(userTransformation, libraryVersionIds, testMode, hashSecret = '') { if (userTransformation.versionId === FAAS_AST_VID) return FAAS_AST_FN_NAME; if (testMode) { @@ -21,10 +22,15 @@ function generateFunctionName(userTransformation, libraryVersionIds, testMode) { return funcName.substring(0, 63).toLowerCase(); } - const ids = [userTransformation.workspaceId, userTransformation.versionId].concat( + let ids = [userTransformation.workspaceId, userTransformation.versionId].concat( (libraryVersionIds || []).sort(), ); + if (hashSecret !== '') { + ids = ids.concat([hashSecret]); + } + + // FIXME: Why the id's are sorted ?! const hash = crypto.createHash('md5').update(`${ids}`).digest('hex'); return `fn-${userTransformation.workspaceId}-${hash}`.substring(0, 63).toLowerCase(); } @@ -90,7 +96,13 @@ async function setOpenFaasUserTransform( testMode, }; const functionName = - pregeneratedFnName || generateFunctionName(userTransformation, libraryVersionIds, testMode); + pregeneratedFnName || + generateFunctionName( + userTransformation, + libraryVersionIds, + testMode, + process.env.OPENFAAS_FN_HASH_SECRET, + ); const setupTime = new Date(); await setupFaasFunction( @@ -130,7 +142,13 @@ async function runOpenFaasUserTransform( const trMetadata = events[0].metadata ? 
getTransformationMetadata(events[0].metadata) : {}; // check and deploy faas function if not exists - const functionName = generateFunctionName(userTransformation, libraryVersionIds, testMode); + const functionName = generateFunctionName( + userTransformation, + libraryVersionIds, + testMode, + process.env.OPENFAAS_FN_HASH_SECRET, + ); + if (testMode) { await setOpenFaasUserTransform( userTransformation, diff --git a/src/util/customTransformer-v1.js b/src/util/customTransformer-v1.js index 7e854a3714..e9877a614d 100644 --- a/src/util/customTransformer-v1.js +++ b/src/util/customTransformer-v1.js @@ -93,6 +93,7 @@ async function userTransformHandlerV1( }; stats.counter('user_transform_function_input_events', events.length, tags); stats.timing('user_transform_function_latency', invokeTime, tags); + stats.timingSummary('user_transform_function_latency_summary', invokeTime, tags); } return { transformedEvents, logs }; diff --git a/src/util/customTransformer.js b/src/util/customTransformer.js index a87c12dd6e..37364ef5d0 100644 --- a/src/util/customTransformer.js +++ b/src/util/customTransformer.js @@ -260,6 +260,7 @@ async function runUserTransform( stats.counter('user_transform_function_input_events', events.length, tags); stats.timing('user_transform_function_latency', invokeTime, tags); + stats.timingSummary('user_transform_function_latency_summary', invokeTime, tags); } return { diff --git a/src/util/customTransforrmationsStore-v1.js b/src/util/customTransforrmationsStore-v1.js index 3263049b6f..6e2d799f3a 100644 --- a/src/util/customTransforrmationsStore-v1.js +++ b/src/util/customTransforrmationsStore-v1.js @@ -31,6 +31,7 @@ async function getTransformationCodeV1(versionId) { responseStatusHandler(response.status, 'Transformation', versionId, url); stats.increment('get_transformation_code', { success: 'true', ...tags }); stats.timing('get_transformation_code_time', startTime, tags); + stats.timingSummary('get_transformation_code_time_summary', startTime, tags); const myJson = await response.json(); transformationCache[versionId] = myJson; return myJson; @@ -56,6 +57,7 @@ async function getLibraryCodeV1(versionId) { responseStatusHandler(response.status, 'Transformation Library', versionId, url); stats.increment('get_libraries_code', { success: 'true', ...tags }); stats.timing('get_libraries_code_time', startTime, tags); + stats.timingSummary('get_libraries_code_time_summary', startTime, tags); const myJson = await response.json(); libraryCache[versionId] = myJson; return myJson; @@ -83,6 +85,7 @@ async function getRudderLibByImportName(importName) { responseStatusHandler(response.status, 'Rudder Library', importName, url); stats.increment('get_libraries_code', { success: 'true', ...tags }); stats.timing('get_libraries_code_time', startTime, tags); + stats.timingSummary('get_libraries_code_time_summary', startTime, tags); const myJson = await response.json(); rudderLibraryCache[importName] = myJson; return myJson; diff --git a/src/util/customTransforrmationsStore.js b/src/util/customTransforrmationsStore.js index 08d417c07c..2c5a7b446d 100644 --- a/src/util/customTransforrmationsStore.js +++ b/src/util/customTransforrmationsStore.js @@ -24,6 +24,7 @@ async function getTransformationCode(versionId) { responseStatusHandler(response.status, 'Transformation', versionId, url); stats.increment('get_transformation_code', { versionId, success: 'true' }); stats.timing('get_transformation_code_time', startTime, { versionId }); + stats.timingSummary('get_transformation_code_time_summary', startTime, 
{ versionId }); const myJson = await response.json(); myCache.set(versionId, myJson); return myJson; diff --git a/src/util/openfaas/faasApi.js b/src/util/openfaas/faasApi.js index f8f830f6e4..b932b70032 100644 --- a/src/util/openfaas/faasApi.js +++ b/src/util/openfaas/faasApi.js @@ -1,6 +1,8 @@ const axios = require('axios'); const { RespStatusError, RetryRequestError } = require('../utils'); +const logger = require('../../logger'); + const OPENFAAS_GATEWAY_URL = process.env.OPENFAAS_GATEWAY_URL || 'http://localhost:8080'; const OPENFAAS_GATEWAY_USERNAME = process.env.OPENFAAS_GATEWAY_USERNAME || ''; const OPENFAAS_GATEWAY_PASSWORD = process.env.OPENFAAS_GATEWAY_PASSWORD || ''; @@ -12,7 +14,7 @@ const basicAuth = { const parseAxiosError = (error) => { if (error.response) { - const status = error.response.status || 400; + const status = error.response.status || 500; const errorData = error.response?.data; const message = (errorData && (errorData.message || errorData.error || errorData)) || error.message; @@ -61,6 +63,8 @@ const invokeFunction = async (functionName, payload) => }); const checkFunctionHealth = async (functionName) => { + logger.debug(`Checking function health: ${functionName}`); + return new Promise((resolve, reject) => { const url = `${OPENFAAS_GATEWAY_URL}/function/${functionName}`; axios @@ -76,8 +80,10 @@ const checkFunctionHealth = async (functionName) => { }); }; -const deployFunction = async (payload) => - new Promise((resolve, reject) => { +const deployFunction = async (payload) => { + logger.debug(`Deploying function: ${payload?.name}`); + + return new Promise((resolve, reject) => { const url = `${OPENFAAS_GATEWAY_URL}/system/functions`; axios .post(url, payload, { auth: basicAuth }) @@ -86,6 +92,21 @@ const deployFunction = async (payload) => reject(parseAxiosError(err)); }); }); +}; + +const updateFunction = async (fnName, payload) => { + logger.debug(`Updating function: ${fnName}`); + + return new Promise((resolve, reject) => { + const url = `${OPENFAAS_GATEWAY_URL}/system/functions`; + axios + .put(url, payload, { auth: basicAuth }) + .then((resp) => resolve(resp.data)) + .catch((err) => { + reject(parseAxiosError(err)); + }); + }); +}; module.exports = { deleteFunction, @@ -94,4 +115,5 @@ module.exports = { getFunctionList, invokeFunction, checkFunctionHealth, + updateFunction, }; diff --git a/src/util/openfaas/index.js b/src/util/openfaas/index.js index 7a1fce3cfa..c0369deb81 100644 --- a/src/util/openfaas/index.js +++ b/src/util/openfaas/index.js @@ -4,6 +4,7 @@ const { deployFunction, invokeFunction, checkFunctionHealth, + updateFunction, } = require('./faasApi'); const logger = require('../../logger'); const { RetryRequestError, RespStatusError } = require('../utils'); @@ -33,6 +34,7 @@ const FAAS_AST_FN_NAME = 'fn-ast'; const CUSTOM_NETWORK_POLICY_WORKSPACE_IDS = process.env.CUSTOM_NETWORK_POLICY_WORKSPACE_IDS || ''; const customNetworkPolicyWorkspaceIds = CUSTOM_NETWORK_POLICY_WORKSPACE_IDS.split(','); const CUSTOMER_TIER = process.env.CUSTOMER_TIER || 'shared'; +const DISABLE_RECONCILE_FN = process.env.DISABLE_RECONCILE_FN == 'true' || false; // Initialise node cache const functionListCache = new NodeCache(); @@ -67,6 +69,8 @@ const awaitFunctionReadiness = async ( maxWaitInMs = 22000, waitBetweenIntervalsInMs = 250, ) => { + logger.debug(`Awaiting function readiness: ${functionName}`); + const executionPromise = new Promise(async (resolve) => { try { await callWithRetry( @@ -121,7 +125,7 @@ const invalidateFnCache = () => { 
functionListCache.set(FUNC_LIST_KEY, []); }; -const deployFaasFunction = async ( +const updateFaasFunction = async ( functionName, code, versionId, @@ -130,73 +134,50 @@ trMetadata = {}, ) => { try { - logger.debug(`[Faas] Deploying a faas function: ${functionName}`); - let envProcess = 'python index.py'; - - const lvidsString = libraryVersionIDs.join(','); + logger.debug(`Updating faas fn: ${functionName}`); - if (!testMode) { - envProcess = `${envProcess} --vid ${versionId} --config-backend-url ${CONFIG_BACKEND_URL} --lvids "${lvidsString}"`; - } else { - envProcess = `${envProcess} --code "${code}" --config-backend-url ${CONFIG_BACKEND_URL} --lvids "${lvidsString}"`; - } - - const envVars = {}; - if (FAAS_ENABLE_WATCHDOG_ENV_VARS.trim().toLowerCase() === 'true') { - envVars.max_inflight = FAAS_MAX_INFLIGHT; - envVars.exec_timeout = FAAS_EXEC_TIMEOUT; - } - if (GEOLOCATION_URL) { - envVars.geolocation_url = GEOLOCATION_URL; - } - // labels - const labels = { - 'openfaas-fn': 'true', - 'parent-component': 'openfaas', - 'com.openfaas.scale.max': FAAS_MAX_PODS_IN_TEXT, - 'com.openfaas.scale.min': FAAS_MIN_PODS_IN_TEXT, - 'com.openfaas.scale.zero': FAAS_SCALE_ZERO, - 'com.openfaas.scale.zero-duration': FAAS_SCALE_ZERO_DURATION, - 'com.openfaas.scale.target': FAAS_SCALE_TARGET, - 'com.openfaas.scale.target-proportion': FAAS_SCALE_TARGET_PROPORTION, - 'com.openfaas.scale.type': FAAS_SCALE_TYPE, - transformationId: trMetadata.transformationId, - workspaceId: trMetadata.workspaceId, - team: 'data-management', - service: 'openfaas-fn', - customer: 'shared', - 'customer-tier': CUSTOMER_TIER, - }; - if ( - trMetadata.workspaceId && - customNetworkPolicyWorkspaceIds.includes(trMetadata.workspaceId) - ) { - labels['custom-network-policy'] = 'true'; + const payload = buildOpenfaasFn( + functionName, + code, + versionId, + libraryVersionIDs, + testMode, + trMetadata, + ); + await updateFunction(functionName, payload); + // wait for function to be ready and then set it in cache + await awaitFunctionReadiness(functionName); + setFunctionInCache(functionName); + } catch (error) { + // 404 is the status code returned from openfaas community edition + // when the function doesn't exist, so we can safely ignore this error + // and let the function be created in the next step.
+ if (error.statusCode !== 404) { + throw error; } + } +}; - // TODO: investigate and add more required labels and annotations - const payload = { - service: functionName, - name: functionName, - image: FAAS_BASE_IMG, - envProcess, - envVars, - labels, - annotations: { - 'prometheus.io.scrape': 'true', - }, - limits: { - memory: FAAS_LIMITS_MEMORY, - cpu: FAAS_LIMITS_CPU, - }, - requests: { - memory: FAAS_REQUESTS_MEMORY, - cpu: FAAS_REQUESTS_CPU, - }, - }; +const deployFaasFunction = async ( + functionName, + code, + versionId, + libraryVersionIDs, + testMode, + trMetadata = {}, +) => { + try { + logger.debug(`Deploying faas fn: ${functionName}`); + const payload = buildOpenfaasFn( + functionName, + code, + versionId, + libraryVersionIDs, + testMode, + trMetadata, + ); await deployFunction(payload); - logger.debug('[Faas] Deployed a faas function'); } catch (error) { logger.error(`[Faas] Error while deploying ${functionName}: ${error.message}`); // To handle concurrent create requests, @@ -246,6 +227,95 @@ async function setupFaasFunction( } } +// reconcileFn runs everytime the service boot's up +// trying to update the functions which are not in cache to the +// latest label and envVars +const reconcileFn = async (name, versionId, libraryVersionIDs, trMetadata) => { + if (DISABLE_RECONCILE_FN) { + return; + } + + logger.debug(`Reconciling faas function: ${name}`); + try { + if (isFunctionDeployed(name)) { + return; + } + await updateFaasFunction(name, null, versionId, libraryVersionIDs, false, trMetadata); + } catch (error) { + logger.error( + `unexpected error occurred when reconciling the function ${name}: ${error.message}`, + ); + throw error; + } +}; + +// buildOpenfaasFn is helper function to build openfaas fn CRUD payload +function buildOpenfaasFn(name, code, versionId, libraryVersionIDs, testMode, trMetadata = {}) { + logger.debug(`Building faas fn: ${name}`); + + let envProcess = 'python index.py'; + const lvidsString = libraryVersionIDs.join(','); + + if (!testMode) { + envProcess = `${envProcess} --vid ${versionId} --config-backend-url ${CONFIG_BACKEND_URL} --lvids "${lvidsString}"`; + } else { + envProcess = `${envProcess} --code "${code}" --config-backend-url ${CONFIG_BACKEND_URL} --lvids "${lvidsString}"`; + } + + const envVars = {}; + + if (FAAS_ENABLE_WATCHDOG_ENV_VARS.trim().toLowerCase() === 'true') { + envVars.max_inflight = FAAS_MAX_INFLIGHT; + envVars.exec_timeout = FAAS_EXEC_TIMEOUT; + } + + if (GEOLOCATION_URL) { + envVars.geolocation_url = GEOLOCATION_URL; + } + + const labels = { + 'openfaas-fn': 'true', + 'parent-component': 'openfaas', + 'com.openfaas.scale.max': FAAS_MAX_PODS_IN_TEXT, + 'com.openfaas.scale.min': FAAS_MIN_PODS_IN_TEXT, + 'com.openfaas.scale.zero': FAAS_SCALE_ZERO, + 'com.openfaas.scale.zero-duration': FAAS_SCALE_ZERO_DURATION, + 'com.openfaas.scale.target': FAAS_SCALE_TARGET, + 'com.openfaas.scale.target-proportion': FAAS_SCALE_TARGET_PROPORTION, + 'com.openfaas.scale.type': FAAS_SCALE_TYPE, + transformationId: trMetadata.transformationId, + workspaceId: trMetadata.workspaceId, + team: 'data-management', + service: 'openfaas-fn', + customer: 'shared', + 'customer-tier': CUSTOMER_TIER, + }; + + if (trMetadata.workspaceId && customNetworkPolicyWorkspaceIds.includes(trMetadata.workspaceId)) { + labels['custom-network-policy'] = 'true'; + } + + return { + service: name, + name: name, + image: FAAS_BASE_IMG, + envProcess, + envVars, + labels, + annotations: { + 'prometheus.io.scrape': 'true', + }, + limits: { + memory: FAAS_LIMITS_MEMORY, + cpu: 
FAAS_LIMITS_CPU, + }, + requests: { + memory: FAAS_REQUESTS_MEMORY, + cpu: FAAS_REQUESTS_CPU, + }, + }; +} + const executeFaasFunction = async ( name, events, @@ -260,7 +330,11 @@ const executeFaasFunction = async ( let errorRaised; try { - if (testMode) await awaitFunctionReadiness(name); + if (testMode) { + await awaitFunctionReadiness(name); + } else { + await reconcileFn(name, versionId, libraryVersionIDs, trMetadata); + } return await invokeFunction(name, events); } catch (error) { logger.error(`Error while invoking ${name}: ${error.message}`); @@ -268,6 +342,7 @@ const executeFaasFunction = async ( if (error.statusCode === 404 && error.message.includes(`error finding function ${name}`)) { removeFunctionFromCache(name); + await setupFaasFunction(name, null, versionId, libraryVersionIDs, testMode, trMetadata); throw new RetryRequestError(`${name} not found`); } @@ -305,6 +380,7 @@ const executeFaasFunction = async ( stats.counter('user_transform_function_input_events', events.length, tags); stats.timing('user_transform_function_latency', startTime, tags); + stats.timingSummary('user_transform_function_latency_summary', startTime, tags); } }; @@ -313,6 +389,8 @@ module.exports = { executeFaasFunction, setupFaasFunction, invalidateFnCache, + buildOpenfaasFn, FAAS_AST_VID, FAAS_AST_FN_NAME, + setFunctionInCache, }; diff --git a/src/util/prometheus.js b/src/util/prometheus.js index 882dff9e75..78d32c9cb9 100644 --- a/src/util/prometheus.js +++ b/src/util/prometheus.js @@ -11,7 +11,7 @@ function appendPrefix(name) { } class Prometheus { - constructor() { + constructor(enableSummaryMetrics = true) { this.prometheusRegistry = new prometheusClient.Registry(); this.prometheusRegistry.setDefaultLabels(defaultLabels); prometheusClient.collectDefaultMetrics({ @@ -21,7 +21,7 @@ class Prometheus { prometheusClient.AggregatorRegistry.setRegistries(this.prometheusRegistry); this.aggregatorRegistry = new prometheusClient.AggregatorRegistry(); - this.createMetrics(); + this.createMetrics(enableSummaryMetrics); } async metricsController(ctx) { @@ -56,11 +56,22 @@ class Prometheus { return gauge; } - newSummaryStat(name, help, labelNames) { + newSummaryStat( + name, + help, + labelNames, + percentiles = [0.5, 0.9, 0.99], + maxAgeSeconds = 300, + ageBuckets = 5, + ) { + // we enable a 5 minute sliding window and calculate the 50th, 90th, and 99th percentiles by default const summary = new prometheusClient.Summary({ name, help, labelNames, + percentiles, + maxAgeSeconds, + ageBuckets, }); this.prometheusRegistry.registerMetric(summary); return summary; @@ -117,6 +128,21 @@ class Prometheus { } } + timingSummary(name, start, tags = {}) { + try { + let metric = this.prometheusRegistry.getSingleMetric(appendPrefix(name)); + if (!metric) { + logger.warn( + `Prometheus: summary metric ${name} not found in the registry. 
Creating a new one`, + ); + metric = this.newSummaryStat(name, name, Object.keys(tags)); + } + metric.observe(tags, (new Date() - start) / 1000); + } catch (e) { + logger.error(`Prometheus: Summary metric ${name} failed with error ${e}`); + } + } + histogram(name, value, tags = {}) { try { let metric = this.prometheusRegistry.getSingleMetric(appendPrefix(name)); @@ -166,7 +192,7 @@ class Prometheus { } } - createMetrics() { + createMetrics(enableSummaryMetrics) { const metrics = [ // Counters { @@ -587,12 +613,6 @@ class Prometheus { type: 'gauge', labelNames: ['destination_id'], }, - { - name: 'mixpanel_batch_track_pack_size', - help: 'mixpanel_batch_track_pack_size', - type: 'gauge', - labelNames: ['destination_id'], - }, { name: 'mixpanel_batch_import_pack_size', help: 'mixpanel_batch_import_pack_size', @@ -698,6 +718,18 @@ class Prometheus { 'k8_namespace', ], }, + { + name: 'user_transform_request_latency_summary', + help: 'user_transform_request_latency_summary', + type: 'summary', + labelNames: [ + 'workspaceId', + 'transformationId', + 'sourceType', + 'destinationType', + 'k8_namespace', + ], + }, { name: 'user_transform_batch_size', help: 'user_transform_batch_size', @@ -714,6 +746,18 @@ class Prometheus { 524288000, ], // 1KB, 100KB, 0.5MB, 1MB, 10MB, 20MB, 50MB, 100MB, 200MB, 500MB }, + { + name: 'user_transform_batch_size_summary', + help: 'user_transform_batch_size_summary', + type: 'summary', + labelNames: [ + 'workspaceId', + 'transformationId', + 'sourceType', + 'destinationType', + 'k8_namespace', + ], + }, { name: 'source_transform_request_latency', help: 'source_transform_request_latency', @@ -770,12 +814,24 @@ class Prometheus { type: 'histogram', labelNames: ['versionId', 'version'], }, + { + name: 'get_transformation_code_time_summary', + help: 'get_transformation_code_time_summary', + type: 'summary', + labelNames: ['versionId', 'version'], + }, { name: 'get_libraries_code_time', help: 'get_libraries_code_time', type: 'histogram', labelNames: ['libraryVersionId', 'versionId', 'type', 'version'], }, + { + name: 'get_libraries_code_time_summary', + help: 'get_libraries_code_time_summary', + type: 'summary', + labelNames: ['libraryVersionId', 'versionId', 'type', 'version'], + }, { name: 'isolate_cpu_time', help: 'isolate_cpu_time', @@ -1027,6 +1083,22 @@ class Prometheus { 'workspaceId', ], }, + { + name: 'user_transform_function_latency_summary', + help: 'user_transform_function_latency_summary', + type: 'summary', + labelNames: [ + 'identifier', + 'testMode', + 'sourceType', + 'destinationType', + 'k8_namespace', + 'errored', + 'statusCode', + 'transformationId', + 'workspaceId', + ], + }, ]; metrics.forEach((metric) => { @@ -1042,6 +1114,17 @@ class Prometheus { metric.labelNames, metric.buckets, ); + } else if (metric.type === 'summary') { + if (enableSummaryMetrics) { + this.newSummaryStat( + appendPrefix(metric.name), + metric.help, + metric.labelNames, + metric.percentiles, + metric.maxAge, + metric.ageBuckets, + ); + } } else { logger.error( `Prometheus: Metric creation failed. Name: ${metric.name}. Invalid type: ${metric.type}`, diff --git a/src/util/stats.js b/src/util/stats.js index 9a32fd1de3..0aa13fc85c 100644 --- a/src/util/stats.js +++ b/src/util/stats.js @@ -4,6 +4,8 @@ const logger = require('../logger'); const enableStats = process.env.ENABLE_STATS !== 'false'; const statsClientType = process.env.STATS_CLIENT || 'statsd'; +// summary metrics are enabled by default. To disable set ENABLE_SUMMARY_METRICS='false'. 
+const enableSummaryMetrics = process.env.ENABLE_SUMMARY_METRICS !== 'false'; let statsClient; function init() { @@ -19,7 +21,7 @@ function init() { case 'prometheus': logger.info('setting up prometheus client'); - statsClient = new prometheus.Prometheus(); + statsClient = new prometheus.Prometheus(enableSummaryMetrics); break; default: @@ -38,6 +40,15 @@ const timing = (name, start, tags = {}) => { statsClient.timing(name, start, tags); }; +// timingSummary is used to record observations for a summary metric +const timingSummary = (name, start, tags = {}) => { + if (!enableStats || !statsClient || !enableSummaryMetrics) { + return; + } + + statsClient.timingSummary(name, start, tags); +}; + const increment = (name, tags = {}) => { if (!enableStats || !statsClient) { return; @@ -88,4 +99,13 @@ async function metricsController(ctx) { init(); -module.exports = { init, timing, increment, counter, gauge, histogram, metricsController }; +module.exports = { + init, + timing, + timingSummary, + increment, + counter, + gauge, + histogram, + metricsController, +}; diff --git a/src/util/statsd.js b/src/util/statsd.js index a32a6f6f30..7613de7975 100644 --- a/src/util/statsd.js +++ b/src/util/statsd.js @@ -21,6 +21,11 @@ class Statsd { this.statsdClient.timing(name, start, tags); } + // timingSummary is just a wrapper around timing for statsd.For prometheus, we will have to implement a different function. + timingSummary(name, start, tags = {}) { + this.statsdClient.timing(name, start, tags); + } + increment(name, tags = {}) { this.statsdClient.increment(name, 1, tags); } diff --git a/src/v0/destinations/af/transform.js b/src/v0/destinations/af/transform.js index d6c41937a1..72ba47a227 100644 --- a/src/v0/destinations/af/transform.js +++ b/src/v0/destinations/af/transform.js @@ -113,13 +113,9 @@ function getEventValueForUnIdentifiedTrackEvent(message) { return { eventValue }; } -function getEventValueMapFromMappingJson( - message, - mappingJson, - isMultiSupport, - addPropertiesAtRoot, -) { +function getEventValueMapFromMappingJson(message, mappingJson, isMultiSupport, config) { let eventValue = {}; + const { addPropertiesAtRoot, afCurrencyAtRoot } = config; if (addPropertiesAtRoot) { eventValue = message.properties; @@ -152,6 +148,9 @@ function getEventValueMapFromMappingJson( af_price: prices, }; } + if (afCurrencyAtRoot) { + eventValue.af_currency = message.properties.currency; + } eventValue = removeUndefinedValues(eventValue); if (Object.keys(eventValue).length > 0) { eventValue = JSON.stringify(eventValue); @@ -171,7 +170,7 @@ function processNonTrackEvents(message, eventName) { return payload; } -function processEventTypeTrack(message, addPropertiesAtRoot) { +function processEventTypeTrack(message, config) { let isMultiSupport = true; const evType = message.event && message.event.toLowerCase(); let category = ConfigCategory.DEFAULT; @@ -195,7 +194,7 @@ function processEventTypeTrack(message, addPropertiesAtRoot) { message, mappingConfig[category.name], isMultiSupport, - addPropertiesAtRoot, + config, ); payload.eventName = message.event; payload.eventCurrency = message?.properties?.currency; @@ -208,7 +207,7 @@ function processSingleMessage(message, destination) { let payload; switch (messageType) { case EventType.TRACK: { - payload = processEventTypeTrack(message, destination.Config.addPropertiesAtRoot); + payload = processEventTypeTrack(message, destination.Config); break; } case EventType.SCREEN: { diff --git a/src/v0/destinations/algolia/config.js b/src/v0/destinations/algolia/config.js 
index 11b4ec99f2..4e20294dd2 100644 --- a/src/v0/destinations/algolia/config.js +++ b/src/v0/destinations/algolia/config.js @@ -5,6 +5,7 @@ const CONFIG_CATEGORIES = { TRACK: { type: 'track', name: 'AlgoliaTrack' }, }; const EVENT_TYPES = ['click', 'view', 'conversion']; +const ALLOWED_EVENT_SUBTYPES = ['addToCart', 'purchase']; const MAX_BATCH_SIZE = 1000; const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname); module.exports = { @@ -12,4 +13,5 @@ module.exports = { MAX_BATCH_SIZE, EVENT_TYPES, trackMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.TRACK.name], + ALLOWED_EVENT_SUBTYPES, }; diff --git a/src/v0/destinations/algolia/data/AlgoliaTrack.json b/src/v0/destinations/algolia/data/AlgoliaTrack.json index bdc3449147..41f43af9cb 100644 --- a/src/v0/destinations/algolia/data/AlgoliaTrack.json +++ b/src/v0/destinations/algolia/data/AlgoliaTrack.json @@ -34,5 +34,15 @@ "destKey": "positions", "sourceKeys": "properties.positions", "required": false + }, + { + "destKey": "value", + "sourceKeys": "properties.value", + "required": false + }, + { + "destKey": "currency", + "sourceKeys": "properties.currency", + "required": false } ] diff --git a/src/v0/destinations/algolia/transform.js b/src/v0/destinations/algolia/transform.js index 8e9cd57e8b..33ae6f2101 100644 --- a/src/v0/destinations/algolia/transform.js +++ b/src/v0/destinations/algolia/transform.js @@ -16,7 +16,7 @@ const { handleRtTfSingleEventError, } = require('../../util/index'); -const { ENDPOINT, MAX_BATCH_SIZE, trackMapping } = require('./config'); +const { ENDPOINT, MAX_BATCH_SIZE, trackMapping, ALLOWED_EVENT_SUBTYPES } = require('./config'); const { genericpayloadValidator, @@ -38,6 +38,12 @@ const trackResponseBuilder = (message, { Config }) => { const eventMapping = eventTypeMapping(Config); payload.eventName = event; payload.eventType = getValueFromMessage(message, 'properties.eventType') || eventMapping[event]; + if ( + payload.eventType === 'conversion' && + ALLOWED_EVENT_SUBTYPES.includes(getValueFromMessage(message, 'properties.eventSubtype')) + ) { + payload.eventSubtype = getValueFromMessage(message, 'properties.eventSubtype'); + } if (!payload.eventType) { throw new InstrumentationError('eventType is mandatory for track call'); @@ -47,9 +53,13 @@ const trackResponseBuilder = (message, { Config }) => { if (event === 'product list viewed' || event === 'order completed') { const products = getValueFromMessage(message, 'properties.products'); if (products) { - const { objectList, positionList } = createObjectArray(products, payload.eventType); + const { objectList, positionList, objectData } = createObjectArray( + products, + payload.eventType, + ); const objLen = objectList.length; const posLen = positionList.length; + const objDataLen = objectData.length; if (objLen > 0) { payload.objectIDs = objectList; payload.objectIDs.splice(20); @@ -58,10 +68,20 @@ const trackResponseBuilder = (message, { Config }) => { payload.positions = positionList; payload.positions.splice(20); } + + if (objDataLen > 0) { + payload.objectData = objectData; + } // making size of object list and position list equal if (posLen > 0 && objLen > 0 && posLen !== objLen) { throw new InstrumentationError('length of objectId and position should be equal'); } + + if (objDataLen > 0 && objLen > 0 && objDataLen !== objLen) { + throw new InstrumentationError( + 'length of objectId and properties.products array should be equal', + ); + } } } // for all events either filter or objectID should be there diff --git a/src/v0/destinations/algolia/util.js 
b/src/v0/destinations/algolia/util.js index eddb4dc16d..b10097bbee 100644 --- a/src/v0/destinations/algolia/util.js +++ b/src/v0/destinations/algolia/util.js @@ -1,4 +1,8 @@ -const { InstrumentationError } = require('@rudderstack/integrations-lib'); +const { + InstrumentationError, + isDefined, + removeUndefinedAndNullValues, +} = require('@rudderstack/integrations-lib'); const logger = require('../../../logger'); const { EVENT_TYPES } = require('./config'); @@ -66,6 +70,8 @@ const genericpayloadValidator = (payload) => { const createObjectArray = (objects, eventType) => { const objectList = []; const positionList = []; + // eslint-disable-next-line sonarjs/no-unused-collection + const objectData = []; if (objects.length > 0) { objects.forEach((object, index) => { if (object.objectId) { @@ -80,13 +86,22 @@ const createObjectArray = (objects, eventType) => { } } else { objectList.push(object.objectId); + if (eventType === 'conversion') { + const singleObjData = { + queryID: isDefined(object.queryID) ? `${object.queryID}` : null, + price: isDefined(object.price) ? `${object.price}` : null, + quantity: object.quantity, + discount: isDefined(object.discount) ? `${object.discount}` : null, + }; + objectData.push(removeUndefinedAndNullValues(singleObjData)); + } } } else { logger.error(`object at index ${index} dropped. objectId is required.`); } }); } - return { objectList, positionList }; + return { objectList, positionList, objectData }; }; const clickPayloadValidator = (payload) => { @@ -128,9 +143,28 @@ const clickPayloadValidator = (payload) => { return updatedPayload; }; +// ref : https://www.algolia.com/doc/rest-api/insights/#method-param-objectdata-2:~:text=currency-,%23,currency%20as%20ISO%2D4217%20currency%20code%2C%20such%20as%20USD%20or%20EUR.,-ObjectData +function validatePayload(payload) { + if (payload.objectData && Array.isArray(payload.objectData)) { + const hasRelevantFields = payload.objectData.some( + (obj) => + obj.hasOwnProperty('price') || + obj.hasOwnProperty('quantity') || + obj.hasOwnProperty('discount'), + ); + + if (hasRelevantFields && !payload.currency) { + throw new InstrumentationError( + 'Currency missing when objectData fields has price informations.', + ); + } + } +} + module.exports = { genericpayloadValidator, createObjectArray, eventTypeMapping, clickPayloadValidator, + validatePayload, }; diff --git a/src/v0/destinations/algolia/util.test.js b/src/v0/destinations/algolia/util.test.js new file mode 100644 index 0000000000..850d93f1c6 --- /dev/null +++ b/src/v0/destinations/algolia/util.test.js @@ -0,0 +1,58 @@ +const { InstrumentationError } = require('@rudderstack/integrations-lib'); +const { validatePayload } = require('./util'); + +describe('validatePayload', () => { + // When payload is valid and contains relevant fields and currency + it('should validate payload when it is valid and contains relevant fields and currency', () => { + const payload = { + objectData: [ + { price: 10, quantity: 2, discount: 0.1 }, + { price: 20, quantity: 1, discount: 0 }, + ], + currency: 'USD', + }; + + expect(() => validatePayload(payload)).not.toThrow(); + }); + + // When payload contains objects with missing relevant fields + it('should throw an error when payload contains objects with missing relevant fields', () => { + const payload = { + objectData: [ + { price: 10, quantity: 2 }, + { price: 20, discount: 0 }, + ], + }; + + expect(() => validatePayload(payload)).toThrow(InstrumentationError); + }); + + // When payload is valid and contains relevant fields but no 
currency + it('should throw an InstrumentationError when currency is missing', () => { + const payload = { + objectData: [ + { price: 10, quantity: 2, discount: 0.1 }, + { price: 20, quantity: 1, discount: 0 }, + ], + }; + + expect(() => validatePayload(payload)).toThrow(InstrumentationError); + }); + + // When payload is valid but does not contain relevant fields + it('should not throw an error when payload does not contain relevant fields', () => { + const payload = { + objectData: [{ position: 'Product A' }, { position: 'Product B' }], + currency: 'USD', + }; + + expect(() => validatePayload(payload)).not.toThrow(); + }); + + // When payload is empty + it('should not throw an error when payload is empty', () => { + const payload = {}; + + expect(() => validatePayload(payload)).not.toThrow(); + }); +}); diff --git a/src/v0/destinations/awin/transform.js b/src/v0/destinations/awin/transform.js index 0d7fd95c33..0e1e220548 100644 --- a/src/v0/destinations/awin/transform.js +++ b/src/v0/destinations/awin/transform.js @@ -2,10 +2,11 @@ const { InstrumentationError, ConfigurationError } = require('@rudderstack/integ const { BASE_URL, ConfigCategory, mappingConfig } = require('./config'); const { defaultRequestConfig, constructPayload, simpleProcessRouterDest } = require('../../util'); -const { getParams, trackProduct } = require('./utils'); +const { getParams, trackProduct, populateCustomTransactionProperties } = require('./utils'); +const { FilteredEventsError } = require('../../util/errorTypes'); const responseBuilder = (message, { Config }) => { - const { advertiserId, eventsToTrack } = Config; + const { advertiserId, eventsToTrack, customFieldMap } = Config; const { event, properties } = message; let finalParams = {}; @@ -22,15 +23,20 @@ const responseBuilder = (message, { Config }) => { if (eventsList.includes(event)) { params = getParams(payload.params, advertiserId); const productTrackObject = trackProduct(properties, advertiserId, params.parts); + const customTransactionProperties = populateCustomTransactionProperties( + properties, + customFieldMap, + ); finalParams = { ...params, ...productTrackObject, + ...customTransactionProperties, }; } else { - throw new InstrumentationError( - "Event is not present in 'Events to Track' list. Aborting message.", - 400, + throw new FilteredEventsError( + "Event is not present in 'Events to Track' list. 
Dropping the event.", + 298, ); } } diff --git a/src/v0/destinations/awin/utils.js b/src/v0/destinations/awin/utils.js index f0daea9b99..715fb5818d 100644 --- a/src/v0/destinations/awin/utils.js +++ b/src/v0/destinations/awin/utils.js @@ -1,3 +1,4 @@ +const { getHashFromArray } = require('@rudderstack/integrations-lib'); const lodash = require('lodash'); /** @@ -77,8 +78,26 @@ const trackProduct = (properties, advertiserId, commissionParts) => { return transformedProductInfoObj; }; +// ref: https://wiki.awin.com/index.php/Advertiser_Tracking_Guide/Product_Level_Tracking#PLT_Via_Conversion_Pixel +const populateCustomTransactionProperties = (properties, customFieldMap) => { + const customObject = {}; + const customPropertyPattern = '^\\s*p\\d+\\s*$'; + const regex = new RegExp(customPropertyPattern, 'i'); + const propertyMap = getHashFromArray(customFieldMap, 'from', 'to', false); + Object.entries(propertyMap).forEach(([rudderProperty, awinProperty]) => { + if (regex.test(awinProperty)) { + const fieldValue = properties[rudderProperty]; + if (fieldValue) { + customObject[awinProperty] = fieldValue; + } + } + }); + return customObject; +}; + module.exports = { getParams, trackProduct, buildProductPayloadString, + populateCustomTransactionProperties, }; diff --git a/src/v0/destinations/awin/utils.test.js b/src/v0/destinations/awin/utils.test.js index e60c07e96c..ca7d079b1b 100644 --- a/src/v0/destinations/awin/utils.test.js +++ b/src/v0/destinations/awin/utils.test.js @@ -1,4 +1,8 @@ -const { buildProductPayloadString, trackProduct } = require('./utils'); +const { + buildProductPayloadString, + trackProduct, + populateCustomTransactionProperties, +} = require('./utils'); describe('buildProductPayloadString', () => { // Should correctly build the payload string with all fields provided @@ -163,3 +167,28 @@ describe('trackProduct', () => { }); }); }); + +describe('populateCustomTransactionProperties', () => { + // The function should correctly map properties from the input object to the output object based on the customFieldMap. 
+ it('should correctly map properties from the input object to the output object based on the customFieldMap', () => { + const properties = { + rudderProperty1: 'value1', + rudderProperty2: 123, + rudderProperty3: 'value3', + rudderProperty4: 234, + }; + const customFieldMap = [ + { from: 'rudderProperty1', to: 'p1' }, + { from: 'rudderProperty2', to: 'p2' }, + { from: 'rudderProperty4', to: 'anotherp2' }, + ]; + const expectedOutput = { + p1: 'value1', + p2: 123, + }; + + const result = populateCustomTransactionProperties(properties, customFieldMap); + + expect(result).toEqual(expectedOutput); + }); +}); diff --git a/src/v0/destinations/braze/braze.util.test.js b/src/v0/destinations/braze/braze.util.test.js index 7b6a93d359..cc50ae921e 100644 --- a/src/v0/destinations/braze/braze.util.test.js +++ b/src/v0/destinations/braze/braze.util.test.js @@ -739,7 +739,7 @@ describe('dedup utility tests', () => { }); }); - test('returns null if all keys are in BRAZE_NON_BILLABLE_ATTRIBUTES', () => { + test('returns only non-billable attribute if there is key of BRAZE_NON_BILLABLE_ATTRIBUTES', () => { const userData = { external_id: '123', country: 'US', @@ -757,7 +757,7 @@ describe('dedup utility tests', () => { }; store.set('123', storeData); const result = BrazeDedupUtility.deduplicate(userData, store); - expect(result).toBeNull(); + expect(result).toEqual({ country: 'US', external_id: '123', language: 'en' }); }); test('returns null if all keys have $add, $update, or $remove properties', () => { diff --git a/src/v0/destinations/braze/util.js b/src/v0/destinations/braze/util.js index 5f1f1e6205..ce83ebc244 100644 --- a/src/v0/destinations/braze/util.js +++ b/src/v0/destinations/braze/util.js @@ -26,7 +26,7 @@ const { const { JSON_MIME_TYPE, HTTP_STATUS_CODES } = require('../../util/constant'); const { isObject } = require('../../util'); const { removeUndefinedValues, getIntegrationsObj } = require('../../util'); -const { InstrumentationError } = require('@rudderstack/integrations-lib'); +const { InstrumentationError, isDefined } = require('@rudderstack/integrations-lib'); const getEndpointFromConfig = (destination) => { // Init -- mostly for test cases @@ -284,12 +284,17 @@ const BrazeDedupUtility = { return true; }); - if (keys.length === 0) { - return null; + if (keys.length > 0) { + keys.forEach((key) => { + if (!_.isEqual(userData[key], storedUserData[key])) { + deduplicatedUserData[key] = userData[key]; + } + }); } - keys.forEach((key) => { - if (!_.isEqual(userData[key], storedUserData[key])) { + // add non billable attributes back to the deduplicated user object + BRAZE_NON_BILLABLE_ATTRIBUTES.forEach((key) => { + if (isDefined(userData[key])) { deduplicatedUserData[key] = userData[key]; } }); @@ -305,13 +310,6 @@ const BrazeDedupUtility = { const identifier = external_id || user_alias?.alias_name; store.set(identifier, { ...storedUserData, ...deduplicatedUserData }); - // add non billable attributes back to the deduplicated user object - BRAZE_NON_BILLABLE_ATTRIBUTES.forEach((key) => { - if (isDefinedAndNotNull(userData[key])) { - deduplicatedUserData[key] = userData[key]; - } - }); - return removeUndefinedValues(deduplicatedUserData); }, }; diff --git a/src/v0/destinations/delighted/util.js b/src/v0/destinations/delighted/util.js index c690bf5f5c..53f416b48d 100644 --- a/src/v0/destinations/delighted/util.js +++ b/src/v0/destinations/delighted/util.js @@ -1,14 +1,10 @@ -const { - NetworkInstrumentationError, - InstrumentationError, - NetworkError, -} = 
require('@rudderstack/integrations-lib'); -const myAxios = require('../../../util/myAxios'); +const { InstrumentationError, NetworkError } = require('@rudderstack/integrations-lib'); const { getDynamicErrorType } = require('../../../adapters/utils/networkUtils'); const { getValueFromMessage } = require('../../util'); const { ENDPOINT } = require('./config'); const tags = require('../../util/tags'); const { JSON_MIME_TYPE } = require('../../util/constant'); +const { handleHttpRequest } = require('../../../adapters/network'); const isValidEmail = (email) => { const re = @@ -41,6 +37,30 @@ const isValidUserIdOrError = (channel, userId) => { }; }; +/** + * Returns final status + * @param {*} status + * @returns + */ +const getErrorStatus = (status) => { + let errStatus; + switch (status) { + case 422: + case 401: + case 406: + case 403: + errStatus = 400; + break; + case 500: + case 503: + errStatus = 500; + break; + default: + errStatus = status; + } + return errStatus; +}; + const userValidity = async (channel, Config, userId) => { const paramsdata = {}; if (channel === 'email') { @@ -50,53 +70,38 @@ const userValidity = async (channel, Config, userId) => { } const basicAuth = Buffer.from(Config.apiKey).toString('base64'); - let response; - try { - response = await myAxios.get( - `${ENDPOINT}`, - { - headers: { - Authorization: `Basic ${basicAuth}`, - 'Content-Type': JSON_MIME_TYPE, - }, - params: paramsdata, + const { processedResponse } = await handleHttpRequest( + 'get', + `${ENDPOINT}`, + { + headers: { + Authorization: `Basic ${basicAuth}`, + 'Content-Type': JSON_MIME_TYPE, }, - { - destType: 'delighted', - feature: 'transformation', - requestMethod: 'GET', - endpointPath: '/people.json', - module: 'router', - }, - ); - if (response && response.data && response.status === 200 && Array.isArray(response.data)) { - return response.data.length > 0; - } - throw new NetworkInstrumentationError('Invalid response'); - } catch (error) { - let errMsg = ''; - let errStatus = 400; - if (error.response && error.response.data) { - errMsg = JSON.stringify(error.response.data); - switch (error.response.status) { - case 422: - case 401: - case 406: - case 403: - errStatus = 400; - break; - case 500: - case 503: - errStatus = 500; - break; - default: - errStatus = 400; - } - } - throw new NetworkError(`Error occurred while checking user : ${errMsg}`, errStatus, { - [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(errStatus), - }); + params: paramsdata, + }, + { + destType: 'delighted', + feature: 'transformation', + requestMethod: 'GET', + endpointPath: '/people.json', + module: 'router', + }, + ); + + if (processedResponse.status === 200 && Array.isArray(processedResponse?.response)) { + return processedResponse.response.length > 0; } + + const errStatus = getErrorStatus(processedResponse.status); + throw new NetworkError( + `Error occurred while checking user: ${JSON.stringify(processedResponse?.response || 'Invalid response')}`, + errStatus, + { + [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(errStatus), + }, + processedResponse, + ); }; const eventValidity = (Config, message) => { const event = getValueFromMessage(message, 'event'); diff --git a/src/v0/destinations/emarsys/deleteUsers.js b/src/v0/destinations/emarsys/deleteUsers.js new file mode 100644 index 0000000000..c6ca746217 --- /dev/null +++ b/src/v0/destinations/emarsys/deleteUsers.js @@ -0,0 +1,93 @@ +const { + NetworkError, + isDefinedAndNotNull, + ConfigurationAuthError, +} = require('@rudderstack/integrations-lib'); +const { httpPOST } 
= require('../../../adapters/network'); +const { + processAxiosResponse, + getDynamicErrorType, +} = require('../../../adapters/utils/networkUtils'); +const { isHttpStatusSuccess } = require('../../util'); +const { executeCommonValidations } = require('../../util/regulation-api'); +const tags = require('../../util/tags'); +const { getCustomIdBatches } = require('../../util/deleteUserUtils'); +const { + buildHeader, + deduceCustomIdentifier, + findRudderPropertyByEmersysProperty, +} = require('../../../cdk/v2/destinations/emarsys/utils'); + +/** + * This function will help to delete the users one by one from the userAttributes array. + * @param {*} userAttributes Array of objects with userId, email and phone + * @param {*} config Destination.Config provided in dashboard + * @returns + */ +const userDeletionHandler = async (userAttributes, config) => { + const endpoint = 'https://api.emarsys.net/api/v2/contact/delete'; + const headers = buildHeader(config); + const customIdentifier = deduceCustomIdentifier({}, config.emersysCustomIdentifier); + const configuredPayloadProperty = findRudderPropertyByEmersysProperty( + customIdentifier, + config.fieldMapping, + ); + if (!isDefinedAndNotNull(config.defaultContactList)) { + throw new ConfigurationAuthError('No audience list is configured. Aborting'); + } + /** + * identifierBatches = [[u1,u2,u3,...batchSize],[u1,u2,u3,...batchSize]...] + * Ref doc : https://dev.emarsys.com/docs/core-api-reference/szmq945esac90-delete-contacts + */ + const identifierBatches = getCustomIdBatches(userAttributes, configuredPayloadProperty, 1000); + // Note: we will only get 400 status code when no user deletion is present for given userIds so we will not throw error in that case + // eslint-disable-next-line no-restricted-syntax + for (const curBatch of identifierBatches) { + const deleteContactPayload = { + key_id: customIdentifier, + contact_list_id: config.defaultContactList, + }; + deleteContactPayload[`${customIdentifier}`] = curBatch; + // eslint-disable-next-line no-await-in-loop + const deletionResponse = await httpPOST( + endpoint, + { + ...deleteContactPayload, + }, + { + headers, + }, + { + destType: 'emarsys', + feature: 'deleteUsers', + endpointPath: '/contact/delete', + requestMethod: 'POST', + module: 'deletion', + }, + ); + const handledDelResponse = processAxiosResponse(deletionResponse); + if (!isHttpStatusSuccess(handledDelResponse.status) && handledDelResponse.status !== 400) { + throw new NetworkError( + 'User deletion request failed', + handledDelResponse.status, + { + [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(handledDelResponse.status), + [tags.TAG_NAMES.STATUS]: handledDelResponse.status, + }, + handledDelResponse, + ); + } + } + + return { + statusCode: 200, + status: 'successful', + }; +}; +const processDeleteUsers = async (event) => { + const { userAttributes, config } = event; + executeCommonValidations(userAttributes); + const resp = await userDeletionHandler(userAttributes, config); + return resp; +}; +module.exports = { processDeleteUsers }; diff --git a/src/v0/destinations/facebook_conversions/transform.js b/src/v0/destinations/facebook_conversions/transform.js index 1bb97b2672..e4aee9c503 100644 --- a/src/v0/destinations/facebook_conversions/transform.js +++ b/src/v0/destinations/facebook_conversions/transform.js @@ -1,6 +1,5 @@ /* eslint-disable no-param-reassign */ const get = require('get-value'); -const moment = require('moment'); const { InstrumentationError, ConfigurationError } = require('@rudderstack/integrations-lib'); 
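In the Emarsys deletion handler above, each batch of identifiers is attached to the delete payload under a property named after the custom identifier itself, which is easy to miss in the loop. A small sketch of the resulting request body, using made-up identifier, contact-list, and email values:

```js
// Illustrative data only; the identifier value and emails are not from the repo.
const customIdentifier = 3; // e.g. the Emarsys field the contacts are keyed on
const curBatch = ['alice@example.com', 'bob@example.com'];

const deleteContactPayload = {
  key_id: customIdentifier,
  contact_list_id: 42, // config.defaultContactList from the dashboard
};
// The batch lands under a key named after the identifier:
// { key_id: 3, contact_list_id: 42, '3': ['alice@example.com', 'bob@example.com'] }
deleteContactPayload[`${customIdentifier}`] = curBatch;
```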
const { CONFIG_CATEGORIES, @@ -33,6 +32,7 @@ const { fetchUserData, formingFinalResponse, } = require('../../util/facebookUtils'); +const { verifyEventDuration } = require('../facebook_pixel/utils'); const responseBuilderSimple = (message, category, destination) => { const { Config, ID } = destination; @@ -121,19 +121,7 @@ const processEvent = (message, destination) => { } const timeStamp = getFieldValueFromMessage(message, 'timestamp'); - if (timeStamp) { - const start = moment.unix(moment(timeStamp).format('X')); - const current = moment.unix(moment().format('X')); - // calculates past event in days - const deltaDay = Math.ceil(moment.duration(current.diff(start)).asDays()); - // calculates future event in minutes - const deltaMin = Math.ceil(moment.duration(start.diff(current)).asMinutes()); - if (deltaDay > 7 || deltaMin > 1) { - throw new InstrumentationError( - 'Events must be sent within seven days of their occurrence or up to one minute in the future.', - ); - } - } + verifyEventDuration(message, destination, timeStamp); const { datasetId, accessToken } = destination.Config; if (!datasetId) { diff --git a/src/v0/destinations/facebook_conversions/utils.js b/src/v0/destinations/facebook_conversions/utils.js index c6e3993e33..87fb0ea606 100644 --- a/src/v0/destinations/facebook_conversions/utils.js +++ b/src/v0/destinations/facebook_conversions/utils.js @@ -134,7 +134,7 @@ const populateCustomDataBasedOnCategory = (customData, message, category, catego const { contentIds, contents } = populateContentsAndContentIDs([message.properties]); eventTypeCustomData = { ...eventTypeCustomData, - content_ids: contentIds, + content_ids: contentIds.length === 1 ? contentIds[0] : contentIds, contents, content_type: contentType, content_category: getContentCategory(contentCategory), diff --git a/src/v0/destinations/facebook_offline_conversions/utils.js b/src/v0/destinations/facebook_offline_conversions/utils.js index c48de4e0b9..460ef71176 100644 --- a/src/v0/destinations/facebook_offline_conversions/utils.js +++ b/src/v0/destinations/facebook_offline_conversions/utils.js @@ -396,7 +396,7 @@ const preparePayload = (facebookOfflineConversionsPayload, destination) => { const keys = Object.keys(facebookOfflineConversionsPayload); keys.forEach((key) => { if (isHashRequired && HASHING_REQUIRED_KEYS.includes(key)) { - payload[key] = sha256(facebookOfflineConversionsPayload[key]); + payload[key] = sha256(facebookOfflineConversionsPayload[key].trim()); } else { payload[key] = facebookOfflineConversionsPayload[key]; } @@ -407,8 +407,8 @@ const preparePayload = (facebookOfflineConversionsPayload, destination) => { ? facebookOfflineConversionsPayload.name.split(' ') : null; if (split !== null && Array.isArray(split) && split.length === 2) { - payload.fn = isHashRequired ? sha256(split[0]) : split[0]; - payload.ln = isHashRequired ? sha256(split[1]) : split[1]; + payload.fn = isHashRequired ? sha256(split[0].trim()) : split[0]; + payload.ln = isHashRequired ? 
sha256(split[1].trim()) : split[1]; } delete payload.name; } diff --git a/src/v0/destinations/facebook_pixel/transform.js b/src/v0/destinations/facebook_pixel/transform.js index 8a63998b45..d44a38aa69 100644 --- a/src/v0/destinations/facebook_pixel/transform.js +++ b/src/v0/destinations/facebook_pixel/transform.js @@ -1,8 +1,6 @@ /* eslint-disable no-param-reassign */ const get = require('get-value'); -const moment = require('moment'); const { InstrumentationError, ConfigurationError } = require('@rudderstack/integrations-lib'); -const stats = require('../../../util/stats'); const { VERSION, CONFIG_CATEGORIES, @@ -31,6 +29,7 @@ const { handleOrder, populateCustomDataBasedOnCategory, getCategoryFromEvent, + verifyEventDuration, } = require('./utils'); const { @@ -170,23 +169,7 @@ const processEvent = (message, destination) => { } const timeStamp = message.timestamp || message.originalTimestamp; - if (timeStamp) { - const start = moment.unix(moment(timeStamp).format('X')); - const current = moment.unix(moment().format('X')); - // calculates past event in days - const deltaDay = Math.ceil(moment.duration(current.diff(start)).asDays()); - // calculates future event in minutes - const deltaMin = Math.ceil(moment.duration(start.diff(current)).asMinutes()); - if (deltaDay > 7 || deltaMin > 1) { - // TODO: Remove after testing in mirror transformer - stats.increment('fb_pixel_timestamp_error', { - destinationId: destination.ID, - }); - throw new InstrumentationError( - 'Events must be sent within seven days of their occurrence or up to one minute in the future.', - ); - } - } + verifyEventDuration(message, destination, timeStamp); let eventsToEvents; if (Array.isArray(destination.Config.eventsToEvents)) { diff --git a/src/v0/destinations/facebook_pixel/utils.js b/src/v0/destinations/facebook_pixel/utils.js index cfa625ee3d..74d5240f2a 100644 --- a/src/v0/destinations/facebook_pixel/utils.js +++ b/src/v0/destinations/facebook_pixel/utils.js @@ -1,4 +1,6 @@ const { InstrumentationError } = require('@rudderstack/integrations-lib'); +const get = require('get-value'); +const moment = require('moment'); const { isObject } = require('../../util'); const { ACTION_SOURCES_VALUES, @@ -339,6 +341,29 @@ const getCategoryFromEvent = (eventName) => { return category; }; +const verifyEventDuration = (message, destination, timeStamp) => { + const actionSource = + get(message, 'traits.action_source') || + get(message, 'context.traits.action_source') || + get(message, 'properties.action_source'); + + const start = moment.unix(moment(timeStamp).format('X')); + const current = moment.unix(moment().format('X')); + // calculates past event in days + const deltaDay = Math.ceil(moment.duration(current.diff(start)).asDays()); + // calculates future event in minutes + const deltaMin = Math.ceil(moment.duration(start.diff(current)).asMinutes()); + let defaultSupportedDelta = 7; + if (actionSource === 'physical_store') { + defaultSupportedDelta = 62; + } + if (deltaDay > defaultSupportedDelta || deltaMin > 1) { + throw new InstrumentationError( + `Events must be sent within ${defaultSupportedDelta} days of their occurrence or up to one minute in the future.`, + ); + } +}; + module.exports = { formatRevenue, getActionSource, @@ -348,4 +373,5 @@ module.exports = { handleOrder, populateCustomDataBasedOnCategory, getCategoryFromEvent, + verifyEventDuration, }; diff --git a/src/v0/destinations/facebook_pixel/utils.test.js b/src/v0/destinations/facebook_pixel/utils.test.js index f32d7d7024..fa17aebd76 100644 --- 
a/src/v0/destinations/facebook_pixel/utils.test.js +++ b/src/v0/destinations/facebook_pixel/utils.test.js @@ -1,7 +1,13 @@ const { InstrumentationError } = require('@rudderstack/integrations-lib'); -const { getActionSource, formatRevenue, getCategoryFromEvent } = require('./utils'); +const { + getActionSource, + formatRevenue, + getCategoryFromEvent, + verifyEventDuration, +} = require('./utils'); const { CONFIG_CATEGORIES, OTHER_STANDARD_EVENTS } = require('./config'); +Date.now = jest.fn(() => new Date('2022-01-20T00:00:00Z')); describe('Test Facebook Pixel Utils', () => { describe('getActionSource', () => { // Returns 'other' if payload.action_source is not defined and channel is neither 'web' nor 'mobile' @@ -151,4 +157,75 @@ describe('Test Facebook Pixel Utils', () => { expect(result).toEqual(CONFIG_CATEGORIES.SIMPLE_TRACK); }); }); + + describe('verifyEventDuration', () => { + it('should not throw an InstrumentationError when event duration is less than 8 days after the event occurred', () => { + const message = { + traits: { + action_source: 'some_action_source', + }, + context: { + traits: { + action_source: 'some_action_source', + }, + }, + properties: { + action_source: 'some_action_source', + }, + }; + const destination = { + ID: 'some_destination_id', + }; + const timeStamp = '2022-01-20T00:00:00Z'; + expect(() => { + verifyEventDuration(message, destination, timeStamp); + }).not.toThrow(InstrumentationError); + }); + it('should throw an InstrumentationError when event duration is exactly 8 days after the event occurred', () => { + const message = { + traits: { + action_source: 'some_action_source', + }, + context: { + traits: { + action_source: 'some_action_source', + }, + }, + properties: { + action_source: 'some_action_source', + }, + }; + const destination = { + ID: 'some_destination_id', + }; + const timeStamp = '2022-01-12T00:00:00Z'; + + expect(() => { + verifyEventDuration(message, destination, timeStamp); + }).toThrow(InstrumentationError); + }); + it('should not throw an InstrumentationError when event duration is greater than 8 days after the event occurred and action_source is physical_store', () => { + const message = { + traits: { + action_source: 'physical_store', + }, + context: { + traits: { + action_source: 'some_action_source', + }, + }, + properties: { + action_source: 'some_action_source', + }, + }; + const destination = { + ID: 'some_destination_id', + }; + const timeStamp = '2022-01-12T00:00:00Z'; + + expect(() => { + verifyEventDuration(message, destination, timeStamp); + }).not.toThrow(InstrumentationError); + }); + }); }); diff --git a/src/v0/destinations/fb/transform.js b/src/v0/destinations/fb/transform.js index e6f8e986cf..1160cef407 100644 --- a/src/v0/destinations/fb/transform.js +++ b/src/v0/destinations/fb/transform.js @@ -90,7 +90,7 @@ function sanityCheckPayloadForTypesAndModifications(updatedEvent) { clonedUpdatedEvent[prop] !== '' ) { isUDSet = true; - clonedUpdatedEvent[prop] = sha256(clonedUpdatedEvent[prop].toLowerCase()); + clonedUpdatedEvent[prop] = sha256(clonedUpdatedEvent[prop].trim().toLowerCase()); } break; case 'ud[zp]': @@ -113,7 +113,7 @@ function sanityCheckPayloadForTypesAndModifications(updatedEvent) { } else { isUDSet = true; clonedUpdatedEvent[prop] = sha256( - clonedUpdatedEvent[prop].toLowerCase() === 'female' ? 'f' : 'm', + clonedUpdatedEvent[prop].trim().toLowerCase() === 'female' ? 
'f' : 'm', ); } } @@ -128,7 +128,7 @@ function sanityCheckPayloadForTypesAndModifications(updatedEvent) { if (clonedUpdatedEvent[prop] && clonedUpdatedEvent[prop] !== '') { isUDSet = true; clonedUpdatedEvent[prop] = sha256( - clonedUpdatedEvent[prop].toLowerCase().replace(/ /g, ''), + clonedUpdatedEvent[prop].trim().toLowerCase().replace(/ /g, ''), ); } break; diff --git a/src/v0/destinations/fb_custom_audience/recordTransform.js b/src/v0/destinations/fb_custom_audience/recordTransform.js new file mode 100644 index 0000000000..0f7b65c0bf --- /dev/null +++ b/src/v0/destinations/fb_custom_audience/recordTransform.js @@ -0,0 +1,277 @@ +/* eslint-disable no-const-assign */ +const lodash = require('lodash'); +const get = require('get-value'); +const { + InstrumentationError, + ConfigurationError, + getErrorRespEvents, +} = require('@rudderstack/integrations-lib'); +const { schemaFields } = require('./config'); +const { MappedToDestinationKey } = require('../../../constants'); +const stats = require('../../../util/stats'); +const { + getDestinationExternalIDInfoForRetl, + isDefinedAndNotNullAndNotEmpty, + checkSubsetOfArray, + returnArrayOfSubarrays, + getSuccessRespEvents, + generateErrorObject, +} = require('../../util'); +const { + ensureApplicableFormat, + getUpdatedDataElement, + getSchemaForEventMappedToDest, + batchingWithPayloadSize, + responseBuilderSimple, + getDataSource, +} = require('./util'); + +function getErrorMetaData(inputs, acceptedOperations) { + const metadata = []; + // eslint-disable-next-line no-restricted-syntax + for (const key in inputs) { + if (!acceptedOperations.includes(key)) { + inputs[key].forEach((input) => { + metadata.push(input.metadata); + }); + } + } + return metadata; +} + +const processRecordEventArray = ( + recordChunksArray, + userSchema, + isHashRequired, + disableFormat, + paramsPayload, + prepareParams, + destination, + operation, + operationAudienceId, +) => { + const toSendEvents = []; + const metadata = []; + recordChunksArray.forEach((recordArray) => { + const data = []; + recordArray.forEach((input) => { + const { fields } = input.message; + let dataElement = []; + let nullUserData = true; + + userSchema.forEach((eachProperty) => { + const userProperty = fields[eachProperty]; + let updatedProperty = userProperty; + + if (isHashRequired && !disableFormat) { + updatedProperty = ensureApplicableFormat(eachProperty, userProperty); + } + + dataElement = getUpdatedDataElement( + dataElement, + isHashRequired, + eachProperty, + updatedProperty, + ); + + if (dataElement[dataElement.length - 1]) { + nullUserData = false; + } + }); + + if (nullUserData) { + stats.increment('fb_custom_audience_event_having_all_null_field_values_for_a_user', { + destinationId: destination.ID, + nullFields: userSchema, + }); + } + data.push(dataElement); + metadata.push(input.metadata); + }); + + const prepareFinalPayload = lodash.cloneDeep(paramsPayload); + prepareFinalPayload.schema = userSchema; + prepareFinalPayload.data = data; + const payloadBatches = batchingWithPayloadSize(prepareFinalPayload); + + payloadBatches.forEach((payloadBatch) => { + const response = { + ...prepareParams, + payload: payloadBatch, + }; + + const wrappedResponse = { + responseField: response, + operationCategory: operation, + }; + + const builtResponse = responseBuilderSimple(wrappedResponse, operationAudienceId); + + toSendEvents.push(builtResponse); + }); + }); + + const response = getSuccessRespEvents(toSendEvents, metadata, destination, true); + + return response; +}; + +async function 
processRecordInputs(groupedRecordInputs) { + const { destination } = groupedRecordInputs[0]; + const { message } = groupedRecordInputs[0]; + const { + isHashRequired, + accessToken, + disableFormat, + type, + subType, + isRaw, + maxUserCount, + audienceId, + } = destination.Config; + const prepareParams = { + access_token: accessToken, + }; + + // maxUserCount validation + const maxUserCountNumber = parseInt(maxUserCount, 10); + if (Number.isNaN(maxUserCountNumber)) { + throw new ConfigurationError('Batch size must be an Integer.'); + } + + // audience id validation + let operationAudienceId = audienceId; + const mappedToDestination = get(message, MappedToDestinationKey); + if (mappedToDestination) { + const { objectType } = getDestinationExternalIDInfoForRetl(message, 'FB_CUSTOM_AUDIENCE'); + operationAudienceId = objectType; + } + if (!isDefinedAndNotNullAndNotEmpty(operationAudienceId)) { + throw new ConfigurationError('Audience ID is a mandatory field'); + } + + // user schema validation + let { userSchema } = destination.Config; + if (mappedToDestination) { + userSchema = getSchemaForEventMappedToDest(message); + } + if (!Array.isArray(userSchema)) { + userSchema = [userSchema]; + } + if (!checkSubsetOfArray(schemaFields, userSchema)) { + throw new ConfigurationError('One or more of the schema fields are not supported'); + } + + const paramsPayload = {}; + + if (isRaw) { + paramsPayload.is_raw = isRaw; + } + + const dataSource = getDataSource(type, subType); + if (Object.keys(dataSource).length > 0) { + paramsPayload.data_source = dataSource; + } + + const groupedRecordsByAction = lodash.groupBy(groupedRecordInputs, (record) => + record.message.action?.toLowerCase(), + ); + + const finalResponse = []; + + let insertResponse; + let deleteResponse; + let updateResponse; + + if (groupedRecordsByAction.delete) { + const deleteRecordChunksArray = returnArrayOfSubarrays( + groupedRecordsByAction.delete, + maxUserCountNumber, + ); + deleteResponse = processRecordEventArray( + deleteRecordChunksArray, + userSchema, + isHashRequired, + disableFormat, + paramsPayload, + prepareParams, + destination, + 'remove', + operationAudienceId, + ); + } + + if (groupedRecordsByAction.insert) { + const insertRecordChunksArray = returnArrayOfSubarrays( + groupedRecordsByAction.insert, + maxUserCountNumber, + ); + + insertResponse = processRecordEventArray( + insertRecordChunksArray, + userSchema, + isHashRequired, + disableFormat, + paramsPayload, + prepareParams, + destination, + 'add', + operationAudienceId, + ); + } + + if (groupedRecordsByAction.update) { + const updateRecordChunksArray = returnArrayOfSubarrays( + groupedRecordsByAction.update, + maxUserCountNumber, + ); + updateResponse = processRecordEventArray( + updateRecordChunksArray, + userSchema, + isHashRequired, + disableFormat, + paramsPayload, + prepareParams, + destination, + 'add', + operationAudienceId, + ); + } + + const eventTypes = ['update', 'insert', 'delete']; + const errorMetaData = []; + const errorMetaDataObject = getErrorMetaData(groupedRecordsByAction, eventTypes); + if (errorMetaDataObject.length > 0) { + errorMetaData.push(errorMetaDataObject); + } + + const error = new InstrumentationError('Invalid action type in record event'); + const errorObj = generateErrorObject(error); + const errorResponseList = errorMetaData.map((metadata) => + getErrorRespEvents(metadata, errorObj.status, errorObj.message, errorObj.statTags), + ); + + if (deleteResponse && deleteResponse.batchedRequest.length > 0) { + 
finalResponse.push(deleteResponse); + } + if (insertResponse && insertResponse.batchedRequest.length > 0) { + finalResponse.push(insertResponse); + } + if (updateResponse && updateResponse.batchedRequest.length > 0) { + finalResponse.push(updateResponse); + } + if (errorResponseList.length > 0) { + finalResponse.push(...errorResponseList); + } + + if (finalResponse.length === 0) { + throw new InstrumentationError( + 'Missing valid parameters, unable to generate transformed payload', + ); + } + return finalResponse; +} + +module.exports = { + processRecordInputs, +}; diff --git a/src/v0/destinations/fb_custom_audience/transform.js b/src/v0/destinations/fb_custom_audience/transform.js index 9320a3476b..c5c340c043 100644 --- a/src/v0/destinations/fb_custom_audience/transform.js +++ b/src/v0/destinations/fb_custom_audience/transform.js @@ -1,14 +1,7 @@ const lodash = require('lodash'); const get = require('get-value'); +const { InstrumentationError, ConfigurationError } = require('@rudderstack/integrations-lib'); const { - InstrumentationError, - TransformationError, - ConfigurationError, -} = require('@rudderstack/integrations-lib'); -const { - defaultRequestConfig, - defaultPostRequestConfig, - defaultDeleteRequestConfig, checkSubsetOfArray, isDefinedAndNotNullAndNotEmpty, returnArrayOfSubarrays, @@ -20,40 +13,28 @@ const { prepareDataField, getSchemaForEventMappedToDest, batchingWithPayloadSize, + generateAppSecretProof, + responseBuilderSimple, + getDataSource, } = require('./util'); -const { - getEndPoint, - schemaFields, - USER_ADD, - USER_DELETE, - typeFields, - subTypeFields, -} = require('./config'); +const { schemaFields, USER_ADD, USER_DELETE } = require('./config'); const { MappedToDestinationKey } = require('../../../constants'); +const { processRecordInputs } = require('./recordTransform'); +const logger = require('../../../logger'); -const responseBuilderSimple = (payload, audienceId) => { - if (payload) { - const responseParams = payload.responseField; - const response = defaultRequestConfig(); - response.endpoint = getEndPoint(audienceId); - - if (payload.operationCategory === 'add') { - response.method = defaultPostRequestConfig.requestMethod; +function checkForUnsupportedEventTypes(dictionary, keyList) { + const unsupportedEventTypes = []; + // eslint-disable-next-line no-restricted-syntax + for (const key in dictionary) { + if (!keyList.includes(key)) { + unsupportedEventTypes.push(key); } - if (payload.operationCategory === 'remove') { - response.method = defaultDeleteRequestConfig.requestMethod; - } - - response.params = responseParams; - return response; } - // fail-safety for developer error - throw new TransformationError(`Payload could not be constructed`); -}; + return unsupportedEventTypes; +} // Function responsible prepare the payload field of every event parameter - const preparePayload = ( userUpdateList, userSchema, @@ -88,7 +69,7 @@ const prepareResponse = ( userSchema, isHashRequired = true, ) => { - const { accessToken, disableFormat, type, subType, isRaw } = destination.Config; + const { accessToken, disableFormat, type, subType, isRaw, appSecret } = destination.Config; const mappedToDestination = get(message, MappedToDestinationKey); @@ -101,23 +82,22 @@ const prepareResponse = ( const prepareParams = {}; // creating the parameters field const paramsPayload = {}; - const dataSource = {}; prepareParams.access_token = accessToken; + if (isDefinedAndNotNullAndNotEmpty(appSecret)) { + const dateNow = Date.now(); + prepareParams.appsecret_time = 
Math.floor(dateNow / 1000); // Get current Unix time in seconds + prepareParams.appsecret_proof = generateAppSecretProof(accessToken, appSecret, dateNow); + } + // creating the payload field for parameters if (isRaw) { paramsPayload.is_raw = isRaw; } // creating the data_source block - if (type && type !== 'NA' && typeFields.includes(type)) { - dataSource.type = type; - } - - if (subType && subType !== 'NA' && subTypeFields.includes(subType)) { - dataSource.sub_type = subType; - } + const dataSource = getDataSource(type, subType); if (Object.keys(dataSource).length > 0) { paramsPayload.data_source = dataSource; } @@ -243,6 +223,7 @@ const processEvent = (message, destination) => { ), ); } + toSendEvents.forEach((sendEvent) => { respList.push(responseBuilderSimple(sendEvent, operationAudienceId)); }); @@ -258,7 +239,31 @@ const processEvent = (message, destination) => { const process = (event) => processEvent(event.message, event.destination); const processRouterDest = async (inputs, reqMetadata) => { - const respList = await simpleProcessRouterDest(inputs, process, reqMetadata); + const respList = []; + const groupedInputs = lodash.groupBy(inputs, (input) => input.message.type?.toLowerCase()); + let transformedRecordEvent = []; + let transformedAudienceEvent = []; + + const eventTypes = ['record', 'audiencelist']; + const unsupportedEventList = checkForUnsupportedEventTypes(groupedInputs, eventTypes); + if (unsupportedEventList.length > 0) { + logger.info(`unsupported events found ${unsupportedEventList}`); + throw new ConfigurationError('unsupported events present in the event'); + } + + if (groupedInputs.record) { + transformedRecordEvent = await processRecordInputs(groupedInputs.record, reqMetadata); + } + + if (groupedInputs.audiencelist) { + transformedAudienceEvent = await simpleProcessRouterDest( + groupedInputs.audiencelist, + process, + reqMetadata, + ); + } + + respList.push(...transformedRecordEvent, ...transformedAudienceEvent); return flattenMap(respList); }; diff --git a/src/v0/destinations/fb_custom_audience/util.js b/src/v0/destinations/fb_custom_audience/util.js index 6c53ed2814..401b601869 100644 --- a/src/v0/destinations/fb_custom_audience/util.js +++ b/src/v0/destinations/fb_custom_audience/util.js @@ -1,8 +1,16 @@ const lodash = require('lodash'); const sha256 = require('sha256'); +const crypto = require('crypto'); const get = require('get-value'); const jsonSize = require('json-size'); const { InstrumentationError, ConfigurationError } = require('@rudderstack/integrations-lib'); +const { TransformationError } = require('@rudderstack/integrations-lib'); +const { typeFields, subTypeFields, getEndPoint } = require('./config'); +const { + defaultRequestConfig, + defaultPostRequestConfig, + defaultDeleteRequestConfig, +} = require('../../util'); const stats = require('../../../util/stats'); const { isDefinedAndNotNull } = require('../../util'); @@ -92,11 +100,11 @@ const ensureApplicableFormat = (userProperty, userInformation) => { case 'FN': case 'FI': if (userProperty !== 'FI') { - updatedProperty = stringifiedUserInformation.toLowerCase().replace(/[!#$%&@A-Za-z]/g, ''); + updatedProperty = stringifiedUserInformation.toLowerCase().replace(/[^#$%&'*+/a-z]/g, ''); } else { updatedProperty = stringifiedUserInformation .toLowerCase() - .replace(/[^!#$%&,.?@A-Za-z]/g, ''); + .replace(/[^!"#$%&'()*+,-./a-z]/g, ''); } break; case 'MADID': @@ -206,4 +214,57 @@ const prepareDataField = ( return data; }; -module.exports = { prepareDataField, getSchemaForEventMappedToDest, 
batchingWithPayloadSize }; +// ref: https://developers.facebook.com/docs/facebook-login/security/#generate-the-proof +const generateAppSecretProof = (accessToken, appSecret, dateNow) => { + const currentTime = Math.floor(dateNow / 1000); // Get current Unix time in seconds + const data = `${accessToken}|${currentTime}`; + + // Creating a HMAC SHA-256 hash with the app_secret as the key + const hmac = crypto.createHmac('sha256', appSecret); + hmac.update(data); + const appsecretProof = hmac.digest('hex'); + + return appsecretProof; +}; + +const getDataSource = (type, subType) => { + const dataSource = {}; + if (type && type !== 'NA' && typeFields.includes(type)) { + dataSource.type = type; + } + if (subType && subType !== 'NA' && subTypeFields.includes(subType)) { + dataSource.sub_type = subType; + } + return dataSource; +}; + +const responseBuilderSimple = (payload, audienceId) => { + if (payload) { + const responseParams = payload.responseField; + const response = defaultRequestConfig(); + response.endpoint = getEndPoint(audienceId); + + if (payload.operationCategory === 'add') { + response.method = defaultPostRequestConfig.requestMethod; + } + if (payload.operationCategory === 'remove') { + response.method = defaultDeleteRequestConfig.requestMethod; + } + + response.params = responseParams; + return response; + } + // fail-safety for developer error + throw new TransformationError(`Payload could not be constructed`); +}; + +module.exports = { + prepareDataField, + getSchemaForEventMappedToDest, + batchingWithPayloadSize, + ensureApplicableFormat, + getUpdatedDataElement, + generateAppSecretProof, + responseBuilderSimple, + getDataSource, +}; diff --git a/src/v0/destinations/fb_custom_audience/util.test.js b/src/v0/destinations/fb_custom_audience/util.test.js new file mode 100644 index 0000000000..60e0aff464 --- /dev/null +++ b/src/v0/destinations/fb_custom_audience/util.test.js @@ -0,0 +1,122 @@ +const { getDataSource, responseBuilderSimple, getUpdatedDataElement } = require('./util'); + +const basePayload = { + responseField: { + access_token: 'ABC', + payload: { + schema: ['EMAIL', 'FI'], + data: [ + [ + 'b100c2ec0718fe6b4805b623aeec6710719d042ceea55f5c8135b010ec1c7b36', + '1e14a2f476f7611a8b22bc85d14237fdc88aac828737e739416c32c5bce3bd16', + ], + ], + }, + }, +}; + +const baseResponse = { + version: '1', + type: 'REST', + endpoint: 'https://graph.facebook.com/v18.0/23848494844100489/users', + headers: {}, + params: { + access_token: 'ABC', + payload: { + schema: ['EMAIL', 'FI'], + data: [ + [ + 'b100c2ec0718fe6b4805b623aeec6710719d042ceea55f5c8135b010ec1c7b36', + '1e14a2f476f7611a8b22bc85d14237fdc88aac828737e739416c32c5bce3bd16', + ], + ], + }, + }, + body: { + JSON: {}, + JSON_ARRAY: {}, + XML: {}, + FORM: {}, + }, + files: {}, +}; + +describe('FB_custom_audience utils test', () => { + describe('getDataSource function tests', () => { + it('Should return empty datasource if type and subType are both NA', () => { + const expectedDataSource = {}; + const dataSource = getDataSource('NA', 'NA'); + expect(dataSource).toEqual(expectedDataSource); + }); + it('Should set subType and type if value present in destination config macthes with preset list', () => { + const expectedDataSource = { + type: 'EVENT_BASED', + }; + const dataSource = getDataSource('EVENT_BASED', 'something'); + expect(dataSource).toEqual(expectedDataSource); + }); + }); + + describe('responseBuilderSimple function tests', () => { + it('Should return correct response for add payload', () => { + const payload = basePayload; 
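The appsecret_proof support added for fb_custom_audience attaches two extra request params whenever an app secret is configured: `appsecret_time` (Unix seconds) and `appsecret_proof`, an HMAC-SHA256 of `${accessToken}|${time}` keyed with the secret. A sketch with placeholder credentials; the require path and values are illustrative:

```js
const { generateAppSecretProof } = require('./util'); // fb_custom_audience util from the change above

const accessToken = 'EAAB-example-token';
const appSecret = 'example-app-secret';
const dateNow = Date.now();

const prepareParams = {
  access_token: accessToken,
  appsecret_time: Math.floor(dateNow / 1000), // same second used inside the proof
  appsecret_proof: generateAppSecretProof(accessToken, appSecret, dateNow),
};
```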
+ payload.operationCategory = 'add'; + const expectedResponse = baseResponse; + expectedResponse.method = 'POST'; + const response = responseBuilderSimple(payload, '23848494844100489'); + expect(response).toEqual(expectedResponse); + }); + + it('Should return correct response for delete payload', () => { + const payload = basePayload; + payload.operationCategory = 'remove'; + const expectedResponse = baseResponse; + expectedResponse.method = 'DELETE'; + const response = responseBuilderSimple(payload, '23848494844100489'); + expect(response).toEqual(expectedResponse); + }); + + it('Should throw error if payload is empty', () => { + try { + const response = responseBuilderSimple(payload, ''); + expect(response).toEqual(); + } catch (error) { + expect(error.message).toEqual(`payload is not defined`); + } + }); + }); + + describe('getUpdatedDataElement function tests', () => { + it('Should hash field if isHashRequired is set to true', () => { + const expectedDataElement = [ + '59107c750fd5ee2758d1988f2bf12d9f110439221ebdb7997e70d6a2c1c5afda', + ]; + let dataElement = []; + dataElement = getUpdatedDataElement(dataElement, true, 'FN', 'some-name'); + expect(dataElement).toEqual(expectedDataElement); + }); + + it('Should not hash field if isHashRequired is set to false', () => { + const expectedDataElement = ['some-name']; + let dataElement = []; + dataElement = getUpdatedDataElement(dataElement, false, 'FN', 'some-name'); + expect(dataElement).toEqual(expectedDataElement); + }); + + it('Should not hash MADID or EXTERN_ID and just pass value', () => { + const expectedDataElement = ['some-id', 'some-ext-id']; + let dataElement = []; + dataElement = getUpdatedDataElement(dataElement, true, 'MADID', 'some-id'); + dataElement = getUpdatedDataElement(dataElement, true, 'EXTERN_ID', 'some-ext-id'); + expect(dataElement).toEqual(expectedDataElement); + }); + + it('Should not hash MADID or EXTERN_ID and just pass empty value if value does not exist', () => { + const expectedDataElement = ['', '']; + let dataElement = []; + dataElement = getUpdatedDataElement(dataElement, true, 'MADID', ''); + dataElement = getUpdatedDataElement(dataElement, true, 'EXTERN_ID', ''); + expect(dataElement).toEqual(expectedDataElement); + }); + }); +}); diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/data/trackConfig.json b/src/v0/destinations/google_adwords_enhanced_conversions/data/trackConfig.json index bf5485270b..c38b24598d 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/data/trackConfig.json +++ b/src/v0/destinations/google_adwords_enhanced_conversions/data/trackConfig.json @@ -55,7 +55,7 @@ "sourceFromGenericMap": true, "required": false, "metadata": { - "type": "hashToSha256" + "type": ["trim", "hashToSha256"] } }, { @@ -64,7 +64,7 @@ "sourceFromGenericMap": true, "required": false, "metadata": { - "type": "hashToSha256" + "type": ["trim", "hashToSha256"] } }, { @@ -73,7 +73,7 @@ "sourceFromGenericMap": true, "required": false, "metadata": { - "type": "hashToSha256" + "type": ["trim", "hashToSha256"] } }, { @@ -82,7 +82,7 @@ "sourceFromGenericMap": true, "required": false, "metadata": { - "type": "hashToSha256" + "type": ["trim", "hashToSha256"] } }, { @@ -127,7 +127,7 @@ "sourceKeys": ["context.traits.streetAddress", "context.traits.address"], "required": false, "metadata": { - "type": "hashToSha256" + "type": ["trim", "hashToSha256"] } } ] diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/transform.js 
b/src/v0/destinations/google_adwords_enhanced_conversions/transform.js index 0be7c3f0ee..55d0c16c8c 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/transform.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/transform.js @@ -24,7 +24,7 @@ const { JSON_MIME_TYPE } = require('../../util/constant'); const updateMappingJson = (mapping) => { const newMapping = []; mapping.forEach((element) => { - if (get(element, 'metadata.type') && element.metadata.type === 'hashToSha256') { + if (get(element, 'metadata.type') && element.metadata.type.includes('hashToSha256')) { element.metadata.type = 'toString'; } newMapping.push(element); diff --git a/src/v0/destinations/google_adwords_offline_conversions/transform.js b/src/v0/destinations/google_adwords_offline_conversions/transform.js index c3be0f7cab..76b12587cd 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/transform.js +++ b/src/v0/destinations/google_adwords_offline_conversions/transform.js @@ -170,9 +170,8 @@ const batchEvents = (storeSalesEvents) => { storeSalesEvent.message?.body?.JSON?.addConversionPayload?.operations, ); batchEventResponse.metadatas.push(storeSalesEvent.metadata); - batchEventResponse.destination = storeSalesEvent.destination; }); - + batchEventResponse.destination = storeSalesEvents[0].destination; return [ getSuccessRespEvents( batchEventResponse.batchedRequest, diff --git a/src/v0/destinations/google_adwords_offline_conversions/utils.js b/src/v0/destinations/google_adwords_offline_conversions/utils.js index 70b42e2157..dfa892a769 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/utils.js +++ b/src/v0/destinations/google_adwords_offline_conversions/utils.js @@ -140,17 +140,17 @@ const buildAndGetAddress = (message, hashUserIdentifier) => { const address = constructPayload(message, trackAddStoreAddressConversionsMapping); if (address.hashed_last_name) { address.hashed_last_name = hashUserIdentifier - ? sha256(address.hashed_last_name).toString() + ? sha256(address.hashed_last_name.trim()).toString() : address.hashed_last_name; } if (address.hashed_first_name) { address.hashed_first_name = hashUserIdentifier - ? sha256(address.hashed_first_name).toString() + ? sha256(address.hashed_first_name.trim()).toString() : address.hashed_first_name; } if (address.hashed_street_address) { address.hashed_street_address = hashUserIdentifier - ? sha256(address.hashed_street_address).toString() + ? sha256(address.hashed_street_address.trim()).toString() : address.hashed_street_address; } return Object.keys(address).length > 0 ? address : null; @@ -269,8 +269,10 @@ const getAddConversionPayload = (message, Config) => { const phone = getFieldValueFromMessage(message, 'phone'); const userIdentifierInfo = { - email: hashUserIdentifier && isDefinedAndNotNull(email) ? sha256(email).toString() : email, - phone: hashUserIdentifier && isDefinedAndNotNull(phone) ? sha256(phone).toString() : phone, + email: + hashUserIdentifier && isDefinedAndNotNull(email) ? sha256(email.trim()).toString() : email, + phone: + hashUserIdentifier && isDefinedAndNotNull(phone) ? sha256(phone.trim()).toString() : phone, address: buildAndGetAddress(message, hashUserIdentifier), }; @@ -363,8 +365,10 @@ const getClickConversionPayloadAndEndpoint = ( // Ref - https://developers.google.com/google-ads/api/rest/reference/rest/v11/customers/uploadClickConversions#ClickConversion const userIdentifierInfo = { - email: hashUserIdentifier && isDefinedAndNotNull(email) ? 
sha256(email).toString() : email, - phone: hashUserIdentifier && isDefinedAndNotNull(phone) ? sha256(phone).toString() : phone, + email: + hashUserIdentifier && isDefinedAndNotNull(email) ? sha256(email.trim()).toString() : email, + phone: + hashUserIdentifier && isDefinedAndNotNull(phone) ? sha256(phone.trim()).toString() : phone, }; const keyName = getExisitingUserIdentifier(userIdentifierInfo, defaultUserIdentifier); diff --git a/src/v0/destinations/hs/HSTransform-v2.js b/src/v0/destinations/hs/HSTransform-v2.js index 3699e1c789..d2b26f1ab8 100644 --- a/src/v0/destinations/hs/HSTransform-v2.js +++ b/src/v0/destinations/hs/HSTransform-v2.js @@ -110,11 +110,11 @@ const processIdentify = async (message, destination, propertyMap) => { GENERIC_TRUE_VALUES.includes(mappedToDestination.toString()) && operation ) { - addExternalIdToHSTraits(message); if (!objectType) { throw new InstrumentationError('objectType not found'); } if (operation === 'createObject') { + addExternalIdToHSTraits(message); endpoint = CRM_CREATE_UPDATE_ALL_OBJECTS.replace(':objectType', objectType); } else if (operation === 'updateObject' && getHsSearchId(message)) { const { hsSearchId } = getHsSearchId(message); diff --git a/src/v0/destinations/impact/transform.js b/src/v0/destinations/impact/transform.js index 2eefdf7992..729f988938 100644 --- a/src/v0/destinations/impact/transform.js +++ b/src/v0/destinations/impact/transform.js @@ -59,7 +59,7 @@ const buildPageLoadPayload = (message, campaignId, impactAppId, enableEmailHashi let payload = constructPayload(message, MAPPING_CONFIG[CONFIG_CATEGORIES.PAGELOAD.name]); if (isDefinedAndNotNull(payload.CustomerEmail)) { payload.CustomerEmail = enableEmailHashing - ? sha1(payload?.CustomerEmail) + ? sha1(payload?.CustomerEmail.trim()) : payload?.CustomerEmail; } payload.CampaignId = campaignId; @@ -155,7 +155,7 @@ const processTrackEvent = (message, Config) => { payload.ImpactAppId = impactAppId; if (isDefinedAndNotNull(payload.CustomerEmail)) { payload.CustomerEmail = enableEmailHashing - ? sha1(payload?.CustomerEmail) + ? sha1(payload?.CustomerEmail.trim()) : payload?.CustomerEmail; } diff --git a/src/v0/destinations/mp/config.js b/src/v0/destinations/mp/config.js index 35b40294f5..3abdf2eebb 100644 --- a/src/v0/destinations/mp/config.js +++ b/src/v0/destinations/mp/config.js @@ -49,7 +49,6 @@ const MP_IDENTIFY_EXCLUSION_LIST = [ ]; const GEO_SOURCE_ALLOWED_VALUES = [null, 'reverse_geocoding']; -const TRACK_MAX_BATCH_SIZE = 50; const IMPORT_MAX_BATCH_SIZE = 2000; const ENGAGE_MAX_BATCH_SIZE = 2000; const GROUPS_MAX_BATCH_SIZE = 200; @@ -68,7 +67,6 @@ module.exports = { MP_IDENTIFY_EXCLUSION_LIST, getCreateDeletionTaskEndpoint, DISTINCT_ID_MAX_BATCH_SIZE, - TRACK_MAX_BATCH_SIZE, IMPORT_MAX_BATCH_SIZE, ENGAGE_MAX_BATCH_SIZE, GROUPS_MAX_BATCH_SIZE, diff --git a/src/v0/destinations/mp/transform.js b/src/v0/destinations/mp/transform.js index 09a7862f9a..2065764b98 100644 --- a/src/v0/destinations/mp/transform.js +++ b/src/v0/destinations/mp/transform.js @@ -24,7 +24,6 @@ const { mappingConfig, BASE_ENDPOINT, BASE_ENDPOINT_EU, - TRACK_MAX_BATCH_SIZE, IMPORT_MAX_BATCH_SIZE, ENGAGE_MAX_BATCH_SIZE, GROUPS_MAX_BATCH_SIZE, @@ -47,21 +46,19 @@ const mPEventPropertiesConfigJson = mappingConfig[ConfigCategory.EVENT_PROPERTIE const setImportCredentials = (destConfig) => { const endpoint = destConfig.dataResidency === 'eu' ? `${BASE_ENDPOINT_EU}/import/` : `${BASE_ENDPOINT}/import/`; - const headers = { 'Content-Type': 'application/json' }; const params = { strict: destConfig.strictMode ? 
1 : 0 }; - const { apiSecret, serviceAccountUserName, serviceAccountSecret, projectId } = destConfig; - if (apiSecret) { - headers.Authorization = `Basic ${base64Convertor(`${apiSecret}:`)}`; + const { serviceAccountUserName, serviceAccountSecret, projectId, token } = destConfig; + let credentials; + if (token) { + credentials = `${token}:`; } else if (serviceAccountUserName && serviceAccountSecret && projectId) { - headers.Authorization = `Basic ${base64Convertor( - `${serviceAccountUserName}:${serviceAccountSecret}`, - )}`; + credentials = `${serviceAccountUserName}:${serviceAccountSecret}`; params.projectId = projectId; - } else { - throw new InstrumentationError( - 'Event timestamp is older than 5 days and no API secret or service account credentials (i.e. username, secret and projectId) are provided in destination configuration', - ); } + const headers = { + 'Content-Type': 'application/json', + Authorization: `Basic ${base64Convertor(credentials)}`, + }; return { endpoint, headers, params }; }; @@ -70,46 +67,34 @@ const responseBuilderSimple = (payload, message, eventType, destConfig) => { response.method = defaultPostRequestConfig.requestMethod; response.userId = message.userId || message.anonymousId; response.body.JSON_ARRAY = { batch: JSON.stringify([removeUndefinedValues(payload)]) }; - const { apiSecret, serviceAccountUserName, serviceAccountSecret, projectId, dataResidency } = - destConfig; + const { dataResidency } = destConfig; const duration = getTimeDifference(message.timestamp); + + const setCredentials = () => { + const credentials = setImportCredentials(destConfig); + response.endpoint = credentials.endpoint; + response.headers = credentials.headers; + response.params = { + project_id: credentials.params?.projectId, + strict: credentials.params.strict, + }; + }; + switch (eventType) { case EventType.ALIAS: case EventType.TRACK: case EventType.SCREEN: - case EventType.PAGE: - if ( - !apiSecret && - !(serviceAccountUserName && serviceAccountSecret && projectId) && - duration.days <= 5 - ) { - response.endpoint = - dataResidency === 'eu' ? `${BASE_ENDPOINT_EU}/track/` : `${BASE_ENDPOINT}/track/`; - response.headers = {}; - } else if (duration.years > 5) { + case EventType.PAGE: { + if (duration.years > 5) { throw new InstrumentationError('Event timestamp should be within last 5 years'); - } else { - const credentials = setImportCredentials(destConfig); - response.endpoint = credentials.endpoint; - response.headers = credentials.headers; - response.params = { - project_id: credentials.params?.projectId, - strict: credentials.params.strict, - }; - break; } + setCredentials(); break; - case 'merge': - // eslint-disable-next-line no-case-declarations - const credentials = setImportCredentials(destConfig); - response.endpoint = credentials.endpoint; - response.headers = credentials.headers; - response.params = { - project_id: credentials.params?.projectId, - strict: credentials.params.strict, - }; + } + case 'merge': { + setCredentials(); break; - + } default: response.endpoint = dataResidency === 'eu' ? 
`${BASE_ENDPOINT_EU}/engage/` : `${BASE_ENDPOINT}/engage/`; @@ -484,7 +469,6 @@ const processRouterDest = async (inputs, reqMetadata) => { const batchSize = { engage: 0, groups: 0, - track: 0, import: 0, }; @@ -516,23 +500,16 @@ const processRouterDest = async (inputs, reqMetadata) => { ); transformedPayloads = lodash.flatMap(transformedPayloads); - const { engageEvents, groupsEvents, trackEvents, importEvents, batchErrorRespList } = + const { engageEvents, groupsEvents, importEvents, batchErrorRespList } = groupEventsByEndpoint(transformedPayloads); const engageRespList = batchEvents(engageEvents, ENGAGE_MAX_BATCH_SIZE, reqMetadata); const groupsRespList = batchEvents(groupsEvents, GROUPS_MAX_BATCH_SIZE, reqMetadata); - const trackRespList = batchEvents(trackEvents, TRACK_MAX_BATCH_SIZE, reqMetadata); const importRespList = batchEvents(importEvents, IMPORT_MAX_BATCH_SIZE, reqMetadata); - const batchSuccessRespList = [ - ...engageRespList, - ...groupsRespList, - ...trackRespList, - ...importRespList, - ]; + const batchSuccessRespList = [...engageRespList, ...groupsRespList, ...importRespList]; batchSize.engage += engageRespList.length; batchSize.groups += groupsRespList.length; - batchSize.track += trackRespList.length; batchSize.import += importRespList.length; return [...batchSuccessRespList, ...batchErrorRespList]; diff --git a/src/v0/destinations/mp/util.js b/src/v0/destinations/mp/util.js index d564e805ad..b2807d6e11 100644 --- a/src/v0/destinations/mp/util.js +++ b/src/v0/destinations/mp/util.js @@ -136,7 +136,7 @@ const createIdentifyResponse = (message, type, destination, responseBuilderSimpl * @returns */ const isImportAuthCredentialsAvailable = (destination) => - destination.Config.apiSecret || + destination.Config.token || (destination.Config.serviceAccountSecret && destination.Config.serviceAccountUserName && destination.Config.projectId); @@ -179,7 +179,6 @@ const groupEventsByEndpoint = (events) => { const eventMap = { engage: [], groups: [], - track: [], import: [], }; const batchErrorRespList = []; @@ -204,7 +203,6 @@ const groupEventsByEndpoint = (events) => { return { engageEvents: eventMap.engage, groupsEvents: eventMap.groups, - trackEvents: eventMap.track, importEvents: eventMap.import, batchErrorRespList, }; @@ -349,7 +347,6 @@ const generatePageOrScreenCustomEventName = (message, userDefinedEventTemplate) * @param {Object} batchSize - The object containing the batch size for different endpoints. * @param {number} batchSize.engage - The batch size for engage endpoint. * @param {number} batchSize.groups - The batch size for group endpoint. - * @param {number} batchSize.track - The batch size for track endpoint. * @param {number} batchSize.import - The batch size for import endpoint. * @param {string} destinationId - The ID of the destination. 
* @returns {void} @@ -361,9 +358,6 @@ const recordBatchSizeMetrics = (batchSize, destinationId) => { stats.gauge('mixpanel_batch_group_pack_size', batchSize.groups, { destination_id: destinationId, }); - stats.gauge('mixpanel_batch_track_pack_size', batchSize.track, { - destination_id: destinationId, - }); stats.gauge('mixpanel_batch_import_pack_size', batchSize.import, { destination_id: destinationId, }); diff --git a/src/v0/destinations/mp/util.test.js b/src/v0/destinations/mp/util.test.js index 40cdb34649..3666081f59 100644 --- a/src/v0/destinations/mp/util.test.js +++ b/src/v0/destinations/mp/util.test.js @@ -18,7 +18,6 @@ describe('Unit test cases for groupEventsByEndpoint', () => { expect(result).toEqual({ engageEvents: [], groupsEvents: [], - trackEvents: [], importEvents: [], batchErrorRespList: [], }); @@ -122,19 +121,6 @@ describe('Unit test cases for groupEventsByEndpoint', () => { }, }, ], - trackEvents: [ - { - message: { - endpoint: '/track', - body: { - JSON_ARRAY: { - batch: '[{prop:4}]', - }, - }, - userId: 'user1', - }, - }, - ], importEvents: [ { message: { diff --git a/src/v0/destinations/pinterest_tag/utils.js b/src/v0/destinations/pinterest_tag/utils.js index 340fba498e..57d595571f 100644 --- a/src/v0/destinations/pinterest_tag/utils.js +++ b/src/v0/destinations/pinterest_tag/utils.js @@ -41,8 +41,8 @@ const getHashedValue = (key, value) => { case 'fn': case 'ge': value = Array.isArray(value) - ? value.map((val) => val.toString().toLowerCase()) - : value.toString().toLowerCase(); + ? value.map((val) => val.toString().trim().toLowerCase()) + : value.toString().trim().toLowerCase(); break; case 'ph': // phone numbers should only contain digits & should not contain leading zeros @@ -53,7 +53,7 @@ const getHashedValue = (key, value) => { case 'zp': // zip fields should only contain digits value = Array.isArray(value) - ? value.map((val) => val.toString().replace(/\D/g, '')) + ? value.map((val) => val.toString().trim().replace(/\D/g, '')) : value.toString().replace(/\D/g, ''); break; case 'hashed_maids': diff --git a/src/v0/destinations/redis/transform.js b/src/v0/destinations/redis/transform.js index 23c73f0ba4..ec0e858d3e 100644 --- a/src/v0/destinations/redis/transform.js +++ b/src/v0/destinations/redis/transform.js @@ -2,7 +2,7 @@ const lodash = require('lodash'); const flatten = require('flat'); const { InstrumentationError } = require('@rudderstack/integrations-lib'); -const { isEmpty, isObject } = require('../../util'); +const { isEmpty, isObject, getFieldValueFromMessage } = require('../../util'); const { EventType } = require('../../../constants'); // processValues: @@ -46,6 +46,19 @@ const transformSubEventTypeProfiles = (message, workspaceId, destinationId) => { }; }; +const getJSONValue = (message) => { + const eventType = message.type.toLowerCase(); + if (eventType === EventType.IDENTIFY) { + return getFieldValueFromMessage(message, 'traits'); + } + return {}; +}; + +const getTransformedPayloadForJSON = ({ key, path, value, userId }) => ({ + message: { key, path, value }, + userId, +}); + const process = (event) => { const { message, destination, metadata } = event; const messageType = message && message.type && message.type.toLowerCase(); @@ -58,15 +71,35 @@ const process = (event) => { throw new InstrumentationError('Blank userId passed in identify event'); } - const { prefix } = destination.Config; + const { prefix, useJSONModule } = destination.Config; const destinationId = destination.ID; const keyPrefix = isEmpty(prefix) ? 
'' : `${prefix.trim()}:`; + const jsonValue = getJSONValue(message); + if (isSubEventTypeProfiles(message)) { const { workspaceId } = metadata; + if (useJSONModule) { + // If redis should store information as JSON type + return getTransformedPayloadForJSON({ + key: `${workspaceId}:${destinationId}:${message.context.sources.profiles_entity}:${message.context.sources.profiles_id_type}:${message.userId}`, + path: message.context.sources.profiles_model, + value: jsonValue, + userId: message.userId, + }); + } return transformSubEventTypeProfiles(message, workspaceId, destinationId); } + if (useJSONModule) { + // If redis should store information as JSON type + return getTransformedPayloadForJSON({ + key: `${keyPrefix}user:${lodash.toString(message.userId)}`, + value: jsonValue, + userId: message.userId, + }); + } + const hmap = { key: `${keyPrefix}user:${lodash.toString(message.userId)}`, fields: {}, diff --git a/src/v0/destinations/slack/util.js b/src/v0/destinations/slack/util.js index f5d407018b..2267aa0bcd 100644 --- a/src/v0/destinations/slack/util.js +++ b/src/v0/destinations/slack/util.js @@ -83,7 +83,7 @@ const buildDefaultTraitTemplate = (traitsList, traits, template) => { generatedStringFromTemplate += `${trait}: {{"${trait}"}} `; }); // else with all traits - if (traitsList.length === 0) { + if (traitsList.length === 0 && !!traits) { Object.keys(traits).forEach((traitKey) => { generatedStringFromTemplate += `${traitKey}: {{"${traitKey}"}} `; }); diff --git a/src/v0/destinations/tiktok_ads/data/TikTokTrackV2.json b/src/v0/destinations/tiktok_ads/data/TikTokTrackV2.json index 530d6e392a..2910f1b44c 100644 --- a/src/v0/destinations/tiktok_ads/data/TikTokTrackV2.json +++ b/src/v0/destinations/tiktok_ads/data/TikTokTrackV2.json @@ -25,10 +25,7 @@ }, { "destKey": "properties.content_type", - "sourceKeys": ["properties.contentType", "properties.content_type"], - "metadata": { - "defaultValue": "product" - } + "sourceKeys": ["properties.contentType", "properties.content_type"] }, { "destKey": "properties.shop_id", diff --git a/src/v0/destinations/tiktok_ads/transformV2.js b/src/v0/destinations/tiktok_ads/transformV2.js index 3bd8699e3a..8760dee52c 100644 --- a/src/v0/destinations/tiktok_ads/transformV2.js +++ b/src/v0/destinations/tiktok_ads/transformV2.js @@ -31,7 +31,7 @@ const { JSON_MIME_TYPE } = require('../../util/constant'); * @param {*} event * @returns track payload */ -const getTrackResponsePayload = (message, destConfig, event) => { +const getTrackResponsePayload = (message, destConfig, event, setDefaultForContentType = true) => { const payload = constructPayload(message, trackMappingV2); // if contents is not an array, convert it into an array @@ -42,6 +42,8 @@ const getTrackResponsePayload = (message, destConfig, event) => { // if contents is not present but we have properties.products present, which has a superset of the contents fields if (!payload.properties?.contents && message.properties?.products) { // retrieving data from products only when contents is not present + // properties object may be empty, in which case the next line would throw an error + payload.properties = payload.properties || {}; payload.properties.contents = getContents(message, false); } @@ -53,6 +55,12 @@ const getTrackResponsePayload = (message, destConfig, event) => { if (destConfig.hashUserProperties && isDefinedAndNotNullAndNotEmpty(payload.user)) { payload.user = hashUserField(payload.user); } + // setting the content_type default value for all standard events except `page-view` + if 
(!payload.properties?.content_type && setDefaultForContentType) { + // properties object may be empty, in which case the next line would throw an error + payload.properties = payload.properties || {}; + payload.properties.content_type = 'product'; + } payload.event = event; // add partner name and return payload return removeUndefinedAndNullValues(payload); @@ -90,13 +98,17 @@ const trackResponseBuilder = async (message, { Config }) => { }); } }); - } else { + } else if (!eventNameMapping[event]) { /* + Custom Event Case -> if there is no event mapping, we build the payload with the custom event received. For custom events we do not want to lowercase or trim the event; we just want to send it as-is. Doc https://ads.tiktok.com/help/article/standard-events-parameters?lang=en */ - event = eventNameMapping[event] || message.event; - // if there exists no event mapping we will build payload with custom event recieved + event = message.event; + responseList.push(getTrackResponsePayload(message, Config, event, false)); + } else { + // incoming event name is already a standard event name + event = eventNameMapping[event]; responseList.push(getTrackResponsePayload(message, Config, event)); } // set event source and event_source_id diff --git a/src/v0/destinations/yahoo_dsp/util.js b/src/v0/destinations/yahoo_dsp/util.js index 255f84d1c9..54002a3bce 100644 --- a/src/v0/destinations/yahoo_dsp/util.js +++ b/src/v0/destinations/yahoo_dsp/util.js @@ -51,7 +51,7 @@ const populateIdentifiers = (audienceList, Config) => { } // here, hashing the data if it is not already hashed and pushing it into the seedList array. if (hashRequired) { - seedList.push(sha256(userTraits[audienceAttribute])); + seedList.push(sha256(userTraits[audienceAttribute].trim())); } else { seedList.push(userTraits[audienceAttribute]); } diff --git a/src/v0/sources/adjust/config.ts b/src/v0/sources/adjust/config.ts new file mode 100644 index 0000000000..d1c6ab8242 --- /dev/null +++ b/src/v0/sources/adjust/config.ts @@ -0,0 +1,16 @@ +export const excludedFieldList = [ + 'activity_kind', + 'event', + 'event_name', + 'gps_adid', + 'idfa', + 'idfv', + 'adid', + 'tracker', + 'tracker_name', + 'app_name', + 'ip_address', + 'tracking_enabled', + 'tracker_token', + 'created_at', +]; diff --git a/src/v0/sources/adjust/mapping.json b/src/v0/sources/adjust/mapping.json new file mode 100644 index 0000000000..60ea66281e --- /dev/null +++ b/src/v0/sources/adjust/mapping.json @@ -0,0 +1,52 @@ +[ + { + "sourceKeys": "activity_kind", + "destKeys": "properties.activity_kind" + }, + { + "sourceKeys": "event", + "destKeys": "properties.event_token" + }, + { + "sourceKeys": "event_name", + "destKeys": "event" + }, + { + "sourceKeys": "gps_adid", + "destKeys": "properties.gps_adid" + }, + { + "sourceKeys": "idfa", + "destKeys": "context.device.advertisingId" + }, + { + "sourceKeys": "idfv", + "destKeys": "context.device.id" + }, + { + "sourceKeys": "adid", + "destKeys": "context.device.id " + }, + { + "sourceKeys": "tracker", + "destKeys": "properties.tracker" + }, + { + "sourceKeys": "tracker_name", + "destKeys": "properties.tracker_name" + }, + { "sourceKeys": "tracker_token", "destKeys": "properties.tracker_token" }, + + { + "sourceKeys": "app_name", + "destKeys": "context.app.name" + }, + { + "sourceKeys": "ip_address", + "destKeys": ["context.ip", "request_ip"] + }, + { + "sourceKeys": "tracking_enabled", + "destKeys": "properties.tracking_enabled" + } +] diff --git a/src/v0/sources/adjust/transform.js b/src/v0/sources/adjust/transform.js new file mode 100644 index 
0000000000..8568622aeb --- /dev/null +++ b/src/v0/sources/adjust/transform.js @@ -0,0 +1,61 @@ +const lodash = require('lodash'); +const path = require('path'); +const fs = require('fs'); +const { TransformationError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const Message = require('../message'); +const { CommonUtils } = require('../../../util/common'); +const { excludedFieldList } = require('./config'); +const { extractCustomFields, generateUUID } = require('../../util'); + +// ref : https://help.adjust.com/en/article/global-callbacks#general-recommended-placeholders +// import mapping json using JSON.parse to preserve object key order +const mapping = JSON.parse(fs.readFileSync(path.resolve(__dirname, './mapping.json'), 'utf-8')); + +const formatProperties = (input) => { + const { query_parameters: qParams } = input; + logger.debug(`[Adjust] Input event: query_params: ${JSON.stringify(qParams)}`); + if (!qParams) { + throw new TransformationError('Query_parameters is missing'); + } + const formattedOutput = {}; + Object.entries(qParams).forEach(([key, [value]]) => { + formattedOutput[key] = value; + }); + return formattedOutput; +}; + +const processEvent = (inputEvent) => { + const message = new Message(`Adjust`); + const event = lodash.cloneDeep(inputEvent); + const formattedPayload = formatProperties(event); + // event type is always track + const eventType = 'track'; + message.setEventType(eventType); + message.setPropertiesV2(formattedPayload, mapping); + let customProperties = {}; + customProperties = extractCustomFields( + formattedPayload, + customProperties, + 'root', + excludedFieldList, + ); + message.properties = { ...message.properties, ...customProperties }; + + if (formattedPayload.created_at) { + const ts = new Date(formattedPayload.created_at * 1000).toISOString(); + message.setProperty('originalTimestamp', ts); + message.setProperty('timestamp', ts); + } + + // Adjust does not have the concept of a user, but we need to set a random anonymousId so that the server accepts the message + message.anonymousId = generateUUID(); + return message; +}; + +// This function converts the incoming payload to an array (if it is not one already) and sends each event to processEvent +const process = (events) => { + const eventsArray = CommonUtils.toArray(events); + return eventsArray.map(processEvent); +}; + +module.exports = { process }; diff --git a/src/v0/sources/auth0/mapping.json b/src/v0/sources/auth0/mapping.json index 45dcf939ad..bc5869a19b 100644 --- a/src/v0/sources/auth0/mapping.json +++ b/src/v0/sources/auth0/mapping.json @@ -62,5 +62,9 @@ { "sourceKeys": "date", "destKeys": ["originalTimestamp", "sentAt"] + }, + { + "sourceKeys": "type", + "destKeys": "source_type" } ] diff --git a/src/v0/sources/auth0/transform.js b/src/v0/sources/auth0/transform.js index 4b78621418..5a1bf42e28 100644 --- a/src/v0/sources/auth0/transform.js +++ b/src/v0/sources/auth0/transform.js @@ -1,11 +1,11 @@ const path = require('path'); const fs = require('fs'); -const { InstrumentationError } = require('@rudderstack/integrations-lib'); const { removeUndefinedAndNullValues } = require('../../util'); const { getGroupId } = require('./util'); // import mapping json using JSON.parse to preserve object key order const mapping = JSON.parse(fs.readFileSync(path.resolve(__dirname, './mapping.json'), 'utf-8')); const Message = require('../message'); +const { generateUUID } = require('../../util'); // Ref: https://auth0.com/docs/logs/references/log-event-type-codes const eventNameMap = 
JSON.parse( @@ -59,11 +59,11 @@ function processEvents(eventList) { } else { response = prepareTrackPayload(data); } - if (response?.userId) { - // eslint-disable-next-line camelcase - response.properties.log_id = log_id; - responses.push(removeUndefinedAndNullValues(response)); - } + + // eslint-disable-next-line camelcase + response.properties.log_id = log_id; + response.anonymousId = generateUUID(); + responses.push(removeUndefinedAndNullValues(response)); } }); return responses; @@ -74,11 +74,7 @@ function process(events) { if (!Array.isArray(events)) { eventList = events.logs || [events]; } - const responses = processEvents(eventList); - if (responses.length === 0) { - throw new InstrumentationError('UserId is not present'); - } - return responses; + return processEvents(eventList); } exports.process = process; diff --git a/src/v0/sources/slack/mapping.json b/src/v0/sources/slack/mapping.json new file mode 100644 index 0000000000..f7825bd88d --- /dev/null +++ b/src/v0/sources/slack/mapping.json @@ -0,0 +1,50 @@ +[ + { + "sourceKeys": "event.type", + "destKeys": "event" + }, + { + "sourceKeys": "event.user.tz", + "destKeys": "timezone" + }, + { + "sourceKeys": "event.user.profile.email", + "destKeys": "context.traits.email" + }, + { + "sourceKeys": "event.user.profile.phone", + "destKeys": "context.traits.phone" + }, + { + "sourceKeys": "event.user.profile.real_name_normalized", + "destKeys": "context.traits.name" + }, + { + "sourceKeys": "event.user.profile.real_name", + "destKeys": "context.traits.name" + }, + { + "sourceKeys": "event.user.profile.display_name_normalized", + "destKeys": "context.traits.name" + }, + { + "sourceKeys": "event.user.profile.display_name", + "destKeys": "context.traits.name" + }, + { + "sourceKeys": "event.user.profile.first_name", + "destKeys": "context.traits.firstName" + }, + { + "sourceKeys": "event.user.profile.last_name", + "destKeys": "context.traits.lastName" + }, + { + "sourceKeys": "event.user.profile.image_original", + "destKeys": "context.traits.avatar" + }, + { + "sourceKeys": "event.user.profile.title", + "destKeys": "context.traits.title" + } +] diff --git a/src/v0/sources/slack/transform.js b/src/v0/sources/slack/transform.js new file mode 100644 index 0000000000..98324a7b65 --- /dev/null +++ b/src/v0/sources/slack/transform.js @@ -0,0 +1,110 @@ +const sha256 = require('sha256'); +const { TransformationError } = require('@rudderstack/integrations-lib'); +const Message = require('../message'); +const { mapping, tsToISODate, normalizeEventName } = require('./util'); +const { generateUUID, removeUndefinedAndNullValues } = require('../../util'); +const { JSON_MIME_TYPE } = require('../../util/constant'); +const { EventType } = require('../../../constants'); + +/** + * Transform event data to the RudderStack-supported standard event schema + * @param {Object} slackPayload - The complete data received on the webhook from Slack + * @param {Object} slackPayload.event - The data object specific to the Slack event received. Has a different schema for different event types. 
+ * @returns {Object} Event data transformed to the RudderStack-supported standard event schema + */ +function processNormalEvent(slackPayload) { + const message = new Message(`SLACK`); + if (!slackPayload?.event) { + throw new TransformationError('Missing the required event data'); + } + switch (slackPayload.event.type) { + case 'team_join': + message.setEventType(EventType.IDENTIFY); + break; + case 'user_change': + message.setEventType(EventType.IDENTIFY); + break; + default: + message.setEventType(EventType.TRACK); + break; + } + message.setEventName(normalizeEventName(slackPayload.event.type)); + if (!slackPayload.event.user) { + throw new TransformationError('UserId not found'); + } + const stringifiedUserId = + typeof slackPayload.event.user === 'object' + ? slackPayload.event.user.id + : slackPayload.event.user; + message.setProperty( + 'anonymousId', + stringifiedUserId ? sha256(stringifiedUserId).toString().substring(0, 36) : generateUUID(), + ); + // Set the user id received from Slack into externalId + message.context.externalId = [ + { + type: 'slackUserId', + id: stringifiedUserId, + }, + ]; + // Set the standard common event fields. More info at https://www.rudderstack.com/docs/event-spec/standard-events/common-fields/ + // originalTimestamp - The actual time (in UTC) when the event occurred + message.setProperty( + 'originalTimestamp', + tsToISODate(slackPayload.event.ts || slackPayload.event.event_ts || slackPayload.event_time), + ); + // sentAt - Time, client-side, when the event was sent from the client to RudderStack + message.setProperty('sentAt', tsToISODate(slackPayload.event_time)); + // Map the remaining standard event properties according to mappings for the payload properties + message.setPropertiesV2(slackPayload, mapping); + // Copy the complete Slack event payload to message.properties + if (!message.properties) message.properties = {}; + Object.assign(message.properties, slackPayload.event); + return message; +} + +/** + * Handles a special event for webhook url verification. + * Responds back with the challenge key received in the request. + * Reference - https://api.slack.com/apis/connections/events-api#subscribing + * @param {Object} event - Event data received from Slack + * @param {string} event.challenge - The challenge key received in the request + * @returns response that needs to be sent back to the source, along with the same challenge key received in the request + */ +function processUrlVerificationEvent(event) { + const response = { challenge: event?.challenge }; + return { + outputToSource: { + body: Buffer.from(JSON.stringify(response)).toString('base64'), + contentType: JSON_MIME_TYPE, + }, + statusCode: 200, + }; +} + +/** + * Checks if the event is a special url verification event or not. + * Slack sends this event at the time of webhook setup to verify webhook url ownership for security purposes. + * Reference - https://api.slack.com/apis/connections/events-api#subscribing + * @param {Object} event - Event data received from Slack + * @param {string} event.challenge - The challenge key received in the request + * @param {string} event.type - The type of Slack event. `url_verification` when it is a special webhook url verification event. 
+ * @returns {boolean} true if it is a valid challenge event for url verification + */ +function isWebhookUrlVerificationEvent(event) { + return event?.type === 'url_verification' && !!event?.challenge; +} + +/** + * Processes the event with the needed transformation and sends back the response + * Reference - https://api.slack.com/apis/connections/events-api + * @param {Object} event + */ +function process(event) { + const response = isWebhookUrlVerificationEvent(event) + ? processUrlVerificationEvent(event) + : processNormalEvent(event); + return removeUndefinedAndNullValues(response); +} + +exports.process = process; diff --git a/src/v0/sources/slack/util.js b/src/v0/sources/slack/util.js new file mode 100644 index 0000000000..b9c39db223 --- /dev/null +++ b/src/v0/sources/slack/util.js @@ -0,0 +1,62 @@ +/* eslint-disable no-restricted-syntax */ +const path = require('path'); +const fs = require('fs'); + +const mapping = JSON.parse(fs.readFileSync(path.resolve(__dirname, './mapping.json'), 'utf-8')); + +/** + * Converts a Slack timestamp to RudderStack's standard timestamp format - ISO 8601 date string. + * The Slack timestamp is a string that represents a Unix timestamp (seconds since the Unix Epoch) + * with fractional seconds for millisecond precision. + * If the timestamp is not provided, the function returns the current date and time in ISO 8601 format. + * + * @param {string} [slackTs] - The Slack timestamp to be converted. + * @returns {string} The ISO 8601 formatted date string corresponding to the given Slack timestamp + * or the current date and time if no timestamp is provided. + * + * @example + * // Convert a Slack timestamp to an ISO 8601 date string + * const slackTimestamp = "1609459200.123000"; + * const isoDate = tsToISODate(slackTimestamp); + * console.log(isoDate); // Output: "2021-01-01T00:00:00.123Z" (always in UTC) + */ +function tsToISODate(slackTs) { + // Default to current date if slackTs is not provided + if (!slackTs) return new Date().toISOString(); + + // Convert the slackTs string into a Unix timestamp in milliseconds + const msTimestamp = parseFloat(slackTs) * 1000; + // Convert to a date object + if (Number.isNaN(msTimestamp)) { + // If the timestamp is not a valid float, parseFloat returns NaN; stop processing the timestamp and return null + return null; + } + const date = new Date(msTimestamp); + + // Return the date in ISO 8601 format + return date.toISOString(); } + +/** + * Converts an event name from snake_case to a RudderStack format - space-separated string with each word capitalized. + * @param {string} evtName - The event name in snake_case format to be normalized. + * @returns {string} The normalized event name with spaces between words and each word capitalized. 
+ * + * @example + * // Convert a Slack event name to RudderStack format + * const eventName = "member_joined_channel"; + * const normalizedEventName = normalizeEventName(eventName); + * console.log(normalizedEventName); // Output: "Member Joined Channel" + */ +function normalizeEventName(evtName) { + try { + return evtName + .split('_') + .map((s) => s.charAt(0).toUpperCase() + s.slice(1)) + .join(' '); + } catch (e) { + return 'undefined'; + } +} + +module.exports = { mapping, tsToISODate, normalizeEventName }; diff --git a/src/v0/sources/slack/util.test.js b/src/v0/sources/slack/util.test.js new file mode 100644 index 0000000000..b83f22f058 --- /dev/null +++ b/src/v0/sources/slack/util.test.js @@ -0,0 +1,51 @@ +const { tsToISODate, normalizeEventName } = require('./util.js'); + +describe('Unit test cases for tsToISODate', () => { + it('should return a valid iso date string for a valid slack timestamp input', () => { + const result = tsToISODate('1609459200.123000'); + expect(result).toBe('2021-01-01T00:00:00.123Z'); + }); + + it('should return iso date string of today when slack timestamp argument is not provided', () => { + const result = tsToISODate(); + expect(result).not.toBeNull(); + expect(typeof result).toBe('string'); + expect(result).not.toHaveLength(0); + // Check if the result is a valid date + const dateObject = new Date(result); + const resultTime = dateObject.getTime(); + expect(resultTime).not.toBeNaN(); + // Check if the result is close to the current time with a precision tolerance of up to a minute + const nowTime = new Date().getTime(); + const TOLERANCE = 60000; // In ms + const timeDiff = Math.abs(nowTime - resultTime); + expect(timeDiff).toBeLessThanOrEqual(TOLERANCE); + }); + + it('should return null if the slack timestamp argument is invalid', () => { + const result = tsToISODate('invalid.slack.timestamp'); + expect(result).toBeNull(); + }); +}); + +describe('Unit test cases for normalizeEventName', () => { + it('should normalize a valid snake case string "member_joined_channel" to RudderStack format "Member Joined Channel"', () => { + const result = normalizeEventName('member_joined_channel'); + expect(result).toBe('Member Joined Channel'); + }); + + it('should return undefined string when event name is undefined', () => { + const result = normalizeEventName(undefined); + expect(result).toBe('undefined'); + }); + + it('should return undefined string when event name is null', () => { + const result = normalizeEventName(null); + expect(result).toBe('undefined'); + }); + + it('should return undefined string when event name argument cannot be parsed to string', () => { + const result = normalizeEventName({}); + expect(result).toBe('undefined'); + }); +}); diff --git a/src/v0/util/deleteUserUtils.js b/src/v0/util/deleteUserUtils.js index 6cf16d7f9e..22b5ba6a81 100644 --- a/src/v0/util/deleteUserUtils.js +++ b/src/v0/util/deleteUserUtils.js @@ -18,4 +18,16 @@ const getUserIdBatches = (userAttributes, MAX_BATCH_SIZE) => { return userIdBatches; }; -module.exports = { getUserIdBatches }; +const getCustomIdBatches = (userAttributes, customIdentifier, MAX_BATCH_SIZE) => { + const identifierArray = []; + userAttributes.forEach((userAttribute) => { + // Dropping the user if customIdentifier is not present + if (userAttribute[customIdentifier]) { + identifierArray.push(userAttribute[customIdentifier]); + } + }); + const identifierBatches = lodash.chunk(identifierArray, MAX_BATCH_SIZE); + return identifierBatches; +}; + +module.exports = { getUserIdBatches, getCustomIdBatches }; 
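For reference, here is a minimal usage sketch (not part of the patch above) of how the new `getCustomIdBatches` helper could be consumed by a destination's user-deletion flow; the identifier name, batch size, and require path are illustrative assumptions only.

```javascript
// Hypothetical example: chunk email identifiers before calling a destination's bulk-delete API.
// The relative require path assumes this snippet sits next to src/v0/util/deleteUserUtils.js.
const { getCustomIdBatches } = require('./deleteUserUtils');

const userAttributes = [
  { userId: 'u1', email: 'a@example.com' },
  { userId: 'u2' }, // dropped: no custom identifier present
  { userId: 'u3', email: 'c@example.com' },
];

// A batch size of 2 is illustrative; real destinations define their own MAX_BATCH_SIZE.
const batches = getCustomIdBatches(userAttributes, 'email', 2);
console.log(batches); // [ [ 'a@example.com', 'c@example.com' ] ]
```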
diff --git a/src/v0/util/facebookUtils/index.js b/src/v0/util/facebookUtils/index.js index c7753d255f..7462320cca 100644 --- a/src/v0/util/facebookUtils/index.js +++ b/src/v0/util/facebookUtils/index.js @@ -292,7 +292,13 @@ const formingFinalResponse = ( throw new TransformationError('Payload could not be constructed'); }; +const isHtmlFormat = (string) => { + const htmlTags = /<(?!(!doctype\s*html|html))\b[^>]*>[\S\s]*?<\/[^>]*>/i; + return htmlTags.test(string); +}; + module.exports = { + isHtmlFormat, getContentType, getContentCategory, transformedPayloadData, diff --git a/src/v0/util/facebookUtils/index.test.js b/src/v0/util/facebookUtils/index.test.js index 20c4ee59f2..1a2de4ed12 100644 --- a/src/v0/util/facebookUtils/index.test.js +++ b/src/v0/util/facebookUtils/index.test.js @@ -3,6 +3,7 @@ const { fetchUserData, deduceFbcParam, getContentType, + isHtmlFormat, } = require('./index'); const sha256 = require('sha256'); const { MAPPING_CONFIG, CONFIG_CATEGORIES } = require('../../destinations/facebook_pixel/config'); @@ -639,3 +640,53 @@ describe('getContentType', () => { expect(result).toBe(defaultValue); }); }); + +describe('isHtmlFormat', () => { + it('should return false for Json', () => { + expect(isHtmlFormat('{"a": 1, "b":2}')).toBe(false); + }); + + it('should return false for empty Json', () => { + expect(isHtmlFormat('{}')).toBe(false); + }); + + it('should return false for undefined', () => { + expect(isHtmlFormat(undefined)).toBe(false); + }); + + it('should return false for null', () => { + expect(isHtmlFormat(null)).toBe(false); + }); + + it('should return false for empty array', () => { + expect(isHtmlFormat([])).toBe(false); + }); + + it('should return true for html doctype', () => { + expect( + isHtmlFormat( + '
', + ), + ).toBe(true); + }); + + it('should return true for html', () => { + expect( + isHtmlFormat( + '