From b982cb4a0cf0647bc763070e12e2425d96175a85 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Wed, 22 May 2024 05:23:35 +0530 Subject: [PATCH 01/37] chore: add logging capability at integration level Signed-off-by: Sai Sankeerth --- src/adapters/utils/networkUtils.js | 6 +- src/controllers/destination.ts | 8 +- src/services/destination/cdkV2Integration.ts | 5 +- src/services/destination/nativeIntegration.ts | 7 +- src/services/misc.ts | 15 +-- src/services/source/nativeIntegration.ts | 4 +- .../config.js | 1 + .../networkHandler.js | 100 +++++++++++++----- .../utils.js | 18 +++- src/v0/util/index.js | 13 +++ .../destinations/am/dataDelivery/data.ts | 18 ++++ 11 files changed, 137 insertions(+), 58 deletions(-) diff --git a/src/adapters/utils/networkUtils.js b/src/adapters/utils/networkUtils.js index 0dcb9931e9..4b8dd4fc39 100644 --- a/src/adapters/utils/networkUtils.js +++ b/src/adapters/utils/networkUtils.js @@ -144,10 +144,11 @@ const processAxiosResponse = (clientResponse) => { } // non 2xx status handling for axios response if (response) { - const { data, status } = response; + const { data, status, headers } = response; return { response: data || '', status: status || 500, + ...(isDefinedAndNotNullAndNotEmpty(headers) ? { headers } : {}), }; } // (edge case) response and code is not present @@ -157,10 +158,11 @@ const processAxiosResponse = (clientResponse) => { }; } // success(2xx) axios response - const { data, status } = clientResponse.response; + const { data, status, headers } = clientResponse.response; return { response: data || '', status: status || 500, + ...(isDefinedAndNotNullAndNotEmpty(headers) ? 
{ headers } : {}), }; }; diff --git a/src/controllers/destination.ts b/src/controllers/destination.ts index 92ef4b4c19..b13e728761 100644 --- a/src/controllers/destination.ts +++ b/src/controllers/destination.ts @@ -14,7 +14,7 @@ import { import { DynamicConfigParser } from '../util/dynamicConfigParser'; import stats from '../util/stats'; import { getIntegrationVersion } from '../util/utils'; -import { checkInvalidRtTfEvents } from '../v0/util'; +import { checkInvalidRtTfEvents, getLoggableData } from '../v0/util'; import tags from '../v0/util/tags'; import { ControllerUtility } from './util'; @@ -34,7 +34,7 @@ export class DestinationController { }); const integrationService = ServiceSelector.getDestinationService(events); const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), + ...getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), }); try { integrationService.init(); @@ -114,7 +114,7 @@ export class DestinationController { } const metaTags = MiscService.getMetaTags(events[0].metadata); const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), + ...getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), }); stats.histogram('dest_transform_input_events', events.length, { destination, @@ -173,7 +173,7 @@ export class DestinationController { const destination = routerRequest.destType; let events = routerRequest.input; const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), + ...getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), }); const integrationService = ServiceSelector.getDestinationService(events); try { diff --git a/src/services/destination/cdkV2Integration.ts b/src/services/destination/cdkV2Integration.ts index a649da9154..78bff7495d 100644 --- a/src/services/destination/cdkV2Integration.ts +++ 
b/src/services/destination/cdkV2Integration.ts @@ -22,6 +22,7 @@ import stats from '../../util/stats'; import { CatchErr, FixMe } from '../../util/types'; import tags from '../../v0/util/tags'; import { MiscService } from '../misc'; +import { getLoggableData } from '../../v0/util'; import { DestinationPostTransformationService } from './postTransformation'; export class CDKV2DestinationService implements DestinationService { @@ -68,7 +69,7 @@ export class CDKV2DestinationService implements DestinationService { tags.FEATURES.PROCESSOR, ); metaTo.metadata = event.metadata; - const loggerWithCtx = logger.child({ ...MiscService.getLoggableData(metaTo.errorDetails) }); + const loggerWithCtx = logger.child({ ...getLoggableData(metaTo.errorDetails) }); try { const transformedPayloads: | ProcessorTransformationOutput @@ -132,7 +133,7 @@ export class CDKV2DestinationService implements DestinationService { ); metaTo.metadata = destInputArray[0].metadata; const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(metaTo.errorDetails), + ...getLoggableData(metaTo.errorDetails), }); try { const doRouterTransformationResponse: RouterTransformationResponse[] = diff --git a/src/services/destination/nativeIntegration.ts b/src/services/destination/nativeIntegration.ts index 8fd0f09857..f34d717b03 100644 --- a/src/services/destination/nativeIntegration.ts +++ b/src/services/destination/nativeIntegration.ts @@ -25,6 +25,7 @@ import { } from '../../types/index'; import stats from '../../util/stats'; import tags from '../../v0/util/tags'; +import { getLoggableData } from '../../v0/util'; import { MiscService } from '../misc'; import { DestinationPostTransformationService } from './postTransformation'; @@ -72,7 +73,7 @@ export class NativeIntegrationDestinationService implements DestinationService { tags.FEATURES.PROCESSOR, ); metaTO.metadata = event.metadata; - const loggerWithCtx = logger.child({ ...MiscService.getLoggableData(metaTO.errorDetails) }); + const loggerWithCtx = 
logger.child({ ...getLoggableData(metaTO.errorDetails) }); try { const transformedPayloads: | ProcessorTransformationOutput @@ -121,7 +122,7 @@ export class NativeIntegrationDestinationService implements DestinationService { tags.FEATURES.ROUTER, ); const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(metaTO.errorDetails), + ...getLoggableData(metaTO.errorDetails), }); try { const doRouterTransformationResponse: RouterTransformationResponse[] = @@ -176,7 +177,7 @@ export class NativeIntegrationDestinationService implements DestinationService { ); metaTO.metadatas = events.map((event) => event.metadata); const loggerWithCtx = logger.child({ - ...MiscService.getLoggableData(metaTO.errorDetails), + ...getLoggableData(metaTO.errorDetails), }); try { const destBatchedRequests: RouterTransformationResponse[] = destHandler.batch( diff --git a/src/services/misc.ts b/src/services/misc.ts index 3df1196c1d..3727f6df7b 100644 --- a/src/services/misc.ts +++ b/src/services/misc.ts @@ -6,6 +6,7 @@ import path from 'path'; import { DestHandlerMap } from '../constants/destinationCanonicalNames'; import { getCPUProfile, getHeapProfile } from '../middleware'; import { ErrorDetailer, Metadata } from '../types'; +import { getLoggableData } from '../v0/util'; export class MiscService { public static getDestHandler(dest: string, version: string) { @@ -76,20 +77,8 @@ export class MiscService { return getHeapProfile(); } - public static getLoggableData(errorDetailer: ErrorDetailer): Partial { - return { - ...(errorDetailer?.destinationId && { destinationId: errorDetailer.destinationId }), - ...(errorDetailer?.sourceId && { sourceId: errorDetailer.sourceId }), - ...(errorDetailer?.workspaceId && { workspaceId: errorDetailer.workspaceId }), - ...(errorDetailer?.destType && { destType: errorDetailer.destType }), - ...(errorDetailer?.module && { module: errorDetailer.module }), - ...(errorDetailer?.implementation && { implementation: errorDetailer.implementation }), - 
...(errorDetailer?.feature && { feature: errorDetailer.feature }), - }; - } - public static logError(message: string, errorDetailer: ErrorDetailer) { - const loggableExtraData: Partial = this.getLoggableData(errorDetailer); + const loggableExtraData: Partial = getLoggableData(errorDetailer); logger.errorw(message || '', loggableExtraData); } } diff --git a/src/services/source/nativeIntegration.ts b/src/services/source/nativeIntegration.ts index 2ecfc30066..b68b5c44ad 100644 --- a/src/services/source/nativeIntegration.ts +++ b/src/services/source/nativeIntegration.ts @@ -9,7 +9,7 @@ import { import stats from '../../util/stats'; import { FixMe } from '../../util/types'; import tags from '../../v0/util/tags'; -import { MiscService } from '../misc'; +import { getLoggableData } from '../../v0/util'; import { SourcePostTransformationService } from './postTransformation'; export class NativeIntegrationSourceService implements SourceService { @@ -36,7 +36,7 @@ export class NativeIntegrationSourceService implements SourceService { ): Promise { const sourceHandler = FetchHandler.getSourceHandler(sourceType, version); const metaTO = this.getTags(); - const loggerWithCtx = logger.child({ ...MiscService.getLoggableData(metaTO.errorDetails) }); + const loggerWithCtx = logger.child({ ...getLoggableData(metaTO.errorDetails) }); const respList: SourceTransformationResponse[] = await Promise.all( sourceEvents.map(async (sourceEvent) => { try { diff --git a/src/v0/destinations/google_adwords_offline_conversions/config.js b/src/v0/destinations/google_adwords_offline_conversions/config.js index f065be946c..6eec1068a6 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/config.js +++ b/src/v0/destinations/google_adwords_offline_conversions/config.js @@ -48,6 +48,7 @@ const consentConfigMap = { }; module.exports = { + destType: 'google_adwords_offline_conversions', trackClickConversionsMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.TRACK_CLICK_CONVERSIONS_CONFIG.name], 
trackCallConversionsMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.TRACK_CALL_CONVERSIONS_CONFIG.name], diff --git a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js index 5541fd6e1e..36badb5bf8 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js @@ -5,6 +5,7 @@ const { AbortedError, NetworkInstrumentationError, NetworkError, + structuredLogger: logger, } = require('@rudderstack/integrations-lib'); const { prepareProxyRequest, httpSend, httpPOST } = require('../../../adapters/network'); const { @@ -12,10 +13,11 @@ const { getHashFromArray, isDefinedAndNotNullAndNotEmpty, getAuthErrCategoryFromStCode, + getLoggableData, } = require('../../util'); const { getConversionActionId } = require('./utils'); const Cache = require('../../util/cache'); -const { CONVERSION_CUSTOM_VARIABLE_CACHE_TTL, SEARCH_STREAM } = require('./config'); +const { CONVERSION_CUSTOM_VARIABLE_CACHE_TTL, SEARCH_STREAM, destType } = require('./config'); const { processAxiosResponse, getDynamicErrorType, @@ -24,14 +26,14 @@ const tags = require('../../util/tags'); const conversionCustomVariableCache = new Cache(CONVERSION_CUSTOM_VARIABLE_CACHE_TTL); -const createJob = async (endpoint, headers, payload) => { +const createJob = async ({ endpoint, headers, payload, metadata }) => { const endPoint = `${endpoint}:create`; let createJobResponse = await httpPOST( endPoint, payload, { headers }, { - destType: 'google_adwords_offline_conversions', + destType, feature: 'proxy', endpointPath: `/create`, requestMethod: 'POST', @@ -39,7 +41,11 @@ const createJob = async (endpoint, headers, payload) => { }, ); createJobResponse = processAxiosResponse(createJobResponse); - const { response, status } = createJobResponse; + const { response, status, headers: responseHeaders } = createJobResponse; + 
logger.debug(`[${destType.toUpperCase()}] create job`, { + ...getLoggableData(metadata), + ...(responseHeaders ? { responseHeaders } : {}), + }); if (!isHttpStatusSuccess(status)) { throw new AbortedError( `[Google Ads Offline Conversions]:: ${response?.error?.message} during google_ads_offline_store_conversions Job Creation`, @@ -51,7 +57,7 @@ const createJob = async (endpoint, headers, payload) => { return response.resourceName.split('/')[3]; }; -const addConversionToJob = async (endpoint, headers, jobId, payload) => { +const addConversionToJob = async ({ endpoint, headers, jobId, payload, metadata }) => { const endPoint = `${endpoint}/${jobId}:addOperations`; let addConversionToJobResponse = await httpPOST( endPoint, @@ -66,18 +72,23 @@ const addConversionToJob = async (endpoint, headers, jobId, payload) => { }, ); addConversionToJobResponse = processAxiosResponse(addConversionToJobResponse); - if (!isHttpStatusSuccess(addConversionToJobResponse.status)) { + const { response, status, headers: responseHeaders } = addConversionToJobResponse; + logger.debug(`[${destType.toUpperCase()}] add conversion to job`, { + ...getLoggableData(metadata), + ...(responseHeaders ? 
{ responseHeaders } : {}), + }); + if (!isHttpStatusSuccess(status)) { throw new AbortedError( - `[Google Ads Offline Conversions]:: ${addConversionToJobResponse.response?.error?.message} during google_ads_offline_store_conversions Add Conversion`, - addConversionToJobResponse.status, - addConversionToJobResponse.response, + `[Google Ads Offline Conversions]:: ${response?.error?.message} during google_ads_offline_store_conversions Add Conversion`, + status, + response, getAuthErrCategoryFromStCode(get(addConversionToJobResponse, 'status')), ); } return true; }; -const runTheJob = async (endpoint, headers, payload, jobId) => { +const runTheJob = async ({ endpoint, headers, payload, jobId, metadata }) => { const endPoint = `${endpoint}/${jobId}:run`; const executeJobResponse = await httpPOST( endPoint, @@ -91,6 +102,11 @@ const runTheJob = async (endpoint, headers, payload, jobId) => { module: 'dataDelivery', }, ); + const { headers: responseHeaders } = executeJobResponse; + logger.debug(`[${destType.toUpperCase()}] run job`, { + ...getLoggableData(metadata), + ...(responseHeaders ? 
{ responseHeaders } : {}), + }); return executeJobResponse; }; @@ -102,7 +118,7 @@ const runTheJob = async (endpoint, headers, payload, jobId) => { * @param {*} headers * @returns */ -const getConversionCustomVariable = async (headers, params) => { +const getConversionCustomVariable = async ({ headers, params, metadata }) => { const conversionCustomVariableKey = sha256(params.customerId).toString(); return conversionCustomVariableCache.get(conversionCustomVariableKey, async () => { const data = { @@ -120,15 +136,20 @@ const getConversionCustomVariable = async (headers, params) => { module: 'dataDelivery', }); searchStreamResponse = processAxiosResponse(searchStreamResponse); - if (!isHttpStatusSuccess(searchStreamResponse.status)) { + const { response, status, headers: responseHeaders } = searchStreamResponse; + logger.debug(`[${destType.toUpperCase()}] get conversion custom variable`, { + ...getLoggableData(metadata), + ...(responseHeaders ? { responseHeaders } : {}), + }); + if (!isHttpStatusSuccess(status)) { throw new NetworkError( - `[Google Ads Offline Conversions]:: ${searchStreamResponse?.response?.[0]?.error?.message} during google_ads_offline_conversions response transformation`, - searchStreamResponse.status, + `[Google Ads Offline Conversions]:: ${response?.[0]?.error?.message} during google_ads_offline_conversions response transformation`, + status, { - [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(searchStreamResponse.status), + [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(status), }, - searchStreamResponse?.response || searchStreamResponse, - getAuthErrCategoryFromStCode(searchStreamResponse.status), + response || searchStreamResponse, + getAuthErrCategoryFromStCode(status), ); } const conversionCustomVariable = get(searchStreamResponse, 'response.0.results'); @@ -195,37 +216,57 @@ const isValidCustomVariables = (customVariables) => { * @returns */ const ProxyRequest = async (request) => { - const { method, endpoint, headers, params, body } = 
request; + const { method, endpoint, headers, params, body, metadata } = request; + let reqMeta = metadata; + if (Array.isArray(metadata)) { + [reqMeta] = metadata; + } if (body.JSON?.isStoreConversion) { - const firstResponse = await createJob(endpoint, headers, body.JSON.createJobPayload); + const firstResponse = await createJob({ + endpoint, + headers, + payload: body.JSON.createJobPayload, + metadata: reqMeta, + }); const addPayload = body.JSON.addConversionPayload; // Mapping Conversion Action - const conversionId = await getConversionActionId(headers, params); + const conversionId = await getConversionActionId({ headers, params, metadata: reqMeta }); if (Array.isArray(addPayload.operations)) { addPayload.operations.forEach((operation) => { set(operation, 'create.transaction_attribute.conversion_action', conversionId); }); } - await addConversionToJob(endpoint, headers, firstResponse, addPayload); - const thirdResponse = await runTheJob( + await addConversionToJob({ endpoint, headers, - body.JSON.executeJobPayload, - firstResponse, - ); + jobId: firstResponse, + payload: addPayload, + metadata: reqMeta, + }); + const thirdResponse = await runTheJob({ + endpoint, + headers, + payload: body.JSON.executeJobPayload, + jobId: firstResponse, + metadata: reqMeta, + }); return thirdResponse; } // fetch conversionAction // httpPOST -> myAxios.post() if (params?.event) { - const conversionActionId = await getConversionActionId(headers, params); + const conversionActionId = await getConversionActionId({ headers, params, metadata: reqMeta }); set(body.JSON, 'conversions.0.conversionAction', conversionActionId); } // customVariables would be undefined in case of Store Conversions if (isValidCustomVariables(params.customVariables)) { // fetch all conversion custom variable in google ads - let conversionCustomVariable = await getConversionCustomVariable(headers, params); + let conversionCustomVariable = await getConversionCustomVariable({ + headers, + params, + metadata: 
reqMeta, + }); // convert it into hashMap conversionCustomVariable = getConversionCustomVariableHashMap(conversionCustomVariable); @@ -258,6 +299,11 @@ const ProxyRequest = async (request) => { requestMethod: 'POST', module: 'dataDelivery', }); + const { headers: responseHeaders } = response; + logger.debug(`[${destType.toUpperCase()}] deliver event to destination`, { + ...getLoggableData(metadata), + ...(responseHeaders ? { responseHeaders } : {}), + }); return response; }; diff --git a/src/v0/destinations/google_adwords_offline_conversions/utils.js b/src/v0/destinations/google_adwords_offline_conversions/utils.js index 70b42e2157..ee992137c6 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/utils.js +++ b/src/v0/destinations/google_adwords_offline_conversions/utils.js @@ -5,6 +5,7 @@ const { AbortedError, ConfigurationError, InstrumentationError, + structuredLogger: logger, } = require('@rudderstack/integrations-lib'); const { httpPOST } = require('../../../adapters/network'); const { @@ -19,6 +20,7 @@ const { getAuthErrCategoryFromStCode, getAccessToken, getIntegrationsObj, + getLoggableData, } = require('../../util'); const { SEARCH_STREAM, @@ -30,6 +32,7 @@ const { CLICK_CONVERSION, trackCallConversionsMapping, consentConfigMap, + destType, } = require('./config'); const { processAxiosResponse } = require('../../../adapters/utils/networkUtils'); const Cache = require('../../util/cache'); @@ -55,7 +58,7 @@ const validateDestinationConfig = ({ Config }) => { * @param {*} headers * @returns */ -const getConversionActionId = async (headers, params) => { +const getConversionActionId = async ({ headers, params, metadata }) => { const conversionActionIdKey = sha256(params.event + params.customerId).toString(); return conversionActionIdCache.get(conversionActionIdKey, async () => { const queryString = SqlString.format( @@ -77,13 +80,18 @@ const getConversionActionId = async (headers, params) => { module: 'dataDelivery', }); searchStreamResponse = 
processAxiosResponse(searchStreamResponse); - if (!isHttpStatusSuccess(searchStreamResponse.status)) { + const { response, status, headers: responseHeaders } = searchStreamResponse; + logger.debug(`[${destType.toUpperCase()}] get conversion custom variable`, { + ...getLoggableData(metadata), + ...(responseHeaders ? { responseHeaders } : {}), + }); + if (!isHttpStatusSuccess(status)) { throw new AbortedError( `[Google Ads Offline Conversions]:: ${JSON.stringify( - searchStreamResponse.response, + response, )} during google_ads_offline_conversions response transformation`, - searchStreamResponse.status, - searchStreamResponse.response, + status, + response, getAuthErrCategoryFromStCode(get(searchStreamResponse, 'status')), ); } diff --git a/src/v0/util/index.js b/src/v0/util/index.js index ac1bacf404..5b1224fd03 100644 --- a/src/v0/util/index.js +++ b/src/v0/util/index.js @@ -2234,6 +2234,18 @@ const validateEventAndLowerCaseConversion = (event, isMandatory, convertToLowerC return convertToLowerCase ? 
event.toString().toLowerCase() : event.toString(); }; +function getLoggableData(errorDetailer) { + return { + ...(errorDetailer?.destinationId && { destinationId: errorDetailer.destinationId }), + ...(errorDetailer?.sourceId && { sourceId: errorDetailer.sourceId }), + ...(errorDetailer?.workspaceId && { workspaceId: errorDetailer.workspaceId }), + ...(errorDetailer?.destType && { destType: errorDetailer.destType }), + ...(errorDetailer?.module && { module: errorDetailer.module }), + ...(errorDetailer?.implementation && { implementation: errorDetailer.implementation }), + ...(errorDetailer?.feature && { feature: errorDetailer.feature }), + }; +} + // ======================================================================== // EXPORTS // ======================================================================== @@ -2352,4 +2364,5 @@ module.exports = { removeDuplicateMetadata, combineBatchRequestsWithSameJobIds, validateEventAndLowerCaseConversion, + getLoggableData, }; diff --git a/test/integrations/destinations/am/dataDelivery/data.ts b/test/integrations/destinations/am/dataDelivery/data.ts index a4faa7e60c..19baca02c3 100644 --- a/test/integrations/destinations/am/dataDelivery/data.ts +++ b/test/integrations/destinations/am/dataDelivery/data.ts @@ -68,6 +68,15 @@ export const data = [ message: '[Generic Response Handler] Request for destination: am Processed Successfully', destinationResponse: { + headers: { + 'access-control-allow-methods': 'GET, POST', + 'access-control-allow-origin': '*', + connection: 'keep-alive', + 'content-length': '93', + 'content-type': 'application/json', + date: 'Sat, 11 Dec 2021 15:08:22 GMT', + 'strict-transport-security': 'max-age=15768000', + }, response: { code: 200, server_upload_time: 1639235302252, @@ -145,6 +154,15 @@ export const data = [ message: '[Generic Response Handler] Request failed for destination am with status: 400', destinationResponse: { + headers: { + 'access-control-allow-methods': 'GET, POST', + 
'access-control-allow-origin': '*', + connection: 'keep-alive', + 'content-length': '93', + 'content-type': 'application/json', + date: 'Sat, 11 Dec 2021 15:08:22 GMT', + 'strict-transport-security': 'max-age=15768000', + }, response: { code: 400, server_upload_time: 1639235302252, From 37b4f47f289396e2b3c14483f748506aad248639 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Wed, 22 May 2024 05:57:54 +0530 Subject: [PATCH 02/37] chore: add structured logger in klaviyo Signed-off-by: Sai Sankeerth --- .../networkHandler.js | 16 ++++++-------- src/v0/destinations/klaviyo/config.js | 2 ++ src/v0/destinations/klaviyo/transform.js | 20 ++++++++++++------ src/v0/destinations/klaviyo/util.js | 21 ++++++++++++++++--- src/v0/util/index.js | 20 +++++++++++------- 5 files changed, 52 insertions(+), 27 deletions(-) diff --git a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js index 36badb5bf8..526b37969d 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js @@ -217,21 +217,17 @@ const isValidCustomVariables = (customVariables) => { */ const ProxyRequest = async (request) => { const { method, endpoint, headers, params, body, metadata } = request; - let reqMeta = metadata; - if (Array.isArray(metadata)) { - [reqMeta] = metadata; - } if (body.JSON?.isStoreConversion) { const firstResponse = await createJob({ endpoint, headers, payload: body.JSON.createJobPayload, - metadata: reqMeta, + metadata, }); const addPayload = body.JSON.addConversionPayload; // Mapping Conversion Action - const conversionId = await getConversionActionId({ headers, params, metadata: reqMeta }); + const conversionId = await getConversionActionId({ headers, params, metadata }); if (Array.isArray(addPayload.operations)) { addPayload.operations.forEach((operation) => { set(operation, 
'create.transaction_attribute.conversion_action', conversionId); @@ -242,21 +238,21 @@ const ProxyRequest = async (request) => { headers, jobId: firstResponse, payload: addPayload, - metadata: reqMeta, + metadata, }); const thirdResponse = await runTheJob({ endpoint, headers, payload: body.JSON.executeJobPayload, jobId: firstResponse, - metadata: reqMeta, + metadata, }); return thirdResponse; } // fetch conversionAction // httpPOST -> myAxios.post() if (params?.event) { - const conversionActionId = await getConversionActionId({ headers, params, metadata: reqMeta }); + const conversionActionId = await getConversionActionId({ headers, params, metadata }); set(body.JSON, 'conversions.0.conversionAction', conversionActionId); } // customVariables would be undefined in case of Store Conversions @@ -265,7 +261,7 @@ const ProxyRequest = async (request) => { let conversionCustomVariable = await getConversionCustomVariable({ headers, params, - metadata: reqMeta, + metadata, }); // convert it into hashMap diff --git a/src/v0/destinations/klaviyo/config.js b/src/v0/destinations/klaviyo/config.js index 5c15804e14..d8583ab9cb 100644 --- a/src/v0/destinations/klaviyo/config.js +++ b/src/v0/destinations/klaviyo/config.js @@ -57,6 +57,7 @@ const LIST_CONF = { }; const MAPPING_CONFIG = getMappingConfig(CONFIG_CATEGORIES, __dirname); +const destType = 'klaviyo'; module.exports = { BASE_ENDPOINT, @@ -68,4 +69,5 @@ module.exports = { ecomEvents, eventNameMapping, jsonNameMapping, + destType, }; diff --git a/src/v0/destinations/klaviyo/transform.js b/src/v0/destinations/klaviyo/transform.js index a0fe3e81a7..09e75919f9 100644 --- a/src/v0/destinations/klaviyo/transform.js +++ b/src/v0/destinations/klaviyo/transform.js @@ -51,7 +51,10 @@ const { JSON_MIME_TYPE, HTTP_STATUS_CODES } = require('../../util/constant'); * @param {*} reqMetadata * @returns */ -const identifyRequestHandler = async (message, category, destination, reqMetadata) => { +const identifyRequestHandler = async ( + { 
message, category, destination, metadata }, + reqMetadata, +) => { // If listId property is present try to subscribe/member user in list const { privateApiKey, enforceEmailAsPrimary, listId, flattenProperties } = destination.Config; const mappedToDestination = get(message, MappedToDestinationKey); @@ -109,11 +112,12 @@ const identifyRequestHandler = async (message, category, destination, reqMetadat }, }; - const { profileId, response, statusCode } = await getIdFromNewOrExistingProfile( + const { profileId, response, statusCode } = await getIdFromNewOrExistingProfile({ endpoint, payload, requestOptions, - ); + metadata, + }); const responseMap = { profileUpdateResponse: profileUpdateResponseBuilder( @@ -271,7 +275,8 @@ const groupRequestHandler = (message, category, destination) => { }; // Main event processor using specific handler funcs -const processEvent = async (message, destination, reqMetadata) => { +const processEvent = async (event, reqMetadata) => { + const { message, destination, metadata } = event; if (!message.type) { throw new InstrumentationError('Event type is required'); } @@ -285,7 +290,10 @@ const processEvent = async (message, destination, reqMetadata) => { switch (messageType) { case EventType.IDENTIFY: category = CONFIG_CATEGORIES.IDENTIFY; - response = await identifyRequestHandler(message, category, destination, reqMetadata); + response = await identifyRequestHandler( + { message, category, destination, metadata }, + reqMetadata, + ); break; case EventType.SCREEN: case EventType.TRACK: @@ -303,7 +311,7 @@ const processEvent = async (message, destination, reqMetadata) => { }; const process = async (event, reqMetadata) => { - const result = await processEvent(event.message, event.destination, reqMetadata); + const result = await processEvent(event, reqMetadata); return result; }; diff --git a/src/v0/destinations/klaviyo/util.js b/src/v0/destinations/klaviyo/util.js index df2dbb4712..1f514a0c5b 100644 --- a/src/v0/destinations/klaviyo/util.js +++ 
b/src/v0/destinations/klaviyo/util.js @@ -1,6 +1,10 @@ const { defaultRequestConfig } = require('rudder-transformer-cdk/build/utils'); const lodash = require('lodash'); -const { NetworkError, InstrumentationError } = require('@rudderstack/integrations-lib'); +const { + NetworkError, + InstrumentationError, + structuredLogger: logger, +} = require('@rudderstack/integrations-lib'); const { WhiteListedTraits } = require('../../../constants'); const { @@ -12,12 +16,19 @@ const { defaultBatchRequestConfig, getSuccessRespEvents, defaultPatchRequestConfig, + getLoggableData, } = require('../../util'); const tags = require('../../util/tags'); const { handleHttpRequest } = require('../../../adapters/network'); const { JSON_MIME_TYPE, HTTP_STATUS_CODES } = require('../../util/constant'); const { getDynamicErrorType } = require('../../../adapters/utils/networkUtils'); -const { BASE_ENDPOINT, MAPPING_CONFIG, CONFIG_CATEGORIES, MAX_BATCH_SIZE } = require('./config'); +const { + BASE_ENDPOINT, + MAPPING_CONFIG, + CONFIG_CATEGORIES, + MAX_BATCH_SIZE, + destType, +} = require('./config'); const REVISION_CONSTANT = '2023-02-22'; @@ -32,7 +43,7 @@ const REVISION_CONSTANT = '2023-02-22'; * @param {*} requestOptions * @returns */ -const getIdFromNewOrExistingProfile = async (endpoint, payload, requestOptions) => { +const getIdFromNewOrExistingProfile = async ({ endpoint, payload, requestOptions, metadata }) => { let response; let profileId; const endpointPath = '/api/profiles'; @@ -49,6 +60,10 @@ const getIdFromNewOrExistingProfile = async (endpoint, payload, requestOptions) module: 'router', }, ); + logger.debug(`[${destType.toUpperCase()}] get id from profile`, { + ...getLoggableData(metadata), + ...(resp.headers ? 
{ responseHeaders: resp.headers } : {}), + }); /** * 201 - profile is created with updated payload no need to update it again (suppress event with 299 status code) diff --git a/src/v0/util/index.js b/src/v0/util/index.js index 5b1224fd03..362652d489 100644 --- a/src/v0/util/index.js +++ b/src/v0/util/index.js @@ -2234,15 +2234,19 @@ const validateEventAndLowerCaseConversion = (event, isMandatory, convertToLowerC return convertToLowerCase ? event.toString().toLowerCase() : event.toString(); }; -function getLoggableData(errorDetailer) { +function getLoggableData(metadata) { + let reqMeta = metadata; + if (Array.isArray(metadata)) { + [reqMeta] = metadata; + } return { - ...(errorDetailer?.destinationId && { destinationId: errorDetailer.destinationId }), - ...(errorDetailer?.sourceId && { sourceId: errorDetailer.sourceId }), - ...(errorDetailer?.workspaceId && { workspaceId: errorDetailer.workspaceId }), - ...(errorDetailer?.destType && { destType: errorDetailer.destType }), - ...(errorDetailer?.module && { module: errorDetailer.module }), - ...(errorDetailer?.implementation && { implementation: errorDetailer.implementation }), - ...(errorDetailer?.feature && { feature: errorDetailer.feature }), + ...(reqMeta?.destinationId && { destinationId: reqMeta.destinationId }), + ...(reqMeta?.sourceId && { sourceId: reqMeta.sourceId }), + ...(reqMeta?.workspaceId && { workspaceId: reqMeta.workspaceId }), + ...(reqMeta?.destType && { destType: reqMeta.destType }), + ...(reqMeta?.module && { module: reqMeta.module }), + ...(reqMeta?.implementation && { implementation: reqMeta.implementation }), + ...(reqMeta?.feature && { feature: reqMeta.feature }), }; } From 445ce1f8e848dec00a1624df7a9e4a7025efffca Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Wed, 22 May 2024 06:36:06 +0530 Subject: [PATCH 03/37] chore: add structured logger to src transform & proxyV1 integrations Signed-off-by: Sai Sankeerth --- src/util/redis/redisConnector.js | 13 ++++++------- 
src/v0/sources/canny/transform.js | 12 ++++++------ src/v0/sources/shopify/transform.js | 14 +++++++++----- .../campaign_manager/networkHandler.js | 16 ++++++++++++++-- src/v1/destinations/monday/networkHandler.js | 11 +++++++++-- 5 files changed, 44 insertions(+), 22 deletions(-) diff --git a/src/util/redis/redisConnector.js b/src/util/redis/redisConnector.js index 84d578d3b3..404dc02e60 100644 --- a/src/util/redis/redisConnector.js +++ b/src/util/redis/redisConnector.js @@ -1,6 +1,5 @@ const Redis = require('ioredis'); -const { RedisError } = require('@rudderstack/integrations-lib'); -const log = require('../../logger'); +const { RedisError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); const stats = require('../stats'); const timeoutPromise = () => @@ -29,13 +28,13 @@ const RedisDB = { stats.increment('redis_error', { operation: 'redis_down', }); - log.error(`Redis is down at ${this.host}:${this.port}`); + logger.error(`Redis is down at ${this.host}:${this.port}`); return false; // stop retrying }, tls: {}, }); this.client.on('ready', () => { - log.info(`Connected to redis at ${this.host}:${this.port}`); + logger.info(`Connected to redis at ${this.host}:${this.port}`); }); } }, @@ -89,7 +88,7 @@ const RedisDB = { stats.increment('redis_error', { operation: 'get', }); - log.error(`Error getting value from Redis: ${e}`); + logger.error(`Error getting value from Redis: ${e}`); throw new RedisError(`Error getting value from Redis: ${e}`); } }, @@ -124,13 +123,13 @@ const RedisDB = { stats.increment('redis_error', { operation: 'set', }); - log.error(`Error setting value in Redis due ${e}`); + logger.error(`Error setting value in Redis due ${e}`); throw new RedisError(`Error setting value in Redis due ${e}`); } }, async disconnect() { if (process.env.USE_REDIS_DB && process.env.USE_REDIS_DB !== 'false') { - log.info(`Disconnecting from redis at ${this.host}:${this.port}`); + logger.info(`Disconnecting from redis at ${this.host}:${this.port}`); 
this.client.disconnect(); } }, diff --git a/src/v0/sources/canny/transform.js b/src/v0/sources/canny/transform.js index 38ed5e137e..d2ceadb699 100644 --- a/src/v0/sources/canny/transform.js +++ b/src/v0/sources/canny/transform.js @@ -1,5 +1,5 @@ const sha256 = require('sha256'); -const { TransformationError } = require('@rudderstack/integrations-lib'); +const { TransformationError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); const Message = require('../message'); const { voterMapping, authorMapping, checkForRequiredFields } = require('./util'); @@ -14,7 +14,7 @@ const CannyOperation = { * @param {*} event * @param {*} typeOfUser */ -function settingIds(message, event, typeOfUser, logger) { +function settingIds(message, event, typeOfUser) { const clonedMessage = { ...message }; try { // setting up userId @@ -47,7 +47,7 @@ function settingIds(message, event, typeOfUser, logger) { * @param {*} typeOfUser * @returns message */ -function createMessage(event, typeOfUser, logger) { +function createMessage(event, typeOfUser) { const message = new Message(`Canny`); message.setEventType('track'); @@ -60,7 +60,7 @@ function createMessage(event, typeOfUser, logger) { message.context.integration.version = '1.0.0'; - const finalMessage = settingIds(message, event, typeOfUser, logger); + const finalMessage = settingIds(message, event, typeOfUser); checkForRequiredFields(finalMessage); @@ -72,7 +72,7 @@ function createMessage(event, typeOfUser, logger) { return finalMessage; } -function process(event, logger) { +function process(event) { let typeOfUser; switch (event.type) { @@ -85,6 +85,6 @@ function process(event, logger) { typeOfUser = 'author'; } - return createMessage(event, typeOfUser, logger); + return createMessage(event, typeOfUser); } module.exports = { process }; diff --git a/src/v0/sources/shopify/transform.js b/src/v0/sources/shopify/transform.js index 4886fb3df1..7d427ebe06 100644 --- a/src/v0/sources/shopify/transform.js +++ 
b/src/v0/sources/shopify/transform.js @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ const lodash = require('lodash'); const get = require('get-value'); -const { RedisError } = require('@rudderstack/integrations-lib'); +const { RedisError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); const stats = require('../../../util/stats'); const { getShopifyTopic, @@ -205,7 +205,7 @@ const processEvent = async (inputEvent, metricMetadata) => { }; const isIdentifierEvent = (event) => ['rudderIdentifier', 'rudderSessionIdentifier'].includes(event?.event); -const processIdentifierEvent = async (event, metricMetadata, logger) => { +const processIdentifierEvent = async (event, metricMetadata) => { if (useRedisDatabase) { let value; let field; @@ -243,7 +243,11 @@ const processIdentifierEvent = async (event, metricMetadata, logger) => { }); await RedisDB.setVal(`${event.cartToken}`, value); } catch (e) { - logger.debug(`{{SHOPIFY::}} cartToken map set call Failed due redis error ${e}`); + logger.debug(`{{SHOPIFY::}} cartToken map set call Failed due redis error ${e}`, { + type: 'set', + source: metricMetadata.source, + writeKey: metricMetadata.writeKey, + }); stats.increment('shopify_redis_failures', { type: 'set', source: metricMetadata.source, @@ -255,13 +259,13 @@ const processIdentifierEvent = async (event, metricMetadata, logger) => { } return NO_OPERATION_SUCCESS; }; -const process = async (event, logger) => { +const process = async (event) => { const metricMetadata = { writeKey: event.query_parameters?.writeKey?.[0], source: 'SHOPIFY', }; if (isIdentifierEvent(event)) { - return processIdentifierEvent(event, metricMetadata, logger); + return processIdentifierEvent(event, metricMetadata); } const response = await processEvent(event, metricMetadata); return response; diff --git a/src/v1/destinations/campaign_manager/networkHandler.js b/src/v1/destinations/campaign_manager/networkHandler.js index 300b5f9676..53e7dc1b35 100644 
--- a/src/v1/destinations/campaign_manager/networkHandler.js +++ b/src/v1/destinations/campaign_manager/networkHandler.js @@ -1,8 +1,13 @@ /* eslint-disable no-param-reassign */ /* eslint-disable no-restricted-syntax */ +const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); const { TransformerProxyError } = require('../../../v0/util/errorTypes'); const { prepareProxyRequest, proxyRequest } = require('../../../adapters/network'); -const { isHttpStatusSuccess, getAuthErrCategoryFromStCode } = require('../../../v0/util/index'); +const { + isHttpStatusSuccess, + getAuthErrCategoryFromStCode, + getLoggableData, +} = require('../../../v0/util/index'); const { processAxiosResponse, @@ -38,7 +43,14 @@ const responseHandler = (responseParams) => { const { destinationResponse, rudderJobMetadata } = responseParams; const message = `[CAMPAIGN_MANAGER Response V1 Handler] - Request Processed Successfully`; const responseWithIndividualEvents = []; - const { response, status } = destinationResponse; + const { response, status, headers } = destinationResponse; + + logger.debug('[campaign_manager] response handling', { + ...getLoggableData(rudderJobMetadata), + ...(headers ? 
{ headers } : {}), + response, + status, + }); if (isHttpStatusSuccess(status)) { // check for Partial Event failures and Successes diff --git a/src/v1/destinations/monday/networkHandler.js b/src/v1/destinations/monday/networkHandler.js index 28a7f1abc0..d55b4ce907 100644 --- a/src/v1/destinations/monday/networkHandler.js +++ b/src/v1/destinations/monday/networkHandler.js @@ -1,10 +1,11 @@ +const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); const { TransformerProxyError } = require('../../../v0/util/errorTypes'); const { proxyRequest, prepareProxyRequest } = require('../../../adapters/network'); const { processAxiosResponse, getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); -const { isHttpStatusSuccess } = require('../../../v0/util/index'); +const { isHttpStatusSuccess, getLoggableData } = require('../../../v0/util/index'); const tags = require('../../../v0/util/tags'); const checkIfUpdationOfStatusRequired = (response) => { @@ -41,8 +42,14 @@ const responseHandler = (responseParams) => { const message = '[MONDAY Response V1 Handler] - Request Processed Successfully'; const responseWithIndividualEvents = []; - const { response, status } = destinationResponse; + const { response, status, headers } = destinationResponse; + logger.debug('[campaign_manager] response handling', { + ...getLoggableData(rudderJobMetadata), + ...(headers ? 
{ headers } : {}), + response, + status, + }); // batching not supported if (isHttpStatusSuccess(status)) { const proxyOutput = { From 5bbb38eabc73732c3089fe3a7eabf88f1410ac13 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 27 May 2024 11:42:57 +0530 Subject: [PATCH 04/37] chore: add logging to gaec Signed-off-by: Sai Sankeerth --- .../config.js | 1 + .../networkHandler.js | 62 +++++++++++++------ 2 files changed, 45 insertions(+), 18 deletions(-) diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/config.js b/src/v0/destinations/google_adwords_enhanced_conversions/config.js index 8d194655f7..e8f486fb7a 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/config.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/config.js @@ -16,4 +16,5 @@ module.exports = { BASE_ENDPOINT, hashAttributes, CONVERSION_ACTION_ID_CACHE_TTL, + destType: 'google_adwords_enhanced_conversions', }; diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js index f7ac660f53..9720e7f09f 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js @@ -1,10 +1,18 @@ const { get, set } = require('lodash'); const sha256 = require('sha256'); -const { NetworkError, NetworkInstrumentationError } = require('@rudderstack/integrations-lib'); +const { + NetworkError, + NetworkInstrumentationError, + structuredLogger: logger, +} = require('@rudderstack/integrations-lib'); const SqlString = require('sqlstring'); const { prepareProxyRequest, handleHttpRequest } = require('../../../adapters/network'); -const { isHttpStatusSuccess, getAuthErrCategoryFromStCode } = require('../../util/index'); -const { CONVERSION_ACTION_ID_CACHE_TTL } = require('./config'); +const { + isHttpStatusSuccess, + getAuthErrCategoryFromStCode, + getLoggableData, +} = 
require('../../util/index'); +const { CONVERSION_ACTION_ID_CACHE_TTL, destType } = require('./config'); const Cache = require('../../util/cache'); const conversionActionIdCache = new Cache(CONVERSION_ACTION_ID_CACHE_TTL); @@ -27,7 +35,7 @@ const ERROR_MSG_PATH = 'response[0].error.message'; * @returns */ -const getConversionActionId = async (method, headers, params) => { +const getConversionActionId = async ({ method, headers, params, metadata }) => { const conversionActionIdKey = sha256(params.event + params.customerId).toString(); return conversionActionIdCache.get(conversionActionIdKey, async () => { const queryString = SqlString.format( @@ -54,19 +62,26 @@ const getConversionActionId = async (method, headers, params) => { module: 'dataDelivery', }, ); - if (!isHttpStatusSuccess(gaecConversionActionIdResponse.status)) { + const { status, response, headers: responseHeaders } = gaecConversionActionIdResponse; + logger.debug(`[${destType.toUpperCase()}] get conversion action id response`, { + ...getLoggableData(metadata), + ...(responseHeaders ? { responseHeaders } : {}), + ...(response ? { response } : {}), + status, + }); + if (!isHttpStatusSuccess(status)) { throw new NetworkError( `"${JSON.stringify( get(gaecConversionActionIdResponse, ERROR_MSG_PATH, '') ? 
get(gaecConversionActionIdResponse, ERROR_MSG_PATH, '') - : gaecConversionActionIdResponse.response, + : response, )} during Google_adwords_enhanced_conversions response transformation"`, - gaecConversionActionIdResponse.status, + status, { - [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(gaecConversionActionIdResponse.status), + [tags.TAG_NAMES.ERROR_TYPE]: getDynamicErrorType(status), }, - gaecConversionActionIdResponse.response, - getAuthErrCategoryFromStCode(gaecConversionActionIdResponse.status), + response, + getAuthErrCategoryFromStCode(status), ); } const conversionActionId = get( @@ -90,10 +105,10 @@ const getConversionActionId = async (method, headers, params) => { * @returns */ const ProxyRequest = async (request) => { - const { body, method, endpoint, params } = request; + const { body, method, endpoint, params, metadata } = request; const { headers } = request; - const conversionActionId = await getConversionActionId(method, headers, params); + const conversionActionId = await getConversionActionId({ method, headers, params, metadata }); set( body.JSON, @@ -101,12 +116,23 @@ const ProxyRequest = async (request) => { `customers/${params.customerId}/conversionActions/${conversionActionId}`, ); const requestBody = { url: endpoint, data: body.JSON, headers, method }; - const { httpResponse: response } = await handleHttpRequest('constructor', requestBody, { - destType: 'google_adwords_enhanced_conversions', - feature: 'proxy', - endpointPath: `/googleAds:uploadOfflineUserData`, - requestMethod: 'POST', - module: 'dataDelivery', + const { httpResponse: response, processedResponse } = await handleHttpRequest( + 'constructor', + requestBody, + { + destType: 'google_adwords_enhanced_conversions', + feature: 'proxy', + endpointPath: `/googleAds:uploadOfflineUserData`, + requestMethod: 'POST', + module: 'dataDelivery', + }, + ); + const { response: resp, status, headers: responseHeaders } = processedResponse; + logger.debug(`[${destType.toUpperCase()}] get 
conversion action id response`, { + ...getLoggableData(metadata), + ...(responseHeaders ? { responseHeaders } : {}), + ...(resp ? { response: resp } : {}), + status, }); return response; }; From 0b7bfecec5fdfd9d770f9652afec5c3fb8dda15a Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 27 May 2024 14:23:33 +0530 Subject: [PATCH 05/37] chore: add structured logger to transformer logger & use it for gaec response logging Signed-off-by: Sai Sankeerth --- src/logger.js | 37 ++++++++++++++++-- .../networkHandler.js | 39 +++++++++---------- 2 files changed, 51 insertions(+), 25 deletions(-) diff --git a/src/logger.js b/src/logger.js index 0685df3387..8594eab79a 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,38 +1,66 @@ /* istanbul ignore file */ +const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { getLoggableData } = require('./v0/util'); const levelDebug = 0; // Most verbose logging level const levelInfo = 1; // Logs about state of the application const levelWarn = 2; // Logs about warnings which dont immediately halt the application const levelError = 3; // Logs about errors which dont immediately halt the application // any value greater than levelError will work as levelNone +const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; let logLevel = process.env.LOG_LEVEL ? parseInt(process.env.LOG_LEVEL, 10) : levelInfo; const setLogLevel = (level) => { + const logger = getLogger(); logLevel = level || logLevel; + logger?.setLogLevel(`${loglevel}`); +}; + +const getLogger = () => { + return loggerImpl === 'winston' ? 
logger : console; }; const debug = (...args) => { + const logger = getLogger(); if (levelDebug >= logLevel) { - console.debug(...args); + logger.debug(...args); } }; const info = (...args) => { + const logger = getLogger(); if (levelInfo >= logLevel) { - console.info(...args); + logger.info(...args); } }; const warn = (...args) => { + const logger = getLogger(); if (levelWarn >= logLevel) { - console.warn(...args); + logger.warn(...args); } }; const error = (...args) => { + const logger = getLogger(); + if (levelError >= logLevel) { + logger.error(...args); + } +}; + +const responseLog = ( + identifierMsg, + { metadata, responseDetails: { response: responseBody, status, headers: responseHeaders } }, +) => { + const logger = getLogger(); if (levelError >= logLevel) { - console.error(...args); + logger.debug(identifierMsg, { + ...getLoggableData(metadata), + ...(responseBody ? { responseBody } : {}), + ...(responseHeaders ? { responseHeaders } : {}), + status, + }); } }; @@ -46,4 +74,5 @@ module.exports = { levelInfo, levelWarn, levelError, + responseLog, }; diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js index 9720e7f09f..5a1ba2d69f 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js @@ -1,19 +1,12 @@ const { get, set } = require('lodash'); const sha256 = require('sha256'); -const { - NetworkError, - NetworkInstrumentationError, - structuredLogger: logger, -} = require('@rudderstack/integrations-lib'); +const { NetworkError, NetworkInstrumentationError } = require('@rudderstack/integrations-lib'); const SqlString = require('sqlstring'); const { prepareProxyRequest, handleHttpRequest } = require('../../../adapters/network'); -const { - isHttpStatusSuccess, - getAuthErrCategoryFromStCode, - getLoggableData, -} = require('../../util/index'); 
+const { isHttpStatusSuccess, getAuthErrCategoryFromStCode } = require('../../util/index'); const { CONVERSION_ACTION_ID_CACHE_TTL, destType } = require('./config'); const Cache = require('../../util/cache'); +const logger = require('../../../logger'); const conversionActionIdCache = new Cache(CONVERSION_ACTION_ID_CACHE_TTL); @@ -63,11 +56,13 @@ const getConversionActionId = async ({ method, headers, params, metadata }) => { }, ); const { status, response, headers: responseHeaders } = gaecConversionActionIdResponse; - logger.debug(`[${destType.toUpperCase()}] get conversion action id response`, { - ...getLoggableData(metadata), - ...(responseHeaders ? { responseHeaders } : {}), - ...(response ? { response } : {}), - status, + logger.responseLog(`[${destType.toUpperCase()}] get conversion action id response`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, }); if (!isHttpStatusSuccess(status)) { throw new NetworkError( @@ -127,12 +122,14 @@ const ProxyRequest = async (request) => { module: 'dataDelivery', }, ); - const { response: resp, status, headers: responseHeaders } = processedResponse; - logger.debug(`[${destType.toUpperCase()}] get conversion action id response`, { - ...getLoggableData(metadata), - ...(responseHeaders ? { responseHeaders } : {}), - ...(resp ? 
{ response: resp } : {}), - status, + const { response: processedResp, status, headers: responseHeaders } = processedResponse; + logger.responseLog(`[${destType.toUpperCase()}] get conversion action id response`, { + metadata, + responseDetails: { + response: processedResp, + status, + headers: responseHeaders, + }, }); return response; }; From 65b614681dd75e3fb53ad1f06f97a4b0e8b36292 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 27 May 2024 15:46:27 +0530 Subject: [PATCH 06/37] chore: structured logger referred from transformer logger refactor Signed-off-by: Sai Sankeerth --- src/cdk/v2/handler.ts | 3 +- src/controllers/bulkUpload.ts | 2 +- src/controllers/delivery.ts | 14 ++-- src/controllers/destination.ts | 20 +----- src/controllers/regulation.ts | 2 +- src/controllers/source.ts | 10 +-- src/controllers/userTransform.ts | 2 +- src/index.ts | 2 +- src/interfaces/DestinationService.ts | 3 - src/interfaces/SourceService.ts | 2 - src/logger.js | 20 +++++- src/services/comparator.ts | 5 +- .../__tests__/nativeIntegration.test.ts | 3 - .../__tests__/postTransformation.test.ts | 2 +- src/services/destination/cdkV2Integration.ts | 12 +--- src/services/destination/nativeIntegration.ts | 25 +------ src/services/misc.ts | 8 +-- .../__tests__/nativeIntegration.test.ts | 17 +---- src/services/source/nativeIntegration.ts | 10 +-- src/util/errorNotifier/bugsnag.js | 2 +- src/util/redis/redisConnector.js | 3 +- src/util/redis/redisConnector.test.js | 3 +- src/util/utils.js | 3 +- .../campaign_manager/transform.js | 3 +- .../networkHandler.js | 2 +- .../networkHandler.js | 72 +++++++++++++------ .../utils.js | 13 ++-- src/v0/destinations/klaviyo/util.js | 16 ++--- src/v0/destinations/mailchimp/utils.js | 7 +- src/v0/sources/canny/transform.js | 3 +- src/v0/sources/shopify/transform.js | 3 +- src/v0/sources/shopify/util.js | 3 +- src/v0/util/index.js | 17 ----- .../campaign_manager/networkHandler.js | 20 +++--- src/v1/destinations/monday/networkHandler.js | 16 +++-- 
.../__tests__/pinterestConversion-cdk.test.ts | 2 +- 36 files changed, 155 insertions(+), 195 deletions(-) diff --git a/src/cdk/v2/handler.ts b/src/cdk/v2/handler.ts index c437247f74..fec6731ffc 100644 --- a/src/cdk/v2/handler.ts +++ b/src/cdk/v2/handler.ts @@ -17,6 +17,8 @@ import { isCdkV2Destination, } from './utils'; +import logger from '../../logger'; + const defTags = { [tags.TAG_NAMES.IMPLEMENTATION]: tags.IMPLEMENTATIONS.CDK_V2, }; @@ -82,7 +84,6 @@ export async function processCdkV2Workflow( destType: string, parsedEvent: FixMe, feature: string, - logger: FixMe, requestMetadata: NonNullable = {}, bindings: Record = {}, ) { diff --git a/src/controllers/bulkUpload.ts b/src/controllers/bulkUpload.ts index 28556dd5df..cb0bcfed3c 100644 --- a/src/controllers/bulkUpload.ts +++ b/src/controllers/bulkUpload.ts @@ -1,5 +1,4 @@ /* eslint-disable global-require, import/no-dynamic-require, @typescript-eslint/no-unused-vars */ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { client as errNotificationClient } from '../util/errorNotifier'; import { getDestFileUploadHandler, @@ -7,6 +6,7 @@ import { getPollStatusHandler, } from '../util/fetchDestinationHandlers'; import { CatchErr, ContextBodySimple } from '../util/types'; +import logger from '../logger'; // TODO: To be refactored and redisgned const ERROR_MESSAGE_PROCESSOR_STRING = 'Error occurred while processing payload.'; diff --git a/src/controllers/delivery.ts b/src/controllers/delivery.ts index 0dc27553cb..9e06b23f4d 100644 --- a/src/controllers/delivery.ts +++ b/src/controllers/delivery.ts @@ -1,9 +1,6 @@ /* eslint-disable prefer-destructuring */ /* eslint-disable sonarjs/no-duplicate-string */ -import { - isDefinedAndNotNullAndNotEmpty, - structuredLogger as logger, -} from '@rudderstack/integrations-lib'; +import { isDefinedAndNotNullAndNotEmpty } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { ServiceSelector } from '../helpers/serviceSelector'; 
import { DeliveryTestService } from '../services/delivertTest/deliveryTest'; @@ -19,12 +16,13 @@ import { import { FixMe } from '../util/types'; import tags from '../v0/util/tags'; import { ControllerUtility } from './util'; +import logger from '../logger'; const NON_DETERMINABLE = 'Non-determinable'; export class DeliveryController { public static async deliverToDestination(ctx: Context) { - logger.debug('Native(Delivery):: Request to transformer::', ctx.request.body); + logger.debug('Native(Delivery):: Request to transformer for delivery::', ctx.request.body); let deliveryResponse: DeliveryV0Response; const requestMetadata = MiscService.getRequestMetadata(ctx); const deliveryRequest = ctx.request.body as ProxyV0Request; @@ -54,12 +52,12 @@ export class DeliveryController { ctx.body = { output: deliveryResponse }; ControllerUtility.deliveryPostProcess(ctx, deliveryResponse.status); - logger.debug('Native(Delivery):: Response from transformer::', ctx.body); + logger.debug('Native(Delivery):: Response from transformer after delivery::', ctx.body); return ctx; } public static async deliverToDestinationV1(ctx: Context) { - logger.debug('Native(Delivery):: Request to transformer::', ctx.request.body); + logger.debug('Native(Delivery):: Request to transformer for delivery::', ctx.request.body); let deliveryResponse: DeliveryV1Response; const requestMetadata = MiscService.getRequestMetadata(ctx); const deliveryRequest = ctx.request.body as ProxyV1Request; @@ -116,7 +114,7 @@ export class DeliveryController { ); ctx.body = { output: response }; ControllerUtility.postProcess(ctx); - logger.debug('Native(Delivery-Test):: Response from transformer::', ctx.body); + logger.debug('Native(Delivery-Test):: Response from transformer after delivery::', ctx.body); return ctx; } } diff --git a/src/controllers/destination.ts b/src/controllers/destination.ts index b13e728761..998cab67bb 100644 --- a/src/controllers/destination.ts +++ b/src/controllers/destination.ts @@ -1,11 +1,9 @@ 
-import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { ServiceSelector } from '../helpers/serviceSelector'; import { DestinationPostTransformationService } from '../services/destination/postTransformation'; import { DestinationPreTransformationService } from '../services/destination/preTransformation'; import { MiscService } from '../services/misc'; import { - ErrorDetailer, ProcessorTransformationRequest, ProcessorTransformationResponse, RouterTransformationRequest, @@ -14,9 +12,10 @@ import { import { DynamicConfigParser } from '../util/dynamicConfigParser'; import stats from '../util/stats'; import { getIntegrationVersion } from '../util/utils'; -import { checkInvalidRtTfEvents, getLoggableData } from '../v0/util'; +import { checkInvalidRtTfEvents } from '../v0/util'; import tags from '../v0/util/tags'; import { ControllerUtility } from './util'; +import logger from '../logger'; export class DestinationController { public static async destinationTransformAtProcessor(ctx: Context) { @@ -33,9 +32,6 @@ export class DestinationController { ...metaTags, }); const integrationService = ServiceSelector.getDestinationService(events); - const loggerWithCtx = logger.child({ - ...getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), - }); try { integrationService.init(); events = DestinationPreTransformationService.preProcess( @@ -51,7 +47,6 @@ export class DestinationController { destination, version, requestMetadata, - loggerWithCtx, ); } catch (error: any) { resplist = events.map((ev) => { @@ -71,7 +66,6 @@ export class DestinationController { } ctx.body = resplist; ControllerUtility.postProcess(ctx); - loggerWithCtx.debug('Native(Process-Transform):: Response from transformer::', ctx.body); stats.histogram('dest_transform_output_events', resplist.length, { destination, version, @@ -113,9 +107,6 @@ export class DestinationController { return ctx; } const metaTags = 
MiscService.getMetaTags(events[0].metadata); - const loggerWithCtx = logger.child({ - ...getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), - }); stats.histogram('dest_transform_input_events', events.length, { destination, version: 'v0', @@ -132,7 +123,6 @@ export class DestinationController { destination, getIntegrationVersion(), requestMetadata, - loggerWithCtx, ); } catch (error: any) { const metaTO = integrationService.getTags( @@ -155,7 +145,6 @@ export class DestinationController { version: 'v0', ...metaTags, }); - loggerWithCtx.debug('Native(Router-Transform):: Response from transformer::', ctx.body); stats.timing('dest_transform_request_latency', startTime, { destination, version: 'v0', @@ -172,9 +161,6 @@ export class DestinationController { const routerRequest = ctx.request.body as RouterTransformationRequest; const destination = routerRequest.destType; let events = routerRequest.input; - const loggerWithCtx = logger.child({ - ...getLoggableData(events[0]?.metadata as unknown as ErrorDetailer), - }); const integrationService = ServiceSelector.getDestinationService(events); try { events = DestinationPreTransformationService.preProcess(events, ctx); @@ -184,7 +170,6 @@ export class DestinationController { destination, getIntegrationVersion(), requestMetadata, - loggerWithCtx, ); ctx.body = resplist; } catch (error: any) { @@ -202,7 +187,6 @@ export class DestinationController { ctx.body = [errResp]; } ControllerUtility.postProcess(ctx); - loggerWithCtx.debug('Native(Process-Transform-Batch):: Response from transformer::', ctx.body); stats.timing('dest_transform_request_latency', startTime, { destination, feature: tags.FEATURES.BATCH, diff --git a/src/controllers/regulation.ts b/src/controllers/regulation.ts index 4b8f87e3fa..2d40c518f4 100644 --- a/src/controllers/regulation.ts +++ b/src/controllers/regulation.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import 
{ ServiceSelector } from '../helpers/serviceSelector'; import { DestinationPostTransformationService } from '../services/destination/postTransformation'; @@ -7,6 +6,7 @@ import stats from '../util/stats'; import tags from '../v0/util/tags'; // eslint-disable-next-line @typescript-eslint/no-unused-vars import { CatchErr } from '../util/types'; +import logger from '../logger'; export class RegulationController { public static async deleteUsers(ctx: Context) { diff --git a/src/controllers/source.ts b/src/controllers/source.ts index e1a4931371..bc4b77bd3d 100644 --- a/src/controllers/source.ts +++ b/src/controllers/source.ts @@ -1,9 +1,9 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { ServiceSelector } from '../helpers/serviceSelector'; import { MiscService } from '../services/misc'; import { SourcePostTransformationService } from '../services/source/postTransformation'; import { ControllerUtility } from './util'; +import logger from '../logger'; export class SourceController { public static async sourceTransform(ctx: Context) { @@ -12,7 +12,6 @@ export class SourceController { const events = ctx.request.body as object[]; const { version, source }: { version: string; source: string } = ctx.params; const integrationService = ServiceSelector.getNativeSourceService(); - const loggerWithCtx = logger.child({ version, source }); try { const { implementationVersion, input } = ControllerUtility.adaptInputToVersion( source, @@ -24,7 +23,6 @@ export class SourceController { source, implementationVersion, requestMetadata, - loggerWithCtx, ); ctx.body = resplist; } catch (err: any) { @@ -33,7 +31,11 @@ export class SourceController { ctx.body = [resp]; } ControllerUtility.postProcess(ctx); - loggerWithCtx.debug('Native(Source-Transform):: Response from transformer::', ctx.body); + logger.debug('Native(Source-Transform):: Response from transformer::', { + srcResponse: ctx.body, + version, + source, + }); return 
ctx; } } diff --git a/src/controllers/userTransform.ts b/src/controllers/userTransform.ts index 0e288c6f04..c81fb1dcb6 100644 --- a/src/controllers/userTransform.ts +++ b/src/controllers/userTransform.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { Context } from 'koa'; import { UserTransformService } from '../services/userTransform'; import { ProcessorTransformationRequest, UserTransformationServiceResponse } from '../types/index'; @@ -8,6 +7,7 @@ import { validateCode, } from '../util/customTransformer'; import { ControllerUtility } from './util'; +import logger from '../logger'; export class UserTransformController { public static async transform(ctx: Context) { diff --git a/src/index.ts b/src/index.ts index 5557994b2e..3bfd68cf21 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import dotenv from 'dotenv'; import gracefulShutdown from 'http-graceful-shutdown'; import Koa from 'koa'; @@ -9,6 +8,7 @@ import { metricsRouter } from './routes/metricsRouter'; import cluster from './util/cluster'; import { RedisDB } from './util/redis/redisConnector'; import { logProcessInfo } from './util/utils'; +import logger from './logger'; dotenv.config(); const clusterEnabled = process.env.CLUSTER_ENABLED !== 'false'; diff --git a/src/interfaces/DestinationService.ts b/src/interfaces/DestinationService.ts index 5d7596dac5..b45d9a427c 100644 --- a/src/interfaces/DestinationService.ts +++ b/src/interfaces/DestinationService.ts @@ -28,7 +28,6 @@ export interface DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: NonNullable, ): Promise; doRouterTransformation( @@ -36,7 +35,6 @@ export interface DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: NonNullable, ): Promise; doBatchTransformation( @@ -44,7 +42,6 @@ export interface 
DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: NonNullable, ): RouterTransformationResponse[]; deliver( diff --git a/src/interfaces/SourceService.ts b/src/interfaces/SourceService.ts index fab6490264..c7de8cfe8b 100644 --- a/src/interfaces/SourceService.ts +++ b/src/interfaces/SourceService.ts @@ -1,5 +1,4 @@ import { MetaTransferObject, SourceTransformationResponse } from '../types/index'; -import { FixMe } from '../util/types'; export interface SourceService { getTags(): MetaTransferObject; @@ -9,6 +8,5 @@ export interface SourceService { sourceType: string, version: string, requestMetadata: NonNullable, - logger: FixMe, ): Promise; } diff --git a/src/logger.js b/src/logger.js index 8594eab79a..83786f8642 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,6 +1,5 @@ /* istanbul ignore file */ const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); -const { getLoggableData } = require('./v0/util'); const levelDebug = 0; // Most verbose logging level const levelInfo = 1; // Logs about state of the application @@ -49,6 +48,22 @@ const error = (...args) => { } }; +const getLogMetadata = (metadata) => { + let reqMeta = metadata; + if (Array.isArray(metadata)) { + [reqMeta] = metadata; + } + return { + ...(reqMeta?.destinationId && { destinationId: reqMeta.destinationId }), + ...(reqMeta?.sourceId && { sourceId: reqMeta.sourceId }), + ...(reqMeta?.workspaceId && { workspaceId: reqMeta.workspaceId }), + ...(reqMeta?.destType && { destType: reqMeta.destType }), + ...(reqMeta?.module && { module: reqMeta.module }), + ...(reqMeta?.implementation && { implementation: reqMeta.implementation }), + ...(reqMeta?.feature && { feature: reqMeta.feature }), + }; +}; + const responseLog = ( identifierMsg, { metadata, responseDetails: { response: responseBody, status, headers: responseHeaders } }, @@ -56,7 +71,7 @@ const responseLog = ( const logger = getLogger(); if (levelError >= logLevel) { 
logger.debug(identifierMsg, { - ...getLoggableData(metadata), + ...getLogMetadata(metadata), ...(responseBody ? { responseBody } : {}), ...(responseHeaders ? { responseHeaders } : {}), status, @@ -75,4 +90,5 @@ module.exports = { levelWarn, levelError, responseLog, + getLogMetadata, }; diff --git a/src/services/comparator.ts b/src/services/comparator.ts index 511436dfd1..0e28339797 100644 --- a/src/services/comparator.ts +++ b/src/services/comparator.ts @@ -1,5 +1,4 @@ /* eslint-disable class-methods-use-this */ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { DestinationService } from '../interfaces/DestinationService'; import { DeliveryV0Response, @@ -18,6 +17,7 @@ import { import { CommonUtils } from '../util/common'; import stats from '../util/stats'; import tags from '../v0/util/tags'; +import logger from '../logger'; const NS_PER_SEC = 1e9; @@ -204,7 +204,6 @@ export class ComparatorService implements DestinationService { destinationType, version, requestMetadata, - logger, ); const primaryTimeDiff = process.hrtime(primaryStartTime); const primaryTime = primaryTimeDiff[0] * NS_PER_SEC + primaryTimeDiff[1]; @@ -263,7 +262,6 @@ export class ComparatorService implements DestinationService { destinationType, version, requestMetadata, - logger, ); const primaryTimeDiff = process.hrtime(primaryStartTime); const primaryTime = primaryTimeDiff[0] * NS_PER_SEC + primaryTimeDiff[1]; @@ -322,7 +320,6 @@ export class ComparatorService implements DestinationService { destinationType, version, requestMetadata, - {}, ); const primaryTimeDiff = process.hrtime(primaryStartTime); const primaryTime = primaryTimeDiff[0] * NS_PER_SEC + primaryTimeDiff[1]; diff --git a/src/services/destination/__tests__/nativeIntegration.test.ts b/src/services/destination/__tests__/nativeIntegration.test.ts index 85d099d292..3ec3222b9d 100644 --- a/src/services/destination/__tests__/nativeIntegration.test.ts +++ 
b/src/services/destination/__tests__/nativeIntegration.test.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { FetchHandler } from '../../../helpers/fetchHandlers'; import { ProcessorTransformationOutput, @@ -48,7 +47,6 @@ describe('NativeIntegration Service', () => { destType, version, requestMetadata, - logger, ); expect(resp).toEqual(tresponse); @@ -79,7 +77,6 @@ describe('NativeIntegration Service', () => { destType, version, requestMetadata, - logger, ); const expected = [ diff --git a/src/services/destination/__tests__/postTransformation.test.ts b/src/services/destination/__tests__/postTransformation.test.ts index f961dcbce7..050ae57b7b 100644 --- a/src/services/destination/__tests__/postTransformation.test.ts +++ b/src/services/destination/__tests__/postTransformation.test.ts @@ -1,4 +1,4 @@ -import { MetaTransferObject, ProcessorTransformationRequest } from '../../../types/index'; +import { MetaTransferObject } from '../../../types/index'; import { DestinationPostTransformationService } from '../postTransformation'; import { ProcessorTransformationResponse } from '../../../types'; diff --git a/src/services/destination/cdkV2Integration.ts b/src/services/destination/cdkV2Integration.ts index 78bff7495d..a91bc5674b 100644 --- a/src/services/destination/cdkV2Integration.ts +++ b/src/services/destination/cdkV2Integration.ts @@ -19,10 +19,8 @@ import { UserDeletionResponse, } from '../../types/index'; import stats from '../../util/stats'; -import { CatchErr, FixMe } from '../../util/types'; +import { CatchErr } from '../../util/types'; import tags from '../../v0/util/tags'; -import { MiscService } from '../misc'; -import { getLoggableData } from '../../v0/util'; import { DestinationPostTransformationService } from './postTransformation'; export class CDKV2DestinationService implements DestinationService { @@ -57,7 +55,6 @@ export class CDKV2DestinationService implements DestinationService { destinationType: 
string, _version: string, requestMetadata: NonNullable, - logger: any, ): Promise { // TODO: Change the promise type const respList: ProcessorTransformationResponse[][] = await Promise.all( @@ -69,7 +66,6 @@ export class CDKV2DestinationService implements DestinationService { tags.FEATURES.PROCESSOR, ); metaTo.metadata = event.metadata; - const loggerWithCtx = logger.child({ ...getLoggableData(metaTo.errorDetails) }); try { const transformedPayloads: | ProcessorTransformationOutput @@ -77,7 +73,6 @@ export class CDKV2DestinationService implements DestinationService { destinationType, event, tags.FEATURES.PROCESSOR, - loggerWithCtx, requestMetadata, ); stats.increment('event_transform_success', { @@ -116,7 +111,6 @@ export class CDKV2DestinationService implements DestinationService { destinationType: string, _version: string, requestMetadata: NonNullable, - logger: FixMe, ): Promise { const allDestEvents: object = groupBy( events, @@ -132,16 +126,12 @@ export class CDKV2DestinationService implements DestinationService { tags.FEATURES.ROUTER, ); metaTo.metadata = destInputArray[0].metadata; - const loggerWithCtx = logger.child({ - ...getLoggableData(metaTo.errorDetails), - }); try { const doRouterTransformationResponse: RouterTransformationResponse[] = await processCdkV2Workflow( destinationType, destInputArray, tags.FEATURES.ROUTER, - loggerWithCtx, requestMetadata, ); return DestinationPostTransformationService.handleRouterTransformSuccessEvents( diff --git a/src/services/destination/nativeIntegration.ts b/src/services/destination/nativeIntegration.ts index f34d717b03..38a27ea71d 100644 --- a/src/services/destination/nativeIntegration.ts +++ b/src/services/destination/nativeIntegration.ts @@ -25,8 +25,6 @@ import { } from '../../types/index'; import stats from '../../util/stats'; import tags from '../../v0/util/tags'; -import { getLoggableData } from '../../v0/util'; -import { MiscService } from '../misc'; import { DestinationPostTransformationService } from 
'./postTransformation'; export class NativeIntegrationDestinationService implements DestinationService { @@ -61,7 +59,6 @@ export class NativeIntegrationDestinationService implements DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: any, ): Promise { const destHandler = FetchHandler.getDestHandler(destinationType, version); const respList: ProcessorTransformationResponse[][] = await Promise.all( @@ -73,15 +70,10 @@ export class NativeIntegrationDestinationService implements DestinationService { tags.FEATURES.PROCESSOR, ); metaTO.metadata = event.metadata; - const loggerWithCtx = logger.child({ ...getLoggableData(metaTO.errorDetails) }); try { const transformedPayloads: | ProcessorTransformationOutput - | ProcessorTransformationOutput[] = await destHandler.process( - event, - requestMetadata, - loggerWithCtx, - ); + | ProcessorTransformationOutput[] = await destHandler.process(event, requestMetadata); return DestinationPostTransformationService.handleProcessorTransformSucessEvents( event, transformedPayloads, @@ -105,7 +97,6 @@ export class NativeIntegrationDestinationService implements DestinationService { destinationType: string, version: string, requestMetadata: NonNullable, - logger: any, ): Promise { const destHandler = FetchHandler.getDestHandler(destinationType, version); const allDestEvents: NonNullable = groupBy( @@ -121,16 +112,9 @@ export class NativeIntegrationDestinationService implements DestinationService { destInputArray[0].metadata?.workspaceId, tags.FEATURES.ROUTER, ); - const loggerWithCtx = logger.child({ - ...getLoggableData(metaTO.errorDetails), - }); try { const doRouterTransformationResponse: RouterTransformationResponse[] = - await destHandler.processRouterDest( - cloneDeep(destInputArray), - requestMetadata, - loggerWithCtx, - ); + await destHandler.processRouterDest(cloneDeep(destInputArray), requestMetadata); metaTO.metadata = destInputArray[0].metadata; return 
DestinationPostTransformationService.handleRouterTransformSuccessEvents( doRouterTransformationResponse, @@ -157,7 +141,6 @@ export class NativeIntegrationDestinationService implements DestinationService { destinationType: string, version: any, requestMetadata: NonNullable, - logger: any, ): RouterTransformationResponse[] { const destHandler = FetchHandler.getDestHandler(destinationType, version); if (!destHandler.batch) { @@ -176,14 +159,10 @@ export class NativeIntegrationDestinationService implements DestinationService { tags.FEATURES.BATCH, ); metaTO.metadatas = events.map((event) => event.metadata); - const loggerWithCtx = logger.child({ - ...getLoggableData(metaTO.errorDetails), - }); try { const destBatchedRequests: RouterTransformationResponse[] = destHandler.batch( destEvents, requestMetadata, - loggerWithCtx, ); return destBatchedRequests; } catch (error: any) { diff --git a/src/services/misc.ts b/src/services/misc.ts index 3727f6df7b..4378fe231e 100644 --- a/src/services/misc.ts +++ b/src/services/misc.ts @@ -1,12 +1,12 @@ /* eslint-disable global-require, import/no-dynamic-require */ -import { LoggableExtraData, structuredLogger as logger } from '@rudderstack/integrations-lib'; +import { LoggableExtraData } from '@rudderstack/integrations-lib'; import fs from 'fs'; import { Context } from 'koa'; import path from 'path'; import { DestHandlerMap } from '../constants/destinationCanonicalNames'; import { getCPUProfile, getHeapProfile } from '../middleware'; import { ErrorDetailer, Metadata } from '../types'; -import { getLoggableData } from '../v0/util'; +import logger from '../logger'; export class MiscService { public static getDestHandler(dest: string, version: string) { @@ -78,7 +78,7 @@ export class MiscService { } public static logError(message: string, errorDetailer: ErrorDetailer) { - const loggableExtraData: Partial = getLoggableData(errorDetailer); - logger.errorw(message || '', loggableExtraData); + const loggableExtraData: Partial = 
logger.getLogMetadata(errorDetailer); + logger.error(message || '', loggableExtraData); } } diff --git a/src/services/source/__tests__/nativeIntegration.test.ts b/src/services/source/__tests__/nativeIntegration.test.ts index 77e355fd1a..a2a5af041e 100644 --- a/src/services/source/__tests__/nativeIntegration.test.ts +++ b/src/services/source/__tests__/nativeIntegration.test.ts @@ -1,4 +1,3 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import { FetchHandler } from '../../../helpers/fetchHandlers'; import { RudderMessage, SourceTransformationResponse } from '../../../types/index'; import stats from '../../../util/stats'; @@ -44,13 +43,7 @@ describe('NativeIntegration Source Service', () => { }); const service = new NativeIntegrationSourceService(); - const resp = await service.sourceTransformRoutine( - events, - sourceType, - version, - requestMetadata, - logger, - ); + const resp = await service.sourceTransformRoutine(events, sourceType, version, requestMetadata); expect(resp).toEqual(tresponse); @@ -87,13 +80,7 @@ describe('NativeIntegration Source Service', () => { jest.spyOn(stats, 'increment').mockImplementation(() => {}); const service = new NativeIntegrationSourceService(); - const resp = await service.sourceTransformRoutine( - events, - sourceType, - version, - requestMetadata, - logger, - ); + const resp = await service.sourceTransformRoutine(events, sourceType, version, requestMetadata); expect(resp).toEqual(tresponse); diff --git a/src/services/source/nativeIntegration.ts b/src/services/source/nativeIntegration.ts index b68b5c44ad..a4f26d068a 100644 --- a/src/services/source/nativeIntegration.ts +++ b/src/services/source/nativeIntegration.ts @@ -9,8 +9,8 @@ import { import stats from '../../util/stats'; import { FixMe } from '../../util/types'; import tags from '../../v0/util/tags'; -import { getLoggableData } from '../../v0/util'; import { SourcePostTransformationService } from './postTransformation'; +import logger from 
'../../logger'; export class NativeIntegrationSourceService implements SourceService { public getTags(): MetaTransferObject { @@ -32,23 +32,23 @@ export class NativeIntegrationSourceService implements SourceService { version: string, // eslint-disable-next-line @typescript-eslint/no-unused-vars _requestMetadata: NonNullable, - logger: FixMe, ): Promise { const sourceHandler = FetchHandler.getSourceHandler(sourceType, version); const metaTO = this.getTags(); - const loggerWithCtx = logger.child({ ...getLoggableData(metaTO.errorDetails) }); const respList: SourceTransformationResponse[] = await Promise.all( sourceEvents.map(async (sourceEvent) => { try { const respEvents: RudderMessage | RudderMessage[] | SourceTransformationResponse = - await sourceHandler.process(sourceEvent, loggerWithCtx); + await sourceHandler.process(sourceEvent); return SourcePostTransformationService.handleSuccessEventsSource(respEvents); } catch (error: FixMe) { stats.increment('source_transform_errors', { source: sourceType, version, }); - logger.debug('Error during source Transform', error); + logger.debug(`Error during source Transform: ${error}`, { + ...logger.getLogMetadata(metaTO.errorDetails), + }); return SourcePostTransformationService.handleFailureEventsSource(error, metaTO); } }), diff --git a/src/util/errorNotifier/bugsnag.js b/src/util/errorNotifier/bugsnag.js index ef01c58730..a6a22655ad 100644 --- a/src/util/errorNotifier/bugsnag.js +++ b/src/util/errorNotifier/bugsnag.js @@ -22,7 +22,7 @@ const { NetworkInstrumentationError, } = require('@rudderstack/integrations-lib'); const { FilteredEventsError } = require('../../v0/util/errorTypes'); -const { logger } = require('../../logger'); +const logger = require('../../logger'); const pkg = require('../../../package.json'); const { diff --git a/src/util/redis/redisConnector.js b/src/util/redis/redisConnector.js index 404dc02e60..7dc20a305d 100644 --- a/src/util/redis/redisConnector.js +++ b/src/util/redis/redisConnector.js @@ -1,6 
+1,7 @@ const Redis = require('ioredis'); -const { RedisError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { RedisError } = require('@rudderstack/integrations-lib'); const stats = require('../stats'); +const logger = require('../../logger'); const timeoutPromise = () => new Promise((_, reject) => { diff --git a/src/util/redis/redisConnector.test.js b/src/util/redis/redisConnector.test.js index 7cf2ccbbcf..9a520e41a4 100644 --- a/src/util/redis/redisConnector.test.js +++ b/src/util/redis/redisConnector.test.js @@ -2,8 +2,9 @@ const fs = require('fs'); const path = require('path'); const version = 'v0'; const { RedisDB } = require('./redisConnector'); -const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const logger = require('../../logger'); jest.mock('ioredis', () => require('../../../test/__mocks__/redis')); + const sourcesList = ['shopify']; process.env.USE_REDIS_DB = 'true'; diff --git a/src/util/utils.js b/src/util/utils.js index d74603dd7a..1ac70b9541 100644 --- a/src/util/utils.js +++ b/src/util/utils.js @@ -170,7 +170,8 @@ function processInfo() { } function logProcessInfo() { - logger.error(`Process info: `, util.inspect(processInfo(), false, null, true)); + const inspectedInfo = util.inspect(processInfo(), false, Infinity, true); + logger.error(`Process info: ${inspectedInfo}`); } // stringLiterals expected to be an array of strings. 
A line in trace should contain diff --git a/src/v0/destinations/campaign_manager/transform.js b/src/v0/destinations/campaign_manager/transform.js index 403a79a971..53df0d3983 100644 --- a/src/v0/destinations/campaign_manager/transform.js +++ b/src/v0/destinations/campaign_manager/transform.js @@ -24,6 +24,7 @@ const { const { convertToMicroseconds } = require('./util'); const { JSON_MIME_TYPE } = require('../../util/constant'); +const logger = require('../../../logger'); function isEmptyObject(obj) { return Object.keys(obj).length === 0 && obj.constructor === Object; @@ -243,7 +244,7 @@ const batchEvents = (eventChunksArray) => { return batchedResponseList; }; -const processRouterDest = async (inputs, reqMetadata, logger) => { +const processRouterDest = async (inputs, reqMetadata) => { logger.debug(`Transformation router request received with size ${inputs.length}`); const batchErrorRespList = []; const eventChunksArray = []; diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js index 5a1ba2d69f..d8993734d3 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js @@ -123,7 +123,7 @@ const ProxyRequest = async (request) => { }, ); const { response: processedResp, status, headers: responseHeaders } = processedResponse; - logger.responseLog(`[${destType.toUpperCase()}] get conversion action id response`, { + logger.responseLog(`[${destType.toUpperCase()}] conversion enhancement response`, { metadata, responseDetails: { response: processedResp, diff --git a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js index 526b37969d..503e22353d 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js +++ 
b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js @@ -5,15 +5,13 @@ const { AbortedError, NetworkInstrumentationError, NetworkError, - structuredLogger: logger, } = require('@rudderstack/integrations-lib'); -const { prepareProxyRequest, httpSend, httpPOST } = require('../../../adapters/network'); +const { prepareProxyRequest, httpPOST, handleHttpRequest } = require('../../../adapters/network'); const { isHttpStatusSuccess, getHashFromArray, isDefinedAndNotNullAndNotEmpty, getAuthErrCategoryFromStCode, - getLoggableData, } = require('../../util'); const { getConversionActionId } = require('./utils'); const Cache = require('../../util/cache'); @@ -23,6 +21,7 @@ const { getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); const tags = require('../../util/tags'); +const logger = require('../../../logger'); const conversionCustomVariableCache = new Cache(CONVERSION_CUSTOM_VARIABLE_CACHE_TTL); @@ -42,9 +41,13 @@ const createJob = async ({ endpoint, headers, payload, metadata }) => { ); createJobResponse = processAxiosResponse(createJobResponse); const { response, status, headers: responseHeaders } = createJobResponse; - logger.debug(`[${destType.toUpperCase()}] create job`, { - ...getLoggableData(metadata), - ...(responseHeaders ? { responseHeaders } : {}), + logger.responseLog(`[${destType.toUpperCase()}] create job`, { + metadata, + responseDetails: { + headers: responseHeaders, + status, + response, + }, }); if (!isHttpStatusSuccess(status)) { throw new AbortedError( @@ -73,9 +76,13 @@ const addConversionToJob = async ({ endpoint, headers, jobId, payload, metadata ); addConversionToJobResponse = processAxiosResponse(addConversionToJobResponse); const { response, status, headers: responseHeaders } = addConversionToJobResponse; - logger.debug(`[${destType.toUpperCase()}] add conversion to job`, { - ...getLoggableData(metadata), - ...(responseHeaders ? 
{ responseHeaders } : {}), + logger.responseLog(`[${destType.toUpperCase()}] add conversion to job`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, }); if (!isHttpStatusSuccess(status)) { throw new AbortedError( @@ -90,7 +97,14 @@ const addConversionToJob = async ({ endpoint, headers, jobId, payload, metadata const runTheJob = async ({ endpoint, headers, payload, jobId, metadata }) => { const endPoint = `${endpoint}/${jobId}:run`; - const executeJobResponse = await httpPOST( + // logger.responseLog(`[${destType.toUpperCase()}] run job request`, { + // ...getLoggableData(metadata), + // requestBody: payload, + // method: 'POST', + // url: endPoint, + // }); + const { httpResponse: executeJobResponse, processedResponse } = await handleHttpRequest( + 'post', endPoint, payload, { headers }, @@ -102,10 +116,14 @@ const runTheJob = async ({ endpoint, headers, payload, jobId, metadata }) => { module: 'dataDelivery', }, ); - const { headers: responseHeaders } = executeJobResponse; - logger.debug(`[${destType.toUpperCase()}] run job`, { - ...getLoggableData(metadata), - ...(responseHeaders ? { responseHeaders } : {}), + const { headers: responseHeaders, response, status } = processedResponse; + logger.responseLog(`[${destType.toUpperCase()}] run job`, { + metadata, + responseDetails: { + response, + status, + responseHeaders, + }, }); return executeJobResponse; }; @@ -137,9 +155,13 @@ const getConversionCustomVariable = async ({ headers, params, metadata }) => { }); searchStreamResponse = processAxiosResponse(searchStreamResponse); const { response, status, headers: responseHeaders } = searchStreamResponse; - logger.debug(`[${destType.toUpperCase()}] get conversion custom variable`, { - ...getLoggableData(metadata), - ...(responseHeaders ? 
{ responseHeaders } : {}), + logger.responseLog(`[${destType.toUpperCase()}] get conversion custom variable`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, }); if (!isHttpStatusSuccess(status)) { throw new NetworkError( @@ -288,19 +310,23 @@ const ProxyRequest = async (request) => { } const requestBody = { url: endpoint, data: body.JSON, headers, method }; - const response = await httpSend(requestBody, { + const { httpResponse, processedResponse } = await handleHttpRequest('constructor', requestBody, { feature: 'proxy', destType: 'gogole_adwords_offline_conversions', endpointPath: `/proxy`, requestMethod: 'POST', module: 'dataDelivery', }); - const { headers: responseHeaders } = response; - logger.debug(`[${destType.toUpperCase()}] deliver event to destination`, { - ...getLoggableData(metadata), - ...(responseHeaders ? { responseHeaders } : {}), + const { headers: responseHeaders, status, response } = processedResponse; + logger.responseLog(`[${destType.toUpperCase()}] deliver event to destination`, { + metadata, + responseDetails: { + response, + headers: responseHeaders, + status, + }, }); - return response; + return httpResponse; }; const responseHandler = (responseParams) => { diff --git a/src/v0/destinations/google_adwords_offline_conversions/utils.js b/src/v0/destinations/google_adwords_offline_conversions/utils.js index ee992137c6..7fe565ef22 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/utils.js +++ b/src/v0/destinations/google_adwords_offline_conversions/utils.js @@ -5,7 +5,6 @@ const { AbortedError, ConfigurationError, InstrumentationError, - structuredLogger: logger, } = require('@rudderstack/integrations-lib'); const { httpPOST } = require('../../../adapters/network'); const { @@ -20,7 +19,6 @@ const { getAuthErrCategoryFromStCode, getAccessToken, getIntegrationsObj, - getLoggableData, } = require('../../util'); const { SEARCH_STREAM, @@ -38,6 +36,7 @@ const { processAxiosResponse } = 
require('../../../adapters/utils/networkUtils') const Cache = require('../../util/cache'); const helper = require('./helper'); const { finaliseConsent } = require('../../util/googleUtils'); +const logger = require('../../../logger'); const conversionActionIdCache = new Cache(CONVERSION_ACTION_ID_CACHE_TTL); @@ -81,9 +80,13 @@ const getConversionActionId = async ({ headers, params, metadata }) => { }); searchStreamResponse = processAxiosResponse(searchStreamResponse); const { response, status, headers: responseHeaders } = searchStreamResponse; - logger.debug(`[${destType.toUpperCase()}] get conversion custom variable`, { - ...getLoggableData(metadata), - ...(responseHeaders ? { responseHeaders } : {}), + logger.responseLog(`[${destType.toUpperCase()}] get conversion custom variable`, { + metadata, + responseDetails: { + response, + status, + headers: responseHeaders, + }, }); if (!isHttpStatusSuccess(status)) { throw new AbortedError( diff --git a/src/v0/destinations/klaviyo/util.js b/src/v0/destinations/klaviyo/util.js index 1f514a0c5b..4bcd86f56a 100644 --- a/src/v0/destinations/klaviyo/util.js +++ b/src/v0/destinations/klaviyo/util.js @@ -1,11 +1,8 @@ const { defaultRequestConfig } = require('rudder-transformer-cdk/build/utils'); const lodash = require('lodash'); -const { - NetworkError, - InstrumentationError, - structuredLogger: logger, -} = require('@rudderstack/integrations-lib'); +const { NetworkError, InstrumentationError } = require('@rudderstack/integrations-lib'); const { WhiteListedTraits } = require('../../../constants'); +const logger = require('../../../logger'); const { constructPayload, @@ -16,7 +13,6 @@ const { defaultBatchRequestConfig, getSuccessRespEvents, defaultPatchRequestConfig, - getLoggableData, } = require('../../util'); const tags = require('../../util/tags'); const { handleHttpRequest } = require('../../../adapters/network'); @@ -60,9 +56,11 @@ const getIdFromNewOrExistingProfile = async ({ endpoint, payload, requestOptions module: 
'router', }, ); - logger.debug(`[${destType.toUpperCase()}] get id from profile`, { - ...getLoggableData(metadata), - ...(resp.headers ? { responseHeaders: resp.headers } : {}), + logger.responseLog(`[${destType.toUpperCase()}] get id from profile`, { + metadata, + responseDetails: { + ...resp, + }, }); /** diff --git a/src/v0/destinations/mailchimp/utils.js b/src/v0/destinations/mailchimp/utils.js index a726f23a39..f678742f2d 100644 --- a/src/v0/destinations/mailchimp/utils.js +++ b/src/v0/destinations/mailchimp/utils.js @@ -1,10 +1,6 @@ const get = require('get-value'); const md5 = require('md5'); -const { - InstrumentationError, - NetworkError, - structuredLogger: logger, -} = require('@rudderstack/integrations-lib'); +const { InstrumentationError, NetworkError } = require('@rudderstack/integrations-lib'); const myAxios = require('../../../util/myAxios'); const { MappedToDestinationKey } = require('../../../constants'); const { @@ -19,6 +15,7 @@ const { defaultBatchRequestConfig, constructPayload, } = require('../../util'); +const logger = require('../../../logger'); const { MERGE_CONFIG, MERGE_ADDRESS, SUBSCRIPTION_STATUS, VALID_STATUSES } = require('./config'); const { getDynamicErrorType } = require('../../../adapters/utils/networkUtils'); const tags = require('../../util/tags'); diff --git a/src/v0/sources/canny/transform.js b/src/v0/sources/canny/transform.js index d2ceadb699..aad5a881c1 100644 --- a/src/v0/sources/canny/transform.js +++ b/src/v0/sources/canny/transform.js @@ -1,7 +1,8 @@ const sha256 = require('sha256'); -const { TransformationError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { TransformationError } = require('@rudderstack/integrations-lib'); const Message = require('../message'); const { voterMapping, authorMapping, checkForRequiredFields } = require('./util'); +const logger = require('../../../logger'); const CannyOperation = { VOTE_CREATED: 'vote.created', diff --git 
a/src/v0/sources/shopify/transform.js b/src/v0/sources/shopify/transform.js index 7d427ebe06..bc2135d215 100644 --- a/src/v0/sources/shopify/transform.js +++ b/src/v0/sources/shopify/transform.js @@ -1,7 +1,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ const lodash = require('lodash'); const get = require('get-value'); -const { RedisError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { RedisError } = require('@rudderstack/integrations-lib'); const stats = require('../../../util/stats'); const { getShopifyTopic, @@ -13,6 +13,7 @@ const { getHashLineItems, getDataFromRedis, } = require('./util'); +const logger = require('../../../logger'); const { RedisDB } = require('../../../util/redis/redisConnector'); const { removeUndefinedAndNullValues, isDefinedAndNotNull } = require('../../util'); const Message = require('../message'); diff --git a/src/v0/sources/shopify/util.js b/src/v0/sources/shopify/util.js index 3dc54cc434..6aea0d19bd 100644 --- a/src/v0/sources/shopify/util.js +++ b/src/v0/sources/shopify/util.js @@ -2,7 +2,7 @@ /* eslint-disable @typescript-eslint/naming-convention */ const { v5 } = require('uuid'); const sha256 = require('sha256'); -const { TransformationError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { TransformationError } = require('@rudderstack/integrations-lib'); const stats = require('../../../util/stats'); const { constructPayload, @@ -22,6 +22,7 @@ const { useRedisDatabase, maxTimeToIdentifyRSGeneratedCall, } = require('./config'); +const logger = require('../../../logger'); const getDataFromRedis = async (key, metricMetadata) => { try { diff --git a/src/v0/util/index.js b/src/v0/util/index.js index 362652d489..ac1bacf404 100644 --- a/src/v0/util/index.js +++ b/src/v0/util/index.js @@ -2234,22 +2234,6 @@ const validateEventAndLowerCaseConversion = (event, isMandatory, convertToLowerC return convertToLowerCase ? 
event.toString().toLowerCase() : event.toString(); }; -function getLoggableData(metadata) { - let reqMeta = metadata; - if (Array.isArray(metadata)) { - [reqMeta] = metadata; - } - return { - ...(reqMeta?.destinationId && { destinationId: reqMeta.destinationId }), - ...(reqMeta?.sourceId && { sourceId: reqMeta.sourceId }), - ...(reqMeta?.workspaceId && { workspaceId: reqMeta.workspaceId }), - ...(reqMeta?.destType && { destType: reqMeta.destType }), - ...(reqMeta?.module && { module: reqMeta.module }), - ...(reqMeta?.implementation && { implementation: reqMeta.implementation }), - ...(reqMeta?.feature && { feature: reqMeta.feature }), - }; -} - // ======================================================================== // EXPORTS // ======================================================================== @@ -2368,5 +2352,4 @@ module.exports = { removeDuplicateMetadata, combineBatchRequestsWithSameJobIds, validateEventAndLowerCaseConversion, - getLoggableData, }; diff --git a/src/v1/destinations/campaign_manager/networkHandler.js b/src/v1/destinations/campaign_manager/networkHandler.js index 53e7dc1b35..eee3869fb5 100644 --- a/src/v1/destinations/campaign_manager/networkHandler.js +++ b/src/v1/destinations/campaign_manager/networkHandler.js @@ -1,19 +1,15 @@ /* eslint-disable no-param-reassign */ /* eslint-disable no-restricted-syntax */ -const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); const { TransformerProxyError } = require('../../../v0/util/errorTypes'); const { prepareProxyRequest, proxyRequest } = require('../../../adapters/network'); -const { - isHttpStatusSuccess, - getAuthErrCategoryFromStCode, - getLoggableData, -} = require('../../../v0/util/index'); +const { isHttpStatusSuccess, getAuthErrCategoryFromStCode } = require('../../../v0/util/index'); const { processAxiosResponse, getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); const tags = require('../../../v0/util/tags'); +const logger = 
require('../../../logger'); function isEventAbortableAndExtractErrMsg(element, proxyOutputObj) { let isAbortable = false; @@ -45,11 +41,13 @@ const responseHandler = (responseParams) => { const responseWithIndividualEvents = []; const { response, status, headers } = destinationResponse; - logger.debug('[campaign_manager] response handling', { - ...getLoggableData(rudderJobMetadata), - ...(headers ? { headers } : {}), - response, - status, + logger.responseLog('[campaign_manager] response handling', { + metadata: rudderJobMetadata, + responseDetails: { + headers, + response, + status, + }, }); if (isHttpStatusSuccess(status)) { diff --git a/src/v1/destinations/monday/networkHandler.js b/src/v1/destinations/monday/networkHandler.js index d55b4ce907..c92ef3c794 100644 --- a/src/v1/destinations/monday/networkHandler.js +++ b/src/v1/destinations/monday/networkHandler.js @@ -1,12 +1,12 @@ -const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); const { TransformerProxyError } = require('../../../v0/util/errorTypes'); const { proxyRequest, prepareProxyRequest } = require('../../../adapters/network'); const { processAxiosResponse, getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); -const { isHttpStatusSuccess, getLoggableData } = require('../../../v0/util/index'); +const { isHttpStatusSuccess } = require('../../../v0/util/index'); const tags = require('../../../v0/util/tags'); +const logger = require('../../../logger'); const checkIfUpdationOfStatusRequired = (response) => { let errorMsg = ''; @@ -44,11 +44,13 @@ const responseHandler = (responseParams) => { const responseWithIndividualEvents = []; const { response, status, headers } = destinationResponse; - logger.debug('[campaign_manager] response handling', { - ...getLoggableData(rudderJobMetadata), - ...(headers ? 
{ headers } : {}), - response, - status, + logger.responseLog('[campaign_manager] response handling', { + metadata: rudderJobMetadata, + responseDetails: { + headers, + response, + status, + }, }); // batching not supported if (isHttpStatusSuccess(status)) { diff --git a/test/__tests__/pinterestConversion-cdk.test.ts b/test/__tests__/pinterestConversion-cdk.test.ts index 6aaa710ed7..2afde331d5 100644 --- a/test/__tests__/pinterestConversion-cdk.test.ts +++ b/test/__tests__/pinterestConversion-cdk.test.ts @@ -1,8 +1,8 @@ -import { structuredLogger as logger } from '@rudderstack/integrations-lib'; import fs from 'fs'; import path from 'path'; import { executeWorkflow, getWorkflowEngine, processCdkV2Workflow } from '../../src/cdk/v2/handler'; import tags from '../../src/v0/util/tags'; +import logger from '../../src/logger'; const integration = 'pinterest_tag'; const name = 'Pinterest Conversion API'; From e533b889bbba1e0df76ce297a1dd27ce98d4a511 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 27 May 2024 15:56:46 +0530 Subject: [PATCH 07/37] chore: response logging in garl Signed-off-by: Sai Sankeerth --- .../config.js | 1 + .../networkHandler.js | 80 ++++++++++++------- 2 files changed, 54 insertions(+), 27 deletions(-) diff --git a/src/v0/destinations/google_adwords_remarketing_lists/config.js b/src/v0/destinations/google_adwords_remarketing_lists/config.js index 0f08b3866d..f8983699c6 100644 --- a/src/v0/destinations/google_adwords_remarketing_lists/config.js +++ b/src/v0/destinations/google_adwords_remarketing_lists/config.js @@ -29,4 +29,5 @@ module.exports = { offlineDataJobsMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.AUDIENCE_LIST.name], addressInfoMapping: MAPPING_CONFIG[CONFIG_CATEGORIES.ADDRESSINFO.name], consentConfigMap, + destType: 'google_adwords_remarketing_lists', }; diff --git a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js index 
3045c1713f..940a2b8cd1 100644 --- a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js +++ b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js @@ -1,12 +1,14 @@ const { NetworkError } = require('@rudderstack/integrations-lib'); -const { httpSend, prepareProxyRequest } = require('../../../adapters/network'); +const { prepareProxyRequest, handleHttpRequest } = require('../../../adapters/network'); const { isHttpStatusSuccess, getAuthErrCategoryFromStCode } = require('../../util/index'); +const logger = require('../../../logger'); const { processAxiosResponse, getDynamicErrorType, } = require('../../../adapters/utils/networkUtils'); const tags = require('../../util/tags'); +const { destType } = require('./config'); /** * This function helps to create a offlineUserDataJobs * @param endpoint @@ -18,7 +20,7 @@ const tags = require('../../util/tags'); * ref: https://developers.google.com/google-ads/api/rest/reference/rest/v15/CustomerMatchUserListMetadata */ -const createJob = async (endpoint, headers, method, params) => { +const createJob = async ({ endpoint, headers, method, params, metadata }) => { const jobCreatingUrl = `${endpoint}:create`; const customerMatchUserListMetadata = { userList: `customers/${params.customerId}/userLists/${params.listId}`, @@ -37,14 +39,22 @@ const createJob = async (endpoint, headers, method, params) => { headers, method, }; - const response = await httpSend(jobCreatingRequest, { - destType: 'google_adwords_remarketing_lists', - feature: 'proxy', - endpointPath: '/customers/create', - requestMethod: 'POST', - module: 'dataDelivery', + const { httpResponse, processedResponse } = await handleHttpRequest( + 'constructor', + jobCreatingRequest, + { + destType: 'google_adwords_remarketing_lists', + feature: 'proxy', + endpointPath: '/customers/create', + requestMethod: 'POST', + module: 'dataDelivery', + }, + ); + logger.responseLog(`[${destType.toUpperCase()}] job creation response`, { + metadata, + 
responseDetails: processedResponse, }); - return response; + return httpResponse; }; /** * This function helps to put user details in a offlineUserDataJobs @@ -55,7 +65,7 @@ const createJob = async (endpoint, headers, method, params) => { * @param body */ -const addUserToJob = async (endpoint, headers, method, jobId, body) => { +const addUserToJob = async ({ endpoint, headers, method, jobId, body, metadata }) => { const jobAddingUrl = `${endpoint}/${jobId}:addOperations`; const secondRequest = { url: jobAddingUrl, @@ -63,12 +73,20 @@ const addUserToJob = async (endpoint, headers, method, jobId, body) => { headers, method, }; - const response = await httpSend(secondRequest, { - destType: 'google_adwords_remarketing_lists', - feature: 'proxy', - endpointPath: '/addOperations', - requestMethod: 'POST', - module: 'dataDelivery', + const { httpResponse: response, processedResponse } = await handleHttpRequest( + 'constructor', + secondRequest, + { + destType: 'google_adwords_remarketing_lists', + feature: 'proxy', + endpointPath: '/addOperations', + requestMethod: 'POST', + module: 'dataDelivery', + }, + ); + logger.responseLog(`[${destType.toUpperCase()}] add user to job response`, { + metadata, + responseDetails: processedResponse, }); return response; }; @@ -80,19 +98,27 @@ const addUserToJob = async (endpoint, headers, method, jobId, body) => { * @param method * @param jobId */ -const runTheJob = async (endpoint, headers, method, jobId) => { +const runTheJob = async ({ endpoint, headers, method, jobId, metadata }) => { const jobRunningUrl = `${endpoint}/${jobId}:run`; const thirdRequest = { url: jobRunningUrl, headers, method, }; - const response = await httpSend(thirdRequest, { - destType: 'google_adwords_remarketing_lists', - feature: 'proxy', - endpointPath: '/run', - requestMethod: 'POST', - module: 'dataDelivery', + const { httpResponse: response, processedResponse } = await handleHttpRequest( + 'constructor', + thirdRequest, + { + destType: 
'google_adwords_remarketing_lists', + feature: 'proxy', + endpointPath: '/run', + requestMethod: 'POST', + module: 'dataDelivery', + }, + ); + logger.responseLog(`[${destType.toUpperCase()}] run job response`, { + metadata, + responseDetails: processedResponse, }); return response; }; @@ -104,12 +130,12 @@ const runTheJob = async (endpoint, headers, method, jobId) => { * @returns */ const gaAudienceProxyRequest = async (request) => { - const { body, method, params, endpoint } = request; + const { body, method, params, endpoint, metadata } = request; const { headers } = request; // step1: offlineUserDataJobs creation - const firstResponse = await createJob(endpoint, headers, method, params); + const firstResponse = await createJob({ endpoint, headers, method, params, metadata }); if (!firstResponse.success && !isHttpStatusSuccess(firstResponse?.response?.status)) { return firstResponse; } @@ -126,7 +152,7 @@ const gaAudienceProxyRequest = async (request) => { if (firstResponse?.response?.data?.resourceName) // eslint-disable-next-line prefer-destructuring jobId = firstResponse.response.data.resourceName.split('/')[3]; - const secondResponse = await addUserToJob(endpoint, headers, method, jobId, body); + const secondResponse = await addUserToJob({ endpoint, headers, method, jobId, body, metadata }); if (!secondResponse.success && !isHttpStatusSuccess(secondResponse?.response?.status)) { return secondResponse; } @@ -139,7 +165,7 @@ const gaAudienceProxyRequest = async (request) => { } // step3: running the job - const thirdResponse = await runTheJob(endpoint, headers, method, jobId); + const thirdResponse = await runTheJob({ endpoint, headers, method, jobId, metadata }); return thirdResponse; }; From 2939647e0006f98701ed08fdb9851d2ded0b1575 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 27 May 2024 16:20:52 +0530 Subject: [PATCH 08/37] chore: add support for request logging in logger & gaec handler Signed-off-by: Sai Sankeerth --- src/logger.js | 13 
+++++++++++++ .../networkHandler.js | 11 ++++++++++- 2 files changed, 23 insertions(+), 1 deletion(-) diff --git a/src/logger.js b/src/logger.js index 83786f8642..09c9cfff14 100644 --- a/src/logger.js +++ b/src/logger.js @@ -64,6 +64,18 @@ const getLogMetadata = (metadata) => { }; }; +const requestLog = (identifierMsg, { metadata, requestDetails: { url, body, method } }) => { + const logger = getLogger(); + if (levelError >= logLevel) { + logger.debug(identifierMsg, { + ...getLogMetadata(metadata), + url, + body, + method, + }); + } +}; + const responseLog = ( identifierMsg, { metadata, responseDetails: { response: responseBody, status, headers: responseHeaders } }, @@ -91,4 +103,5 @@ module.exports = { levelError, responseLog, getLogMetadata, + requestLog, }; diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js index d8993734d3..d795bc171c 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js @@ -38,8 +38,13 @@ const getConversionActionId = async ({ method, headers, params, metadata }) => { const data = { query: queryString, }; + const searchStreamEndpoint = `${BASE_ENDPOINT}/${params.customerId}/googleAds:searchStream`; + logger.requestLog(`[${destType.toUpperCase()}] conversion enhancement request`, { + metadata, + requestDetails: { url: searchStreamEndpoint, body: data, method }, + }); const requestBody = { - url: `${BASE_ENDPOINT}/${params.customerId}/googleAds:searchStream`, + url: searchStreamEndpoint, data, headers, method, @@ -110,6 +115,10 @@ const ProxyRequest = async (request) => { 'conversionAdjustments[0].conversionAction', `customers/${params.customerId}/conversionActions/${conversionActionId}`, ); + logger.requestLog(`[${destType.toUpperCase()}] conversion enhancement request`, { + metadata, + requestDetails: { url: endpoint, 
body: body.JSON, method }, + }); const requestBody = { url: endpoint, data: body.JSON, headers, method }; const { httpResponse: response, processedResponse } = await handleHttpRequest( 'constructor', From 734f900a00e0c534dd2f2ab63cfa43c8478ea477 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 27 May 2024 16:44:27 +0530 Subject: [PATCH 09/37] chore: merge context & metadata for default notifier into a single object while logging error Signed-off-by: Sai Sankeerth --- src/util/errorNotifier/default.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/util/errorNotifier/default.js b/src/util/errorNotifier/default.js index 28557a22f2..18f04f055d 100644 --- a/src/util/errorNotifier/default.js +++ b/src/util/errorNotifier/default.js @@ -3,7 +3,7 @@ const logger = require('../../logger'); function init() {} function notify(err, context, metadata) { - logger.error(err, context, metadata); + logger.error(err, { context, metadata }); } module.exports = { From 8a6bf3a402adda0bcb681a59fcc666d76b221ad6 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 27 May 2024 17:12:09 +0530 Subject: [PATCH 10/37] chore: update response log message & responseDetails Signed-off-by: Sai Sankeerth --- src/v0/destinations/klaviyo/util.js | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/src/v0/destinations/klaviyo/util.js b/src/v0/destinations/klaviyo/util.js index 4bcd86f56a..97b690e093 100644 --- a/src/v0/destinations/klaviyo/util.js +++ b/src/v0/destinations/klaviyo/util.js @@ -56,11 +56,9 @@ const getIdFromNewOrExistingProfile = async ({ endpoint, payload, requestOptions module: 'router', }, ); - logger.responseLog(`[${destType.toUpperCase()}] get id from profile`, { + logger.responseLog(`[${destType.toUpperCase()}] get id from profile response`, { metadata, - responseDetails: { - ...resp, - }, + responseDetails: resp, }); /** From 4fc6c07b4c4eb62672c4d610efc459644804b581 Mon Sep 17 00:00:00 2001 From: ItsSudip Date: Tue, 4 Jun 2024 
11:54:04 +0530 Subject: [PATCH 11/37] chore: add log to track the request for offline conversion creation for GAOC --- .../google_adwords_offline_conversions/networkHandler.js | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js index 503e22353d..b5ea2669a1 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js @@ -308,8 +308,11 @@ const ProxyRequest = async (request) => { set(body.JSON, 'conversions.0.customVariables', resultantCustomVariables); } } - const requestBody = { url: endpoint, data: body.JSON, headers, method }; + logger.requestLog(`[${destType.toUpperCase()}] offline conversion creation request`, { + metadata, + requestDetails: { url: requestBody.url, body: requestBody.data, method }, + }); const { httpResponse, processedResponse } = await handleHttpRequest('constructor', requestBody, { feature: 'proxy', destType: 'gogole_adwords_offline_conversions', From 8ad1a5fa4c3248662735d09e2658b050897c5db1 Mon Sep 17 00:00:00 2001 From: ItsSudip Date: Tue, 4 Jun 2024 13:21:01 +0530 Subject: [PATCH 12/37] chore: update loglevel to warn for request and response logs --- src/logger.js | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/src/logger.js b/src/logger.js index 09c9cfff14..97c717d11d 100644 --- a/src/logger.js +++ b/src/logger.js @@ -66,8 +66,8 @@ const requestLog = (identifierMsg, { metadata, requestDetails: { url, body, method } }) => { const logger = getLogger(); - if (levelError >= logLevel) { - logger.debug(identifierMsg, { + if (logLevel === levelWarn) { + logger.warn(identifierMsg, { ...getLogMetadata(metadata), url, body, method, }); @@ -81,8 +81,8 @@ const responseLog = ( identifierMsg,
status, headers: responseHeaders } }, ) => { const logger = getLogger(); - if (levelError >= logLevel) { - logger.debug(identifierMsg, { + if (logLevel === levelWarn) { + logger.warn(identifierMsg, { ...getLogMetadata(metadata), ...(responseBody ? { responseBody } : {}), ...(responseHeaders ? { responseHeaders } : {}), From 2d19625ab71c3f37acda33b73f7f91fe5cdc55e3 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Mon, 10 Jun 2024 19:13:06 +0530 Subject: [PATCH 13/37] chore: refactor with different convention of log levels Signed-off-by: Sai Sankeerth --- benchmark/metaLogger.js | 8 +-- src/logger.js | 123 ++++++++++++++++++++++++---------------- 2 files changed, 78 insertions(+), 53 deletions(-) diff --git a/benchmark/metaLogger.js b/benchmark/metaLogger.js index 2af1f599c9..757e7039e0 100644 --- a/benchmark/metaLogger.js +++ b/benchmark/metaLogger.js @@ -5,25 +5,25 @@ const logger = require('../src/logger'); logger.setLogLevel(Number.POSITIVE_INFINITY); const debug = (...args) => { - logger.setLogLevel(logger.levelDebug); + logger.setLogLevel('debug'); logger.debug(...args); logger.setLogLevel(Number.POSITIVE_INFINITY); }; const info = (...args) => { - logger.setLogLevel(logger.levelInfo); + logger.setLogLevel('info'); logger.info(...args); logger.setLogLevel(Number.POSITIVE_INFINITY); }; const warn = (...args) => { - logger.setLogLevel(logger.levelWarn); + logger.setLogLevel('warn'); logger.warn(...args); logger.setLogLevel(Number.POSITIVE_INFINITY); }; const error = (...args) => { - logger.setLogLevel(logger.levelError); + logger.setLogLevel('error'); logger.error(...args); logger.setLogLevel(Number.POSITIVE_INFINITY); }; diff --git a/src/logger.js b/src/logger.js index 97c717d11d..640a5ae05b 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,14 +1,17 @@ /* istanbul ignore file */ -const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { structuredLogger: logger /* LOGLEVELS */ } = require('@rudderstack/integrations-lib'); + 
+const LOGLEVELS = { + debug: 0, // Most verbose logging level + info: 1, // Logs about state of the application + warn: 2, // Logs about warnings which dont immediately halt the application + error: 3, // Logs about errors which dont immediately halt the application +}; -const levelDebug = 0; // Most verbose logging level -const levelInfo = 1; // Logs about state of the application -const levelWarn = 2; // Logs about warnings which dont immediately halt the application -const levelError = 3; // Logs about errors which dont immediately halt the application // any value greater than levelError will work as levelNone const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; -let logLevel = process.env.LOG_LEVEL ? parseInt(process.env.LOG_LEVEL, 10) : levelInfo; +let logLevel = process.env.LOG_LEVEL ?? 'error'; const setLogLevel = (level) => { const logger = getLogger(); @@ -16,78 +19,100 @@ const setLogLevel = (level) => { logger?.setLogLevel(`${loglevel}`); }; +const getLogMetadata = (metadata) => { + let reqMeta = metadata; + if (Array.isArray(metadata)) { + [reqMeta] = metadata; + } + return { + ...(reqMeta?.destinationId && { destinationId: reqMeta.destinationId }), + ...(reqMeta?.sourceId && { sourceId: reqMeta.sourceId }), + ...(reqMeta?.workspaceId && { workspaceId: reqMeta.workspaceId }), + ...(reqMeta?.destType && { destType: reqMeta.destType }), + ...(reqMeta?.module && { module: reqMeta.module }), + ...(reqMeta?.implementation && { implementation: reqMeta.implementation }), + ...(reqMeta?.feature && { feature: reqMeta.feature }), + }; +}; + +const log = (logMethod, args) => { + const [message, logInfo, ...otherArgs] = args; + if (logInfo) { + const { metadata, ...otherLogInfoArgs } = logInfo; + if (Array.isArray(metadata)) { + metadata.forEach((m) => { + logMethod( + message, + { + ...getLogMetadata(m), + ...otherLogInfoArgs, + }, + ...otherArgs, + ); + }); + return; + } + logMethod( + message, + { + ...getLogMetadata(metadata), + ...otherLogInfoArgs, + }, 
+ ...otherArgs, + ); + return; + } + logMethod(message); +}; + const getLogger = () => { return loggerImpl === 'winston' ? logger : console; }; const debug = (...args) => { const logger = getLogger(); - if (levelDebug >= logLevel) { - logger.debug(...args); + if (logLevel >= LOGLEVELS.debug) { + log(logger.debug, args); } }; const info = (...args) => { const logger = getLogger(); - if (levelInfo >= logLevel) { - logger.info(...args); + if (logLevel >= LOGLEVELS.info) { + log(logger.info, args); } }; const warn = (...args) => { const logger = getLogger(); - if (levelWarn >= logLevel) { - logger.warn(...args); + if (logLevel >= LOGLEVELS.warn) { + log(logger.warn, args); } }; const error = (...args) => { const logger = getLogger(); - if (levelError >= logLevel) { - logger.error(...args); - } -}; - -const getLogMetadata = (metadata) => { - let reqMeta = metadata; - if (Array.isArray(metadata)) { - [reqMeta] = metadata; + if (logLevel >= LOGLEVELS.error) { + log(logger.error, args); } - return { - ...(reqMeta?.destinationId && { destinationId: reqMeta.destinationId }), - ...(reqMeta?.sourceId && { sourceId: reqMeta.sourceId }), - ...(reqMeta?.workspaceId && { workspaceId: reqMeta.workspaceId }), - ...(reqMeta?.destType && { destType: reqMeta.destType }), - ...(reqMeta?.module && { module: reqMeta.module }), - ...(reqMeta?.implementation && { implementation: reqMeta.implementation }), - ...(reqMeta?.feature && { feature: reqMeta.feature }), - }; }; const requestLog = (identifierMsg, { metadata, requestDetails: { url, body, method } }) => { const logger = getLogger(); - if (logLevel === levelWarn) { - logger.warn(identifierMsg, { - ...getLogMetadata(metadata), - url, - body, - method, - }); + if (logLevel === LOGLEVELS.warn) { + const reqLogArgs = [identifierMsg, { metadata, url, body, method }]; + log(logger.warn, reqLogArgs); } }; const responseLog = ( identifierMsg, - { metadata, responseDetails: { response: responseBody, status, headers: responseHeaders } }, + { 
metadata, responseDetails: { response: body, status, headers } }, ) => { const logger = getLogger(); - if (logLevel === levelWarn) { - logger.warn(identifierMsg, { - ...getLogMetadata(metadata), - ...(responseBody ? { responseBody } : {}), - ...(responseHeaders ? { responseHeaders } : {}), - status, - }); + if (logLevel === LOGLEVELS.warn) { + const resLogArgs = [identifierMsg, { metadata, body, status, headers }]; + log(logger.warn, resLogArgs); } }; @@ -97,10 +122,10 @@ module.exports = { warn, error, setLogLevel, - levelDebug, - levelInfo, - levelWarn, - levelError, + // levelDebug, + // levelInfo, + // levelWarn, + // levelError, responseLog, getLogMetadata, requestLog, From 827157b595b1da2a84ecd3d21896d1982ead4026 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Tue, 11 Jun 2024 17:30:14 +0530 Subject: [PATCH 14/37] chore: set appropriate level for metalogger in benchmarking Signed-off-by: Sai Sankeerth --- benchmark/metaLogger.js | 10 +++++----- 1 file changed, 5 insertions(+), 5 deletions(-) diff --git a/benchmark/metaLogger.js b/benchmark/metaLogger.js index 757e7039e0..b89ad71066 100644 --- a/benchmark/metaLogger.js +++ b/benchmark/metaLogger.js @@ -2,30 +2,30 @@ const logger = require('../src/logger'); -logger.setLogLevel(Number.POSITIVE_INFINITY); +logger.setLogLevel('random'); const debug = (...args) => { logger.setLogLevel('debug'); logger.debug(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; const info = (...args) => { logger.setLogLevel('info'); logger.info(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; const warn = (...args) => { logger.setLogLevel('warn'); logger.warn(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; const error = (...args) => { logger.setLogLevel('error'); logger.error(...args); - logger.setLogLevel(Number.POSITIVE_INFINITY); + logger.setLogLevel('random'); }; module.exports = { From 
12a4ce26aab6dc08a3bb832ad4e8bebf9bd66616 Mon Sep 17 00:00:00 2001 From: ItsSudip Date: Wed, 12 Jun 2024 12:37:44 +0530 Subject: [PATCH 15/37] chore: load env in logger and fix logLevel condition logic --- src/logger.js | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/src/logger.js b/src/logger.js index 640a5ae05b..acda22893d 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,6 +1,10 @@ +const dotenv = require('dotenv'); + /* istanbul ignore file */ const { structuredLogger: logger /* LOGLEVELS */ } = require('@rudderstack/integrations-lib'); +dotenv.config({ path: '../.env' }); + const LOGLEVELS = { debug: 0, // Most verbose logging level info: 1, // Logs about state of the application @@ -71,35 +75,35 @@ const getLogger = () => { const debug = (...args) => { const logger = getLogger(); - if (logLevel >= LOGLEVELS.debug) { + if (LOGLEVELS.debug >= logLevel) { log(logger.debug, args); } }; const info = (...args) => { const logger = getLogger(); - if (logLevel >= LOGLEVELS.info) { + if (LOGLEVELS.info >= LOGLEVELS[logLevel]) { log(logger.info, args); } }; const warn = (...args) => { const logger = getLogger(); - if (logLevel >= LOGLEVELS.warn) { + if (LOGLEVELS.warn >= LOGLEVELS[logLevel]) { log(logger.warn, args); } }; const error = (...args) => { const logger = getLogger(); - if (logLevel >= LOGLEVELS.error) { + if (LOGLEVELS.error >= LOGLEVELS[logLevel]) { log(logger.error, args); } }; const requestLog = (identifierMsg, { metadata, requestDetails: { url, body, method } }) => { const logger = getLogger(); - if (logLevel === LOGLEVELS.warn) { + if (LOGLEVELS[logLevel] === LOGLEVELS.warn) { const reqLogArgs = [identifierMsg, { metadata, url, body, method }]; log(logger.warn, reqLogArgs); } @@ -110,7 +114,7 @@ const responseLog = ( { metadata, responseDetails: { response: body, status, headers } }, ) => { const logger = getLogger(); - if (logLevel === LOGLEVELS.warn) { + if (LOGLEVELS[logLevel] === LOGLEVELS.warn) { const 
resLogArgs = [identifierMsg, { metadata, body, status, headers }]; log(logger.warn, resLogArgs); } From efdb9c52a896197d4a83bd21cc60205c230883bd Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Wed, 12 Jun 2024 17:50:05 +0530 Subject: [PATCH 16/37] chore: add request logs for google adwords destinations chore: add request logs for proxy request Signed-off-by: Sai Sankeerth --- src/adapters/network.js | 53 ++++++++++++------- src/logger.js | 6 +++ .../campaign_manager/transform.js | 2 - .../networkHandler.js | 2 + .../networkHandler.js | 43 ++++++++++++--- .../utils.js | 9 ++++ .../networkHandler.js | 26 +++++++++ 7 files changed, 114 insertions(+), 27 deletions(-) diff --git a/src/adapters/network.js b/src/adapters/network.js index 0720638d12..882676dd8f 100644 --- a/src/adapters/network.js +++ b/src/adapters/network.js @@ -52,12 +52,14 @@ const fireHTTPStats = (clientResponse, startTime, statTags) => { const requestMethod = statTags.requestMethod ? statTags.requestMethod : ''; const module = statTags.module ? statTags.module : ''; const statusCode = clientResponse.success ? 
clientResponse.response.status : ''; + const logMetaInfo = log.getLogMetadata(statTags?.metadata); stats.timing('outgoing_request_latency', startTime, { feature, destType, endpointPath, requestMethod, module, + ...logMetaInfo, }); stats.counter('outgoing_request_count', 1, { feature, @@ -322,25 +324,6 @@ const prepareProxyRequest = (request) => { return removeUndefinedValues({ endpoint, data, params, headers, method, config }); }; -/** - * depricating: handles proxying requests to destinations from server, expects requsts in "defaultRequestConfig" - * note: needed for test api - * @param {*} request - * @returns - */ -const proxyRequest = async (request, destType) => { - const { endpoint, data, method, params, headers } = prepareProxyRequest(request); - const requestOptions = { - url: endpoint, - data, - params, - headers, - method, - }; - const response = await httpSend(requestOptions, { feature: 'proxy', destType }); - return response; -}; - /** * handles http request and sends the response in a simple format that is followed in transformer * @@ -392,6 +375,38 @@ const handleHttpRequest = async (requestType = 'post', ...httpArgs) => { return { httpResponse, processedResponse }; }; +/** + * depricating: handles proxying requests to destinations from server, expects requsts in "defaultRequestConfig" + * note: needed for test api + * @param {*} request + * @returns + */ +const proxyRequest = async (request, destType) => { + const { metadata } = request; + const { endpoint, data, method, params, headers } = prepareProxyRequest(request); + const requestOptions = { + url: endpoint, + data, + params, + headers, + method, + }; + log.requestLog(`[${destType.toUpperCase()}] delivering data`, { + metadata, + requestDetails: { + body: data, + url: endpoint, + method, + }, + }); + const response = await httpSend(requestOptions, { + feature: 'proxy', + destType, + metadata, + }); + return response; +}; + module.exports = { httpSend, httpGET, diff --git a/src/logger.js 
b/src/logger.js index acda22893d..101689d0cc 100644 --- a/src/logger.js +++ b/src/logger.js @@ -23,6 +23,12 @@ const setLogLevel = (level) => { logger?.setLogLevel(`${loglevel}`); }; +/** + * obtains the metadata for logging + * + * @param {*} metadata + * @returns { destinationId:string, sourceId:string, workspaceId: string, destType:string, module:string, implementation:string, feature:string } + */ const getLogMetadata = (metadata) => { let reqMeta = metadata; if (Array.isArray(metadata)) { diff --git a/src/v0/destinations/campaign_manager/transform.js b/src/v0/destinations/campaign_manager/transform.js index 53df0d3983..14bc6d2c19 100644 --- a/src/v0/destinations/campaign_manager/transform.js +++ b/src/v0/destinations/campaign_manager/transform.js @@ -24,7 +24,6 @@ const { const { convertToMicroseconds } = require('./util'); const { JSON_MIME_TYPE } = require('../../util/constant'); -const logger = require('../../../logger'); function isEmptyObject(obj) { return Object.keys(obj).length === 0 && obj.constructor === Object; @@ -245,7 +244,6 @@ const batchEvents = (eventChunksArray) => { }; const processRouterDest = async (inputs, reqMetadata) => { - logger.debug(`Transformation router request received with size ${inputs.length}`); const batchErrorRespList = []; const eventChunksArray = []; const { destination } = inputs[0]; diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js index d795bc171c..b790bb04a2 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js @@ -58,6 +58,7 @@ const getConversionActionId = async ({ method, headers, params, metadata }) => { endpointPath: `/googleAds:searchStream`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); const { status, response, headers: responseHeaders } = gaecConversionActionIdResponse; 
@@ -129,6 +130,7 @@ const ProxyRequest = async (request) => { endpointPath: `/googleAds:uploadOfflineUserData`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); const { response: processedResp, status, headers: responseHeaders } = processedResponse; diff --git a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js index b5ea2669a1..51bc57d176 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_offline_conversions/networkHandler.js @@ -27,6 +27,14 @@ const conversionCustomVariableCache = new Cache(CONVERSION_CUSTOM_VARIABLE_CACHE const createJob = async ({ endpoint, headers, payload, metadata }) => { const endPoint = `${endpoint}:create`; + logger.requestLog(`[${destType.toUpperCase()}] job creation request`, { + metadata, + requestDetails: { + url: endpoint, + body: payload, + method: 'post', + }, + }); let createJobResponse = await httpPOST( endPoint, payload, @@ -37,6 +45,7 @@ const createJob = async ({ endpoint, headers, payload, metadata }) => { endpointPath: `/create`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); createJobResponse = processAxiosResponse(createJobResponse); @@ -62,6 +71,14 @@ const createJob = async ({ endpoint, headers, payload, metadata }) => { const addConversionToJob = async ({ endpoint, headers, jobId, payload, metadata }) => { const endPoint = `${endpoint}/${jobId}:addOperations`; + logger.requestLog(`[${destType.toUpperCase()}] add conversion to job request`, { + metadata, + requestDetails: { + url: endpoint, + body: payload, + method: 'post', + }, + }); let addConversionToJobResponse = await httpPOST( endPoint, payload, @@ -72,6 +89,7 @@ const addConversionToJob = async ({ endpoint, headers, jobId, payload, metadata endpointPath: `/addOperations`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); 
addConversionToJobResponse = processAxiosResponse(addConversionToJobResponse); @@ -97,12 +115,14 @@ const addConversionToJob = async ({ endpoint, headers, jobId, payload, metadata const runTheJob = async ({ endpoint, headers, payload, jobId, metadata }) => { const endPoint = `${endpoint}/${jobId}:run`; - // logger.responseLog(`[${destType.toUpperCase()}] run job request`, { - // ...getLoggableData(metadata), - // requestBody: payload, - // method: 'POST', - // url: endPoint, - // }); + logger.requestLog(`[${destType.toUpperCase()}] run job request`, { + metadata, + requestDetails: { + body: payload, + method: 'POST', + url: endPoint, + }, + }); const { httpResponse: executeJobResponse, processedResponse } = await handleHttpRequest( 'post', endPoint, @@ -114,6 +134,7 @@ const runTheJob = async ({ endpoint, headers, payload, jobId, metadata }) => { endpointPath: `/run`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); const { headers: responseHeaders, response, status } = processedResponse; @@ -146,12 +167,21 @@ const getConversionCustomVariable = async ({ headers, params, metadata }) => { const requestOptions = { headers, }; + logger.requestLog(`[${destType.toUpperCase()}] get conversion custom variable request`, { + metadata, + requestDetails: { + url: endpoint, + body: data, + method: 'post', + }, + }); let searchStreamResponse = await httpPOST(endpoint, data, requestOptions, { destType: 'google_adwords_offline_conversions', feature: 'proxy', endpointPath: `/searchStream`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }); searchStreamResponse = processAxiosResponse(searchStreamResponse); const { response, status, headers: responseHeaders } = searchStreamResponse; @@ -319,6 +349,7 @@ const ProxyRequest = async (request) => { endpointPath: `/proxy`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }); const { headers: responseHeaders, status, response } = processedResponse; logger.responseLog(`[${destType.toUpperCase()}] 
deliver event to destination`, { diff --git a/src/v0/destinations/google_adwords_offline_conversions/utils.js b/src/v0/destinations/google_adwords_offline_conversions/utils.js index 7fe565ef22..93f14a49a6 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/utils.js +++ b/src/v0/destinations/google_adwords_offline_conversions/utils.js @@ -71,12 +71,21 @@ const getConversionActionId = async ({ headers, params, metadata }) => { const requestOptions = { headers, }; + logger.requestLog(`[${destType.toUpperCase()}] get conversion action id request`, { + metadata, + requestDetails: { + url: endpoint, + body: data, + method: 'post', + }, + }); let searchStreamResponse = await httpPOST(endpoint, data, requestOptions, { destType: 'google_adwords_offline_conversions', feature: 'transformation', endpointPath: `/googleAds:searchStream`, requestMethod: 'POST', module: 'dataDelivery', + metadata, }); searchStreamResponse = processAxiosResponse(searchStreamResponse); const { response, status, headers: responseHeaders } = searchStreamResponse; diff --git a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js index 940a2b8cd1..82fb62b74e 100644 --- a/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js +++ b/src/v0/destinations/google_adwords_remarketing_lists/networkHandler.js @@ -39,6 +39,14 @@ const createJob = async ({ endpoint, headers, method, params, metadata }) => { headers, method, }; + logger.requestLog(`[${destType.toUpperCase()}] job creation request`, { + metadata, + requestDetails: { + url: jobCreatingRequest.url, + body: jobCreatingRequest.data, + method: jobCreatingRequest.method, + }, + }); const { httpResponse, processedResponse } = await handleHttpRequest( 'constructor', jobCreatingRequest, @@ -48,6 +56,7 @@ const createJob = async ({ endpoint, headers, method, params, metadata }) => { endpointPath: '/customers/create', requestMethod: 
'POST', module: 'dataDelivery', + metadata, }, ); logger.responseLog(`[${destType.toUpperCase()}] job creation response`, { @@ -73,6 +82,14 @@ const addUserToJob = async ({ endpoint, headers, method, jobId, body, metadata } headers, method, }; + logger.requestLog(`[${destType.toUpperCase()}] add user to job request`, { + metadata, + requestDetails: { + url: secondRequest.url, + body: secondRequest.data, + method: secondRequest.method, + }, + }); const { httpResponse: response, processedResponse } = await handleHttpRequest( 'constructor', secondRequest, @@ -82,6 +99,7 @@ const addUserToJob = async ({ endpoint, headers, method, jobId, body, metadata } endpointPath: '/addOperations', requestMethod: 'POST', module: 'dataDelivery', + metadata, }, ); logger.responseLog(`[${destType.toUpperCase()}] add user to job response`, { @@ -105,6 +123,14 @@ const runTheJob = async ({ endpoint, headers, method, jobId, metadata }) => { headers, method, }; + logger.requestLog(`[${destType.toUpperCase()}] run job request`, { + metadata, + requestDetails: { + url: thirdRequest.url, + body: thirdRequest.data, + method: thirdRequest.method, + }, + }); const { httpResponse: response, processedResponse } = await handleHttpRequest( 'constructor', thirdRequest, From 365f7386a5354cb65a28d6fbb3214d941f2c1ee6 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Wed, 12 Jun 2024 20:40:04 +0530 Subject: [PATCH 17/37] chore: call env load before importing logger - set level as part of initialisation --- src/index.ts | 7 +++++-- src/logger.js | 16 ++++++++-------- 2 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/index.ts b/src/index.ts index 3bfd68cf21..47f93068ab 100644 --- a/src/index.ts +++ b/src/index.ts @@ -8,9 +8,12 @@ import { metricsRouter } from './routes/metricsRouter'; import cluster from './util/cluster'; import { RedisDB } from './util/redis/redisConnector'; import { logProcessInfo } from './util/utils'; -import logger from './logger'; dotenv.config(); + +// 
eslint-disable-next-line import/first +import logger from './logger'; + const clusterEnabled = process.env.CLUSTER_ENABLED !== 'false'; const port = parseInt(process.env.PORT ?? '9090', 10); const metricsPort = parseInt(process.env.METRICS_PORT || '9091', 10); @@ -32,7 +35,7 @@ app.use( addRequestSizeMiddleware(app); addSwaggerRoutes(app); -logger.info('Using new routes'); +logger.error('Using new routes'); applicationRoutes(app); function finalFunction() { diff --git a/src/logger.js b/src/logger.js index 101689d0cc..483bef0491 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,9 +1,7 @@ const dotenv = require('dotenv'); /* istanbul ignore file */ -const { structuredLogger: logger /* LOGLEVELS */ } = require('@rudderstack/integrations-lib'); - -dotenv.config({ path: '../.env' }); +const { /* LOGLEVELS */ structuredLogger } = require('@rudderstack/integrations-lib'); const LOGLEVELS = { debug: 0, // Most verbose logging level @@ -17,10 +15,16 @@ const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; let logLevel = process.env.LOG_LEVEL ?? 'error'; +const logger = structuredLogger({ level: logLevel }); + +const getLogger = () => { + return loggerImpl === 'winston' ? logger : console; +}; + const setLogLevel = (level) => { const logger = getLogger(); logLevel = level || logLevel; - logger?.setLogLevel(`${loglevel}`); + logger?.setLogLevel(logLevel); }; /** @@ -75,10 +79,6 @@ const log = (logMethod, args) => { logMethod(message); }; -const getLogger = () => { - return loggerImpl === 'winston' ? 
logger : console; -}; - const debug = (...args) => { const logger = getLogger(); if (LOGLEVELS.debug >= logLevel) { From cc43744465d3abc8f28240d154e7cc7e10c77f99 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 10:31:27 +0530 Subject: [PATCH 18/37] chore: upgrade integrations-lib --- package-lock.json | 8 ++++---- package.json | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/package-lock.json b/package-lock.json index f51f3ccd8e..ff4cd359b6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ "@koa/router": "^12.0.0", "@ndhoule/extend": "^2.0.0", "@pyroscope/nodejs": "^0.2.9", - "@rudderstack/integrations-lib": "^0.2.8", + "@rudderstack/integrations-lib": "^0.2.9", "@rudderstack/workflow-engine": "^0.7.5", "@shopify/jest-koa-mocks": "^5.1.1", "ajv": "^8.12.0", @@ -4437,9 +4437,9 @@ } }, "node_modules/@rudderstack/integrations-lib": { - "version": "0.2.8", - "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.8.tgz", - "integrity": "sha512-5CJoFFCRDhG7busCGVktKqEEXO0DbFqJ56TOT+jyDdoTf8sZ7SsSJ4NCZYmSplZrbQGj2R+aArnQnpxA4hPGmA==", + "version": "0.2.9", + "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.9.tgz", + "integrity": "sha512-e3aKT01B9UWB2gtG0Xnafuxnp0rt+d8OIg+BhQtZXQ3EaUyfARKQ8m0u6X0sjm6lKWe+p1ZRMuZeRctBzlVPSw==", "dependencies": { "axios": "^1.4.0", "axios-mock-adapter": "^1.22.0", diff --git a/package.json b/package.json index 50a276ce42..b6bfa2423a 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "@koa/router": "^12.0.0", "@ndhoule/extend": "^2.0.0", "@pyroscope/nodejs": "^0.2.9", - "@rudderstack/integrations-lib": "^0.2.8", + "@rudderstack/integrations-lib": "^0.2.9", "@rudderstack/workflow-engine": "^0.7.5", "@shopify/jest-koa-mocks": "^5.1.1", "ajv": "^8.12.0", From 00dd83b32c78ae7d49900124406a63303379e7d5 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 11:17:11 +0530 Subject: 
[PATCH 19/37] chore: remove old references of logger --- src/v0/destinations/braze/util.js | 2 +- src/v0/sources/adjust/transform.js | 3 ++- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/src/v0/destinations/braze/util.js b/src/v0/destinations/braze/util.js index f131c40f5f..4253619d33 100644 --- a/src/v0/destinations/braze/util.js +++ b/src/v0/destinations/braze/util.js @@ -1,7 +1,7 @@ /* eslint-disable */ const _ = require('lodash'); const get = require('get-value'); -const { structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const logger = require('../../../logger'); const stats = require('../../../util/stats'); const { handleHttpRequest } = require('../../../adapters/network'); const { diff --git a/src/v0/sources/adjust/transform.js b/src/v0/sources/adjust/transform.js index 8568622aeb..9da90751b7 100644 --- a/src/v0/sources/adjust/transform.js +++ b/src/v0/sources/adjust/transform.js @@ -1,7 +1,8 @@ const lodash = require('lodash'); const path = require('path'); const fs = require('fs'); -const { TransformationError, structuredLogger: logger } = require('@rudderstack/integrations-lib'); +const { TransformationError } = require('@rudderstack/integrations-lib'); +const logger = require('../../../logger'); const Message = require('../message'); const { CommonUtils } = require('../../../util/common'); const { excludedFieldList } = require('./config'); From 0f9558d4997047c640ed482fb118493799efcb8c Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 13:21:39 +0530 Subject: [PATCH 20/37] chore: refactor fire http stats - update monday response log message - add request log in klaviyo - add labels for outgoing request prometheus metrics --- src/adapters/network.js | 41 ++++++++++++-------- src/util/prometheus.js | 18 ++++++++- src/v0/destinations/klaviyo/util.js | 8 ++++ src/v1/destinations/monday/networkHandler.js | 2 +- 4 files changed, 50 insertions(+), 19 deletions(-) diff --git a/src/adapters/network.js 
b/src/adapters/network.js index 882676dd8f..c9b8f45138 100644 --- a/src/adapters/network.js +++ b/src/adapters/network.js @@ -52,23 +52,30 @@ const fireHTTPStats = (clientResponse, startTime, statTags) => { const requestMethod = statTags.requestMethod ? statTags.requestMethod : ''; const module = statTags.module ? statTags.module : ''; const statusCode = clientResponse.success ? clientResponse.response.status : ''; - const logMetaInfo = log.getLogMetadata(statTags?.metadata); - stats.timing('outgoing_request_latency', startTime, { - feature, - destType, - endpointPath, - requestMethod, - module, - ...logMetaInfo, - }); - stats.counter('outgoing_request_count', 1, { - feature, - destType, - endpointPath, - success: clientResponse.success, - statusCode, - requestMethod, - module, + let metadata = statTags?.metadata || []; + if (statTags?.metadata && !Array.isArray(statTags?.metadata)) { + metadata = [statTags.metadata]; + } + metadata?.forEach((m) => { + const logMetaInfo = log.getLogMetadata(m); + stats.timing('outgoing_request_latency', startTime, { + ...logMetaInfo, + feature, + destType, + endpointPath, + requestMethod, + module, + }); + stats.counter('outgoing_request_count', 1, { + ...logMetaInfo, + feature, + destType, + endpointPath, + success: clientResponse.success, + statusCode, + requestMethod, + module, + }); }); }; diff --git a/src/util/prometheus.js b/src/util/prometheus.js index bc4c6f2eb9..72f424d39a 100644 --- a/src/util/prometheus.js +++ b/src/util/prometheus.js @@ -567,6 +567,11 @@ class Prometheus { 'statusCode', 'requestMethod', 'module', + 'workspaceId', + 'destinationId', + 'module', + 'implementation', + 'sourceId', ], }, @@ -631,7 +636,18 @@ class Prometheus { name: 'outgoing_request_latency', help: 'Outgoing HTTP requests duration in seconds', type: 'histogram', - labelNames: ['feature', 'destType', 'endpointPath', 'requestMethod', 'module'], + labelNames: [ + 'feature', + 'destType', + 'endpointPath', + 'requestMethod', + 'module', + 
'workspaceId', + 'destinationId', + 'module', + 'implementation', + 'sourceId', + ], }, { name: 'http_request_duration', diff --git a/src/v0/destinations/klaviyo/util.js b/src/v0/destinations/klaviyo/util.js index 97b690e093..4db59cfb05 100644 --- a/src/v0/destinations/klaviyo/util.js +++ b/src/v0/destinations/klaviyo/util.js @@ -43,6 +43,14 @@ const getIdFromNewOrExistingProfile = async ({ endpoint, payload, requestOptions let response; let profileId; const endpointPath = '/api/profiles'; + logger.requestLog(`[${destType.toUpperCase()}] get id from profile request`, { + metadata, + requestDetails: { + url: endpoint, + body: payload, + method: 'post', + }, + }); const { processedResponse: resp } = await handleHttpRequest( 'post', endpoint, diff --git a/src/v1/destinations/monday/networkHandler.js b/src/v1/destinations/monday/networkHandler.js index c92ef3c794..5a0313a27b 100644 --- a/src/v1/destinations/monday/networkHandler.js +++ b/src/v1/destinations/monday/networkHandler.js @@ -44,7 +44,7 @@ const responseHandler = (responseParams) => { const responseWithIndividualEvents = []; const { response, status, headers } = destinationResponse; - logger.responseLog('[campaign_manager] response handling', { + logger.responseLog('[monday] proxy response', { metadata: rudderJobMetadata, responseDetails: { headers, From 883f3cb53202e2a88206a5ed62e8bb250ed8b817 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 16:03:40 +0530 Subject: [PATCH 21/37] chore: add logic to trigger stats for destinations that don't send metadata --- src/adapters/network.js | 71 ++++++++++++++++++++++++++++------------- 1 file changed, 49 insertions(+), 22 deletions(-) diff --git a/src/adapters/network.js b/src/adapters/network.js index c9b8f45138..3790aad2ed 100644 --- a/src/adapters/network.js +++ b/src/adapters/network.js @@ -45,6 +45,35 @@ const networkClientConfigs = { httpsAgent: new https.Agent({ keepAlive: true }), }; +const fireOutgoingReqStats = ({ + destType, + feature, + 
endpointPath, + requestMethod, + module, + metadata, +}) => { + const logMetaInfo = log.getLogMetadata(metadata); + stats.timing('outgoing_request_latency', startTime, { + ...logMetaInfo, + feature, + destType, + endpointPath, + requestMethod, + module, + }); + stats.counter('outgoing_request_count', 1, { + ...logMetaInfo, + feature, + destType, + endpointPath, + success: clientResponse.success, + statusCode, + requestMethod, + module, + }); +}; + const fireHTTPStats = (clientResponse, startTime, statTags) => { const destType = statTags.destType ? statTags.destType : ''; const feature = statTags.feature ? statTags.feature : ''; @@ -52,30 +81,28 @@ const fireHTTPStats = (clientResponse, startTime, statTags) => { const requestMethod = statTags.requestMethod ? statTags.requestMethod : ''; const module = statTags.module ? statTags.module : ''; const statusCode = clientResponse.success ? clientResponse.response.status : ''; - let metadata = statTags?.metadata || []; if (statTags?.metadata && !Array.isArray(statTags?.metadata)) { - metadata = [statTags.metadata]; - } - metadata?.forEach((m) => { - const logMetaInfo = log.getLogMetadata(m); - stats.timing('outgoing_request_latency', startTime, { - ...logMetaInfo, - feature, - destType, - endpointPath, - requestMethod, - module, - }); - stats.counter('outgoing_request_count', 1, { - ...logMetaInfo, - feature, - destType, - endpointPath, - success: clientResponse.success, - statusCode, - requestMethod, - module, + const metadata = !Array.isArray(statTags?.metadata) ? 
[statTags.metadata] : statTags.metadata; + metadata?.forEach((m) => { + fireOutgoingReqStats({ + destType, + endpointPath, + feature, + module, + requestMethod, + statusCode, + metadata: m, + }); }); + return; + } + fireOutgoingReqStats({ + destType, + endpointPath, + feature, + module, + requestMethod, + statusCode, }); }; From 1c4a1b0b77f4efb9ed114d0e44da63a055c4b3d5 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 16:15:15 +0530 Subject: [PATCH 22/37] chore: issues with func args fixed --- src/adapters/network.js | 31 ++++++++++++++++--------------- 1 file changed, 16 insertions(+), 15 deletions(-) diff --git a/src/adapters/network.js b/src/adapters/network.js index 3790aad2ed..ca880d4536 100644 --- a/src/adapters/network.js +++ b/src/adapters/network.js @@ -51,7 +51,10 @@ const fireOutgoingReqStats = ({ endpointPath, requestMethod, module, - metadata, + metadata = {}, + startTime, + statusCode, + clientResponse, }) => { const logMetaInfo = log.getLogMetadata(metadata); stats.timing('outgoing_request_latency', startTime, { @@ -81,29 +84,27 @@ const fireHTTPStats = (clientResponse, startTime, statTags) => { const requestMethod = statTags.requestMethod ? statTags.requestMethod : ''; const module = statTags.module ? statTags.module : ''; const statusCode = clientResponse.success ? clientResponse.response.status : ''; + const defArgs = { + destType, + endpointPath, + feature, + module, + requestMethod, + statusCode, + startTime, + clientResponse, + }; if (statTags?.metadata && !Array.isArray(statTags?.metadata)) { const metadata = !Array.isArray(statTags?.metadata) ? 
[statTags.metadata] : statTags.metadata; metadata?.forEach((m) => { fireOutgoingReqStats({ - destType, - endpointPath, - feature, - module, - requestMethod, - statusCode, + ...defArgs, metadata: m, }); }); return; } - fireOutgoingReqStats({ - destType, - endpointPath, - feature, - module, - requestMethod, - statusCode, - }); + fireOutgoingReqStats(defArgs); }; const enhanceRequestOptions = (options) => { From c7cc39f13802268de9b7564ef5e020790e668cbb Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 17:30:12 +0530 Subject: [PATCH 23/37] chore: debug-1 --- src/logger.js | 2 ++ 1 file changed, 2 insertions(+) diff --git a/src/logger.js b/src/logger.js index 483bef0491..9130f323ac 100644 --- a/src/logger.js +++ b/src/logger.js @@ -14,6 +14,8 @@ const LOGLEVELS = { const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; let logLevel = process.env.LOG_LEVEL ?? 'error'; +logger.error('(error) Loglevel is:', logLevel); +logger.warn('(warn) Loglevel is:', logLevel); const logger = structuredLogger({ level: logLevel }); From addae95a9c72f3d11af859d492ee0434d84e39b6 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 17:32:41 +0530 Subject: [PATCH 24/37] chore: debug-2 --- src/logger.js | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/logger.js b/src/logger.js index 9130f323ac..1626266f74 100644 --- a/src/logger.js +++ b/src/logger.js @@ -14,11 +14,12 @@ const LOGLEVELS = { const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; let logLevel = process.env.LOG_LEVEL ?? 'error'; -logger.error('(error) Loglevel is:', logLevel); -logger.warn('(warn) Loglevel is:', logLevel); const logger = structuredLogger({ level: logLevel }); +logger.error('(error) Loglevel is:', logLevel); +logger.warn('(warn) Loglevel is:', logLevel); + const getLogger = () => { return loggerImpl === 'winston' ? 
logger : console; }; From e1434c999c21b41f6b92621065977c87879c7908 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 18:01:38 +0530 Subject: [PATCH 25/37] chore: debug-3 --- src/logger.js | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/src/logger.js b/src/logger.js index 1626266f74..9eb0e1ec02 100644 --- a/src/logger.js +++ b/src/logger.js @@ -17,8 +17,10 @@ let logLevel = process.env.LOG_LEVEL ?? 'error'; const logger = structuredLogger({ level: logLevel }); -logger.error('(error) Loglevel is:', logLevel); +logger.debug('(debug) Loglevel is:', logLevel); +logger.info('(info) Loglevel is:', logLevel); logger.warn('(warn) Loglevel is:', logLevel); +logger.error('(error) Loglevel is:', logLevel); const getLogger = () => { return loggerImpl === 'winston' ? logger : console; From 567c3b36a5e332a69c23433be0f89ee001582bc9 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Thu, 13 Jun 2024 19:12:46 +0530 Subject: [PATCH 26/37] chore: fix levels problem - upgrade integrations-lib - utilise modified LOGLEVELS --- package-lock.json | 51 +++++++++++++++++++++++++++++++++++++++++++---- package.json | 2 +- src/logger.js | 22 +++++--------------- 3 files changed, 53 insertions(+), 22 deletions(-) diff --git a/package-lock.json b/package-lock.json index ef5833fc13..09d27d1478 100644 --- a/package-lock.json +++ b/package-lock.json @@ -19,7 +19,7 @@ "@koa/router": "^12.0.0", "@ndhoule/extend": "^2.0.0", "@pyroscope/nodejs": "^0.2.9", - "@rudderstack/integrations-lib": "^0.2.9", + "@rudderstack/integrations-lib": "^0.2.10", "@rudderstack/json-template-engine": "^0.11.0", "@rudderstack/workflow-engine": "^0.8.0", "@shopify/jest-koa-mocks": "^5.1.1", @@ -142,6 +142,49 @@ "mocha": "2.1.0" } }, + "../rudder-integrations-lib": { + "name": "@rudderstack/integrations-lib", + "version": "0.2.9", + "extraneous": true, + "license": "MIT", + "dependencies": { + "axios": "^1.4.0", + "axios-mock-adapter": "^1.22.0", + "crypto": "^1.0.1", + 
"eslint-config-airbnb-base": "^15.0.0", + "eslint-config-airbnb-typescript": "^17.1.0", + "get-value": "^3.0.1", + "handlebars": "^4.7.8", + "lodash": "^4.17.21", + "moment": "^2.29.4", + "moment-timezone": "^0.5.43", + "set-value": "^4.1.0", + "sha256": "^0.2.0", + "tslib": "^2.4.0", + "winston": "^3.11.0" + }, + "devDependencies": { + "@commitlint/config-conventional": "^18.5.0", + "@types/get-value": "^3.0.3", + "@types/jest": "^29.5.4", + "@types/lodash": "^4.14.195", + "@types/node": "^20.3.3", + "@types/set-value": "^4.0.1", + "@types/sha256": "^0.2.0", + "@typescript-eslint/eslint-plugin": "^6.20.0", + "@typescript-eslint/parser": "^6.20.0", + "commitlint": "^18.6.0", + "eslint": "^8.56.0", + "eslint-config-prettier": "^9.1.0", + "husky": "^8.0.0", + "jest": "^29.4.3", + "pre-commit": "^1.2.2", + "prettier": "^2.8.4", + "ts-jest": "^29.0.5", + "ts-node": "^10.9.1", + "typescript": "^5.1.6" + } + }, "node_modules/@aashutoshrathi/word-wrap": { "version": "1.2.6", "resolved": "https://registry.npmjs.org/@aashutoshrathi/word-wrap/-/word-wrap-1.2.6.tgz", @@ -4461,9 +4504,9 @@ } }, "node_modules/@rudderstack/integrations-lib": { - "version": "0.2.9", - "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.9.tgz", - "integrity": "sha512-e3aKT01B9UWB2gtG0Xnafuxnp0rt+d8OIg+BhQtZXQ3EaUyfARKQ8m0u6X0sjm6lKWe+p1ZRMuZeRctBzlVPSw==", + "version": "0.2.10", + "resolved": "https://registry.npmjs.org/@rudderstack/integrations-lib/-/integrations-lib-0.2.10.tgz", + "integrity": "sha512-PVlRIxO9PVYpR+UNm1qQt85wo0wO9oX0PvoC9XqzYO+C0PfRvkMqac8ghA5ytqeCYNfSIye7DtidaII5ZoCQCA==", "dependencies": { "axios": "^1.4.0", "axios-mock-adapter": "^1.22.0", diff --git a/package.json b/package.json index 84a8621b83..388153ef91 100644 --- a/package.json +++ b/package.json @@ -64,7 +64,7 @@ "@koa/router": "^12.0.0", "@ndhoule/extend": "^2.0.0", "@pyroscope/nodejs": "^0.2.9", - "@rudderstack/integrations-lib": "^0.2.9", + "@rudderstack/integrations-lib": 
"^0.2.10", "@rudderstack/json-template-engine": "^0.11.0", "@rudderstack/workflow-engine": "^0.8.0", "@shopify/jest-koa-mocks": "^5.1.1", diff --git a/src/logger.js b/src/logger.js index 9eb0e1ec02..85d4a11255 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,14 +1,7 @@ const dotenv = require('dotenv'); /* istanbul ignore file */ -const { /* LOGLEVELS */ structuredLogger } = require('@rudderstack/integrations-lib'); - -const LOGLEVELS = { - debug: 0, // Most verbose logging level - info: 1, // Logs about state of the application - warn: 2, // Logs about warnings which dont immediately halt the application - error: 3, // Logs about errors which dont immediately halt the application -}; +const { LOGLEVELS, structuredLogger } = require('@rudderstack/integrations-lib'); // any value greater than levelError will work as levelNone const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; @@ -17,11 +10,6 @@ let logLevel = process.env.LOG_LEVEL ?? 'error'; const logger = structuredLogger({ level: logLevel }); -logger.debug('(debug) Loglevel is:', logLevel); -logger.info('(info) Loglevel is:', logLevel); -logger.warn('(warn) Loglevel is:', logLevel); -logger.error('(error) Loglevel is:', logLevel); - const getLogger = () => { return loggerImpl === 'winston' ? 
logger : console; }; @@ -86,28 +74,28 @@ const log = (logMethod, args) => { const debug = (...args) => { const logger = getLogger(); - if (LOGLEVELS.debug >= logLevel) { + if (LOGLEVELS.debug <= logLevel) { log(logger.debug, args); } }; const info = (...args) => { const logger = getLogger(); - if (LOGLEVELS.info >= LOGLEVELS[logLevel]) { + if (LOGLEVELS.info <= LOGLEVELS[logLevel]) { log(logger.info, args); } }; const warn = (...args) => { const logger = getLogger(); - if (LOGLEVELS.warn >= LOGLEVELS[logLevel]) { + if (LOGLEVELS.warn <= LOGLEVELS[logLevel]) { log(logger.warn, args); } }; const error = (...args) => { const logger = getLogger(); - if (LOGLEVELS.error >= LOGLEVELS[logLevel]) { + if (LOGLEVELS.error <= LOGLEVELS[logLevel]) { log(logger.error, args); } }; From a8b0c8380831a5d9072b176d1ad5ecd64b54583c Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 14:03:52 +0530 Subject: [PATCH 27/37] chore: add comment on LOGGER_IMPL - update getLoggerImpl logic by adding switch cases --- src/logger.js | 11 +++++++---- 1 file changed, 7 insertions(+), 4 deletions(-) diff --git a/src/logger.js b/src/logger.js index 85d4a11255..1e51fbdb81 100644 --- a/src/logger.js +++ b/src/logger.js @@ -1,9 +1,7 @@ -const dotenv = require('dotenv'); - /* istanbul ignore file */ const { LOGLEVELS, structuredLogger } = require('@rudderstack/integrations-lib'); -// any value greater than levelError will work as levelNone +// LOGGER_IMPL can be `console` or `winston` const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; let logLevel = process.env.LOG_LEVEL ?? 'error'; @@ -11,7 +9,12 @@ let logLevel = process.env.LOG_LEVEL ?? 'error'; const logger = structuredLogger({ level: logLevel }); const getLogger = () => { - return loggerImpl === 'winston' ? 
logger : console; + switch (loggerImpl) { + case 'winston': + return logger; + case 'console': + return console; + } }; const setLogLevel = (level) => { From 1e1f87eb0109686d5024472c4799ff7065b8e7e6 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 14:13:03 +0530 Subject: [PATCH 28/37] chore: revert error log to info --- src/index.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/index.ts b/src/index.ts index 47f93068ab..39a69b4ad9 100644 --- a/src/index.ts +++ b/src/index.ts @@ -35,7 +35,7 @@ app.use( addRequestSizeMiddleware(app); addSwaggerRoutes(app); -logger.error('Using new routes'); +logger.info('Using new routes'); applicationRoutes(app); function finalFunction() { From fe87faac95794c69f06f3b4cd0254cbc8062094d Mon Sep 17 00:00:00 2001 From: ItsSudip Date: Fri, 14 Jun 2024 14:26:08 +0530 Subject: [PATCH 29/37] chore: remove redundant condition on network.js --- src/adapters/network.js | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/adapters/network.js b/src/adapters/network.js index ca880d4536..850ba649c8 100644 --- a/src/adapters/network.js +++ b/src/adapters/network.js @@ -94,7 +94,7 @@ const fireHTTPStats = (clientResponse, startTime, statTags) => { startTime, clientResponse, }; - if (statTags?.metadata && !Array.isArray(statTags?.metadata)) { + if (statTags?.metadata) { const metadata = !Array.isArray(statTags?.metadata) ? 
[statTags.metadata] : statTags.metadata; metadata?.forEach((m) => { fireOutgoingReqStats({ From cbb9354dab6e3b1f789f2156dff88caf23619821 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 16:06:53 +0530 Subject: [PATCH 30/37] chore: update debug level condition - add destType in cdkv2 handler - include extra fields in fillExcept logger option --- src/cdk/v2/handler.ts | 3 ++- src/logger.js | 16 ++++++++++++++-- 2 files changed, 16 insertions(+), 3 deletions(-) diff --git a/src/cdk/v2/handler.ts b/src/cdk/v2/handler.ts index fec6731ffc..74ebb716e6 100644 --- a/src/cdk/v2/handler.ts +++ b/src/cdk/v2/handler.ts @@ -88,7 +88,8 @@ export async function processCdkV2Workflow( bindings: Record = {}, ) { try { - logger.debug(`Processing cdkV2 workflow`); + logger.debug(`Processing cdkV2 workflow`, { destType }); + const workflowEngine = await getCachedWorkflowEngine(destType, feature, bindings); return await executeWorkflow(workflowEngine, parsedEvent, requestMetadata); } catch (error) { diff --git a/src/logger.js b/src/logger.js index 1e51fbdb81..ea50fa9273 100644 --- a/src/logger.js +++ b/src/logger.js @@ -6,7 +6,19 @@ const loggerImpl = process.env.LOGGER_IMPL ?? 'winston'; let logLevel = process.env.LOG_LEVEL ?? 
'error'; -const logger = structuredLogger({ level: logLevel }); +const logger = structuredLogger({ + level: logLevel, + fillExcept: [ + 'destinationId', + 'sourceId', + 'destinationType', + 'workspaceId', + 'module', + 'implementation', + 'feature', + 'destType', + ], +}); const getLogger = () => { switch (loggerImpl) { @@ -77,7 +89,7 @@ const log = (logMethod, args) => { const debug = (...args) => { const logger = getLogger(); - if (LOGLEVELS.debug <= logLevel) { + if (LOGLEVELS.debug <= LOGLEVELS[logLevel]) { log(logger.debug, args); } }; From 451fd114a9df4c5d8aee936b0f3b502f00830723 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 16:08:40 +0530 Subject: [PATCH 31/37] chore: correction in shopify test Signed-off-by: Sai Sankeerth --- src/util/redis/redisConnector.test.js | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/util/redis/redisConnector.test.js b/src/util/redis/redisConnector.test.js index 9a520e41a4..659b32925b 100644 --- a/src/util/redis/redisConnector.test.js +++ b/src/util/redis/redisConnector.test.js @@ -2,7 +2,6 @@ const fs = require('fs'); const path = require('path'); const version = 'v0'; const { RedisDB } = require('./redisConnector'); -const logger = require('../../logger'); jest.mock('ioredis', () => require('../../../test/__mocks__/redis')); const sourcesList = ['shopify']; @@ -56,7 +55,7 @@ describe(`Source Tests`, () => { data.forEach((dataPoint, index) => { it(`${index}. 
${source} - ${dataPoint.description}`, async () => { try { - const output = await transformer.process(dataPoint.input, logger); + const output = await transformer.process(dataPoint.input); expect(output).toEqual(dataPoint.output); } catch (error) { expect(error.message).toEqual(dataPoint.output.error); From ed7837aafda7aecc85025c97f1ad01956a98b5d2 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 16:23:15 +0530 Subject: [PATCH 32/37] chore: usage of logger in postTransformation - remove usage of logError function from services/misc.ts --- .../destination/postTransformation.ts | 19 ++++++++----------- src/services/misc.ts | 9 +-------- 2 files changed, 9 insertions(+), 19 deletions(-) diff --git a/src/services/destination/postTransformation.ts b/src/services/destination/postTransformation.ts index 40cee61e66..7ab0d96af8 100644 --- a/src/services/destination/postTransformation.ts +++ b/src/services/destination/postTransformation.ts @@ -19,7 +19,7 @@ import { FixMe } from '../../util/types'; import { generateErrorObject } from '../../v0/util'; import tags from '../../v0/util/tags'; import { ErrorReportingService } from '../errorReporting'; -import { MiscService } from '../misc'; +import logger from '../../logger'; const defaultErrorMessages = { router: '[Router Transform] Error occurred while processing the payload.', @@ -68,7 +68,7 @@ export class DestinationPostTransformationService { error: errObj.message || '[Processor Transform] Error occurred while processing the payload.', statTags: errObj.statTags, } as ProcessorTransformationResponse; - MiscService.logError( + logger.error( errObj.message || '[Processor Transform] Error occurred while processing the payload.', metaTo.errorDetails, ); @@ -109,7 +109,7 @@ export class DestinationPostTransformationService { ...resp.statTags, ...metaTo.errorDetails, }; - MiscService.logError(resp.error || defaultErrorMessages.router, metaTo.errorDetails); + logger.error(resp.error || defaultErrorMessages.router, 
metaTo.errorDetails); stats.increment('event_transform_failure', metaTo.errorDetails); } else { stats.increment('event_transform_success', { @@ -138,7 +138,7 @@ export class DestinationPostTransformationService { error: errObj.message || defaultErrorMessages.router, statTags: errObj.statTags, } as RouterTransformationResponse; - MiscService.logError(errObj.message || defaultErrorMessages.router, metaTo.errorDetails); + logger.error(errObj.message || defaultErrorMessages.router, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); stats.increment('event_transform_failure', metaTo.errorDetails); return resp; @@ -156,7 +156,7 @@ export class DestinationPostTransformationService { error: errObj.message || defaultErrorMessages.delivery, statTags: errObj.statTags, } as RouterTransformationResponse; - MiscService.logError(error as string, metaTo.errorDetails); + logger.error(error as string, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); return resp; } @@ -187,10 +187,7 @@ export class DestinationPostTransformationService { const errObj = generateErrorObject(error, metaTo.errorDetails, false); const metadataArray = metaTo.metadatas; if (!Array.isArray(metadataArray)) { - MiscService.logError( - 'Proxy v1 endpoint error : metadataArray is not an array', - metaTo.errorDetails, - ); + logger.error('Proxy v1 endpoint error : metadataArray is not an array', metaTo.errorDetails); // Panic throw new PlatformError('Proxy v1 endpoint error : metadataArray is not an array'); } @@ -215,7 +212,7 @@ export class DestinationPostTransformationService { authErrorCategory: errObj.authErrorCategory, }), } as DeliveryV1Response; - MiscService.logError(errObj.message, metaTo.errorDetails); + logger.error(errObj.message, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); return resp; } @@ -233,7 +230,7 @@ export class DestinationPostTransformationService { 
authErrorCategory: errObj.authErrorCategory, }), } as UserDeletionResponse; - MiscService.logError(errObj.message, metaTo.errorDetails); + logger.error(errObj.message, metaTo.errorDetails); ErrorReportingService.reportError(error, metaTo.errorContext, resp); return resp; } diff --git a/src/services/misc.ts b/src/services/misc.ts index 4378fe231e..09051edeec 100644 --- a/src/services/misc.ts +++ b/src/services/misc.ts @@ -1,12 +1,10 @@ /* eslint-disable global-require, import/no-dynamic-require */ -import { LoggableExtraData } from '@rudderstack/integrations-lib'; import fs from 'fs'; import { Context } from 'koa'; import path from 'path'; import { DestHandlerMap } from '../constants/destinationCanonicalNames'; import { getCPUProfile, getHeapProfile } from '../middleware'; -import { ErrorDetailer, Metadata } from '../types'; -import logger from '../logger'; +import { Metadata } from '../types'; export class MiscService { public static getDestHandler(dest: string, version: string) { @@ -76,9 +74,4 @@ export class MiscService { public static async getHeapProfile() { return getHeapProfile(); } - - public static logError(message: string, errorDetailer: ErrorDetailer) { - const loggableExtraData: Partial = logger.getLogMetadata(errorDetailer); - logger.error(message || '', loggableExtraData); - } } From a978abf44410bea9bc7c5e0fbe931ab9e8553ffe Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 18:50:57 +0530 Subject: [PATCH 33/37] chore: accept destType key from destinationType in metadata --- src/logger.js | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/src/logger.js b/src/logger.js index ea50fa9273..b523aff260 100644 --- a/src/logger.js +++ b/src/logger.js @@ -46,11 +46,12 @@ const getLogMetadata = (metadata) => { if (Array.isArray(metadata)) { [reqMeta] = metadata; } + const destType = reqMeta?.destType || reqMeta?.destinationType; return { ...(reqMeta?.destinationId && { destinationId: reqMeta.destinationId }), 
...(reqMeta?.sourceId && { sourceId: reqMeta.sourceId }), ...(reqMeta?.workspaceId && { workspaceId: reqMeta.workspaceId }), - ...(reqMeta?.destType && { destType: reqMeta.destType }), + ...(destType && { destType }), ...(reqMeta?.module && { module: reqMeta.module }), ...(reqMeta?.implementation && { implementation: reqMeta.implementation }), ...(reqMeta?.feature && { feature: reqMeta.feature }), From 710746fbb0f3c83ca044bc2cc74fe2ce4df0caa2 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 18:51:39 +0530 Subject: [PATCH 34/37] chore: correction log message --- .../google_adwords_enhanced_conversions/networkHandler.js | 2 +- src/v0/destinations/google_adwords_offline_conversions/utils.js | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js index b790bb04a2..d82349c04d 100644 --- a/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js +++ b/src/v0/destinations/google_adwords_enhanced_conversions/networkHandler.js @@ -39,7 +39,7 @@ const getConversionActionId = async ({ method, headers, params, metadata }) => { query: queryString, }; const searchStreamEndpoint = `${BASE_ENDPOINT}/${params.customerId}/googleAds:searchStream`; - logger.requestLog(`[${destType.toUpperCase()}] conversion enhancement request`, { + logger.requestLog(`[${destType.toUpperCase()}] get conversion action id request`, { metadata, requestDetails: { url: searchStreamEndpoint, body: data, method }, }); diff --git a/src/v0/destinations/google_adwords_offline_conversions/utils.js b/src/v0/destinations/google_adwords_offline_conversions/utils.js index cb24da0bcd..bf1773d450 100644 --- a/src/v0/destinations/google_adwords_offline_conversions/utils.js +++ b/src/v0/destinations/google_adwords_offline_conversions/utils.js @@ -89,7 +89,7 @@ const getConversionActionId = async ({ headers, params, metadata 
}) => { }); searchStreamResponse = processAxiosResponse(searchStreamResponse); const { response, status, headers: responseHeaders } = searchStreamResponse; - logger.responseLog(`[${destType.toUpperCase()}] get conversion custom variable`, { + logger.responseLog(`[${destType.toUpperCase()}] get conversion action id response`, { metadata, responseDetails: { response, From 6279c79bb19e70c444cafbca928811cb40e1722c Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Fri, 14 Jun 2024 19:02:46 +0530 Subject: [PATCH 35/37] chore: add method doc for unexported log method in logger.js --- src/logger.js | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/logger.js b/src/logger.js index b523aff260..6cec795498 100644 --- a/src/logger.js +++ b/src/logger.js @@ -58,6 +58,15 @@ const getLogMetadata = (metadata) => { }; }; +/** + * Perform logging operation on logMethod passed + * + * @param {*} logMethod + * - instance method reference + * - The logger should implement all of debug/info/warn/error methods + * @param {*} args + * - the arguments that needs to be passed to logger instance method + */ const log = (logMethod, args) => { const [message, logInfo, ...otherArgs] = args; if (logInfo) { From 4adb97760883a8ae12b20fb5058eb5fe6afa78b3 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Tue, 18 Jun 2024 10:15:34 +0530 Subject: [PATCH 36/37] chore: handle non-object arguments for logger chore: add stats for gladly API call Signed-off-by: Sai Sankeerth --- .../v2/destinations/gladly/procWorkflow.yaml | 3 ++- src/logger.js | 22 ++++++++++++++++--- 2 files changed, 21 insertions(+), 4 deletions(-) diff --git a/src/cdk/v2/destinations/gladly/procWorkflow.yaml b/src/cdk/v2/destinations/gladly/procWorkflow.yaml index a53a0ca8f5..dcefc9d774 100644 --- a/src/cdk/v2/destinations/gladly/procWorkflow.yaml +++ b/src/cdk/v2/destinations/gladly/procWorkflow.yaml @@ -61,7 +61,8 @@ steps: headers: $.getHeaders(.destination) } const endpoint = $.getEndpoint(.destination) + "?" 
+ $.getQueryParams($.context.payload); - const rawResponse = await $.httpGET(endpoint,requestOptions) + const reqStats = {metadata:.metadata, module: 'router',feature: "transformation", destType:"gladly",requestMethod:"get",endpointPath:"/api/v1/customer-profiles"} + const rawResponse = await $.httpGET(endpoint,requestOptions, reqStats) const processedResponse = $.processAxiosResponse(rawResponse) processedResponse diff --git a/src/logger.js b/src/logger.js index 6cec795498..6daff56c67 100644 --- a/src/logger.js +++ b/src/logger.js @@ -58,17 +58,33 @@ const getLogMetadata = (metadata) => { }; }; +const formLogArgs = (args) => { + let msg = ''; + let otherArgs = []; + args.forEach((arg) => { + if (typeof arg !== 'object') { + msg += ' ' + arg; + return; + } + otherArgs.push(arg); + }); + return [msg, ...otherArgs]; +}; + /** * Perform logging operation on logMethod passed * + * **Good practices**: + * - Do not have more than one array args in logger * @param {*} logMethod * - instance method reference * - The logger should implement all of debug/info/warn/error methods - * @param {*} args + * @param {*} logArgs * - the arguments that needs to be passed to logger instance method */ -const log = (logMethod, args) => { - const [message, logInfo, ...otherArgs] = args; +const log = (logMethod, logArgs) => { + const [message, ...args] = formLogArgs(logArgs); + const [logInfo, ...otherArgs] = args; if (logInfo) { const { metadata, ...otherLogInfoArgs } = logInfo; if (Array.isArray(metadata)) { From 2406a4bf4c0963edeba58dbc7af80eafbf707183 Mon Sep 17 00:00:00 2001 From: Sai Sankeerth Date: Tue, 18 Jun 2024 11:40:04 +0530 Subject: [PATCH 37/37] chore: remove new routes log Signed-off-by: Sai Sankeerth --- src/index.ts | 1 - 1 file changed, 1 deletion(-) diff --git a/src/index.ts b/src/index.ts index 39a69b4ad9..c5de26c776 100644 --- a/src/index.ts +++ b/src/index.ts @@ -35,7 +35,6 @@ app.use( addRequestSizeMiddleware(app); addSwaggerRoutes(app); -logger.info('Using new 
routes'); applicationRoutes(app); function finalFunction() {