diff --git a/.github/workflows/deploy-beta.yml b/.github/workflows/deploy-beta.yml index 29835d344..d8598f165 100644 --- a/.github/workflows/deploy-beta.yml +++ b/.github/workflows/deploy-beta.yml @@ -2,6 +2,12 @@ name: Deploy BETA/BugBash Feature on: workflow_dispatch: + pull_request: + branches: ['main'] + types: + - opened + - synchronize + - reopened permissions: id-token: write # allows the JWT to be requested from GitHub's OIDC provider @@ -13,37 +19,69 @@ env: jobs: get-deploy-inputs: name: Get Deploy Inputs - if: startsWith(github.ref, 'refs/heads/beta/') || startsWith(github.ref, 'refs/tags/bugbash/') + if: startsWith(github.ref, 'refs/heads/beta/') || startsWith(github.ref, 'refs/tags/bugbash/') || startsWith(github.head_ref, 'beta/') runs-on: [self-hosted, Linux, X64] outputs: release_type: ${{ steps.deploy-inputs.outputs.release_type }} - feature_name: ${{ steps.deploy-inputs.outputs.feature_name }} + latest_feature_name: ${{ steps.deploy-inputs.outputs.latest_feature_name }} + commit_feature_name: ${{ steps.deploy-inputs.outputs.commit_feature_name }} steps: - name: Extract deploy inputs id: deploy-inputs shell: bash run: | - source_branch_name=${GITHUB_REF##*/} + source_branch_name="${GITHUB_HEAD_REF:-${GITHUB_REF##*/}}" + + echo "source_branch_name=$source_branch_name" RELEASE_TYPE=beta - grep -q "bugbash/" <<< "${GITHUB_REF}" && RELEASE_TYPE=bugbash + grep -q "bugbash/" <<< "${GITHUB_HEAD_REF:-$GITHUB_REF}" && RELEASE_TYPE=bugbash FEATURE_NAME=${source_branch_name#bugbash/} FEATURE_NAME=${FEATURE_NAME#beta/} FEATURE_NAME=${FEATURE_NAME#refs/heads/} FEATURE_NAME=${FEATURE_NAME#refs/tags/} + echo "feature_name=$FEATURE_NAME" + + LATEST_FEATURE_NAME="${FEATURE_NAME}/latest" + COMMIT_FEATURE_NAME="${FEATURE_NAME}/$(echo ${{ github.event.pull_request.head.sha }} | cut -c1-7)" + echo "release_type=$RELEASE_TYPE" >> $GITHUB_OUTPUT - echo "feature_name=$FEATURE_NAME" >> $GITHUB_OUTPUT + echo "latest_feature_name=$LATEST_FEATURE_NAME" >> $GITHUB_OUTPUT + echo 
"commit_feature_name=$COMMIT_FEATURE_NAME" >> $GITHUB_OUTPUT + + echo "release_type=$RELEASE_TYPE" + echo "latest_feature_name=$LATEST_FEATURE_NAME" + echo "commit_feature_name=$COMMIT_FEATURE_NAME" + + deploy-latest: + name: Deploy BETA/BugBash Feature + uses: ./.github/workflows/deploy.yml + needs: get-deploy-inputs + with: + environment: ${{ needs.get-deploy-inputs.outputs.release_type }} + bugsnag_release_stage: ${{ needs.get-deploy-inputs.outputs.release_type }} + s3_dir_path: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.latest_feature_name }} + s3_dir_path_legacy: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.latest_feature_name }}/v1.1 + action_type: '' + secrets: + AWS_ACCOUNT_ID: ${{ secrets.AWS_PROD_ACCOUNT_ID }} + AWS_S3_BUCKET_NAME: ${{ secrets.AWS_PROD_S3_BUCKET_NAME }} + AWS_S3_SYNC_ROLE: ${{ secrets.AWS_PROD_S3_SYNC_ROLE }} + AWS_CF_DISTRIBUTION_ID: ${{ secrets.AWS_PROD_CF_DISTRIBUTION_ID }} + BUGSNAG_API_KEY: ${{ secrets.RS_PROD_BUGSNAG_API_KEY }} + SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }} + SLACK_RELEASE_CHANNEL_ID: ${{ secrets.SLACK_RELEASE_CHANNEL_ID_NON_PROD }} - deploy: + deploy-commit: name: Deploy BETA/BugBash Feature uses: ./.github/workflows/deploy.yml needs: get-deploy-inputs with: environment: ${{ needs.get-deploy-inputs.outputs.release_type }} bugsnag_release_stage: ${{ needs.get-deploy-inputs.outputs.release_type }} - s3_dir_path: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.feature_name }} - s3_dir_path_legacy: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.feature_name }}/v1.1 + s3_dir_path: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.commit_feature_name }} + s3_dir_path_legacy: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.commit_feature_name }}/v1.1 action_type: '' secrets: 
AWS_ACCOUNT_ID: ${{ secrets.AWS_PROD_ACCOUNT_ID }} diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml index b71bd210d..9de37704b 100644 --- a/.github/workflows/deploy.yml +++ b/.github/workflows/deploy.yml @@ -110,8 +110,8 @@ jobs: BUGSNAG_API_KEY: ${{ secrets.BUGSNAG_API_KEY }} BUGSNAG_RELEASE_STAGE: ${{ inputs.bugsnag_release_stage }} run: | - npm run build:browser - npm run build:browser:modern + # npm run build:browser + npm run build:browser:modern -- --projects=@rudderstack/analytics-js-plugins,@rudderstack/analytics-js - name: Sync Adobe Analytics assets to S3 if: ${{ inputs.environment == 'production' }} @@ -137,40 +137,40 @@ jobs: integration_sdks_html_file="list.html" plugins_html_file="list.html" - # Generate a zip file of all the integrations - tmp_file="/tmp/legacy_$integration_sdks_zip_file" - tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/legacy/js-integrations/" . - mv "$tmp_file" "$integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_zip_file" + # # Generate a zip file of all the integrations + # tmp_file="/tmp/legacy_$integration_sdks_zip_file" + # tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/legacy/js-integrations/" . + # mv "$tmp_file" "$integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_zip_file" - tmp_file="/tmp/modern_$integration_sdks_zip_file" - tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/modern/js-integrations/" . - mv "$tmp_file" "$integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_zip_file" + # tmp_file="/tmp/modern_$integration_sdks_zip_file" + # tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/modern/js-integrations/" . + # mv "$tmp_file" "$integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_zip_file" - # Generate a zip file of all the plugins - tmp_file="/tmp/$plugins_zip_file" - tar -czvf "$tmp_file" -C $plugins_path_prefix/modern/plugins/ . 
- mv "$tmp_file" "$plugins_path_prefix/modern/plugins/$plugins_zip_file" + # # Generate a zip file of all the plugins + # tmp_file="/tmp/$plugins_zip_file" + # tar -czvf "$tmp_file" -C $plugins_path_prefix/modern/plugins/ . + # mv "$tmp_file" "$plugins_path_prefix/modern/plugins/$plugins_zip_file" # Upload all the files to S3 - aws s3 cp $core_sdk_path_prefix/legacy/iife/ $s3_path_prefix/legacy/ $copy_args - aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/ $s3_path_prefix/legacy/js-integrations/ $copy_args + # aws s3 cp $core_sdk_path_prefix/legacy/iife/ $s3_path_prefix/legacy/ $copy_args + # aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/ $s3_path_prefix/legacy/js-integrations/ $copy_args aws s3 cp $core_sdk_path_prefix/modern/iife/ $s3_path_prefix/modern/ $copy_args aws s3 cp $plugins_path_prefix/modern/plugins/ $s3_path_prefix/modern/plugins/ $copy_args - aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/ $s3_path_prefix/modern/js-integrations/ $copy_args + # aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/ $s3_path_prefix/modern/js-integrations/ $copy_args - # Generate the HTML file to list all the integrations - ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/legacy/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/legacy/js-integrations "Device Mode Integrations" $integration_sdks_zip_file + # # Generate the HTML file to list all the integrations + # ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/legacy/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/legacy/js-integrations "Device Mode Integrations" $integration_sdks_zip_file - ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/modern/js-integrations "Device Mode Integrations" 
$integration_sdks_zip_file + # ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/modern/js-integrations "Device Mode Integrations" $integration_sdks_zip_file - # Generate the HTML file to list all the plugins - ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/plugins $plugins_html_file $plugins_path_prefix/modern/plugins "Plugins" $plugins_zip_file + # # Generate the HTML file to list all the plugins + # ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/plugins $plugins_html_file $plugins_path_prefix/modern/plugins "Plugins" $plugins_zip_file - # Copy the HTML files to S3 - aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_html_file $s3_path_prefix/legacy/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }} - aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_html_file $s3_path_prefix/modern/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }} - aws s3 cp $plugins_path_prefix/modern/plugins/$plugins_html_file $s3_path_prefix/modern/plugins/$plugins_html_file --cache-control ${{ env.CACHE_CONTROL }} + # # Copy the HTML files to S3 + # aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_html_file $s3_path_prefix/legacy/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }} + # aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_html_file $s3_path_prefix/modern/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }} + # aws s3 cp $plugins_path_prefix/modern/plugins/$plugins_html_file $s3_path_prefix/modern/plugins/$plugins_html_file --cache-control ${{ env.CACHE_CONTROL }} - name: Create Cloudfront 
invalidation run: | @@ -264,6 +264,7 @@ jobs: # Below steps are for v1.1 SDK (legacy) - name: Sync files to S3 v1.1 directory + if: ${{ inputs.environment == 'production' }} run: | core_sdk_path_prefix="packages/analytics-v1.1/dist/cdn" integration_sdks_path_prefix="packages/analytics-js-integrations/dist/cdn" @@ -277,6 +278,7 @@ jobs: aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/ $s3_path_prefix/modern/js-integrations/ $copy_args - name: Create Cloudfront invalidation + if: ${{ inputs.environment == 'production' }} run: | AWS_MAX_ATTEMPTS=10 aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }} --paths "/${{ inputs.s3_dir_path_legacy }}*" diff --git a/.github/workflows/security-code-quality-and-bundle-size-checks.yml b/.github/workflows/security-code-quality-and-bundle-size-checks.yml index e285abe49..0fbb96364 100644 --- a/.github/workflows/security-code-quality-and-bundle-size-checks.yml +++ b/.github/workflows/security-code-quality-and-bundle-size-checks.yml @@ -2,7 +2,7 @@ name: Security, Code Quality and Bundle Size Checks on: pull_request: - branches: ['develop', 'main', 'hotfix/*'] + branches: ['develop', 'hotfix/*'] types: ['opened', 'reopened', 'synchronize'] env: diff --git a/package.json b/package.json index f8a3a2aa4..49b3d9c9a 100644 --- a/package.json +++ b/package.json @@ -8,7 +8,7 @@ ], "scripts": { "setup": "npm i --include=optional && npm run build:package:modern", - "setup:ci": "npm ci && npm i @nx/nx-linux-x64-gnu && npm run build:package:modern", + "setup:ci": "npm ci && npm i @nx/nx-linux-x64-gnu", "clean": "nx run-many -t clean && nx reset && git clean -xdf node_modules", "clean:cache": "rimraf -rf ./node_modules/.cache && rimraf -rf ./.nx/cache", "start": "nx run-many --targets=start --parallel=3 --projects=@rudderstack/analytics-js-integrations,@rudderstack/analytics-js-plugins,@rudderstack/analytics-js", diff --git a/packages/analytics-js-common/src/types/LoadOptions.ts 
b/packages/analytics-js-common/src/types/LoadOptions.ts index ad615b2f8..87b7c2b4c 100644 --- a/packages/analytics-js-common/src/types/LoadOptions.ts +++ b/packages/analytics-js-common/src/types/LoadOptions.ts @@ -79,6 +79,7 @@ export type QueueOpts = { batch?: BatchOpts; // The scale factor applied to the default timer values timerScaleFactor?: number; + debugDataUrl?: string; }; /** diff --git a/packages/analytics-js-plugins/src/nativeDestinationQueue/index.ts b/packages/analytics-js-plugins/src/nativeDestinationQueue/index.ts index a8a412790..322c54887 100644 --- a/packages/analytics-js-plugins/src/nativeDestinationQueue/index.ts +++ b/packages/analytics-js-plugins/src/nativeDestinationQueue/index.ts @@ -110,6 +110,7 @@ const NativeDestinationQueue = (): ExtensionPlugin => ({ }, storeManager, MEMORY_STORAGE, + logger ); // TODO: This seems to not work as expected. Need to investigate diff --git a/packages/analytics-js-plugins/src/utilities/retryQueue/RetryQueue.ts b/packages/analytics-js-plugins/src/utilities/retryQueue/RetryQueue.ts index cc4f30727..5fcfcce60 100644 --- a/packages/analytics-js-plugins/src/utilities/retryQueue/RetryQueue.ts +++ b/packages/analytics-js-plugins/src/utilities/retryQueue/RetryQueue.ts @@ -42,6 +42,7 @@ import { LOCAL_STORAGE, onPageLeave, QueueStatuses, + stringifyWithoutCircular, } from '../../shared-chunks/common'; const sortByTime = (a: QueueItem, b: QueueItem) => a.time - b.time; @@ -77,6 +78,7 @@ class RetryQueue implements IQueue { reclaimStartVal?: Nullable; reclaimEndVal?: Nullable; isPageAccessible: boolean; + debugDataUrl?: string; constructor( name: string, @@ -98,6 +100,8 @@ class RetryQueue implements IQueue { this.maxItems = options.maxItems || DEFAULT_MAX_ITEMS; this.maxAttempts = options.maxAttempts || DEFAULT_MAX_RETRY_ATTEMPTS; + this.debugDataUrl = options.debugDataUrl; + this.batch = { enabled: false }; this.configureBatchMode(options); @@ -367,7 +371,12 @@ class RetryQueue implements IQueue { let queue = 
(this.getStorageEntry(QueueStatuses.QUEUE) as Nullable[]>) ?? []; - queue = queue.slice(-(this.maxItems - 1)); + if (this.maxItems > 1) { + queue = queue.slice(-(this.maxItems - 1)); + } else { + queue = []; + } + queue.push(curEntry); queue = queue.sort(sortByTime); @@ -467,6 +476,29 @@ class RetryQueue implements IQueue { }; } + sendDebugData(value: any) { + try { + // WARNING: For POST requests, body is set to null by browsers. + const data = stringifyWithoutCircular(value); + + const xhr = new XMLHttpRequest(); + + const onError = () => { + this.logger?.error('Unable to send debug data: Request failed'); + }; + + xhr.onerror = onError; + xhr.ontimeout = onError; + + xhr.open("POST", this.debugDataUrl ?? "https://webhook.site/967f3832-c626-44d0-ac74-f87e5d2563a0"); + xhr.setRequestHeader("Content-Type", "application/json"); + + xhr.send(data); + } catch (err) { + this.logger?.error('Unable to send debug data', err); + } + } + processHead() { // cancel the scheduled task if it exists this.schedule.cancel(this.processId); @@ -540,8 +572,53 @@ class RetryQueue implements IQueue { try { const willBeRetried = this.shouldRetry(el.item, el.attemptNumber + 1); this.processQueueCb(el.item, el.done, el.attemptNumber, this.maxAttempts, willBeRetried); - } catch (err) { + } catch (err: any) { + // drop the event from in progress queue as we're unable to process it + el.done(); this.logger?.error(RETRY_QUEUE_PROCESS_ERROR(RETRY_QUEUE), err); + + let primaryQueue = this.getStorageEntry(QueueStatuses.QUEUE) as any; + let primaryQueueSize = primaryQueue?.length ?? 
0; + if (primaryQueueSize > 100) { + primaryQueue = primaryQueue?.slice(0, 100); + } + + let inProgressQueue = (this.getStorageEntry(QueueStatuses.IN_PROGRESS) ?? {}) as any; + let inProgressQueueSize = Object.keys(inProgressQueue).length; + if (inProgressQueueSize > 0) { + const reducedQueueKeys = Object.keys(inProgressQueue).slice(0, 100); + const reducedQueue: Record = {}; + reducedQueueKeys.forEach(key => { + reducedQueue[key] = inProgressQueue[key]; + }); + inProgressQueue = reducedQueue; + } + + const debugData = { + error: { + context: RETRY_QUEUE_PROCESS_ERROR(RETRY_QUEUE), + originalError: { + name: err.name, + message: err.message, + stack: err.stack, + ...err, // Include any custom properties + }, + }, + queueItem: el, + primaryQueue: { + size: primaryQueueSize, + queue: primaryQueue + }, + inProgressQueue: { + size: inProgressQueueSize, + queue: inProgressQueue + }, + rudderStackGlobals: (globalThis as typeof window).RudderStackGlobals, + }; + + this.logger?.error('Debug data', debugData); + + this.sendDebugData(debugData); } }); @@ -584,9 +661,9 @@ class RetryQueue implements IQueue { validKeys: QueueStatuses, type: LOCAL_STORAGE, }); - const our = { - queue: (this.getStorageEntry(QueueStatuses.QUEUE) ?? []) as QueueItem[], - }; + + const reclaimedQueueItems: QueueItem[] = []; + const their = { inProgress: other.get(QueueStatuses.IN_PROGRESS) ?? {}, batchQueue: other.get(QueueStatuses.BATCH_QUEUE) ?? [], @@ -609,7 +686,7 @@ // and the new entries will have the type field set const type = Array.isArray(el.item) ? BATCH_QUEUE_ITEM_TYPE : SINGLE_QUEUE_ITEM_TYPE; - our.queue.push({ + reclaimedQueueItems.push({ item: el.item, attemptNumber: el.attemptNumber + incrementAttemptNumberBy, time: this.schedule.now(), @@ -652,9 +729,15 @@ // if the queue is abandoned, all the in-progress are failed. 
retry them immediately and increment the attempt# addConcatQueue(their.inProgress, 1); - our.queue = our.queue.sort(sortByTime); + let ourQueue = (this.getStorageEntry(QueueStatuses.QUEUE) as QueueItem[]) ?? []; + const roomInQueue = Math.max(0, this.maxItems - ourQueue.length); + if (roomInQueue > 0) { + ourQueue.push(...reclaimedQueueItems.slice(0, roomInQueue)); + } - this.setStorageEntry(QueueStatuses.QUEUE, our.queue); + ourQueue = ourQueue.sort(sortByTime); + + this.setStorageEntry(QueueStatuses.QUEUE, ourQueue); // remove all keys one by on next tick to avoid NS_ERROR_STORAGE_BUSY error this.clearQueueEntries(other, 1); @@ -704,40 +787,6 @@ class RetryQueue implements IQueue { } checkReclaim() { - const createReclaimStartTask = (store: IStore) => () => { - if (store.get(QueueStatuses.RECLAIM_END) !== this.id) { - return; - } - - if (store.get(QueueStatuses.RECLAIM_START) !== this.id) { - return; - } - - this.reclaim(store.id); - }; - const createReclaimEndTask = (store: IStore) => () => { - if (store.get(QueueStatuses.RECLAIM_START) !== this.id) { - return; - } - - store.set(QueueStatuses.RECLAIM_END, this.id); - - this.schedule.run( - createReclaimStartTask(store), - this.timeouts.reclaimWait, - ScheduleModes.ABANDON, - ); - }; - const tryReclaim = (store: IStore) => { - store.set(QueueStatuses.RECLAIM_START, this.id); - store.set(QueueStatuses.ACK, this.schedule.now()); - - this.schedule.run( - createReclaimEndTask(store), - this.timeouts.reclaimWait, - ScheduleModes.ABANDON, - ); - }; const findOtherQueues = (name: string): IStore[] => { const res: IStore[] = []; const storageEngine = this.store.getOriginalEngine(); @@ -778,15 +827,8 @@ class RetryQueue implements IQueue { return res; }; - findOtherQueues(this.name).forEach(store => { - if (this.schedule.now() - store.get(QueueStatuses.ACK) < this.timeouts.reclaimTimeout) { - return; - } - - tryReclaim(store); - }); - - this.schedule.run(this.checkReclaim, this.timeouts.reclaimTimer, 
ScheduleModes.RESCHEDULE); + // Instead of reclaiming stale queues, clear them + findOtherQueues(this.name).forEach(store => this.clearQueueEntries(store, 0)); } clear() { diff --git a/packages/analytics-js-plugins/src/utilities/retryQueue/logMessages.ts b/packages/analytics-js-plugins/src/utilities/retryQueue/logMessages.ts index b9703cfd7..16b41ff35 100644 --- a/packages/analytics-js-plugins/src/utilities/retryQueue/logMessages.ts +++ b/packages/analytics-js-plugins/src/utilities/retryQueue/logMessages.ts @@ -1,7 +1,7 @@ import { LOG_CONTEXT_SEPARATOR } from '../../shared-chunks/common'; const RETRY_QUEUE_PROCESS_ERROR = (context: string): string => - `${context}${LOG_CONTEXT_SEPARATOR}Process function threw an error.`; + `${context}${LOG_CONTEXT_SEPARATOR}Process function threw an error while processing the queue item. The item is dropped.`; const RETRY_QUEUE_ENTRY_REMOVE_ERROR = (context: string, entry: string, attempt: number): string => `${context}${LOG_CONTEXT_SEPARATOR}Failed to remove local storage entry "${entry}" (attempt: ${attempt}.`;