feat: clear stale queues #1952

Draft pull request: wants to merge 16 commits into base branch main.

54 changes: 46 additions & 8 deletions .github/workflows/deploy-beta.yml
@@ -2,6 +2,12 @@ name: Deploy BETA/BugBash Feature

on:
workflow_dispatch:
pull_request:
branches: ['main']
types:
- opened
- synchronize
- reopened

permissions:
id-token: write # allows the JWT to be requested from GitHub's OIDC provider
@@ -13,37 +19,69 @@ env:
jobs:
get-deploy-inputs:
name: Get Deploy Inputs
if: startsWith(github.ref, 'refs/heads/beta/') || startsWith(github.ref, 'refs/tags/bugbash/')
if: startsWith(github.ref, 'refs/heads/beta/') || startsWith(github.ref, 'refs/tags/bugbash/') || startsWith(github.head_ref, 'beta/')
runs-on: [self-hosted, Linux, X64]
outputs:
release_type: ${{ steps.deploy-inputs.outputs.release_type }}
feature_name: ${{ steps.deploy-inputs.outputs.feature_name }}
latest_feature_name: ${{ steps.deploy-inputs.outputs.latest_feature_name }}
commit_feature_name: ${{ steps.deploy-inputs.outputs.commit_feature_name }}

steps:
- name: Extract deploy inputs
id: deploy-inputs
shell: bash
run: |
source_branch_name=${GITHUB_REF##*/}
source_branch_name=${{ github.head_ref }}

echo "source_branch_name=$source_branch_name"
RELEASE_TYPE=beta
grep -q "bugbash/" <<< "${GITHUB_REF}" && RELEASE_TYPE=bugbash
grep -q "bugbash/" <<< "${${{ github.head_ref }}}" && RELEASE_TYPE=bugbash
FEATURE_NAME=${source_branch_name#bugbash/}
FEATURE_NAME=${FEATURE_NAME#beta/}
FEATURE_NAME=${FEATURE_NAME#refs/heads/}
FEATURE_NAME=${FEATURE_NAME#refs/tags/}

echo "feature_name=$FEATURE_NAME"

LATEST_FEATURE_NAME="${FEATURE_NAME}/latest"
COMMIT_FEATURE_NAME="${FEATURE_NAME}/$(echo ${{ github.event.pull_request.head.sha }} | cut -c1-7)"

echo "release_type=$RELEASE_TYPE" >> $GITHUB_OUTPUT
echo "feature_name=$FEATURE_NAME" >> $GITHUB_OUTPUT
echo "latest_feature_name=$LATEST_FEATURE_NAME" >> $GITHUB_OUTPUT
echo "commit_feature_name=$COMMIT_FEATURE_NAME" >> $GITHUB_OUTPUT

echo "release_type=$RELEASE_TYPE"
echo "latest_feature_name=$LATEST_FEATURE_NAME"
echo "commit_feature_name=$COMMIT_FEATURE_NAME"

deploy-latest:
name: Deploy BETA/BugBash Feature
uses: ./.github/workflows/deploy.yml
needs: get-deploy-inputs
with:
environment: ${{ needs.get-deploy-inputs.outputs.release_type }}
bugsnag_release_stage: ${{ needs.get-deploy-inputs.outputs.release_type }}
s3_dir_path: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.latest_feature_name }}
s3_dir_path_legacy: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.latest_feature_name }}/v1.1
action_type: ''
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PROD_ACCOUNT_ID }}
AWS_S3_BUCKET_NAME: ${{ secrets.AWS_PROD_S3_BUCKET_NAME }}
AWS_S3_SYNC_ROLE: ${{ secrets.AWS_PROD_S3_SYNC_ROLE }}
AWS_CF_DISTRIBUTION_ID: ${{ secrets.AWS_PROD_CF_DISTRIBUTION_ID }}
BUGSNAG_API_KEY: ${{ secrets.RS_PROD_BUGSNAG_API_KEY }}
SLACK_BOT_TOKEN: ${{ secrets.SLACK_BOT_TOKEN }}
SLACK_RELEASE_CHANNEL_ID: ${{ secrets.SLACK_RELEASE_CHANNEL_ID_NON_PROD }}

deploy:
deploy-commit:
name: Deploy BETA/BugBash Feature
uses: ./.github/workflows/deploy.yml
needs: get-deploy-inputs
with:
environment: ${{ needs.get-deploy-inputs.outputs.release_type }}
bugsnag_release_stage: ${{ needs.get-deploy-inputs.outputs.release_type }}
s3_dir_path: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.feature_name }}
s3_dir_path_legacy: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.feature_name }}/v1.1
s3_dir_path: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.commit_feature_name }}
s3_dir_path_legacy: ${{ needs.get-deploy-inputs.outputs.release_type }}/${{ needs.get-deploy-inputs.outputs.commit_feature_name }}/v1.1
action_type: ''
secrets:
AWS_ACCOUNT_ID: ${{ secrets.AWS_PROD_ACCOUNT_ID }}
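
For illustration, here is a minimal local sketch of the feature-name derivation performed by the get-deploy-inputs step above, assuming a hypothetical head branch beta/clear-stale-queues and a hypothetical commit SHA; in the workflow these values come from the github.head_ref and github.event.pull_request.head.sha contexts.

# Hypothetical inputs; the workflow reads these from GitHub contexts instead.
source_branch_name="beta/clear-stale-queues"
head_sha="0123abcdef0123abcdef0123abcdef0123abcdef"

RELEASE_TYPE=beta
grep -q "bugbash/" <<< "$source_branch_name" && RELEASE_TYPE=bugbash

# Strip the optional bugbash/ or beta/ prefix (and full ref prefixes, defensively).
FEATURE_NAME=${source_branch_name#bugbash/}
FEATURE_NAME=${FEATURE_NAME#beta/}
FEATURE_NAME=${FEATURE_NAME#refs/heads/}
FEATURE_NAME=${FEATURE_NAME#refs/tags/}

LATEST_FEATURE_NAME="${FEATURE_NAME}/latest"
COMMIT_FEATURE_NAME="${FEATURE_NAME}/$(cut -c1-7 <<< "$head_sha")"

echo "$RELEASE_TYPE $FEATURE_NAME $LATEST_FEATURE_NAME $COMMIT_FEATURE_NAME"
# Prints: beta clear-stale-queues clear-stale-queues/latest clear-stale-queues/0123abc

With these outputs, deploy-latest publishes to a stable .../clear-stale-queues/latest path, while deploy-commit publishes to a commit-pinned .../clear-stale-queues/0123abc path.
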
52 changes: 27 additions & 25 deletions .github/workflows/deploy.yml
@@ -110,8 +110,8 @@ jobs:
BUGSNAG_API_KEY: ${{ secrets.BUGSNAG_API_KEY }}
BUGSNAG_RELEASE_STAGE: ${{ inputs.bugsnag_release_stage }}
run: |
npm run build:browser
npm run build:browser:modern
# npm run build:browser
npm run build:browser:modern -- --projects=@rudderstack/analytics-js-plugins,@rudderstack/analytics-js

- name: Sync Adobe Analytics assets to S3
if: ${{ inputs.environment == 'production' }}
@@ -137,40 +137,40 @@
integration_sdks_html_file="list.html"
plugins_html_file="list.html"

# Generate a zip file of all the integrations
tmp_file="/tmp/legacy_$integration_sdks_zip_file"
tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/legacy/js-integrations/" .
mv "$tmp_file" "$integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_zip_file"
# # Generate a zip file of all the integrations
# tmp_file="/tmp/legacy_$integration_sdks_zip_file"
# tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/legacy/js-integrations/" .
# mv "$tmp_file" "$integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_zip_file"

tmp_file="/tmp/modern_$integration_sdks_zip_file"
tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/modern/js-integrations/" .
mv "$tmp_file" "$integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_zip_file"
# tmp_file="/tmp/modern_$integration_sdks_zip_file"
# tar -czvf "$tmp_file" -C "$integration_sdks_path_prefix/modern/js-integrations/" .
# mv "$tmp_file" "$integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_zip_file"

# Generate a zip file of all the plugins
tmp_file="/tmp/$plugins_zip_file"
tar -czvf "$tmp_file" -C $plugins_path_prefix/modern/plugins/ .
mv "$tmp_file" "$plugins_path_prefix/modern/plugins/$plugins_zip_file"
# # Generate a zip file of all the plugins
# tmp_file="/tmp/$plugins_zip_file"
# tar -czvf "$tmp_file" -C $plugins_path_prefix/modern/plugins/ .
# mv "$tmp_file" "$plugins_path_prefix/modern/plugins/$plugins_zip_file"

# Upload all the files to S3
aws s3 cp $core_sdk_path_prefix/legacy/iife/ $s3_path_prefix/legacy/ $copy_args
aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/ $s3_path_prefix/legacy/js-integrations/ $copy_args
# aws s3 cp $core_sdk_path_prefix/legacy/iife/ $s3_path_prefix/legacy/ $copy_args
# aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/ $s3_path_prefix/legacy/js-integrations/ $copy_args

aws s3 cp $core_sdk_path_prefix/modern/iife/ $s3_path_prefix/modern/ $copy_args
aws s3 cp $plugins_path_prefix/modern/plugins/ $s3_path_prefix/modern/plugins/ $copy_args
aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/ $s3_path_prefix/modern/js-integrations/ $copy_args
# aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/ $s3_path_prefix/modern/js-integrations/ $copy_args

# Generate the HTML file to list all the integrations
./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/legacy/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/legacy/js-integrations "Device Mode Integrations" $integration_sdks_zip_file
# # Generate the HTML file to list all the integrations
# ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/legacy/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/legacy/js-integrations "Device Mode Integrations" $integration_sdks_zip_file

./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/modern/js-integrations "Device Mode Integrations" $integration_sdks_zip_file
# ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/js-integrations $integration_sdks_html_file $integration_sdks_path_prefix/modern/js-integrations "Device Mode Integrations" $integration_sdks_zip_file

# Generate the HTML file to list all the plugins
./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/plugins $plugins_html_file $plugins_path_prefix/modern/plugins "Plugins" $plugins_zip_file
# # Generate the HTML file to list all the plugins
# ./scripts/list-sdk-components.sh ${{ secrets.AWS_S3_BUCKET_NAME }} ${{ inputs.s3_dir_path }}/modern/plugins $plugins_html_file $plugins_path_prefix/modern/plugins "Plugins" $plugins_zip_file

# Copy the HTML files to S3
aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_html_file $s3_path_prefix/legacy/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }}
aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_html_file $s3_path_prefix/modern/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }}
aws s3 cp $plugins_path_prefix/modern/plugins/$plugins_html_file $s3_path_prefix/modern/plugins/$plugins_html_file --cache-control ${{ env.CACHE_CONTROL }}
# # Copy the HTML files to S3
# aws s3 cp $integration_sdks_path_prefix/legacy/js-integrations/$integration_sdks_html_file $s3_path_prefix/legacy/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }}
# aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/$integration_sdks_html_file $s3_path_prefix/modern/js-integrations/$integration_sdks_html_file --cache-control ${{ env.CACHE_CONTROL }}
# aws s3 cp $plugins_path_prefix/modern/plugins/$plugins_html_file $s3_path_prefix/modern/plugins/$plugins_html_file --cache-control ${{ env.CACHE_CONTROL }}

- name: Create Cloudfront invalidation
run: |
@@ -264,6 +264,7 @@ jobs:
# Below steps are for v1.1 SDK (legacy)

- name: Sync files to S3 v1.1 directory
if: ${{ inputs.environment == 'production' }}
run: |
core_sdk_path_prefix="packages/analytics-v1.1/dist/cdn"
integration_sdks_path_prefix="packages/analytics-js-integrations/dist/cdn"
@@ -277,6 +277,7 @@
aws s3 cp $integration_sdks_path_prefix/modern/js-integrations/ $s3_path_prefix/modern/js-integrations/ $copy_args

- name: Create Cloudfront invalidation
if: ${{ inputs.environment == 'production' }}
run: |
AWS_MAX_ATTEMPTS=10 aws cloudfront create-invalidation --distribution-id ${{ secrets.AWS_CF_DISTRIBUTION_ID }} --paths "/${{ inputs.s3_dir_path_legacy }}*"

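
As a hedged illustration of where the two beta jobs land in S3 (the bucket name and exact prefix handling below are assumptions; the real values come from the AWS_S3_BUCKET_NAME secret and the s3_dir_path inputs shown above), the output of deploy-latest and deploy-commit for a hypothetical branch beta/clear-stale-queues at commit 0123abc could be inspected with:

# Hypothetical bucket and paths; substitute the actual bucket and feature name.
aws s3 ls "s3://example-sdk-bucket/beta/clear-stale-queues/latest/modern/" --recursive
aws s3 ls "s3://example-sdk-bucket/beta/clear-stale-queues/0123abc/modern/" --recursive

The latest/ prefix is overwritten on every push to the branch, while each commit-pinned prefix stays fixed, so a specific build can still be referenced after newer commits land.
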
@@ -2,7 +2,7 @@ name: Security, Code Quality and Bundle Size Checks

on:
pull_request:
branches: ['develop', 'main', 'hotfix/*']
branches: ['develop', 'hotfix/*']
types: ['opened', 'reopened', 'synchronize']

env:
2 changes: 1 addition & 1 deletion package.json
@@ -8,7 +8,7 @@
],
"scripts": {
"setup": "npm i --include=optional && npm run build:package:modern",
"setup:ci": "npm ci && npm i @nx/nx-linux-x64-gnu && npm run build:package:modern",
"setup:ci": "npm ci && npm i @nx/nx-linux-x64-gnu",
"clean": "nx run-many -t clean && nx reset && git clean -xdf node_modules",
"clean:cache": "rimraf -rf ./node_modules/.cache && rimraf -rf ./.nx/cache",
"start": "nx run-many --targets=start --parallel=3 --projects=@rudderstack/analytics-js-integrations,@rudderstack/analytics-js-plugins,@rudderstack/analytics-js",
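
Since setup:ci no longer builds the packages, a rough local equivalent of the CI sequence after this change, assuming the same npm scripts, would be:

# Install dependencies only (the package build was dropped from setup:ci).
npm run setup:ci
# Build the modern browser bundles explicitly, as the deploy workflow now does.
npm run build:browser:modern -- --projects=@rudderstack/analytics-js-plugins,@rudderstack/analytics-js
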
1 change: 1 addition & 0 deletions packages/analytics-js-common/src/types/LoadOptions.ts
@@ -79,6 +79,7 @@ export type QueueOpts = {
batch?: BatchOpts;
// The scale factor applied to the default timer values
timerScaleFactor?: number;
debugDataUrl?: string;
};

/**
@@ -110,6 +110,7 @@ const NativeDestinationQueue = (): ExtensionPlugin => ({
},
storeManager,
MEMORY_STORAGE,
logger
);

// TODO: This seems to not work as expected. Need to investigate