From 7d20361f388299e1a3dca7167ef4de5227a346b6 Mon Sep 17 00:00:00 2001 From: Cassidy Schaufele Date: Wed, 30 Aug 2023 11:54:24 -0600 Subject: [PATCH 01/15] DOP-3963: Render only latest version of spec at base spec component (#901) * build staging ecs on push * DOP-3963: Update tests & stage * Update .github/workflows/deploy-stg-ecs.yml --------- Co-authored-by: Matt Meigs --- .../src/services/pageBuilder.ts | 25 +++++++++++++++++-- .../tests/unit/services/pageBuilder.test.ts | 22 ++++------------ 2 files changed, 28 insertions(+), 19 deletions(-) diff --git a/modules/oas-page-builder/src/services/pageBuilder.ts b/modules/oas-page-builder/src/services/pageBuilder.ts index fcfe0ff4b..a25379be5 100644 --- a/modules/oas-page-builder/src/services/pageBuilder.ts +++ b/modules/oas-page-builder/src/services/pageBuilder.ts @@ -45,6 +45,7 @@ interface AtlasSpecUrlParams { apiKeyword: string; apiVersion?: string; resourceVersion?: string; + latestResourceVersion?: string; } const ensureSavedVersionDataMatches = (versions: VersionData, apiVersion?: string, resourceVersion?: string) => { @@ -57,7 +58,12 @@ const ensureSavedVersionDataMatches = (versions: VersionData, apiVersion?: strin } }; -const getAtlasSpecUrl = async ({ apiKeyword, apiVersion, resourceVersion }: AtlasSpecUrlParams) => { +const getAtlasSpecUrl = async ({ + apiKeyword, + apiVersion, + resourceVersion, + latestResourceVersion, +}: AtlasSpecUrlParams) => { // Currently, the only expected API fetched programmatically is the Cloud Admin API, // but it's possible to have more in the future with varying processes. const keywords = ['cloud']; @@ -66,7 +72,11 @@ const getAtlasSpecUrl = async ({ apiKeyword, apiVersion, resourceVersion }: Atla } const versionExtension = `${apiVersion ? `-v${apiVersion.split('.')[0]}` : ''}${ - apiVersion && resourceVersion ? `-${resourceVersion}` : '' + apiVersion && resourceVersion + ? `-${resourceVersion}` + : apiVersion && latestResourceVersion && !resourceVersion + ? 
`-${latestResourceVersion}` + : '' }`; let oasFileURL; @@ -169,6 +179,17 @@ async function getOASpec({ resourceVersions.length - 1 ]; + /* Build the latest Resource Version spec if on a base API Version that has multiple Resource Versions + * Do not build the base API VERSION, since it may have out of order resources*/ + const { oasFileURL, successfulGitHash } = await getAtlasSpecUrl({ + apiKeyword: source, + apiVersion, + resourceVersion, + latestResourceVersion, + }); + spec = oasFileURL; + isSuccessfulBuild = successfulGitHash; + versionOptions = { active: { apiVersion, diff --git a/modules/oas-page-builder/tests/unit/services/pageBuilder.test.ts b/modules/oas-page-builder/tests/unit/services/pageBuilder.test.ts index 2433ff122..dbe57a3af 100644 --- a/modules/oas-page-builder/tests/unit/services/pageBuilder.test.ts +++ b/modules/oas-page-builder/tests/unit/services/pageBuilder.test.ts @@ -192,7 +192,7 @@ describe('pageBuilder', () => { await buildOpenAPIPages(testEntries, testOptions); - expect(mockExecute).toBeCalledTimes(testEntries.length * 2); + expect(mockExecute).toBeCalledTimes(4); // Local expect(mockExecute).toBeCalledWith( `${testOptions.repo}/source${testEntries[0][1].source}`, @@ -201,12 +201,6 @@ describe('pageBuilder', () => { getExpectedVersionOptions(`${SITE_URL}/${testEntries[0][0]}`) ); - expect(mockExecute).toBeCalledWith( - `${testOptions.repo}/source${testEntries[0][1].source}`, - `${testOptions.output}/${testEntries[0][0]}/index.html`, - expectedDefaultBuildOptions, - getExpectedVersionOptions(`${SITE_URL}/${testEntries[0][0]}`) - ); // Url expect(mockExecute).toBeCalledWith( `${testEntries[1][1].source}`, @@ -215,12 +209,6 @@ describe('pageBuilder', () => { getExpectedVersionOptions(`${SITE_URL}/${testEntries[1][0]}`) ); - expect(mockExecute).toBeCalledWith( - `${testEntries[1][1].source}`, - getExpectedOutputPath(testOptions.output, testEntries[1][0], API_VERSION), - expectedDefaultBuildOptions, - getExpectedVersionOptions(`${SITE_URL}/${testEntries[1][0]}`) - ); // Atlas expect(mockExecute).toBeCalledWith( `https://mongodb-mms-prod-build-server.s3.amazonaws.com/openapi/${MOCKED_GIT_HASH}-v2-${RESOURCE_VERSION}.json`, @@ -230,8 +218,8 @@ describe('pageBuilder', () => { ); expect(mockExecute).toBeCalledWith( - `https://mongodb-mms-prod-build-server.s3.amazonaws.com/openapi/${MOCKED_GIT_HASH}-v2.json`, - getExpectedOutputPath(testOptions.output, testEntries[2][0], API_VERSION), + `https://mongodb-mms-prod-build-server.s3.amazonaws.com/openapi/${MOCKED_GIT_HASH}-v2-${RESOURCE_VERSION}.json`, + getExpectedOutputPath(testOptions.output, testEntries[2][0], API_VERSION, RESOURCE_VERSION), expectedAtlasBuildOptions, getExpectedVersionOptions(`${SITE_URL}/${testEntries[2][0]}`) ); @@ -280,8 +268,8 @@ describe('pageBuilder', () => { ); expect(mockExecute).toBeCalledWith( - `https://mongodb-mms-prod-build-server.s3.amazonaws.com/openapi/${MOCKED_GIT_HASH}-v2.json`, - getExpectedOutputPath(testOptions.output, testEntries[0][0], API_VERSION), + `https://mongodb-mms-prod-build-server.s3.amazonaws.com/openapi/${MOCKED_GIT_HASH}-v2-${LATEST_RESOURCE_VERSION}.json`, + getExpectedOutputPath(testOptions.output, testEntries[0][0], API_VERSION, LATEST_RESOURCE_VERSION), expectedAtlasBuildOptions, getExpectedVersionOptions(`${SITE_URL}/${testEntries[0][0]}`, LATEST_RESOURCE_VERSION) ); From 3d7fa56acd3344feb227a44c8364a48851310c39 Mon Sep 17 00:00:00 2001 From: Brandon Ly Date: Fri, 1 Sep 2023 13:32:34 -0500 Subject: [PATCH 02/15] [FEATURE]: Comment on PRs with webhook URL for feature 
branch (#903) * [FEATURE]: Add commenting to PR * [FEATURE]: use correct cdk file path * [FEATURE]: ADd token * [FEATURE]: Log stuff for debugging :S * [FEATURE]: Add try catch and conditional for commenting * [FEATURE]: Get cwd * [FEATURE]: Add env for branch name * [FEATURE]: Use path.join * [FEATURE]: Remove bad log, i hate plain js --- .github/scripts/webhook-pr-comment.js | 14 ++++++++++++++ .github/workflows/deploy-feature-branch.yml | 20 ++++++++++++++++---- 2 files changed, 30 insertions(+), 4 deletions(-) create mode 100644 .github/scripts/webhook-pr-comment.js diff --git a/.github/scripts/webhook-pr-comment.js b/.github/scripts/webhook-pr-comment.js new file mode 100644 index 000000000..19c60882d --- /dev/null +++ b/.github/scripts/webhook-pr-comment.js @@ -0,0 +1,14 @@ +const fs = require('fs'); +const path = require('path'); +module.exports = () => { + try { + const outputsFile = fs.readFileSync('cdk-infra/outputs.json').toString(); + const outputs = JSON.parse(outputsFile); + + const webhook = Object.values(outputs[`auto-builder-stack-enhancedApp-stg-${process.env.GIT_BRANCH}-webhooks`])[0]; + return webhook; + } catch (error) { + console.log('Error occurred when retrieving Webhook URL', error); + return ''; + } +}; diff --git a/.github/workflows/deploy-feature-branch.yml b/.github/workflows/deploy-feature-branch.yml index 316d9ea2a..3ffc81099 100644 --- a/.github/workflows/deploy-feature-branch.yml +++ b/.github/workflows/deploy-feature-branch.yml @@ -5,12 +5,11 @@ on: types: - opened - reopened - name: Initial Feature Branch Deploy jobs: deploy: + permissions: write-all runs-on: ubuntu-latest - steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v1 @@ -28,5 +27,18 @@ jobs: cd cdk-infra/ npm ci npm run deploy:feature -- -c env=stg -c customFeatureName=enhancedApp-stg-${{github.head_ref}} --outputs-file outputs.json - - name: Display resource URLs - run: cat cdk-infra/outputs.json \ No newline at end of file + - name: Get Webhook URL + uses: actions/github-script@v6 + id: webhook + env: + GIT_BRANCH: ${{github.head_ref}} + with: + script: | + const script = require('./.github/scripts/webhook-pr-comment.js'); + return script() + result-encoding: string + - name: Comment on PR + if: steps.webhook.outputs.result != '' + run: gh pr comment ${{github.event.number}} --body "The URL for your feature branch webhook is ${{steps.webhook.outputs.result}}" + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file From ac8a104ce12546db98ab883b5039b308f9b13676 Mon Sep 17 00:00:00 2001 From: Allison Reinheimer Moore Date: Thu, 7 Sep 2023 10:25:17 -0400 Subject: [PATCH 03/15] =?UTF-8?q?DOP-3977:=20encourage=20writers=20to=20sw?= =?UTF-8?q?itch=20webhooks=20=F0=9F=A5=95=20(#902)?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit * DOP-3977: encourage writers to switch webhooks 🥕 * more newlines * revert v2 changes --- api/controllers/v1/jobs.ts | 16 +++++++++++++--- 1 file changed, 13 insertions(+), 3 deletions(-) diff --git a/api/controllers/v1/jobs.ts b/api/controllers/v1/jobs.ts index 96ca8eb74..652b6d226 100644 --- a/api/controllers/v1/jobs.ts +++ b/api/controllers/v1/jobs.ts @@ -166,13 +166,22 @@ function prepProgressMessage( jobId: string, jobTitle: string, status: string, - errorReason: string + errorReason: string, + jobType?: string ): string { const msg = `Your Job (<${jobUrl}${jobId}|${jobTitle}>) `; const env = c.get('env'); switch (status) { case 'inQueue': - return msg + 'has successfully been added to the ' 
+ env + ' queue.'; + // Encourage writers to update to new webhook on githubPush jobs + let inQueueMsg = msg; + if (jobType == 'githubPush') { + const webhookWikiUrl = + 'https://wiki.corp.mongodb.com/display/DE/How-To%3A+Use+Snooty%27s+Autobuilder+to+Build+Your+Content'; + const updatePlease = `:exclamation: You used the old webhook for this build. <${webhookWikiUrl}|Update to the new webhook> in your fork of this repo to save 90s per build.`; + inQueueMsg = updatePlease + '\n\n' + msg; + } + return inQueueMsg + 'has successfully been added to the ' + env + ' queue.'; case 'inProgress': return msg + 'is now being processed.'; case 'completed': @@ -213,7 +222,8 @@ async function NotifyBuildProgress(jobId: string): Promise { jobId, jobTitle, fullDocument.status as string, - fullDocument?.error?.reason || '' + fullDocument?.error?.reason || '', + fullDocument?.payload.jobType ), entitlement['slack_user_id'] ); From 99f917169bae325b37e1dfb5262bc46d859d27d5 Mon Sep 17 00:00:00 2001 From: Cassidy Schaufele Date: Thu, 7 Sep 2023 10:53:35 -0600 Subject: [PATCH 04/15] Update Dockerfile --- Dockerfile | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index d0f4838e8..94305a712 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,8 +22,8 @@ RUN cd ./modules/oas-page-builder \ # where repo work will happen FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge -ARG SNOOTY_PARSER_VERSION=0.14.6 -ARG SNOOTY_FRONTEND_VERSION=0.14.13 +ARG SNOOTY_PARSER_VERSION=0.14.7 +ARG SNOOTY_FRONTEND_VERSION=0.14.14 ARG MUT_VERSION=0.10.3 ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH From a746745556e4b039601a956b8edab0490bb5aed4 Mon Sep 17 00:00:00 2001 From: Cassidy Schaufele Date: Thu, 7 Sep 2023 12:20:39 -0600 Subject: [PATCH 05/15] Update Dockerfile --- Dockerfile | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/Dockerfile b/Dockerfile index 94305a712..0554f2afc 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,7 +22,7 @@ RUN cd ./modules/oas-page-builder \ # where repo work will happen FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge -ARG SNOOTY_PARSER_VERSION=0.14.7 +ARG SNOOTY_PARSER_VERSION=0.14.8 ARG SNOOTY_FRONTEND_VERSION=0.14.14 ARG MUT_VERSION=0.10.3 ARG REDOC_CLI_VERSION=1.2.2 From a7642af68340f04890816c206153ec133409632f Mon Sep 17 00:00:00 2001 From: anabellabuckvar <41971124+anabellabuckvar@users.noreply.github.com> Date: Thu, 7 Sep 2023 16:05:07 -0400 Subject: [PATCH 06/15] DOP-3999 Exclude oldgen jobs from "don't send notifications of non-GC builds" logic (#904) * DOP-3999 notify oldgen jobs * DOP-3999 autobuilder php-library * DOP-3999 rollback last change * DOP-3999 undid package-lock changes * DOP-3999 undid package-lock.json changes * DOP-3999 use isNextGen field * DOP-3999 fixed if statement --- src/job/jobHandler.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/job/jobHandler.ts b/src/job/jobHandler.ts index accb96f2d..357bd2c27 100644 --- a/src/job/jobHandler.ts +++ b/src/job/jobHandler.ts @@ -105,7 +105,7 @@ export abstract class JobHandler { // completed after the Gatsby Cloud build via the SnootyBuildComplete lambda. 
const { _id: jobId, user } = this.currJob; const gatsbyCloudSiteId = await this._repoEntitlementsRepo.getGatsbySiteIdByGithubUsername(user); - if (gatsbyCloudSiteId && this.currJob.payload.jobType === 'githubPush') { + if (this.currJob.payload.isNextGen && gatsbyCloudSiteId && this.currJob.payload.jobType === 'githubPush') { this.logger.info( jobId, `User ${user} has a Gatsby Cloud site. The Autobuilder will not mark the build as completed right now.` From ce592fc5ef2af5be77a9ff4d4f15e7b42cc24771 Mon Sep 17 00:00:00 2001 From: Brandon Ly Date: Thu, 7 Sep 2023 15:47:37 -0500 Subject: [PATCH 07/15] [DOP-3911]: Check project path for monorepo (#894) * [DOP-3911]: Add way to check file changes and determine what projects have been changed if head_commit is included * [DOP-3911]: Add some comments * [DOP-3911]: Add checks for snooty.toml in monorepo * [DOP-3911]: Add commit info and simplify files * [DOP-3911]: Add unit tests * [DOP-3911]: Update tests * [DOP-3911]: Update comments * [DOP-3911]: Remove extra comment * [DOP-3911]: Remove unused code * [DOP-3911]: Remove unused property in job payload * [DOP-3911]: Remove unused import * [DOP-3911]: Remove log and irrelevant comments * [DOP-3911]: Refactor to start using tree approach and create new directories for organization * [DOP-3911]: Refactor set to be called from parent function * [DOP-3911]: More refactoring, update tests to use set * [DOP-3911]: Clean up * [DOP-3911]: Resolve merge conflicts * [DOP-3911]: Resolve merge conflicts * [DOP-3911]: Add types and add comments * [DOP-3911]: Add feature * [DOP-3911]: Add feature flag for testing monorepo path * [DOP-3911]: Fix typo * [DOP-3911]: Remove get snooty.toml * [DOP-3911]: Remove use of config library, and clean up code * [DOP-3911]: Add parameter store flag for ease of testing * [DOP-3911]: Comment update * [DOP-3911]: Properly name test --- .github/workflows/update-feature-branch.yml | 2 +- api/controllers/v2/github.ts | 22 ++++- .../constructs/api/webhook-env-construct.ts | 7 ++ src/clients/githubClient.ts | 19 ++++ src/monorepo/index.ts | 42 ++++++++ src/monorepo/services/get-paths.ts | 36 +++++++ src/monorepo/types/atlas-types.ts | 25 +++++ src/monorepo/types/github-types.ts | 5 + src/monorepo/utils/monorepo-constants.ts | 2 + src/monorepo/utils/path-utils.ts | 50 ++++++++++ src/repositories/docSetRepository.ts | 45 +++++++++ tests/unit/monorepo/monorepo.test.ts | 98 +++++++++++++++++++ 12 files changed, 350 insertions(+), 3 deletions(-) create mode 100644 src/clients/githubClient.ts create mode 100644 src/monorepo/index.ts create mode 100644 src/monorepo/services/get-paths.ts create mode 100644 src/monorepo/types/atlas-types.ts create mode 100644 src/monorepo/types/github-types.ts create mode 100644 src/monorepo/utils/monorepo-constants.ts create mode 100644 src/monorepo/utils/path-utils.ts create mode 100644 src/repositories/docSetRepository.ts create mode 100644 tests/unit/monorepo/monorepo.test.ts diff --git a/.github/workflows/update-feature-branch.yml b/.github/workflows/update-feature-branch.yml index 0bbf6bb53..72ebe490c 100644 --- a/.github/workflows/update-feature-branch.yml +++ b/.github/workflows/update-feature-branch.yml @@ -46,7 +46,7 @@ jobs: cd cdk-infra/ npm ci npm run deploy:feature:stack -- -c env=stg -c customFeatureName=enhancedApp-stg-${{github.head_ref}} \ - auto-builder-stack-enhancedApp-stg-${{github.head_ref}}-webhook + auto-builder-stack-enhancedApp-stg-${{github.head_ref}}-webhooks - name: Update Worker Stack if: steps.filter.outputs.worker == 'true' 
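
As context for the monorepo detection added in this patch: getMonorepoPaths builds a set of every
directory in the repository tree that contains a snooty.toml, then walks each changed file's path
upward until it reaches one of those directories. A minimal self-contained sketch of that walk-up
step (it mirrors getProjectDirFromPath from the diffs that follow; the helper name is illustrative,
and the sample directories and file paths are modeled on the patch's own tests):

    // Directories known to contain a snooty.toml (normally derived from the GitHub tree API).
    const snootyDirSet = new Set<string>(['server-docs', 'server-docs/source/datalake']);

    // Walk a changed file's path upward until a snooty.toml directory is found.
    function projectDirForChangedFile(filePath: string, dirSet: Set<string>): string {
      const segments = filePath.split('/');
      segments.pop(); // drop the file name itself
      while (segments.length > 0) {
        const candidate = segments.join('/');
        if (dirSet.has(candidate)) return candidate; // nearest enclosing project wins
        segments.pop();
      }
      return ''; // no enclosing snooty.toml: the file is not part of a docs project
    }

    console.log(projectDirForChangedFile('server-docs/source/datalake/source/index.rst', snootyDirSet));
    // -> 'server-docs/source/datalake'
    console.log(projectDirForChangedFile('server-docs/source/index.rst', snootyDirSet));
    // -> 'server-docs'
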
diff --git a/api/controllers/v2/github.ts b/api/controllers/v2/github.ts
index 19657f31c..e50842d51 100644
--- a/api/controllers/v2/github.ts
+++ b/api/controllers/v2/github.ts
@@ -8,6 +8,8 @@ import { ConsoleLogger } from '../../../src/services/logger';
 import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository';
 import { EnhancedJob, JobStatus } from '../../../src/entities/job';
 import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github';
+import { getMonorepoPaths } from '../../../src/monorepo';
+import { getUpdatedFilePaths } from '../../../src/monorepo/utils/path-utils';
 
 async function prepGithubPushPayload(
   githubEvent: PushEvent,
@@ -75,9 +77,9 @@ export const TriggerBuild = async (event: APIGatewayEvent): Promise {
+  const commitInfo: GitCommitInfo = {
+    ownerName,
+    repoName,
+    commitSha,
+  };
+
+  const snootyDirSet = await getSnootyDirSet(commitInfo);
+
+  const projects = updatedFilePaths.map((path) => getProjectDirFromPath(path, snootyDirSet));
+
+  // remove empty strings and remove duplicated values
+  return Array.from(new Set(projects.filter((dir) => !!dir)));
+}
diff --git a/src/monorepo/services/get-paths.ts b/src/monorepo/services/get-paths.ts
new file mode 100644
index 000000000..8716d7ec1
--- /dev/null
+++ b/src/monorepo/services/get-paths.ts
@@ -0,0 +1,36 @@
+import { SNOOTY_TOML_FILENAME } from '../utils/monorepo-constants';
+
+/**
+ * This function returns the project path for a given file change from a docs repository
+ * within the monorepo. This function supports nested projects.
+ * @param path An added/modified/removed file path from a commit e.g. server-docs/source/index.rst
+ * @param snootyDirSet The set of all directories in the monorepo that contain a snooty.toml file.
+ * @returns The closest file path that contains a snooty.toml, relative to the path parameter.
+ */
+export function getProjectDirFromPath(path: string, snootyDirSet: Set<string>): string {
+  const pathArray = path.split('/');
+  if (pathArray.length === 0) {
+    console.warn('WARNING! Empty path found: ', path);
+    return '';
+  }
+
+  /**
+   * If the changed file is the snooty.toml file, we know that we
+   * are in the project's root directory. We can join the original
+   * pathArray to get the project path since the snooty.toml has been removed.
+   */
+  const changedFile = pathArray.pop();
+
+  if (changedFile === SNOOTY_TOML_FILENAME) return pathArray.join('/');
+
+  while (pathArray.length > 0) {
+    const currDir = pathArray.join('/');
+
+    if (snootyDirSet.has(currDir)) return currDir;
+
+    pathArray.pop();
+  }
+
+  console.warn(`WARNING! No snooty.toml found for the given path: ${path}`);
+  return '';
+}
diff --git a/src/monorepo/types/atlas-types.ts b/src/monorepo/types/atlas-types.ts
new file mode 100644
index 000000000..1af35ee35
--- /dev/null
+++ b/src/monorepo/types/atlas-types.ts
@@ -0,0 +1,25 @@
+interface DirectoryConfig {
+  snooty_toml?: string;
+  source?: string;
+}
+
+interface RepoConfig {
+  repoName: string;
+  deployable: boolean;
+  branches: BranchConfig[];
+}
+
+interface BranchConfig {
+  gitBranchName: string;
+}
+
+// TODO: Populate these more. For DOP-3911, they are
+// being added for testing purposes.
+export interface DocSetEntry {
+  project: string;
+  prefix: string;
+  bucket: string;
+  url: string;
+  directories?: DirectoryConfig;
+  repos?: RepoConfig[];
+}
diff --git a/src/monorepo/types/github-types.ts b/src/monorepo/types/github-types.ts
new file mode 100644
index 000000000..161613fa8
--- /dev/null
+++ b/src/monorepo/types/github-types.ts
@@ -0,0 +1,5 @@
+export interface GitCommitInfo {
+  commitSha: string;
+  ownerName: string;
+  repoName: string;
+}
diff --git a/src/monorepo/utils/monorepo-constants.ts b/src/monorepo/utils/monorepo-constants.ts
new file mode 100644
index 000000000..fc71e1bc5
--- /dev/null
+++ b/src/monorepo/utils/monorepo-constants.ts
@@ -0,0 +1,2 @@
+export const SNOOTY_TOML_FILENAME = 'snooty.toml';
+export const MONOREPO_NAME = 'docs-monorepo';
diff --git a/src/monorepo/utils/path-utils.ts b/src/monorepo/utils/path-utils.ts
new file mode 100644
index 000000000..307671e05
--- /dev/null
+++ b/src/monorepo/utils/path-utils.ts
@@ -0,0 +1,50 @@
+import { Commit } from '@octokit/webhooks-types';
+import { getOctokitClient } from '../../clients/githubClient';
+import { GitCommitInfo } from '../types/github-types';
+import { SNOOTY_TOML_FILENAME } from './monorepo-constants';
+
+/**
+ * Creates a `Set` of all `snooty.toml` paths within the monorepo.
+ * The function retrieves the monorepo's tree structure from GitHub.
+ */
+export async function getSnootyDirSet({ commitSha, ownerName, repoName }: GitCommitInfo): Promise<Set<string>> {
+  try {
+    const client = getOctokitClient();
+
+    // getting the repository tree for a given commit SHA. This returns an object
+    // with the property `tree` that is a flat array of all files in the repository.
+    // The tree array contains objects that hold the file path.
+    // Unlike the contents API for repositories, the actual file content is not returned.
+    const { data } = await client.request('GET /repos/{owner}/{repo}/git/trees/{tree_sha}', {
+      owner: ownerName,
+      repo: repoName,
+      tree_sha: commitSha,
+      recursive: 'true',
+    });
+
+    const snootyTomlDirs = data.tree
+      .filter((treeNode) => !!treeNode.path?.includes(SNOOTY_TOML_FILENAME))
+      .map((treeNode) => {
+        // casting the `treeNode.path` from `(string | undefined)` to `string` since the filter will ensure that the result
+        // only includes treeNode.path values that are defined and include snooty.toml
+        // in the path i.e. we will not have `undefined` as a value in the resulting array.
+        const path = treeNode.path as string;
+
+        // the - 1 is to remove the trailing slash
+        return path.slice(0, path.length - SNOOTY_TOML_FILENAME.length - 1);
+      });
+
+    const snootyDirSet = new Set(snootyTomlDirs);
+
+    return snootyDirSet;
+  } catch (error) {
+    console.error(
+      `ERROR! 
Unable to retrieve tree for SHA: ${commitSha} owner name: ${ownerName} repo name: ${repoName}`,
+      error
+    );
+    throw error;
+  }
+}
+
+export const getUpdatedFilePaths = (commit: Commit): string[] =>
+  commit.modified.concat(commit.added).concat(commit.removed);
diff --git a/src/repositories/docSetRepository.ts b/src/repositories/docSetRepository.ts
new file mode 100644
index 000000000..a2a2e25e8
--- /dev/null
+++ b/src/repositories/docSetRepository.ts
@@ -0,0 +1,45 @@
+import { IConfig } from 'config';
+import { Db } from 'mongodb';
+import { ILogger } from '../services/logger';
+import { BaseRepository } from './baseRepository';
+
+const docSetCollectionName = process.env.DOCS_SET_COLLECTION_NAME || 'docset';
+
+export class DocSetRepository extends BaseRepository {
+  constructor(db: Db, config: IConfig, logger: ILogger) {
+    super(config, logger, 'DocSetRepository', db.collection(docSetCollectionName));
+  }
+
+  /**
+   * Compares the project path from a monorepo push event with
+   * what is configured in the docset entry in Atlas.
+   * @param path The project path where the snooty.toml file exists from the monorepo.
+   * This path will reflect the current project path from a given commit.
+   * @param projectName The project name for the docset entry.
+   * @returns A boolean representing whether or not the configured docset entry snooty_toml path
+   * matches the path found in GitHub.
+   */
+  async checkSnootyTomlPath(path: string, projectName: string) {
+    const query = { project: projectName };
+    try {
+      const docSetObject = await this.findOne(
+        query,
+        `Mongo Timeout Error: Timed out while retrieving repos entry for ${path}`
+      );
+
+      if (!docSetObject) {
+        console.warn(`WARNING: The docset does not exist for the following project: ${projectName} \n path: ${path}`);
+
+        return false;
+      }
+
+      return docSetObject.directories.snooty_toml === path;
+    } catch (error) {
+      console.warn(
+        `WARNING: Error occurred when retrieving project path for ${projectName}. The following path was provided: ${path}`,
+        error
+      );
+      return false;
+    }
+  }
+}
diff --git a/tests/unit/monorepo/monorepo.test.ts b/tests/unit/monorepo/monorepo.test.ts
new file mode 100644
index 000000000..918d48e40
--- /dev/null
+++ b/tests/unit/monorepo/monorepo.test.ts
@@ -0,0 +1,98 @@
+import { Octokit } from '@octokit/rest';
+import { getMonorepoPaths } from '../../../src/monorepo';
+import { getOctokitClient } from '../../../src/clients/githubClient';
+import { mockDeep } from 'jest-mock-extended';
+
+jest.mock('../../../src/clients/githubClient');
+jest.mock('@octokit/rest');
+
+const mockedOctokit = mockDeep<Octokit>();
+
+beforeEach(() => {
+  jest.resetAllMocks();
+
+  const mockedGetOctokitClient = getOctokitClient as jest.MockedFunction<typeof getOctokitClient>;
+  mockedGetOctokitClient.mockReturnValue(mockedOctokit);
+});
+
+function mockOctokitTreeResponse(filePaths: string[]) {
+  // Partial representation of the GitHub API response that we care about.
+  // The response contains a property `tree` which is an array of objects.
+  const mockedResponse = {
+    data: {
+      tree: filePaths.map((path) => ({ path })),
+    },
+  };
+
+  jest
+    .spyOn(mockedOctokit, 'request')
+    .mockResolvedValueOnce(mockedResponse as unknown as ReturnType<typeof mockedOctokit.request>);
+}
+
+describe('Monorepo Path Parsing tests', () => {
+  it('Successfully finds project paths if snooty.toml is changed', async () => {
+    mockOctokitTreeResponse(['server-docs/source/datalake/snooty.toml', 'server-docs/snooty.toml']);
+
+    const paths = await getMonorepoPaths({
+      commitSha: '12345',
+      ownerName: 'mongodb',
+      repoName: 'monorepo',
+      updatedFilePaths: ['server-docs/snooty.toml', 'server-docs/source/datalake/snooty.toml'],
+    });
+
+    expect(paths).toContain('server-docs');
+    expect(paths).toContain('server-docs/source/datalake');
+  });
+
+  it('Successfully finds project paths based on changed files other than snooty.toml', async () => {
+    /**
+     * server-docs/source/datalake contains a snooty.toml file. We will reject once and then resolve
+     * once as this should mimic responses from the GitHub API.
+     */
+
+    mockOctokitTreeResponse(['server-docs/source/datalake/snooty.toml', 'server-docs/snooty.toml']);
+
+    const paths = await getMonorepoPaths({
+      commitSha: '12345',
+      ownerName: 'mongodb',
+      repoName: 'monorepo',
+      updatedFilePaths: ['server-docs/source/datalake/source/index.rst'],
+    });
+
+    expect(paths).toContain('server-docs/source/datalake');
+  });
+
+  it('Returns an empty array if there is no snooty.toml at any point in the file path', async () => {
+    mockOctokitTreeResponse(['server-docs/source/datalake/snooty.toml', 'server-docs/snooty.toml']);
+
+    const paths = await getMonorepoPaths({
+      commitSha: '12345',
+      ownerName: 'mongodb',
+      repoName: 'monorepo',
+      updatedFilePaths: ['bad/path/index.rst'],
+    });
+
+    expect(paths.length).toEqual(0);
+  });
+
+  it('Returns only one project path when two files in the same project are modified', async () => {
+    /**
+     * server-docs/source/datalake contains a snooty.toml file. We will reject once and then resolve
+     * once as this should mimic responses from the GitHub API.
+ */ + mockOctokitTreeResponse(['server-docs/source/datalake/snooty.toml', 'server-docs/snooty.toml']); + + const paths = await getMonorepoPaths({ + commitSha: '12345', + ownerName: 'mongodb', + repoName: 'monorepo', + updatedFilePaths: [ + 'server-docs/source/datalake/source/index.rst', + 'server-docs/source/datalake/source/test/index.rst', + ], + }); + + expect(paths).toContain('server-docs/source/datalake'); + expect(paths.length).toEqual(1); + }); +}); From 225a7e971ca19133d9f378c6c90cffc4d62828e8 Mon Sep 17 00:00:00 2001 From: Seung Park Date: Tue, 12 Sep 2023 11:31:34 -0400 Subject: [PATCH 08/15] DOP-3994: add feature flag for chatbot (#905) * add environment flag from param store * test front end change * wrong dockerfile * user master branch * test stack update * revert testing --- cdk-infra/lib/constructs/worker/worker-env-construct.ts | 4 ++++ config/custom-environment-variables.json | 1 + config/default.json | 1 + infrastructure/ecs-main/ecs_service.yml | 2 ++ infrastructure/ecs-main/serverless.yml | 1 + src/job/jobHandler.ts | 1 + tests/data/data.ts | 2 +- tests/unit/job/productionJobHandler.test.ts | 2 +- tests/utils/jobHandlerTestHelper.ts | 1 + 9 files changed, 13 insertions(+), 2 deletions(-) diff --git a/cdk-infra/lib/constructs/worker/worker-env-construct.ts b/cdk-infra/lib/constructs/worker/worker-env-construct.ts index d9f2fbbdb..f1d27ee07 100644 --- a/cdk-infra/lib/constructs/worker/worker-env-construct.ts +++ b/cdk-infra/lib/constructs/worker/worker-env-construct.ts @@ -36,6 +36,9 @@ export class WorkerEnvConstruct extends Construct { `/docs/worker_pool/preview_webhook/snooty_gatsby_cloud_test/data_source` ); + // front end feature flag for chatbot UI + const gatsbyUseChatbot = StringParameter.valueFromLookup(this, `${ssmPrefix}/flag/use_chatbot`); + const githubBotUsername = StringParameter.valueFromLookup(this, `${ssmPrefix}/github/bot/username`); const npmEmail = StringParameter.valueFromLookup(this, `${ssmPrefix}/npm/email`); @@ -80,6 +83,7 @@ export class WorkerEnvConstruct extends Construct { USE_CUSTOM_BUCKETS: `${getUseCustomBuckets()}`, FEATURE_NAME: `${getFeatureName()}`, GATSBY_TEST_SEARCH_UI: 'false', + GATSBY_SHOW_CHATBOT: gatsbyUseChatbot, }; } } diff --git a/config/custom-environment-variables.json b/config/custom-environment-variables.json index edd5c2434..011a147c7 100644 --- a/config/custom-environment-variables.json +++ b/config/custom-environment-variables.json @@ -24,6 +24,7 @@ "previewBuildEnabled": "PREVIEW_BUILD_ENABLED", "featureFlagUpdatePages": "FEATURE_FLAG_UPDATE_PAGES", "featureFlagSearchUI": "GATSBY_TEST_SEARCH_UI", + "gatsbyUseChatbot": "GATSBY_SHOW_CHATBOT", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", "repo_dir": "repos", "jobId": "jobId", diff --git a/config/default.json b/config/default.json index dec339ac1..4f88c8feb 100644 --- a/config/default.json +++ b/config/default.json @@ -32,6 +32,7 @@ "previewBuildEnabled": "false", "featureFlagUpdatePages": "false", "featureFlagSearchUI": "false", + "gatsbyUseChatbot": "false", "parallel": { "enabled": true, "stg": { diff --git a/infrastructure/ecs-main/ecs_service.yml b/infrastructure/ecs-main/ecs_service.yml index 4d8d63f19..f4b09982c 100644 --- a/infrastructure/ecs-main/ecs_service.yml +++ b/infrastructure/ecs-main/ecs_service.yml @@ -62,6 +62,8 @@ Resources: Value: ${self:custom.featureFlagUpdatePages} - Name: GATSBY_TEST_SEARCH_UI Value: ${self:custom.featureFlagSearchUI} + - Name: GATSBY_SHOW_CHATBOT + Value: ${self:custom.gatsbyUseChatbot} - Name: FASTLY_MAIN_TOKEN Value: 
${self:custom.fastlyMainToken} - Name: FASTLY_MAIN_SERVICE_ID diff --git a/infrastructure/ecs-main/serverless.yml b/infrastructure/ecs-main/serverless.yml index 0de1eb941..e06ca9a2c 100644 --- a/infrastructure/ecs-main/serverless.yml +++ b/infrastructure/ecs-main/serverless.yml @@ -117,6 +117,7 @@ custom: featureFlagUpdatePages: ${ssm:/env/${self:provider.stage}/docs/worker_pool/flag/update_pages} featureFlagSearchUI: ${ssm:/env/${self:provider.stage}/docs/worker_pool/flag/search_ui} gatsbyTestEmbedVersions: ${ssm:/env/${self:provider.stage}/docs/worker_pool/flag/embedded_versions} + gatsbyUseChatbot: ${ssm:/env/${self:provider.stage}/docs/worker_pool/flag/use_chatbot} fastlyMainToken: ${ssm:/env/${self:provider.stage}/docs/worker_pool/fastly/docs/main/token} fastlyMainServiceId: ${ssm:/env/${self:provider.stage}/docs/worker_pool/fastly/docs/main/service_id} fastlyCloudManagerToken: ${ssm:/env/${self:provider.stage}/docs/worker_pool/fastly/docs/cloudmanager/token} diff --git a/src/job/jobHandler.ts b/src/job/jobHandler.ts index 357bd2c27..baefa3b83 100644 --- a/src/job/jobHandler.ts +++ b/src/job/jobHandler.ts @@ -369,6 +369,7 @@ export abstract class JobHandler { GATSBY_BASE_URL: this._config.get('gatsbyBaseUrl'), PREVIEW_BUILD_ENABLED: this._config.get('previewBuildEnabled'), GATSBY_TEST_SEARCH_UI: this._config.get('featureFlagSearchUI'), + GATSBY_SHOW_CHATBOT: this._config.get('gatsbyUseChatbot'), }; for (const [envName, envValue] of Object.entries(snootyFrontEndVars)) { diff --git a/tests/data/data.ts b/tests/data/data.ts index fafca623a..d403e8cad 100644 --- a/tests/data/data.ts +++ b/tests/data/data.ts @@ -175,7 +175,7 @@ export class TestDataProvider { } static getEnvVarsWithPathPrefixWithFlags(job: Job): string { - return `GATSBY_PARSER_USER=TestUser\nGATSBY_PARSER_BRANCH=${job.payload.branchName}\nPATH_PREFIX=${job.payload.pathPrefix}\nGATSBY_BASE_URL=test\nPREVIEW_BUILD_ENABLED=false\nGATSBY_TEST_SEARCH_UI=false\n`; + return `GATSBY_PARSER_USER=TestUser\nGATSBY_PARSER_BRANCH=${job.payload.branchName}\nPATH_PREFIX=${job.payload.pathPrefix}\nGATSBY_BASE_URL=test\nPREVIEW_BUILD_ENABLED=false\nGATSBY_TEST_SEARCH_UI=false\nGATSBY_SHOW_CHATBOT=false\n`; } static getPathPrefixCases(): Array { diff --git a/tests/unit/job/productionJobHandler.test.ts b/tests/unit/job/productionJobHandler.test.ts index 187109286..00e5f4600 100644 --- a/tests/unit/job/productionJobHandler.test.ts +++ b/tests/unit/job/productionJobHandler.test.ts @@ -247,7 +247,7 @@ describe('ProductionJobHandler Tests', () => { expect(jobHandlerTestHelper.fileSystemServices.writeToFile).toBeCalledWith( `repos/${jobHandlerTestHelper.job.payload.repoName}/.env.production`, - `GATSBY_PARSER_USER=TestUser\nGATSBY_PARSER_BRANCH=${jobHandlerTestHelper.job.payload.branchName}\nPATH_PREFIX=/\nGATSBY_BASE_URL=test\nPREVIEW_BUILD_ENABLED=false\nGATSBY_TEST_SEARCH_UI=false\n`, + `GATSBY_PARSER_USER=TestUser\nGATSBY_PARSER_BRANCH=${jobHandlerTestHelper.job.payload.branchName}\nPATH_PREFIX=/\nGATSBY_BASE_URL=test\nPREVIEW_BUILD_ENABLED=false\nGATSBY_TEST_SEARCH_UI=false\nGATSBY_SHOW_CHATBOT=false\n`, { encoding: 'utf8', flag: 'w' } ); }); diff --git a/tests/utils/jobHandlerTestHelper.ts b/tests/utils/jobHandlerTestHelper.ts index ea24e8a1d..9d9e871e9 100644 --- a/tests/utils/jobHandlerTestHelper.ts +++ b/tests/utils/jobHandlerTestHelper.ts @@ -148,6 +148,7 @@ export class JobHandlerTestHelper { this.config.get.calledWith('gatsbyBaseUrl').mockReturnValue('test'); this.config.get.calledWith('previewBuildEnabled').mockReturnValue('false'); 
this.config.get.calledWith('featureFlagSearchUI').mockReturnValue('false'); + this.config.get.calledWith('gatsbyUseChatbot').mockReturnValue('false'); this.repoConnector.checkCommits .calledWith(this.job) .mockReturnValue(TestDataProvider.getCommitCheckValidResponse(this.job)); From dbb00e83562253f91e0336d59d5e02cf1ecce7bd Mon Sep 17 00:00:00 2001 From: Brandon Ly Date: Fri, 15 Sep 2023 08:36:16 -0500 Subject: [PATCH 09/15] [BUGFIX]: Fix duplicate parameter deploy error (#907) --- cdk-infra/lib/constructs/api/webhook-env-construct.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/cdk-infra/lib/constructs/api/webhook-env-construct.ts b/cdk-infra/lib/constructs/api/webhook-env-construct.ts index 2aefada52..eec5651ff 100644 --- a/cdk-infra/lib/constructs/api/webhook-env-construct.ts +++ b/cdk-infra/lib/constructs/api/webhook-env-construct.ts @@ -3,7 +3,7 @@ import { StringParameter } from 'aws-cdk-lib/aws-ssm'; import { Construct } from 'constructs'; import { getSsmPathPrefix } from '../../../utils/ssm'; import { getDashboardUrl } from '../../../utils/slack'; -import { getEnv } from '../../../utils/env'; +import { getEnv, getFeatureName } from '../../../utils/env'; interface WebhookEnvConstructProps { jobsQueue: IQueue; @@ -18,10 +18,11 @@ export class WebhookEnvConstruct extends Construct { const ssmPrefix = getSsmPathPrefix(); const env = getEnv(); + const featureName = getFeatureName(); // Create configurable feature flag that lives in parameter store. const monorepoPathFeature = new StringParameter(this, 'monorepoPathFeature', { - parameterName: `${ssmPrefix}/monorepo/path_feature`, + parameterName: `${ssmPrefix}/${featureName}/monorepo/path_feature`, stringValue: env === 'dotcomstg' || env === 'stg' ? 'true' : 'false', }); From 2d8b0da2b41b95a9eee82081c401af5fb558abcc Mon Sep 17 00:00:00 2001 From: Cassidy Schaufele Date: Wed, 20 Sep 2023 07:48:11 -0600 Subject: [PATCH 10/15] Update Dockerfile.enhanced Updates for 9/14 release --- Dockerfile.enhanced | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile.enhanced b/Dockerfile.enhanced index fb530489f..0dcf5d7bf 100644 --- a/Dockerfile.enhanced +++ b/Dockerfile.enhanced @@ -22,8 +22,8 @@ RUN cd ./modules/oas-page-builder \ # where repo work will happen FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge -ARG SNOOTY_PARSER_VERSION=0.14.6 -ARG SNOOTY_FRONTEND_VERSION=0.14.13 +ARG SNOOTY_PARSER_VERSION=0.14.8 +ARG SNOOTY_FRONTEND_VERSION=0.14.14 ARG MUT_VERSION=0.10.5 ARG REDOC_CLI_VERSION=1.2.0 ARG NPM_BASE_64_AUTH From 313fb86e46cf1a17841c9dedb5ad083e9b340a97 Mon Sep 17 00:00:00 2001 From: Seung Park Date: Wed, 20 Sep 2023 13:05:04 -0400 Subject: [PATCH 11/15] Update mut versions in Dockerfiles (#910) * update mut versions in dockerfiles * update enhanced Dockerfile * test on preprd * remove test --- Dockerfile | 2 +- Dockerfile.enhanced | 2 +- Dockerfile.legacy | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/Dockerfile b/Dockerfile index 0554f2afc..9adaee0b4 100644 --- a/Dockerfile +++ b/Dockerfile @@ -24,7 +24,7 @@ FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge ARG SNOOTY_PARSER_VERSION=0.14.8 ARG SNOOTY_FRONTEND_VERSION=0.14.14 -ARG MUT_VERSION=0.10.3 +ARG MUT_VERSION=0.10.6 ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH ARG NPM_EMAIL diff --git a/Dockerfile.enhanced b/Dockerfile.enhanced index 0dcf5d7bf..4ab6bdad4 100644 --- a/Dockerfile.enhanced +++ b/Dockerfile.enhanced @@ -24,7 +24,7 @@ FROM ubuntu:20.04 ARG 
WORK_DIRECTORY=/home/docsworker-xlarge ARG SNOOTY_PARSER_VERSION=0.14.8 ARG SNOOTY_FRONTEND_VERSION=0.14.14 -ARG MUT_VERSION=0.10.5 +ARG MUT_VERSION=0.10.6 ARG REDOC_CLI_VERSION=1.2.0 ARG NPM_BASE_64_AUTH ARG NPM_EMAIL diff --git a/Dockerfile.legacy b/Dockerfile.legacy index eb58d4c6c..06a41338f 100644 --- a/Dockerfile.legacy +++ b/Dockerfile.legacy @@ -2,7 +2,7 @@ # The solution is probably to switch to Ubuntu 22.04 at that point, which is supported until 2027. FROM node:14-bullseye-slim -ARG MUT_VERSION=0.10.3 +ARG MUT_VERSION=0.10.6 ENV PATH="${PATH}:/opt/mut" From d6d309171f501a5639243fd6aec50cb7e3fe0c3c Mon Sep 17 00:00:00 2001 From: Allison Reinheimer Moore Date: Thu, 21 Sep 2023 10:49:57 -0400 Subject: [PATCH 12/15] bump Dockerfiles for 2023-09-21 release (#911) Release doc for MongoDB employees: https://docs.google.com/document/d/14rvqpqOYoDIDUmIoxhH-2LJcx7U57pMoYTGbjXDSCjY/edit --- Dockerfile | 6 +++--- Dockerfile.enhanced | 8 ++++---- 2 files changed, 7 insertions(+), 7 deletions(-) diff --git a/Dockerfile b/Dockerfile index 9adaee0b4..06d973d13 100644 --- a/Dockerfile +++ b/Dockerfile @@ -22,9 +22,9 @@ RUN cd ./modules/oas-page-builder \ # where repo work will happen FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge -ARG SNOOTY_PARSER_VERSION=0.14.8 -ARG SNOOTY_FRONTEND_VERSION=0.14.14 -ARG MUT_VERSION=0.10.6 +ARG SNOOTY_PARSER_VERSION=0.14.9 +ARG SNOOTY_FRONTEND_VERSION=0.14.16 +ARG MUT_VERSION=0.10.7 ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH ARG NPM_EMAIL diff --git a/Dockerfile.enhanced b/Dockerfile.enhanced index 4ab6bdad4..0639de58d 100644 --- a/Dockerfile.enhanced +++ b/Dockerfile.enhanced @@ -22,10 +22,10 @@ RUN cd ./modules/oas-page-builder \ # where repo work will happen FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge -ARG SNOOTY_PARSER_VERSION=0.14.8 -ARG SNOOTY_FRONTEND_VERSION=0.14.14 -ARG MUT_VERSION=0.10.6 -ARG REDOC_CLI_VERSION=1.2.0 +ARG SNOOTY_PARSER_VERSION=0.14.9 +ARG SNOOTY_FRONTEND_VERSION=0.14.16 +ARG MUT_VERSION=0.10.7 +ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH ARG NPM_EMAIL ENV DEBIAN_FRONTEND=noninteractive From c5d2b371c7d9dbf5df0dbcf631c2f19e2ac3b47e Mon Sep 17 00:00:00 2001 From: Allison Reinheimer Moore Date: Thu, 21 Sep 2023 15:49:34 -0400 Subject: [PATCH 13/15] update Dockerfiles for v0.14.17 of front-end --- Dockerfile | 2 +- Dockerfile.enhanced | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/Dockerfile b/Dockerfile index 06d973d13..05738922b 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,7 +23,7 @@ RUN cd ./modules/oas-page-builder \ FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge ARG SNOOTY_PARSER_VERSION=0.14.9 -ARG SNOOTY_FRONTEND_VERSION=0.14.16 +ARG SNOOTY_FRONTEND_VERSION=0.14.17 ARG MUT_VERSION=0.10.7 ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH diff --git a/Dockerfile.enhanced b/Dockerfile.enhanced index 0639de58d..e88280eda 100644 --- a/Dockerfile.enhanced +++ b/Dockerfile.enhanced @@ -23,7 +23,7 @@ RUN cd ./modules/oas-page-builder \ FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge ARG SNOOTY_PARSER_VERSION=0.14.9 -ARG SNOOTY_FRONTEND_VERSION=0.14.16 +ARG SNOOTY_FRONTEND_VERSION=0.14.17 ARG MUT_VERSION=0.10.7 ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH From c007d0fa31488925117b8eb4e73adbfd4599fd33 Mon Sep 17 00:00:00 2001 From: Allison Reinheimer Moore Date: Fri, 22 Sep 2023 09:50:47 -0400 Subject: [PATCH 14/15] bump front-end to v0.14.18 (#914) --- Dockerfile | 2 +- Dockerfile.enhanced | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) 
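
A note on the release patches above (10 through 15's Dockerfile bumps): they keep the same ARG
pins updated by hand across Dockerfile, Dockerfile.enhanced, and Dockerfile.legacy, and the
2023-09-21 release also had to correct a drift between the two main files (REDOC_CLI_VERSION
1.2.0 vs 1.2.2). A small sketch of a guard that could catch that kind of drift, assuming a
Node/TypeScript environment and the Dockerfile paths used in this repo; the script itself is
hypothetical and not part of this series:

    import { readFileSync } from 'fs';

    // Collect `ARG NAME=value` pins from a Dockerfile.
    function readArgPins(path: string): Map<string, string> {
      const pins = new Map<string, string>();
      for (const line of readFileSync(path, 'utf8').split('\n')) {
        const match = line.match(/^ARG\s+([A-Z0-9_]+)=(\S+)/);
        if (match) pins.set(match[1], match[2]);
      }
      return pins;
    }

    const base = readArgPins('Dockerfile');
    const enhanced = readArgPins('Dockerfile.enhanced');

    // Report pins present in both files that disagree, e.g. a REDOC_CLI_VERSION drift.
    for (const [name, value] of base) {
      const other = enhanced.get(name);
      if (other !== undefined && other !== value) {
        console.error(`Version drift for ${name}: Dockerfile has ${value}, Dockerfile.enhanced has ${other}`);
        process.exitCode = 1;
      }
    }
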
diff --git a/Dockerfile b/Dockerfile index 05738922b..ec34c3ed3 100644 --- a/Dockerfile +++ b/Dockerfile @@ -23,7 +23,7 @@ RUN cd ./modules/oas-page-builder \ FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge ARG SNOOTY_PARSER_VERSION=0.14.9 -ARG SNOOTY_FRONTEND_VERSION=0.14.17 +ARG SNOOTY_FRONTEND_VERSION=0.14.18 ARG MUT_VERSION=0.10.7 ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH diff --git a/Dockerfile.enhanced b/Dockerfile.enhanced index e88280eda..14430b326 100644 --- a/Dockerfile.enhanced +++ b/Dockerfile.enhanced @@ -23,7 +23,7 @@ RUN cd ./modules/oas-page-builder \ FROM ubuntu:20.04 ARG WORK_DIRECTORY=/home/docsworker-xlarge ARG SNOOTY_PARSER_VERSION=0.14.9 -ARG SNOOTY_FRONTEND_VERSION=0.14.17 +ARG SNOOTY_FRONTEND_VERSION=0.14.18 ARG MUT_VERSION=0.10.7 ARG REDOC_CLI_VERSION=1.2.2 ARG NPM_BASE_64_AUTH From 01be34bc2a0698da896c63f544e17a738b106c78 Mon Sep 17 00:00:00 2001 From: mmeigs Date: Fri, 22 Sep 2023 10:51:25 -0400 Subject: [PATCH 15/15] DOP-4020: repos_branches to docsets (#909) * find todos in main RepoBranchesRepository, consolidate slack helpers * delete unused RepoBranchesRepository method * replace getRepo everywhere with new DocsetsRepository getRepo * add docsetsRepository to each JobHandler and test * add docsetsRepo to JobHandlers * more test finds, missing references * added env vars for docsetCollection * modify tests * set to stage ecs * add docsets to parameter store and serverless * more env vars * persistence module repos_branches migrate to docsets * work on tests in persistence module * dont use preprd * fixed persistence tests * merge two separate docsetsRepos * deploy preprd * use .env * debug failing test * all tests passing * PR feedback * fix test * fix v2 to match v1, double query * add warning to checkSnootyTomlPath --- api/config/custom-environment-variables.json | 1 + api/config/default.json | 1 + api/controllers/v1/github.ts | 17 ++- api/controllers/v1/jobs.ts | 4 +- api/controllers/v1/slack.ts | 67 +++------- api/controllers/v2/github.ts | 13 +- api/controllers/v2/slack.ts | 67 +++------- api/handlers/github.ts | 6 +- api/handlers/slack.ts | 49 +++++++ .../constructs/api/webhook-env-construct.ts | 2 + .../constructs/worker/worker-env-construct.ts | 2 + .../config/custom-environment-variables.json | 1 + cdk-infra/static/api/config/default.json | 1 + config/custom-environment-variables.json | 2 +- config/default.json | 2 +- config/test.json | 2 +- infrastructure/ecs-main/ecs_service.yml | 4 +- infrastructure/ecs-main/serverless.yml | 1 + .../services/metadata/repos_branches/index.ts | 68 +++++++--- modules/persistence/tests/data/docsets.json | 63 +++++++++ .../associated_products.test.ts.snap | 2 - modules/persistence/tests/utils.ts | 2 + serverless.yml | 4 +- src/app.ts | 6 +- src/enhanced/job/enhancedJobHandlerFactory.ts | 3 + src/enhanced/job/enhancedJobHandlers.ts | 2 +- src/enhanced/utils/job/handle-job.ts | 6 +- src/job/jobHandler.ts | 6 +- src/job/jobManager.ts | 7 + src/job/jobValidator.ts | 8 +- src/job/manifestJobHandler.ts | 3 + src/job/productionJobHandler.ts | 3 + src/job/regressionJobHandler.ts | 3 + src/job/stagingJobHandler.ts | 3 + src/onDemandApp.ts | 7 +- src/repositories/baseRepository.ts | 21 +++ src/repositories/docSetRepository.ts | 45 ------- src/repositories/docsetsRepository.ts | 120 ++++++++++++++++++ src/repositories/repoBranchesRepository.ts | 45 ------- tests/data/data.ts | 40 ++++++ tests/mongo/testDBManager.ts | 2 + tests/unit/api/github.test.ts | 6 +- tests/unit/api/slack.test.ts | 26 +++- 
tests/unit/job/JobHandlerFactory.test.ts | 5 + tests/unit/job/jobValidator.test.ts | 17 ++- tests/unit/job/productionJobHandler.test.ts | 2 +- tests/unit/jobManager.test.ts | 10 +- .../repositories/docsetsRepository.test.ts | 55 ++++++++ .../repoBranchesRepository.test.ts | 52 -------- tests/utils/jobHandlerTestHelper.ts | 4 + tests/utils/repositoryHelper.ts | 5 + 51 files changed, 592 insertions(+), 301 deletions(-) create mode 100644 api/handlers/slack.ts create mode 100644 modules/persistence/tests/data/docsets.json delete mode 100644 src/repositories/docSetRepository.ts create mode 100644 src/repositories/docsetsRepository.ts create mode 100644 tests/unit/repositories/docsetsRepository.test.ts delete mode 100644 tests/unit/repositories/repoBranchesRepository.test.ts diff --git a/api/config/custom-environment-variables.json b/api/config/custom-environment-variables.json index 65ca3f515..4199fbce2 100644 --- a/api/config/custom-environment-variables.json +++ b/api/config/custom-environment-variables.json @@ -20,6 +20,7 @@ "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "dashboardUrl": "DASHBOARD_URL", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "taskDefinitionFamily": "TASK_DEFINITION_FAMILY", "jobsQueueUrl": "JOBS_QUEUE_URL", "jobUpdatesQueueUrl": "JOB_UPDATES_QUEUE_URL", diff --git a/api/config/default.json b/api/config/default.json index 4118bfd2b..16d56ce57 100644 --- a/api/config/default.json +++ b/api/config/default.json @@ -19,6 +19,7 @@ "jobQueueCollection": "JOB_QUEUE_COL_NAME", "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "MONGO_TIMEOUT_S": 15, "JOB_TIMEOUT_S": 900, "RETRY_TIMEOUT_MS": 5000, diff --git a/api/controllers/v1/github.ts b/api/controllers/v1/github.ts index 8b239d10c..36471662a 100644 --- a/api/controllers/v1/github.ts +++ b/api/controllers/v1/github.ts @@ -4,13 +4,19 @@ import { JobRepository } from '../../../src/repositories/jobRepository'; import { ConsoleLogger } from '../../../src/services/logger'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; +import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/repos_branches'; -async function prepGithubPushPayload(githubEvent: any, repoBranchesRepository: RepoBranchesRepository, prefix: string) { +async function prepGithubPushPayload( + githubEvent: any, + repoBranchesRepository: RepoBranchesRepository, + prefix: string, + repoInfo: ReposBranchesDocument +) { const branch_name = githubEvent.ref.split('/')[2]; const branch_info = await repoBranchesRepository.getRepoBranchAliases(githubEvent.repository.name, branch_name); const urlSlug = branch_info.aliasObject?.urlSlug ?? branch_name; - const repo_info = await repoBranchesRepository.getRepo(githubEvent.repository.name); - const project = repo_info?.project ?? githubEvent.repository.name; + const project = repoInfo?.project ?? 
githubEvent.repository.name; return { title: githubEvent.repository.full_name, @@ -57,6 +63,7 @@ export const TriggerBuild = async (event: any = {}, context: any = {}): Promise< const consoleLogger = new ConsoleLogger(); const jobRepository = new JobRepository(db, c, consoleLogger); const repoBranchesRepository = new RepoBranchesRepository(db, c, consoleLogger); + const docsetsRepository = new DocsetsRepository(db, c, consoleLogger); if (!validateJsonWebhook(event, c.get('githubSecret'))) { const errMsg = "X-Hub-Signature incorrect. Github webhook token doesn't match"; @@ -77,10 +84,10 @@ export const TriggerBuild = async (event: any = {}, context: any = {}): Promise< } const env = c.get('env'); - const repoInfo = await repoBranchesRepository.getRepo(body.repository.name); + const repoInfo = await docsetsRepository.getRepo(body.repository.name); const jobPrefix = repoInfo?.prefix ? repoInfo['prefix'][env] : ''; // TODO: Make job be of type Job - const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix); + const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix, repoInfo); try { consoleLogger.info(job.title, 'Creating Job'); const jobId = await jobRepository.insertJob(job, c.get('jobsQueueUrl')); diff --git a/api/controllers/v1/jobs.ts b/api/controllers/v1/jobs.ts index 652b6d226..38ea304da 100644 --- a/api/controllers/v1/jobs.ts +++ b/api/controllers/v1/jobs.ts @@ -2,7 +2,6 @@ import * as c from 'config'; import * as mongodb from 'mongodb'; import { IConfig } from 'config'; import { RepoEntitlementsRepository } from '../../../src/repositories/repoEntitlementsRepository'; -import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { ConsoleLogger } from '../../../src/services/logger'; import { SlackConnector } from '../../../src/services/slack'; import { JobRepository } from '../../../src/repositories/jobRepository'; @@ -12,6 +11,7 @@ import { ECSContainer } from '../../../src/services/containerServices'; import { SQSConnector } from '../../../src/services/queue'; import { Batch } from '../../../src/services/batch'; import { notifyBuildSummary, snootyBuildComplete } from '../../handlers/jobs'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; export const TriggerLocalBuild = async (event: any = {}, context: any = {}): Promise => { const client = new mongodb.MongoClient(c.get('dbUrl')); @@ -258,7 +258,7 @@ async function SubmitArchiveJob(jobId: string) { const db = client.db(c.get('dbName')); const models = { jobs: new JobRepository(db, c, consoleLogger), - repoBranches: new RepoBranchesRepository(db, c, consoleLogger), + repoBranches: new DocsetsRepository(db, c, consoleLogger), }; const job = await models.jobs.getJobById(jobId); const repo = await models.repoBranches.getRepo(job.payload.repoName); diff --git a/api/controllers/v1/slack.ts b/api/controllers/v1/slack.ts index 25fecb35d..f34665af8 100644 --- a/api/controllers/v1/slack.ts +++ b/api/controllers/v1/slack.ts @@ -5,53 +5,14 @@ import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRe import { ConsoleLogger, ILogger } from '../../../src/services/logger'; import { SlackConnector } from '../../../src/services/slack'; import { JobRepository } from '../../../src/repositories/jobRepository'; - -function isUserEntitled(entitlementsObject: any): boolean { - return (entitlementsObject?.repos?.length ?? 
0) > 0; -} - -function isRestrictedToDeploy(userId: string): boolean { - const { restrictedProdDeploy, entitledSlackUsers } = c.get('prodDeploy'); - return restrictedProdDeploy && !entitledSlackUsers.includes(userId); -} - -function prepResponse(statusCode, contentType, body) { - return { - statusCode: statusCode, - headers: { 'Content-Type': contentType }, - body: body, - }; -} - -async function buildEntitledBranchList(entitlement: any, repoBranchesRepository: RepoBranchesRepository) { - const entitledBranches: string[] = []; - for (const repo of entitlement.repos) { - const [repoOwner, repoName] = repo.split('/'); - const branches = await repoBranchesRepository.getRepoBranches(repoName); - for (const branch of branches) { - let buildWithSnooty = true; - if ('buildsWithSnooty' in branch) { - buildWithSnooty = branch['buildsWithSnooty']; - } - if (buildWithSnooty) { - entitledBranches.push(`${repoOwner}/${repoName}/${branch['gitBranchName']}`); - } - } - } - return entitledBranches.sort(); -} - -function getQSString(qs: string) { - const key_val = {}; - const arr = qs.split('&'); - if (arr) { - arr.forEach((keyval) => { - const kvpair = keyval.split('='); - key_val[kvpair[0]] = kvpair[1]; - }); - } - return key_val; -} +import { + buildEntitledBranchList, + getQSString, + isRestrictedToDeploy, + isUserEntitled, + prepResponse, +} from '../../handlers/slack'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; export const DisplayRepoOptions = async (event: any = {}, context: any = {}): Promise => { const consoleLogger = new ConsoleLogger(); @@ -102,7 +63,12 @@ const deployHelper = (deployable, payload, jobTitle, jobUserName, jobUserEmail) // For every repo/branch selected to be deployed, return an array of jobs with the payload data // needed for a successful build. -export const getDeployableJobs = async (values, entitlement, repoBranchesRepository: RepoBranchesRepository) => { +export const getDeployableJobs = async ( + values, + entitlement, + repoBranchesRepository: RepoBranchesRepository, + docsetsRepository: DocsetsRepository +) => { const deployable = []; for (let i = 0; i < values.repo_option.length; i++) { @@ -113,7 +79,7 @@ export const getDeployableJobs = async (values, entitlement, repoBranchesReposit const jobUserName = entitlement.github_username; const jobUserEmail = entitlement?.email ?? 
''; - const repoInfo = await repoBranchesRepository.getRepo(repoName); + const repoInfo = await docsetsRepository.getRepo(repoName); const non_versioned = repoInfo.branches.length === 1; const branchObject = await repoBranchesRepository.getRepoBranchAliases(repoName, branchName); @@ -196,6 +162,7 @@ export const DeployRepo = async (event: any = {}, context: any = {}): Promise 0) { await deployRepo(deployable, consoleLogger, jobRepository, c.get('jobsQueueUrl')); } diff --git a/api/controllers/v2/github.ts b/api/controllers/v2/github.ts index e50842d51..bfa145bec 100644 --- a/api/controllers/v2/github.ts +++ b/api/controllers/v2/github.ts @@ -8,19 +8,21 @@ import { ConsoleLogger } from '../../../src/services/logger'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { EnhancedJob, JobStatus } from '../../../src/entities/job'; import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; import { getMonorepoPaths } from '../../../src/monorepo'; import { getUpdatedFilePaths } from '../../../src/monorepo/utils/path-utils'; +import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/associated_products'; async function prepGithubPushPayload( githubEvent: PushEvent, repoBranchesRepository: RepoBranchesRepository, - prefix: string + prefix: string, + repoInfo: ReposBranchesDocument ): Promise> { const branch_name = githubEvent.ref.split('/')[2]; const branch_info = await repoBranchesRepository.getRepoBranchAliases(githubEvent.repository.name, branch_name); const urlSlug = branch_info.aliasObject?.urlSlug ?? branch_name; - const repo_info = await repoBranchesRepository.getRepo(githubEvent.repository.name); - const project = repo_info?.project ?? githubEvent.repository.name; + const project = repoInfo?.project ?? githubEvent.repository.name; return { title: githubEvent.repository.full_name, @@ -58,6 +60,7 @@ export const TriggerBuild = async (event: APIGatewayEvent): Promise('env'); - const repoInfo = await repoBranchesRepository.getRepo(body.repository.name); + const repoInfo = await docsetsRepository.getRepo(body.repository.name); const jobPrefix = repoInfo?.prefix ? repoInfo['prefix'][env] : ''; - const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix); + const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix, repoInfo); if (process.env.MONOREPO_PATH_FEATURE === 'true') { try { diff --git a/api/controllers/v2/slack.ts b/api/controllers/v2/slack.ts index d3794d9bf..7e8c2d2ec 100644 --- a/api/controllers/v2/slack.ts +++ b/api/controllers/v2/slack.ts @@ -7,53 +7,14 @@ import { SlackConnector } from '../../../src/services/slack'; import { JobRepository } from '../../../src/repositories/jobRepository'; import { APIGatewayEvent, APIGatewayProxyResult } from 'aws-lambda'; import { JobStatus } from '../../../src/entities/job'; - -function isUserEntitled(entitlementsObject: any): boolean { - return (entitlementsObject?.repos?.length ?? 
0) > 0; -} - -function isRestrictedToDeploy(userId: string): boolean { - const { restrictedProdDeploy, entitledSlackUsers } = c.get('prodDeploy'); - return restrictedProdDeploy && !entitledSlackUsers.includes(userId); -} - -function prepResponse(statusCode, contentType, body) { - return { - statusCode: statusCode, - headers: { 'Content-Type': contentType }, - body: body, - }; -} - -async function buildEntitledBranchList(entitlement: any, repoBranchesRepository: RepoBranchesRepository) { - const entitledBranches: string[] = []; - for (const repo of entitlement.repos) { - const [repoOwner, repoName] = repo.split('/'); - const branches = await repoBranchesRepository.getRepoBranches(repoName); - for (const branch of branches) { - let buildWithSnooty = true; - if ('buildsWithSnooty' in branch) { - buildWithSnooty = branch['buildsWithSnooty']; - } - if (buildWithSnooty) { - entitledBranches.push(`${repoOwner}/${repoName}/${branch['gitBranchName']}`); - } - } - } - return entitledBranches.sort(); -} - -function getQSString(qs: string) { - const key_val = {}; - const arr = qs.split('&'); - if (arr) { - arr.forEach((keyval) => { - const kvpair = keyval.split('='); - key_val[kvpair[0]] = kvpair[1]; - }); - } - return key_val; -} +import { + buildEntitledBranchList, + getQSString, + isRestrictedToDeploy, + isUserEntitled, + prepResponse, +} from '../../handlers/slack'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; export const DisplayRepoOptions = async (event: APIGatewayEvent): Promise => { const consoleLogger = new ConsoleLogger(); @@ -119,7 +80,12 @@ const deployHelper = (deployable, payload, jobTitle, jobUserName, jobUserEmail) // For every repo/branch selected to be deployed, return an array of jobs with the payload data // needed for a successful build. -export const getDeployableJobs = async (values, entitlement, repoBranchesRepository: RepoBranchesRepository) => { +export const getDeployableJobs = async ( + values, + entitlement, + repoBranchesRepository: RepoBranchesRepository, + docsetsRepository: DocsetsRepository +) => { const deployable = []; for (let i = 0; i < values.repo_option.length; i++) { @@ -130,7 +96,7 @@ export const getDeployableJobs = async (values, entitlement, repoBranchesReposit const jobUserName = entitlement.github_username; const jobUserEmail = entitlement?.email ?? 
''; - const repoInfo = await repoBranchesRepository.getRepo(repoName); + const repoInfo = await docsetsRepository.getRepo(repoName); const non_versioned = repoInfo.branches.length === 1; const branchObject = await repoBranchesRepository.getRepoBranchAliases(repoName, branchName); @@ -220,6 +186,7 @@ export const DeployRepo = async (event: APIGatewayEvent): Promise 0) { await deployRepo(deployable, consoleLogger, jobRepository, c.get('jobsQueueUrl')); } diff --git a/api/handlers/github.ts b/api/handlers/github.ts index 3906a2cbb..ab2eb6d2c 100644 --- a/api/handlers/github.ts +++ b/api/handlers/github.ts @@ -4,7 +4,7 @@ import * as mongodb from 'mongodb'; import { APIGatewayEvent } from 'aws-lambda'; import { PullRequestEvent } from '@octokit/webhooks-types'; import { ConsoleLogger } from '../../src/services/logger'; -import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../src/repositories/docsetsRepository'; import { UpdatedDocsRepository } from '../../src/repositories/updatedDocsRepository'; import { MetadataRepository } from '../../src/repositories/metadataRepository'; @@ -94,8 +94,8 @@ export const markBuildArtifactsForDeletion = async (event: APIGatewayEvent) => { try { await client.connect(); const poolDb = client.db(c.get('dbName')); - const repoBranchesRepository = new RepoBranchesRepository(poolDb, c, consoleLogger); - const project = (await repoBranchesRepository.getProjectByRepoName(repository.name)) as string; + const docsetsRepository = new DocsetsRepository(poolDb, c, consoleLogger); + const project = (await docsetsRepository.getProjectByRepoName(repository.name)) as string; // Start marking build artifacts for deletion const snootyDb = client.db(c.get('snootyDbName')); diff --git a/api/handlers/slack.ts b/api/handlers/slack.ts new file mode 100644 index 000000000..2ceabc94d --- /dev/null +++ b/api/handlers/slack.ts @@ -0,0 +1,49 @@ +import * as c from 'config'; +import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; + +export function isUserEntitled(entitlementsObject: any): boolean { + return (entitlementsObject?.repos?.length ?? 
0) > 0; +} + +export function isRestrictedToDeploy(userId: string): boolean { + const { restrictedProdDeploy, entitledSlackUsers } = c.get('prodDeploy'); + return restrictedProdDeploy && !entitledSlackUsers.includes(userId); +} + +export function prepResponse(statusCode, contentType, body) { + return { + statusCode: statusCode, + headers: { 'Content-Type': contentType }, + body: body, + }; +} + +export async function buildEntitledBranchList(entitlement: any, repoBranchesRepository: RepoBranchesRepository) { + const entitledBranches: string[] = []; + for (const repo of entitlement.repos) { + const [repoOwner, repoName] = repo.split('/'); + const branches = await repoBranchesRepository.getRepoBranches(repoName); + for (const branch of branches) { + let buildWithSnooty = true; + if ('buildsWithSnooty' in branch) { + buildWithSnooty = branch['buildsWithSnooty']; + } + if (buildWithSnooty) { + entitledBranches.push(`${repoOwner}/${repoName}/${branch['gitBranchName']}`); + } + } + } + return entitledBranches.sort(); +} + +export function getQSString(qs: string) { + const key_val = {}; + const arr = qs.split('&'); + if (arr) { + arr.forEach((keyval) => { + const kvpair = keyval.split('='); + key_val[kvpair[0]] = kvpair[1]; + }); + } + return key_val; +} diff --git a/cdk-infra/lib/constructs/api/webhook-env-construct.ts b/cdk-infra/lib/constructs/api/webhook-env-construct.ts index eec5651ff..8daf0af50 100644 --- a/cdk-infra/lib/constructs/api/webhook-env-construct.ts +++ b/cdk-infra/lib/constructs/api/webhook-env-construct.ts @@ -29,6 +29,7 @@ export class WebhookEnvConstruct extends Construct { const dbName = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/dbname`); const snootyDbName = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/snooty`); const repoBranchesCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/repo`); + const docsetsCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/docsets`); const dbUsername = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/username`); const dbHost = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/host`); const jobCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/job/queue`); @@ -46,6 +47,7 @@ export class WebhookEnvConstruct extends Construct { DB_NAME: dbName, SNOOTY_DB_NAME: snootyDbName, REPO_BRANCHES_COL_NAME: repoBranchesCollection, + DOCSETS_COL_NAME: docsetsCollection, JOB_QUEUE_COL_NAME: jobCollection, NODE_CONFIG_DIR: './config', JOBS_QUEUE_URL: jobsQueue.queueUrl, diff --git a/cdk-infra/lib/constructs/worker/worker-env-construct.ts b/cdk-infra/lib/constructs/worker/worker-env-construct.ts index f1d27ee07..a0313e3bb 100644 --- a/cdk-infra/lib/constructs/worker/worker-env-construct.ts +++ b/cdk-infra/lib/constructs/worker/worker-env-construct.ts @@ -52,6 +52,7 @@ export class WorkerEnvConstruct extends Construct { `${ssmPrefix}/atlas/collections/user/entitlements` ); const repoBranchesCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/repo`); + const docsetsCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/docsets`); const jobCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/job/queue`); const dbPassword = secureStrings['MONGO_ATLAS_PASSWORD']; @@ -75,6 +76,7 @@ export class WorkerEnvConstruct extends Construct { USER_ENTITLEMENT_COL_NAME: entitlementCollection, NPM_EMAIL: npmEmail, REPO_BRANCHES_COL_NAME: 
repoBranchesCollection, + DOCSETS_COL_NAME: docsetsCollection, JOB_QUEUE_COL_NAME: jobCollection, CDN_INVALIDATOR_SERVICE_URL: getCdnInvalidatorUrl(env), SEARCH_INDEX_BUCKET: 'docs-search-indexes-test', diff --git a/cdk-infra/static/api/config/custom-environment-variables.json b/cdk-infra/static/api/config/custom-environment-variables.json index 44dbc915e..8d554d1b5 100644 --- a/cdk-infra/static/api/config/custom-environment-variables.json +++ b/cdk-infra/static/api/config/custom-environment-variables.json @@ -20,6 +20,7 @@ "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "dashboardUrl": "DASHBOARD_URL", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "taskDefinitionFamily": "TASK_DEFINITION_FAMILY", "jobsQueueUrl": "JOBS_QUEUE_URL", "jobUpdatesQueueUrl": "JOB_UPDATES_QUEUE_URL", diff --git a/cdk-infra/static/api/config/default.json b/cdk-infra/static/api/config/default.json index c2f6f8e4d..3758dea37 100644 --- a/cdk-infra/static/api/config/default.json +++ b/cdk-infra/static/api/config/default.json @@ -19,6 +19,7 @@ "jobQueueCollection": "JOB_QUEUE_COL_NAME", "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "MONGO_TIMEOUT_S": 15, "JOB_TIMEOUT_S": 900, "RETRY_TIMEOUT_MS": 5000, diff --git a/config/custom-environment-variables.json b/config/custom-environment-variables.json index 011a147c7..49043b253 100644 --- a/config/custom-environment-variables.json +++ b/config/custom-environment-variables.json @@ -14,7 +14,6 @@ "githubBotPW": "GITHUB_BOT_PASSWORD", "fastlyDochubMap": "FASTLY_DOCHUB_MAP", "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", - "reposBranchesCollection": "REPOS_BRANCHES_COL_NAME", "GATSBY_PARSER_USER": "GATSBY_PARSER_USER", "fastlyOpsManagerToken": "FASTLY_OPS_MANAGER_TOKEN", "fastlyOpsManagerServiceId": "FASTLY_OPS_MANAGER_SERVICE_ID", @@ -26,6 +25,7 @@ "featureFlagSearchUI": "GATSBY_TEST_SEARCH_UI", "gatsbyUseChatbot": "GATSBY_SHOW_CHATBOT", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "repo_dir": "repos", "jobId": "jobId", "jobsQueueUrl": "JOBS_QUEUE_URL", diff --git a/config/default.json b/config/default.json index 4f88c8feb..9b0a73341 100644 --- a/config/default.json +++ b/config/default.json @@ -12,7 +12,7 @@ "githubBotPW": "q1w", "fastlyDochubMap": "devfslydochubmap", "entitlementCollection": "entitlements", - "reposBranchesCollection": "allison_repos_branches", + "docsetsCollection": "docsets", "MONGO_TIMEOUT_S": 15, "JOB_TIMEOUT_S": 900, "RETRY_TIMEOUT_MS": 5000, diff --git a/config/test.json b/config/test.json index 92be1d719..5a17611ea 100644 --- a/config/test.json +++ b/config/test.json @@ -12,7 +12,7 @@ "githubBotPW": "q1w", "fastlyDochubMap": "devfslydochubmap", "entitlementCollection": "entitlements", - "reposBranchesCollection": "allison_repos_branches", + "docsetsCollection": "docsets", "MONGO_TIMEOUT_S": 1, "JOB_TIMEOUT_S": 10, "RETRY_TIMEOUT_MS": 10, diff --git a/infrastructure/ecs-main/ecs_service.yml b/infrastructure/ecs-main/ecs_service.yml index f4b09982c..6fc767800 100644 --- a/infrastructure/ecs-main/ecs_service.yml +++ b/infrastructure/ecs-main/ecs_service.yml @@ -47,7 +47,9 @@ Resources: - Name: JOB_QUEUE_COL_NAME Value: ${self:custom.jobCollection} - Name: REPO_BRANCHES_COL_NAME - value: ${self:custom.repoBranchesCollection} + Value: ${self:custom.repoBranchesCollection} + - Name: DOCSETS_COL_NAME + Value: ${self:custom.docsetsCollection} - 
Name: NPM_BASE_64_AUTH Value: ${self:custom.npmBase64Auth} - Name: NPM_EMAIL diff --git a/infrastructure/ecs-main/serverless.yml b/infrastructure/ecs-main/serverless.yml index e06ca9a2c..c73430fbc 100644 --- a/infrastructure/ecs-main/serverless.yml +++ b/infrastructure/ecs-main/serverless.yml @@ -138,6 +138,7 @@ custom: nodeIntgrBucketName: docs-node-intgr jobCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/job/queue} repoBranchesCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/repo} + docsetsCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/docsets} entitlementCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/user/entitlements} jobsQueueUrl: ${docs-worker-pool-api-${self:provider.stage}.JobsQueueURL} jobUpdatesQueueUrl: ${docs-worker-pool-api-${self:provider.stage}.JobsUpdateQueueURL} diff --git a/modules/persistence/src/services/metadata/repos_branches/index.ts b/modules/persistence/src/services/metadata/repos_branches/index.ts index 3de8086a2..6ea2cb455 100644 --- a/modules/persistence/src/services/metadata/repos_branches/index.ts +++ b/modules/persistence/src/services/metadata/repos_branches/index.ts @@ -27,6 +27,40 @@ export interface ReposBranchesDocument extends WithId { const internals: { [key: project]: ReposBranchesDocument } = {}; +const getAggregationPipeline = (matchCondition: any) => { + return [ + // Stage 1: Unwind the repos array to create multiple documents for each referenced repo + { + $unwind: '$repos', + }, + // Stage 2: Lookup to join with the repos_branches collection + { + $lookup: { + from: 'repos_branches', + localField: 'repos', + foreignField: '_id', + as: 'repo', + }, + }, + // Stage 3: Merge/flatten repo into docset + { + $replaceRoot: { newRoot: { $mergeObjects: [{ $arrayElemAt: ['$repo', 0] }, '$$ROOT'] } }, + }, + // Stage 4: Match documents based on given field(s) + { + $match: matchCondition, + }, + // Stage 5: Exclude fields + { + $project: { + _id: 0, + repos: 0, + repo: 0, + }, + }, + ]; +}; + // Queries pool*.repos_branches for all entries for associated_products in a shared metadata entry export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) => { const { associated_products = [] } = metadata; @@ -48,14 +82,14 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) => try { const db = await pool(); - await db - .collection('repos_branches') - .find({ project: { $in: fetch } }) - .forEach((doc: ReposBranchesDocument) => { - // TODO: store in cache - internals[doc['project']] = doc; - res.push(doc); - }); + const aggregationPipeline = getAggregationPipeline({ project: { $in: fetch } }); + const cursor = db.collection('docsets').aggregate(aggregationPipeline); + const docsets = (await cursor.toArray()) as ReposBranchesDocument[]; + docsets.forEach((doc: ReposBranchesDocument) => { + // TODO: store in cache + internals[doc['project']] = doc; + res.push(doc); + }); return res; } catch (e) { console.error(`Error while getting associated repo branches: ${e}`); @@ -80,20 +114,20 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi // get from DB if not cached try { const db = await pool(); - const query = { - project, - }; + const matchCondition = { project }; if (branch) { - query['branches'] = { - $elemMatch: { gitBranchName: branch }, - }; + matchCondition['branches'] = { $elemMatch: { gitBranchName: branch } }; } - const res = (await 
db.collection('repos_branches').findOne(query)) as unknown as ReposBranchesDocument; + const aggregationPipeline = getAggregationPipeline(matchCondition); + + const cursor = db.collection('docsets').aggregate(aggregationPipeline); + const res = (await cursor.toArray()) as unknown as ReposBranchesDocument[]; + // if not already set, set cache value for repo_branches if (!internals[project]) { - internals[project] = res; + internals[project] = res[0]; } - return res; + return res[0]; } catch (e) { console.error(`Error while getting repo branches entry: ${e}`); throw e; diff --git a/modules/persistence/tests/data/docsets.json b/modules/persistence/tests/data/docsets.json new file mode 100644 index 000000000..0d12420db --- /dev/null +++ b/modules/persistence/tests/data/docsets.json @@ -0,0 +1,63 @@ +[ + { + "_id": "5fc999cm3f17b4e8917e9494", + "bucket": { + "regression": "docs-atlas-stg", + "dev": "docs-atlas-dev", + "stg": "docs-atlas-stg", + "prd": "docs-atlas-prd", + "dotcomstg": "docs-atlas-dotcomstg", + "dotcomprd": "docs-atlas-dotcomprd" + }, + "url": { + "regression": "https://docs-atlas-integration.mongodb.com", + "dev": "https://docs-atlas-staging.mongodb.com", + "stg": "https://docs-atlas-staging.mongodb.com", + "prd": "https://docs.atlas.mongodb.com", + "dotcomprd": "http://mongodb.com/", + "dotcomstg": "https://mongodbcom-cdn.website.staging.corp.mongodb.com/" + }, + "prefix": { + "stg": "", + "prd": "", + "dotcomstg": "docs/atlas", + "dotcomprd": "docs/atlas" + }, + "project": "cloud-docs", + "search": { + "categoryName": "atlas", + "categoryTitle": "Atlas" + }, + "repos": ["5fc999ce3f17b4e8917e0494"] + }, + { + "_id": "6243aa3f0bae3635a59a1850", + "bucket": { + "regression": "docs-mongodb-org-stg", + "dev": "docs-mongodb-org-dev", + "stg": "docs-mongodb-org-stg", + "prd": "docs-mongodb-org-prd", + "dotcomstg": "docs-atlas-dotcomstg", + "dotcomprd": "docs-atlas-dotcomprd" + }, + "url": { + "regression": "https://docs-mongodbcom-integration.corp.mongodb.com", + "dev": "https://docs-mongodborg-staging.corp.mongodb.com", + "stg": "https://docs-mongodborg-staging.corp.mongodb.com", + "prd": "https://docs.mongodb.com", + "dotcomprd": "https://www.mongodb.com/", + "dotcomstg": "https://mongodbcom-cdn.website.staging.corp.mongodb.com/" + }, + "prefix": { + "stg": "atlas/cli", + "prd": "atlas/cli", + "dotcomstg": "docs-qa/atlas/cli", + "dotcomprd": "docs/atlas/cli" + }, + "project": "atlas-cli", + "search": { + "categoryTitle": "Atlas CLI" + }, + "repos": ["6243aa3f0aae3635a59a1150"] + } +] diff --git a/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap b/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap index 0e3417793..9c699ee35 100644 --- a/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap +++ b/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap @@ -2,7 +2,6 @@ exports[`associated_products module getAllAssociatedRepoBranchesEntries should get all repo branches info for specified associated products 1`] = ` Object { - "_id": "6243aa3f0aae3635a59a1150", "branches": Array [ Object { "active": true, @@ -2766,7 +2765,6 @@ Array [ exports[`associated_products module getRepoBranchesEntry should query repo branches for project and branch 1`] = ` Object { - "_id": "6243aa3f0aae3635a59a1150", "branches": Array [ Object { "active": true, diff --git a/modules/persistence/tests/utils.ts b/modules/persistence/tests/utils.ts index bc9d8861f..a86e0e11b 100644 --- 
a/modules/persistence/tests/utils.ts +++ b/modules/persistence/tests/utils.ts @@ -7,6 +7,7 @@ import { Db, MongoClient, ObjectId } from 'mongodb'; import metadata from './data/metadata.json'; import repoBranches from './data/repos_branches.json'; +import docsets from './data/docsets.json'; /** * mocks a db with test data in ./data collection @@ -23,6 +24,7 @@ export const setMockDB = async (dbName: string = new ObjectId().toString()): Pro const connection = await MongoClient.connect(process.env.MONGO_URL || 'test'); const mockDb = connection.db(dbName); await mockDb.collection('repos_branches').insertMany(repoBranches as unknown[] as Document[]); + await mockDb.collection('docsets').insertMany(docsets as unknown[] as Document[]); await mockDb.collection('metadata').insertMany(metadata as unknown[] as Document[]); return [mockDb, connection]; } catch (e) { diff --git a/serverless.yml b/serverless.yml index c8ae695d3..0ac22c91d 100644 --- a/serverless.yml +++ b/serverless.yml @@ -62,6 +62,7 @@ custom: jobCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/job/queue} entitlementCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/user/entitlements} repoBranchesCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/repo} + docsetsCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/docsets} githubSecret: ${ssm:/env/${self:provider.stage}/docs/worker_pool/github/webhook/secret} githubDeletionSecret: ${ssm:/env/${self:provider.stage}/docs/worker_pool/github/webhook/deletionSecret} githubBotPW: ${ssm:/env/${self:provider.stage}/docs/worker_pool/github/bot/password} @@ -114,7 +115,8 @@ webhook-env-core: &webhook-env-core GITHUB_SECRET: ${self:custom.githubSecret} GITHUB_DELETION_SECRET: ${self:custom.githubDeletionSecret} GITHUB_BOT_PASSWORD: ${self:custom.githubBotPW} - REPO_BRANCHES_COL_NAME: ${self:custom.repoBranchesCollection} + REPO_BRANCHES_COL_NAME: ${self:custom.repoBranchesCollection} + DOCSETS_COL_NAME: ${self:custom.docsetsCollection} SLACK_SECRET: ${self:custom.slackSecret} SLACK_TOKEN: ${self:custom.slackAuthToken} SNOOTY_SECRET: ${self:custom.snootySecret} diff --git a/src/app.ts b/src/app.ts index 714918d2b..8edcd8023 100644 --- a/src/app.ts +++ b/src/app.ts @@ -12,6 +12,7 @@ import * as mongodb from 'mongodb'; import { FileSystemServices } from './services/fileServices'; import { JobValidator } from './job/jobValidator'; import { RepoBranchesRepository } from './repositories/repoBranchesRepository'; +import { DocsetsRepository } from './repositories/docsetsRepository'; let db: mongodb.Db; let client: mongodb.MongoClient; @@ -30,6 +31,7 @@ let repoConnector: GitHubConnector; let jobHandlerFactory: JobHandlerFactory; let jobManager: JobManager; let repoBranchesRepo: RepoBranchesRepository; +let docsetsRepo: DocsetsRepository; let ssoConnector: ISSOConnector; async function init(): Promise { @@ -48,7 +50,8 @@ async function init(): Promise { ssmConnector = new ParameterStoreConnector(); repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger); repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger); - jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo); + docsetsRepo = new DocsetsRepository(db, c, consoleLogger); + jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo, docsetsRepo); ssoConnector = new OktaConnector(c, consoleLogger); cdnConnector = new 
K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector); repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger); @@ -64,6 +67,7 @@ async function init(): Promise { fileSystemServices, hybridJobLogger, repoBranchesRepo, + docsetsRepo, repoEntitlementRepository ); jobManager.start().catch((err) => { diff --git a/src/enhanced/job/enhancedJobHandlerFactory.ts b/src/enhanced/job/enhancedJobHandlerFactory.ts index 845631246..bd923fe99 100644 --- a/src/enhanced/job/enhancedJobHandlerFactory.ts +++ b/src/enhanced/job/enhancedJobHandlerFactory.ts @@ -17,6 +17,7 @@ import { EnhancedProductionJobHandler, EnhancedRegressionJobHandler, } from './enhancedJobHandlers'; +import { DocsetsRepository } from '../../repositories/docsetsRepository'; const enhancedJobHandlerMap = { githubPush: EnhancedStagingJobHandler, @@ -37,6 +38,7 @@ export class EnhancedJobHandlerFactory { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ): JobHandler { const jt = job.payload?.jobType; @@ -53,6 +55,7 @@ export class EnhancedJobHandlerFactory { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); } diff --git a/src/enhanced/job/enhancedJobHandlers.ts b/src/enhanced/job/enhancedJobHandlers.ts index db61af6a9..cdce4930f 100644 --- a/src/enhanced/job/enhancedJobHandlers.ts +++ b/src/enhanced/job/enhancedJobHandlers.ts @@ -10,7 +10,7 @@ import { StagingJobHandler } from '../../job/stagingJobHandler'; * @param this reference to current object */ async function setEnvironmentVariablesEnhanced(this: JobHandler) { - const repo_info = await this._repoBranchesRepo.getRepoBranchesByRepoName(this.currJob.payload.repoName); + const repo_info = await this._docsetsRepo.getRepoBranchesByRepoName(this.currJob.payload.repoName); let env = this._config.get('env'); this.logger.info( diff --git a/src/enhanced/utils/job/handle-job.ts b/src/enhanced/utils/job/handle-job.ts index c3bf34fd6..02fe423fc 100644 --- a/src/enhanced/utils/job/handle-job.ts +++ b/src/enhanced/utils/job/handle-job.ts @@ -11,6 +11,7 @@ import * as mongodb from 'mongodb'; import { FileSystemServices } from '../../../services/fileServices'; import { JobValidator } from '../../../job/jobValidator'; import { RepoBranchesRepository } from '../../../repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../../repositories/docsetsRepository'; import { ISSOConnector, OktaConnector } from '../../../services/sso'; import { EnhancedJobHandlerFactory } from '../../job/enhancedJobHandlerFactory'; @@ -27,6 +28,7 @@ let repoConnector: GitHubConnector; let jobHandlerFactory: JobHandlerFactory; let jobManager: JobManager; let repoBranchesRepo: RepoBranchesRepository; +let docsetsRepo: DocsetsRepository; let ssmConnector: ParameterStoreConnector; let ssoConnector: ISSOConnector; @@ -40,7 +42,8 @@ export async function handleJob(jobId: string, db: mongodb.Db) { ssmConnector = new ParameterStoreConnector(); repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger); repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger); - jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo); + docsetsRepo = new DocsetsRepository(db, c, consoleLogger); + jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo, docsetsRepo); ssoConnector = new OktaConnector(c, consoleLogger); 
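A condensed sketch of the repository wiring this hunk (and the matching hunks in src/app.ts and src/onDemandApp.ts) performs, assuming only the constructor signatures visible in this patch; buildValidator is an illustrative helper name rather than code from this series, and fileSystemServices is taken as a parameter to avoid assuming its constructor:

import * as c from 'config';
import * as mongodb from 'mongodb';
import { ConsoleLogger } from '../../../services/logger';
import { IFileSystemServices } from '../../../services/fileServices';
import { JobValidator } from '../../../job/jobValidator';
import { RepoEntitlementsRepository } from '../../../repositories/repoEntitlementsRepository';
import { RepoBranchesRepository } from '../../../repositories/repoBranchesRepository';
import { DocsetsRepository } from '../../../repositories/docsetsRepository';

export function buildValidator(db: mongodb.Db, fileSystemServices: IFileSystemServices): JobValidator {
  const consoleLogger = new ConsoleLogger();
  const repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger);
  const repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger);
  // New in this series: bucket/url/prefix data now comes from the docsets collection.
  const docsetsRepo = new DocsetsRepository(db, c, consoleLogger);
  // The fourth argument is the new piece; throwIfBranchNotConfigured resolves
  // repoBranches through docsetsRepo.getRepoBranchesByRepoName.
  return new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo, docsetsRepo);
}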
cdnConnector = new K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector); repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger); @@ -57,6 +60,7 @@ export async function handleJob(jobId: string, db: mongodb.Db) { fileSystemServices, hybridJobLogger, repoBranchesRepo, + docsetsRepo, repoEntitlementRepository ); diff --git a/src/job/jobHandler.ts b/src/job/jobHandler.ts index baefa3b83..aa5fbbfaa 100644 --- a/src/job/jobHandler.ts +++ b/src/job/jobHandler.ts @@ -11,6 +11,7 @@ import { AutoBuilderError, InvalidJobError, JobStoppedError, PublishError } from import { IConfig } from 'config'; import { IJobValidator } from './jobValidator'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; require('fs'); export abstract class JobHandler { @@ -55,6 +56,7 @@ export abstract class JobHandler { protected name: string; protected _repoBranchesRepo: RepoBranchesRepository; + protected _docsetsRepo: DocsetsRepository; protected _repoEntitlementsRepo: RepoEntitlementsRepository; constructor( @@ -68,6 +70,7 @@ export abstract class JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { this._commandExecutor = commandExecutor; @@ -81,6 +84,7 @@ export abstract class JobHandler { this._config = config; this._validator = validator; this._repoBranchesRepo = repoBranchesRepo; + this._docsetsRepo = docsetsRepo; this._repoEntitlementsRepo = repoEntitlementsRepo; } @@ -432,7 +436,7 @@ export abstract class JobHandler { } protected async setEnvironmentVariables(): Promise { - const repo_info = await this._repoBranchesRepo.getRepoBranchesByRepoName(this._currJob.payload.repoName); + const repo_info = await this._docsetsRepo.getRepoBranchesByRepoName(this._currJob.payload.repoName); let env = this._config.get('env'); this._logger.info( this._currJob._id, diff --git a/src/job/jobManager.ts b/src/job/jobManager.ts index f7e3216e1..4d579cc56 100644 --- a/src/job/jobManager.ts +++ b/src/job/jobManager.ts @@ -14,6 +14,7 @@ import { JobRepository } from '../repositories/jobRepository'; import { IFileSystemServices } from '../services/fileServices'; import { IConfig } from 'config'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; export const jobHandlerMap = { @@ -35,6 +36,7 @@ export class JobHandlerFactory { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ): JobHandler { const jt = job.payload?.jobType; @@ -50,6 +52,7 @@ export class JobHandlerFactory { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); } @@ -70,6 +73,7 @@ export class JobManager { private _jobHandlerFactory: JobHandlerFactory; private _jobCommandExecutor: IJobCommandExecutor; private _repoBranchesRepo: RepoBranchesRepository; + private _docsetsRepo: DocsetsRepository; private _repoEntitlementsRepo: RepoEntitlementsRepository; constructor( @@ -83,6 +87,7 @@ export class JobManager { fileSystemServices: IFileSystemServices, logger: IJobRepoLogger, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, 
repoEntitlementsRepo: RepoEntitlementsRepository ) { this._jobRepository = jobRepository; @@ -97,6 +102,7 @@ export class JobManager { this._jobHandlerFactory = jobHandlerFactory; this._jobCommandExecutor = jobCommandExecutor; this._repoBranchesRepo = repoBranchesRepo; + this._docsetsRepo = docsetsRepo; this._repoEntitlementsRepo = repoEntitlementsRepo; } @@ -163,6 +169,7 @@ export class JobManager { this._logger, this._jobValidator, this._repoBranchesRepo, + this._docsetsRepo, this._repoEntitlementsRepo ); diff --git a/src/job/jobValidator.ts b/src/job/jobValidator.ts index eca2687bc..e1f266c99 100644 --- a/src/job/jobValidator.ts +++ b/src/job/jobValidator.ts @@ -4,6 +4,7 @@ import type { Job } from '../entities/job'; import { IFileSystemServices } from '../services/fileServices'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; export interface IJobValidator { throwIfJobInvalid(job: Job): Promise; @@ -16,14 +17,17 @@ export class JobValidator implements IJobValidator { _fileSystemService: IFileSystemServices; _repoEntitlementRepository: RepoEntitlementsRepository; _repoBranchesRepository: RepoBranchesRepository; + _docsetsRepository: DocsetsRepository; constructor( fileSystemService: IFileSystemServices, repoEntitlementRepository: RepoEntitlementsRepository, - repoBranchesRepository: RepoBranchesRepository + repoBranchesRepository: RepoBranchesRepository, + docsetsRepository: DocsetsRepository ) { this._fileSystemService = fileSystemService; this._repoEntitlementRepository = repoEntitlementRepository; this._repoBranchesRepository = repoBranchesRepository; + this._docsetsRepository = docsetsRepository; } async throwIfUserNotEntitled(job: Job): Promise { @@ -34,7 +38,7 @@ export class JobValidator implements IJobValidator { } async throwIfBranchNotConfigured(job: Job): Promise { - job.payload.repoBranches = await this._repoBranchesRepository.getRepoBranchesByRepoName(job.payload.repoName); + job.payload.repoBranches = await this._docsetsRepository.getRepoBranchesByRepoName(job.payload.repoName); if (!job.payload?.repoBranches) { throw new AuthorizationError(`repoBranches not found for ${job.payload.repoName}`); } diff --git a/src/job/manifestJobHandler.ts b/src/job/manifestJobHandler.ts index 2befbfde7..2f1b99602 100644 --- a/src/job/manifestJobHandler.ts +++ b/src/job/manifestJobHandler.ts @@ -14,6 +14,7 @@ import { IJobValidator } from './jobValidator'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { InvalidJobError } from '../errors/errors'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; // TODO: Move this to a generic util and out of this job file export const joinUrlAndPrefix = (url: string, prefix: string) => { @@ -37,6 +38,7 @@ export class ManifestJobHandler extends JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -50,6 +52,7 @@ export class ManifestJobHandler extends JobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Manifest'; diff --git a/src/job/productionJobHandler.ts b/src/job/productionJobHandler.ts index f9d434955..bbe3ce770 
100644 --- a/src/job/productionJobHandler.ts +++ b/src/job/productionJobHandler.ts @@ -1,6 +1,7 @@ import { IConfig } from 'config'; import type { Job } from '../entities/job'; import { InvalidJobError } from '../errors/errors'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; import { JobRepository } from '../repositories/jobRepository'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; @@ -25,6 +26,7 @@ export class ProductionJobHandler extends JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -38,6 +40,7 @@ export class ProductionJobHandler extends JobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Production'; diff --git a/src/job/regressionJobHandler.ts b/src/job/regressionJobHandler.ts index bc4efb33d..951ab505a 100644 --- a/src/job/regressionJobHandler.ts +++ b/src/job/regressionJobHandler.ts @@ -1,5 +1,6 @@ import { IConfig } from 'config'; import type { Job } from '../entities/job'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; import { JobRepository } from '../repositories/jobRepository'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; @@ -23,6 +24,7 @@ export class RegressionJobHandler extends ProductionJobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -36,6 +38,7 @@ export class RegressionJobHandler extends ProductionJobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Regression'; diff --git a/src/job/stagingJobHandler.ts b/src/job/stagingJobHandler.ts index e77000f52..0f9680c8d 100644 --- a/src/job/stagingJobHandler.ts +++ b/src/job/stagingJobHandler.ts @@ -10,6 +10,7 @@ import { IRepoConnector } from '../services/repo'; import { IJobValidator } from './jobValidator'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; export class StagingJobHandler extends JobHandler { constructor( @@ -23,6 +24,7 @@ export class StagingJobHandler extends JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -36,6 +38,7 @@ export class StagingJobHandler extends JobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Staging'; diff --git a/src/onDemandApp.ts b/src/onDemandApp.ts index 837df350e..1174fc9fc 100644 --- a/src/onDemandApp.ts +++ b/src/onDemandApp.ts @@ -12,6 +12,7 @@ import { FileSystemServices } from './services/fileServices'; import { JobValidator } from './job/jobValidator'; import { RepoBranchesRepository } from './repositories/repoBranchesRepository'; import { ISSOConnector, OktaConnector } from './services/sso'; +import { DocsetsRepository } from './repositories/docsetsRepository'; let db: mongodb.Db; let client: 
mongodb.MongoClient;
@@ -22,13 +23,13 @@ let githubCommandExecutor: GithubCommandExecutor;
 let jobRepository: JobRepository;
 let hybridJobLogger: HybridJobLogger;
 let repoEntitlementRepository: RepoEntitlementsRepository;
-let repoBranchesRepository: RepoBranchesRepository;
 let jobValidator: JobValidator;
 let cdnConnector: K8SCDNConnector;
 let repoConnector: GitHubConnector;
 let jobHandlerFactory: JobHandlerFactory;
 let jobManager: JobManager;
 let repoBranchesRepo: RepoBranchesRepository;
+let docsetsRepo: DocsetsRepository;
 let ssmConnector: ParameterStoreConnector;
 let ssoConnector: ISSOConnector;
@@ -49,7 +50,8 @@ async function init(): Promise {
   ssmConnector = new ParameterStoreConnector();
   repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger);
   repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger);
-  jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo);
+  docsetsRepo = new DocsetsRepository(db, c, consoleLogger);
+  jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo, docsetsRepo);
   ssoConnector = new OktaConnector(c, consoleLogger);
   cdnConnector = new K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector);
   repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger);
@@ -65,6 +67,7 @@ async function init(): Promise {
     fileSystemServices,
     hybridJobLogger,
     repoBranchesRepo,
+    docsetsRepo,
     repoEntitlementRepository
   );
   try {
diff --git a/src/repositories/baseRepository.ts b/src/repositories/baseRepository.ts
index 236c2fd71..750a1f83f 100644
--- a/src/repositories/baseRepository.ts
+++ b/src/repositories/baseRepository.ts
@@ -97,6 +97,27 @@ export abstract class BaseRepository {
     }
   }
+  protected async aggregate(
+    aggregationPipeline: any,
+    errorMsg: string,
+    options: mongodb.AggregateOptions = {}
+  ): Promise {
+    try {
+      const result = await this.promiseTimeoutS(
+        this._config.get('MONGO_TIMEOUT_S'),
+        this._collection.aggregate(aggregationPipeline, options),
+        errorMsg
+      );
+      return result;
+    } catch (error) {
+      this._logger.error(
+        `${this._repoName}:aggregate`,
+        `Failed to query with aggregation pipeline (${JSON.stringify(aggregationPipeline)})\nerror: ${error}`
+      );
+      throw error;
+    }
+  }
+
   protected async updateMany(query: any, update: any, errorMsg: string): Promise {
     try {
       const updateResult = await this.promiseTimeoutS(
diff --git a/src/repositories/docSetRepository.ts b/src/repositories/docSetRepository.ts
deleted file mode 100644
index a2a2e25e8..000000000
--- a/src/repositories/docSetRepository.ts
+++ /dev/null
@@ -1,45 +0,0 @@
-import { IConfig } from 'config';
-import { Db } from 'mongodb';
-import { ILogger } from '../services/logger';
-import { BaseRepository } from './baseRepository';
-
-const docSetCollectionName = process.env.DOCS_SET_COLLECTION_NAME || 'docset';
-
-export class DocSetRepository extends BaseRepository {
-  constructor(db: Db, config: IConfig, logger: ILogger) {
-    super(config, logger, 'DocSetRepository', db.collection(docSetCollectionName));
-  }
-
-  /**
-   * Compares the project path from a monorepo push event, and compares it with
-   * what is configured in the docset entry in Atlas.
-   * @param path The project path where the snooty.toml file exists from the monorepo.
-   * This path will reflect the current project path from a given commit.
-   * @param projectName The project name for the docset entry.
- * @returns A boolean representing whether or not the configured docset entry snooty_toml path - * matches the path found in GitHub. - */ - async checkSnootyTomlPath(path: string, projectName: string) { - const query = { project: projectName }; - try { - const docSetObject = await this.findOne( - query, - `Mongo Timeout Error: Timedout while retrieving repos entry for ${path}` - ); - - if (!docSetObject) { - console.warn(`WARNING: The docset does not exist for the following project: ${projectName} \n path: ${path}`); - - return false; - } - - return docSetObject.directories.snooty_toml === path; - } catch (error) { - console.warn( - `WARNING: Error occurred when retrieving project path for ${projectName}. The following path was provided: ${path}`, - error - ); - return false; - } - } -} diff --git a/src/repositories/docsetsRepository.ts b/src/repositories/docsetsRepository.ts new file mode 100644 index 000000000..bd673fba8 --- /dev/null +++ b/src/repositories/docsetsRepository.ts @@ -0,0 +1,120 @@ +import { Db } from 'mongodb'; +import { BaseRepository } from './baseRepository'; +import { ILogger } from '../services/logger'; +import { IConfig } from 'config'; + +const docsetsCollectionName = process.env.DOCSETS_COL_NAME || 'docsets'; +export class DocsetsRepository extends BaseRepository { + constructor(db: Db, config: IConfig, logger: ILogger) { + super(config, logger, 'DocsetsRepository', db.collection(docsetsCollectionName)); + } + + private getAggregationPipeline( + matchConditionField: string, + matchConditionValue: string, + projection?: { [k: string]: number } + ) { + const DEFAULT_PROJECTIONS = { + _id: 0, + repos: 0, + repo: 0, + }; + + return [ + // Stage 1: Unwind the repos array to create multiple documents for each referenced repo + { + $unwind: '$repos', + }, + // Stage 2: Lookup to join with the repos_branches collection + { + $lookup: { + from: 'repos_branches', + localField: 'repos', + foreignField: '_id', + as: 'repo', + }, + }, + // Stage 3: Match documents based on given field + { + $match: { + [`repo.${matchConditionField}`]: matchConditionValue, + }, + }, + // Stage 4: Merge/flatten repo into docset + { + $replaceRoot: { newRoot: { $mergeObjects: [{ $arrayElemAt: ['$repo', 0] }, '$$ROOT'] } }, + }, + // Stage 5: Exclude fields + { + $project: projection || DEFAULT_PROJECTIONS, + }, + ]; + } + + async getProjectByRepoName(repoName: string): Promise { + const projection = { project: 1 }; + const aggregationPipeline = this.getAggregationPipeline('repoName', repoName, projection); + const cursor = await this.aggregate(aggregationPipeline, `Error while getting project by repo name ${repoName}`); + const res = await cursor.toArray(); + if (!res.length) { + const msg = `DocsetsRepository.getProjectByRepoName - Could not find project by repoName: ${repoName}`; + this._logger.info(this._repoName, msg); + } + return res[0]?.project; + } + + async getRepo(repoName: string): Promise { + const aggregationPipeline = this.getAggregationPipeline('repoName', repoName); + const cursor = await this.aggregate(aggregationPipeline, `Error while fetching repo by repo name ${repoName}`); + const res = await cursor.toArray(); + if (!res.length) { + const msg = `DocsetsRepository.getRepo - Could not find repo by repoName: ${repoName}`; + this._logger.info(this._repoName, msg); + } + return res[0]; + } + + async getRepoBranchesByRepoName(repoName: string): Promise { + const aggregationPipeline = this.getAggregationPipeline('repoName', repoName); + const cursor = await 
this.aggregate(aggregationPipeline, `Error while fetching repo by repo name ${repoName}`); + const res = await cursor.toArray(); + if (res.length && res[0]?.bucket && res[0]?.url) { + return res[0]; + } + return { status: 'failure' }; + } + + /** + * Compares the project path from a monorepo push event, and compares it with + * what is configured in the docset entry in Atlas. + * @param path The project path where the snooty.toml file exists from the monorepo. + * This path will reflect the current project path from a given commit. + * @param projectName The project name for the docset entry. + * @returns A boolean representing whether or not the configured docset entry snooty_toml path + * matches the path found in GitHub. + */ + // Warning: Directories field might be changing locations in schema. This method is unused and validity should be checked before usage. + async checkSnootyTomlPath(path: string, projectName: string) { + const query = { project: projectName }; + try { + const docsetObject = await this.findOne( + query, + `Mongo Timeout Error: Timedout while retrieving repos entry for ${path}` + ); + + if (!docsetObject) { + console.warn(`WARNING: The docset does not exist for the following project: ${projectName} \n path: ${path}`); + + return false; + } + + return docsetObject.directories.snooty_toml === path; + } catch (error) { + console.warn( + `WARNING: Error occurred when retrieving project path for ${projectName}. The following path was provided: ${path}`, + error + ); + return false; + } + } +} diff --git a/src/repositories/repoBranchesRepository.ts b/src/repositories/repoBranchesRepository.ts index 0d9cf4a67..d08bca2e7 100644 --- a/src/repositories/repoBranchesRepository.ts +++ b/src/repositories/repoBranchesRepository.ts @@ -8,37 +8,6 @@ export class RepoBranchesRepository extends BaseRepository { super(config, logger, 'RepoBranchesRepository', db.collection(config.get('repoBranchesCollection'))); } - async getConfiguredBranchesByGithubRepoName(repoName: string): Promise { - const query = { repoName: repoName }; - const reposObject = await this.findOne( - query, - `Mongo Timeout Error: Timedout while retrieving repos entry for ${repoName}` - ); - if (reposObject?.branches) { - return { - branches: reposObject.branches, - repoName: reposObject.repoName, - status: 'success', - }; - } else { - return { status: 'failure' }; - } - } - - async getProjectByRepoName(repoName: string) { - const query = { repoName }; - const projection = { _id: 0, project: 1 }; - const res = await this.findOne(query, `Error while getting project by repo name ${repoName}`, { projection }); - return res.project; - } - - async getRepo(repoName: string): Promise { - const query = { repoName: repoName }; - const repo = await this.findOne(query, `Mongo Timeout Error: Timedout while retrieving branches for ${repoName}`); - // if user has specific entitlements - return repo; - } - async getRepoBranches(repoName: string): Promise { const query = { repoName: repoName }; const repo = await this.findOne(query, `Mongo Timeout Error: Timedout while retrieving branches for ${repoName}`); @@ -46,20 +15,6 @@ export class RepoBranchesRepository extends BaseRepository { return repo?.['branches'] ?? 
[]; } - async getRepoBranchesByRepoName(repoName: string): Promise { - const query = { repoName: repoName }; - const repoDetails = await this.findOne( - query, - `Mongo Timeout Error: Timedout while retrieving repo information for ${repoName}` - ); - - if (repoDetails?.bucket && repoDetails?.url) { - return repoDetails; - } else { - return { status: 'failure' }; - } - } - async getRepoBranchAliases(repoName: string, branchName: string): Promise { const returnObject = { status: 'failure' }; const aliasArray = await this._collection diff --git a/tests/data/data.ts b/tests/data/data.ts index d403e8cad..80aac8875 100644 --- a/tests/data/data.ts +++ b/tests/data/data.ts @@ -447,4 +447,44 @@ export class TestDataProvider { } return retVal; } + + static getAggregationPipeline( + matchConditionField: string, + matchConditionValue: string, + projection?: { [k: string]: number } + ) { + return [ + // Stage 1: Unwind the repos array to create multiple documents for each referenced repo + { + $unwind: '$repos', + }, + // Stage 2: Lookup to join with the repos_branches collection + { + $lookup: { + from: 'repos_branches', + localField: 'repos', + foreignField: '_id', + as: 'repo', + }, + }, + // Stage 3: Match documents based on given field + { + $match: { + [`repo.${matchConditionField}`]: matchConditionValue, + }, + }, + // Stage 4: Merge/flatten repo into docset + { + $replaceRoot: { newRoot: { $mergeObjects: [{ $arrayElemAt: ['$repo', 0] }, '$$ROOT'] } }, + }, + // Stage 5: Exclude fields + { + $project: projection || { + _id: 0, + repos: 0, + repo: 0, + }, + }, + ]; + } } diff --git a/tests/mongo/testDBManager.ts b/tests/mongo/testDBManager.ts index 4ca7e1334..4fda8a33a 100644 --- a/tests/mongo/testDBManager.ts +++ b/tests/mongo/testDBManager.ts @@ -16,6 +16,7 @@ export class TestDBManager { process.env.DB_NAME = 'jest'; process.env.JOB_QUEUE_COL_NAME = 'queue'; process.env.USER_ENTITLEMENT_COL_NAME = 'entitlements'; + process.env.DOCSETS_COL_NAME = 'docsets'; } async start() { @@ -29,6 +30,7 @@ export class TestDBManager { this.db = this.client.db(process.env.DB_NAME); await this.db.createCollection(process.env.JOB_QUEUE_COL_NAME); await this.db.createCollection(process.env.USER_ENTITLEMENT_COL_NAME); + await this.db.createCollection(process.env.DOCSETS_COL_NAME); } async stop() { diff --git a/tests/unit/api/github.test.ts b/tests/unit/api/github.test.ts index 8be7d926a..e40627926 100644 --- a/tests/unit/api/github.test.ts +++ b/tests/unit/api/github.test.ts @@ -5,9 +5,9 @@ import { createMockAPIGatewayEvent } from '../../utils/createMockAPIGatewayEvent const DELETION_SECRET = 'GH_DELETION_SECRET'; -// Mock RepoBranchesRepository so that we can mock which data to return. -jest.mock('../../../src/repositories/repoBranchesRepository', () => ({ - RepoBranchesRepository: jest.fn().mockImplementation(() => ({ +// Mock DocsetsRepository so that we can mock which data to return. 
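For context on the pattern used in these test files: the module factory passed to jest.mock replaces the class wholesale, so whatever the handler under test constructs is already the stub. A minimal, self-contained usage sketch under that same mock; the repo name 'docs-landing' is only an illustration, and the mocked method returns synchronously even though the real one is async:

import { DocsetsRepository } from '../../../src/repositories/docsetsRepository';

jest.mock('../../../src/repositories/docsetsRepository', () => ({
  DocsetsRepository: jest.fn().mockImplementation(() => ({
    getProjectByRepoName: jest.fn().mockImplementation((repoName) => repoName),
  })),
}));

test('mocked DocsetsRepository echoes the repo name back as the project', () => {
  // The mocked constructor ignores its arguments, so nulls are safe here.
  const repo = new DocsetsRepository(null as any, null as any, null as any);
  expect(repo.getProjectByRepoName('docs-landing')).toBe('docs-landing');
});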
+jest.mock('../../../src/repositories/docsetsRepository', () => ({ + DocsetsRepository: jest.fn().mockImplementation(() => ({ getProjectByRepoName: jest.fn().mockImplementation((repoName) => repoName), })), })); diff --git a/tests/unit/api/slack.test.ts b/tests/unit/api/slack.test.ts index f825c4dbd..7962e938c 100644 --- a/tests/unit/api/slack.test.ts +++ b/tests/unit/api/slack.test.ts @@ -1,4 +1,5 @@ import { getDeployableJobs } from '../../../api/controllers/v1/slack'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; const mockRepoInfo = { @@ -36,13 +37,19 @@ const mockBranchObject = { // Mock RepoBranchesRepository so that we can mock which data to return. jest.mock('../../../src/repositories/repoBranchesRepository', () => ({ RepoBranchesRepository: jest.fn().mockImplementation(() => ({ - getRepo: jest.fn().mockImplementation(() => mockRepoInfo), getRepoBranchAliases: jest .fn() .mockImplementation((repoName, branchName) => mockBranchObject[repoName][branchName]), })), })); +// Mock DocsetsRepository so that we can mock which data to return. +jest.mock('../../../src/repositories/docsetsRepository', () => ({ + DocsetsRepository: jest.fn().mockImplementation(() => ({ + getRepo: jest.fn().mockImplementation(() => mockRepoInfo), + })), +})); + jest.mock('config'); describe('Slack API Controller Tests', () => { @@ -50,15 +57,21 @@ describe('Slack API Controller Tests', () => { email: 'test.user@mongodb.com', github_username: 'test.user', }; - // We're mocking RepoBranchesRepository to avoid needing access to a database. We'll use mock data. + // We're mocking RepoBranchesRepository and DocsetsRepository to avoid needing access to a database. We'll use mock data. 
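Worth spelling out the shape these mocks feed into getDeployableJobs: each Slack repo_option value packs owner, repo, and branch into one slash-delimited string, matching the `${repoOwner}/${repoName}/${gitBranchName}` entries that buildEntitledBranchList produces. A tiny sketch of that convention; parseRepoOption is illustrative only, not a helper from this patch:

// Splits an entitlement-style value such as 'mongodb/docs/master'.
const parseRepoOption = (value: string) => {
  const [repoOwner, repoName, branchName] = value.split('/');
  return { repoOwner, repoName, branchName };
};

console.log(parseRepoOption('mongodb/docs/master'));
// -> { repoOwner: 'mongodb', repoName: 'docs', branchName: 'master' }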
const mockRepoBranchRepository = new RepoBranchesRepository(null, null, null); + const mockDocsetsRepository = new DocsetsRepository(null, null, null); test('deployable jobs with the assigned urlSlug have primaryAlias set to true', async () => { const mockValues = { repo_option: [{ value: 'mongodb/docs/master' }], }; - const deployable = await getDeployableJobs(mockValues, mockEntitlement, mockRepoBranchRepository); + const deployable = await getDeployableJobs( + mockValues, + mockEntitlement, + mockRepoBranchRepository, + mockDocsetsRepository + ); expect(deployable).toHaveLength(2); const jobsWithPrimaryAlias = deployable.filter((job) => job.payload.primaryAlias); @@ -71,7 +84,12 @@ describe('Slack API Controller Tests', () => { repo_option: [{ value: 'mongodb/docs/v5.0' }], }; - const deployable = await getDeployableJobs(mockValues, mockEntitlement, mockRepoBranchRepository); + const deployable = await getDeployableJobs( + mockValues, + mockEntitlement, + mockRepoBranchRepository, + mockDocsetsRepository + ); expect(deployable).toHaveLength(3); const jobsWithPrimaryAlias = deployable.filter((job) => job.payload.primaryAlias); diff --git a/tests/unit/job/JobHandlerFactory.test.ts b/tests/unit/job/JobHandlerFactory.test.ts index d7d547a8f..faab594f7 100644 --- a/tests/unit/job/JobHandlerFactory.test.ts +++ b/tests/unit/job/JobHandlerFactory.test.ts @@ -15,6 +15,7 @@ import { ManifestJobHandler } from '../../../src/job/manifestJobHandler'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { IJobValidator } from '../../../src/job/jobValidator'; import { RepoEntitlementsRepository } from '../../../src/repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; describe('JobHandlerFactory Tests', () => { let job: Job; @@ -27,6 +28,7 @@ describe('JobHandlerFactory Tests', () => { let logger: IJobRepoLogger; let jobHandlerFactory: JobHandlerFactory; let repoBranchesRepo: RepoBranchesRepository; + let docsetsRepo: DocsetsRepository; let jobValidator: IJobValidator; let repoEntitlementsRepository: RepoEntitlementsRepository; @@ -41,6 +43,7 @@ describe('JobHandlerFactory Tests', () => { logger = mockDeep(); jobHandlerFactory = new JobHandlerFactory(); repoBranchesRepo = mockDeep(); + docsetsRepo = mockDeep(); repoEntitlementsRepository = mockDeep(); }); @@ -62,6 +65,7 @@ describe('JobHandlerFactory Tests', () => { logger, jobValidator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepository ); }).toThrowError('Job type not supported'); @@ -87,6 +91,7 @@ describe('JobHandlerFactory Tests', () => { logger, jobValidator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepository ); expect(handler).toBeInstanceOf(m[jt]); diff --git a/tests/unit/job/jobValidator.test.ts b/tests/unit/job/jobValidator.test.ts index f84fd44a3..7db94a212 100644 --- a/tests/unit/job/jobValidator.test.ts +++ b/tests/unit/job/jobValidator.test.ts @@ -6,12 +6,14 @@ import { JobValidator } from '../../../src/job/jobValidator'; import { RepoEntitlementsRepository } from '../../../src/repositories/repoEntitlementsRepository'; import { TestDataProvider } from '../../data/data'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; let job: Job; let fileSystemServices: IFileSystemServices; let repoEntitlementRepository: RepoEntitlementsRepository; let jobValidator: JobValidator; let 
repoBranchesRepository: RepoBranchesRepository; +let docsetsRepository: DocsetsRepository; beforeEach(() => { // Deep copy buildJobDef is necessary because we modify job @@ -19,18 +21,27 @@ beforeEach(() => { fileSystemServices = mockDeep(); repoEntitlementRepository = mockDeep(); repoBranchesRepository = mockDeep(); - jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepository); + docsetsRepository = mockDeep(); + jobValidator = new JobValidator( + fileSystemServices, + repoEntitlementRepository, + repoBranchesRepository, + docsetsRepository + ); }); afterEach(() => { mockReset(repoEntitlementRepository); mockReset(fileSystemServices); - mockReset(repoEntitlementRepository); + mockReset(repoBranchesRepository); + mockReset(docsetsRepository); }); describe('JobValidator Tests', () => { test('Construct Job Factory', () => { - expect(new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepository)).toBeDefined(); + expect( + new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepository, docsetsRepository) + ).toBeDefined(); }); test('invalid job type throws', async () => { diff --git a/tests/unit/job/productionJobHandler.test.ts b/tests/unit/job/productionJobHandler.test.ts index 00e5f4600..0e788b611 100644 --- a/tests/unit/job/productionJobHandler.test.ts +++ b/tests/unit/job/productionJobHandler.test.ts @@ -1,7 +1,7 @@ import { mockReset } from 'jest-mock-extended'; import { JobStatus } from '../../../src/entities/job'; import { TestDataProvider } from '../../data/data'; -import { getBuildJobDef, getManifestJobDef } from '../../data/jobDef'; +import { getBuildJobDef } from '../../data/jobDef'; import { JobHandlerTestHelper } from '../../utils/jobHandlerTestHelper'; describe('ProductionJobHandler Tests', () => { diff --git a/tests/unit/jobManager.test.ts b/tests/unit/jobManager.test.ts index 19f751a45..00925c5b7 100644 --- a/tests/unit/jobManager.test.ts +++ b/tests/unit/jobManager.test.ts @@ -12,6 +12,8 @@ import { IJobRepoLogger } from '../../src/services/logger'; import { IRepoConnector } from '../../src/services/repo'; import { getBuildJobDef } from '../data/jobDef'; import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../src/repositories/docsetsRepository'; +import { RepoEntitlementsRepository } from '../../src/repositories/repoEntitlementsRepository'; describe('JobManager Tests', () => { let job: Job; @@ -26,6 +28,8 @@ describe('JobManager Tests', () => { let jobManager: JobManager; let jobValidator: JobValidator; let repoBranchesRepo: RepoBranchesRepository; + let docsetsRepo: DocsetsRepository; + let repoEntitlementsRepo: RepoEntitlementsRepository; beforeEach(() => { jest.useFakeTimers('modern'); @@ -41,6 +45,8 @@ describe('JobManager Tests', () => { jobHandlerFactory = mockDeep(); jobValidator = mockDeep(); repoBranchesRepo = mockDeep(); + docsetsRepo = mockDeep(); + repoEntitlementsRepo = mockDeep(); jobManager = new JobManager( config, jobValidator, @@ -51,7 +57,9 @@ describe('JobManager Tests', () => { repoConnector, fileSystemServices, logger, - repoBranchesRepo + repoBranchesRepo, + docsetsRepo, + repoEntitlementsRepo ); }); diff --git a/tests/unit/repositories/docsetsRepository.test.ts b/tests/unit/repositories/docsetsRepository.test.ts new file mode 100644 index 000000000..9fa497604 --- /dev/null +++ b/tests/unit/repositories/docsetsRepository.test.ts @@ -0,0 +1,55 @@ +import { DBRepositoryHelper } from 
'../../utils/repositoryHelper';
+import { TestDataProvider } from '../../data/data';
+import { DocsetsRepository } from '../../../src/repositories/docsetsRepository';
+
+describe('Docsets Repository Tests', () => {
+  let docsetsRepo: DocsetsRepository;
+  let dbRepoHelper: DBRepositoryHelper;
+  beforeEach(() => {
+    dbRepoHelper = new DBRepositoryHelper();
+    docsetsRepo = dbRepoHelper.init('docsets', 'docsets', 'docsets');
+  });
+
+  test('Construct Docsets Repository', () => {
+    expect(new DocsetsRepository(dbRepoHelper.db, dbRepoHelper.config, dbRepoHelper.logger)).toBeDefined();
+  });
+
+  describe('Docsets Repository getRepoBranchesByRepoName Tests', () => {
+    test('getRepoBranchesByRepoName returns failure as result is undefined', async () => {
+      const testPipeline = TestDataProvider.getAggregationPipeline('repoName', 'test_repo');
+      dbRepoHelper.collection.aggregate.mockReturnValueOnce({
+        toArray: () => [],
+      });
+      await expect(docsetsRepo.getRepoBranchesByRepoName('test_repo')).resolves.toEqual({ status: 'failure' });
+      expect(dbRepoHelper.collection.aggregate).toBeCalledTimes(1);
+      expect(dbRepoHelper.collection.aggregate).toBeCalledWith(testPipeline, {});
+    });
+
+    test('getRepoBranchesByRepoName is successful', async () => {
+      const testPipeline = TestDataProvider.getAggregationPipeline('repoName', 'test_repo');
+      dbRepoHelper.collection.aggregate.mockReturnValueOnce({
+        toArray: () => [{
+          bucket: {},
+          url: {},
+        }],
+      });
+      await docsetsRepo.getRepoBranchesByRepoName('test_repo');
+      expect(dbRepoHelper.collection.aggregate).toBeCalledTimes(1);
+      expect(dbRepoHelper.collection.aggregate).toBeCalledWith(testPipeline, {});
+    });
+
+    test('Update with completion status times out', async () => {
+      dbRepoHelper.config.get.calledWith('MONGO_TIMEOUT_S').mockReturnValueOnce(1);
+      dbRepoHelper.collection.aggregate.mockImplementationOnce(() => {
+        return new Promise((resolve, reject) => {
+          setTimeout(resolve, 5000, [[]]);
+        });
+      });
+      docsetsRepo.getRepoBranchesByRepoName('test_repo').catch((error) => {
+        expect(dbRepoHelper.logger.error).toBeCalledTimes(1);
+        expect(error.message).toContain(`Error while fetching repo by repo name test_repo`);
+      });
+      jest.runAllTimers();
+    });
+  });
+});
diff --git a/tests/unit/repositories/repoBranchesRepository.test.ts b/tests/unit/repositories/repoBranchesRepository.test.ts
deleted file mode 100644
index 5e4b8c137..000000000
--- a/tests/unit/repositories/repoBranchesRepository.test.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository';
-import { DBRepositoryHelper } from '../../utils/repositoryHelper';
-import { TestDataProvider } from '../../data/data';
-import { getBuildJobDef } from '../../data/jobDef';
-
-describe('Repo Branches Repository Tests', () => {
-  let repoBranchesRepo: RepoBranchesRepository;
-  let dbRepoHelper: DBRepositoryHelper;
-  beforeEach(() => {
-    dbRepoHelper = new DBRepositoryHelper();
-    repoBranchesRepo = dbRepoHelper.init('repoBranches', 'repoBranchesCollection', 'testColl');
-  });
-
-  test('Construct Repo Entitlement Repository', () => {
-    expect(new RepoBranchesRepository(dbRepoHelper.db, dbRepoHelper.config, dbRepoHelper.logger)).toBeDefined();
-  });
-
-  describe('Repo Branches Repository getRepoBranchesByRepoName Tests', () => {
-    test('getRepoBranchesByRepoName returns failure as result is undefined', async () => {
-      const testData = TestDataProvider.getRepoBranchesByRepoName('test_repo');
-      await
diff --git a/tests/unit/repositories/repoBranchesRepository.test.ts b/tests/unit/repositories/repoBranchesRepository.test.ts
deleted file mode 100644
index 5e4b8c137..000000000
--- a/tests/unit/repositories/repoBranchesRepository.test.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository';
-import { DBRepositoryHelper } from '../../utils/repositoryHelper';
-import { TestDataProvider } from '../../data/data';
-import { getBuildJobDef } from '../../data/jobDef';
-
-describe('Repo Branches Repository Tests', () => {
-  let repoBranchesRepo: RepoBranchesRepository;
-  let dbRepoHelper: DBRepositoryHelper;
-  beforeEach(() => {
-    dbRepoHelper = new DBRepositoryHelper();
-    repoBranchesRepo = dbRepoHelper.init('repoBranches', 'repoBranchesCollection', 'testColl');
-  });
-
-  test('Construct Repo Entitlement Repository', () => {
-    expect(new RepoBranchesRepository(dbRepoHelper.db, dbRepoHelper.config, dbRepoHelper.logger)).toBeDefined();
-  });
-
-  describe('Repo Branches Repository getRepoBranchesByRepoName Tests', () => {
-    test('getRepoBranchesByRepoName returns failure as result is undefined', async () => {
-      const testData = TestDataProvider.getRepoBranchesByRepoName('test_repo');
-      await expect(repoBranchesRepo.getRepoBranchesByRepoName('test_repo')).resolves.toEqual({ status: 'failure' });
-      expect(dbRepoHelper.collection.findOne).toBeCalledTimes(1);
-      expect(dbRepoHelper.collection.findOne).toBeCalledWith(testData.query, {});
-    });
-
-    test('getRepoBranchesByRepoName is successfull', async () => {
-      const job = getBuildJobDef();
-      const testData = TestDataProvider.getRepoBranchesByRepoName('test_repo');
-      job.payload.repoName = 'test_repo';
-      dbRepoHelper.collection.findOne.mockReturnValueOnce(TestDataProvider.getRepoBranchesData(job));
-      await repoBranchesRepo.getRepoBranchesByRepoName('test_repo');
-      expect(dbRepoHelper.collection.findOne).toBeCalledTimes(1);
-      expect(dbRepoHelper.collection.findOne).toBeCalledWith(testData.query, {});
-    });
-
-    test('Update with completion status timesout', async () => {
-      dbRepoHelper.config.get.calledWith('MONGO_TIMEOUT_S').mockReturnValueOnce(1);
-      dbRepoHelper.collection.findOne.mockImplementationOnce(() => {
-        return new Promise((resolve, reject) => {
-          setTimeout(resolve, 5000, 'one');
-        });
-      });
-      repoBranchesRepo.getRepoBranchesByRepoName('test_repo').catch((error) => {
-        expect(dbRepoHelper.logger.error).toBeCalledTimes(1);
-        expect(error.message).toContain(
-          `Mongo Timeout Error: Timedout while retrieving repo information for test_repo`
-        );
-      });
-      jest.runAllTimers();
-    });
-  });
-});
diff --git a/tests/utils/jobHandlerTestHelper.ts b/tests/utils/jobHandlerTestHelper.ts
index 9d9e871e9..2e5669f1d 100644
--- a/tests/utils/jobHandlerTestHelper.ts
+++ b/tests/utils/jobHandlerTestHelper.ts
@@ -7,6 +7,7 @@ import { StagingJobHandler } from '../../src/job/stagingJobHandler';
 import { ManifestJobHandler } from '../../src/job/manifestJobHandler';
 import { JobRepository } from '../../src/repositories/jobRepository';
 import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository';
+import { DocsetsRepository } from '../../src/repositories/docsetsRepository';
 import { RepoEntitlementsRepository } from '../../src/repositories/repoEntitlementsRepository';
 import { ICDNConnector } from '../../src/services/cdn';
 import { IJobCommandExecutor } from '../../src/services/commandExecutor';
@@ -33,6 +34,7 @@ export class JobHandlerTestHelper {
   jobHandler: ProductionJobHandler | StagingJobHandler | ManifestJobHandler;
   jobValidator: IJobValidator;
   repoBranchesRepo: RepoBranchesRepository;
+  docsetsRepo: DocsetsRepository;
   repoEntitlementsRepo: RepoEntitlementsRepository;
   lengthPrototype;
   handlerMapper = {
@@ -58,6 +60,7 @@
     this.logger = mockDeep();
     this.jobValidator = mockDeep();
     this.repoBranchesRepo = mockDeep();
+    this.docsetsRepo = mockDeep();
    this.repoEntitlementsRepo = mockDeep();
     this.jobHandler = new this.handlerMapper[handlerName](
       this.job,
@@ -70,6 +73,7 @@
       this.logger,
       this.jobValidator,
       this.repoBranchesRepo,
+      this.docsetsRepo,
       this.repoEntitlementsRepo
     );
     return this.jobHandler;
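With docsetsRepo threaded through every handler constructor, an individual handler test keeps its setup to a couple of lines. A usage sketch; the 'prod' key into handlerMapper is an assumption here, since the mapper's entries fall outside this hunk:

import { JobHandlerTestHelper } from '../../utils/jobHandlerTestHelper';

const helper = new JobHandlerTestHelper();
// init() wires all mocked deps, including the new docsetsRepo, into the chosen handler.
const handler = helper.init('prod'); // 'prod' key assumed, not shown in this patch
// The field is typed as DocsetsRepository but holds a mockDeep proxy at runtime, hence the cast.
(helper.docsetsRepo.getRepoBranchesByRepoName as jest.Mock).mockResolvedValue({ bucket: {}, url: {} });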
diff --git a/tests/utils/repositoryHelper.ts b/tests/utils/repositoryHelper.ts
index 24b98a149..9dfbd1498 100644
--- a/tests/utils/repositoryHelper.ts
+++ b/tests/utils/repositoryHelper.ts
@@ -4,6 +4,7 @@ import { Db, FindCursor, FindOptions } from 'mongodb';
 import { JobRepository } from '../../src/repositories/jobRepository';
 import { RepoEntitlementsRepository } from '../../src/repositories/repoEntitlementsRepository';
 import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository';
+import { DocsetsRepository } from '../../src/repositories/docsetsRepository';
 import { ILogger } from '../../src/services/logger';
 
 export class DBRepositoryHelper {
@@ -15,6 +16,7 @@ export class DBRepositoryHelper {
   find: (query: any, errorMsg: string, options?: FindOptions) => Promise;
   findOne: (query: any, errorMsg: string) => Promise;
   findOneAndUpdate: (query: any, update: any, options: any, errorMsg: string) => Promise;
+  aggregate: (pipeline: any, errorMsg: string, options: any) => Promise;
   collection: any;
 
   jobRepo: JobRepository;
@@ -22,6 +24,7 @@
     job: JobRepository,
     repo: RepoEntitlementsRepository,
     repoBranches: RepoBranchesRepository,
+    docsets: DocsetsRepository,
   };
 
   init(repoName, collectionConfigName, collectionName): any {
@@ -35,12 +38,14 @@
     this.find = jest.fn();
     this.findOne = jest.fn();
     this.findOneAndUpdate = jest.fn();
+    this.aggregate = jest.fn();
     this.collection = {
       updateOne: this.updateOne,
       updateMany: this.updateMany,
       find: this.find,
       findOne: this.findOne,
       findOneAndUpdate: this.findOneAndUpdate,
+      aggregate: this.aggregate,
     };
     this.config.get.calledWith(collectionConfigName).mockReturnValue(collectionName);
     this.db.collection.calledWith(collectionName).mockReturnValue(this.collection);
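Because the helper exposes aggregate as a bare jest.fn() on the fake collection, repository tests shape the cursor themselves. A usage sketch (the toArray payload is an arbitrary example, not data from this patch):

import { DBRepositoryHelper } from '../../utils/repositoryHelper';

const dbRepoHelper = new DBRepositoryHelper();
const docsetsRepo = dbRepoHelper.init('docsets', 'docsets', 'docsets');
// aggregate returns whatever the test dictates; faking the cursor means faking toArray.
dbRepoHelper.collection.aggregate.mockReturnValueOnce({
  toArray: async () => [{ bucket: {}, url: {} }],
});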