diff --git a/api/config/custom-environment-variables.json b/api/config/custom-environment-variables.json index 65ca3f515..4199fbce2 100644 --- a/api/config/custom-environment-variables.json +++ b/api/config/custom-environment-variables.json @@ -20,6 +20,7 @@ "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "dashboardUrl": "DASHBOARD_URL", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "taskDefinitionFamily": "TASK_DEFINITION_FAMILY", "jobsQueueUrl": "JOBS_QUEUE_URL", "jobUpdatesQueueUrl": "JOB_UPDATES_QUEUE_URL", diff --git a/api/config/default.json b/api/config/default.json index 4118bfd2b..16d56ce57 100644 --- a/api/config/default.json +++ b/api/config/default.json @@ -19,6 +19,7 @@ "jobQueueCollection": "JOB_QUEUE_COL_NAME", "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "MONGO_TIMEOUT_S": 15, "JOB_TIMEOUT_S": 900, "RETRY_TIMEOUT_MS": 5000, diff --git a/api/controllers/v1/github.ts b/api/controllers/v1/github.ts index 8b239d10c..36471662a 100644 --- a/api/controllers/v1/github.ts +++ b/api/controllers/v1/github.ts @@ -4,13 +4,19 @@ import { JobRepository } from '../../../src/repositories/jobRepository'; import { ConsoleLogger } from '../../../src/services/logger'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; +import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/repos_branches'; -async function prepGithubPushPayload(githubEvent: any, repoBranchesRepository: RepoBranchesRepository, prefix: string) { +async function prepGithubPushPayload( + githubEvent: any, + repoBranchesRepository: RepoBranchesRepository, + prefix: string, + repoInfo: ReposBranchesDocument +) 
{ const branch_name = githubEvent.ref.split('/')[2]; const branch_info = await repoBranchesRepository.getRepoBranchAliases(githubEvent.repository.name, branch_name); const urlSlug = branch_info.aliasObject?.urlSlug ?? branch_name; - const repo_info = await repoBranchesRepository.getRepo(githubEvent.repository.name); - const project = repo_info?.project ?? githubEvent.repository.name; + const project = repoInfo?.project ?? githubEvent.repository.name; return { title: githubEvent.repository.full_name, @@ -57,6 +63,7 @@ export const TriggerBuild = async (event: any = {}, context: any = {}): Promise< const consoleLogger = new ConsoleLogger(); const jobRepository = new JobRepository(db, c, consoleLogger); const repoBranchesRepository = new RepoBranchesRepository(db, c, consoleLogger); + const docsetsRepository = new DocsetsRepository(db, c, consoleLogger); if (!validateJsonWebhook(event, c.get('githubSecret'))) { const errMsg = "X-Hub-Signature incorrect. Github webhook token doesn't match"; @@ -77,10 +84,10 @@ export const TriggerBuild = async (event: any = {}, context: any = {}): Promise< } const env = c.get('env'); - const repoInfo = await repoBranchesRepository.getRepo(body.repository.name); + const repoInfo = await docsetsRepository.getRepo(body.repository.name); const jobPrefix = repoInfo?.prefix ? 
repoInfo['prefix'][env] : ''; // TODO: Make job be of type Job - const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix); + const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix, repoInfo); try { consoleLogger.info(job.title, 'Creating Job'); const jobId = await jobRepository.insertJob(job, c.get('jobsQueueUrl')); diff --git a/api/controllers/v1/jobs.ts b/api/controllers/v1/jobs.ts index 652b6d226..38ea304da 100644 --- a/api/controllers/v1/jobs.ts +++ b/api/controllers/v1/jobs.ts @@ -2,7 +2,6 @@ import * as c from 'config'; import * as mongodb from 'mongodb'; import { IConfig } from 'config'; import { RepoEntitlementsRepository } from '../../../src/repositories/repoEntitlementsRepository'; -import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { ConsoleLogger } from '../../../src/services/logger'; import { SlackConnector } from '../../../src/services/slack'; import { JobRepository } from '../../../src/repositories/jobRepository'; @@ -12,6 +11,7 @@ import { ECSContainer } from '../../../src/services/containerServices'; import { SQSConnector } from '../../../src/services/queue'; import { Batch } from '../../../src/services/batch'; import { notifyBuildSummary, snootyBuildComplete } from '../../handlers/jobs'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; export const TriggerLocalBuild = async (event: any = {}, context: any = {}): Promise => { const client = new mongodb.MongoClient(c.get('dbUrl')); @@ -258,7 +258,7 @@ async function SubmitArchiveJob(jobId: string) { const db = client.db(c.get('dbName')); const models = { jobs: new JobRepository(db, c, consoleLogger), - repoBranches: new RepoBranchesRepository(db, c, consoleLogger), + repoBranches: new DocsetsRepository(db, c, consoleLogger), }; const job = await models.jobs.getJobById(jobId); const repo = await models.repoBranches.getRepo(job.payload.repoName); diff --git 
a/api/controllers/v1/slack.ts b/api/controllers/v1/slack.ts index 25fecb35d..f34665af8 100644 --- a/api/controllers/v1/slack.ts +++ b/api/controllers/v1/slack.ts @@ -5,53 +5,14 @@ import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRe import { ConsoleLogger, ILogger } from '../../../src/services/logger'; import { SlackConnector } from '../../../src/services/slack'; import { JobRepository } from '../../../src/repositories/jobRepository'; - -function isUserEntitled(entitlementsObject: any): boolean { - return (entitlementsObject?.repos?.length ?? 0) > 0; -} - -function isRestrictedToDeploy(userId: string): boolean { - const { restrictedProdDeploy, entitledSlackUsers } = c.get('prodDeploy'); - return restrictedProdDeploy && !entitledSlackUsers.includes(userId); -} - -function prepResponse(statusCode, contentType, body) { - return { - statusCode: statusCode, - headers: { 'Content-Type': contentType }, - body: body, - }; -} - -async function buildEntitledBranchList(entitlement: any, repoBranchesRepository: RepoBranchesRepository) { - const entitledBranches: string[] = []; - for (const repo of entitlement.repos) { - const [repoOwner, repoName] = repo.split('/'); - const branches = await repoBranchesRepository.getRepoBranches(repoName); - for (const branch of branches) { - let buildWithSnooty = true; - if ('buildsWithSnooty' in branch) { - buildWithSnooty = branch['buildsWithSnooty']; - } - if (buildWithSnooty) { - entitledBranches.push(`${repoOwner}/${repoName}/${branch['gitBranchName']}`); - } - } - } - return entitledBranches.sort(); -} - -function getQSString(qs: string) { - const key_val = {}; - const arr = qs.split('&'); - if (arr) { - arr.forEach((keyval) => { - const kvpair = keyval.split('='); - key_val[kvpair[0]] = kvpair[1]; - }); - } - return key_val; -} +import { + buildEntitledBranchList, + getQSString, + isRestrictedToDeploy, + isUserEntitled, + prepResponse, +} from '../../handlers/slack'; +import { DocsetsRepository } from 
'../../../src/repositories/docsetsRepository'; export const DisplayRepoOptions = async (event: any = {}, context: any = {}): Promise => { const consoleLogger = new ConsoleLogger(); @@ -102,7 +63,12 @@ const deployHelper = (deployable, payload, jobTitle, jobUserName, jobUserEmail) // For every repo/branch selected to be deployed, return an array of jobs with the payload data // needed for a successful build. -export const getDeployableJobs = async (values, entitlement, repoBranchesRepository: RepoBranchesRepository) => { +export const getDeployableJobs = async ( + values, + entitlement, + repoBranchesRepository: RepoBranchesRepository, + docsetsRepository: DocsetsRepository +) => { const deployable = []; for (let i = 0; i < values.repo_option.length; i++) { @@ -113,7 +79,7 @@ export const getDeployableJobs = async (values, entitlement, repoBranchesReposit const jobUserName = entitlement.github_username; const jobUserEmail = entitlement?.email ?? ''; - const repoInfo = await repoBranchesRepository.getRepo(repoName); + const repoInfo = await docsetsRepository.getRepo(repoName); const non_versioned = repoInfo.branches.length === 1; const branchObject = await repoBranchesRepository.getRepoBranchAliases(repoName, branchName); @@ -196,6 +162,7 @@ export const DeployRepo = async (event: any = {}, context: any = {}): Promise 0) { await deployRepo(deployable, consoleLogger, jobRepository, c.get('jobsQueueUrl')); } diff --git a/api/controllers/v2/github.ts b/api/controllers/v2/github.ts index e50842d51..bfa145bec 100644 --- a/api/controllers/v2/github.ts +++ b/api/controllers/v2/github.ts @@ -8,19 +8,21 @@ import { ConsoleLogger } from '../../../src/services/logger'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { EnhancedJob, JobStatus } from '../../../src/entities/job'; import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github'; +import { DocsetsRepository } from 
'../../../src/repositories/docsetsRepository'; import { getMonorepoPaths } from '../../../src/monorepo'; import { getUpdatedFilePaths } from '../../../src/monorepo/utils/path-utils'; +import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/associated_products'; async function prepGithubPushPayload( githubEvent: PushEvent, repoBranchesRepository: RepoBranchesRepository, - prefix: string + prefix: string, + repoInfo: ReposBranchesDocument ): Promise> { const branch_name = githubEvent.ref.split('/')[2]; const branch_info = await repoBranchesRepository.getRepoBranchAliases(githubEvent.repository.name, branch_name); const urlSlug = branch_info.aliasObject?.urlSlug ?? branch_name; - const repo_info = await repoBranchesRepository.getRepo(githubEvent.repository.name); - const project = repo_info?.project ?? githubEvent.repository.name; + const project = repoInfo?.project ?? githubEvent.repository.name; return { title: githubEvent.repository.full_name, @@ -58,6 +60,7 @@ export const TriggerBuild = async (event: APIGatewayEvent): Promise('env'); - const repoInfo = await repoBranchesRepository.getRepo(body.repository.name); + const repoInfo = await docsetsRepository.getRepo(body.repository.name); const jobPrefix = repoInfo?.prefix ? 
repoInfo['prefix'][env] : ''; - const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix); + const job = await prepGithubPushPayload(body, repoBranchesRepository, jobPrefix, repoInfo); if (process.env.MONOREPO_PATH_FEATURE === 'true') { try { diff --git a/api/controllers/v2/slack.ts b/api/controllers/v2/slack.ts index d3794d9bf..7e8c2d2ec 100644 --- a/api/controllers/v2/slack.ts +++ b/api/controllers/v2/slack.ts @@ -7,53 +7,14 @@ import { SlackConnector } from '../../../src/services/slack'; import { JobRepository } from '../../../src/repositories/jobRepository'; import { APIGatewayEvent, APIGatewayProxyResult } from 'aws-lambda'; import { JobStatus } from '../../../src/entities/job'; - -function isUserEntitled(entitlementsObject: any): boolean { - return (entitlementsObject?.repos?.length ?? 0) > 0; -} - -function isRestrictedToDeploy(userId: string): boolean { - const { restrictedProdDeploy, entitledSlackUsers } = c.get('prodDeploy'); - return restrictedProdDeploy && !entitledSlackUsers.includes(userId); -} - -function prepResponse(statusCode, contentType, body) { - return { - statusCode: statusCode, - headers: { 'Content-Type': contentType }, - body: body, - }; -} - -async function buildEntitledBranchList(entitlement: any, repoBranchesRepository: RepoBranchesRepository) { - const entitledBranches: string[] = []; - for (const repo of entitlement.repos) { - const [repoOwner, repoName] = repo.split('/'); - const branches = await repoBranchesRepository.getRepoBranches(repoName); - for (const branch of branches) { - let buildWithSnooty = true; - if ('buildsWithSnooty' in branch) { - buildWithSnooty = branch['buildsWithSnooty']; - } - if (buildWithSnooty) { - entitledBranches.push(`${repoOwner}/${repoName}/${branch['gitBranchName']}`); - } - } - } - return entitledBranches.sort(); -} - -function getQSString(qs: string) { - const key_val = {}; - const arr = qs.split('&'); - if (arr) { - arr.forEach((keyval) => { - const kvpair = keyval.split('='); 
- key_val[kvpair[0]] = kvpair[1]; - }); - } - return key_val; -} +import { + buildEntitledBranchList, + getQSString, + isRestrictedToDeploy, + isUserEntitled, + prepResponse, +} from '../../handlers/slack'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; export const DisplayRepoOptions = async (event: APIGatewayEvent): Promise => { const consoleLogger = new ConsoleLogger(); @@ -119,7 +80,12 @@ const deployHelper = (deployable, payload, jobTitle, jobUserName, jobUserEmail) // For every repo/branch selected to be deployed, return an array of jobs with the payload data // needed for a successful build. -export const getDeployableJobs = async (values, entitlement, repoBranchesRepository: RepoBranchesRepository) => { +export const getDeployableJobs = async ( + values, + entitlement, + repoBranchesRepository: RepoBranchesRepository, + docsetsRepository: DocsetsRepository +) => { const deployable = []; for (let i = 0; i < values.repo_option.length; i++) { @@ -130,7 +96,7 @@ export const getDeployableJobs = async (values, entitlement, repoBranchesReposit const jobUserName = entitlement.github_username; const jobUserEmail = entitlement?.email ?? 
''; - const repoInfo = await repoBranchesRepository.getRepo(repoName); + const repoInfo = await docsetsRepository.getRepo(repoName); const non_versioned = repoInfo.branches.length === 1; const branchObject = await repoBranchesRepository.getRepoBranchAliases(repoName, branchName); @@ -220,6 +186,7 @@ export const DeployRepo = async (event: APIGatewayEvent): Promise 0) { await deployRepo(deployable, consoleLogger, jobRepository, c.get('jobsQueueUrl')); } diff --git a/api/handlers/github.ts b/api/handlers/github.ts index 3906a2cbb..ab2eb6d2c 100644 --- a/api/handlers/github.ts +++ b/api/handlers/github.ts @@ -4,7 +4,7 @@ import * as mongodb from 'mongodb'; import { APIGatewayEvent } from 'aws-lambda'; import { PullRequestEvent } from '@octokit/webhooks-types'; import { ConsoleLogger } from '../../src/services/logger'; -import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../src/repositories/docsetsRepository'; import { UpdatedDocsRepository } from '../../src/repositories/updatedDocsRepository'; import { MetadataRepository } from '../../src/repositories/metadataRepository'; @@ -94,8 +94,8 @@ export const markBuildArtifactsForDeletion = async (event: APIGatewayEvent) => { try { await client.connect(); const poolDb = client.db(c.get('dbName')); - const repoBranchesRepository = new RepoBranchesRepository(poolDb, c, consoleLogger); - const project = (await repoBranchesRepository.getProjectByRepoName(repository.name)) as string; + const docsetsRepository = new DocsetsRepository(poolDb, c, consoleLogger); + const project = (await docsetsRepository.getProjectByRepoName(repository.name)) as string; // Start marking build artifacts for deletion const snootyDb = client.db(c.get('snootyDbName')); diff --git a/api/handlers/slack.ts b/api/handlers/slack.ts new file mode 100644 index 000000000..2ceabc94d --- /dev/null +++ b/api/handlers/slack.ts @@ -0,0 +1,49 @@ +import * as c from 'config'; +import { 
RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; + +export function isUserEntitled(entitlementsObject: any): boolean { + return (entitlementsObject?.repos?.length ?? 0) > 0; +} + +export function isRestrictedToDeploy(userId: string): boolean { + const { restrictedProdDeploy, entitledSlackUsers } = c.get('prodDeploy'); + return restrictedProdDeploy && !entitledSlackUsers.includes(userId); +} + +export function prepResponse(statusCode, contentType, body) { + return { + statusCode: statusCode, + headers: { 'Content-Type': contentType }, + body: body, + }; +} + +export async function buildEntitledBranchList(entitlement: any, repoBranchesRepository: RepoBranchesRepository) { + const entitledBranches: string[] = []; + for (const repo of entitlement.repos) { + const [repoOwner, repoName] = repo.split('/'); + const branches = await repoBranchesRepository.getRepoBranches(repoName); + for (const branch of branches) { + let buildWithSnooty = true; + if ('buildsWithSnooty' in branch) { + buildWithSnooty = branch['buildsWithSnooty']; + } + if (buildWithSnooty) { + entitledBranches.push(`${repoOwner}/${repoName}/${branch['gitBranchName']}`); + } + } + } + return entitledBranches.sort(); +} + +export function getQSString(qs: string) { + const key_val = {}; + const arr = qs.split('&'); + if (arr) { + arr.forEach((keyval) => { + const kvpair = keyval.split('='); + key_val[kvpair[0]] = kvpair[1]; + }); + } + return key_val; +} diff --git a/cdk-infra/lib/constructs/api/webhook-env-construct.ts b/cdk-infra/lib/constructs/api/webhook-env-construct.ts index eec5651ff..8daf0af50 100644 --- a/cdk-infra/lib/constructs/api/webhook-env-construct.ts +++ b/cdk-infra/lib/constructs/api/webhook-env-construct.ts @@ -29,6 +29,7 @@ export class WebhookEnvConstruct extends Construct { const dbName = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/dbname`); const snootyDbName = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/snooty`); 
const repoBranchesCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/repo`); + const docsetsCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/docsets`); const dbUsername = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/username`); const dbHost = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/host`); const jobCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/job/queue`); @@ -46,6 +47,7 @@ export class WebhookEnvConstruct extends Construct { DB_NAME: dbName, SNOOTY_DB_NAME: snootyDbName, REPO_BRANCHES_COL_NAME: repoBranchesCollection, + DOCSETS_COL_NAME: docsetsCollection, JOB_QUEUE_COL_NAME: jobCollection, NODE_CONFIG_DIR: './config', JOBS_QUEUE_URL: jobsQueue.queueUrl, diff --git a/cdk-infra/lib/constructs/worker/worker-env-construct.ts b/cdk-infra/lib/constructs/worker/worker-env-construct.ts index f1d27ee07..a0313e3bb 100644 --- a/cdk-infra/lib/constructs/worker/worker-env-construct.ts +++ b/cdk-infra/lib/constructs/worker/worker-env-construct.ts @@ -52,6 +52,7 @@ export class WorkerEnvConstruct extends Construct { `${ssmPrefix}/atlas/collections/user/entitlements` ); const repoBranchesCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/repo`); + const docsetsCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/docsets`); const jobCollection = StringParameter.valueFromLookup(this, `${ssmPrefix}/atlas/collections/job/queue`); const dbPassword = secureStrings['MONGO_ATLAS_PASSWORD']; @@ -75,6 +76,7 @@ export class WorkerEnvConstruct extends Construct { USER_ENTITLEMENT_COL_NAME: entitlementCollection, NPM_EMAIL: npmEmail, REPO_BRANCHES_COL_NAME: repoBranchesCollection, + DOCSETS_COL_NAME: docsetsCollection, JOB_QUEUE_COL_NAME: jobCollection, CDN_INVALIDATOR_SERVICE_URL: getCdnInvalidatorUrl(env), SEARCH_INDEX_BUCKET: 'docs-search-indexes-test', diff --git 
a/cdk-infra/static/api/config/custom-environment-variables.json b/cdk-infra/static/api/config/custom-environment-variables.json index 44dbc915e..8d554d1b5 100644 --- a/cdk-infra/static/api/config/custom-environment-variables.json +++ b/cdk-infra/static/api/config/custom-environment-variables.json @@ -20,6 +20,7 @@ "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "dashboardUrl": "DASHBOARD_URL", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "taskDefinitionFamily": "TASK_DEFINITION_FAMILY", "jobsQueueUrl": "JOBS_QUEUE_URL", "jobUpdatesQueueUrl": "JOB_UPDATES_QUEUE_URL", diff --git a/cdk-infra/static/api/config/default.json b/cdk-infra/static/api/config/default.json index c2f6f8e4d..3758dea37 100644 --- a/cdk-infra/static/api/config/default.json +++ b/cdk-infra/static/api/config/default.json @@ -19,6 +19,7 @@ "jobQueueCollection": "JOB_QUEUE_COL_NAME", "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "MONGO_TIMEOUT_S": 15, "JOB_TIMEOUT_S": 900, "RETRY_TIMEOUT_MS": 5000, diff --git a/config/custom-environment-variables.json b/config/custom-environment-variables.json index 011a147c7..49043b253 100644 --- a/config/custom-environment-variables.json +++ b/config/custom-environment-variables.json @@ -14,7 +14,6 @@ "githubBotPW": "GITHUB_BOT_PASSWORD", "fastlyDochubMap": "FASTLY_DOCHUB_MAP", "entitlementCollection": "USER_ENTITLEMENT_COL_NAME", - "reposBranchesCollection": "REPOS_BRANCHES_COL_NAME", "GATSBY_PARSER_USER": "GATSBY_PARSER_USER", "fastlyOpsManagerToken": "FASTLY_OPS_MANAGER_TOKEN", "fastlyOpsManagerServiceId": "FASTLY_OPS_MANAGER_SERVICE_ID", @@ -26,6 +25,7 @@ "featureFlagSearchUI": "GATSBY_TEST_SEARCH_UI", "gatsbyUseChatbot": "GATSBY_SHOW_CHATBOT", "repoBranchesCollection": "REPO_BRANCHES_COL_NAME", + "docsetsCollection": "DOCSETS_COL_NAME", "repo_dir": "repos", "jobId": "jobId", "jobsQueueUrl": 
"JOBS_QUEUE_URL", diff --git a/config/default.json b/config/default.json index 4f88c8feb..9b0a73341 100644 --- a/config/default.json +++ b/config/default.json @@ -12,7 +12,7 @@ "githubBotPW": "q1w", "fastlyDochubMap": "devfslydochubmap", "entitlementCollection": "entitlements", - "reposBranchesCollection": "allison_repos_branches", + "docsetsCollection": "docsets", "MONGO_TIMEOUT_S": 15, "JOB_TIMEOUT_S": 900, "RETRY_TIMEOUT_MS": 5000, diff --git a/config/test.json b/config/test.json index 92be1d719..5a17611ea 100644 --- a/config/test.json +++ b/config/test.json @@ -12,7 +12,7 @@ "githubBotPW": "q1w", "fastlyDochubMap": "devfslydochubmap", "entitlementCollection": "entitlements", - "reposBranchesCollection": "allison_repos_branches", + "docsetsCollection": "docsets", "MONGO_TIMEOUT_S": 1, "JOB_TIMEOUT_S": 10, "RETRY_TIMEOUT_MS": 10, diff --git a/infrastructure/ecs-main/ecs_service.yml b/infrastructure/ecs-main/ecs_service.yml index f4b09982c..6fc767800 100644 --- a/infrastructure/ecs-main/ecs_service.yml +++ b/infrastructure/ecs-main/ecs_service.yml @@ -47,7 +47,9 @@ Resources: - Name: JOB_QUEUE_COL_NAME Value: ${self:custom.jobCollection} - Name: REPO_BRANCHES_COL_NAME - value: ${self:custom.repoBranchesCollection} + Value: ${self:custom.repoBranchesCollection} + - Name: DOCSETS_COL_NAME + Value: ${self:custom.docsetsCollection} - Name: NPM_BASE_64_AUTH Value: ${self:custom.npmBase64Auth} - Name: NPM_EMAIL diff --git a/infrastructure/ecs-main/serverless.yml b/infrastructure/ecs-main/serverless.yml index e06ca9a2c..c73430fbc 100644 --- a/infrastructure/ecs-main/serverless.yml +++ b/infrastructure/ecs-main/serverless.yml @@ -138,6 +138,7 @@ custom: nodeIntgrBucketName: docs-node-intgr jobCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/job/queue} repoBranchesCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/repo} + docsetsCollection: 
${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/docsets} entitlementCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/user/entitlements} jobsQueueUrl: ${docs-worker-pool-api-${self:provider.stage}.JobsQueueURL} jobUpdatesQueueUrl: ${docs-worker-pool-api-${self:provider.stage}.JobsUpdateQueueURL} diff --git a/modules/persistence/src/services/metadata/repos_branches/index.ts b/modules/persistence/src/services/metadata/repos_branches/index.ts index 3de8086a2..6ea2cb455 100644 --- a/modules/persistence/src/services/metadata/repos_branches/index.ts +++ b/modules/persistence/src/services/metadata/repos_branches/index.ts @@ -27,6 +27,40 @@ export interface ReposBranchesDocument extends WithId { const internals: { [key: project]: ReposBranchesDocument } = {}; +const getAggregationPipeline = (matchCondition: any) => { + return [ + // Stage 1: Unwind the repos array to create multiple documents for each referenced repo + { + $unwind: '$repos', + }, + // Stage 2: Lookup to join with the repos_branches collection + { + $lookup: { + from: 'repos_branches', + localField: 'repos', + foreignField: '_id', + as: 'repo', + }, + }, + // Stage 3: Merge/flatten repo into docset + { + $replaceRoot: { newRoot: { $mergeObjects: [{ $arrayElemAt: ['$repo', 0] }, '$$ROOT'] } }, + }, + // Stage 4: Match documents based on given field(s) + { + $match: matchCondition, + }, + // Stage 5: Exclude fields + { + $project: { + _id: 0, + repos: 0, + repo: 0, + }, + }, + ]; +}; + // Queries pool*.repos_branches for all entries for associated_products in a shared metadata entry export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) => { const { associated_products = [] } = metadata; @@ -48,14 +82,14 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) => try { const db = await pool(); - await db - .collection('repos_branches') - .find({ project: { $in: fetch } }) - .forEach((doc: ReposBranchesDocument) 
=> { - // TODO: store in cache - internals[doc['project']] = doc; - res.push(doc); - }); + const aggregationPipeline = getAggregationPipeline({ project: { $in: fetch } }); + const cursor = db.collection('docsets').aggregate(aggregationPipeline); + const docsets = (await cursor.toArray()) as ReposBranchesDocument[]; + docsets.forEach((doc: ReposBranchesDocument) => { + // TODO: store in cache + internals[doc['project']] = doc; + res.push(doc); + }); return res; } catch (e) { console.error(`Error while getting associated repo branches: ${e}`); @@ -80,20 +114,20 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi // get from DB if not cached try { const db = await pool(); - const query = { - project, - }; + const matchCondition = { project }; if (branch) { - query['branches'] = { - $elemMatch: { gitBranchName: branch }, - }; + matchCondition['branches'] = { $elemMatch: { gitBranchName: branch } }; } - const res = (await db.collection('repos_branches').findOne(query)) as unknown as ReposBranchesDocument; + const aggregationPipeline = getAggregationPipeline(matchCondition); + + const cursor = db.collection('docsets').aggregate(aggregationPipeline); + const res = (await cursor.toArray()) as unknown as ReposBranchesDocument[]; + // if not already set, set cache value for repo_branches if (!internals[project]) { - internals[project] = res; + internals[project] = res[0]; } - return res; + return res[0]; } catch (e) { console.error(`Error while getting repo branches entry: ${e}`); throw e; diff --git a/modules/persistence/tests/data/docsets.json b/modules/persistence/tests/data/docsets.json new file mode 100644 index 000000000..0d12420db --- /dev/null +++ b/modules/persistence/tests/data/docsets.json @@ -0,0 +1,63 @@ +[ + { + "_id": "5fc999cm3f17b4e8917e9494", + "bucket": { + "regression": "docs-atlas-stg", + "dev": "docs-atlas-dev", + "stg": "docs-atlas-stg", + "prd": "docs-atlas-prd", + "dotcomstg": "docs-atlas-dotcomstg", + "dotcomprd": 
"docs-atlas-dotcomprd" + }, + "url": { + "regression": "https://docs-atlas-integration.mongodb.com", + "dev": "https://docs-atlas-staging.mongodb.com", + "stg": "https://docs-atlas-staging.mongodb.com", + "prd": "https://docs.atlas.mongodb.com", + "dotcomprd": "http://mongodb.com/", + "dotcomstg": "https://mongodbcom-cdn.website.staging.corp.mongodb.com/" + }, + "prefix": { + "stg": "", + "prd": "", + "dotcomstg": "docs/atlas", + "dotcomprd": "docs/atlas" + }, + "project": "cloud-docs", + "search": { + "categoryName": "atlas", + "categoryTitle": "Atlas" + }, + "repos": ["5fc999ce3f17b4e8917e0494"] + }, + { + "_id": "6243aa3f0bae3635a59a1850", + "bucket": { + "regression": "docs-mongodb-org-stg", + "dev": "docs-mongodb-org-dev", + "stg": "docs-mongodb-org-stg", + "prd": "docs-mongodb-org-prd", + "dotcomstg": "docs-atlas-dotcomstg", + "dotcomprd": "docs-atlas-dotcomprd" + }, + "url": { + "regression": "https://docs-mongodbcom-integration.corp.mongodb.com", + "dev": "https://docs-mongodborg-staging.corp.mongodb.com", + "stg": "https://docs-mongodborg-staging.corp.mongodb.com", + "prd": "https://docs.mongodb.com", + "dotcomprd": "https://www.mongodb.com/", + "dotcomstg": "https://mongodbcom-cdn.website.staging.corp.mongodb.com/" + }, + "prefix": { + "stg": "atlas/cli", + "prd": "atlas/cli", + "dotcomstg": "docs-qa/atlas/cli", + "dotcomprd": "docs/atlas/cli" + }, + "project": "atlas-cli", + "search": { + "categoryTitle": "Atlas CLI" + }, + "repos": ["6243aa3f0aae3635a59a1150"] + } +] diff --git a/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap b/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap index 0e3417793..9c699ee35 100644 --- a/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap +++ b/modules/persistence/tests/metadata/__snapshots__/associated_products.test.ts.snap @@ -2,7 +2,6 @@ exports[`associated_products module getAllAssociatedRepoBranchesEntries should get all 
repo branches info for specified associated products 1`] = ` Object { - "_id": "6243aa3f0aae3635a59a1150", "branches": Array [ Object { "active": true, @@ -2766,7 +2765,6 @@ Array [ exports[`associated_products module getRepoBranchesEntry should query repo branches for project and branch 1`] = ` Object { - "_id": "6243aa3f0aae3635a59a1150", "branches": Array [ Object { "active": true, diff --git a/modules/persistence/tests/utils.ts b/modules/persistence/tests/utils.ts index bc9d8861f..a86e0e11b 100644 --- a/modules/persistence/tests/utils.ts +++ b/modules/persistence/tests/utils.ts @@ -7,6 +7,7 @@ import { Db, MongoClient, ObjectId } from 'mongodb'; import metadata from './data/metadata.json'; import repoBranches from './data/repos_branches.json'; +import docsets from './data/docsets.json'; /** * mocks a db with test data in ./data collection @@ -23,6 +24,7 @@ export const setMockDB = async (dbName: string = new ObjectId().toString()): Pro const connection = await MongoClient.connect(process.env.MONGO_URL || 'test'); const mockDb = connection.db(dbName); await mockDb.collection('repos_branches').insertMany(repoBranches as unknown[] as Document[]); + await mockDb.collection('docsets').insertMany(docsets as unknown[] as Document[]); await mockDb.collection('metadata').insertMany(metadata as unknown[] as Document[]); return [mockDb, connection]; } catch (e) { diff --git a/serverless.yml b/serverless.yml index c8ae695d3..0ac22c91d 100644 --- a/serverless.yml +++ b/serverless.yml @@ -62,6 +62,7 @@ custom: jobCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/job/queue} entitlementCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/user/entitlements} repoBranchesCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/repo} + docsetsCollection: ${ssm:/env/${self:provider.stage}/docs/worker_pool/atlas/collections/docsets} githubSecret: 
${ssm:/env/${self:provider.stage}/docs/worker_pool/github/webhook/secret} githubDeletionSecret: ${ssm:/env/${self:provider.stage}/docs/worker_pool/github/webhook/deletionSecret} githubBotPW: ${ssm:/env/${self:provider.stage}/docs/worker_pool/github/bot/password} @@ -114,7 +115,8 @@ webhook-env-core: &webhook-env-core GITHUB_SECRET: ${self:custom.githubSecret} GITHUB_DELETION_SECRET: ${self:custom.githubDeletionSecret} GITHUB_BOT_PASSWORD: ${self:custom.githubBotPW} - REPO_BRANCHES_COL_NAME: ${self:custom.repoBranchesCollection} + REPO_BRANCHES_COL_NAME: ${self:custom.repoBranchesCollection} + DOCSETS_COL_NAME: ${self:custom.docsetsCollection} SLACK_SECRET: ${self:custom.slackSecret} SLACK_TOKEN: ${self:custom.slackAuthToken} SNOOTY_SECRET: ${self:custom.snootySecret} diff --git a/src/app.ts b/src/app.ts index 714918d2b..8edcd8023 100644 --- a/src/app.ts +++ b/src/app.ts @@ -12,6 +12,7 @@ import * as mongodb from 'mongodb'; import { FileSystemServices } from './services/fileServices'; import { JobValidator } from './job/jobValidator'; import { RepoBranchesRepository } from './repositories/repoBranchesRepository'; +import { DocsetsRepository } from './repositories/docsetsRepository'; let db: mongodb.Db; let client: mongodb.MongoClient; @@ -30,6 +31,7 @@ let repoConnector: GitHubConnector; let jobHandlerFactory: JobHandlerFactory; let jobManager: JobManager; let repoBranchesRepo: RepoBranchesRepository; +let docsetsRepo: DocsetsRepository; let ssoConnector: ISSOConnector; async function init(): Promise { @@ -48,7 +50,8 @@ async function init(): Promise { ssmConnector = new ParameterStoreConnector(); repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger); repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger); - jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo); + docsetsRepo = new DocsetsRepository(db, c, consoleLogger); + jobValidator = new JobValidator(fileSystemServices, 
repoEntitlementRepository, repoBranchesRepo, docsetsRepo); ssoConnector = new OktaConnector(c, consoleLogger); cdnConnector = new K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector); repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger); @@ -64,6 +67,7 @@ async function init(): Promise { fileSystemServices, hybridJobLogger, repoBranchesRepo, + docsetsRepo, repoEntitlementRepository ); jobManager.start().catch((err) => { diff --git a/src/enhanced/job/enhancedJobHandlerFactory.ts b/src/enhanced/job/enhancedJobHandlerFactory.ts index 845631246..bd923fe99 100644 --- a/src/enhanced/job/enhancedJobHandlerFactory.ts +++ b/src/enhanced/job/enhancedJobHandlerFactory.ts @@ -17,6 +17,7 @@ import { EnhancedProductionJobHandler, EnhancedRegressionJobHandler, } from './enhancedJobHandlers'; +import { DocsetsRepository } from '../../repositories/docsetsRepository'; const enhancedJobHandlerMap = { githubPush: EnhancedStagingJobHandler, @@ -37,6 +38,7 @@ export class EnhancedJobHandlerFactory { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ): JobHandler { const jt = job.payload?.jobType; @@ -53,6 +55,7 @@ export class EnhancedJobHandlerFactory { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); } diff --git a/src/enhanced/job/enhancedJobHandlers.ts b/src/enhanced/job/enhancedJobHandlers.ts index db61af6a9..cdce4930f 100644 --- a/src/enhanced/job/enhancedJobHandlers.ts +++ b/src/enhanced/job/enhancedJobHandlers.ts @@ -10,7 +10,7 @@ import { StagingJobHandler } from '../../job/stagingJobHandler'; * @param this reference to current object */ async function setEnvironmentVariablesEnhanced(this: JobHandler) { - const repo_info = await this._repoBranchesRepo.getRepoBranchesByRepoName(this.currJob.payload.repoName); + const repo_info = await 
this._docsetsRepo.getRepoBranchesByRepoName(this.currJob.payload.repoName); let env = this._config.get('env'); this.logger.info( diff --git a/src/enhanced/utils/job/handle-job.ts b/src/enhanced/utils/job/handle-job.ts index c3bf34fd6..02fe423fc 100644 --- a/src/enhanced/utils/job/handle-job.ts +++ b/src/enhanced/utils/job/handle-job.ts @@ -11,6 +11,7 @@ import * as mongodb from 'mongodb'; import { FileSystemServices } from '../../../services/fileServices'; import { JobValidator } from '../../../job/jobValidator'; import { RepoBranchesRepository } from '../../../repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../../repositories/docsetsRepository'; import { ISSOConnector, OktaConnector } from '../../../services/sso'; import { EnhancedJobHandlerFactory } from '../../job/enhancedJobHandlerFactory'; @@ -27,6 +28,7 @@ let repoConnector: GitHubConnector; let jobHandlerFactory: JobHandlerFactory; let jobManager: JobManager; let repoBranchesRepo: RepoBranchesRepository; +let docsetsRepo: DocsetsRepository; let ssmConnector: ParameterStoreConnector; let ssoConnector: ISSOConnector; @@ -40,7 +42,8 @@ export async function handleJob(jobId: string, db: mongodb.Db) { ssmConnector = new ParameterStoreConnector(); repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger); repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger); - jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo); + docsetsRepo = new DocsetsRepository(db, c, consoleLogger); + jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo, docsetsRepo); ssoConnector = new OktaConnector(c, consoleLogger); cdnConnector = new K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector); repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger); @@ -57,6 +60,7 @@ export async function handleJob(jobId: string, db: mongodb.Db) { 
fileSystemServices, hybridJobLogger, repoBranchesRepo, + docsetsRepo, repoEntitlementRepository ); diff --git a/src/job/jobHandler.ts b/src/job/jobHandler.ts index baefa3b83..aa5fbbfaa 100644 --- a/src/job/jobHandler.ts +++ b/src/job/jobHandler.ts @@ -11,6 +11,7 @@ import { AutoBuilderError, InvalidJobError, JobStoppedError, PublishError } from import { IConfig } from 'config'; import { IJobValidator } from './jobValidator'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; require('fs'); export abstract class JobHandler { @@ -55,6 +56,7 @@ export abstract class JobHandler { protected name: string; protected _repoBranchesRepo: RepoBranchesRepository; + protected _docsetsRepo: DocsetsRepository; protected _repoEntitlementsRepo: RepoEntitlementsRepository; constructor( @@ -68,6 +70,7 @@ export abstract class JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { this._commandExecutor = commandExecutor; @@ -81,6 +84,7 @@ export abstract class JobHandler { this._config = config; this._validator = validator; this._repoBranchesRepo = repoBranchesRepo; + this._docsetsRepo = docsetsRepo; this._repoEntitlementsRepo = repoEntitlementsRepo; } @@ -432,7 +436,7 @@ export abstract class JobHandler { } protected async setEnvironmentVariables(): Promise { - const repo_info = await this._repoBranchesRepo.getRepoBranchesByRepoName(this._currJob.payload.repoName); + const repo_info = await this._docsetsRepo.getRepoBranchesByRepoName(this._currJob.payload.repoName); let env = this._config.get('env'); this._logger.info( this._currJob._id, diff --git a/src/job/jobManager.ts b/src/job/jobManager.ts index f7e3216e1..4d579cc56 100644 --- a/src/job/jobManager.ts +++ b/src/job/jobManager.ts @@ -14,6 +14,7 @@ import { JobRepository } from 
'../repositories/jobRepository'; import { IFileSystemServices } from '../services/fileServices'; import { IConfig } from 'config'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; export const jobHandlerMap = { @@ -35,6 +36,7 @@ export class JobHandlerFactory { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ): JobHandler { const jt = job.payload?.jobType; @@ -50,6 +52,7 @@ export class JobHandlerFactory { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); } @@ -70,6 +73,7 @@ export class JobManager { private _jobHandlerFactory: JobHandlerFactory; private _jobCommandExecutor: IJobCommandExecutor; private _repoBranchesRepo: RepoBranchesRepository; + private _docsetsRepo: DocsetsRepository; private _repoEntitlementsRepo: RepoEntitlementsRepository; constructor( @@ -83,6 +87,7 @@ export class JobManager { fileSystemServices: IFileSystemServices, logger: IJobRepoLogger, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { this._jobRepository = jobRepository; @@ -97,6 +102,7 @@ export class JobManager { this._jobHandlerFactory = jobHandlerFactory; this._jobCommandExecutor = jobCommandExecutor; this._repoBranchesRepo = repoBranchesRepo; + this._docsetsRepo = docsetsRepo; this._repoEntitlementsRepo = repoEntitlementsRepo; } @@ -163,6 +169,7 @@ export class JobManager { this._logger, this._jobValidator, this._repoBranchesRepo, + this._docsetsRepo, this._repoEntitlementsRepo ); diff --git a/src/job/jobValidator.ts b/src/job/jobValidator.ts index eca2687bc..e1f266c99 100644 --- a/src/job/jobValidator.ts +++ b/src/job/jobValidator.ts @@ -4,6 +4,7 @@ import type { 
Job } from '../entities/job'; import { IFileSystemServices } from '../services/fileServices'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; export interface IJobValidator { throwIfJobInvalid(job: Job): Promise; @@ -16,14 +17,17 @@ export class JobValidator implements IJobValidator { _fileSystemService: IFileSystemServices; _repoEntitlementRepository: RepoEntitlementsRepository; _repoBranchesRepository: RepoBranchesRepository; + _docsetsRepository: DocsetsRepository; constructor( fileSystemService: IFileSystemServices, repoEntitlementRepository: RepoEntitlementsRepository, - repoBranchesRepository: RepoBranchesRepository + repoBranchesRepository: RepoBranchesRepository, + docsetsRepository: DocsetsRepository ) { this._fileSystemService = fileSystemService; this._repoEntitlementRepository = repoEntitlementRepository; this._repoBranchesRepository = repoBranchesRepository; + this._docsetsRepository = docsetsRepository; } async throwIfUserNotEntitled(job: Job): Promise { @@ -34,7 +38,7 @@ export class JobValidator implements IJobValidator { } async throwIfBranchNotConfigured(job: Job): Promise { - job.payload.repoBranches = await this._repoBranchesRepository.getRepoBranchesByRepoName(job.payload.repoName); + job.payload.repoBranches = await this._docsetsRepository.getRepoBranchesByRepoName(job.payload.repoName); if (!job.payload?.repoBranches) { throw new AuthorizationError(`repoBranches not found for ${job.payload.repoName}`); } diff --git a/src/job/manifestJobHandler.ts b/src/job/manifestJobHandler.ts index 2befbfde7..2f1b99602 100644 --- a/src/job/manifestJobHandler.ts +++ b/src/job/manifestJobHandler.ts @@ -14,6 +14,7 @@ import { IJobValidator } from './jobValidator'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { InvalidJobError } 
from '../errors/errors'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; // TODO: Move this to a generic util and out of this job file export const joinUrlAndPrefix = (url: string, prefix: string) => { @@ -37,6 +38,7 @@ export class ManifestJobHandler extends JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -50,6 +52,7 @@ export class ManifestJobHandler extends JobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Manifest'; diff --git a/src/job/productionJobHandler.ts b/src/job/productionJobHandler.ts index f9d434955..bbe3ce770 100644 --- a/src/job/productionJobHandler.ts +++ b/src/job/productionJobHandler.ts @@ -1,6 +1,7 @@ import { IConfig } from 'config'; import type { Job } from '../entities/job'; import { InvalidJobError } from '../errors/errors'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; import { JobRepository } from '../repositories/jobRepository'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; @@ -25,6 +26,7 @@ export class ProductionJobHandler extends JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -38,6 +40,7 @@ export class ProductionJobHandler extends JobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Production'; diff --git a/src/job/regressionJobHandler.ts b/src/job/regressionJobHandler.ts index bc4efb33d..951ab505a 100644 --- a/src/job/regressionJobHandler.ts +++ 
b/src/job/regressionJobHandler.ts @@ -1,5 +1,6 @@ import { IConfig } from 'config'; import type { Job } from '../entities/job'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; import { JobRepository } from '../repositories/jobRepository'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; @@ -23,6 +24,7 @@ export class RegressionJobHandler extends ProductionJobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -36,6 +38,7 @@ export class RegressionJobHandler extends ProductionJobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Regression'; diff --git a/src/job/stagingJobHandler.ts b/src/job/stagingJobHandler.ts index e77000f52..0f9680c8d 100644 --- a/src/job/stagingJobHandler.ts +++ b/src/job/stagingJobHandler.ts @@ -10,6 +10,7 @@ import { IRepoConnector } from '../services/repo'; import { IJobValidator } from './jobValidator'; import { RepoBranchesRepository } from '../repositories/repoBranchesRepository'; import { RepoEntitlementsRepository } from '../repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../repositories/docsetsRepository'; export class StagingJobHandler extends JobHandler { constructor( @@ -23,6 +24,7 @@ export class StagingJobHandler extends JobHandler { logger: IJobRepoLogger, validator: IJobValidator, repoBranchesRepo: RepoBranchesRepository, + docsetsRepo: DocsetsRepository, repoEntitlementsRepo: RepoEntitlementsRepository ) { super( @@ -36,6 +38,7 @@ export class StagingJobHandler extends JobHandler { logger, validator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepo ); this.name = 'Staging'; diff --git a/src/onDemandApp.ts b/src/onDemandApp.ts index 837df350e..1174fc9fc 100644 
--- a/src/onDemandApp.ts +++ b/src/onDemandApp.ts @@ -12,6 +12,7 @@ import { FileSystemServices } from './services/fileServices'; import { JobValidator } from './job/jobValidator'; import { RepoBranchesRepository } from './repositories/repoBranchesRepository'; import { ISSOConnector, OktaConnector } from './services/sso'; +import { DocsetsRepository } from './repositories/docsetsRepository'; let db: mongodb.Db; let client: mongodb.MongoClient; @@ -22,13 +23,13 @@ let githubCommandExecutor: GithubCommandExecutor; let jobRepository: JobRepository; let hybridJobLogger: HybridJobLogger; let repoEntitlementRepository: RepoEntitlementsRepository; -let repoBranchesRepository: RepoBranchesRepository; let jobValidator: JobValidator; let cdnConnector: K8SCDNConnector; let repoConnector: GitHubConnector; let jobHandlerFactory: JobHandlerFactory; let jobManager: JobManager; let repoBranchesRepo: RepoBranchesRepository; +let docsetsRepo: DocsetsRepository; let ssmConnector: ParameterStoreConnector; let ssoConnector: ISSOConnector; @@ -49,7 +50,8 @@ async function init(): Promise { ssmConnector = new ParameterStoreConnector(); repoEntitlementRepository = new RepoEntitlementsRepository(db, c, consoleLogger); repoBranchesRepo = new RepoBranchesRepository(db, c, consoleLogger); - jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo); + docsetsRepo = new DocsetsRepository(db, c, consoleLogger); + jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepo, docsetsRepo); ssoConnector = new OktaConnector(c, consoleLogger); cdnConnector = new K8SCDNConnector(c, consoleLogger, ssmConnector, ssoConnector); repoConnector = new GitHubConnector(githubCommandExecutor, c, fileSystemServices, hybridJobLogger); @@ -65,6 +67,7 @@ async function init(): Promise { fileSystemServices, hybridJobLogger, repoBranchesRepo, + docsetsRepo, repoEntitlementRepository ); try { diff --git a/src/repositories/baseRepository.ts 
b/src/repositories/baseRepository.ts index 236c2fd71..750a1f83f 100644 --- a/src/repositories/baseRepository.ts +++ b/src/repositories/baseRepository.ts @@ -97,6 +97,27 @@ export abstract class BaseRepository { } } + protected async aggregate( + aggregationPipeline: any, + errorMsg: string, + options: mongodb.AggregateOptions = {} + ): Promise { + try { + const result = await this.promiseTimeoutS( + this._config.get('MONGO_TIMEOUT_S'), + this._collection.aggregate(aggregationPipeline, options), + errorMsg + ); + return result; + } catch (error) { + this._logger.error( + `${this._repoName}:aggregate`, + `Failed to query with aggregation pipeline (${JSON.stringify(aggregationPipeline)})\nerror: ${error}` + ); + throw error; + } + } + protected async updateMany(query: any, update: any, errorMsg: string): Promise { try { const updateResult = await this.promiseTimeoutS( diff --git a/src/repositories/docSetRepository.ts b/src/repositories/docSetRepository.ts deleted file mode 100644 index a2a2e25e8..000000000 --- a/src/repositories/docSetRepository.ts +++ /dev/null @@ -1,45 +0,0 @@ -import { IConfig } from 'config'; -import { Db } from 'mongodb'; -import { ILogger } from '../services/logger'; -import { BaseRepository } from './baseRepository'; - -const docSetCollectionName = process.env.DOCS_SET_COLLECTION_NAME || 'docset'; - -export class DocSetRepository extends BaseRepository { - constructor(db: Db, config: IConfig, logger: ILogger) { - super(config, logger, 'DocSetRepository', db.collection(docSetCollectionName)); - } - - /** - * Compares the project path from a monorepo push event, and compares it with - * what is configured in the docset entry in Atlas. - * @param path The project path where the snooty.toml file exists from the monorepo. - * This path will reflect the current project path from a given commit. - * @param projectName The project name for the docset entry.
- * @returns A boolean representing whether or not the configured docset entry snooty_toml path - * matches the path found in GitHub. - */ - async checkSnootyTomlPath(path: string, projectName: string) { - const query = { project: projectName }; - try { - const docSetObject = await this.findOne( - query, - `Mongo Timeout Error: Timedout while retrieving repos entry for ${path}` - ); - - if (!docSetObject) { - console.warn(`WARNING: The docset does not exist for the following project: ${projectName} \n path: ${path}`); - - return false; - } - - return docSetObject.directories.snooty_toml === path; - } catch (error) { - console.warn( - `WARNING: Error occurred when retrieving project path for ${projectName}. The following path was provided: ${path}`, - error - ); - return false; - } - } -} diff --git a/src/repositories/docsetsRepository.ts b/src/repositories/docsetsRepository.ts new file mode 100644 index 000000000..bd673fba8 --- /dev/null +++ b/src/repositories/docsetsRepository.ts @@ -0,0 +1,120 @@ +import { Db } from 'mongodb'; +import { BaseRepository } from './baseRepository'; +import { ILogger } from '../services/logger'; +import { IConfig } from 'config'; + +const docsetsCollectionName = process.env.DOCSETS_COL_NAME || 'docsets'; +export class DocsetsRepository extends BaseRepository { + constructor(db: Db, config: IConfig, logger: ILogger) { + super(config, logger, 'DocsetsRepository', db.collection(docsetsCollectionName)); + } + + private getAggregationPipeline( + matchConditionField: string, + matchConditionValue: string, + projection?: { [k: string]: number } + ) { + const DEFAULT_PROJECTIONS = { + _id: 0, + repos: 0, + repo: 0, + }; + + return [ + // Stage 1: Unwind the repos array to create multiple documents for each referenced repo + { + $unwind: '$repos', + }, + // Stage 2: Lookup to join with the repos_branches collection + { + $lookup: { + from: 'repos_branches', + localField: 'repos', + foreignField: '_id', + as: 'repo', + }, + }, + // Stage 3: 
Match documents based on given field + { + $match: { + [`repo.${matchConditionField}`]: matchConditionValue, + }, + }, + // Stage 4: Merge/flatten repo into docset + { + $replaceRoot: { newRoot: { $mergeObjects: [{ $arrayElemAt: ['$repo', 0] }, '$$ROOT'] } }, + }, + // Stage 5: Exclude fields + { + $project: projection || DEFAULT_PROJECTIONS, + }, + ]; + } + + async getProjectByRepoName(repoName: string): Promise { + const projection = { project: 1 }; + const aggregationPipeline = this.getAggregationPipeline('repoName', repoName, projection); + const cursor = await this.aggregate(aggregationPipeline, `Error while getting project by repo name ${repoName}`); + const res = await cursor.toArray(); + if (!res.length) { + const msg = `DocsetsRepository.getProjectByRepoName - Could not find project by repoName: ${repoName}`; + this._logger.info(this._repoName, msg); + } + return res[0]?.project; + } + + async getRepo(repoName: string): Promise { + const aggregationPipeline = this.getAggregationPipeline('repoName', repoName); + const cursor = await this.aggregate(aggregationPipeline, `Error while fetching repo by repo name ${repoName}`); + const res = await cursor.toArray(); + if (!res.length) { + const msg = `DocsetsRepository.getRepo - Could not find repo by repoName: ${repoName}`; + this._logger.info(this._repoName, msg); + } + return res[0]; + } + + async getRepoBranchesByRepoName(repoName: string): Promise { + const aggregationPipeline = this.getAggregationPipeline('repoName', repoName); + const cursor = await this.aggregate(aggregationPipeline, `Error while fetching repo by repo name ${repoName}`); + const res = await cursor.toArray(); + if (res.length && res[0]?.bucket && res[0]?.url) { + return res[0]; + } + return { status: 'failure' }; + } + + /** + * Compares the project path from a monorepo push event, and compares it with + * what is configured in the docset entry in Atlas. + * @param path The project path where the snooty.toml file exists from the monorepo. 
+ * This path will reflect the current project path from a given commit. + * @param projectName The project name for the docset entry. + * @returns A boolean representing whether or not the configured docset entry snooty_toml path + * matches the path found in GitHub. + */ + // Warning: Directories field might be changing locations in schema. This method is unused and validity should be checked before usage. + async checkSnootyTomlPath(path: string, projectName: string) { + const query = { project: projectName }; + try { + const docsetObject = await this.findOne( + query, + `Mongo Timeout Error: Timedout while retrieving repos entry for ${path}` + ); + + if (!docsetObject) { + console.warn(`WARNING: The docset does not exist for the following project: ${projectName} \n path: ${path}`); + + return false; + } + + return docsetObject.directories.snooty_toml === path; + } catch (error) { + console.warn( + `WARNING: Error occurred when retrieving project path for ${projectName}. The following path was provided: ${path}`, + error + ); + return false; + } + } +} diff --git a/src/repositories/repoBranchesRepository.ts b/src/repositories/repoBranchesRepository.ts index 0d9cf4a67..d08bca2e7 100644 --- a/src/repositories/repoBranchesRepository.ts +++ b/src/repositories/repoBranchesRepository.ts @@ -8,37 +8,6 @@ export class RepoBranchesRepository extends BaseRepository { super(config, logger, 'RepoBranchesRepository', db.collection(config.get('repoBranchesCollection'))); } - async getConfiguredBranchesByGithubRepoName(repoName: string): Promise { - const query = { repoName: repoName }; - const reposObject = await this.findOne( - query, - `Mongo Timeout Error: Timedout while retrieving repos entry for ${repoName}` - ); - if (reposObject?.branches) { - return { - branches: reposObject.branches, - repoName: reposObject.repoName, - status: 'success', - }; - } else { - return { status: 'failure' }; - } - } - - async getProjectByRepoName(repoName: string) { - const query = { 
repoName }; - const projection = { _id: 0, project: 1 }; - const res = await this.findOne(query, `Error while getting project by repo name ${repoName}`, { projection }); - return res.project; - } - - async getRepo(repoName: string): Promise { - const query = { repoName: repoName }; - const repo = await this.findOne(query, `Mongo Timeout Error: Timedout while retrieving branches for ${repoName}`); - // if user has specific entitlements - return repo; - } - async getRepoBranches(repoName: string): Promise { const query = { repoName: repoName }; const repo = await this.findOne(query, `Mongo Timeout Error: Timedout while retrieving branches for ${repoName}`); @@ -46,20 +15,6 @@ export class RepoBranchesRepository extends BaseRepository { return repo?.['branches'] ?? []; } - async getRepoBranchesByRepoName(repoName: string): Promise { - const query = { repoName: repoName }; - const repoDetails = await this.findOne( - query, - `Mongo Timeout Error: Timedout while retrieving repo information for ${repoName}` - ); - - if (repoDetails?.bucket && repoDetails?.url) { - return repoDetails; - } else { - return { status: 'failure' }; - } - } - async getRepoBranchAliases(repoName: string, branchName: string): Promise { const returnObject = { status: 'failure' }; const aliasArray = await this._collection diff --git a/tests/data/data.ts b/tests/data/data.ts index d403e8cad..80aac8875 100644 --- a/tests/data/data.ts +++ b/tests/data/data.ts @@ -447,4 +447,44 @@ export class TestDataProvider { } return retVal; } + + static getAggregationPipeline( + matchConditionField: string, + matchConditionValue: string, + projection?: { [k: string]: number } + ) { + return [ + // Stage 1: Unwind the repos array to create multiple documents for each referenced repo + { + $unwind: '$repos', + }, + // Stage 2: Lookup to join with the repos_branches collection + { + $lookup: { + from: 'repos_branches', + localField: 'repos', + foreignField: '_id', + as: 'repo', + }, + }, + // Stage 3: Match documents 
based on given field + { + $match: { + [`repo.${matchConditionField}`]: matchConditionValue, + }, + }, + // Stage 4: Merge/flatten repo into docset + { + $replaceRoot: { newRoot: { $mergeObjects: [{ $arrayElemAt: ['$repo', 0] }, '$$ROOT'] } }, + }, + // Stage 5: Exclude fields + { + $project: projection || { + _id: 0, + repos: 0, + repo: 0, + }, + }, + ]; + } } diff --git a/tests/mongo/testDBManager.ts b/tests/mongo/testDBManager.ts index 4ca7e1334..4fda8a33a 100644 --- a/tests/mongo/testDBManager.ts +++ b/tests/mongo/testDBManager.ts @@ -16,6 +16,7 @@ export class TestDBManager { process.env.DB_NAME = 'jest'; process.env.JOB_QUEUE_COL_NAME = 'queue'; process.env.USER_ENTITLEMENT_COL_NAME = 'entitlements'; + process.env.DOCSETS_COL_NAME = 'docsets'; } async start() { @@ -29,6 +30,7 @@ export class TestDBManager { this.db = this.client.db(process.env.DB_NAME); await this.db.createCollection(process.env.JOB_QUEUE_COL_NAME); await this.db.createCollection(process.env.USER_ENTITLEMENT_COL_NAME); + await this.db.createCollection(process.env.DOCSETS_COL_NAME); } async stop() { diff --git a/tests/unit/api/github.test.ts b/tests/unit/api/github.test.ts index 8be7d926a..e40627926 100644 --- a/tests/unit/api/github.test.ts +++ b/tests/unit/api/github.test.ts @@ -5,9 +5,9 @@ import { createMockAPIGatewayEvent } from '../../utils/createMockAPIGatewayEvent const DELETION_SECRET = 'GH_DELETION_SECRET'; -// Mock RepoBranchesRepository so that we can mock which data to return. -jest.mock('../../../src/repositories/repoBranchesRepository', () => ({ - RepoBranchesRepository: jest.fn().mockImplementation(() => ({ +// Mock DocsetsRepository so that we can mock which data to return. 
+jest.mock('../../../src/repositories/docsetsRepository', () => ({ + DocsetsRepository: jest.fn().mockImplementation(() => ({ getProjectByRepoName: jest.fn().mockImplementation((repoName) => repoName), })), })); diff --git a/tests/unit/api/slack.test.ts b/tests/unit/api/slack.test.ts index f825c4dbd..7962e938c 100644 --- a/tests/unit/api/slack.test.ts +++ b/tests/unit/api/slack.test.ts @@ -1,4 +1,5 @@ import { getDeployableJobs } from '../../../api/controllers/v1/slack'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; const mockRepoInfo = { @@ -36,13 +37,19 @@ const mockBranchObject = { // Mock RepoBranchesRepository so that we can mock which data to return. jest.mock('../../../src/repositories/repoBranchesRepository', () => ({ RepoBranchesRepository: jest.fn().mockImplementation(() => ({ - getRepo: jest.fn().mockImplementation(() => mockRepoInfo), getRepoBranchAliases: jest .fn() .mockImplementation((repoName, branchName) => mockBranchObject[repoName][branchName]), })), })); +// Mock DocsetsRepository so that we can mock which data to return. +jest.mock('../../../src/repositories/docsetsRepository', () => ({ + DocsetsRepository: jest.fn().mockImplementation(() => ({ + getRepo: jest.fn().mockImplementation(() => mockRepoInfo), + })), +})); + jest.mock('config'); describe('Slack API Controller Tests', () => { @@ -50,15 +57,21 @@ describe('Slack API Controller Tests', () => { email: 'test.user@mongodb.com', github_username: 'test.user', }; - // We're mocking RepoBranchesRepository to avoid needing access to a database. We'll use mock data. + // We're mocking RepoBranchesRepository and DocsetsRepository to avoid needing access to a database. We'll use mock data. 
const mockRepoBranchRepository = new RepoBranchesRepository(null, null, null); + const mockDocsetsRepository = new DocsetsRepository(null, null, null); test('deployable jobs with the assigned urlSlug have primaryAlias set to true', async () => { const mockValues = { repo_option: [{ value: 'mongodb/docs/master' }], }; - const deployable = await getDeployableJobs(mockValues, mockEntitlement, mockRepoBranchRepository); + const deployable = await getDeployableJobs( + mockValues, + mockEntitlement, + mockRepoBranchRepository, + mockDocsetsRepository + ); expect(deployable).toHaveLength(2); const jobsWithPrimaryAlias = deployable.filter((job) => job.payload.primaryAlias); @@ -71,7 +84,12 @@ describe('Slack API Controller Tests', () => { repo_option: [{ value: 'mongodb/docs/v5.0' }], }; - const deployable = await getDeployableJobs(mockValues, mockEntitlement, mockRepoBranchRepository); + const deployable = await getDeployableJobs( + mockValues, + mockEntitlement, + mockRepoBranchRepository, + mockDocsetsRepository + ); expect(deployable).toHaveLength(3); const jobsWithPrimaryAlias = deployable.filter((job) => job.payload.primaryAlias); diff --git a/tests/unit/job/JobHandlerFactory.test.ts b/tests/unit/job/JobHandlerFactory.test.ts index d7d547a8f..faab594f7 100644 --- a/tests/unit/job/JobHandlerFactory.test.ts +++ b/tests/unit/job/JobHandlerFactory.test.ts @@ -15,6 +15,7 @@ import { ManifestJobHandler } from '../../../src/job/manifestJobHandler'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { IJobValidator } from '../../../src/job/jobValidator'; import { RepoEntitlementsRepository } from '../../../src/repositories/repoEntitlementsRepository'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; describe('JobHandlerFactory Tests', () => { let job: Job; @@ -27,6 +28,7 @@ describe('JobHandlerFactory Tests', () => { let logger: IJobRepoLogger; let jobHandlerFactory: JobHandlerFactory; let 
repoBranchesRepo: RepoBranchesRepository; + let docsetsRepo: DocsetsRepository; let jobValidator: IJobValidator; let repoEntitlementsRepository: RepoEntitlementsRepository; @@ -41,6 +43,7 @@ describe('JobHandlerFactory Tests', () => { logger = mockDeep(); jobHandlerFactory = new JobHandlerFactory(); repoBranchesRepo = mockDeep(); + docsetsRepo = mockDeep(); repoEntitlementsRepository = mockDeep(); }); @@ -62,6 +65,7 @@ describe('JobHandlerFactory Tests', () => { logger, jobValidator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepository ); }).toThrowError('Job type not supported'); @@ -87,6 +91,7 @@ describe('JobHandlerFactory Tests', () => { logger, jobValidator, repoBranchesRepo, + docsetsRepo, repoEntitlementsRepository ); expect(handler).toBeInstanceOf(m[jt]); diff --git a/tests/unit/job/jobValidator.test.ts b/tests/unit/job/jobValidator.test.ts index f84fd44a3..7db94a212 100644 --- a/tests/unit/job/jobValidator.test.ts +++ b/tests/unit/job/jobValidator.test.ts @@ -6,12 +6,14 @@ import { JobValidator } from '../../../src/job/jobValidator'; import { RepoEntitlementsRepository } from '../../../src/repositories/repoEntitlementsRepository'; import { TestDataProvider } from '../../data/data'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; let job: Job; let fileSystemServices: IFileSystemServices; let repoEntitlementRepository: RepoEntitlementsRepository; let jobValidator: JobValidator; let repoBranchesRepository: RepoBranchesRepository; +let docsetsRepository: DocsetsRepository; beforeEach(() => { // Deep copy buildJobDef is necessary because we modify job @@ -19,18 +21,27 @@ beforeEach(() => { fileSystemServices = mockDeep(); repoEntitlementRepository = mockDeep(); repoBranchesRepository = mockDeep(); - jobValidator = new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepository); + docsetsRepository = 
mockDeep(); + jobValidator = new JobValidator( + fileSystemServices, + repoEntitlementRepository, + repoBranchesRepository, + docsetsRepository + ); }); afterEach(() => { mockReset(repoEntitlementRepository); mockReset(fileSystemServices); - mockReset(repoEntitlementRepository); + mockReset(repoBranchesRepository); + mockReset(docsetsRepository); }); describe('JobValidator Tests', () => { test('Construct Job Factory', () => { - expect(new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepository)).toBeDefined(); + expect( + new JobValidator(fileSystemServices, repoEntitlementRepository, repoBranchesRepository, docsetsRepository) + ).toBeDefined(); }); test('invalid job type throws', async () => { diff --git a/tests/unit/job/productionJobHandler.test.ts b/tests/unit/job/productionJobHandler.test.ts index 00e5f4600..0e788b611 100644 --- a/tests/unit/job/productionJobHandler.test.ts +++ b/tests/unit/job/productionJobHandler.test.ts @@ -1,7 +1,7 @@ import { mockReset } from 'jest-mock-extended'; import { JobStatus } from '../../../src/entities/job'; import { TestDataProvider } from '../../data/data'; -import { getBuildJobDef, getManifestJobDef } from '../../data/jobDef'; +import { getBuildJobDef } from '../../data/jobDef'; import { JobHandlerTestHelper } from '../../utils/jobHandlerTestHelper'; describe('ProductionJobHandler Tests', () => { diff --git a/tests/unit/jobManager.test.ts b/tests/unit/jobManager.test.ts index 19f751a45..00925c5b7 100644 --- a/tests/unit/jobManager.test.ts +++ b/tests/unit/jobManager.test.ts @@ -12,6 +12,8 @@ import { IJobRepoLogger } from '../../src/services/logger'; import { IRepoConnector } from '../../src/services/repo'; import { getBuildJobDef } from '../data/jobDef'; import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../src/repositories/docsetsRepository'; +import { RepoEntitlementsRepository } from 
'../../src/repositories/repoEntitlementsRepository'; describe('JobManager Tests', () => { let job: Job; @@ -26,6 +28,8 @@ describe('JobManager Tests', () => { let jobManager: JobManager; let jobValidator: JobValidator; let repoBranchesRepo: RepoBranchesRepository; + let docsetsRepo: DocsetsRepository; + let repoEntitlementsRepo: RepoEntitlementsRepository; beforeEach(() => { jest.useFakeTimers('modern'); @@ -41,6 +45,8 @@ describe('JobManager Tests', () => { jobHandlerFactory = mockDeep(); jobValidator = mockDeep(); repoBranchesRepo = mockDeep(); + docsetsRepo = mockDeep(); + repoEntitlementsRepo = mockDeep(); jobManager = new JobManager( config, jobValidator, @@ -51,7 +57,9 @@ describe('JobManager Tests', () => { repoConnector, fileSystemServices, logger, - repoBranchesRepo + repoBranchesRepo, + docsetsRepo, + repoEntitlementsRepo ); }); diff --git a/tests/unit/repositories/docsetsRepository.test.ts b/tests/unit/repositories/docsetsRepository.test.ts new file mode 100644 index 000000000..9fa497604 --- /dev/null +++ b/tests/unit/repositories/docsetsRepository.test.ts @@ -0,0 +1,55 @@ +import { DBRepositoryHelper } from '../../utils/repositoryHelper'; +import { TestDataProvider } from '../../data/data'; +import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; + +describe('Docsets Repository Tests', () => { + let docsetsRepo: DocsetsRepository; + let dbRepoHelper: DBRepositoryHelper; + beforeEach(() => { + dbRepoHelper = new DBRepositoryHelper(); + docsetsRepo = dbRepoHelper.init('docsets', 'docsets', 'docsets'); + }); + + test('Construct Repo Entitlement Repository', () => { + expect(new DocsetsRepository(dbRepoHelper.db, dbRepoHelper.config, dbRepoHelper.logger)).toBeDefined(); + }); + + describe('Docsets Repository getRepoBranchesByRepoName Tests', () => { + test('getRepoBranchesByRepoName returns failure as result is undefined', async () => { + const testPipeline = TestDataProvider.getAggregationPipeline('repoName', 'test_repo'); + 
dbRepoHelper.collection.aggregate.mockReturnValueOnce({
+        toArray: () => [],
+      });
+      await expect(docsetsRepo.getRepoBranchesByRepoName('test_repo')).resolves.toEqual({ status: 'failure' });
+      expect(dbRepoHelper.collection.aggregate).toBeCalledTimes(1);
+      expect(dbRepoHelper.collection.aggregate).toBeCalledWith(testPipeline, {});
+    });
+
+    test('getRepoBranchesByRepoName is successful', async () => {
+      const testPipeline = TestDataProvider.getAggregationPipeline('repoName', 'test_repo');
+      dbRepoHelper.collection.aggregate.mockReturnValueOnce({
+        toArray: () => ({
+          bucket: {},
+          url: {},
+        }),
+      });
+      await docsetsRepo.getRepoBranchesByRepoName('test_repo');
+      expect(dbRepoHelper.collection.aggregate).toBeCalledTimes(1);
+      expect(dbRepoHelper.collection.aggregate).toBeCalledWith(testPipeline, {});
+    });
+
+    test('Update with completion status times out', async () => {
+      dbRepoHelper.config.get.calledWith('MONGO_TIMEOUT_S').mockReturnValueOnce(1);
+      dbRepoHelper.collection.aggregate.mockImplementationOnce(() => {
+        return new Promise((resolve, reject) => {
+          setTimeout(resolve, 5000, [[]]);
+        });
+      });
+      docsetsRepo.getRepoBranchesByRepoName('test_repo').catch((error) => {
+        expect(dbRepoHelper.logger.error).toBeCalledTimes(1);
+        expect(error.message).toContain(`Error while fetching repo by repo name test_repo`);
+      });
+      jest.runAllTimers();
+    });
+  });
+});
diff --git a/tests/unit/repositories/repoBranchesRepository.test.ts b/tests/unit/repositories/repoBranchesRepository.test.ts
deleted file mode 100644
index 5e4b8c137..000000000
--- a/tests/unit/repositories/repoBranchesRepository.test.ts
+++ /dev/null
@@ -1,52 +0,0 @@
-import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository';
-import { DBRepositoryHelper } from '../../utils/repositoryHelper';
-import { TestDataProvider } from '../../data/data';
-import { getBuildJobDef } from '../../data/jobDef';
-
-describe('Repo Branches Repository Tests', () => {
-  let repoBranchesRepo: 
RepoBranchesRepository; - let dbRepoHelper: DBRepositoryHelper; - beforeEach(() => { - dbRepoHelper = new DBRepositoryHelper(); - repoBranchesRepo = dbRepoHelper.init('repoBranches', 'repoBranchesCollection', 'testColl'); - }); - - test('Construct Repo Entitlement Repository', () => { - expect(new RepoBranchesRepository(dbRepoHelper.db, dbRepoHelper.config, dbRepoHelper.logger)).toBeDefined(); - }); - - describe('Repo Branches Repository getRepoBranchesByRepoName Tests', () => { - test('getRepoBranchesByRepoName returns failure as result is undefined', async () => { - const testData = TestDataProvider.getRepoBranchesByRepoName('test_repo'); - await expect(repoBranchesRepo.getRepoBranchesByRepoName('test_repo')).resolves.toEqual({ status: 'failure' }); - expect(dbRepoHelper.collection.findOne).toBeCalledTimes(1); - expect(dbRepoHelper.collection.findOne).toBeCalledWith(testData.query, {}); - }); - - test('getRepoBranchesByRepoName is successfull', async () => { - const job = getBuildJobDef(); - const testData = TestDataProvider.getRepoBranchesByRepoName('test_repo'); - job.payload.repoName = 'test_repo'; - dbRepoHelper.collection.findOne.mockReturnValueOnce(TestDataProvider.getRepoBranchesData(job)); - await repoBranchesRepo.getRepoBranchesByRepoName('test_repo'); - expect(dbRepoHelper.collection.findOne).toBeCalledTimes(1); - expect(dbRepoHelper.collection.findOne).toBeCalledWith(testData.query, {}); - }); - - test('Update with completion status timesout', async () => { - dbRepoHelper.config.get.calledWith('MONGO_TIMEOUT_S').mockReturnValueOnce(1); - dbRepoHelper.collection.findOne.mockImplementationOnce(() => { - return new Promise((resolve, reject) => { - setTimeout(resolve, 5000, 'one'); - }); - }); - repoBranchesRepo.getRepoBranchesByRepoName('test_repo').catch((error) => { - expect(dbRepoHelper.logger.error).toBeCalledTimes(1); - expect(error.message).toContain( - `Mongo Timeout Error: Timedout while retrieving repo information for test_repo` - ); - }); - 
jest.runAllTimers(); - }); - }); -}); diff --git a/tests/utils/jobHandlerTestHelper.ts b/tests/utils/jobHandlerTestHelper.ts index 9d9e871e9..2e5669f1d 100644 --- a/tests/utils/jobHandlerTestHelper.ts +++ b/tests/utils/jobHandlerTestHelper.ts @@ -7,6 +7,7 @@ import { StagingJobHandler } from '../../src/job/stagingJobHandler'; import { ManifestJobHandler } from '../../src/job/manifestJobHandler'; import { JobRepository } from '../../src/repositories/jobRepository'; import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../src/repositories/docsetsRepository'; import { RepoEntitlementsRepository } from '../../src/repositories/repoEntitlementsRepository'; import { ICDNConnector } from '../../src/services/cdn'; import { IJobCommandExecutor } from '../../src/services/commandExecutor'; @@ -33,6 +34,7 @@ export class JobHandlerTestHelper { jobHandler: ProductionJobHandler | StagingJobHandler | ManifestJobHandler; jobValidator: IJobValidator; repoBranchesRepo: RepoBranchesRepository; + docsetsRepo: DocsetsRepository; repoEntitlementsRepo: RepoEntitlementsRepository; lengthPrototype; handlerMapper = { @@ -58,6 +60,7 @@ export class JobHandlerTestHelper { this.logger = mockDeep(); this.jobValidator = mockDeep(); this.repoBranchesRepo = mockDeep(); + this.docsetsRepo = mockDeep(); this.repoEntitlementsRepo = mockDeep(); this.jobHandler = new this.handlerMapper[handlerName]( this.job, @@ -70,6 +73,7 @@ export class JobHandlerTestHelper { this.logger, this.jobValidator, this.repoBranchesRepo, + this.docsetsRepo, this.repoEntitlementsRepo ); return this.jobHandler; diff --git a/tests/utils/repositoryHelper.ts b/tests/utils/repositoryHelper.ts index 24b98a149..9dfbd1498 100644 --- a/tests/utils/repositoryHelper.ts +++ b/tests/utils/repositoryHelper.ts @@ -4,6 +4,7 @@ import { Db, FindCursor, FindOptions } from 'mongodb'; import { JobRepository } from '../../src/repositories/jobRepository'; import { 
RepoEntitlementsRepository } from '../../src/repositories/repoEntitlementsRepository'; import { RepoBranchesRepository } from '../../src/repositories/repoBranchesRepository'; +import { DocsetsRepository } from '../../src/repositories/docsetsRepository'; import { ILogger } from '../../src/services/logger'; export class DBRepositoryHelper { @@ -15,6 +16,7 @@ export class DBRepositoryHelper { find: (query: any, errorMsg: string, options?: FindOptions) => Promise; findOne: (query: any, errorMsg: string) => Promise; findOneAndUpdate: (query: any, update: any, options: any, errorMsg: string) => Promise; + aggregate: (pipeline: any, errorMsg: string, options: any) => Promise; collection: any; jobRepo: JobRepository; @@ -22,6 +24,7 @@ export class DBRepositoryHelper { job: JobRepository, repo: RepoEntitlementsRepository, repoBranches: RepoBranchesRepository, + docsets: DocsetsRepository, }; init(repoName, collectionConfigName, collectionName): any { @@ -35,12 +38,14 @@ export class DBRepositoryHelper { this.find = jest.fn(); this.findOne = jest.fn(); this.findOneAndUpdate = jest.fn(); + this.aggregate = jest.fn(); this.collection = { updateOne: this.updateOne, updateMany: this.updateMany, find: this.find, findOne: this.findOne, findOneAndUpdate: this.findOneAndUpdate, + aggregate: this.aggregate, }; this.config.get.calledWith(collectionConfigName).mockReturnValue(collectionName); this.db.collection.calledWith(collectionName).mockReturnValue(this.collection);