From 417c7fba0fc57a3745ff41c58568cd009f0e3173 Mon Sep 17 00:00:00 2001 From: Caesar Bell Date: Fri, 3 Nov 2023 14:05:04 -0400 Subject: [PATCH] :pencil: DOP-4098 updates comment (#931) * :pencil: DOP-4098 updates comment * :pencil: DOP-4098 updates comment * :label: DOP-4098 updates types to reflect change between repos_branches and docsets --------- Co-authored-by: Caesar Bell --- api/controllers/v1/github.ts | 4 ++-- api/controllers/v2/github.ts | 4 ++-- .../ToC/utils/prefixFromEnvironment.ts | 4 ++-- .../metadata/associated_products/index.ts | 23 ++++++++---------- .../services/metadata/repos_branches/index.ts | 24 ++++++++++++------- 5 files changed, 31 insertions(+), 28 deletions(-) diff --git a/api/controllers/v1/github.ts b/api/controllers/v1/github.ts index db741457f..b72f1d646 100644 --- a/api/controllers/v1/github.ts +++ b/api/controllers/v1/github.ts @@ -5,13 +5,13 @@ import { ConsoleLogger } from '../../../src/services/logger'; import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository'; import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github'; import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; -import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/repos_branches'; +import { ReposBranchesDocsetsDocument } from '../../../modules/persistence/src/services/metadata/repos_branches'; async function prepGithubPushPayload( githubEvent: any, repoBranchesRepository: RepoBranchesRepository, prefix: string, - repoInfo: ReposBranchesDocument + repoInfo: ReposBranchesDocsetsDocument ) { const branch_name = githubEvent.ref.split('/')[2]; const branch_info = await repoBranchesRepository.getRepoBranchAliases( diff --git a/api/controllers/v2/github.ts b/api/controllers/v2/github.ts index a6e37fea6..ccb205f6e 100644 --- a/api/controllers/v2/github.ts +++ b/api/controllers/v2/github.ts @@ -11,14 +11,14 @@ import { markBuildArtifactsForDeletion, 
validateJsonWebhook } from '../../handle import { DocsetsRepository } from '../../../src/repositories/docsetsRepository'; import { getMonorepoPaths } from '../../../src/monorepo'; import { getUpdatedFilePaths } from '../../../src/monorepo/utils/path-utils'; -import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/associated_products'; +import { ReposBranchesDocsetsDocument } from '../../../modules/persistence/src/services/metadata/repos_branches'; import { MONOREPO_NAME } from '../../../src/monorepo/utils/monorepo-constants'; async function prepGithubPushPayload( githubEvent: PushEvent, repoBranchesRepository: RepoBranchesRepository, prefix: string, - repoInfo: ReposBranchesDocument + repoInfo: ReposBranchesDocsetsDocument ): Promise> { const branch_name = githubEvent.ref.split('/')[2]; const branch_info = await repoBranchesRepository.getRepoBranchAliases( diff --git a/modules/persistence/src/services/metadata/ToC/utils/prefixFromEnvironment.ts b/modules/persistence/src/services/metadata/ToC/utils/prefixFromEnvironment.ts index c43b01449..accd3599b 100644 --- a/modules/persistence/src/services/metadata/ToC/utils/prefixFromEnvironment.ts +++ b/modules/persistence/src/services/metadata/ToC/utils/prefixFromEnvironment.ts @@ -1,6 +1,6 @@ -import { ReposBranchesDocument } from '../../associated_products'; +import { DocsetsDocument } from '../../repos_branches'; -export const prefixFromEnvironment = (repoBranchEntry: ReposBranchesDocument) => { +export const prefixFromEnvironment = (repoBranchEntry: DocsetsDocument) => { const env = process.env.SNOOTY_ENV ?? 
'dotcomprd'; return { url: repoBranchEntry.url[env], diff --git a/modules/persistence/src/services/metadata/associated_products/index.ts b/modules/persistence/src/services/metadata/associated_products/index.ts index b504dd717..9ac5abf4e 100644 --- a/modules/persistence/src/services/metadata/associated_products/index.ts +++ b/modules/persistence/src/services/metadata/associated_products/index.ts @@ -1,7 +1,13 @@ import { AggregationCursor } from 'mongodb'; import { Metadata } from '..'; import { db } from '../../connector'; -import { getAllAssociatedRepoBranchesEntries, getRepoBranchesEntry } from '../repos_branches'; +import { + ReposBranchesDocsetsDocument, + ReposBranchesDocument, + DocsetsDocument, + getAllAssociatedRepoBranchesEntries, + getRepoBranchesEntry, +} from '../repos_branches'; import { ToCInsertions, TocOrderInsertions, traverseAndMerge, copyToCTree, project } from '../ToC'; import { prefixFromEnvironment } from '../ToC/utils/prefixFromEnvironment'; @@ -33,16 +39,7 @@ export interface BranchEntry { [key: string]: any; } -export interface ReposBranchesDocument { - repoName: string; - project: string; - branches: BranchEntry[]; - url: EnvKeyedObject; - prefix: EnvKeyedObject; - [key: string]: any; -} - -const mapRepoBranches = (repoBranches: ReposBranchesDocument[]) => +const mapRepoBranches = (repoBranches: ReposBranchesDocsetsDocument[]) => Object.fromEntries( repoBranches.map((entry) => { const { url, prefix } = entry; @@ -94,7 +91,7 @@ const umbrellaMetadataEntry = async (project: string): Promise => { // Convert our cursor from the associated metadata aggregation query into a series of ToC objects and their parent metadata entries const shapeToCsCursor = async ( tocCursor: AggregationCursor, - repoBranchesMap: { [k: string]: ReposBranchesDocument } + repoBranchesMap: { [k: string]: ReposBranchesDocsetsDocument } ): Promise<{ tocInsertions: ToCInsertions; tocOrderInsertions: TocOrderInsertions; @@ -192,7 +189,7 @@ export const mergeAssociatedToCs = 
async (metadata: Metadata) => { ); // We need to have copies of the main umbrella product's ToC here, to handle multiple metadata entry support - const umbrellaPrefixes = prefixFromEnvironment(umbrellaRepoBranchesEntry as any as ReposBranchesDocument); + const umbrellaPrefixes = prefixFromEnvironment(umbrellaRepoBranchesEntry as any as DocsetsDocument); const umbrellaToCs = { original: copyToCTree(umbrellaMetadata.toctree), urlified: copyToCTree(umbrellaMetadata.toctree, umbrellaPrefixes.prefix, umbrellaPrefixes.url), diff --git a/modules/persistence/src/services/metadata/repos_branches/index.ts b/modules/persistence/src/services/metadata/repos_branches/index.ts index cd43e63bb..cfdd37016 100644 --- a/modules/persistence/src/services/metadata/repos_branches/index.ts +++ b/modules/persistence/src/services/metadata/repos_branches/index.ts @@ -16,17 +16,23 @@ export interface BranchEntry { [key: string]: any; } +export interface DocsetsDocument extends WithId { + url: EnvKeyedObject; + prefix: EnvKeyedObject; + bucket: EnvKeyedObject; +} + export interface ReposBranchesDocument extends WithId { repoName: string; project: string; branches: BranchEntry[]; - url: EnvKeyedObject; - prefix: EnvKeyedObject; internalOnly: boolean; [key: string]: any; } -const internals: { [key: project]: ReposBranchesDocument } = {}; +export type ReposBranchesDocsetsDocument = ReposBranchesDocument & DocsetsDocument; + +const internals: { [key: project]: ReposBranchesDocsetsDocument } = {}; const getAggregationPipeline = (matchCondition: any) => { return [ @@ -67,7 +73,7 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) => const { associated_products = [] } = metadata; if (!associated_products.length) return []; - const res: ReposBranchesDocument[] = [], + const res: ReposBranchesDocsetsDocument[] = [], fetch: project[] = []; associated_products.forEach((ap) => { if (internals[ap.name]) { @@ -85,8 +91,8 @@ export const getAllAssociatedRepoBranchesEntries = 
async (metadata: Metadata) => const db = await pool(); const aggregationPipeline = getAggregationPipeline({ project: { $in: fetch }, internalOnly: false }); const cursor = db.collection('docsets').aggregate(aggregationPipeline); - const docsets = (await cursor.toArray()) as ReposBranchesDocument[]; - docsets.forEach((doc: ReposBranchesDocument) => { + const docsets = (await cursor.toArray()) as DocsetsDocument[]; + docsets.forEach((doc: ReposBranchesDocsetsDocument) => { // TODO: store in cache internals[doc['project']] = doc; res.push(doc); @@ -98,7 +104,7 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) => } }; -// Queries pool*.repos_branches for any entries for the given project and branch from a metadata entry. +// Queries pool*.repos_branches and pool*.docsets for any entries for the given project and branch from a metadata entry. export const getRepoBranchesEntry = async (project: project, branch = ''): Promise => { const cachedDoc = internals[project]; // return cached repo doc if exists @@ -126,7 +132,7 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi const aggregationPipeline = getAggregationPipeline(matchCondition); const cursor = db.collection('docsets').aggregate(aggregationPipeline); - const res = (await cursor.toArray()) as unknown as ReposBranchesDocument[]; + const res = (await cursor.toArray()) as unknown as ReposBranchesDocsetsDocument[]; const returnedEntry = res[0]; if (res.length > 1) { @@ -135,7 +141,7 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi ); } - // if not already set, set cache value for repo_branches + // if not already set, set cache value for docsets if (!internals[project]) { internals[project] = returnedEntry; }