Skip to content

Commit

Permalink
📝 DOP-4098 updates comment (#931)
Browse files Browse the repository at this point in the history
* 📝 DOP-4098 updates comment

* 📝 DOP-4098 updates comment

* 🏷️ DOP-4098 updates types to reflect change between repos_branches and docsets

---------

Co-authored-by: Caesar Bell <[email protected]>
  • Loading branch information
caesarbell and Caesar Bell authored Nov 3, 2023
1 parent 64535ea commit 417c7fb
Show file tree
Hide file tree
Showing 5 changed files with 31 additions and 28 deletions.
4 changes: 2 additions & 2 deletions api/controllers/v1/github.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,13 +5,13 @@ import { ConsoleLogger } from '../../../src/services/logger';
import { RepoBranchesRepository } from '../../../src/repositories/repoBranchesRepository';
import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handlers/github';
import { DocsetsRepository } from '../../../src/repositories/docsetsRepository';
import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/repos_branches';
import { ReposBranchesDocsetsDocument } from '../../../modules/persistence/src/services/metadata/repos_branches';

async function prepGithubPushPayload(
githubEvent: any,

Check warning on line 11 in api/controllers/v1/github.ts

View workflow job for this annotation

GitHub Actions / test

Unexpected any. Specify a different type
repoBranchesRepository: RepoBranchesRepository,
prefix: string,
repoInfo: ReposBranchesDocument
repoInfo: ReposBranchesDocsetsDocument
) {
const branch_name = githubEvent.ref.split('/')[2];
const branch_info = await repoBranchesRepository.getRepoBranchAliases(
Expand Down
4 changes: 2 additions & 2 deletions api/controllers/v2/github.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,14 +11,14 @@ import { markBuildArtifactsForDeletion, validateJsonWebhook } from '../../handle
import { DocsetsRepository } from '../../../src/repositories/docsetsRepository';
import { getMonorepoPaths } from '../../../src/monorepo';
import { getUpdatedFilePaths } from '../../../src/monorepo/utils/path-utils';
import { ReposBranchesDocument } from '../../../modules/persistence/src/services/metadata/associated_products';
import { ReposBranchesDocsetsDocument } from '../../../modules/persistence/src/services/metadata/repos_branches';
import { MONOREPO_NAME } from '../../../src/monorepo/utils/monorepo-constants';

async function prepGithubPushPayload(
githubEvent: PushEvent,
repoBranchesRepository: RepoBranchesRepository,
prefix: string,
repoInfo: ReposBranchesDocument
repoInfo: ReposBranchesDocsetsDocument
): Promise<Omit<EnhancedJob, '_id'>> {
const branch_name = githubEvent.ref.split('/')[2];
const branch_info = await repoBranchesRepository.getRepoBranchAliases(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
import { ReposBranchesDocument } from '../../associated_products';
import { DocsetsDocument } from '../../repos_branches';

export const prefixFromEnvironment = (repoBranchEntry: ReposBranchesDocument) => {
export const prefixFromEnvironment = (repoBranchEntry: DocsetsDocument) => {
const env = process.env.SNOOTY_ENV ?? 'dotcomprd';
return {
url: repoBranchEntry.url[env],
Expand Down
Original file line number Diff line number Diff line change
@@ -1,7 +1,13 @@
import { AggregationCursor } from 'mongodb';
import { Metadata } from '..';
import { db } from '../../connector';
import { getAllAssociatedRepoBranchesEntries, getRepoBranchesEntry } from '../repos_branches';
import {
ReposBranchesDocsetsDocument,
ReposBranchesDocument,
DocsetsDocument,
getAllAssociatedRepoBranchesEntries,
getRepoBranchesEntry,
} from '../repos_branches';
import { ToCInsertions, TocOrderInsertions, traverseAndMerge, copyToCTree, project } from '../ToC';
import { prefixFromEnvironment } from '../ToC/utils/prefixFromEnvironment';

Expand Down Expand Up @@ -33,16 +39,7 @@ export interface BranchEntry {
[key: string]: any;
}

export interface ReposBranchesDocument {
repoName: string;
project: string;
branches: BranchEntry[];
url: EnvKeyedObject;
prefix: EnvKeyedObject;
[key: string]: any;
}

const mapRepoBranches = (repoBranches: ReposBranchesDocument[]) =>
const mapRepoBranches = (repoBranches: ReposBranchesDocsetsDocument[]) =>
Object.fromEntries(
repoBranches.map((entry) => {
const { url, prefix } = entry;
Expand Down Expand Up @@ -94,7 +91,7 @@ const umbrellaMetadataEntry = async (project: string): Promise<Metadata> => {
// Convert our cursor from the associated metadata aggregation query into a series of ToC objects and their parent metadata entries
const shapeToCsCursor = async (
tocCursor: AggregationCursor,
repoBranchesMap: { [k: string]: ReposBranchesDocument }
repoBranchesMap: { [k: string]: ReposBranchesDocsetsDocument }
): Promise<{
tocInsertions: ToCInsertions;
tocOrderInsertions: TocOrderInsertions;
Expand Down Expand Up @@ -192,7 +189,7 @@ export const mergeAssociatedToCs = async (metadata: Metadata) => {
);

// We need to have copies of the main umbrella product's ToC here, to handle multiple metadata entry support
const umbrellaPrefixes = prefixFromEnvironment(umbrellaRepoBranchesEntry as any as ReposBranchesDocument);
const umbrellaPrefixes = prefixFromEnvironment(umbrellaRepoBranchesEntry as any as DocsetsDocument);
const umbrellaToCs = {
original: copyToCTree(umbrellaMetadata.toctree),
urlified: copyToCTree(umbrellaMetadata.toctree, umbrellaPrefixes.prefix, umbrellaPrefixes.url),
Expand Down
24 changes: 15 additions & 9 deletions modules/persistence/src/services/metadata/repos_branches/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -16,17 +16,23 @@ export interface BranchEntry {
[key: string]: any;
}

/**
 * Shape of a document in the `docsets` collection: per-environment deployment
 * settings for a docset. Each field is keyed by environment name (e.g.
 * 'dotcomprd' — see the SNOOTY_ENV lookup in prefixFromEnvironment).
 */
export interface DocsetsDocument extends WithId<Document> {
// Base site URL per environment.
url: EnvKeyedObject;
// URL path prefix per environment.
prefix: EnvKeyedObject;
// Storage bucket per environment — TODO confirm consumers; not referenced in the visible code.
bucket: EnvKeyedObject;
}

/**
 * Shape of a document in the `repos_branches` collection: identifies a docs
 * repo/project and the branches that can be built for it.
 * NOTE(review): the broad index signature admits arbitrary extra fields, so
 * entries may carry more properties than are modeled here.
 */
export interface ReposBranchesDocument extends WithId<Document> {
repoName: string;
project: string;
branches: BranchEntry[];
url: EnvKeyedObject;
prefix: EnvKeyedObject;
// Entries with internalOnly=true are excluded from association queries (see the $in/internalOnly match filter).
internalOnly: boolean;
[key: string]: any;
}

const internals: { [key: project]: ReposBranchesDocument } = {};
/**
 * A repos_branches entry merged with its corresponding docsets entry — the
 * combined shape produced by the aggregation pipeline run against the
 * `docsets` collection (see getAggregationPipeline and its callers).
 */
export type ReposBranchesDocsetsDocument = ReposBranchesDocument & DocsetsDocument;

// In-memory cache of merged entries, keyed by project name
// (populated via internals[doc['project']] = doc in the fetch helpers).
const internals: { [key: project]: ReposBranchesDocsetsDocument } = {};

const getAggregationPipeline = (matchCondition: any) => {
return [
Expand Down Expand Up @@ -67,7 +73,7 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) =>
const { associated_products = [] } = metadata;
if (!associated_products.length) return [];

const res: ReposBranchesDocument[] = [],
const res: ReposBranchesDocsetsDocument[] = [],
fetch: project[] = [];
associated_products.forEach((ap) => {
if (internals[ap.name]) {
Expand All @@ -85,8 +91,8 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) =>
const db = await pool();
const aggregationPipeline = getAggregationPipeline({ project: { $in: fetch }, internalOnly: false });
const cursor = db.collection('docsets').aggregate(aggregationPipeline);
const docsets = (await cursor.toArray()) as ReposBranchesDocument[];
docsets.forEach((doc: ReposBranchesDocument) => {
const docsets = (await cursor.toArray()) as DocsetsDocument[];
docsets.forEach((doc: ReposBranchesDocsetsDocument) => {
// TODO: store in cache
internals[doc['project']] = doc;
res.push(doc);
Expand All @@ -98,7 +104,7 @@ export const getAllAssociatedRepoBranchesEntries = async (metadata: Metadata) =>
}
};

// Queries pool*.repos_branches for any entries for the given project and branch from a metadata entry.
// Queries pool*.repos_branches and pool*.docsets for any entries for the given project and branch from a metadata entry.
export const getRepoBranchesEntry = async (project: project, branch = ''): Promise<ReposBranchesDocument> => {
const cachedDoc = internals[project];
// return cached repo doc if exists
Expand Down Expand Up @@ -126,7 +132,7 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi
const aggregationPipeline = getAggregationPipeline(matchCondition);

const cursor = db.collection('docsets').aggregate(aggregationPipeline);
const res = (await cursor.toArray()) as unknown as ReposBranchesDocument[];
const res = (await cursor.toArray()) as unknown as ReposBranchesDocsetsDocument[];
const returnedEntry = res[0];

if (res.length > 1) {
Expand All @@ -135,7 +141,7 @@ export const getRepoBranchesEntry = async (project: project, branch = ''): Promi
);
}

// if not already set, set cache value for repo_branches
// if not already set, set cache value for docsets
if (!internals[project]) {
internals[project] = returnedEntry;
}
Expand Down

0 comments on commit 417c7fb

Please sign in to comment.