Merge branch 'master' into DOP-4171
branberry authored Jan 26, 2024
2 parents d4105c3 + e2c2607 commit fb83f8c
Showing 25 changed files with 122 additions and 106 deletions.
8 changes: 8 additions & 0 deletions .github/workflows/README.md
@@ -2,6 +2,14 @@

 This README describes the various workflows defined for the docs-worker-pool repository.
 
+## Releases
+
+The release process occurs whenever someone releases a specific tag (note: it does not run upon creation of a tag, only release). Three separate workflows are run upon release:
+
+1. `deploy-prd-ecs.yml` - Builds and deploys the old Autobuilder to prd
+2. `deploy-prd-enhanced-webhooks.yml` - Builds and deploys webhooks (GitHub and Slack handlers) for the enhanced Autobuilder
+3. `deploy-prd-enhanced-worker.yml` - Builds and deploys the Autobuilder worker to prd
+
 ## Feature Branch Deploys
 
 The feature branch deploy process occurs whenever a developer opens a pull request. It consists of three separate workflows:
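The Releases section above describes workflows that now fire on release publication rather than on tag pushes; the workflow diffs that follow show exactly that trigger change. For reference, a minimal release-triggered deploy workflow might look roughly like this (the job body is a hypothetical placeholder, not part of this commit):

```yaml
# Illustrative sketch only: the `on` and `concurrency` blocks mirror the prd deploy
# workflows changed in this commit; the job is a stand-in for the real deploy steps.
on:
  release:
    types: [released]

concurrency:
  group: environment-prd-example-${{ github.ref }}
  cancel-in-progress: true

jobs:
  deploy:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - run: echo "Deploying build for release ${{ github.ref_name }}"
```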
5 changes: 2 additions & 3 deletions .github/workflows/deploy-prd-ecs.yml
@@ -1,7 +1,6 @@
 on:
-  push:
-    tags:
-      - 'v*'
+  release:
+    types: [released]
 concurrency:
   group: environment-prd-${{ github.ref }}
   cancel-in-progress: true
6 changes: 2 additions & 4 deletions .github/workflows/deploy-prd-enhanced-webhooks.yml
@@ -1,8 +1,6 @@
 on:
-  push:
-    tags:
-      - 'v*'
-
+  release:
+    types: [released]
 concurrency:
   group: environment-prd-enhanced-webhooks-${{ github.ref }}
   cancel-in-progress: true
6 changes: 2 additions & 4 deletions .github/workflows/deploy-prd-enhanced-worker.yml
@@ -1,8 +1,6 @@
 on:
-  push:
-    tags:
-      - 'v*'
-
+  release:
+    types: [released]
 concurrency:
   group: environment-prd-enhanced-worker-${{ github.ref }}
   cancel-in-progress: true
2 changes: 1 addition & 1 deletion .github/workflows/test-persistence.yml
@@ -34,7 +34,7 @@ jobs:
       - uses: actions/checkout@v3
       - uses: actions/setup-node@v1
         with:
-          node-version: '14.x'
+          node-version: '18.x'
       - name: Install dependencies
         run: npm install --dev
       - name: Lint
2 changes: 1 addition & 1 deletion Dockerfile
@@ -23,7 +23,7 @@ RUN cd ./modules/oas-page-builder \
 FROM ubuntu:20.04
 ARG WORK_DIRECTORY=/home/docsworker-xlarge
 ARG SNOOTY_PARSER_VERSION=0.15.2
-ARG SNOOTY_FRONTEND_VERSION=0.15.3
+ARG SNOOTY_FRONTEND_VERSION=0.15.6
 ARG MUT_VERSION=0.10.7
 ARG REDOC_CLI_VERSION=1.2.3
 ARG NPM_BASE_64_AUTH
4 changes: 2 additions & 2 deletions Dockerfile.enhanced
@@ -23,7 +23,7 @@ RUN cd ./modules/oas-page-builder \
 FROM ubuntu:20.04
 ARG WORK_DIRECTORY=/home/docsworker-xlarge
 ARG SNOOTY_PARSER_VERSION=0.15.2
-ARG SNOOTY_FRONTEND_VERSION=0.15.3
+ARG SNOOTY_FRONTEND_VERSION=0.15.6
 ARG MUT_VERSION=0.10.7
 ARG REDOC_CLI_VERSION=1.2.3
 ARG NPM_BASE_64_AUTH
@@ -117,4 +117,4 @@ ENV OAS_MODULE_PATH=${WORK_DIRECTORY}/modules/oas-page-builder/index.js

 RUN mkdir repos && chmod 755 repos
 EXPOSE 3000
-CMD ["node", "enhanced/enhancedApp.js"]
+CMD ["node", "--enable-source-maps", "enhanced/enhancedApp.js"]
2 changes: 1 addition & 1 deletion Dockerfile.local
@@ -2,7 +2,7 @@ FROM arm64v8/ubuntu:20.04 as initial
 ARG NPM_BASE_64_AUTH
 ARG NPM_EMAIL
 ARG SNOOTY_PARSER_VERSION=0.15.2
-ARG SNOOTY_FRONTEND_VERSION=0.15.3
+ARG SNOOTY_FRONTEND_VERSION=0.15.6
 ARG MUT_VERSION=0.10.7
 ARG REDOC_CLI_VERSION=1.2.3
 ARG NPM_BASE_64_AUTH
3 changes: 2 additions & 1 deletion README.md
@@ -157,7 +157,8 @@ docs-worker-pool contains various triggers for release to higher environments. C

 - Create release tags. We currently follow [semver](https://semver.org/) standards.
   - If you don't have push access, open an issue or otherwise contact a contributor with administrator privileges.
-- Verify that the deploy-production-ec2 workflow executed successfully for both job runs across both production instances.
+- Create a release associated with the tag using the GitHub UI or by running `gh release create`.
+- Verify that the deploy-production-ec2 workflow executed successfully for both job runs across both production instances. The workflow should only run when a release is published.
 
 ### Serverless Development
 
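The README change above adds a `gh release create` step; a typical invocation might look like the following (the tag name and notes are hypothetical placeholders, not taken from this commit):

```sh
# Create and publish a GitHub release for an existing semver tag;
# publishing the release is what triggers the prd deploy workflows.
gh release create v1.2.3 --title "v1.2.3" --notes "Describe the changes in this release"
```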
1 change: 1 addition & 0 deletions api/controllers/v2/github.ts
@@ -138,6 +138,7 @@ export const TriggerBuild = async (event: APIGatewayEvent): Promise<APIGatewayPr

   /* Create and insert Job for each monorepo project that has changes */
   for (const path of monorepoPaths) {
+    consoleLogger.info(body.repository.full_name, `Create Job for Monorepo directory: /${path}`);
     // TODO: Deal with nested monorepo projects
     /* For now, we will ignore nested monorepo projects until necessary */
     if (path.split('/').length > 1) continue;
17 changes: 10 additions & 7 deletions api/handlers/slack.ts
@@ -24,14 +24,17 @@ export async function buildEntitledBranchList(entitlement: any, repoBranchesRepo
     const [repoOwner, repoName, directoryPath] = repo.split('/');
     const branches = await repoBranchesRepository.getRepoBranches(repoName, directoryPath);
     for (const branch of branches) {
-      let buildWithSnooty = true;
-      if ('buildsWithSnooty' in branch) {
-        buildWithSnooty = branch['buildsWithSnooty'];
-      }
+      const buildWithSnooty = branch['buildsWithSnooty'];
       if (buildWithSnooty) {
-        entitledBranches.push(
-          `${repoOwner}/${repoName}${directoryPath ? '/' + directoryPath : ''}/${branch['gitBranchName']}`
-        );
+        const active = branch['active'];
+        const repoPath = `${repoOwner}/${repoName}${directoryPath ? '/' + directoryPath : ''}/${
+          branch['gitBranchName']
+        }`;
+        if (!active) {
+          entitledBranches.push(`(!inactive) ${repoPath}`);
+        } else {
+          entitledBranches.push(repoPath);
+        }
       }
     }
   }
6 changes: 6 additions & 0 deletions infrastructure/ecs-main/buckets.yml
@@ -78,6 +78,12 @@ Resources:
               Protocol: "https"
               HostName: ${self:custom.site.host.${self:provider.stage}}
               ReplaceKeyPrefixWith: ${self:custom.site.prefix.${self:provider.stage}}/atlas/cli/v1.1
+          - RoutingRuleCondition:
+              KeyPrefixEquals: ${self:custom.site.prefix.${self:provider.stage}}/php-library/master
+            RedirectRule:
+              Protocol: "https"
+              HostName: ${self:custom.site.host.${self:provider.stage}}
+              ReplaceKeyPrefixWith: ${self:custom.site.prefix.${self:provider.stage}}/php-library/upcoming
   DocAtlasBucket:
     Type: "AWS::S3::Bucket"
     Properties:
2 changes: 1 addition & 1 deletion modules/persistence/.nvmrc
@@ -1 +1 @@
-v14.17.6
+v18.12.0
11 changes: 7 additions & 4 deletions modules/persistence/src/services/metadata/ToC/index.ts
@@ -1,4 +1,3 @@
-import { AssociatedProduct, hasAssociations } from '../associated_products';
 import { Metadata } from '..';
 import { convertSlugToUrl } from './utils/convertSlugToUrl';

@@ -93,15 +92,19 @@ const mergeTocTreeOrder = (metadata: Metadata, node, insertions: TocOrderInserti
 // contains an associated_products entry
 export const traverseAndMerge = (
   metadata: Metadata,
-  associated_products: AssociatedProduct[],
+  umbrellaMetadata: Metadata,
   umbrellaToCs: ToCCopies,
   tocInsertions: ToCInsertions,
   tocOrderInsertions: TocOrderInsertions
 ) => {
   const { project } = metadata;
+  const associatedProducts = umbrellaMetadata.associated_products || [];
 
-  const toctree = hasAssociations(metadata) ? umbrellaToCs.original : umbrellaToCs.urlified;
-  const toBeInserted = new Set(associated_products.map((p) => p.name));
+  const associatedProductNames = associatedProducts.map((p) => p.name);
+  const toctree = structuredClone(
+    metadata.project === umbrellaMetadata.project ? umbrellaToCs.original : umbrellaToCs.urlified
+  );
+  const toBeInserted = new Set(associatedProductNames);
   let queue = [toctree];
   while (queue?.length && toBeInserted.size) {
     let next = queue.shift();
@@ -26,12 +26,6 @@ interface AggregatedMetadata {
   most_recent: Metadata;
 }
 
-type EnvKeyedObject = {
-  prd: any;
-  preprd: any;
-  dotcomstg: any;
-  dotcomprd: any;
-};
 // TODO: move the branch/repobranch interfaces into their own file, or into a seperate abstraction?
 export interface BranchEntry {
   name: string;
@@ -69,8 +63,8 @@ const umbrellaMetadataEntry = async (project: string): Promise<Metadata> => {
     return null as unknown as Metadata;
   }
 
-  const repoDoc = await getRepoBranchesEntry(project);
-  const branchNames = repoDoc.branches.map((branchEntry) => branchEntry.gitBranchName);
+  const umbrellaRepos = await getRepoBranchesEntry(umbrella.project);
+  const branchNames = umbrellaRepos.branches.map((branchEntry) => branchEntry.gitBranchName);
   const entry = await snooty
     .collection('metadata')
     .find({
@@ -198,7 +192,7 @@ export const mergeAssociatedToCs = async (metadata: Metadata) => {
   const mergedMetadataEntries = [umbrellaMetadata, ...associatedMetadataEntries].map((metadataEntry) => {
     const mergedMetadataEntry = traverseAndMerge(
       metadataEntry,
-      umbrellaMetadata.associated_products || [],
+      umbrellaMetadata,
       umbrellaToCs,
       tocInsertions,
       tocOrderInsertions
@@ -65,6 +65,11 @@ const getAggregationPipeline = (matchCondition: any) => {
         repo: 0,
       },
     },
+    {
+      $sort: {
+        prodDeployable: -1,
+      },
+    },
   ];
 };
 
4 changes: 2 additions & 2 deletions modules/persistence/tests/metadata/ToC.test.ts
@@ -122,7 +122,7 @@ describe('ToC module', () => {
       expect(
         traverseAndMerge(
           umbrellaMetadata as unknown as Metadata,
-          umbrellaMetadata.associated_products || [],
+          umbrellaMetadata,
           umbrellaToCs,
           tocInsertions,
           tocOrderInsertions
@@ -135,7 +135,7 @@
       expect(
         traverseAndMerge(
           metadata[0] as unknown as Metadata,
-          umbrellaMetadata.associated_products || [],
+          umbrellaMetadata,
           umbrellaToCs,
           tocInsertions,
           tocOrderInsertions
1 change: 1 addition & 0 deletions modules/persistence/tests/setupAfterEnv.ts
@@ -1,4 +1,5 @@
 beforeAll(() => {
+  // Disable console.time from crowding test output
   console.time = jest.fn();
   global.structuredClone = (val) => JSON.parse(JSON.stringify(val));
 });
12 changes: 2 additions & 10 deletions src/commands/index.ts
@@ -1,17 +1,9 @@
-import { prepareBuildAndGetDependencies } from './src/helpers/dependency-helpers';
+import { prepareBuild } from './src/helpers/dependency-helpers';
 import { nextGenDeploy } from './src/shared/next-gen-deploy';
 import { nextGenHtml } from './src/shared/next-gen-html';
 import { nextGenParse } from './src/shared/next-gen-parse';
 import { nextGenStage } from './src/shared/next-gen-stage';
 import { oasPageBuild } from './src/shared/oas-page-build';
 import { persistenceModule } from './src/shared/persistence-module';
 
-export {
-  nextGenParse,
-  nextGenHtml,
-  nextGenStage,
-  persistenceModule,
-  oasPageBuild,
-  nextGenDeploy,
-  prepareBuildAndGetDependencies,
-};
+export { nextGenParse, nextGenHtml, nextGenStage, persistenceModule, oasPageBuild, nextGenDeploy, prepareBuild };
32 changes: 17 additions & 15 deletions src/commands/src/helpers/dependency-helpers.ts
@@ -43,16 +43,25 @@ async function createEnvProdFile({
   }
 }
 
-export async function downloadBuildDependencies(buildDependencies: BuildDependencies, repoName: string) {
+export async function downloadBuildDependencies(
+  buildDependencies: BuildDependencies,
+  repoName: string,
+  directory?: string
+) {
   const commands: string[] = [];
   await Promise.all(
     buildDependencies.map(async (dependencyInfo) => {
-      const repoDir = getRepoDir(repoName);
+      const repoDir = getRepoDir(repoName, directory);
       const targetDir = dependencyInfo.targetDir ?? repoDir;
+      let options = {};
+      if (targetDir != repoDir) {
+        options = { cwd: repoDir };
+      }
       try {
         await executeCliCommand({
           command: 'mkdir',
           args: ['-p', targetDir],
+          options: options,
         });
       } catch (error) {
         console.error(
@@ -63,36 +72,29 @@ export async function downloadBuildDependencies(buildDependen
       }
       commands.push(`mkdir -p ${targetDir}`);
       await Promise.all(
-        dependencyInfo.dependencies.map((dep) => {
+        dependencyInfo.dependencies.map(async (dep) => {
+          commands.push(`curl -SfL ${dep.url} -o ${targetDir}/${dep.filename}`);
           try {
-            executeCliCommand({
+            return await executeCliCommand({
               command: 'curl',
-              args: ['-SfL', dep.url, '-o', `${targetDir}/${dep.filename}`],
+              args: ['--max-time', '10', '-SfL', dep.url, '-o', `${targetDir}/${dep.filename}`],
+              options: options,
             });
           } catch (error) {
             console.error(
               `ERROR! Could not curl ${dep.url} into ${targetDir}/${dep.filename}. Dependency information: `,
               dependencyInfo
             );
           }
-          commands.push(`curl -SfL ${dep.url} -o ${targetDir}/${dep.filename}`);
         })
       );
     })
   );
   return commands;
 }
 
-export async function prepareBuildAndGetDependencies(
-  repoName: string,
-  projectName: string,
-  baseUrl: string,
-  buildDependencies: BuildDependencies,
-  directory?: string
-) {
+export async function prepareBuild(repoName: string, projectName: string, baseUrl: string, directory?: string) {
   const repoDir = getRepoDir(repoName, directory);
-  await downloadBuildDependencies(buildDependencies, repoName);
-  console.log('Downloaded Build dependencies');
 
   // doing these in parallel
   const commandPromises = [