fix tests
mmeigs committed Oct 31, 2023
1 parent 0432276 commit 3e3c10f
Showing 5 changed files with 15 additions and 18 deletions.
1 change: 1 addition & 0 deletions config/test.json
@@ -13,6 +13,7 @@
   "fastlyDochubMap": "devfslydochubmap",
   "entitlementCollection": "entitlements",
   "docsetsCollection": "docsets",
+  "featureFlagMonorepoPath": "true",
   "MONGO_TIMEOUT_S": 1,
   "JOB_TIMEOUT_S": 10,
   "RETRY_TIMEOUT_MS": 10,
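The new key switches on the monorepo-path behavior in the test environment. As a minimal sketch only, assuming the project reads config/*.json through the node-config package and stores the flag as the string "true" (neither is confirmed by this diff):

import config from 'config';

// Hypothetical helper, for illustration: the flag is stored as the string "true"
// in config/test.json, so compare against that string rather than a boolean.
function isMonorepoPathEnabled(): boolean {
  return config.get<string>('featureFlagMonorepoPath') === 'true';
}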
6 changes: 1 addition & 5 deletions src/services/commandExecutor.ts
@@ -100,11 +100,7 @@ export class GithubCommandExecutor extends ShellCommandExecutor implements IGith
     branchName: string,
     newHead: string | null | undefined = null
   ): Promise<CommandExecutorResponse> {
-    const pullRepoCommands = [
-      `cd repos/${repoDirName}/cloud-docs`,
-      `git checkout ${branchName}`,
-      `git pull origin ${branchName}`,
-    ];
+    const pullRepoCommands = [`cd repos/${repoDirName}`, `git checkout ${branchName}`, `git pull origin ${branchName}`];
     if (newHead) {
       pullRepoCommands.push(`git checkout ${newHead} .`);
    }
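With the hardcoded cloud-docs subdirectory removed, the pull now runs at the root of repos/<repoDirName>. Purely as an illustration of the resulting command sequence, not the repo's ShellCommandExecutor (the ' && ' join and execSync call are assumptions for this sketch):

import { execSync } from 'child_process';

// Illustrative only: build the same command list the diff produces and run it
// as a single shell invocation. repoDirName, branchName, and newHead mirror
// the parameters of pullRepo above.
function pullRepoSketch(repoDirName: string, branchName: string, newHead?: string): string {
  const pullRepoCommands = [
    `cd repos/${repoDirName}`,
    `git checkout ${branchName}`,
    `git pull origin ${branchName}`,
  ];
  if (newHead) {
    pullRepoCommands.push(`git checkout ${newHead} .`);
  }
  // Chain the commands so a failure in any step stops the rest.
  return execSync(pullRepoCommands.join(' && '), { encoding: 'utf8' });
}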
16 changes: 8 additions & 8 deletions tests/data/data.ts
@@ -448,11 +448,13 @@ export class TestDataProvider {
     return retVal;
   }

-  static getAggregationPipeline(
-    matchConditionField: string,
-    matchConditionValue: string,
-    projection?: { [k: string]: number }
-  ) {
+  static getAggregationPipeline(matchConditions: { [k: string]: string }, projection?: { [k: string]: number }) {
+    // Add prefix 'repo' to each field in matchConditions
+    const formattedMatchConditions = Object.entries(matchConditions).reduce((acc, [key, val]) => {
+      acc[`repo.${key}`] = val;
+      return acc;
+    }, {});
+
     return [
       // Stage 1: Unwind the repos array to create multiple documents for each referenced repo
       {
@@ -469,9 +471,7 @@
       },
       // Stage 3: Match documents based on given field
       {
-        $match: {
-          [`repo.${matchConditionField}`]: matchConditionValue,
-        },
+        $match: formattedMatchConditions,
       },
       // Stage 4: Merge/flatten repo into docset
      {
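The helper now takes an arbitrary map of match conditions and prefixes each key with repo., which is what lets the docsets tests below match on both repoName and project in one call. A small usage sketch, using the same values as the tests:

// With the new signature, one call can constrain several repo fields:
const pipeline = TestDataProvider.getAggregationPipeline({
  repoName: 'test_repo',
  project: 'test_project',
});

// The reduce in the helper turns that argument into the $match stage
//   { 'repo.repoName': 'test_repo', 'repo.project': 'test_project' }
// so Stage 3 of the pipeline filters on both fields at once.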
2 changes: 1 addition & 1 deletion tests/unit/job/productionJobHandler.test.ts
@@ -120,7 +120,7 @@ describe('ProductionJobHandler Tests', () => {
   test('Execute throws error when Downloading makefile repo should update status', async () => {
     jobHandlerTestHelper.fileSystemServices.saveUrlAsFile
       .calledWith(
-        `https://raw.githubusercontent.com/mongodb/docs-worker-pool/meta/makefiles/Makefile.${jobHandlerTestHelper.job.payload.repoName}`
+        `https://raw.githubusercontent.com/mongodb/docs-worker-pool/monorepo-pub-branches/makefiles/Makefile.${jobHandlerTestHelper.job.payload.repoName}`
       )
       .mockImplementation(() => {
        throw new Error('Error while Downloading makefile');
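The expected download URL now points at the monorepo-pub-branches branch of docs-worker-pool instead of meta. For reference, the template the test asserts against expands like this (the repoName value here is an example only, standing in for jobHandlerTestHelper.job.payload.repoName):

// Illustration of the URL template used in the assertion above.
const repoName = 'cloud-docs'; // example value only
const makefileUrl = `https://raw.githubusercontent.com/mongodb/docs-worker-pool/monorepo-pub-branches/makefiles/Makefile.${repoName}`;
// e.g. https://raw.githubusercontent.com/mongodb/docs-worker-pool/monorepo-pub-branches/makefiles/Makefile.cloud-docs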
8 changes: 4 additions & 4 deletions tests/unit/repositories/docsetsRepository.test.ts
@@ -16,26 +16,26 @@ describe('Docsets Repository Tests', () => {

   describe('Docsets Repository getRepoBranchesByRepoName Tests', () => {
     test('getRepoBranchesByRepoName returns failure as result is undefined', async () => {
-      const testPipeline = TestDataProvider.getAggregationPipeline('repoName', 'test_repo');
+      const testPipeline = TestDataProvider.getAggregationPipeline({ repoName: 'test_repo', project: 'test_project' });
       dbRepoHelper.collection.aggregate.mockReturnValueOnce({
         toArray: () => [],
       });
-      await expect(docsetsRepo.getRepoBranchesByRepoName('test_repo', 'project')).resolves.toEqual({
+      await expect(docsetsRepo.getRepoBranchesByRepoName('test_repo', 'test_project')).resolves.toEqual({
         status: 'failure',
       });
       expect(dbRepoHelper.collection.aggregate).toBeCalledTimes(1);
       expect(dbRepoHelper.collection.aggregate).toBeCalledWith(testPipeline, {});
     });

     test('getRepoBranchesByRepoName is successfull', async () => {
-      const testPipeline = TestDataProvider.getAggregationPipeline('repoName', 'test_repo');
+      const testPipeline = TestDataProvider.getAggregationPipeline({ repoName: 'test_repo', project: 'test_project' });
       dbRepoHelper.collection.aggregate.mockReturnValueOnce({
         toArray: () => ({
           bucket: {},
           url: {},
         }),
       });
-      await docsetsRepo.getRepoBranchesByRepoName('test_repo', 'project');
+      await docsetsRepo.getRepoBranchesByRepoName('test_repo', 'test_project');
       expect(dbRepoHelper.collection.aggregate).toBeCalledTimes(1);
       expect(dbRepoHelper.collection.aggregate).toBeCalledWith(testPipeline, {});
     });
