feat: aggregate experiments in the report #648

Merged
merged 9 commits on Oct 2, 2023
6 changes: 5 additions & 1 deletion src/chaos-experiments/models/chaosExperimentsManager.js
@@ -92,5 +92,9 @@ module.exports.runChaosExperiment = async (kubernetesChaosConfig, jobExperimentI
};

module.exports.getFutureJobExperiments = async function (timestamp, contextId) {
return databaseConnector.getFutureJobExperiments(contextId);
return databaseConnector.getFutureJobExperiments(timestamp, contextId);
};

module.exports.getChaosJobExperimentsByJobId = async function (jobId, contextId) {
return databaseConnector.getChaosJobExperimentsByJobId(jobId, contextId);
};
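For context, a minimal caller sketch of the updated manager API (illustration only, not part of this PR; the require path and the surrounding helper are assumptions):

// Hypothetical usage sketch of the updated chaosExperimentsManager API.
const chaosExperimentsManager = require('../../chaos-experiments/models/chaosExperimentsManager');

async function collectJobChaosData(jobId, contextId) {
    // New in this PR: fetch every chaos job experiment attached to a job id.
    const jobExperiments = await chaosExperimentsManager.getChaosJobExperimentsByJobId(jobId, contextId);
    // Changed in this PR: the reference timestamp is now passed through to the connector.
    const futureExperiments = await chaosExperimentsManager.getFutureJobExperiments(Date.now(), contextId);
    return { jobExperiments, futureExperiments };
}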
10 changes: 5 additions & 5 deletions src/chaos-experiments/models/database/databaseConnector.js
@@ -9,7 +9,7 @@ module.exports = {
getChaosExperimentsByIds,
deleteChaosExperiment,
insertChaosJobExperiment,
getChaosJobExperimentById,
getChaosJobExperimentsByJobId,
getChaosJobExperimentByJobId,
getFutureJobExperiments,
setChaosJobExperimentTriggered,
@@ -57,16 +57,16 @@ async function insertChaosJobExperiment(jobExperimentId, jobId, experimentId, st
return databaseConnector.insertChaosJobExperiment(jobExperimentId, jobId, experimentId, startTime, endTime, contextId);
}

async function getChaosJobExperimentById(jobExperimentId, contextId) {
return databaseConnector.getChaosJobExperimentById(jobExperimentId, contextId);
async function getChaosJobExperimentsByJobId(jobExperimentId, contextId) {
return databaseConnector.getChaosJobExperimentsByJobId(jobExperimentId, contextId);
}

async function getChaosJobExperimentByJobId(jobId, contextId) {
return databaseConnector.getChaosJobExperimentById(jobId, contextId);
}

async function getFutureJobExperiments(contextId) {
return databaseConnector.getFutureJobExperiments(contextId);
async function getFutureJobExperiments(timestamp, contextId) {
return databaseConnector.getFutureJobExperiments(timestamp, contextId);
}

async function setChaosJobExperimentTriggered(jobExperimentId, isTriggered, contextId) {
@@ -17,7 +17,7 @@ module.exports = {
updateChaosExperiment,
insertChaosJobExperiment,
getChaosJobExperimentById,
getChaosJobExperimentByJobId,
getChaosJobExperimentsByJobId,
getFutureJobExperiments,
setChaosJobExperimentTriggered
};
@@ -159,7 +159,7 @@ async function getChaosJobExperimentById(jobExperimentId, contextId) {
return chaosExperiment;
}

async function getChaosJobExperimentByJobId(jobId, contextId) {
async function getChaosJobExperimentsByJobId(jobId, contextId) {
const options = {
where: { job_id: jobId }
};
@@ -168,11 +168,9 @@ async function getChaosJobExperimentByJobId(jobId, contextId) {
options.where.context_id = contextId;
}

let chaosExperiment = await _getChaosJobExperiment(options);
if (chaosExperiment) {
chaosExperiment = chaosExperiment.get();
}
return chaosExperiment;
const chaosJobExperimentModel = client.model(CHAOS_JOB_EXPERIMENTS_TABLE_NAME);
const allChaosJobExperiments = await chaosJobExperimentModel.findAll(options);
return allChaosJobExperiments;
}

async function getFutureJobExperiments(timestamp, contextId) {
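Note on the connector change above: the query now goes through Sequelize's findAll on the job-experiments model, so callers receive an array of matching rows rather than a single unwrapped record. A hypothetical caller sketch (illustration only; the require path is an assumption):

// Hypothetical caller sketch: the connector now resolves with an array of
// Sequelize model instances instead of one plain object.
const databaseConnector = require('./src/chaos-experiments/models/database/databaseConnector');

async function countJobExperiments(jobId, contextId) {
    const rows = await databaseConnector.getChaosJobExperimentsByJobId(jobId, contextId);
    // Each row is a model instance; use row.get() if a plain object is needed.
    return rows.length;
}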
20 changes: 20 additions & 0 deletions src/reports/models/aggregateReportManager.js
@@ -5,6 +5,7 @@ const math = require('mathjs');

const logger = require('../../common/logger');
const databaseConnector = require('./databaseConnector');
const chaosExperimentsManager = require('../../chaos-experiments/models/chaosExperimentsManager');
const constants = require('../utils/constants');

const STATS_INTERVAL = 30;
@@ -15,6 +16,7 @@ module.exports = {

async function aggregateReport(report) {
let stats = await databaseConnector.getStats(report.test_id, report.report_id);
const experiments = await getChaosExperiments(report.job_id);

if (stats.length === 0) {
const errorMessage = `Can not generate aggregate report as there are no statistics yet for testId: ${report.test_id} and reportId: ${report.report_id}`;
@@ -35,6 +37,7 @@ async function aggregateReport(report) {
reportInput.revision_id = report.revision_id;
reportInput.score = report.score;
reportInput.benchmark_weights_data = report.benchmark_weights_data;
reportInput.experiments = experiments;
reportInput.notes = report.notes;

reportInput.status = mapReportStatus(report.status);
@@ -63,6 +66,23 @@ async function aggregateReport(report) {
return reportInput;
}

async function getChaosExperiments(job_id) {
const chaosJobExperiments = await chaosExperimentsManager.getChaosJobExperimentsByJobId(job_id);
const uniqueExperimentIds = [...new Set(chaosJobExperiments.map(jobExperiment => jobExperiment.experiment_id))];
const chaosExperiments = await chaosExperimentsManager.getChaosExperimentsByIds(uniqueExperimentIds);
const mappedChaosJobExperiments = chaosJobExperiments.map((jobExperiment) => {
const chaosExperiment = chaosExperiments.find((experiment) => experiment.id === jobExperiment.experiment_id && jobExperiment.is_triggered);
return {
kind: chaosExperiment.kubeObject.kind,
name: chaosExperiment.name,
id: chaosExperiment.id,
start_time: jobExperiment.start_time,
end_time: jobExperiment.end_time
};
});
return mappedChaosJobExperiments;
}

function createAggregateManually(listOfStats) {
const requestMedians = [], requestMaxs = [], requestMins = [], scenario95 = [], scenario99 = [], request95 = [],
request99 = [], scenarioMins = [], scenarioMaxs = [], scenarioMedians = [];
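To make the new aggregation step concrete, here is a self-contained sketch of the mapping performed by getChaosExperiments above, using invented sample data (illustration only, not part of the PR):

// Illustration only: the same map/find logic as getChaosExperiments, with sample data.
const chaosJobExperiments = [
    { experiment_id: 'exp-1', is_triggered: true, start_time: 1696000000000, end_time: 1696000060000 }
];
const chaosExperiments = [
    { id: 'exp-1', name: 'pod-kill', kubeObject: { kind: 'PodChaos' } }
];

const experimentsForReport = chaosJobExperiments.map((jobExperiment) => {
    // Only triggered job experiments are matched to their experiment definition.
    const experiment = chaosExperiments.find((e) => e.id === jobExperiment.experiment_id && jobExperiment.is_triggered);
    return {
        kind: experiment.kubeObject.kind,
        name: experiment.name,
        id: experiment.id,
        start_time: jobExperiment.start_time,
        end_time: jobExperiment.end_time
    };
});

console.log(experimentsForReport);
// [ { kind: 'PodChaos', name: 'pod-kill', id: 'exp-1',
//     start_time: 1696000000000, end_time: 1696000060000 } ]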
@@ -140,7 +140,7 @@ describe('Sequelize client tests', function () {
should(sequelizeGetStub.args[0][0]).containDeep({ where: { name: experimentName } });
});
});
describe('Get ChaosExperimentsById', () => {
describe('Get ChaosExperimentsByIds', () => {
it('Validate sequelize passed arguments', async () => {
sequelizeGetStub.returns([experiment]);
const experimentIds = ['1234', '4321'];
@@ -226,15 +226,30 @@ should(sequelizeGetStub.args[0][0]).containDeep({ where: { id: jobExperimentId } });
should(sequelizeGetStub.args[0][0]).containDeep({ where: { id: jobExperimentId } });
});
});
describe('getChaosJobExperimentByJobId', function() {
describe('getChaosJobExperimentsByJobId', function() {
it('Validate sequelize passed arguments', async () => {
sequelizeGetStub.returns([jobExperiment]);
const experimentJobId = experiment.job_id;
await sequelizeConnector.getChaosJobExperimentByJobId(experimentJobId);
await sequelizeConnector.getChaosJobExperimentsByJobId(experimentJobId);
should(sequelizeGetStub.calledOnce).eql(true);
should(sequelizeGetStub.args[0][0]).containDeep({ where: { job_id: experimentJobId } });
});
});
describe('getFutureJobExperiments', function() {
it('Validate sequelize passed arguments', async () => {
sequelizeGetStub.returns([jobExperiment]);
const timestamp = Date.now();
await sequelizeConnector.getFutureJobExperiments(timestamp, 'contextId');
should(sequelizeGetStub.calledOnce).eql(true);
should(sequelizeGetStub.args[0][0]).deepEqual({
where: {
is_triggered: false,
start_time: {},
context_id: 'contextId'
}
});
});
});
});

describe('Set job experiment is triggered', () => {
115 changes: 114 additions & 1 deletion tests/unit-tests/reporter/models/finalReportGenerator-test.js
@@ -6,6 +6,7 @@ const rewire = require('rewire');
const logger = require('../../../../src/common/logger');
const aggregateReportGenerator = rewire('../../../../src/reports/models/aggregateReportGenerator');
const aggregateReportManager = rewire('../../../../src/reports/models/aggregateReportManager');
const chaosExperimentsManager = require('../../../../src/chaos-experiments/models/chaosExperimentsManager');
const databaseConnector = require('../../../../src/reports/models/databaseConnector');
const reportsManager = require('../../../../src/reports/models/reportsManager');

@@ -17,17 +18,26 @@ const REPORT = {
test_name: 'some_test_name',
webhooks: ['http://www.zooz.com'],
arrival_rate: 100,
job_id: 'job_id',
duration: 10,
environment: 'test'
};

describe('Artillery report generator test', () => {
let sandbox, databaseConnectorGetStatsStub, loggerErrorStub, loggerWarnStub, reportsManagerGetReportStub;
let sandbox,
databaseConnectorGetStatsStub,
getJobExperimentsStubByJobIdStub,
getChaosExperimentsByIdsStub,
loggerErrorStub,
loggerWarnStub,
reportsManagerGetReportStub;

before(() => {
sandbox = sinon.sandbox.create();
databaseConnectorGetStatsStub = sandbox.stub(databaseConnector, 'getStats');
reportsManagerGetReportStub = sandbox.stub(reportsManager, 'getReport');
getJobExperimentsStubByJobIdStub = sandbox.stub(chaosExperimentsManager, 'getChaosJobExperimentsByJobId');
getChaosExperimentsByIdsStub = sandbox.stub(chaosExperimentsManager, 'getChaosExperimentsByIds');
loggerErrorStub = sandbox.stub(logger, 'error');
loggerWarnStub = sandbox.stub(logger, 'warn');
});
@@ -48,6 +58,7 @@

it('create aggregate report when there is only intermediate rows', async () => {
databaseConnectorGetStatsStub.resolves(SINGLE_RUNNER_INTERMEDIATE_ROWS);
getJobExperimentsStubByJobIdStub.resolves([]);

const reportOutput = await aggregateReportGenerator.createAggregateReport(REPORT.test_id, REPORT.report_id);
should(reportOutput.parallelism).eql(1);
@@ -57,6 +68,7 @@
const statsWithUnknownData = JSON.parse(JSON.stringify(SINGLE_RUNNER_INTERMEDIATE_ROWS));
statsWithUnknownData.push({ phase_status: 'some_unknown_phase', data: JSON.stringify({}) });
databaseConnectorGetStatsStub.resolves(statsWithUnknownData);
getJobExperimentsStubByJobIdStub.resolves([]);

const reportOutput = await aggregateReportGenerator.createAggregateReport(REPORT.test_id, REPORT.report_id);
should(reportOutput.parallelism).eql(1);
@@ -72,6 +84,38 @@

loggerWarnStub.callCount.should.eql(1);
});

it('create final report successfully with chaos experiments', async function() {
const statsWithUnknownData = JSON.parse(JSON.stringify(SINGLE_RUNNER_INTERMEDIATE_ROWS));
statsWithUnknownData.push({ phase_status: 'intermediate', data: 'unsupported data type' });
databaseConnectorGetStatsStub.resolves(statsWithUnknownData);
getJobExperimentsStubByJobIdStub.resolves(JOB_EXPERIMENTS_ROWS);
getChaosExperimentsByIdsStub.resolves(CHAOS_EXPERIMENTS_ROWS);
const reportOutput = await aggregateReportGenerator.createAggregateReport(REPORT.test_id, REPORT.report_id);
should(reportOutput.experiments).deepEqual([
{
kind: CHAOS_EXPERIMENTS_ROWS[0].kubeObject.kind,
name: CHAOS_EXPERIMENTS_ROWS[0].name,
id: JOB_EXPERIMENTS_ROWS[0].experiment_id,
start_time: JOB_EXPERIMENTS_ROWS[0].start_time,
end_time: JOB_EXPERIMENTS_ROWS[0].end_time
},
{
kind: CHAOS_EXPERIMENTS_ROWS[1].kubeObject.kind,
name: CHAOS_EXPERIMENTS_ROWS[1].name,
id: JOB_EXPERIMENTS_ROWS[1].experiment_id,
start_time: JOB_EXPERIMENTS_ROWS[1].start_time,
end_time: JOB_EXPERIMENTS_ROWS[1].end_time
},
{
kind: CHAOS_EXPERIMENTS_ROWS[2].kubeObject.kind,
name: CHAOS_EXPERIMENTS_ROWS[2].name,
id: JOB_EXPERIMENTS_ROWS[2].experiment_id,
start_time: JOB_EXPERIMENTS_ROWS[2].start_time,
end_time: JOB_EXPERIMENTS_ROWS[2].end_time
}
]);
});
});

describe('Happy flows - With parallelism', function () {
@@ -86,6 +130,7 @@
const firstStatsTimestamp = JSON.parse(PARALLEL_INTERMEDIATE_ROWS[0].data).timestamp;

reportsManagerGetReportStub.resolves(REPORT);
getJobExperimentsStubByJobIdStub.resolves([]);
REPORT.start_time = new Date(new Date(firstStatsTimestamp).getTime() - (STATS_INTERVAL * 1000));
databaseConnectorGetStatsStub.resolves(PARALLEL_INTERMEDIATE_ROWS);
const reportOutput = await aggregateReportGenerator.createAggregateReport(REPORT.test_id, REPORT.report_id);
@@ -233,9 +278,42 @@

testShouldFail.should.eql(false, 'Test action was supposed to get exception');
});

it('create final report fails when get experiments returns error on get job experiments', async () => {
databaseConnectorGetStatsStub.resolves(SINGLE_RUNNER_INTERMEDIATE_ROWS);
reportsManagerGetReportStub.rejects(new Error('Database failure'));

let testShouldFail = true;
try {
await aggregateReportGenerator.createAggregateReport('testId', 'reportId');
} catch (error) {
testShouldFail = false;
error.message.should.eql('Database failure');
}

testShouldFail.should.eql(false, 'Test action was supposed to get exception');
});

it('create final report fails when get experiments returns error on get chaos experiments', async () => {
databaseConnectorGetStatsStub.resolves(SINGLE_RUNNER_INTERMEDIATE_ROWS);
getJobExperimentsStubByJobIdStub.resolves(JOB_EXPERIMENTS_ROWS);
getChaosExperimentsByIdsStub.rejects(new Error('Database failure'));

let testShouldFail = true;
try {
await aggregateReportGenerator.createAggregateReport('testId', 'reportId');
} catch (error) {
testShouldFail = false;
error.message.should.eql('Database failure');
}

testShouldFail.should.eql(false, 'Test action was supposed to get exception');
});
});
});

const timestamp = Date.now();

const SINGLE_RUNNER_INTERMEDIATE_ROWS = [{
test_id: 'cb7d7862-55c2-4a9b-bcec-d41d54101836',
report_id: 'b6489011-2073-4998-91cc-fd62f8b927f7',
@@ -337,3 +415,38 @@ const PARALLEL_INTERMEDIATE_ROWS = [
data: '{"timestamp":"2019-03-10T17:24:33.043Z","scenariosCreated":300,"scenariosCompleted":300,"requestsCompleted":300,"latency":{"min":59.5,"max":98.3,"median":61.3,"p95":72.9,"p99":84},"rps":{"count":300,"mean":20},"scenarioDuration":{"min":60,"max":98.9,"median":61.9,"p95":73.5,"p99":84.5},"scenarioCounts":{"Get response code 200":300},"errors":{},"codes":{"200":300},"matches":0,"customStats":{},"counters":{},"concurrency":1,"pendingRequests":1,"scenariosAvoided":0}'
}
];

const JOB_EXPERIMENTS_ROWS = [{
job_id: REPORT.job_id,
experiment_id: '1234-abc-5678',
start_time: timestamp,
end_time: timestamp + 100
},
{
job_id: REPORT.job_id,
experiment_id: 'abcd-1234-efgh',
start_time: timestamp,
end_time: timestamp + 200
},
{
job_id: REPORT.job_id,
experiment_id: '4321-abc-5678',
start_time: timestamp,
end_time: timestamp + 300
}];

const CHAOS_EXPERIMENTS_ROWS = [{
id: '1234-abc-5678',
name: 'first-experiment',
kubeObject: { kind: 'PodChaos' }
},
{
id: 'abcd-1234-efgh',
name: 'second-experiment',
kubeObject: { kind: 'DNSChaos' }
},
{
id: '4321-abc-5678',
name: 'third-experiment',
kubeObject: { kind: 'IOChaos' }
}];