Skip to content

Commit

Permalink
fix to dynamic print
Browse files Browse the repository at this point in the history
  • Loading branch information
eleanorjboyd committed Oct 9, 2023
1 parent 2d0830a commit 6ecaa9a
Show file tree
Hide file tree
Showing 5 changed files with 45 additions and 55 deletions.
48 changes: 18 additions & 30 deletions src/client/testing/testController/common/resultResolver.ts
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@ import { clearAllChildren, createErrorTestItem, getTestCaseNodes } from './testI
import { sendTelemetryEvent } from '../../../telemetry';
import { EventName } from '../../../telemetry/constants';
import { splitLines } from '../../../common/stringUtils';
import { buildErrorNodeOptions, fixLogLines, populateTestTree, splitTestNameWithRegex } from './utils';
import { buildErrorNodeOptions, populateTestTree, splitTestNameWithRegex } from './utils';
import { Deferred } from '../../../common/utils/async';

export class PythonResultResolver implements ITestResultResolver {
Expand Down Expand Up @@ -138,15 +138,16 @@ export class PythonResultResolver implements ITestResultResolver {
const tempArr: TestItem[] = getTestCaseNodes(i);
testCases.push(...tempArr);
});
const testItem = rawTestExecData.result[keyTemp];

if (rawTestExecData.result[keyTemp].outcome === 'error') {
const rawTraceback = rawTestExecData.result[keyTemp].traceback ?? '';
if (testItem.outcome === 'error') {
const rawTraceback = testItem.traceback ?? '';
const traceback = splitLines(rawTraceback, {
trim: false,
removeEmptyEntries: true,
}).join('\r\n');
const text = `${rawTestExecData.result[keyTemp].test} failed with error: ${
rawTestExecData.result[keyTemp].message ?? rawTestExecData.result[keyTemp].outcome
const text = `${testItem.test} failed with error: ${
testItem.message ?? testItem.outcome
}\r\n${traceback}\r\n`;
const message = new TestMessage(text);

Expand All @@ -157,23 +158,17 @@ export class PythonResultResolver implements ITestResultResolver {
if (indiItem.uri && indiItem.range) {
message.location = new Location(indiItem.uri, indiItem.range);
runInstance.errored(indiItem, message);
runInstance.appendOutput(fixLogLines(text));
}
}
});
} else if (
rawTestExecData.result[keyTemp].outcome === 'failure' ||
rawTestExecData.result[keyTemp].outcome === 'passed-unexpected'
) {
const rawTraceback = rawTestExecData.result[keyTemp].traceback ?? '';
} else if (testItem.outcome === 'failure' || testItem.outcome === 'passed-unexpected') {
const rawTraceback = testItem.traceback ?? '';
const traceback = splitLines(rawTraceback, {
trim: false,
removeEmptyEntries: true,
}).join('\r\n');

const text = `${rawTestExecData.result[keyTemp].test} failed: ${
rawTestExecData.result[keyTemp].message ?? rawTestExecData.result[keyTemp].outcome
}\r\n${traceback}\r\n`;
const text = `${testItem.test} failed: ${testItem.message ?? testItem.outcome}\r\n${traceback}\r\n`;
const message = new TestMessage(text);

// note that keyTemp is a runId for unittest library...
Expand All @@ -184,14 +179,10 @@ export class PythonResultResolver implements ITestResultResolver {
if (indiItem.uri && indiItem.range) {
message.location = new Location(indiItem.uri, indiItem.range);
runInstance.failed(indiItem, message);
runInstance.appendOutput(fixLogLines(text));
}
}
});
} else if (
rawTestExecData.result[keyTemp].outcome === 'success' ||
rawTestExecData.result[keyTemp].outcome === 'expected-failure'
) {
} else if (testItem.outcome === 'success' || testItem.outcome === 'expected-failure') {
const grabTestItem = this.runIdToTestItem.get(keyTemp);
const grabVSid = this.runIdToVSid.get(keyTemp);
if (grabTestItem !== undefined) {
Expand All @@ -203,7 +194,7 @@ export class PythonResultResolver implements ITestResultResolver {
}
});
}
} else if (rawTestExecData.result[keyTemp].outcome === 'skipped') {
} else if (testItem.outcome === 'skipped') {
const grabTestItem = this.runIdToTestItem.get(keyTemp);
const grabVSid = this.runIdToVSid.get(keyTemp);
if (grabTestItem !== undefined) {
Expand All @@ -215,11 +206,11 @@ export class PythonResultResolver implements ITestResultResolver {
}
});
}
} else if (rawTestExecData.result[keyTemp].outcome === 'subtest-failure') {
} else if (testItem.outcome === 'subtest-failure') {
// split on [] or () based on how the subtest is setup.
const [parentTestCaseId, subtestId] = splitTestNameWithRegex(keyTemp);
const parentTestItem = this.runIdToTestItem.get(parentTestCaseId);
const data = rawTestExecData.result[keyTemp];
const data = testItem;
// find the subtest's parent test item
if (parentTestItem) {
const subtestStats = this.subTestStats.get(parentTestCaseId);
Expand All @@ -230,20 +221,19 @@ export class PythonResultResolver implements ITestResultResolver {
failed: 1,
passed: 0,
});
runInstance.appendOutput(fixLogLines(`${parentTestCaseId} [subtests]:\r\n`));
// clear since subtest items don't persist between runs
clearAllChildren(parentTestItem);
}
const subTestItem = this.testController?.createTestItem(subtestId, subtestId);
runInstance.appendOutput(fixLogLines(`${subtestId} Failed\r\n`));
// create a new test item for the subtest
if (subTestItem) {
const traceback = data.traceback ?? '';
const text = `${data.subtest} Failed: ${data.message ?? data.outcome}\r\n${traceback}\r\n`;
runInstance.appendOutput(fixLogLines(text));
const text = `${data.subtest} failed: ${
testItem.message ?? testItem.outcome
}\r\n${traceback}\r\n`;
parentTestItem.children.add(subTestItem);
runInstance.started(subTestItem);
const message = new TestMessage(rawTestExecData?.result[keyTemp].message ?? '');
const message = new TestMessage(text);
if (parentTestItem.uri && parentTestItem.range) {
message.location = new Location(parentTestItem.uri, parentTestItem.range);
}
Expand All @@ -254,7 +244,7 @@ export class PythonResultResolver implements ITestResultResolver {
} else {
throw new Error('Parent test item not found');
}
} else if (rawTestExecData.result[keyTemp].outcome === 'subtest-success') {
} else if (testItem.outcome === 'subtest-success') {
// split on [] or () based on how the subtest is setup.
const [parentTestCaseId, subtestId] = splitTestNameWithRegex(keyTemp);
const parentTestItem = this.runIdToTestItem.get(parentTestCaseId);
Expand All @@ -266,7 +256,6 @@ export class PythonResultResolver implements ITestResultResolver {
subtestStats.passed += 1;
} else {
this.subTestStats.set(parentTestCaseId, { failed: 0, passed: 1 });
runInstance.appendOutput(fixLogLines(`${parentTestCaseId} [subtests]:\r\n`));
// clear since subtest items don't persist between runs
clearAllChildren(parentTestItem);
}
Expand All @@ -276,7 +265,6 @@ export class PythonResultResolver implements ITestResultResolver {
parentTestItem.children.add(subTestItem);
runInstance.started(subTestItem);
runInstance.passed(subTestItem);
runInstance.appendOutput(fixLogLines(`${subtestId} Passed\r\n`));
} else {
throw new Error('Unable to create new child node for subtest');
}
Expand Down
19 changes: 7 additions & 12 deletions src/client/testing/testController/common/server.ts
Original file line number Diff line number Diff line change
Expand Up @@ -21,7 +21,7 @@ import {
createEOTPayload,
createExecutionErrorPayload,
extractJsonPayload,
fixLogLines,
fixLogLinesNoTrailing,
} from './utils';
import { createDeferred } from '../../../common/utils/async';
import { EnvironmentVariables } from '../../../api/types';
Expand Down Expand Up @@ -247,24 +247,23 @@ export class PythonTestServer implements ITestServer, Disposable {
// Displays output to user and ensure the subprocess doesn't run into buffer overflow.
// TODO: after a release, remove discovery output from the "Python Test Log" channel and send it to the "Python" channel instead.
// TODO: after a release, remove run output from the "Python Test Log" channel and send it to the "Test Result" channel instead.
let collectedOutput = '';
if (isDiscovery) {
result?.proc?.stdout?.on('data', (data) => {
const out = fixLogLines(data.toString());
collectedOutput += out;
const out = fixLogLinesNoTrailing(data.toString());
spawnOptions?.outputChannel?.append(`${out}`);
});
result?.proc?.stderr?.on('data', (data) => {
const out = fixLogLines(data.toString());
collectedOutput += out;
const out = fixLogLinesNoTrailing(data.toString());
spawnOptions?.outputChannel?.append(`${out}`);
});
} else {
result?.proc?.stdout?.on('data', (data) => {
const out = fixLogLines(data.toString());
const out = fixLogLinesNoTrailing(data.toString());
runInstance?.appendOutput(`${out}`);
spawnOptions?.outputChannel?.append(out);
});
result?.proc?.stderr?.on('data', (data) => {
const out = fixLogLines(data.toString());
const out = fixLogLinesNoTrailing(data.toString());
runInstance?.appendOutput(`${out}`);
spawnOptions?.outputChannel?.append(out);
});
Expand All @@ -278,10 +277,6 @@ export class PythonTestServer implements ITestServer, Disposable {
' The "Python Test Log" channel will be deprecated within the next month. See ___ for details.',
);
if (isDiscovery) {
// Collect all discovery output and log it at process finish to avoid dividing it between log lines.
traceLog(`\r\n${collectedOutput}`);
spawnOptions?.outputChannel?.append(`${collectedOutput}`);

if (code !== 0) {
// This occurs when we are running discovery
traceError(
Expand Down
5 changes: 5 additions & 0 deletions src/client/testing/testController/common/utils.ts
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,11 @@ export function fixLogLines(content: string): string {
const lines = content.split(/\r?\n/g);
return `${lines.join('\r\n')}\r\n`;
}

/**
 * Normalizes line endings in the given text to CRLF (`\r\n`) without appending
 * a trailing newline.
 *
 * VS Code's test-run output (`TestRun.appendOutput`) renders in a terminal-like
 * view that requires CRLF line endings; this converts any mix of `\n` / `\r\n`
 * to CRLF. Unlike `fixLogLines`, it does not add a final `\r\n`, so it is safe
 * for streamed chunks (e.g. incremental `stdout`/`stderr` data events) where an
 * extra terminator would split lines mid-stream.
 *
 * @param content Raw text whose line endings may be LF or CRLF.
 * @returns The same text with every line break normalized to CRLF.
 */
export function fixLogLinesNoTrailing(content: string): string {
    // Split on either LF or CRLF, then rejoin with CRLF; no trailing newline added.
    return content.split(/\r?\n/g).join('\r\n');
}
export interface IJSONRPCData {
extractedJSON: string;
remainingRawData: string;
Expand Down
20 changes: 11 additions & 9 deletions src/client/testing/testController/pytest/pytestDiscoveryAdapter.ts
Original file line number Diff line number Diff line change
Expand Up @@ -10,15 +10,20 @@ import {
import { IConfigurationService, ITestOutputChannel } from '../../../common/types';
import { Deferred, createDeferred } from '../../../common/utils/async';
import { EXTENSION_ROOT_DIR } from '../../../constants';
import { traceError, traceInfo, traceLog, traceVerbose } from '../../../logging';
import { traceError, traceInfo, traceVerbose } from '../../../logging';
import {
DataReceivedEvent,
DiscoveredTestPayload,
ITestDiscoveryAdapter,
ITestResultResolver,
ITestServer,
} from '../common/types';
import { createDiscoveryErrorPayload, createEOTPayload, createTestingDeferred, fixLogLines } from '../common/utils';
import {
createDiscoveryErrorPayload,
createEOTPayload,
createTestingDeferred,
fixLogLinesNoTrailing,
} from '../common/utils';
import { IEnvironmentVariablesProvider } from '../../../common/variables/types';

/**
Expand Down Expand Up @@ -97,21 +102,18 @@ export class PytestTestDiscoveryAdapter implements ITestDiscoveryAdapter {
// Displays output to user and ensure the subprocess doesn't run into buffer overflow.
// TODO: after a release, remove discovery output from the "Python Test Log" channel and send it to the "Python" channel instead.

let collectedOutput = '';
result?.proc?.stdout?.on('data', (data) => {
const out = fixLogLines(data.toString());
collectedOutput += out;
const out = fixLogLinesNoTrailing(data.toString());
traceInfo(out);
spawnOptions?.outputChannel?.append(`${out}`);
});
result?.proc?.stderr?.on('data', (data) => {
const out = fixLogLines(data.toString());
collectedOutput += out;
const out = fixLogLinesNoTrailing(data.toString());
traceError(out);
spawnOptions?.outputChannel?.append(`${out}`);
});
result?.proc?.on('exit', (code, signal) => {
// Collect all discovery output and log it at process finish to avoid dividing it between log lines.
traceLog(`\r\n${collectedOutput}`);
spawnOptions?.outputChannel?.append(`${collectedOutput}`);
this.outputChannel?.append(
'Starting now, all test run output will be sent to the Test Result panel' +
' and test discovery output will be sent to the "Python" output channel instead of the "Python Test Log" channel.' +
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -192,13 +192,13 @@ export class PytestTestExecutionAdapter implements ITestExecutionAdapter {
// Displays output to user and ensure the subprocess doesn't run into buffer overflow.
// TODO: after a release, remove run output from the "Python Test Log" channel and send it to the "Test Result" channel instead.
result?.proc?.stdout?.on('data', (data) => {
const out = utils.fixLogLines(data.toString());
runInstance?.appendOutput(`${out}`);
const out = utils.fixLogLinesNoTrailing(data.toString());
runInstance?.appendOutput(out);
this.outputChannel?.append(out);
});
result?.proc?.stderr?.on('data', (data) => {
const out = utils.fixLogLines(data.toString());
runInstance?.appendOutput(`${out}`);
const out = utils.fixLogLinesNoTrailing(data.toString());
runInstance?.appendOutput(out);
this.outputChannel?.append(out);
});
result?.proc?.on('exit', (code, signal) => {
Expand Down

0 comments on commit 6ecaa9a

Please sign in to comment.