chore: fix Mocha test runner suggestion when hooks fail (#9750)

Nikolay Vitkov 2023-02-28 12:55:20 +01:00 committed by GitHub
parent 4a365a42b4
commit 232873ae76
7 changed files with 73 additions and 59 deletions


@@ -25,7 +25,7 @@ The best place to look is an existing test to see how they use the helpers.
## Skipping tests in specific conditions

-To skip tests edit the [TestExpecations](https://github.com/puppeteer/puppeteer/blob/main/test/TestExpectations.json) file. See [test runner documentation](https://github.com/puppeteer/puppeteer/tree/main/tools/mochaRunner) for more details.
+To skip tests edit the [TestExpectations](https://github.com/puppeteer/puppeteer/blob/main/test/TestExpectations.json) file. See [test runner documentation](https://github.com/puppeteer/puppeteer/tree/main/tools/mochaRunner) for more details.

## Running tests

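For reference, an entry in TestExpectations.json pairs a test id pattern with the platforms and parameters it applies to, plus the outcomes to expect. A minimal sketch of a skip entry, using the field names this commit's suggestion code emits further down; the pattern, platform list, and the "SKIP" outcome are illustrative assumptions, not taken from the repository:

{
  "testIdPattern": "[page.spec] Page.goto should work",
  "platforms": ["darwin", "linux", "win32"],
  "parameters": ["chrome"],
  "expectations": ["SKIP"]
}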

@@ -43,7 +43,7 @@
      "expectedLineCoverage": 56
    }
  ],
-  "parameterDefinitons": {
+  "parameterDefinitions": {
    "chrome": {
      "PUPPETEER_PRODUCT": "chrome"
    },


@@ -15,6 +15,7 @@
    "../packages/testserver:build"
  ],
  "files": [
+    "../tools/mochaRunner/**",
    "src/**"
  ],
  "output": [


@@ -24,7 +24,7 @@ npm run build && npm run test -- --test-suite chrome-headless
## TestSuites.json

Define test suites via the `testSuites` attribute. `parameters` can be used in the `TestExpectations.json` to disable tests
-based on parameters. The meaning for parameters is defined in `parameterDefinitons` which tell what env object corresponds
+based on parameters. The meaning for parameters is defined in `parameterDefinitions`, which tells what env object corresponds
to the given parameter.

## TestExpectations.json

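To make the relationship concrete: each suite in `testSuites` names the parameters it runs with, and `parameterDefinitions` maps each parameter to the env object the runner injects. A sketch assembled from pieces visible in this commit (the `chrome` definition and the `chrome-headless` suite id); the `headless` parameter and its `HEADLESS` mapping are hypothetical:

{
  "testSuites": [
    {
      "id": "chrome-headless",
      "platforms": ["linux", "darwin", "win32"],
      "parameters": ["chrome", "headless"],
      "expectedLineCoverage": 56
    }
  ],
  "parameterDefinitions": {
    "chrome": {"PUPPETEER_PRODUCT": "chrome"},
    "headless": {"HEADLESS": "true"}
  }
}

main() resolves each listed parameter through parameterDefinitions when building the child environment, as the extendProcessEnv hunk below shows.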

@@ -31,10 +31,10 @@ import {
import {
  extendProcessEnv,
  filterByPlatform,
-  prettyPrintJSON,
  readJSON,
  filterByParameters,
  getExpectationUpdates,
+  printSuggestions,
} from './utils.js';
function getApplicableTestSuites(
@@ -109,7 +109,7 @@ async function main() {
    const env = extendProcessEnv([
      ...parameters.map(param => {
-        return parsedSuitesFile.parameterDefinitons[param];
+        return parsedSuitesFile.parameterDefinitions[param];
      }),
      {
        PUPPETEER_SKIPPED_TEST_CONFIG: JSON.stringify(
@@ -211,46 +211,22 @@ async function main() {
    console.error(err);
  } finally {
    if (!noSuggestions) {
-      const toAdd = recommendations.filter(item => {
-        return item.action === 'add';
-      });
-      if (toAdd.length) {
-        console.log(
-          'Add the following to TestExpectations.json to ignore the error:'
-        );
-        prettyPrintJSON(
-          toAdd.map(item => {
-            return item.expectation;
-          })
-        );
-      }
-      const toRemove = recommendations.filter(item => {
-        return item.action === 'remove';
-      });
-      if (toRemove.length) {
-        console.log(
-          'Remove the following from the TestExpectations.json to ignore the error:'
-        );
-        prettyPrintJSON(
-          toRemove.map(item => {
-            return item.expectation;
-          })
-        );
-      }
-      const toUpdate = recommendations.filter(item => {
-        return item.action === 'update';
-      });
-      if (toUpdate.length) {
-        console.log(
-          'Update the following expectations in the TestExpecations.json to ignore the error:'
-        );
-        prettyPrintJSON(
-          toUpdate.map(item => {
-            return item.expectation;
-          })
-        );
-      }
+      printSuggestions(
+        recommendations,
+        'add',
+        'Add the following to TestExpectations.json to ignore the error:'
+      );
+      printSuggestions(
+        recommendations,
+        'remove',
+        'Remove the following from the TestExpectations.json to ignore the error:'
+      );
+      printSuggestions(
+        recommendations,
+        'update',
+        'Update the following expectations in the TestExpectations.json to ignore the error:'
+      );
    }
    process.exit(fail ? 1 : 0);
  }
}

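Each printSuggestions call (added to utils.js below) prints its message followed by the matching expectations via prettyPrintJSON, i.e. JSON.stringify with two-space indentation. A hypothetical run with one new failure would therefore print something like the following; the test id is invented, and the "FAIL" outcome assumes that is what getTestResultForFailure (whose body is not shown in this diff) returns for an ordinary failure:

Add the following to TestExpectations.json to ignore the error:
[
  {
    "testIdPattern": "[page.spec] Page.goto should work",
    "platforms": ["linux"],
    "parameters": ["chrome"],
    "expectations": ["FAIL"]
  }
]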

@@ -31,7 +31,7 @@ export type TestSuite = z.infer<typeof zTestSuite>;

export const zTestSuiteFile = z.object({
  testSuites: z.array(zTestSuite),
-  parameterDefinitons: z.record(z.any()),
+  parameterDefinitions: z.record(z.any()),
});

export type TestSuiteFile = z.infer<typeof zTestSuiteFile>;


@@ -57,6 +57,24 @@ export function prettyPrintJSON(json: unknown): void {
  console.log(JSON.stringify(json, null, 2));
}

+export function printSuggestions(
+  recommendations: RecommendedExpectation[],
+  action: RecommendedExpectation['action'],
+  message: string
+): void {
+  const toPrint = recommendations.filter(item => {
+    return item.action === action;
+  });
+  if (toPrint.length) {
+    console.log(message);
+    prettyPrintJSON(
+      toPrint.map(item => {
+        return item.expectation;
+      })
+    );
+  }
+}
+
export function filterByParameters(
  expectations: TestExpectation[],
  parameters: string[]
@@ -88,9 +106,8 @@ export function findEffectiveExpectationForTest(
    .pop();
}

-type RecommendedExpecation = {
+type RecommendedExpectation = {
  expectation: TestExpectation;
  test: MochaTestResult;
  action: 'remove' | 'add' | 'update';
};
@@ -104,28 +121,42 @@ export function isWildCardPattern(testIdPattern: string): boolean {
export function getExpectationUpdates(
  results: MochaResults,
-  expecations: TestExpectation[],
+  expectations: TestExpectation[],
  context: {
    platforms: NodeJS.Platform[];
    parameters: string[];
  }
-): RecommendedExpecation[] {
-  const output: RecommendedExpecation[] = [];
+): RecommendedExpectation[] {
+  const output: Map<string, RecommendedExpectation> = new Map();
  for (const pass of results.passes) {
-    const expectationEntry = findEffectiveExpectationForTest(expecations, pass);
+    // If an error occurs during a hook,
+    // the error does not have a file associated with it.
+    if (!pass.file) {
+      continue;
+    }
+    const expectationEntry = findEffectiveExpectationForTest(
+      expectations,
+      pass
+    );
    if (expectationEntry && !expectationEntry.expectations.includes('PASS')) {
-      output.push({
+      addEntry({
        expectation: expectationEntry,
        test: pass,
        action: 'remove',
      });
    }
  }
  for (const failure of results.failures) {
+    // If an error occurs during a hook,
+    // the error does not have a file associated with it.
+    if (!failure.file) {
+      continue;
+    }
    const expectationEntry = findEffectiveExpectationForTest(
-      expecations,
+      expectations,
      failure
    );
    // If the effective explanation is a wildcard, we recommend adding a new
@@ -140,7 +171,7 @@ export function getExpectationUpdates(
          getTestResultForFailure(failure)
        )
      ) {
-        output.push({
+        addEntry({
          expectation: {
            ...expectationEntry,
            expectations: [
@@ -148,24 +179,30 @@
              getTestResultForFailure(failure),
            ],
          },
          test: failure,
          action: 'update',
        });
      }
    } else {
-      output.push({
+      addEntry({
        expectation: {
          testIdPattern: getTestId(failure.file, failure.fullTitle),
          platforms: context.platforms,
          parameters: context.parameters,
          expectations: [getTestResultForFailure(failure)],
        },
        test: failure,
        action: 'add',
      });
    }
  }

-  return output;
+  function addEntry(value: RecommendedExpectation) {
+    const key = JSON.stringify(value);
+    if (!output.has(key)) {
+      output.set(key, value);
+    }
+  }
+
+  return [...output.values()];
}

export function getTestResultForFailure(
export function getTestResultForFailure(
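The behavioral fix is concentrated in getExpectationUpdates: hook errors, which Mocha reports without a `file`, are now skipped before they can reach getTestId(failure.file, ...), and recommendations accumulate in a Map keyed by their JSON serialization, so identical entries collapse into one. A self-contained sketch of that de-duplication pattern, with a simplified stand-in for the RecommendedExpectation type:

type Recommendation = {
  action: 'add' | 'remove' | 'update';
  testIdPattern: string;
};

// Same technique as addEntry above: key each value by its JSON
// serialization and keep only the first occurrence.
function dedupe(values: Recommendation[]): Recommendation[] {
  const output = new Map<string, Recommendation>();
  for (const value of values) {
    const key = JSON.stringify(value);
    if (!output.has(key)) {
      output.set(key, value);
    }
  }
  return [...output.values()];
}

// Two structurally identical recommendations yield a single suggestion.
console.log(
  dedupe([
    {action: 'add', testIdPattern: '[page.spec] Page.goto should work'},
    {action: 'add', testIdPattern: '[page.spec] Page.goto should work'},
  ]).length
); // -> 1

Note that JSON.stringify keys are sensitive to property order; that suffices here because duplicate recommendations are constructed by the same code path and so serialize identically.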