diff --git a/test/README.md b/test/README.md
index 69254086..3c48b5b3 100644
--- a/test/README.md
+++ b/test/README.md
@@ -38,9 +38,25 @@ npm test
 - **Important**: don't forget to first build the code if you're testing local changes:
 
 ```bash
-npm run build && npm test
+npm run build --workspace=@puppeteer-test/test && npm test
 ```
 
+### CLI options
+
+| Description                                                              | Option           | Type    |
+| ------------------------------------------------------------------------ | ---------------- | ------- |
+| Do not generate coverage report                                          | --no-coverage    | boolean |
+| Do not generate suggestions for updating the TestExpectations.json file  | --no-suggestions | boolean |
+| Specify a file to which to save run data                                 | --save-stats-to  | string  |
+| Specify a file with a custom Mocha reporter                              | --reporter       | string  |
+| Number of times to retry failed tests                                    | --retries        | number  |
+| Timeout threshold value                                                  | --timeout        | number  |
+| Tell Mocha not to run test files in parallel                             | --no-parallel    | boolean |
+| Generate full stack trace upon failure                                   | --fullTrace      | boolean |
+| Name of the test suite defined in TestSuites.json                        | --test-suite     | string  |
+
+### Helpful information
+
 - To run a specific test, substitute the `it` with `it.only`:
 
 ```ts
diff --git a/tools/mochaRunner/src/main.ts b/tools/mochaRunner/src/main.ts
index ef62fc5b..27ffb381 100644
--- a/tools/mochaRunner/src/main.ts
+++ b/tools/mochaRunner/src/main.ts
@@ -71,6 +71,7 @@ function getApplicableTestSuites(
 
 async function main() {
   const noCoverage = process.argv.indexOf('--no-coverage') !== -1;
+  const noSuggestions = process.argv.indexOf('--no-suggestions') !== -1;
 
   const statsFilenameIdx = process.argv.indexOf('--save-stats-to');
   let statsFilename = '';
@@ -209,44 +210,46 @@ async function main() {
     fail = true;
     console.error(err);
   } finally {
-    const toAdd = recommendations.filter(item => {
-      return item.action === 'add';
-    });
-    if (toAdd.length) {
-      console.log(
-        'Add the following to TestExpectations.json to ignore the error:'
-      );
-      prettyPrintJSON(
-        toAdd.map(item => {
-          return item.expectation;
-        })
-      );
-    }
-    const toRemove = recommendations.filter(item => {
-      return item.action === 'remove';
-    });
-    if (toRemove.length) {
-      console.log(
-        'Remove the following from the TestExpectations.json to ignore the error:'
-      );
-      prettyPrintJSON(
-        toRemove.map(item => {
-          return item.expectation;
-        })
-      );
-    }
-    const toUpdate = recommendations.filter(item => {
-      return item.action === 'update';
-    });
-    if (toUpdate.length) {
-      console.log(
-        'Update the following expectations in the TestExpecations.json to ignore the error:'
-      );
-      prettyPrintJSON(
-        toUpdate.map(item => {
-          return item.expectation;
-        })
-      );
+    if (!noSuggestions) {
+      const toAdd = recommendations.filter(item => {
+        return item.action === 'add';
+      });
+      if (toAdd.length) {
+        console.log(
+          'Add the following to TestExpectations.json to ignore the error:'
+        );
+        prettyPrintJSON(
+          toAdd.map(item => {
+            return item.expectation;
+          })
+        );
+      }
+      const toRemove = recommendations.filter(item => {
+        return item.action === 'remove';
+      });
+      if (toRemove.length) {
+        console.log(
+          'Remove the following from the TestExpectations.json to ignore the error:'
+        );
+        prettyPrintJSON(
+          toRemove.map(item => {
+            return item.expectation;
+          })
+        );
+      }
+      const toUpdate = recommendations.filter(item => {
+        return item.action === 'update';
+      });
+      if (toUpdate.length) {
+        console.log(
+          'Update the following expectations in the TestExpectations.json to ignore the error:'
+        );
+        prettyPrintJSON(
+          toUpdate.map(item => {
+            return item.expectation;
+          })
+        );
+      }
     }
     process.exit(fail ? 1 : 0);
   }
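For reference, a minimal usage sketch (not part of the patch above): it assumes that `npm test` forwards arguments placed after `--` to the mocha runner, and that `chrome-headless` is one of the suite names defined in TestSuites.json.

```bash
# Build the test workspace, then run one suite without the coverage report
# and without TestExpectations.json suggestions.
# Assumptions: npm forwards args after `--` to the runner script, and
# `chrome-headless` is a suite defined in TestSuites.json.
npm run build --workspace=@puppeteer-test/test && \
  npm test -- --test-suite chrome-headless --no-coverage --no-suggestions
```

Note that `--no-suggestions` only suppresses the printed TestExpectations.json recommendations in the `finally` block; the `process.exit(fail ? 1 : 0)` exit-code logic is unchanged.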