chore: add env var checking to verify-mocha-results (#21211)

Co-authored-by: Ryan Manuel <ryanm@cypress.io>
Author: Zach Bloomquist
Date: 2022-04-28 21:44:15 -04:00
Committed by: GitHub
Parent: 51b4bdc2ce
Commit: fe3e2afbea

7 changed files with 325 additions and 85 deletions

.circleci/config.yml

@@ -29,7 +29,7 @@ mainBuildFilters: &mainBuildFilters
     only:
       - develop
       - 10.0-release
-      - remove-unused-scripts
+      - check-results-for-env
   # usually we don't build Mac app - it takes a long time
   # but sometimes we want to really confirm we are doing the right thing
@@ -38,7 +38,7 @@ macWorkflowFilters: &mac-workflow-filters
   when:
     or:
       - equal: [ develop, << pipeline.git.branch >> ]
-      - equal: [ remove-unused-scripts, << pipeline.git.branch >> ]
+      - equal: [ check-results-for-env, << pipeline.git.branch >> ]
       - matches:
           pattern: "-release$"
           value: << pipeline.git.branch >>
@@ -48,7 +48,7 @@ windowsWorkflowFilters: &windows-workflow-filters
     or:
       - equal: [ master, << pipeline.git.branch >> ]
       - equal: [ develop, << pipeline.git.branch >> ]
-      - equal: [ remove-unused-scripts, << pipeline.git.branch >> ]
+      - equal: [ check-results-for-env, << pipeline.git.branch >> ]
       - matches:
           pattern: "-release$"
           value: << pipeline.git.branch >>
@@ -1005,6 +1005,9 @@ jobs:
     - run:
         name: Top level packages
         command: yarn list --depth=0 || true
+    - run:
+        name: Check env canaries
+        command: node ./scripts/circle-env.js --check-canaries
     - build-and-persist
     - store-npm-logs
@@ -1651,7 +1654,7 @@ jobs:
     - run:
         name: Check current branch to persist artifacts
         command: |
-          if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "remove-unused-scripts" ]]; then
+          if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "check-results-for-env" ]]; then
             echo "Not uploading artifacts or posting install comment for this branch."
             circleci-agent step halt
           fi
@@ -2050,6 +2053,7 @@ linux-workflow: &linux-workflow
   jobs:
     - node_modules_install
     - build:
+        context: test-runner:env-canary
         requires:
           - node_modules_install
     - lint:
@@ -2353,6 +2357,7 @@ mac-workflow: &mac-workflow
     - build:
         name: darwin-build
+        context: test-runner:env-canary
         executor: mac
         resource_class: macos.x86.medium.gen2
         requires:
@@ -2393,6 +2398,7 @@ windows-workflow: &windows-workflow
     - build:
         name: windows-build
+        context: test-runner:env-canary
         executor: windows
         resource_class: windows.medium
         requires:
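
Note: the canary variables that the new "Check env canaries" step looks for are defined in scripts/circle-env.js below. A minimal sketch of the env shape the step expects to find; where each canary is configured is an assumption based on that script's comments and error messages:

// hypothetical: the env that `node ./scripts/circle-env.js --check-canaries` expects to see
const expectedCanaries = {
  MAIN_CANARY: 'true', // assumed to be set as a CircleCI project-level env var
  CONTEXT_CANARY: 'true', // assumed to be set inside the test-runner:env-canary context
}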

package.json

@@ -58,7 +58,7 @@
"pretest-watch": "yarn ensure-deps",
"test-watch": "lerna exec yarn test-watch --ignore \"'@packages/{desktop-gui,driver,root,static,web-config}'\"",
"type-check": "yarn lerna exec yarn type-check --scope @tooling/system-tests && node scripts/type_check",
"verify:mocha:results": "node ./scripts/verify_mocha_results",
"verify:mocha:results": "node ./scripts/verify-mocha-results",
"prewatch": "yarn ensure-deps",
"watch": "lerna exec yarn watch --parallel --stream",
"prepare": "husky install"

scripts/circle-env.js (new file, 59 lines)

@@ -0,0 +1,59 @@
/* eslint-disable no-console */
const fs = require('fs').promises

async function loadInternalTaskData () {
  const filename = process.env.CIRCLE_INTERNAL_CONFIG

  if (!filename) throw new Error('Missing CIRCLE_INTERNAL_CONFIG environment variable, cannot load Circle task data.')

  const taskDataJson = await fs.readFile(filename, 'utf8')

  try {
    return JSON.parse(taskDataJson)
  } catch (err) {
    throw new Error(`An error occurred while parsing the Circle task data: ${err}`)
  }
}

// check if the project env canary and context canary are both present to verify that this script is reading the right env
async function checkCanaries () {
  if (!process.env.CI) console.warn('This script will not work outside of CI.')

  const circleEnv = await readCircleEnv()

  if (!circleEnv.MAIN_CANARY) throw new Error('Missing MAIN_CANARY.')

  if (!circleEnv.CONTEXT_CANARY) throw new Error('Missing CONTEXT_CANARY. Does this job have the test-runner:env-canary context?')
}

// Returns a map of environment variables defined for this job. `readCircleEnv()` differs from `process.env` - it will
// only return environment variables explicitly specified for this job by CircleCI project env and contexts.
// NOTE: this Circle API is not stable, and yet it is the only way to access this information.
async function readCircleEnv () {
  const taskData = await loadInternalTaskData()

  try {
    // if this starts failing, try SSHing into a CircleCI job and see what changed in the $CIRCLE_INTERNAL_CONFIG file's schema
    const circleEnv = taskData['Dispatched']['TaskInfo']['Environment']

    if (!circleEnv || !Object.keys(circleEnv).length) throw new Error('An empty Environment object was found.')

    return circleEnv
  } catch (err) {
    throw new Error(`An error occurred when reading the environment from Circle task data: ${err}`)
  }
}

module.exports = {
  readCircleEnv,
  _checkCanaries: checkCanaries,
}

if (require.main === module) {
  if (process.argv.includes('--check-canaries')) {
    checkCanaries()
  } else {
    console.error(`No options were passed, but ${__filename} was invoked as a script.`)
    process.exit(1)
  }
}
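
Note: a minimal sketch of how a consumer might use readCircleEnv(), mirroring the pattern in scripts/verify-mocha-results.js below; the scanForLeaks helper is hypothetical, for illustration only:

const { readCircleEnv } = require('./circle-env')

// hypothetical: return the names of any CI env vars whose values appear verbatim in `text`
async function scanForLeaks (text) {
  let circleEnv

  try {
    circleEnv = await readCircleEnv()
  } catch (err) {
    // outside CI there is no $CIRCLE_INTERNAL_CONFIG; allow an explicit bypass (the same escape hatch the real script uses)
    if (!process.env.SKIP_CIRCLE_ENV) throw err

    circleEnv = {}
  }

  return Object.keys(circleEnv).filter((key) => text.includes(circleEnv[key]))
}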

circle-env-spec.js (new file)

@@ -0,0 +1,41 @@
const fs = require('fs').promises
const sinon = require('sinon')
const { expect } = require('chai')
const { _checkCanaries } = require('../circle-env')

describe('circle-env', () => {
  let cachedEnv = { ...process.env }

  afterEach(() => {
    sinon.restore()
    Object.assign(process.env, cachedEnv)
  })

  beforeEach(() => {
    process.env.CI = 'true'
    process.env.CIRCLE_INTERNAL_CONFIG = '/foo.json'
  })

  it('fails with missing canaries', async () => {
    sinon.stub(fs, 'readFile')
    .withArgs('/foo.json').resolves(JSON.stringify({
      Dispatched: { TaskInfo: { Environment: { somekey: 'someval' } } },
    }))

    try {
      await _checkCanaries()
      throw new Error('should not reach')
    } catch (err) {
      expect(err.message).to.include('Missing MAIN_CANARY')
    }
  })

  it('passes with canaries', async () => {
    sinon.stub(fs, 'readFile')
    .withArgs('/foo.json').resolves(JSON.stringify({
      Dispatched: { TaskInfo: { Environment: { MAIN_CANARY: 'true', CONTEXT_CANARY: 'true' } } },
    }))

    await _checkCanaries()
  })
})

verify-mocha-results-spec.js (new file)

@@ -0,0 +1,91 @@
const fs = require('fs').promises
const sinon = require('sinon')
const { expect } = require('chai')
const { verifyMochaResults } = require('../verify-mocha-results')

describe('verify-mocha-results', () => {
  let cachedEnv = { ...process.env }

  if (process.platform === 'win32') {
    // skip the rest of the tests
    return it('fails on windows', async () => {
      try {
        await verifyMochaResults()
        throw new Error('should not reach')
      } catch (err) {
        expect(err.message).to.equal('verifyMochaResults not supported on Windows')
      }
    })
  }

  afterEach(() => {
    sinon.restore()
    Object.assign(process.env, cachedEnv)
  })

  beforeEach(() => {
    process.env.CIRCLE_INTERNAL_CONFIG = '/foo.json'

    sinon.stub(fs, 'readFile')
    .withArgs('/foo.json').resolves(JSON.stringify({
      Dispatched: { TaskInfo: { Environment: { somekey: 'someval' } } },
    }))

    sinon.stub(fs, 'readdir').withArgs('/tmp/cypress/junit').resolves([
      'report.xml',
    ])
  })

  it('does not fail with normal report', async () => {
    fs.readFile
    .withArgs('/tmp/cypress/junit/report.xml')
    .resolves('<testsuites name="foo" time="1" tests="10" failures="0">')

    await verifyMochaResults()
  })

  context('env checking', () => {
    it('checks for protected env and fails and removes results when found', async () => {
      const spy = sinon.stub(fs, 'rm').withArgs('/tmp/cypress/junit', { recursive: true, force: true })

      fs.readFile
      .withArgs('/tmp/cypress/junit/report.xml')
      .resolves('<testsuites name="foo" time="1" tests="10" failures="0">someval')

      try {
        await verifyMochaResults()
        throw new Error('should not reach')
      } catch (err) {
        expect(err.message).to.include('somekey').and.not.include('someval')
        expect(spy.getCalls().length).to.equal(1)
      }
    })
  })

  context('test result checking', () => {
    it('checks for non-passing tests and fails when found', async () => {
      fs.readFile
      .withArgs('/tmp/cypress/junit/report.xml')
      .resolves('<testsuites name="foo" time="1" tests="10" failures="3">')

      try {
        await verifyMochaResults()
        throw new Error('should not reach')
      } catch (err) {
        expect(err.message).to.include('Expected the number of failures to be equal to 0')
      }
    })

    it('checks for 0 tests run and fails when found', async () => {
      fs.readFile
      .withArgs('/tmp/cypress/junit/report.xml')
      .resolves('<testsuites name="foo" time="1" tests="0" failures="0">')

      try {
        await verifyMochaResults()
        throw new Error('should not reach')
      } catch (err) {
        expect(err.message).to.include('Expected the total number of tests to be >0')
      }
    })
  })
})

scripts/verify-mocha-results.js (new file)

@@ -0,0 +1,123 @@
/* eslint-disable no-console */

// this is a safety script to ensure that Mocha tests ran, by checking:
// 1. that there are N test results in the reports dir (or at least 1, if N is not set)
// 2. each of them contains 0 failures and >0 tests
// additionally, it checks that no secrets are in the reports, since CI does not scrub
// reports for environment variables
// usage: yarn verify:mocha:results <N>

const Bluebird = require('bluebird')
const fs = require('fs').promises
const la = require('lazy-ass')
const path = require('path')
const { readCircleEnv } = require('./circle-env')

const RESULT_REGEX = /<testsuites name="([^"]+)" time="([^"]+)" tests="([^"]+)" failures="([^"]+)"(?: skipped="([^"]+)"|)>/
const REPORTS_PATH = '/tmp/cypress/junit'

const expectedResultCount = Number(process.argv[process.argv.length - 1])

const parseResult = (xml) => {
  const [name, time, tests, failures, skipped] = RESULT_REGEX.exec(xml).slice(1)

  return {
    name, time, tests: Number(tests), failures: Number(failures), skipped: Number(skipped || 0),
  }
}

const total = { tests: 0, failures: 0, skipped: 0 }

console.log(`Looking for reports in ${REPORTS_PATH}`)

// some env is ok in reports. this is based off of what Circle doesn't mask in stdout:
// https://circleci.com/blog/keep-environment-variables-private-with-secret-masking/
function isWhitelistedEnv (key, value) {
  return ['true', 'false', 'TRUE', 'FALSE'].includes(value)
    || ['nodejs_version', 'CF_DOMAIN'].includes(key)
    || value.length < 4
}

async function checkReportFile (filename, circleEnv) {
  console.log(`Checking that ${filename} contains a valid report...`)

  let xml; let result

  try {
    xml = await fs.readFile(path.join(REPORTS_PATH, filename))
  } catch (err) {
    throw new Error(`Unable to read the report in ${filename}: ${err.message}`)
  }

  try {
    result = parseResult(xml)
  } catch (err) {
    throw new Error(`Error parsing result: ${err.message}. File contents:\n\n${xml}`)
  }

  const { name, time, tests, failures, skipped } = result

  console.log(`Report parsed successfully. Name: ${name}\tTests ran: ${tests}\tFailing: ${failures}\tSkipped: ${skipped}\tTotal time: ${time}`)

  la(tests > 0, 'Expected the total number of tests to be >0, but it was', tests, 'instead.')
  la(failures === 0, 'Expected the number of failures to be equal to 0, but it was', failures, '. This stage should not have been reached. Check why the failed test stage did not cause this entire build to fail.')

  for (const key in circleEnv) {
    const value = circleEnv[key]

    if (!isWhitelistedEnv(key, value) && xml.includes(value)) {
      await fs.rm(REPORTS_PATH, { recursive: true, force: true })

      throw new Error(`Report contained the value of ${key}, which is a CI environment variable. This means that a failing test is exposing environment variables. Test reports will not be persisted for this job.`)
    }
  }

  total.tests += tests
  total.failures += failures
  total.skipped += skipped
}

async function checkReportFiles (filenames) {
  let circleEnv

  try {
    circleEnv = await readCircleEnv()
  } catch (err) {
    // set SKIP_CIRCLE_ENV to bypass, for local development
    if (!process.env.SKIP_CIRCLE_ENV) throw err

    circleEnv = {}
  }

  await Bluebird.mapSeries(filenames, (f) => checkReportFile(f, circleEnv))

  console.log('All reports are valid.')
  console.log(`Total tests ran: ${total.tests}\tTotal failing: ${total.failures}\tTotal skipped: ${total.skipped}`)
}

async function verifyMochaResults () {
  if (process.platform === 'win32') throw new Error('verifyMochaResults not supported on Windows')

  try {
    const filenames = await fs.readdir(REPORTS_PATH)
    const resultCount = filenames.length

    console.log(`Found ${resultCount} files in ${REPORTS_PATH}:`, filenames)

    if (!expectedResultCount) {
      console.log('Expecting at least 1 report...')
      la(resultCount > 0, 'Expected at least 1 report, but found', resultCount, '. Verify that all tests ran as expected.')
    } else {
      console.log(`Expecting exactly ${expectedResultCount} reports...`)
      la(expectedResultCount === resultCount, 'Expected', expectedResultCount, 'reports, but found', resultCount, '. Verify that all tests ran as expected.')
    }

    await checkReportFiles(filenames)
  } catch (err) {
    throw new Error(`Problem reading from ${REPORTS_PATH}: ${err.message}`)
  }
}

if (require.main === module) verifyMochaResults()

module.exports = { verifyMochaResults }
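
Note: for a sense of what RESULT_REGEX captures, here is parseResult applied to a typical junit root element; parseResult is module-internal, so this is a REPL-style illustration with made-up sample values rather than a usable import:

const sample = '<testsuites name="Mocha Tests" time="12.3" tests="42" failures="0" skipped="1">'

parseResult(sample)
// => { name: 'Mocha Tests', time: '12.3', tests: 42, failures: 0, skipped: 1 }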

scripts/verify_mocha_results.js (deleted)

@@ -1,80 +0,0 @@
/* eslint-disable no-console */

// this is a safety script to ensure that Mocha tests ran, by checking:
// 1. that there are N test results in the reports dir (or at least 1, if N is not set)
// 2. each of them contains 0 failures and >0 tests
// usage: yarn verify:mocha:results <N>

const Bluebird = require('bluebird')
const fse = Bluebird.promisifyAll(require('fs-extra'))
const la = require('lazy-ass')
const path = require('path')

const RESULT_REGEX = /<testsuites name="([^"]+)" time="([^"]+)" tests="([^"]+)" failures="([^"]+)"(?: skipped="([^"]+)"|)>/
const REPORTS_PATH = '/tmp/cypress/junit'

const expectedResultCount = Number(process.argv[process.argv.length - 1])

const parseResult = (xml) => {
  const [name, time, tests, failures, skipped] = RESULT_REGEX.exec(xml).slice(1)

  return {
    name, time, tests: Number(tests), failures: Number(failures), skipped: Number(skipped || 0),
  }
}

const total = { tests: 0, failures: 0, skipped: 0 }

console.log(`Looking for reports in ${REPORTS_PATH}`)

fse.readdir(REPORTS_PATH)
.catch((err) => {
  throw new Error(`Problem reading from ${REPORTS_PATH}: ${err.message}`)
})
.then((files) => {
  const resultCount = files.length

  console.log(`Found ${resultCount} files in ${REPORTS_PATH}:`, files)

  if (!expectedResultCount) {
    console.log('Expecting at least 1 report...')
    la(resultCount > 0, 'Expected at least 1 report, but found', resultCount, '. Verify that all tests ran as expected.')
  } else {
    console.log(`Expecting exactly ${expectedResultCount} reports...`)
    la(expectedResultCount === resultCount, 'Expected', expectedResultCount, 'reports, but found', resultCount, '. Verify that all tests ran as expected.')
  }

  return Bluebird.mapSeries(files, (file) => {
    console.log(`Checking that ${file} contains a valid report...`)

    return fse.readFile(path.join(REPORTS_PATH, file))
    .catch((err) => {
      throw new Error(`Unable to read the report in ${file}: ${err.message}`)
    })
    .then((xml) => {
      try {
        return parseResult(xml)
      } catch (err) {
        throw new Error(`Error parsing result: ${err.message}. File contents:\n\n${xml}`)
      }
    })
    .then(({ name, time, tests, failures, skipped }) => {
      console.log(`Report parsed successfully. Name: ${name}\tTests ran: ${tests}\tFailing: ${failures}\tSkipped: ${skipped}\tTotal time: ${time}`)

      la(tests > 0, 'Expected the total number of tests to be >0, but it was', tests, 'instead.')
      la(failures === 0, 'Expected the number of failures to be equal to 0, but it was', failures, '. This stage should not have been reached. Check why the failed test stage did not cause this entire build to fail.')

      total.tests += tests
      total.failures += failures
      total.skipped += skipped
    })
  })
})
.then(() => {
  console.log('All reports are valid.')
  console.log(`Total tests ran: ${total.tests}\tTotal failing: ${total.failures}\tTotal skipped: ${total.skipped}`)
})
.catch((err) => {
  console.error(err)
  process.exit(1)
})