Files
cypress/system-tests/lib/resultsUtils.ts
T
Cacie Prins 3288aa5069 chore(dependency): Upgrade Electron 27 (#28792)
* dependency: upgrades electron from 25 to 26

* bump cache run ci

* fix docker img names

* ref electron upgrade branch

* chore: updating v8 snapshot cache

* chore: updating v8 snapshot cache

* chore: updating v8 snapshot cache

* debug

* debug

* debug

* update search string for resize observer error swallow

* debug

* update integrity check

* update electron readme with upgrade troubleshooting section

* point to new publish binary workflow branch for electron 27

* update electron readme with locations of chromium & node versions for a given electron version

* update node versions and docker image refs

* update electron version to 27.1.3

* fix db nativeBinding arg

* chore: updating v8 snapshot cache

* install setuptools on mac when updating v8 snapshot cache

* chore: updating v8 snapshot cache

* chore: updating v8 snapshot cache

* run workflows on this branch run ci

* require addon directly and pass to better-sqlite3 init; debug

* rm debug

* try loading better-sqlite with a more dynamic filename

* bump electron version

* bump electron version

* bump electron version -- run ci

* bump electron version -- run ci

* bump electron version -- run ci

* bump electron version -- run ci

* bump electron version -- run ci

* add a step to update workflows.yml to electron upgrade process

* reduce retry limit on issue 1244 test to prevent circle from thinking tests have hung

* target main branch of binary publish workflow? run ci

* Update .node-version -- run ci

* Update CHANGELOG.md

* Update module_api_spec.ts

* point publish binary back to electron upgrade branch

* Adds some logging re: cachedDataVersion

* use precise electron version for better-sqlite3 for centos7

* Update CHANGELOG.md

* chore: fix issue with bytenode (#28568)

* fix jsc for 27 -- run ci

* Update smoke.js

* fix build

* update electron upgrade steps

* Update packages/electron/README.md

Co-authored-by: Mike McCready <66998419+MikeMcC399@users.noreply.github.com>

* Update cli/CHANGELOG.md

Co-authored-by: Mike McCready <66998419+MikeMcC399@users.noreply.github.com>

* fix DebugEmptyStates component test

* try to fix downstream build -- run ci (#28649)

Co-authored-by: Cacie Prins <cacieprins@users.noreply.github.com>

* point to consolidated binary publish branch

* revert webpack-preprocessor-awesome-typescript-loader update

* revert certain system tests

* increase padding for module api system test duration window

* account for differing screenshot sizes

* screenshot size differs locally vs ci

* update protocol snapshots

* Update after-pack-hook.js

* fix flaky slideshow

* correct the chromium version in changelog

* use 18.17.1 internal images

* workflow filters

* fix trailing checkbox in electron readme

* add solution to crashing better-sqlite3 in electron readme

* Update packages/electron/README.md

Co-authored-by: Bill Glesias <bglesias@gmail.com>

* Update scripts/after-pack-hook.js

Co-authored-by: Ryan Manuel <ryanm@cypress.io>

* Update scripts/after-pack-hook.js

Co-authored-by: Ryan Manuel <ryanm@cypress.io>

* Update scripts/after-pack-hook.js

Co-authored-by: Ryan Manuel <ryanm@cypress.io>

* add branch to setup_should_persist_artifacts

* debug app e2e test

* bump cache

* upgrade browsers-internal to chrome 121

* revert to chrome 118 images

* bump cache

* chore: updating v8 snapshot cache

* chore: updating v8 snapshot cache

* chore: updating v8 snapshot cache

* bump cache

---------

Co-authored-by: cypress-bot[bot] <+cypress-bot[bot]@users.noreply.github.com>
Co-authored-by: Ryan Manuel <ryanm@cypress.io>
Co-authored-by: Mike McCready <66998419+MikeMcC399@users.noreply.github.com>
Co-authored-by: Bill Glesias <bglesias@gmail.com>
2024-02-15 10:33:51 -05:00

288 lines
7.6 KiB
TypeScript

import systemTests from './system-tests'
import dayjs from 'dayjs'
import _ from 'lodash'
const STATIC_DATE = '2018-02-01T20:14:19.323Z'

// Asserts that the numeric value at the `duration` path on `obj` falls within
// [low, high], then overwrites it with the static `reset` value so that
// snapshot comparisons stay deterministic. No-ops when the path does not
// hold a number.
const expectDurationWithin = function (obj, duration, low, high, reset) {
  const measured = _.get(obj, duration)

  // nothing to validate when the duration is absent or non-numeric
  if (!_.isNumber(measured)) {
    return
  }

  // ensure the duration is within range
  expect(measured, duration).to.be.within(low, high)

  // once validated, pin the duration to a fixed value
  return _.set(obj, duration, reset)
}
// Asserts that the timestamp at the `start` path on `obj` comes strictly
// before the timestamp at the `end` path, then replaces both with
// STATIC_DATE so that snapshot comparisons stay deterministic.
const expectStartToBeBeforeEnd = function (obj, start, end) {
  const startedAt = _.get(obj, start)
  const endedAt = _.get(obj, end)

  const failureMessage = `expected start: ${startedAt} to be before end: ${endedAt}`

  expect(dayjs(startedAt).isBefore(endedAt), failureMessage).to.be.true

  // once validated, pin both timestamps to the static date
  _.set(obj, start, STATIC_DATE)

  return _.set(obj, end, STATIC_DATE)
}
// Validates every timing entry found at the `timings` path on `obj`, then
// rewrites each entry to a fixed value (via expectDurationWithin / constants)
// so that snapshot comparisons stay deterministic. No-ops when the path
// holds nothing.
const normalizeTestTimings = function (obj, timings) {
  const timingEntries = _.get(obj, timings)

  // bail if we don't have any timings
  if (!timingEntries) {
    return
  }

  const normalized = _.mapValues(timingEntries, (val, key) => {
    if (key === 'lifecycle') {
      // ensure that lifecycle is under 500ms, then pin to 100
      expect(val, 'lifecycle').to.be.within(0, 500)

      return 100
    }

    if (key === 'test') {
      // ensure test fn duration is within 2000ms
      expectDurationWithin(val, 'fnDuration', 0, 2000, 400)
      // ensure test after fn duration is within 500ms
      expectDurationWithin(val, 'afterFnDuration', 0, 500, 200)

      return val
    }

    // any other key holds an array of hook timings
    return _.map(val, (hook) => {
      // ensure hook fn duration is within 1500ms
      expectDurationWithin(hook, 'fnDuration', 0, 1500, 400)
      // ensure hook after fn duration is within 500ms
      expectDurationWithin(hook, 'afterFnDuration', 0, 500, 200)

      return hook
    })
  })

  _.set(obj, 'timings', normalized)
}
// Validates the timing data on an array of spec runs and mutates each run in
// place, replacing volatile values (durations, timestamps, error output,
// screenshot ids) with static ones so that snapshot comparisons stay
// deterministic. Throws (via chai `expect`) when any duration or timestamp
// falls outside its expected window.
export const expectRunsToHaveCorrectTimings = (runs = []) => {
  runs.forEach((run) => {
    // config is expected to have been stripped from each run already
    expect(run.config).to.not.exist
    expectStartToBeBeforeEnd(run, 'stats.wallClockStartedAt', 'stats.wallClockEndedAt')
    expectStartToBeBeforeEnd(run, 'reporterStats.start', 'reporterStats.end')

    // grab all the wallclock durations for all test (and retried attempts)
    // because our duration should be at least this
    const attempts = _.flatMap(run.tests, (test) => test.attempts)
    const wallClocks = _.sumBy(attempts, 'wallClockDuration')

    // ensure each run's duration is around the sum
    // of all tests wallclock duration
    // TODO: if this remains flaky, increase padding here
    // and add an additional non-e2e performance test with baseline p95
    expectDurationWithin(
      run,
      'stats.wallClockDuration',
      wallClocks,
      wallClocks + 400, // add 400ms to account for padding
      1234,
    )

    expectDurationWithin(
      run,
      'reporterStats.duration',
      wallClocks,
      wallClocks + 400, // add 400ms to account for padding
      1234,
    )

    // fnDuration + afterFnDuration is the total time attributable to a
    // single test or hook invocation
    const addFnAndAfterFn = (obj) => {
      return obj.fnDuration + obj.afterFnDuration
    }

    _.each(run.tests, (test) => {
      try {
        if (test.displayError) {
          test.displayError = systemTests.normalizeStdout(test.displayError)
        }

        const attempts = test.attempts

        // now make sure that each tests wallclock duration
        // is around the sum of all of its timings
        attempts.forEach((attempt) => {
          if (attempt.error) {
            attempt.error.stack = systemTests.normalizeStdout(attempt.error.stack).trim()
          }

          // cannot sum an object, must use array of values
          const timings = _.sumBy(_.values(attempt.timings), (val) => {
            if (_.isArray(val)) {
              // array for hooks
              return _.sumBy(val, addFnAndAfterFn)
            }

            if (_.isObject(val)) {
              // obj for test itself
              return addFnAndAfterFn(val)
            }

            return val
          })

          expectDurationWithin(
            attempt,
            'wallClockDuration',
            timings,
            timings + 80, // add 80ms to account for padding
            1234,
          )

          // now reset all the test timings
          normalizeTestTimings(attempt, 'timings')

          if (attempt.wallClockStartedAt) {
            // wallClockStartedAt must round-trip through Date (i.e. be a
            // valid ISO-8601 timestamp) before we pin it to STATIC_DATE
            const d = new Date(attempt.wallClockStartedAt)

            expect(d.toJSON()).to.eq(attempt.wallClockStartedAt)
            attempt.wallClockStartedAt = STATIC_DATE

            expect(attempt.videoTimestamp).to.be.a('number')
            attempt.videoTimestamp = 9999
          }
        })
      } catch (e) {
        // add context to the assertion failure before rethrowing
        e.message = `Error during validation for test \n${e.message}`
        throw e
      }
    })

    run.screenshots = _.map(run.screenshots, (screenshot) => {
      // validate the id length, then pin it to a static value
      expect(screenshot.screenshotId).to.have.length(5)
      screenshot.screenshotId = 'some-random-id'

      // takenAt must round-trip through Date before being pinned
      const d = new Date(screenshot.takenAt)

      expect(d.toJSON()).to.eq(screenshot.takenAt)
      screenshot.takenAt = STATIC_DATE

      return screenshot
    })
  })
}
// Validates the JSON result object returned by the Cypress module API and
// mutates it in place, replacing volatile values (config, durations,
// timestamps, browser/OS/version metadata, paths) with static ones so that
// snapshot comparisons stay deterministic.
//
// opts.e2ePath - expected `projectRoot` for the run
// opts.runs    - expected number of spec runs in the result
// opts.video   - whether a video path is expected on each run (defaults to true)
export const expectCorrectModuleApiResult = (json, opts: {
  e2ePath: string
  runs: number
  video?: boolean
}) => {
  // default without mutating the caller's options object
  const expectVideo = opts.video ?? true

  // should be n runs
  expect(json.runs).to.have.length(opts.runs)

  // ensure that config has been set
  expect(json.config).to.be.an('object')
  expect(json.config.projectName).to.eq('e2e')
  expect(json.config.projectRoot).to.eq(opts.e2ePath)

  // but zero out config because it's too volatile
  json.config = {}

  expect(json.browserPath).to.be.a('string')
  expect(json.browserName).to.be.a('string')
  expect(json.browserVersion).to.be.a('string')
  expect(json.osName).to.be.a('string')
  expect(json.osVersion).to.be.a('string')
  expect(json.cypressVersion).to.be.a('string')

  // pin environment-specific metadata to static values
  _.extend(json, {
    browserPath: 'path/to/browser',
    browserName: 'FooBrowser',
    browserVersion: '88',
    osName: 'FooOS',
    osVersion: '1234',
    cypressVersion: '9.9.9',
  })

  // ensure the totals are accurate
  expect(json.totalTests).to.eq(
    _.sum([
      json.totalFailed,
      json.totalPassed,
      json.totalPending,
      json.totalSkipped,
    ]),
  )

  // ensure totalDuration matches all of the stats durations
  expectDurationWithin(
    json,
    'totalDuration',
    _.sumBy(json.runs, 'stats.duration'),
    _.sumBy(json.runs, 'stats.duration'),
    5555,
  )

  expectStartToBeBeforeEnd(json, 'startedTestsAt', 'endedTestsAt')

  json.runs.forEach((run) => {
    expectStartToBeBeforeEnd(run, 'stats.startedAt', 'stats.endedAt')
    expectStartToBeBeforeEnd(run, 'reporterStats.start', 'reporterStats.end')

    const wallClocks = _.sumBy(run.tests, 'duration')

    // ensure each run's duration is around the sum
    // of all tests wallclock duration
    expectDurationWithin(
      run,
      'stats.duration',
      wallClocks,
      wallClocks + 1000, // add 1000ms to account for padding
      1234,
    )

    expectDurationWithin(
      run,
      'reporterStats.duration',
      wallClocks,
      wallClocks + 1000, // add 1000ms to account for padding
      1234,
    )

    run.spec.absolute = systemTests.normalizeStdout(run.spec.absolute)

    _.each(run.tests, (test) => {
      if (test.displayError) {
        test.displayError = systemTests.normalizeStdout(test.displayError)
      }

      test.duration = 1234
    })

    if (expectVideo) {
      // normalize video path
      run.video = systemTests.normalizeStdout(run.video)
    }

    run.screenshots.forEach((screenshot) => {
      // takenAt must round-trip through Date (i.e. be a valid ISO-8601
      // timestamp) before being pinned to STATIC_DATE
      const d = new Date(screenshot.takenAt)

      expect(d.toJSON()).to.eq(screenshot.takenAt)
      screenshot.takenAt = STATIC_DATE

      screenshot.path = systemTests.normalizeStdout(screenshot.path)
    })
  })
}