fix: improve memory management for chromium-based browsers (#25462)

This commit is contained in:
Matt Schile
2023-01-18 15:35:57 -07:00
committed by GitHub
parent 991c532447
commit d1dc1258ea
40 changed files with 1614 additions and 71 deletions

View File

@@ -28,7 +28,7 @@ mainBuildFilters: &mainBuildFilters
only:
- develop
- /^release\/\d+\.\d+\.\d+$/
- 'macOS-launch-arm-browser'
- 'mschile/chrome_memory_fix'
# usually we don't build Mac app - it takes a long time
# but sometimes we want to really confirm we are doing the right thing
@@ -37,7 +37,7 @@ macWorkflowFilters: &darwin-workflow-filters
when:
or:
- equal: [ develop, << pipeline.git.branch >> ]
- equal: [ 'macOS-launch-arm-browser', << pipeline.git.branch >> ]
- equal: [ 'mschile/chrome_memory_fix', << pipeline.git.branch >> ]
- matches:
pattern: /^release\/\d+\.\d+\.\d+$/
value: << pipeline.git.branch >>
@@ -45,7 +45,7 @@ linuxArm64WorkflowFilters: &linux-arm64-workflow-filters
when:
or:
- equal: [ develop, << pipeline.git.branch >> ]
- equal: [ 'macOS-launch-arm-browser', << pipeline.git.branch >> ]
- equal: [ 'mschile/chrome_memory_fix', << pipeline.git.branch >> ]
- matches:
pattern: /^release\/\d+\.\d+\.\d+$/
value: << pipeline.git.branch >>
@@ -63,7 +63,7 @@ windowsWorkflowFilters: &windows-workflow-filters
when:
or:
- equal: [ develop, << pipeline.git.branch >> ]
- equal: [ 'retry-flake', << pipeline.git.branch >> ]
- equal: [ 'mschile/chrome_memory_fix', << pipeline.git.branch >> ]
- matches:
pattern: /^release\/\d+\.\d+\.\d+$/
value: << pipeline.git.branch >>
@@ -92,7 +92,7 @@ executors:
mac:
macos:
# Executor should have Node >= required version
xcode: "14.0.0"
xcode: "14.0.1"
resource_class: macos.x86.medium.gen2
environment:
PLATFORM: darwin
@@ -129,7 +129,7 @@ commands:
- run:
name: Check current branch to persist artifacts
command: |
if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "release/"* && "$CIRCLE_BRANCH" != "macOS-launch-arm-browser" ]]; then
if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "release/"* && "$CIRCLE_BRANCH" != "mschile/chrome_memory_fix" ]]; then
echo "Not uploading artifacts or posting install comment for this branch."
circleci-agent step halt
fi
@@ -1357,6 +1357,40 @@ jobs:
path: /tmp/cypress
- store-npm-logs
memory-driver-tests:
<<: *defaults
parameters:
<<: *defaultsParameters
resource_class:
type: string
default: medium
resource_class: << parameters.resource_class >>
parallelism: 1
steps:
- restore_cached_workspace
- run:
name: Driver memory tests in Electron
environment:
CYPRESS_CONFIG_ENV: production
command: |
echo Current working directory is $PWD
node --version
if [[ `node ../../scripts/get-platform-key.js` == 'linux-arm64' ]]; then
# these are missing on Circle and there is no way to pre-install them on Arm
sudo apt-get update
sudo apt-get install -y libgbm-dev
fi
CYPRESS_INTERNAL_MEMORY_SAVE_STATS=true \
DEBUG=cypress*memory \
yarn cypress:run --browser electron --spec "cypress/e2e/memory/*.cy.*"
working_directory: packages/driver
- store_test_results:
path: /tmp/cypress
- store-npm-logs
- store_artifacts:
path: packages/driver/cypress/logs/memory
unit-tests:
<<: *defaults
parameters:
@@ -2422,6 +2456,9 @@ linux-x64-workflow: &linux-x64-workflow
context: test-runner:cypress-record-key
requires:
- build
- memory-driver-tests:
requires:
- build
- run-frontend-shared-component-tests-chrome:
context: [test-runner:cypress-record-key, test-runner:launchpad-tests, test-runner:percy]
percy: true
@@ -2548,6 +2585,12 @@ linux-x64-workflow: &linux-x64-workflow
- run-vite-dev-server-integration-tests
- v8-integration-tests
- create-build-artifacts:
context:
- test-runner:upload
- test-runner:commit-status-checks
requires:
- build
# various testing scenarios, like building full binary
# and testing it on a real project
- test-against-staging:
@@ -2563,12 +2606,6 @@ linux-x64-workflow: &linux-x64-workflow
<<: *mainBuildFilters
requires:
- build
- create-build-artifacts:
context:
- test-runner:upload
- test-runner:commit-status-checks
requires:
- build
- test-npm-module-on-minimum-node-version:
requires:
- create-build-artifacts
@@ -2663,6 +2700,12 @@ linux-arm64-workflow: &linux-arm64-workflow
resource_class: arm.medium
requires:
- linux-arm64-build
- memory-driver-tests:
name: linux-arm64-memory-driver-tests
executor: linux-arm64
resource_class: arm.medium
requires:
- linux-arm64-build
darwin-x64-workflow: &darwin-x64-workflow
jobs:
@@ -2703,6 +2746,12 @@ darwin-x64-workflow: &darwin-x64-workflow
resource_class: macos.x86.medium.gen2
requires:
- darwin-x64-build
- memory-driver-tests:
name: darwin-x64-memory-driver-tests
executor: mac
resource_class: macos.x86.medium.gen2
requires:
- darwin-x64-build
darwin-arm64-workflow: &darwin-arm64-workflow
jobs:
@@ -2736,6 +2785,12 @@ darwin-arm64-workflow: &darwin-arm64-workflow
resource_class: cypress-io/latest_m1
requires:
- darwin-arm64-build
- memory-driver-tests:
name: darwin-arm64-memory-driver-tests
executor: darwin-arm64
resource_class: cypress-io/latest_m1
requires:
- darwin-arm64-build
windows-workflow: &windows-workflow
jobs:
@@ -2798,6 +2853,12 @@ windows-workflow: &windows-workflow
resource_class: windows.large
requires:
- windows-build
- memory-driver-tests:
name: windows-memory-driver-tests
executor: windows
resource_class: windows.large
requires:
- windows-build
workflows:
linux-x64:

5
.vscode/cspell.json vendored
View File

@@ -14,10 +14,13 @@
"Fetchables",
"forcedefault",
"getenv",
"GIBIBYTES",
"graphcache",
"headlessui",
"Iconify",
"intlify",
"KIBIBYTE",
"kibibytes",
"Lachlan",
"loggedin",
"mksnapshot",
@@ -27,6 +30,7 @@
"OVERLIMIT",
"overscan",
"packherd",
"pidusage",
"Pinia",
"pnpm",
"pseudoclass",
@@ -41,6 +45,7 @@
"snapshotting",
"sourcemaps",
"speclist",
"systeminformation",
"testid",
"TIMEDOUT",
"titleize",

View File

@@ -3044,6 +3044,11 @@ declare namespace Cypress {
* @default false
*/
experimentalWebKitSupport: boolean
/**
* Enables support for improved memory management within Chromium-based browsers.
* @default false
*/
experimentalMemoryManagement: boolean
/**
* Number of times to retry a failed test.
* If a number is set, tests will retry in both runMode and openMode.

View File

@@ -151,6 +151,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'pending': false,
'body': '[body]',
@@ -167,6 +168,7 @@ export const snapshots = {
'pass',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -193,6 +195,7 @@ export const snapshots = {
'test end',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -234,6 +237,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -632,6 +636,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'pending': false,
'body': '[body]',
@@ -742,6 +747,7 @@ export const snapshots = {
'pass',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -789,6 +795,7 @@ export const snapshots = {
'test end',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -851,6 +858,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -1270,6 +1278,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r3',
'order': 1,
'title': 'test 2',
'pending': false,
'body': '[body]',
@@ -1380,6 +1389,7 @@ export const snapshots = {
'pass',
{
'id': 'r3',
'order': 1,
'title': 'test 2',
'state': 'passed',
'pending': false,
@@ -1427,6 +1437,7 @@ export const snapshots = {
'test end',
{
'id': 'r3',
'order': 1,
'title': 'test 2',
'state': 'passed',
'pending': false,
@@ -1489,6 +1500,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r3',
'order': 1,
'title': 'test 2',
'state': 'passed',
'pending': false,
@@ -2000,6 +2012,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'pending': false,
'body': '[body]',
@@ -2185,6 +2198,7 @@ export const snapshots = {
'pass',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -2242,6 +2256,7 @@ export const snapshots = {
'test end',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -2314,6 +2329,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -2857,6 +2873,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'pending': false,
'body': '[body]',
@@ -3012,6 +3029,7 @@ export const snapshots = {
'pass',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -3062,6 +3080,7 @@ export const snapshots = {
'test end',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -3112,6 +3131,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r3',
'order': 1,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -4158,6 +4178,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r7',
'order': 4,
'title': 'test 1',
'pending': false,
'body': '[body]',
@@ -4199,6 +4220,7 @@ export const snapshots = {
'retry',
{
'id': 'r7',
'order': 4,
'title': 'test 1',
'hookName': 'after each',
'err': {
@@ -4314,6 +4336,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r7',
'order': 4,
'title': 'test 1',
'hookName': 'after each',
'err': {
@@ -4361,6 +4384,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r7',
'order': 4,
'title': 'test 1',
'pending': false,
'body': '[body]',
@@ -4459,6 +4483,7 @@ export const snapshots = {
'pass',
{
'id': 'r7',
'order': 4,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -4497,6 +4522,7 @@ export const snapshots = {
'test end',
{
'id': 'r7',
'order': 4,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -4535,6 +4561,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r7',
'order': 4,
'title': 'test 1',
'state': 'passed',
'pending': false,
@@ -5688,6 +5715,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r4',
'order': 2,
'title': 'test 2',
'pending': false,
'body': '[body]',
@@ -5723,6 +5751,7 @@ export const snapshots = {
'retry',
{
'id': 'r4',
'order': 2,
'title': 'test 2',
'err': {
'message': '[error message]',
@@ -5813,6 +5842,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r4',
'order': 2,
'title': 'test 2',
'err': {
'message': '[error message]',
@@ -5879,6 +5909,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r4',
'order': 2,
'title': 'test 2',
'pending': false,
'body': '[body]',
@@ -5952,6 +5983,7 @@ export const snapshots = {
'pass',
{
'id': 'r4',
'order': 2,
'title': 'test 2',
'state': 'passed',
'pending': false,
@@ -5992,6 +6024,7 @@ export const snapshots = {
'test end',
{
'id': 'r4',
'order': 2,
'title': 'test 2',
'state': 'passed',
'pending': false,
@@ -6032,6 +6065,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r4',
'order': 2,
'title': 'test 2',
'state': 'passed',
'pending': false,
@@ -6539,6 +6573,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r2',
'order': 1,
'title': 'visits',
'pending': false,
'body': '[body]',
@@ -6555,6 +6590,7 @@ export const snapshots = {
'retry',
{
'id': 'r2',
'order': 1,
'title': 'visits',
'err': {
'message': '[error message]',
@@ -6593,6 +6629,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r2',
'order': 1,
'title': 'visits',
'err': {
'message': '[error message]',
@@ -6626,6 +6663,7 @@ export const snapshots = {
'test:before:run',
{
'id': 'r2',
'order': 1,
'title': 'visits',
'pending': false,
'body': '[body]',
@@ -6642,6 +6680,7 @@ export const snapshots = {
'fail',
{
'id': 'r2',
'order': 1,
'title': 'visits',
'err': {
'message': '[error message]',
@@ -6679,6 +6718,7 @@ export const snapshots = {
'test end',
{
'id': 'r2',
'order': 1,
'title': 'visits',
'err': {
'message': '[error message]',
@@ -6711,6 +6751,7 @@ export const snapshots = {
'test:after:run',
{
'id': 'r2',
'order': 1,
'title': 'visits',
'err': {
'message': '[error message]',

View File

@@ -145,7 +145,7 @@ export function runCypressInCypressMochaEventsTest<T> (snapshots: T, snapToCompa
// "cypress in cypress"
bus.on('assert:cypress:in:cypress', (snapshot: CypressInCypressMochaEvent[]) => {
const expected = snapshots[snapToCompare]
const diff = disparity.unifiedNoColor(JSON.stringify(snapshot, null, 2), JSON.stringify(expected, null, 2), {})
const diff = disparity.unifiedNoColor(JSON.stringify(expected, null, 2), JSON.stringify(snapshot, null, 2), {})
if (diff !== '') {
/* eslint-disable no-console */

View File

@@ -540,6 +540,18 @@ export class EventManager {
})
})
Cypress.on('run:start', async () => {
if (Cypress.config('experimentalMemoryManagement') && Cypress.isBrowser({ family: 'chromium' })) {
await Cypress.backend('start:memory:profiling', Cypress.config('spec'))
}
})
Cypress.on('run:end', async () => {
if (Cypress.config('experimentalMemoryManagement') && Cypress.isBrowser({ family: 'chromium' })) {
await Cypress.backend('end:memory:profiling')
}
})
driverToLocalEvents.forEach((event) => {
Cypress.on(event, (...args: unknown[]) => {
// special case for asserting the correct mocha events + payload
@@ -569,8 +581,16 @@ export class EventManager {
this.localBus.emit('script:error', err)
})
Cypress.on('test:before:run:async', (_attr, test) => {
Cypress.on('test:before:run:async', async (_attr, test) => {
this.studioStore.interceptTest(test)
// if the experimental flag is on and we are in a chromium based browser,
// check the memory pressure to determine if garbage collection is needed
if (Cypress.config('experimentalMemoryManagement') && Cypress.isBrowser({ family: 'chromium' })) {
await Cypress.backend('check:memory:pressure', {
test: { title: test.title, order: test.order, currentRetry: test.currentRetry() },
})
}
})
Cypress.on('test:after:run', (test) => {

View File

@@ -37,6 +37,7 @@ exports['config/src/index .getDefaultValues returns list of public config keys 1
'experimentalFetchPolyfill': false,
'experimentalInteractiveRunEvents': false,
'experimentalRunAllSpecs': false,
'experimentalMemoryManagement': false,
'experimentalModifyObstructiveThirdPartyCode': false,
'experimentalSkipDomainInjection': null,
'experimentalOriginDependencies': false,
@@ -123,6 +124,7 @@ exports['config/src/index .getDefaultValues returns list of public config keys f
'experimentalFetchPolyfill': false,
'experimentalInteractiveRunEvents': false,
'experimentalRunAllSpecs': false,
'experimentalMemoryManagement': false,
'experimentalModifyObstructiveThirdPartyCode': false,
'experimentalSkipDomainInjection': null,
'experimentalOriginDependencies': false,
@@ -205,6 +207,7 @@ exports['config/src/index .getPublicConfigKeys returns list of public config key
'experimentalFetchPolyfill',
'experimentalInteractiveRunEvents',
'experimentalRunAllSpecs',
'experimentalMemoryManagement',
'experimentalModifyObstructiveThirdPartyCode',
'experimentalSkipDomainInjection',
'experimentalOriginDependencies',

View File

@@ -209,6 +209,11 @@ const driverConfigOptions: Array<DriverConfigOption> = [
defaultValue: false,
validation: validate.isBoolean,
isExperimental: true,
}, {
name: 'experimentalMemoryManagement',
defaultValue: false,
validation: validate.isBoolean,
isExperimental: true,
}, {
name: 'experimentalModifyObstructiveThirdPartyCode',
defaultValue: false,
@@ -226,6 +231,7 @@ const driverConfigOptions: Array<DriverConfigOption> = [
defaultValue: false,
validation: validate.isBoolean,
isExperimental: true,
requireRestartOnChange: 'server',
}, {
name: 'experimentalSourceRewriting',
defaultValue: false,

View File

@@ -1055,6 +1055,7 @@ describe('config/src/project/utils', () => {
experimentalSkipDomainInjection: { value: null, from: 'default' },
experimentalFetchPolyfill: { value: false, from: 'default' },
experimentalInteractiveRunEvents: { value: false, from: 'default' },
experimentalMemoryManagement: { value: false, from: 'default' },
experimentalOriginDependencies: { value: false, from: 'default' },
experimentalRunAllSpecs: { value: false, from: 'default' },
experimentalSingleTabRunMode: { value: false, from: 'default' },
@@ -1151,6 +1152,7 @@ describe('config/src/project/utils', () => {
experimentalSkipDomainInjection: { value: null, from: 'default' },
experimentalFetchPolyfill: { value: false, from: 'default' },
experimentalInteractiveRunEvents: { value: false, from: 'default' },
experimentalMemoryManagement: { value: false, from: 'default' },
experimentalOriginDependencies: { value: false, from: 'default' },
experimentalRunAllSpecs: { value: false, from: 'default' },
experimentalSingleTabRunMode: { value: false, from: 'default' },

View File

@@ -4,6 +4,7 @@ import { devServer as cypressWebpackDevServer } from '@cypress/webpack-dev-serve
export default defineConfig({
projectId: 'ypt4pf',
experimentalStudio: true,
experimentalMemoryManagement: true,
experimentalWebKitSupport: true,
hosts: {
'foobar.com': '127.0.0.1',

View File

@@ -59,7 +59,7 @@ describe('cy.session', { retries: 0 }, () => {
.then(async () => {
cy.spy(Cypress, 'action').log(false)
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(Cypress.action).to.be.calledWith('cy:url:changed', '')
expect(Cypress.action).to.be.calledWith('cy:visit:blank', { testIsolation: true })
@@ -71,7 +71,7 @@ describe('cy.session', { retries: 0 }, () => {
it('clears session data before each run', async () => {
const clearCurrentSessionData = cy.spy(Cypress.session, 'clearCurrentSessionData')
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(clearCurrentSessionData).to.be.called
})
@@ -79,7 +79,7 @@ describe('cy.session', { retries: 0 }, () => {
it('resets rendered html origins before each run', async () => {
const backendSpy = cy.spy(Cypress, 'backend')
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(backendSpy).to.be.calledWith('reset:rendered:html:origins')
})
@@ -94,7 +94,7 @@ describe('cy.session', { retries: 0 }, () => {
.then(async () => {
cy.spy(Cypress, 'action').log(false)
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(Cypress.action).to.be.calledWith('cy:url:changed', '')
expect(Cypress.action).to.be.calledWith('cy:visit:blank', { testIsolation: true })
@@ -789,7 +789,7 @@ describe('cy.session', { retries: 0 }, () => {
.then(async () => {
cy.spy(Cypress, 'action').log(false)
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(Cypress.action).not.to.be.calledWith('cy:url:changed')
expect(Cypress.action).not.to.be.calledWith('cy:visit:blank')
@@ -800,7 +800,7 @@ describe('cy.session', { retries: 0 }, () => {
it('does not clear session data before each run', async () => {
const clearCurrentSessionData = cy.spy(Cypress.session, 'clearCurrentSessionData')
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(clearCurrentSessionData).not.to.be.called
})
@@ -808,7 +808,7 @@ describe('cy.session', { retries: 0 }, () => {
it('does not reset rendered html origins before each run', async () => {
const backendSpy = cy.spy(Cypress, 'backend')
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(backendSpy).not.to.be.calledWith('reset:rendered:html:origins')
})
@@ -823,7 +823,7 @@ describe('cy.session', { retries: 0 }, () => {
.then(async () => {
cy.spy(Cypress, 'action').log(false)
await Cypress.action('runner:test:before:run:async', {})
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
expect(Cypress.action).not.to.be.calledWith('cy:url:changed')
expect(Cypress.action).not.to.be.calledWith('cy:visit:blank')

View File

@@ -659,8 +659,8 @@ describe('src/cy/commands/window', () => {
it('changes viewport and then resets back to the original', () => {
const { viewportHeight, viewportWidth } = Cypress.config()
cy.viewport(500, 400).then(() => {
Cypress.action('runner:test:before:run:async', {})
cy.viewport(500, 400).then(async () => {
await Cypress.action('runner:test:before:run:async', {}, Cypress.state('runnable'))
.then(() => {
expect(Cypress.config('viewportWidth')).to.eq(viewportWidth)
expect(Cypress.config('viewportHeight')).to.eq(viewportHeight)

View File

@@ -0,0 +1,7 @@
// Chromium-only spec used to exercise the memory-profiling machinery:
// registers 50 identical passing tests, each visiting a page that
// allocates a large amount of DOM/heap memory (see the /memory route).
describe('memory spec', { browser: { family: 'chromium' } }, () => {
  Array.from({ length: 50 }, (_, i) => i + 1).forEach((testNumber) => {
    it(`test ${testNumber} passes`, () => {
      cy.visit('http://localhost:3500/memory')
    })
  })
})

View File

@@ -331,6 +331,23 @@ const createApp = (port) => {
res.sendStatus(200)
})
app.get('/memory', (req, res) => {
res.send(`
<html>
<body></body>
<script>
for (let i = 0; i < 100; i++) {
const el = document.createElement('p')
el.id = 'p' + i
el.innerHTML = 'x'.repeat(100000)
document.body.appendChild(el)
}
</script>
</html>
`)
})
app.use(express.static(path.join(__dirname, '..')))
app.use(require('errorhandler')())

View File

@@ -320,11 +320,10 @@ function patchTestClone () {
const ret = testClone.apply(this, arguments)
// carry over testConfigOverrides
// carry over testConfig, id, and order
ret._testConfig = this._testConfig
// carry over test.id
ret.id = this.id
ret.order = this.order
return ret
}

View File

@@ -110,6 +110,8 @@ const destroySourceMapConsumers = () => {
Object.values(sourceMapConsumers).forEach((consumer) => {
consumer.destroy()
})
sourceMapConsumers = {}
}
export default {

View File

@@ -42,12 +42,16 @@
<span style="color:#e05561"><span style="color:#e6e6e6">
<span style="color:#e05561">- You wrote an endless loop and you must fix your own code<span style="color:#e6e6e6">
<span style="color:#e05561">- You are running Docker (there is an easy fix for this: see link below)<span style="color:#e6e6e6">
<span style="color:#e05561">- You are running lots of tests on a memory intense application<span style="color:#e6e6e6">
<span style="color:#e05561">- You are running in a memory starved VM environment<span style="color:#e6e6e6">
<span style="color:#e05561">- You are running lots of tests on a memory intense application.<span style="color:#e6e6e6">
<span style="color:#e05561"> - Try enabling <span style="color:#e5e510">experimentalMemoryManagement<span style="color:#e05561"> in your config file.<span style="color:#e6e6e6">
<span style="color:#e05561"> - Try lowering <span style="color:#e5e510">numTestsKeptInMemory<span style="color:#e05561"> in your config file.<span style="color:#e6e6e6">
<span style="color:#e05561">- You are running in a memory starved VM environment.<span style="color:#e6e6e6">
<span style="color:#e05561"> - Try enabling <span style="color:#e5e510">experimentalMemoryManagement<span style="color:#e05561"> in your config file.<span style="color:#e6e6e6">
<span style="color:#e05561"> - Try lowering <span style="color:#e5e510">numTestsKeptInMemory<span style="color:#e05561"> in your config file.<span style="color:#e6e6e6">
<span style="color:#e05561">- There are problems with your GPU / GPU drivers<span style="color:#e6e6e6">
<span style="color:#e05561">- There are browser bugs in Chromium<span style="color:#e6e6e6">
<span style="color:#e05561"><span style="color:#e6e6e6">
<span style="color:#e05561">You can learn more including how to fix Docker here:<span style="color:#e6e6e6">
<span style="color:#e05561"><span style="color:#e6e6e6">
<span style="color:#e05561">https://on.cypress.io/renderer-process-crashed<span style="color:#e6e6e6"></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span>
<span style="color:#e05561">https://on.cypress.io/renderer-process-crashed<span style="color:#e6e6e6"></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span></span>
</pre></body></html>

View File

@@ -601,8 +601,12 @@ export const AllCypressErrors = {
- You wrote an endless loop and you must fix your own code
- You are running Docker (there is an easy fix for this: see link below)
- You are running lots of tests on a memory intense application
- You are running in a memory starved VM environment
- You are running lots of tests on a memory intense application.
- Try enabling ${fmt.highlight('experimentalMemoryManagement')} in your config file.
- Try lowering ${fmt.highlight('numTestsKeptInMemory')} in your config file.
- You are running in a memory starved VM environment.
- Try enabling ${fmt.highlight('experimentalMemoryManagement')} in your config file.
- Try lowering ${fmt.highlight('numTestsKeptInMemory')} in your config file.
- There are problems with your GPU / GPU drivers
- There are browser bugs in Chromium

View File

@@ -523,9 +523,13 @@
"name": "Origin Dependencies",
"description": "Enables support for `require`/`import` within `cy.origin`."
},
"experimentalMemoryManagement": {
"name": "Memory Management",
"description": "Enables support for improved memory management within Chromium-based browsers."
},
"experimentalSkipDomainInjection": {
"name": "Skip Domain Injection",
"description": "Disables setting `document.domain` to the application's super domain on injection"
"description": "Disables setting `document.domain` to the application's super domain on injection."
}
},
"device": {

View File

@@ -351,6 +351,10 @@ export class CdpAutomation {
return this.sendCloseCommandFn(data.shouldKeepTabOpen)
case 'focus:browser:window':
return this.sendDebuggerCommandFn('Page.bringToFront')
case 'get:heap:size:limit':
return this.sendDebuggerCommandFn('Runtime.evaluate', { expression: 'performance.memory.jsHeapSizeLimit' })
case 'collect:garbage':
return this.sendDebuggerCommandFn('HeapProfiler.collectGarbage')
default:
throw new Error(`No automation handler registered for: '${message}'`)
}

View File

@@ -21,6 +21,7 @@ import { BrowserCriClient } from './browser-cri-client'
import type { CriClient } from './cri-client'
import type { Automation } from '../automation'
import type { BrowserLaunchOpts, BrowserNewTabOpts, RunModeVideoApi } from '@packages/types'
import memory from './memory'
const debug = debugModule('cypress:server:browsers:chrome')
@@ -119,6 +120,9 @@ const DEFAULT_ARGS = [
// write shared memory files into '/tmp' instead of '/dev/shm'
// https://github.com/cypress-io/cypress/issues/5336
'--disable-dev-shm-usage',
// enable precise memory info so performance.memory returns more accurate values
'--enable-precise-memory-info',
]
let browserCriClient: BrowserCriClient | undefined
@@ -608,9 +612,11 @@ export = {
const browserCriClient = this._getBrowserCriClient()
// Handle chrome tab crashes.
pageCriClient.on('Inspector.targetCrashed', () => {
pageCriClient.on('Inspector.targetCrashed', async () => {
const err = errors.get('RENDERER_CRASHED')
await memory.endProfiling()
if (!options.onError) {
errors.log(err)
throw new Error('Missing onError in attachListeners')

View File

@@ -12,6 +12,7 @@ import type { Browser, BrowserInstance } from './types'
import type { BrowserWindow, WebContents } from 'electron'
import type { Automation } from '../automation'
import type { BrowserLaunchOpts, Preferences, RunModeVideoApi } from '@packages/types'
import memory from './memory'
// TODO: unmix these two types
type ElectronOpts = Windows.WindowOptions & BrowserLaunchOpts
@@ -143,9 +144,11 @@ export = {
// prevents a tiny 1px padding around the window
// causing screenshots/videos to be off by 1px
resizable: !options.browser.isHeadless,
onCrashed () {
async onCrashed () {
const err = errors.get('RENDERER_CRASHED')
await memory.endProfiling()
if (!options.onError) {
errors.log(err)
throw new Error('Missing onError in onCrashed')
@@ -471,14 +474,14 @@ export = {
},
/**
* Clear instance state for the electron instance, this is normally called in on kill or on exit for electron there isn't state to clear.
* Clear instance state for the electron instance, this is normally called on kill or on exit, for electron there isn't any state to clear.
*/
clearInstanceState () {},
async connectToNewSpec (browser: Browser, options: ElectronOpts, automation: Automation) {
if (!options.url) throw new Error('Missing url in connectToNewSpec')
await this.open(browser, options.url, options, automation)
return this.open(browser, options.url, options, automation)
},
connectToExisting () {

View File

@@ -124,8 +124,13 @@ export = {
async connectToNewSpec (browser: Browser, options: BrowserNewTabOpts, automation: Automation): Promise<BrowserInstance | null> {
const browserLauncher = await getBrowserLauncher(browser, options.browsers)
// Instance will be null when we're dealing with electron. In that case we don't need a browserCriClient
await browserLauncher.connectToNewSpec(browser, options, automation)
const newInstance = await browserLauncher.connectToNewSpec(browser, options, automation)
// if a new instance was returned, update our instance to use the new one
if (newInstance) {
instance = newInstance
instance.browser = browser
}
return this.getBrowserInstance()
},

View File

@@ -0,0 +1,61 @@
import { exec } from 'child_process'
import util from 'util'
const execAsync = util.promisify(exec)
/**
* Returns the total memory limit from the memory cgroup.
* @returns total memory limit in bytes
*/
const getTotalMemoryLimit = async () => {
return Number((await execAsync('cat /sys/fs/cgroup/memory/memory.limit_in_bytes', { encoding: 'utf8' })).stdout)
}
/**
* Convert the raw memory stats into an object.
* @param rawStats raw memory stats from the memory cgroup
* @returns object of memory stats
*/
const convertRawStats = (rawStats: string): { total_inactive_file: string } => {
const stats = rawStats.split('\n').filter(Boolean).reduce((acc, arr): { total_inactive_file: string} => {
const stat = arr.split(' ')
acc[stat[0]] = stat[1]
return acc
}, {} as { total_inactive_file: string })
return stats
}
/**
* Returns the available memory from the memory cgroup.
* @param totalMemoryLimit total memory limit in bytes
* @param log optional object to add the total memory working set used
* @returns available memory in bytes
*/
const getAvailableMemory = async (totalMemoryLimit: number, log?: { [key: string]: any }) => {
// retrieve the memory usage and memory stats from the memory cgroup
const [usageExec, rawStats] = await Promise.all([
execAsync('cat /sys/fs/cgroup/memory/memory.usage_in_bytes', { encoding: 'utf8' }),
execAsync('cat /sys/fs/cgroup/memory/memory.stat', { encoding: 'utf8' }),
])
const stats = convertRawStats(rawStats.stdout)
const usage = Number(usageExec.stdout)
// calculate the actual memory used by removing the inactive file cache from the reported usage
const totalMemoryWorkingSetUsed = (usage - Number(stats.total_inactive_file))
if (log) {
log.totalMemoryWorkingSetUsed = totalMemoryWorkingSetUsed
}
// return the available memory by subtracting the used memory from the total memory limit
return totalMemoryLimit - totalMemoryWorkingSetUsed
}
export default {
getTotalMemoryLimit,
getAvailableMemory,
}

View File

@@ -0,0 +1,25 @@
import os from 'os'
import si from 'systeminformation'
/**
 * Returns the total memory limit in bytes.
 * @returns total memory limit in bytes
 */
// no cgroup limit applies here, so the host's total physical memory is the cap
const getTotalMemoryLimit = async () => os.totalmem()
/**
 * Returns the available memory in bytes.
 * @param totalMemoryLimit total memory limit in bytes (unused; kept for MemoryHandler parity)
 * @param log optional object to add any additional information (unused; kept for MemoryHandler parity)
 * @returns available memory in bytes
 */
const getAvailableMemory = async (totalMemoryLimit: number, log?: { [key: string]: any }) => {
  // systeminformation reports the memory the OS can still hand out
  const { available } = await si.mem()

  return available
}
// default (non-cgroup) implementation of the MemoryHandler contract
export default {
  getTotalMemoryLimit,
  getAvailableMemory,
}

View File

@@ -0,0 +1,418 @@
import debugModule from 'debug'
import _ from 'lodash'
import si from 'systeminformation'
import os from 'os'
import fs from 'fs-extra'
import { performance } from 'perf_hooks'
import path from 'path'
import pid from 'pidusage'
import { groupCyProcesses, Process } from '../../util/process_profiler'
import browsers from '..'
import type { Automation } from '../../automation'
import type { BrowserInstance } from '../types'
const debug = debugModule('cypress:server:browsers:memory')
const debugVerbose = debugModule('cypress-verbose:server:browsers:memory')

// renderer memory usage (as a % of its max available memory) at which a GC is requested (default 50)
const MEMORY_THRESHOLD_PERCENTAGE = Number(process.env.CYPRESS_INTERNAL_MEMORY_THRESHOLD_PERCENTAGE) || 50
// how often (ms) memory stats are sampled (default 1000)
const MEMORY_PROFILER_INTERVAL = Number(process.env.CYPRESS_INTERNAL_MEMORY_PROFILER_INTERVAL) || 1000
// where per-spec stats JSON files are written when saving is enabled
const MEMORY_FOLDER = process.env.CYPRESS_INTERNAL_MEMORY_FOLDER_PATH || path.join('cypress', 'logs', 'memory')
// opt-in ('1' or 'true'): persist cumulative stats to disk
const SAVE_MEMORY_STATS = ['1', 'true'].includes(process.env.CYPRESS_INTERNAL_MEMORY_SAVE_STATS?.toLowerCase() as string)
// escape hatch ('1' or 'true'): never force a garbage collection
const SKIP_GC = ['1', 'true'].includes(process.env.CYPRESS_INTERNAL_MEMORY_SKIP_GC?.toLowerCase() as string)
const KIBIBYTE = 1024
// fallback jsHeapSizeLimit when the browser cannot report one
const FOUR_GIBIBYTES = 4 * (KIBIBYTE ** 3)

// ---- module-level profiler state; reset() clears all of it between spec runs ----
let rendererProcess: Process | null // cached renderer process (discovered once per spec)
let handler: MemoryHandler // platform-specific memory reader
let totalMemoryLimit: number // container/host memory limit in bytes
let jsHeapSizeLimit: number // browser JS heap cap in bytes
let browserInstance: BrowserInstance | null = null
let started = false
let cumulativeStats: { [key: string]: any }[] = []
let collectGarbageOnNextTest = false
let timer: NodeJS.Timeout | null
let currentSpecFileName: string | null
let statsLog: { [key: string]: any } = {} // per-interval sampling log entry
let gcLog: { [key: string]: any } = {} // per-test garbage-collection log entry

/**
 * Contract implemented by the platform-specific memory readers
 * (./default and ./cgroup-v1).
 */
export type MemoryHandler = {
  getTotalMemoryLimit: () => Promise<number>
  getAvailableMemory: (totalMemoryLimit: number, log?: { [key: string]: any }) => Promise<number>
}
/**
* Algorithm:
*
* When the test runs starts:
* 1. set total mem limit for the container/host by reading off cgroup memory limits (if available) otherwise use os.totalmem()
*
* On a defined interval (e.g. 1s):
* 1. set current mem available for the container/host by reading off cgroup memory usage (if available) otherwise use si.mem().available
* 2. set current renderer mem usage
* 3. set max avail render mem to minimum of v8 heap size limit and total available mem (current available mem + current renderer mem usage)
* 4. calc % of memory used, current renderer mem usage / max avail render mem
*
* Before each test:
* 1. if that exceeds the defined memory threshold percentage (e.g. 50%) do a GC
*/
/**
 * Returns a function that wraps the provided function and measures the duration of the function.
 * @param func the function to time
 * @param opts name of the function to time and whether to save the result to the log
 * @returns a function that wraps the provided function and measures the duration of the function
 */
const measure = (func: (...args) => any, opts: { name?: string, save?: boolean } = { save: true }) => {
  return async (...args) => {
    const startedAt = performance.now()
    const result = await func.apply(this, args)
    const elapsed = performance.now() - startedAt

    const label = opts.name || func.name

    if (!opts?.save) {
      // not persisted anywhere, just trace it
      debugVerbose('%s took %dms', label, elapsed)
    } else if (label === 'checkMemoryPressure') {
      // gc-related timings are recorded on the per-test gc log
      gcLog[`${label}Duration`] = elapsed
    } else {
      // everything else is recorded on the per-interval stats log
      statsLog[`${label}Duration`] = elapsed
    }

    return result
  }
}
/**
 * Retrieves the JS heap size limit for the browser.
 * @param automation - the automation client to use
 * @returns the JS heap size limit in bytes for the browser. If not available, returns a default of four gibibytes.
 */
export const getJsHeapSizeLimit: (automation: Automation) => Promise<number> = measure(async (automation: Automation) => {
  // use the primitive `number` type, not the `Number` wrapper object type
  let heapLimit: number

  try {
    heapLimit = (await automation.request('get:heap:size:limit', null, null)).result.value
  } catch (err) {
    // the browser may not expose the heap limit; fall back to the default cap
    debug('could not get jsHeapSizeLimit from browser, using default of four gibibytes')
    heapLimit = FOUR_GIBIBYTES
  }

  return heapLimit
}, { name: 'getJsHeapSizeLimit', save: false })
/**
 * @returns the memory handler to use based on the platform and if linux, the cgroup version
 */
export const getMemoryHandler = async (): Promise<MemoryHandler> => {
  // only linux hosts can be memory-constrained via cgroups; cgroup v1 is
  // detected by the *absence* of the v2 `cgroup.controllers` file
  const isCgroupV1 = os.platform() === 'linux' && !(await fs.pathExists('/sys/fs/cgroup/cgroup.controllers'))

  if (isCgroupV1) {
    debug('using cgroup v1 memory handler')

    return (await import('./cgroup-v1')).default
  }

  // cgroup v2 (and all non-linux platforms) can use the default handler
  debug('using default memory handler')

  return (await import('./default')).default
}
/**
 * Attempts to find the browser's renderer process running the Cypress tests.
 * @param processes - all of the system processes
 * @returns the renderer process or null if there is no renderer process
 */
const findRendererProcess = (processes: si.Systeminformation.ProcessesData) => {
  // group the processes by their group (e.g. browser, cypress, launchpad, etc...)
  const groupedProcesses = groupCyProcesses(processes)

  // filter down to the 'browser' group
  const browserProcesses = groupedProcesses.filter((p) => p.group === 'browser')

  // if we only have one browser process assume it's the renderer process,
  // otherwise filter down by the command/params containing --type=renderer.
  // (every process here is already in the 'browser' group, so re-checking
  // p.group in the second filter was redundant and has been dropped)
  const rendererProcesses = browserProcesses.length === 1 ? browserProcesses : browserProcesses.filter(
    (p) => p.command?.includes('--type=renderer') || p.params?.includes('--type=renderer'),
  )

  // if there are no renderer processes, return null
  if (rendererProcesses.length === 0) return null

  // assume the renderer process with the most memory is the one we're interested in
  const maxRendererProcess = rendererProcesses.reduce((prev, current) => (prev.memRss > current.memRss) ? prev : current)

  debugVerbose('renderer processes found: %o', maxRendererProcess)

  return maxRendererProcess
}
/**
 * Retrieves the memory usage for the renderer process.
 * @returns the memory usage in bytes for the renderer process or null if there is no renderer process
 */
export const getRendererMemoryUsage: () => Promise<number | null> = measure(async () => {
  // if we don't have a renderer process yet, find it.
  // this is done once since the renderer process will not change
  if (!rendererProcess) {
    let process: Process | null = null
    let processes: si.Systeminformation.ProcessesData

    try {
      processes = await si.processes()
    } catch (err) {
      // without a process list we can't locate the renderer; report "not found"
      debug('could not get processes to find renderer process: %o', err)
      return null
    }

    process = findRendererProcess(processes)

    if (!process) return null

    // if we found a renderer process, save it so we don't have to find it again
    rendererProcess = process

    // return the memory usage for the renderer process
    // (memRss is multiplied by KIBIBYTE to convert to bytes)
    return rendererProcess.memRss * KIBIBYTE
  }

  try {
    // if we have a renderer process, get the memory usage for it
    return (await pid(rendererProcess.pid)).memory
  } catch {
    // if we can't get the memory usage for the renderer process,
    // assume it's gone and clear it out so we can find it again
    // (recurses once to re-discover the renderer process)
    rendererProcess = null
    return getRendererMemoryUsage()
  }
}, { name: 'getRendererMemoryUsage', save: true })
/**
 * Retrieves the available memory for the container/host.
 * @returns the available memory in bytes for the container/host
 */
export const getAvailableMemory: () => Promise<number> = measure(() => {
  // delegate to the platform-specific handler; it may also write
  // additional details (e.g. working-set usage) onto statsLog
  return handler.getAvailableMemory(totalMemoryLimit, statsLog)
}, { name: 'getAvailableMemory', save: true })
/**
 * Calculates the memory stats used to determine if garbage collection should be run before the next test starts.
 *
 * Mutates module state: may set `collectGarbageOnNextTest` and writes all
 * computed values onto `statsLog` for the current sampling interval.
 */
export const calculateMemoryStats: () => Promise<void> = measure(async () => {
  // retrieve the available memory and the renderer process memory usage
  const [currentAvailableMemory, rendererProcessMemRss] = await Promise.all([
    getAvailableMemory(),
    getRendererMemoryUsage(),
  ])

  if (rendererProcessMemRss === null) {
    debug('no renderer process found, skipping memory stat collection')
    return
  }

  // the max available memory is the minimum of the js heap size limit and
  // the current available memory plus the renderer process memory usage
  const maxAvailableRendererMemory = Math.min(jsHeapSizeLimit, currentAvailableMemory + rendererProcessMemRss)

  const rendererUsagePercentage = (rendererProcessMemRss / maxAvailableRendererMemory) * 100
  // if we're using more than MEMORY_THRESHOLD_PERCENTAGE of the available memory,
  // request a garbage collection (unless explicitly disabled via SKIP_GC)
  const shouldCollectGarbage = rendererUsagePercentage >= MEMORY_THRESHOLD_PERCENTAGE && !SKIP_GC

  // if we should collect garbage, set the flag to true so we can collect garbage on the next test
  // (sticky: stays set until the next test actually runs the GC check)
  collectGarbageOnNextTest = collectGarbageOnNextTest || shouldCollectGarbage

  // set all the memory stats on the stats log
  statsLog.jsHeapSizeLimit = jsHeapSizeLimit
  statsLog.totalMemoryLimit = totalMemoryLimit
  statsLog.rendererProcessMemRss = rendererProcessMemRss
  statsLog.rendererUsagePercentage = rendererUsagePercentage
  statsLog.rendererMemoryThreshold = maxAvailableRendererMemory * (MEMORY_THRESHOLD_PERCENTAGE / 100)
  statsLog.currentAvailableMemory = currentAvailableMemory
  statsLog.maxAvailableRendererMemory = maxAvailableRendererMemory
  statsLog.shouldCollectGarbage = shouldCollectGarbage
  statsLog.timestamp = Date.now()
}, { name: 'calculateMemoryStats', save: true })
/**
 * Collects garbage if needed and logs the test information.
 * @param automation - the automation client used to collect garbage
 * @param test - the current test
 */
const checkMemoryPressureAndLog = async ({ automation, test }: { automation: Automation, test: { title: string, order: number, currentRetry: number }}) => {
  await checkMemoryPressure(automation)

  // record what happened for this test on the gc log entry
  Object.assign(gcLog, {
    testTitle: test.title,
    testOrder: Number(`${test.order}.${test.currentRetry}`),
    garbageCollected: collectGarbageOnNextTest,
    timestamp: Date.now(),
  })

  addCumulativeStats(gcLog)

  // start a fresh gc log entry for the next test
  gcLog = {}

  // clear the flag so we don't collect garbage on every test
  collectGarbageOnNextTest = false
}
/**
 * Collects the browser's garbage if it previously exceeded the threshold when it was measured.
 * @param automation the automation client used to collect garbage
 */
const checkMemoryPressure: (automation: Automation) => Promise<void> = measure(async (automation: Automation) => {
  // guard clause: nothing to do unless a prior measurement flagged pressure
  if (!collectGarbageOnNextTest) {
    debug('skipping garbage collection')

    return
  }

  debug('forcing garbage collection')

  try {
    await automation.request('collect:garbage', null, null)
  } catch (err) {
    // best-effort: a failed GC request shouldn't fail the test run
    debug('error collecting garbage: %o', err)
  }
}, { name: 'checkMemoryPressure', save: true })
/**
 * Adds the memory stats to the cumulative stats.
 * @param stats - memory stats to add to the cumulative stats
 */
const addCumulativeStats = (stats: { [key: string]: any }) => {
  debugVerbose('memory stats: %o', stats)

  if (!SAVE_MEMORY_STATS) return

  // clone so later mutation of the (reused) log object doesn't corrupt history
  cumulativeStats.push(_.clone(stats))
}
/**
 * Gathers the memory stats and schedules the next check.
 */
const gatherMemoryStats = async () => {
  try {
    await calculateMemoryStats()
    addCumulativeStats(statsLog)

    // reset the log entry for the next sampling interval
    statsLog = {}
  } catch (err) {
    // sampling errors are logged, never fatal
    debug('error gathering memory stats: %o', err)
  } finally {
    // always queue the next sample, even if this one failed
    scheduleMemoryCheck()
  }
}
/**
 * Schedules the next gathering of memory stats based on the MEMORY_PROFILER_INTERVAL.
 */
const scheduleMemoryCheck = () => {
  // stop rescheduling once profiling has ended
  if (!started) return

  // not setinterval, since gatherMemoryStats is asynchronous
  timer = setTimeout(gatherMemoryStats, MEMORY_PROFILER_INTERVAL)
}
/**
 * Starts the memory profiler.
 * @param automation - the automation client used to interact with the browser
 * @param spec - the current spec file
 */
async function startProfiling (automation: Automation, spec: { fileName: string }) {
  if (started) {
    return
  }

  debugVerbose('start memory profiler')

  try {
    // ensure we are starting from a clean state
    reset()

    started = true

    browserInstance = browsers.getBrowserInstance()

    // stop the profiler when the browser exits
    browserInstance?.once('exit', endProfiling)

    // save the current spec file name to be used later for saving the cumulative stats
    currentSpecFileName = spec?.fileName

    handler = await getMemoryHandler()

    // get the js heap size limit and total memory limit once
    // since they don't change during the spec run.
    // run both requests concurrently (the previous form awaited each value
    // inside the array literal, which serialized the calls before
    // Promise.all ever saw them)
    ;[jsHeapSizeLimit, totalMemoryLimit] = await Promise.all([
      getJsHeapSizeLimit(automation),
      handler.getTotalMemoryLimit(),
    ])

    await gatherMemoryStats()
  } catch (err) {
    debug('error starting memory profiler: %o', err)
  }
}
/**
 * Saves the cumulative stats to a file.
 */
const saveCumulativeStats = async () => {
  // nothing to do unless stats collection is enabled and we know the spec name
  if (!SAVE_MEMORY_STATS || !currentSpecFileName) return

  try {
    // save the cumulative stats to a file named after the spec file
    await fs.outputFile(path.join(MEMORY_FOLDER, `${currentSpecFileName}.json`), JSON.stringify(cumulativeStats))
  } catch (err) {
    // a failed write is logged, never fatal
    debugVerbose('error creating memory stats file: %o', err)
  }
}
/**
 * Resets all of the state.
 */
const reset = () => {
  // detach the exit listener before dropping the browser instance
  browserInstance?.removeListener('exit', endProfiling)
  browserInstance = null

  started = false
  rendererProcess = null
  cumulativeStats = []
  collectGarbageOnNextTest = false
  timer = null
  currentSpecFileName = null
  statsLog = {}
  gcLog = {}
}
/**
 * Ends the memory profiler.
 *
 * Cancels any pending sampling, persists the collected stats (when enabled),
 * and clears all module state via reset().
 */
const endProfiling = async () => {
  // no-op unless profiling is currently running
  if (!started) return

  // clear the timer
  if (timer) clearTimeout(timer)

  // save the cumulative stats to a file
  await saveCumulativeStats()
  reset()

  debugVerbose('end memory profiler')
}
/**
 * Returns all of the memory stats collected thus far.
 * @returns Array of memory stats.
 */
const getMemoryStats = () => {
  // hand back a shallow copy so callers can't mutate internal state
  return [...cumulativeStats]
}
// public API of the memory profiler
export default {
  startProfiling,
  endProfiling,
  gatherMemoryStats,
  // the logged variant is exposed under the plain name
  checkMemoryPressure: checkMemoryPressureAndLog,
  getMemoryStats,
}

View File

@@ -30,7 +30,7 @@ export type BrowserInstance = EventEmitter & {
export type BrowserLauncher = {
open: (browser: Browser, url: string, options: BrowserLaunchOpts, automation: Automation) => Promise<BrowserInstance>
connectToNewSpec: (browser: Browser, options: BrowserNewTabOpts, automation: Automation) => Promise<void>
connectToNewSpec: (browser: Browser, options: BrowserNewTabOpts, automation: Automation) => Promise<BrowserInstance | void>
/**
* Used in Cypress-in-Cypress tests to connect to the existing browser instance.
*/

View File

@@ -66,6 +66,9 @@ try {
// https://github.com/cypress-io/cypress/pull/20271
app.commandLine.appendSwitch('disable-ipc-flooding-protection')
// ensure we get the most accurate memory usage
app.commandLine.appendSwitch('enable-precise-memory-info')
if (os.platform() === 'linux') {
app.disableHardwareAcceleration()
}

View File

@@ -61,6 +61,7 @@ const _summaries: StringValues = {
experimentalWebKitSupport: 'Adds support for testing in the WebKit browser engine used by Safari. See https://on.cypress.io/webkit-experiment for more information.',
experimentalRunAllSpecs: 'Enables the "Run All Specs" UI feature, allowing the execution of multiple specs sequentially',
experimentalOriginDependencies: 'Enables support for `require`/`import` within `cy.origin`',
experimentalMemoryManagement: 'Enables support for improved memory management within Chromium-based browsers.',
}
/**
@@ -84,6 +85,7 @@ const _names: StringValues = {
experimentalWebKitSupport: 'WebKit Support',
experimentalRunAllSpecs: 'Run All Specs',
experimentalOriginDependencies: 'Origin Dependencies',
experimentalMemoryManagement: 'Memory Management',
}
/**

View File

@@ -28,6 +28,7 @@ import type { Socket } from '@packages/socket'
import type { RunState, CachedTestState } from '@packages/types'
import { cors } from '@packages/network'
import memory from './browsers/memory'
type StartListeningCallbacks = {
onSocketConnection: (socket: any) => void
@@ -466,6 +467,12 @@ export class SocketBase {
return setCrossOriginCookie(args[0])
case 'request:sent:with:credentials':
return this.localBus.emit('request:sent:with:credentials', args[0])
case 'start:memory:profiling':
return memory.startProfiling(automation, args[0])
case 'end:memory:profiling':
return memory.endProfiling()
case 'check:memory:pressure':
return memory.checkMemoryPressure({ ...args[0], automation })
default:
throw new Error(`You requested a backend event we cannot handle: ${eventName}`)
}

View File

@@ -8,7 +8,7 @@ const browsers = require('../browsers')
const plugins = require('../plugins')
type Group = 'browser' | 'cypress' | 'launchpad' | 'plugin' | 'ffmpeg' | 'electron-shared' | 'other'
type Process = si.Systeminformation.ProcessesProcessData & {
export type Process = si.Systeminformation.ProcessesProcessData & {
group?: Group
}
@@ -37,7 +37,7 @@ const formatPidDisplay = (groupedProcesses) => {
return display
}
export const _groupCyProcesses = ({ list }: si.Systeminformation.ProcessesData) => {
export const groupCyProcesses = ({ list }: si.Systeminformation.ProcessesData) => {
const cyProcesses: Process[] = []
const thisProcess: Process = _.find(list, { pid: process.pid })!
@@ -231,7 +231,7 @@ export const _printGroupedProcesses = (groupTotals) => {
function _checkProcesses () {
return si.processes()
.then(_groupCyProcesses)
.then(groupCyProcesses)
.then(_renameBrowserGroup)
.then(_aggregateGroups)
.then(_printGroupedProcesses)

View File

@@ -99,6 +99,7 @@
"ospath": "1.2.2",
"p-defer": "^3.0.0",
"p-queue": "6.1.0",
"pidusage": "3.0.2",
"pluralize": "8.0.0",
"randomstring": "1.1.5",
"recast": "0.20.4",

View File

@@ -15,13 +15,7 @@ context('lib/browsers/cdp_automation', () => {
onRequestEvent: sinon.stub(),
}
cdpAutomation = await CdpAutomation.create(this.sendDebuggerCommand, this.onFn, this.sendCloseTargetCommand, this.automation, false)
this.sendDebuggerCommand
.throws(new Error('not stubbed'))
.withArgs('Browser.getVersion')
.resolves()
cdpAutomation = await CdpAutomation.create(this.sendDebuggerCommand, this.onFn, this.sendCloseTargetCommand, this.automation)
this.onRequest = cdpAutomation.onRequest
})
@@ -309,5 +303,21 @@ context('lib/browsers/cdp_automation', () => {
return this.onRequest('focus:browser:window').then((resp) => expect(resp).to.be.undefined)
})
})
describe('get:heap:size:limit', function () {
it('sends Runtime.evaluate to request the performance.memory.jsHeapSizeLimit', async function () {
this.sendDebuggerCommand.withArgs('Runtime.evaluate', { expression: 'performance.memory.jsHeapSizeLimit' }).resolves()
return this.onRequest('get:heap:size:limit').then((resp) => expect(resp).to.be.undefined)
})
})
describe('collect:garbage', function () {
it('sends HeapProfiler.collectGarbage when garbage collection is requested', async function () {
this.sendDebuggerCommand.withArgs('HeapProfiler.collectGarbage').resolves()
return this.onRequest('collect:garbage').then((resp) => expect(resp).to.be.undefined)
})
})
})
})

View File

@@ -0,0 +1,33 @@
const { expect, sinon } = require('../../../spec_helper')
import util from 'util'
describe('lib/browsers/memory/cgroup-v1', () => {
  let mockExec
  let memory

  before(async () => {
    // stub util.promisify BEFORE requiring the module under test so that
    // its promisified exec wrapper resolves to our stub
    mockExec = sinon.stub()
    sinon.stub(util, 'promisify').returns(mockExec)

    memory = require('../../../../lib/browsers/memory/cgroup-v1').default
  })

  context('#getTotalMemoryLimit', () => {
    it('returns total memory limit from limit_in_bytes', async () => {
      mockExec.withArgs('cat /sys/fs/cgroup/memory/memory.limit_in_bytes', { encoding: 'utf8' }).resolves({ stdout: '100' })

      expect(await memory.getTotalMemoryLimit()).to.eq(100)
    })
  })

  context('#getAvailableMemory', () => {
    it('returns available memory from cgroup', async () => {
      // 200 (limit) - (100 (usage) - 50 (inactive file cache)) = 150
      mockExec.withArgs('cat /sys/fs/cgroup/memory/memory.usage_in_bytes').resolves({ stdout: '100' })
      mockExec.withArgs('cat /sys/fs/cgroup/memory/memory.stat').resolves({ stdout: 'total_inactive_file 50' })

      expect(await memory.getAvailableMemory(200)).to.eq(150)
    })
  })
})

View File

@@ -0,0 +1,23 @@
const { expect, sinon } = require('../../../spec_helper')
import os from 'os'
import si from 'systeminformation'
import memory from '../../../../lib/browsers/memory/default'
describe('lib/browsers/memory', () => {
  context('#getTotalMemoryLimit', () => {
    it('returns total memory limit from os', async () => {
      // default handler reads the host's physical memory
      sinon.stub(os, 'totalmem').returns(100)

      expect(await memory.getTotalMemoryLimit()).to.eq(100)
    })
  })

  context('#getAvailableMemory', () => {
    it('returns available memory from os', async () => {
      // default handler reads systeminformation's available memory
      sinon.stub(si, 'mem').returns({ available: 50 })

      expect(await memory.getAvailableMemory(100)).to.eq(50)
    })
  })
})

View File

@@ -0,0 +1,723 @@
import os from 'os'
import si from 'systeminformation'
import fs from 'fs-extra'
import browsers from '../../../../lib/browsers'
import { proxyquire, expect, sinon } from '../../../spec_helper'
import { Automation } from '../../../../lib/automation'
describe('lib/browsers/memory', () => {
let memory: typeof import('../../../../lib/browsers/memory')
before(() => {
delete require.cache[require.resolve('../../../../lib/browsers/memory')]
})
beforeEach(() => {
sinon.useFakeTimers()
process.env.CYPRESS_INTERNAL_MEMORY_SAVE_STATS = 'true'
memory = require('../../../../lib/browsers/memory')
})
afterEach(async () => {
await memory.default.endProfiling()
})
context('#getJsHeapSizeLimit', () => {
it('retrieves the jsHeapSizeLimit from performance.memory', async () => {
const automation = sinon.createStubInstance(Automation)
automation.request.withArgs('get:heap:size:limit', null, null).resolves({ result: { value: 50 } })
expect(await memory.getJsHeapSizeLimit(automation)).to.eq(50)
})
it('defaults the jsHeapSizeLimit to four gibibytes', async () => {
const automation = sinon.createStubInstance(Automation)
automation.request.withArgs('get:heap:size:limit', null, null).throws(new Error('performance not available'))
expect(await memory.getJsHeapSizeLimit(automation)).to.eq(4294967296)
})
})
context('#getMemoryHandler', () => {
it('returns "default" for non-linux', async () => {
const defaultHandler = require('../../../../lib/browsers/memory/default').default
sinon.stub(os, 'platform').returns('darwin')
expect(await memory.getMemoryHandler()).to.eq(defaultHandler)
})
it('returns "cgroup-v1" for linux cgroup v1', async () => {
const cgroupV1Handler = require('../../../../lib/browsers/memory/cgroup-v1').default
sinon.stub(os, 'platform').returns('linux')
sinon.stub(fs, 'pathExists').withArgs('/sys/fs/cgroup/cgroup.controllers').resolves(false)
expect(await memory.getMemoryHandler()).to.eq(cgroupV1Handler)
})
it('returns "default" for linux cgroup v2', async () => {
const defaultHandler = require('../../../../lib/browsers/memory/default').default
sinon.stub(os, 'platform').returns('linux')
sinon.stub(fs, 'pathExists').withArgs('/sys/fs/cgroup/cgroup.controllers').resolves(true)
expect(await memory.getMemoryHandler()).to.eq(defaultHandler)
})
})
context('#startProfiling', () => {
it('starts the profiling', async () => {
// restore the fake timers since we are stubbing setTimeout directly
sinon._clock.restore()
const automation = sinon.createStubInstance(Automation)
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'calculateMemoryStats').resolves()
sinon.stub(global, 'setTimeout').onFirstCall().callsFake(async (fn) => {
await fn()
})
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
expect(memory.calculateMemoryStats).to.be.calledTwice
})
it('doesn\'t start twice', async () => {
const automation = sinon.createStubInstance(Automation)
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'calculateMemoryStats').resolves()
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
// second call doesn't do anything
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
expect(memory.calculateMemoryStats).to.be.calledOnce
})
})
context('#checkMemoryPressure', () => {
it('collects memory when renderer process is greater than the default threshold', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'getRendererMemoryUsage').resolves(75)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
jsHeapSizeLimit: 100,
totalMemoryLimit: 2000,
rendererProcessMemRss: 75,
rendererUsagePercentage: 75,
rendererMemoryThreshold: 50,
currentAvailableMemory: 1000,
maxAvailableRendererMemory: 100,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
it('collects memory when renderer process is greater than the custom threshold', async () => {
process.env.CYPRESS_INTERNAL_MEMORY_THRESHOLD_PERCENTAGE = '25'
const memory = proxyquire('../lib/browsers/memory', {})
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'getRendererMemoryUsage').resolves(25)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
jsHeapSizeLimit: 100,
totalMemoryLimit: 2000,
rendererProcessMemRss: 25,
rendererUsagePercentage: 25,
rendererMemoryThreshold: 25,
currentAvailableMemory: 1000,
maxAvailableRendererMemory: 100,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
it('collects memory when renderer process is equal to the threshold', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'getRendererMemoryUsage').resolves(50)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
jsHeapSizeLimit: 100,
totalMemoryLimit: 2000,
rendererProcessMemRss: 50,
rendererUsagePercentage: 50,
rendererMemoryThreshold: 50,
currentAvailableMemory: 1000,
maxAvailableRendererMemory: 100,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
it('uses the available memory limit if it\'s less than the jsHeapSizeLimit', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(10),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'getRendererMemoryUsage').resolves(25)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
jsHeapSizeLimit: 100,
totalMemoryLimit: 2000,
rendererProcessMemRss: 25,
rendererUsagePercentage: 71.42857142857143,
rendererMemoryThreshold: 17.5,
currentAvailableMemory: 10,
maxAvailableRendererMemory: 35,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
it('skips collecting memory when renderer process is less than the threshold', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'getRendererMemoryUsage').resolves(25)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
jsHeapSizeLimit: 100,
totalMemoryLimit: 2000,
rendererProcessMemRss: 25,
rendererUsagePercentage: 25,
rendererMemoryThreshold: 50,
currentAvailableMemory: 1000,
maxAvailableRendererMemory: 100,
shouldCollectGarbage: false,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: false,
timestamp: 0,
},
]
expect(gcStub).to.not.be.called
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
it('skips collecting memory if the renderer process is not found', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(si, 'processes').resolves({ list: [
{ name: 'foo', pid: process.pid },
] })
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
getRendererMemoryUsageDuration: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: false,
timestamp: 0,
},
]
expect(gcStub).to.not.be.called
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
// The renderer can be identified by '--type=renderer' appearing in the
// process's `command` field. The renderer here is a child (parentPid: 1234)
// of the stubbed browser instance (pid: 1234).
it('finds the renderer process from the process.command', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(2000),
getTotalMemoryLimit: sinon.stub().resolves(3000),
}
// cypress -> browser -> renderer process tree; the renderer is tagged via
// its `command` containing '--type=renderer'.
const processesMock = sinon.stub(si, 'processes').resolves({ list: [
{ name: 'cypress', pid: process.pid },
{ name: 'browser', pid: 1234, parentPid: process.pid, command: 'browser.exe' },
{ name: 'renderer', pid: 12345, parentPid: 1234, command: '--type=renderer', memRss: 1 },
] })
// Browser instance stub lets the module resolve the browser pid (1234) and
// attach/detach exit listeners.
sinon.stub(browsers, 'getBrowserInstance').returns({
pid: 1234,
once: sinon.stub().resolves(),
removeListener: sinon.stub(),
})
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(2000)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
getRendererMemoryUsageDuration: 0,
jsHeapSizeLimit: 2000,
totalMemoryLimit: 3000,
// memRss: 1 surfaces as 1024 — presumably a KiB-to-bytes conversion
// inside the memory module; verify against the implementation.
rendererProcessMemRss: 1024,
rendererUsagePercentage: 51.2,
rendererMemoryThreshold: 1000,
currentAvailableMemory: 2000,
maxAvailableRendererMemory: 2000,
// Usage (51.2%) exceeds the threshold, so GC should be triggered.
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(processesMock).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
// Same scenario as the previous test, except '--type=renderer' is carried in
// the process's `params` field rather than `command` (platform-dependent
// systeminformation output) — both paths must identify the renderer.
it('finds the renderer process from the process.params', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(2000),
getTotalMemoryLimit: sinon.stub().resolves(3000),
}
// Renderer's `command` is just the executable; the renderer marker lives in
// `params` here.
const processesMock = sinon.stub(si, 'processes').resolves({ list: [
{ name: 'cypress', pid: process.pid },
{ name: 'browser', pid: 1234, parentPid: process.pid, command: 'browser.exe' },
{ name: 'renderer', pid: 12345, parentPid: 1234, command: 'browser.exe', params: '--type=renderer', memRss: 1 },
] })
sinon.stub(browsers, 'getBrowserInstance').returns({
pid: 1234,
once: sinon.stub().resolves(),
removeListener: sinon.stub(),
})
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(2000)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
// Expected stats are identical to the `command`-based test — detection path
// differs, resulting numbers do not.
const expected = [
{
getAvailableMemoryDuration: 0,
getRendererMemoryUsageDuration: 0,
jsHeapSizeLimit: 2000,
totalMemoryLimit: 3000,
rendererProcessMemRss: 1024,
rendererUsagePercentage: 51.2,
rendererMemoryThreshold: 1000,
currentAvailableMemory: 2000,
maxAvailableRendererMemory: 2000,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(processesMock).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
// When several renderer processes exist, the one with the highest memRss is
// selected for the stats (here 'max-renderer' with memRss: 5, not 'renderer'
// with memRss: 1).
it('selects the renderer process with the most memory', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(10000),
getTotalMemoryLimit: sinon.stub().resolves(20000),
}
// Two renderer children of the browser; they differ only in memRss.
const processesMock = sinon.stub(si, 'processes').resolves({ list: [
{ name: 'cypress', pid: process.pid },
{ name: 'browser', pid: 1234, parentPid: process.pid, command: 'browser.exe' },
{ name: 'renderer', pid: 12345, parentPid: 1234, command: '--type=renderer', memRss: 1 },
{ name: 'max-renderer', pid: 123456, parentPid: 1234, command: '--type=renderer', memRss: 5 },
] })
sinon.stub(browsers, 'getBrowserInstance').returns({
pid: 1234,
once: sinon.stub().resolves(),
removeListener: sinon.stub(),
})
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(10000)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
{
getAvailableMemoryDuration: 0,
getRendererMemoryUsageDuration: 0,
jsHeapSizeLimit: 10000,
totalMemoryLimit: 20000,
// 5120 corresponds to the larger renderer's memRss of 5 (scaled), proving
// the max-memory renderer was chosen.
rendererProcessMemRss: 5120,
rendererUsagePercentage: 51.2,
rendererMemoryThreshold: 5000,
currentAvailableMemory: 10000,
maxAvailableRendererMemory: 10000,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(processesMock).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
// Once the renderer pid is known, subsequent measurements should use the
// cheaper pidusage(pid) lookup instead of re-enumerating all processes with
// si.processes. Verified by asserting each is called exactly once across two
// measurement passes.
it('uses the existing process id to obtain the memory usage', async () => {
// Intercept the pidusage dependency; it reports 2000 bytes for the pid.
const pidStub = sinon.stub().resolves({ memory: 2000 })
// NOTE: this `memory` shadows the suite-level module so the proxyquired
// instance (with the stubbed pidusage) is the one under test.
const memory = proxyquire('../lib/browsers/memory', { pidusage: pidStub })
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(3000),
getTotalMemoryLimit: sinon.stub().resolves(4000),
}
const processesMock = sinon.stub(si, 'processes').resolves({ list: [
{ name: 'cypress', pid: process.pid },
{ name: 'browser', pid: 1234, parentPid: process.pid, command: 'browser.exe' },
{ name: 'renderer', pid: 12345, parentPid: 1234, command: '--type=renderer', memRss: 1 },
] })
sinon.stub(browsers, 'getBrowserInstance').returns({
pid: 1234,
once: sinon.stub().resolves(),
removeListener: sinon.stub(),
})
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(3000)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
// first call will find the renderer process and use si.processes
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
// second call will use the existing process id and use pidusage
await memory.default.gatherMemoryStats()
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
// First pass: renderer found via si.processes (memRss: 1 -> 1024), below
// the threshold so no GC.
{
getAvailableMemoryDuration: 0,
getRendererMemoryUsageDuration: 0,
jsHeapSizeLimit: 3000,
totalMemoryLimit: 4000,
rendererProcessMemRss: 1024,
rendererUsagePercentage: 34.13333333333333,
rendererMemoryThreshold: 1500,
currentAvailableMemory: 3000,
maxAvailableRendererMemory: 3000,
shouldCollectGarbage: false,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
// Second pass: usage comes straight from pidusage's 2000, which exceeds
// the 1500 threshold, so GC is flagged.
{
getAvailableMemoryDuration: 0,
getRendererMemoryUsageDuration: 0,
jsHeapSizeLimit: 3000,
totalMemoryLimit: 4000,
rendererProcessMemRss: 2000,
rendererUsagePercentage: 66.66666666666666,
rendererMemoryThreshold: 1500,
currentAvailableMemory: 3000,
maxAvailableRendererMemory: 3000,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
// si.processes only on the first pass; pidusage only on the second.
expect(processesMock).to.be.calledOnce
expect(pidStub).to.be.calledOnce
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
// A threshold breach observed during an earlier measurement pass must still
// trigger GC at checkMemoryPressure time, even if the most recent measurement
// (25) is back under the threshold (50). The first sampled usage (75) is over.
it('collects memory when a previous checkMemory call goes over the threshold', async () => {
const automation = sinon.createStubInstance(Automation)
const gcStub = automation.request.withArgs('collect:garbage').resolves()
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
// First sample over threshold (75 > 50), second sample under (25 < 50).
sinon.stub(memory, 'getRendererMemoryUsage')
.onFirstCall().resolves(75)
.onSecondCall().resolves(25)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.gatherMemoryStats()
await memory.default.checkMemoryPressure({ automation, test: { title: 'test', order: 1, currentRetry: 0 } })
const expected = [
// First pass: over threshold — shouldCollectGarbage latches true.
{
getAvailableMemoryDuration: 0,
jsHeapSizeLimit: 100,
totalMemoryLimit: 2000,
rendererProcessMemRss: 75,
rendererUsagePercentage: 75,
rendererMemoryThreshold: 50,
currentAvailableMemory: 1000,
maxAvailableRendererMemory: 100,
shouldCollectGarbage: true,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
// Second pass: under threshold on its own, yet GC still happens below
// because of the earlier breach.
{
getAvailableMemoryDuration: 0,
jsHeapSizeLimit: 100,
totalMemoryLimit: 2000,
rendererProcessMemRss: 25,
rendererUsagePercentage: 25,
rendererMemoryThreshold: 50,
currentAvailableMemory: 1000,
maxAvailableRendererMemory: 100,
shouldCollectGarbage: false,
timestamp: 0,
calculateMemoryStatsDuration: 0,
},
{
checkMemoryPressureDuration: 0,
testTitle: 'test',
testOrder: 1,
garbageCollected: true,
timestamp: 0,
},
]
expect(gcStub).to.be.calledOnce
expect(memory.getRendererMemoryUsage).to.be.calledTwice
expect(memory.default.getMemoryStats()).to.deep.eql(expected)
})
})
// Tests for shutting the profiler down: the periodic timer must be cleared,
// a final stats calculation performed, and the accumulated stats written out.
context('#endProfiling', () => {
it('stops the profiling', async () => {
// restore the fake timers since we are stubbing setTimeout/clearTimeout directly
sinon._clock.restore()
const automation = sinon.createStubInstance(Automation)
const mockHandler = {
getAvailableMemory: sinon.stub().resolves(1000),
getTotalMemoryLimit: sinon.stub().resolves(2000),
}
sinon.stub(memory, 'getJsHeapSizeLimit').resolves(100)
sinon.stub(memory, 'getMemoryHandler').resolves(mockHandler)
sinon.stub(memory, 'calculateMemoryStats').resolves()
// The sentinel returned by setTimeout lets us assert the exact handle that
// endProfiling passes to clearTimeout.
const timer = sinon.stub()
sinon.stub(global, 'setTimeout').returns(timer)
sinon.stub(global, 'clearTimeout')
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.endProfiling()
// One final stats calculation happens on shutdown, and the scheduled
// profiling timer is cancelled with the same handle it was created with.
expect(memory.calculateMemoryStats).to.be.calledOnce
expect(global.clearTimeout).to.be.calledWith(timer)
})
it('saves the cumulative memory stats to a file', async () => {
// Ending the profile writes the stats JSON (path derived from fileName)
// via fs.outputFile exactly once.
const fileStub = sinon.stub(fs, 'outputFile').withArgs('cypress/logs/memory/memory_spec.json').resolves()
const automation = sinon.createStubInstance(Automation)
await memory.default.startProfiling(automation, { fileName: 'memory_spec' })
await memory.default.endProfiling()
expect(fileStub).to.be.calledOnce
})
})
})

View File

@@ -4,7 +4,7 @@ import _ from 'lodash'
import si from 'systeminformation'
import { expect } from 'chai'
import {
_groupCyProcesses,
groupCyProcesses,
_renameBrowserGroup,
_aggregateGroups,
_reset,
@@ -116,13 +116,13 @@ describe('lib/util/process_profiler', function () {
_reset()
})
context('._groupCyProcesses', () => {
context('.groupCyProcesses', () => {
it('groups correctly', () => {
sinon.stub(browsers, 'getBrowserInstance').returns({ pid: BROWSER_PID })
sinon.stub(plugins, 'getPluginPid').returns(PLUGIN_PID)
// @ts-ignore
const groupedProcesses = _groupCyProcesses({ list: PROCESSES })
const groupedProcesses = groupCyProcesses({ list: PROCESSES })
const checkGroup = (pid, group) => {
expect(_.find(groupedProcesses, { pid }))
@@ -184,7 +184,7 @@ describe('lib/util/process_profiler', function () {
})
// @ts-ignore
const result = _aggregateGroups(_groupCyProcesses({ list: processes }))
const result = _aggregateGroups(groupCyProcesses({ list: processes }))
// main process will have variable pid, replace it w constant for snapshotting
// @ts-ignore

View File

@@ -26,8 +26,12 @@ This can happen for a number of different reasons:
- You wrote an endless loop and you must fix your own code
- You are running Docker (there is an easy fix for this: see link below)
- You are running lots of tests on a memory intense application
- You are running in a memory starved VM environment
- You are running lots of tests on a memory intense application.
- Try enabling experimentalMemoryManagement in your config file.
- Try lowering numTestsKeptInMemory in your config file.
- You are running in a memory starved VM environment.
- Try enabling experimentalMemoryManagement in your config file.
- Try lowering numTestsKeptInMemory in your config file.
- There are problems with your GPU / GPU drivers
- There are browser bugs in Chromium
@@ -131,8 +135,12 @@ This can happen for a number of different reasons:
- You wrote an endless loop and you must fix your own code
- You are running Docker (there is an easy fix for this: see link below)
- You are running lots of tests on a memory intense application
- You are running in a memory starved VM environment
- You are running lots of tests on a memory intense application.
- Try enabling experimentalMemoryManagement in your config file.
- Try lowering numTestsKeptInMemory in your config file.
- You are running in a memory starved VM environment.
- Try enabling experimentalMemoryManagement in your config file.
- Try lowering numTestsKeptInMemory in your config file.
- There are problems with your GPU / GPU drivers
- There are browser bugs in Chromium

View File

@@ -525,6 +525,7 @@
"./node_modules/picomatch/index.js",
"./node_modules/picomatch/lib/constants.js",
"./node_modules/picomatch/lib/picomatch.js",
"./node_modules/pidusage/lib/stats.js",
"./node_modules/pinkie/index.js",
"./node_modules/pngjs/lib/chunkstream.js",
"./node_modules/pngjs/lib/filter-parse-async.js",
@@ -2707,6 +2708,14 @@
"./node_modules/picomatch/lib/parse.js",
"./node_modules/picomatch/lib/scan.js",
"./node_modules/picomatch/lib/utils.js",
"./node_modules/pidusage/index.js",
"./node_modules/pidusage/lib/bin.js",
"./node_modules/pidusage/lib/helpers/cpu.js",
"./node_modules/pidusage/lib/helpers/parallel.js",
"./node_modules/pidusage/lib/history.js",
"./node_modules/pidusage/lib/procfile.js",
"./node_modules/pidusage/lib/ps.js",
"./node_modules/pidusage/lib/wmic.js",
"./node_modules/pify/index.js",
"./node_modules/pinkie-promise/index.js",
"./node_modules/pixelmatch/index.js",
@@ -4281,5 +4290,5 @@
"./tooling/v8-snapshot/cache/darwin/snapshot-entry.js"
],
"deferredHashFile": "yarn.lock",
"deferredHash": "7520727043f0935e864f3d1f8096dac95d86a907f0c8045536e6865ebd7a3b01"
}
"deferredHash": "5a30e2aa1f598a2590be298a95e5dd462a9177e68fbc42fbe7b88dc953d2655e"
}

View File

@@ -6008,6 +6008,13 @@
dependencies:
type-detect "4.0.8"
"@sinonjs/commons@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-2.0.0.tgz#fd4ca5b063554307e8327b4564bd56d3b73924a3"
integrity sha512-uLa0j859mMrg2slwQYdO/AkrOfmH+X6LTVmNTS9CqexuE2IvVORIkSpJLqePAbEnKJ77aMmCwr1NUZ57120Xcg==
dependencies:
type-detect "4.0.8"
"@sinonjs/fake-timers@8.1.0":
version "8.1.0"
resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-8.1.0.tgz#3fdc2b6cb58935b21bfb8d1625eb1300484316e7"
@@ -6015,12 +6022,12 @@
dependencies:
"@sinonjs/commons" "^1.7.0"
"@sinonjs/fake-timers@>=5", "@sinonjs/fake-timers@^9.1.2":
version "9.1.2"
resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c"
integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw==
"@sinonjs/fake-timers@^10.0.2":
version "10.0.2"
resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-10.0.2.tgz#d10549ed1f423d80639c528b6c7f5a1017747d0c"
integrity sha512-SwUDyjWnah1AaNl7kxsa7cfLhlTYoiyhDAIgyh+El30YvXs/o7OLXpYH88Zdhyx9JExKrmHDJ+10bwIcY80Jmw==
dependencies:
"@sinonjs/commons" "^1.7.0"
"@sinonjs/commons" "^2.0.0"
"@sinonjs/fake-timers@^6.0.0", "@sinonjs/fake-timers@^6.0.1":
version "6.0.1"
@@ -6029,6 +6036,13 @@
dependencies:
"@sinonjs/commons" "^1.7.0"
"@sinonjs/fake-timers@^9.1.2":
version "9.1.2"
resolved "https://registry.yarnpkg.com/@sinonjs/fake-timers/-/fake-timers-9.1.2.tgz#4eaab737fab77332ab132d396a3c0d364bd0ea8c"
integrity sha512-BPS4ynJW/o92PUR4wgriz2Ud5gpST5vz6GQfMixEDK0Z8ZCUv2M7SkBLykH56T++Xs+8ln9zTGbOvNGIe02/jw==
dependencies:
"@sinonjs/commons" "^1.7.0"
"@sinonjs/formatio@^2.0.0":
version "2.0.0"
resolved "https://registry.yarnpkg.com/@sinonjs/formatio/-/formatio-2.0.0.tgz#84db7e9eb5531df18a8c5e0bfb6e449e55e654b2"
@@ -23690,12 +23704,12 @@ nise@^4.0.1, nise@^4.1.0:
path-to-regexp "^1.7.0"
nise@^5.1.1:
version "5.1.1"
resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.1.tgz#ac4237e0d785ecfcb83e20f389185975da5c31f3"
integrity sha512-yr5kW2THW1AkxVmCnKEh4nbYkJdB3I7LUkiUgOvEkOp414mc2UMaHMA7pjq1nYowhdoJZGwEKGaQVbxfpWj10A==
version "5.1.4"
resolved "https://registry.yarnpkg.com/nise/-/nise-5.1.4.tgz#491ce7e7307d4ec546f5a659b2efe94a18b4bbc0"
integrity sha512-8+Ib8rRJ4L0o3kfmyVCL7gzrohyDe0cMFTBa2d364yIrEGMEoetznKJx899YxjybU6bL9SQkYPSBBs1gyYs8Xg==
dependencies:
"@sinonjs/commons" "^1.8.3"
"@sinonjs/fake-timers" ">=5"
"@sinonjs/commons" "^2.0.0"
"@sinonjs/fake-timers" "^10.0.2"
"@sinonjs/text-encoding" "^0.7.1"
just-extend "^4.0.2"
path-to-regexp "^1.7.0"
@@ -25603,6 +25617,13 @@ pidtree@^0.3.0:
resolved "https://registry.yarnpkg.com/pidtree/-/pidtree-0.3.1.tgz#ef09ac2cc0533df1f3250ccf2c4d366b0d12114a"
integrity sha512-qQbW94hLHEqCg7nhby4yRC7G2+jYHY4Rguc2bjw7Uug4GIJuu1tvf2uHaZv5Q8zdt+WKJ6qK1FOI6amaWUo5FA==
pidusage@3.0.2:
version "3.0.2"
resolved "https://registry.yarnpkg.com/pidusage/-/pidusage-3.0.2.tgz#6faa5402b2530b3af2cf93d13bcf202889724a53"
integrity sha512-g0VU+y08pKw5M8EZ2rIGiEBaB8wrQMjYGFfW2QVIfyT8V+fq8YFLkvlz4bz5ljvFDJYNFCWT3PWqcRr2FKO81w==
dependencies:
safe-buffer "^5.2.1"
pify@^2.0.0, pify@^2.2.0, pify@^2.3.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
@@ -28681,7 +28702,7 @@ safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1:
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d"
integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g==
safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@~5.2.0:
safe-buffer@5.2.1, safe-buffer@>=5.1.0, safe-buffer@^5.0.1, safe-buffer@^5.1.0, safe-buffer@^5.1.1, safe-buffer@^5.1.2, safe-buffer@^5.2.0, safe-buffer@^5.2.1, safe-buffer@~5.2.0:
version "5.2.1"
resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6"
integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==