feat: introduce v8 snapshots to improve startup performance (#24295)

Co-authored-by: Lachlan Miller <lachlan.miller.1990@outlook.com>
Co-authored-by: Zach Bloomquist <git@chary.us>
Co-authored-by: Tyler Biethman <tbiethman@users.noreply.github.com>
Co-authored-by: Matt Henkes <mjhenkes@gmail.com>
Co-authored-by: Chris Breiding <chrisbreiding@users.noreply.github.com>
Co-authored-by: Matt Schile <mschile@cypress.io>
Co-authored-by: Mark Noonan <mark@cypress.io>
Co-authored-by: Zachary Williams <ZachJW34@gmail.com>
Co-authored-by: Ben M <benm@cypress.io>
Co-authored-by: Zachary Williams <zachjw34@gmail.com>
Co-authored-by: astone123 <adams@cypress.io>
Co-authored-by: Bill Glesias <bglesias@gmail.com>
Co-authored-by: Emily Rohrbough <emilyrohrbough@yahoo.com>
Co-authored-by: Emily Rohrbough <emilyrohrbough@users.noreply.github.com>
Co-authored-by: semantic-release-bot <semantic-release-bot@martynus.net>
Co-authored-by: Adam Stone <adams@cypress.io>
Co-authored-by: Blue F <blue@cypress.io>
Co-authored-by: GitStart <1501599+gitstart@users.noreply.github.com>
Co-authored-by: Mike Plummer <mike-plummer@users.noreply.github.com>
Co-authored-by: Jordan <jordan@jpdesigning.com>
Co-authored-by: Sam Goodger <turbo@tailz.dev>
Co-authored-by: Colum Ferry <cferry09@gmail.com>
Co-authored-by: Stokes Player <stokes@cypress.io>
Co-authored-by: Vilhelm Melkstam <vilhelm.melkstam@gmail.com>
Co-authored-by: amehta265 <65267668+amehta265@users.noreply.github.com>
This commit is contained in:
Ryan Manuel
2022-10-31 20:20:27 -05:00
committed by GitHub
parent 440a08bb2a
commit b0c0eaa508
335 changed files with 37377 additions and 629 deletions

View File

@@ -1,3 +1,3 @@
# Bump this version to force CI to re-create the cache from scratch.
9-13-22
10-31-22

View File

@@ -27,8 +27,7 @@ mainBuildFilters: &mainBuildFilters
branches:
only:
- develop
- fix-ci-deps
- issue-23843_electron_21_upgrade
- 'feature/v8-snapshots'
# usually we don't build Mac app - it takes a long time
# but sometimes we want to really confirm we are doing the right thing
@@ -37,8 +36,7 @@ macWorkflowFilters: &darwin-workflow-filters
when:
or:
- equal: [ develop, << pipeline.git.branch >> ]
- equal: [ 'correct-dashboard-results', << pipeline.git.branch >> ]
- equal: [ 'issue-23843_electron_21_upgrade', << pipeline.git.branch >> ]
- equal: [ 'feature/v8-snapshots', << pipeline.git.branch >> ]
- matches:
pattern: "-release$"
value: << pipeline.git.branch >>
@@ -47,8 +45,7 @@ linuxArm64WorkflowFilters: &linux-arm64-workflow-filters
when:
or:
- equal: [ develop, << pipeline.git.branch >> ]
- equal: [ 'webkit-multidomain', << pipeline.git.branch >> ]
- equal: [ 'issue-23843_electron_21_upgrade', << pipeline.git.branch >> ]
- equal: [ 'feature/v8-snapshots', << pipeline.git.branch >> ]
- matches:
pattern: "-release$"
value: << pipeline.git.branch >>
@@ -66,8 +63,7 @@ windowsWorkflowFilters: &windows-workflow-filters
when:
or:
- equal: [ develop, << pipeline.git.branch >> ]
- equal: [ linux-arm64, << pipeline.git.branch >> ]
- equal: [ 'ryanm/fix/windows-node-module-install', << pipeline.git.branch >> ]
- equal: [ 'feature/v8-snapshots', << pipeline.git.branch >> ]
- matches:
pattern: "-release$"
value: << pipeline.git.branch >>
@@ -114,17 +110,19 @@ executors:
environment:
PLATFORM: windows
darwin-arm64:
darwin-arm64: &darwin-arm64-executor
machine: true
environment:
PLATFORM: darwin
linux-arm64:
linux-arm64: &linux-arm64-executor
machine:
image: ubuntu-2004:2022.04.1
resource_class: arm.medium
environment:
PLATFORM: linux
# TODO: Disabling snapshots for now on Linux Arm 64 architectures. Will revisit with https://github.com/cypress-io/cypress/issues/23557
DISABLE_SNAPSHOT_REQUIRE: 1
commands:
verify_should_persist_artifacts:
@@ -132,7 +130,7 @@ commands:
- run:
name: Check current branch to persist artifacts
command: |
if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "issue-23843_electron_21_upgrade" ]]; then
if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "feature/v8-snapshots" ]]; then
echo "Not uploading artifacts or posting install comment for this branch."
circleci-agent step halt
fi
@@ -199,6 +197,11 @@ commands:
command: |
source ./scripts/ensure-node.sh
yarn build
- run:
name: Generate v8 snapshot
command: |
source ./scripts/ensure-node.sh
yarn build-v8-snapshot-prod
- prepare-modules-cache # So we don't throw these in the workspace cache
- persist_to_workspace:
root: ~/
@@ -478,7 +481,7 @@ commands:
- run:
name: Run driver tests in Cypress
environment:
CYPRESS_KONFIG_ENV: production
CYPRESS_CONFIG_ENV: production
command: |
echo Current working directory is $PWD
echo Total containers $CIRCLE_NODE_TOTAL
@@ -562,7 +565,7 @@ commands:
# internal PR
cmd=$([[ <<parameters.percy>> == 'true' ]] && echo 'yarn percy exec --parallel -- --') || true
DEBUG=<<parameters.debug>> \
CYPRESS_KONFIG_ENV=production \
CYPRESS_CONFIG_ENV=production \
CYPRESS_RECORD_KEY=$MAIN_RECORD_KEY \
PERCY_PARALLEL_NONCE=$CIRCLE_WORKFLOW_WORKSPACE_ID \
PERCY_ENABLE=${PERCY_TOKEN:-0} \
@@ -588,7 +591,7 @@ commands:
cd ../..
DEBUG=<<parameters.debug>> \
CYPRESS_KONFIG_ENV=production \
CYPRESS_CONFIG_ENV=production \
PERCY_PARALLEL_NONCE=$CIRCLE_WORKFLOW_WORKSPACE_ID \
PERCY_ENABLE=${PERCY_TOKEN:-0} \
PERCY_PARALLEL_TOTAL=-1 \
@@ -1063,14 +1066,16 @@ commands:
# notarization on Mac can take a while
no_output_timeout: "45m"
command: |
source ./scripts/ensure-node.sh
node --version
if [[ `node ./scripts/get-platform-key.js` == 'linux-arm64' ]]; then
# these are missing on Circle and there is no way to pre-install them on Arm
sudo apt-get update
sudo apt-get install -y libgtk2.0-0 libgtk-3-0 libgbm-dev libnotify-dev libgconf-2-4 libnss3 libxss1 libasound2 libxtst6 xauth xvfb
DISABLE_SNAPSHOT_REQUIRE=1 yarn binary-build --version $(node ./scripts/get-next-version.js)
else
yarn binary-build --version $(node ./scripts/get-next-version.js)
fi
source ./scripts/ensure-node.sh
node --version
yarn binary-build --version $(node ./scripts/get-next-version.js)
- run:
name: Zip the binary
command: |
@@ -1207,7 +1212,7 @@ jobs:
<<: *defaultsParameters
resource_class:
type: string
default: medium+
default: large
resource_class: << parameters.resource_class >>
steps:
- restore_cached_workspace
@@ -1316,6 +1321,52 @@ jobs:
PERCY_PARALLEL_TOTAL=-1 \
yarn percy snapshot ./cli/visual-snapshots
v8-integration-tests:
<<: *defaults
parameters:
<<: *defaultsParameters
resource_class:
type: string
default: medium
resource_class: << parameters.resource_class >>
parallelism: 1
steps:
- restore_cached_workspace
- restore_cached_system_tests_deps
# TODO: Remove this once we switch off self-hosted M1 runners
- when:
condition:
equal: [ *darwin-arm64-executor, << parameters.executor >> ]
steps:
- run: rm -f /tmp/cypress/junit/*
- unless:
condition:
or:
- equal: [ *linux-arm64-executor, << parameters.executor >> ] # TODO: Figure out how to support linux-arm64 when we get to linux arm64 build: https://github.com/cypress-io/cypress/issues/23557
steps:
- run:
name: Run v8 integration tests
command: |
source ./scripts/ensure-node.sh
yarn test-integration --scope "'@tooling/{packherd,v8-snapshot,electron-mksnapshot}'"
- verify-mocha-results:
expectedResultCount: 3
- when:
condition:
or:
- equal: [ *linux-arm64-executor, << parameters.executor >> ]
steps:
- run:
name: Run v8 integration tests
command: |
source ./scripts/ensure-node.sh
yarn test-integration --scope "'@tooling/packherd'"
- verify-mocha-results:
expectedResultCount: 1
- store_test_results:
path: /tmp/cypress
- store-npm-logs
unit-tests:
<<: *defaults
parameters:
@@ -1346,7 +1397,7 @@ jobs:
# run type checking for each individual package
- run: yarn lerna run types
- verify-mocha-results:
expectedResultCount: 10
expectedResultCount: 18
- store_test_results:
path: /tmp/cypress
# CLI tests generate HTML files with sample CLI command output
@@ -1677,7 +1728,7 @@ jobs:
working_directory: packages/reporter
- run:
command: |
CYPRESS_KONFIG_ENV=production \
CYPRESS_CONFIG_ENV=production \
CYPRESS_RECORD_KEY=$MAIN_RECORD_KEY \
PERCY_PARALLEL_NONCE=$CIRCLE_WORKFLOW_WORKSPACE_ID \
PERCY_ENABLE=${PERCY_TOKEN:-0} \
@@ -1700,7 +1751,7 @@ jobs:
- restore_cached_system_tests_deps
- run:
command: |
CYPRESS_KONFIG_ENV=production \
CYPRESS_CONFIG_ENV=production \
CYPRESS_RECORD_KEY=$MAIN_RECORD_KEY \
PERCY_PARALLEL_NONCE=$CIRCLE_WORKFLOW_WORKSPACE_ID \
PERCY_ENABLE=${PERCY_TOKEN:-0} \
@@ -1722,7 +1773,7 @@ jobs:
- restore_cached_system_tests_deps
- run:
command: |
CYPRESS_KONFIG_ENV=production \
CYPRESS_CONFIG_ENV=production \
CYPRESS_RECORD_KEY=$MAIN_RECORD_KEY \
PERCY_PARALLEL_NONCE=$CIRCLE_WORKFLOW_WORKSPACE_ID \
PERCY_ENABLE=${PERCY_TOKEN:-0} \
@@ -1932,7 +1983,7 @@ jobs:
<<: *defaultsParameters
resource_class:
type: string
default: medium+
default: large
resource_class: << parameters.resource_class >>
steps:
- restore_cached_workspace
@@ -1951,6 +2002,7 @@ jobs:
type: string
default: medium+
steps:
- restore_cached_workspace
- clone-repo-and-checkout-branch:
repo: cypress-example-kitchensink
- install-required-node
@@ -1986,6 +2038,7 @@ jobs:
test-kitchensink-against-staging:
<<: *defaults
steps:
- restore_cached_workspace
- clone-repo-and-checkout-branch:
repo: cypress-example-kitchensink
- install-required-node
@@ -2011,6 +2064,7 @@ jobs:
test-against-staging:
<<: *defaults
steps:
- restore_cached_workspace
- clone-repo-and-checkout-branch:
repo: cypress-test-tiny
- run:
@@ -2517,6 +2571,9 @@ linux-x64-workflow: &linux-x64-workflow
- npm-cypress-schematic:
requires:
- build
- v8-integration-tests:
requires:
- system-tests-node-modules-install
# This release definition must be updated with any new jobs
# Any attempts to automate this are welcome
# If CircleCI provided an "after all" hook, then this wouldn't be necessary
@@ -2567,6 +2624,7 @@ linux-x64-workflow: &linux-x64-workflow
- run-reporter-component-tests-chrome
- run-webpack-dev-server-integration-tests
- run-vite-dev-server-integration-tests
- v8-integration-tests
# various testing scenarios, like building full binary
# and testing it on a real project
@@ -2677,6 +2735,13 @@ linux-arm64-workflow: &linux-arm64-workflow
requires:
- linux-arm64-build
- v8-integration-tests:
name: linux-arm64-v8-integration-tests
executor: linux-arm64
resource_class: arm.medium
requires:
- linux-arm64-build
darwin-x64-workflow: &darwin-x64-workflow
jobs:
- node_modules_install:
@@ -2716,6 +2781,13 @@ darwin-x64-workflow: &darwin-x64-workflow
requires:
- darwin-x64-build
- v8-integration-tests:
name: darwin-x64-v8-integration-tests
executor: mac
resource_class: macos.x86.medium.gen2
requires:
- darwin-x64-build
darwin-arm64-workflow: &darwin-arm64-workflow
jobs:
- node_modules_install:
@@ -2742,6 +2814,13 @@ darwin-arm64-workflow: &darwin-arm64-workflow
requires:
- darwin-arm64-build
- v8-integration-tests:
name: darwin-arm64-v8-integration-tests
executor: darwin-arm64
resource_class: cypress-io/latest_m1
requires:
- darwin-arm64-build
windows-workflow: &windows-workflow
jobs:
- node_modules_install:
@@ -2803,6 +2882,13 @@ windows-workflow: &windows-workflow
requires:
- windows-create-build-artifacts
- v8-integration-tests:
name: windows-v8-integration-tests
executor: windows
resource_class: windows.large
requires:
- windows-build
workflows:
linux-x64:
<<: *linux-x64-workflow

View File

@@ -113,3 +113,7 @@ system-tests/projects/react-app-webpack-5-unconfigured/**/*
system-tests/project-fixtures/**
system-tests/projects/**/*/expected-cypress*/**/*
# These are generated files that are not linted
tooling/electron-mksnapshot/bin/**
tooling/v8-snapshot/cache/**

View File

@@ -41,6 +41,7 @@ module.exports = {
'**/scripts/**',
'**/test/**',
'**/system-tests/**',
'tooling/**',
'packages/{app,driver,frontend-shared,launchpad}/cypress/**',
'*.test.ts',
// ignore in packages that don't run in the Cypress process

4
.gitignore vendored
View File

@@ -382,3 +382,7 @@ globbed_node_modules
# Autogenerated files, typically from graphql-code-generator
*.gen.ts
*.gen.json
# Snapshot Binaries
snapshot_blob.bin
v8_context_snapshot.x86_64.bin

10
.vscode/cspell.json vendored
View File

@@ -8,6 +8,7 @@
"composables",
"dedup",
"ERRORED",
"esbuild",
"execa",
"Fetchable",
"Fetchables",
@@ -19,10 +20,13 @@
"intlify",
"Lachlan",
"loggedin",
"mksnapshot",
"msapplication",
"norewrite",
"NOTESTS",
"OVERLIMIT",
"overscan",
"packherd",
"Pinia",
"pnpm",
"pseudoclass",
@@ -30,6 +34,12 @@
"Screenshotting",
"semibold",
"shiki",
"snapbuild",
"snapgen",
"snapshottable",
"snapshotted",
"snapshotting",
"sourcemaps",
"speclist",
"testid",
"TIMEDOUT",

View File

@@ -128,18 +128,19 @@ Cypress is a large open source project. When you want to contribute to Cypress,
Cypress uses a monorepo, which means there are many independent packages in this repository. There are two main types of packages: private and public.
Private packages generally live within the [`packages`](./packages) directory and are in the `@packages/` namespace. These packages are combined to form the main Cypress app that you get when you `npm install cypress`. They are discrete modules with different responsibilities, but each is necessary for the Cypress app and is not necessarily useful outside of the Cypress app. Since these modules are all compiled and bundled into a binary upon release, they are sometimes collectively referred to as the Cypress binary.
Private packages included in the app generally live within the [`packages`](./packages) directory and are in the `@packages/` namespace. These packages are combined to form the main Cypress app that you get when you `npm install cypress`. They are discrete modules with different responsibilities, but each is necessary for the Cypress app and is not necessarily useful outside of the Cypress app. Since these modules are all compiled and bundled into a binary upon release, they are sometimes collectively referred to as the Cypress binary.
Here is a list of the core packages in this repository with a short description, located within the [`packages`](./packages) directory:
| Folder Name | Package Name | Purpose |
| :------------------------------------ | :---------------------- | :--------------------------------------------------------------------------- |
| [cli](./cli) | `cypress` | The command-line tool that is packaged as an `npm` module. |
| [app](./packages/app) | `@packages/app` | The the front-end for the Cypress App that renders in the launched browser instance. |
| [app](./packages/app) | `@packages/app` | The front-end for the Cypress App that renders in the launched browser instance. |
| [config](./packages/config) | `@packages/config` | The Cypress configuration types and validation used in the server, data-context and driver. |
| [data-context](./packages/data-context) | `@packages/data-context` | Centralized data access for the Cypress application. |
| [driver](./packages/driver) | `@packages/driver` | The code that is used to drive the behavior of the API commands. |
| [electron](./packages/electron) | `@packages/electron` | The Cypress implementation of Electron. |
| [errors](./packages/errors) | `@packages/errors` | Error definitions and utilities for Cypress |
| [example](./packages/example) | `@packages/example` | Our example kitchen-sink application. |
| [extension](./packages/extension) | `@packages/extension` | The Cypress Chrome browser extension |
| [frontend-shared](./packages/frontend-shared) | `@packages/frontend-shared` | Shared components and styles used in the `app` and `launchpad`. |
@@ -150,6 +151,7 @@ Here is a list of the core packages in this repository with a short description,
| [launchpad](./packages/launchpad) | `@packages/launchpad` | The portal to running Cypress that displays in `open` mode. |
| [net-stubbing](./packages/net-stubbing) | `@packages/net-stubbing` | Contains server side code for Cypress' network stubbing features. |
| [network](./packages/network) | `@packages/network` | Various utilities related to networking. |
| [packherd-require](./packages/packherd-require) | `@packages/packherd-require` | Loads modules that have been bundled by `@tooling/packherd`. |
| [proxy](./packages/proxy) | `@packages/proxy` | Code for Cypress' network proxy layer. |
| [reporter](./packages/reporter) | `@packages/reporter` | The reporter shows the running results of the tests (The Command Log UI). |
| [resolve-dist](./packages/resolve-dist) | `@packages/resolve-dist` | Centralizes the resolution of paths to compiled/static assets from server-side code. |
@@ -161,8 +163,19 @@ Here is a list of the core packages in this repository with a short description,
| [socket](./packages/socket) | `@packages/socket` | A wrapper around socket.io to provide common libraries. |
| [ts](./packages/ts) | `@packages/ts` | A centralized version of typescript. |
| [types](./packages/types) | `@packages/types` | The shared internal Cypress types. |
| [v8-snapshot-require](./packages/v8-snapshot-require) | `@packages/v8-snapshot-require` | Tool to load a snapshot for Electron applications that was created by `@tooling/v8-snapshot`. |
| [web-config](./packages/web-config) | `@packages/ui-components` | The web-related configuration. |
Private packages involved in development of the app live within the [`tooling`](./tooling) directory and are in the `@tooling/` namespace. They are discrete modules with different responsibilities, but each is necessary for development of the Cypress app and is not necessarily useful outside of the Cypress app.
Here is a list of the packages in this repository with a short description, located within the [`tooling`](./tooling) directory:
| Folder Name | Package Name | Purpose |
| :------------------------------------ | :---------------------- | :--------------------------------------------------------------------------- |
| [electron-mksnapshot](./tooling/electron-mksnapshot) | `electron-mksnapshot` | A rewrite of [electron/mksnapshot](https://github.com/electron/mksnapshot) to support multiple versions. |
| [packherd](./tooling/packherd) | `packherd` | Herds all dependencies reachable from an entry and packs them. |
| [v8-snapshot](./tooling/v8-snapshot) | `v8-snapshot` | Tool to create a snapshot for Electron applications. |
Public packages live within the [`npm`](./npm) folder and are standalone modules that get independently published to npm under the `@cypress/` namespace. These packages generally contain extensions, plugins, or other packages that are complementary to, yet independent of, the main Cypress app.
Here is a list of the npm packages in this repository:
@@ -396,6 +409,67 @@ Each package documents how to best work with it, so consult the `README.md` of e
They will outline development and test procedures. When in doubt just look at the `scripts` of each `package.json` file. Everything we do at Cypress is contained there.
### V8 Snapshotting
In order to improve start up time, Cypress uses [electron mksnapshot](https://github.com/electron/mksnapshot) for generating [v8 snapshots](https://v8.dev/blog/custom-startup-snapshots) for both development and production.
#### Snapshot Generation
Locally, a v8 snapshot is generated in a post install step and set up to only include node modules. In this way, cypress code can be modified without having to regenerate a snapshot. If you do want or need to regenerate the snapshot for development you can run:
```
yarn build-v8-snapshot-dev
```
On CI and for binary builds we run:
```
yarn build-v8-snapshot-prod
```
which will include both node modules and cypress code.
During the process of snapshot generation, metadata is created/updated in `tooling/v8-snapshot/cache`. Changes to these files can and should be committed to the repo as it will make subsequent snapshot generations faster.
#### Troubleshooting
**Generation**
If you run into errors while generating the v8 snapshot, you can occasionally identify the problem dependency via the output. You can try to remove that dependency from the cache and see if regenerating succeeds. If it does, likely it was moved to a more restrictive section (e.g. healthy to deferred/norewrite or deferred to norewrite). If all else fails, you can try running the following (but keep in mind this may take a while):
```
V8_SNAPSHOT_FROM_SCRATCH=1 yarn build-v8-snapshot-dev
```
or
```
V8_SNAPSHOT_FROM_SCRATCH=1 yarn build-v8-snapshot-prod
```
**Runtime**
If you're experiencing issues during runtime, you can try and narrow down where the problem might be via a few different scenarios:
* If the problem occurs with the binary, but not in the monorepo, chances are something is being removed during the binary cleanup step that shouldn't be
* If the problem occurs with running `yarn build-v8-snapshot-prod` but not `yarn build-v8-snapshot-dev`, then that means there's a problem with a cypress file and not a node module dependency. Chances are that a file is not being flagged properly (e.g. healthy when it should be deferred or norewrite).
* If the problem occurs with both `yarn build-v8-snapshot-prod` and `yarn build-v8-snapshot-dev` but does not occur when using the `DISABLE_SNAPSHOT_REQUIRE` environment variable, then that means there's a problem with a node module dependency. Chances are that a file is not being flagged properly (e.g. healthy when it should be deferred or norewrite).
* If the problem still occurs when using the `DISABLE_SNAPSHOT_REQUIRE` environment variable, then that means the problem is not snapshot related.
**Build Length**
If the `build-v8-snapshot-prod` command is taking a long time to run on Circle CI, the snapshot cache probably needs to be updated. Run these commands on Windows, Linux, and Mac machines and commit the updates to the snapshot cache to git:
```
yarn build-v8-snapshot-dev
```
or
```
yarn build-v8-snapshot-prod
```
## Committing Code
### Branches

View File

@@ -134,7 +134,7 @@ function isValidCypressInternalEnvValue (value) {
return true
}
// names of config environments, see "packages/server/config/app.yml"
// names of config environments, see "packages/server/config/app.json"
const names = ['development', 'test', 'staging', 'production']
return _.includes(names, value)
@@ -257,7 +257,7 @@ const getApplicationDataFolder = (...paths) => {
const { env } = process
// allow overriding the app_data folder
let folder = env.CYPRESS_KONFIG_ENV || env.CYPRESS_INTERNAL_ENV || 'development'
let folder = env.CYPRESS_CONFIG_ENV || env.CYPRESS_INTERNAL_ENV || 'development'
const PRODUCT_NAME = pkg.productName || pkg.name
const OS_DATA_PATH = ospath.data()

View File

@@ -10,6 +10,7 @@
"**/build/**",
"**/dist/**",
"**/dist-test/**",
"**/.cy/**"
"**/.cy/**",
"**/v8-snapshot/cache/**"
]
}

View File

@@ -4,6 +4,7 @@
"cli",
"packages/*",
"npm/*",
"tooling/*",
"system-tests"
],
"useWorkspaces": true,

View File

@@ -9,7 +9,7 @@ const scaffoldAngularProject = async (project: string) => {
Fixtures.removeProject(project)
await Fixtures.scaffoldProject(project)
await FixturesScaffold.scaffoldProjectNodeModules(project)
await FixturesScaffold.scaffoldProjectNodeModules({ project })
await fs.remove(path.join(projectPath, 'cypress.config.ts'))
await fs.remove(path.join(projectPath, 'cypress'))

View File

@@ -9,7 +9,7 @@ const scaffoldAngularProject = async (project: string) => {
Fixtures.removeProject(project)
await Fixtures.scaffoldProject(project)
await FixturesScaffold.scaffoldProjectNodeModules(project)
await FixturesScaffold.scaffoldProjectNodeModules({ project })
await fs.remove(path.join(projectPath, 'cypress.config.ts'))
await fs.remove(path.join(projectPath, 'cypress'))

View File

@@ -6,7 +6,7 @@ export async function scaffoldSystemTestProject (project: ProjectFixtureDir) {
await Fixtures.scaffoldProject(project)
await FixturesScaffold.scaffoldProjectNodeModules(project)
await FixturesScaffold.scaffoldProjectNodeModules({ project })
return Fixtures.projectPath(project)
}

View File

@@ -44,7 +44,7 @@ async function sourceModulesForProject (fixture: ProjectDirs[number]) {
Fixtures.remove()
const projectRoot = await Fixtures.scaffoldProject(fixture)
await FixturesScaffold.scaffoldProjectNodeModules(fixture)
await FixturesScaffold.scaffoldProjectNodeModules({ project: fixture })
const result = sourceDefaultWebpackDependencies({
cypressConfig: {

View File

@@ -6,7 +6,7 @@ export async function scaffoldMigrationProject (project: ProjectFixtureDir): Pro
await Fixtures.scaffoldProject(project)
await FixturesScaffold.scaffoldProjectNodeModules(project)
await FixturesScaffold.scaffoldProjectNodeModules({ project })
return Fixtures.projectPath(project)
}

View File

@@ -15,6 +15,8 @@
"build": "yarn build-npm-modules && lerna run build --stream --no-bail --ignore create-cypress-tests --ignore cypress --ignore \"'@packages/{runner}'\" --ignore \"'@cypress/{angular,react,react18,vue,vue2,mount-utils,svelte}'\" && node ./cli/scripts/post-build.js && lerna run build --stream --scope create-cypress-tests",
"build-npm-modules": "lerna run build --scope cypress --scope @cypress/mount-utils && lerna run build --scope \"'@cypress/{angular,react,react18,vue,vue2,svelte}'\"",
"build-prod": "lerna run build-prod-ui --stream && lerna run build-prod --stream --ignore create-cypress-tests && node ./cli/scripts/post-build.js && lerna run build-prod --stream --scope create-cypress-tests --scope",
"build-v8-snapshot-dev": "node --max-old-space-size=8192 tooling/v8-snapshot/scripts/setup-v8-snapshot-in-cypress.js --env=dev",
"build-v8-snapshot-prod": "node --max-old-space-size=8192 tooling/v8-snapshot/scripts/setup-v8-snapshot-in-cypress.js",
"check-node-version": "node scripts/check-node-version.js",
"check-terminal": "node scripts/check-terminal.js",
"clean": "lerna run clean --parallel --no-bail || echo 'ok, errors while cleaning'",
@@ -49,7 +51,7 @@
"stop-only": "npx stop-only --skip .cy,.publish,.projects,node_modules,dist,dist-test,fixtures,lib,bower_components,src,__snapshots__ --exclude cypress-tests.ts,*only.cy.js",
"stop-only-all": "yarn stop-only --folder packages",
"pretest": "yarn ensure-deps",
"test": "yarn lerna exec yarn test --scope cypress --scope \"'@packages/{config,errors,data-context,electron,extension,https-proxy,launcher,net-stubbing,network,proxy,rewriter,scaffold-config,socket}'\"",
"test": "yarn lerna exec yarn test --scope cypress --scope \"'@packages/{config,data-context,electron,errors,extension,https-proxy,launcher,net-stubbing,network,packherd-require,proxy,rewriter,scaffold-config,socket,v8-snapshot-require}'\" --scope \"'@tooling/{electron-mksnapshot,v8-snapshot}'\"",
"test-debug": "lerna exec yarn test-debug --ignore \"'@packages/{driver,root,static,web-config}'\"",
"pretest-e2e": "yarn ensure-deps",
"test-integration": "lerna exec yarn test-integration --ignore \"'@packages/{driver,root,static,web-config}'\"",
@@ -75,6 +77,7 @@
"@cypress/questions-remain": "1.0.1",
"@cypress/request": "2.88.10",
"@cypress/request-promise": "4.2.6",
"@electron/fuses": "1.6.0",
"@fellow/eslint-plugin-coffee": "0.4.13",
"@graphql-codegen/add": "3.1.0",
"@graphql-codegen/cli": "2.2.0",
@@ -177,7 +180,6 @@
"js-codemod": "cpojer/js-codemod",
"jscodemods": "https://github.com/cypress-io/jscodemods.git#01b546e",
"jscodeshift": "0.7.0",
"konfig": "0.2.1",
"lazy-ass": "1.6.0",
"lerna": "3.20.2",
"lint-staged": "11.1.2",
@@ -251,6 +253,7 @@
"cli",
"packages/*",
"npm/*",
"tooling/*",
"system-tests"
],
"nohoist": [

View File

@@ -13,7 +13,7 @@
"test": "yarn test-unit",
"test:clean": "find ./test/__fixtures__ -depth -name 'output.*' -type f -exec rm {} \\;",
"test-debug": "yarn test-unit --inspect-brk=5566",
"test-unit": "mocha --configFile=../../mocha-reporter-config.json -r @packages/ts/register 'test/**/*.spec.ts' --exit --timeout 5000"
"test-unit": "mocha --reporter mocha-multi-reporters --reporter-options configFile=../../mocha-reporter-config.json -r @packages/ts/register 'test/**/*.spec.ts' --exit --timeout 5000"
},
"dependencies": {
"@babel/core": "^7",

View File

@@ -507,7 +507,7 @@ export function mergeDefaults (
}
function isValidCypressInternalEnvValue (value: string) {
// names of config environments, see "config/app.yml"
// names of config environments, see "config/app.json"
const names = ['development', 'test', 'staging', 'production']
return _.includes(names, value)

View File

@@ -11,7 +11,7 @@
"tslint": "tslint --config ../ts/tslint.json --project .",
"clean": "rimraf --glob \"./{src,test}/**/*.js\"",
"test": "yarn test-unit",
"test-unit": "mocha -r @packages/ts/register --config ./test/.mocharc.js"
"test-unit": "mocha -r @packages/ts/register --config ./test/.mocharc.js --reporter mocha-multi-reporters --reporter-options configFile=../../mocha-reporter-config.json"
},
"dependencies": {
"@babel/code-frame": "7.8.3",
@@ -83,4 +83,4 @@
"src"
],
"types": "src/index.ts"
}
}

View File

@@ -11,7 +11,7 @@ export default defineConfig({
'*.idp.com': '127.0.0.1',
'localalias': '127.0.0.1',
},
reporter: 'cypress-multi-reporters',
reporter: '../../node_modules/cypress-multi-reporters/index.js',
reporterOptions: {
configFile: '../../mocha-reporter-config.json',
},

View File

@@ -2,11 +2,12 @@
const _ = require('lodash')
const os = require('os')
const path = require('path')
const pkg = require('../../../package.json')
const paths = require('./paths')
const log = require('debug')('cypress:electron')
const fs = require('fs-extra')
const crypto = require('crypto')
const { flipFuses, FuseVersion, FuseV1Options } = require('@electron/fuses')
const pkg = require('@packages/root')
let electronVersion
@@ -101,13 +102,16 @@ module.exports = {
log('package icon', iconPath)
const platform = os.platform()
const arch = os.arch()
_.defaults(options, {
dist: paths.getPathToDist(),
dir: 'app',
out: 'tmp',
name: 'Cypress',
platform: os.platform(),
arch: os.arch(),
platform,
arch,
asar: false,
prune: true,
overwrite: true,
@@ -128,6 +132,16 @@ module.exports = {
console.log('to', options.dist)
return this.move(appPath, options.dist)
})
.then(() => {
return !['1', 'true'].includes(process.env.DISABLE_SNAPSHOT_REQUIRE) ? flipFuses(
paths.getPathToExec(),
{
version: FuseVersion.V1,
resetAdHocDarwinSignature: platform === 'darwin' && arch === 'arm64',
[FuseV1Options.LoadBrowserProcessSpecificV8Snapshot]: true,
},
) : Promise.resolve()
}).catch((err) => {
console.log(err.stack)

View File

@@ -11,7 +11,7 @@
"start": "./bin/cypress-electron",
"test": "yarn test-unit",
"test-debug": "yarn test-unit --inspect-brk=5566",
"test-unit": "mocha",
"test-unit": "mocha --reporter mocha-multi-reporters --reporter-options configFile=../../mocha-reporter-config.json",
"test-watch": "yarn test-unit --watch"
},
"dependencies": {

View File

@@ -127,7 +127,7 @@ async function makeE2ETasks () {
await scaffoldCommonNodeModules()
try {
await scaffoldProjectNodeModules(projectName)
await scaffoldProjectNodeModules({ project: projectName })
} catch (e) {
if (isRetry) {
throw e

View File

@@ -8,8 +8,11 @@ import { mutationErrorPlugin, nexusDebugLogPlugin, nexusSlowGuardPlugin, nexusDe
const isCodegen = Boolean(process.env.CYPRESS_INTERNAL_NEXUS_CODEGEN)
// TODO: fix this with an update to esbuild: https://github.com/cypress-io/cypress/issues/23126
const types = Object.assign({}, schemaTypes, { default: undefined })
export const graphqlSchema = makeSchema({
types: schemaTypes,
types,
shouldGenerateArtifacts: isCodegen,
shouldExitAfterGenerateArtifacts: isCodegen,
outputs: {

View File

@@ -1,5 +1,6 @@
import { idArg, stringArg, nonNull, objectType } from 'nexus'
import { ProjectLike, ScaffoldedFile } from '..'
import { ProjectLike } from '../interfaceTypes/gql-ProjectLike'
import { ScaffoldedFile } from './gql-ScaffoldedFile'
import { CurrentProject } from './gql-CurrentProject'
import { DevState } from './gql-DevState'
import { AuthState } from './gql-AuthState'

View File

@@ -139,9 +139,9 @@ function getWindowsBrowser (browser: Browser): Promise<FoundBrowser> {
return tryNextExePath()
}
// Use exports.getVersionString here, rather than our local reference
// Use module.exports.getVersionString here, rather than our local reference
// to that variable so that the tests can easily mock it
return exports.getVersionString(path).then((version) => {
return module.exports.getVersionString(path).then((version) => {
debug('got version string for %s: %o', browser.name, { exePath, version })
return {

View File

@@ -6,7 +6,7 @@
"scripts": {
"build-prod": "tsc --project .",
"clean-deps": "rimraf node_modules",
"test": "mocha -r @packages/ts/register --reporter mocha-multi-reporters --reporter-options configFile=../../mocha-reporter-config.json --exit test/unit/*"
"test": "CYPRESS_INTERNAL_ENV=test mocha -r @packages/ts/register --reporter mocha-multi-reporters --reporter-options configFile=../../mocha-reporter-config.json --exit test/unit/*"
},
"dependencies": {
"@types/mime-types": "2.1.0",

View File

@@ -0,0 +1,125 @@
## @packages/packherd-require
Loads modules that have been bundled by `@tooling/packherd`.
**Table of Contents**
- [Summary](#summary)
- [Loading Bundled/Snapshotted Modules with Packherd](#loading-bundledsnapshotted-modules-with-packherd)
- [Transpiling TypeScript Modules on Demand](#transpiling-typescript-modules-on-demand)
- [Transpile Cache](#transpile-cache)
- [Sourcemap Support](#sourcemap-support)
- [Implementation](#implementation)
- [Import Caveats](#import-caveats)
- [Env Vars](#env-vars)
## Summary
packherd has three main tasks:
1. bundling application files and providing related metadata
2. loading modules that have been bundled previously and are provided via fully instantiated
module exports or definition functions that return a module export when invoked
3. transpiling TypeScript modules on demand and maintaining a cache of them
`1.` is provided by `@tooling/packherd`. `2.` and `3.` are provided by this package. While `1.` and `2.`
are very related and work hand in hand, `3.` is unrelated to them and was
just added here since it is another feature required to intercept module loads.
## Loading Bundled/Snapshotted Modules with Packherd
In order to hook into the `require` process and load from a different source instead of the
file system the [packherdRequire][require fn] function needs to be invoked with the desired
configuration. Note that both this hook and the _transpile TypeScript on demand_ feature can
function together without any problem.
The [require opts][require opts] that are passed to this function allow to configure how
packherd resolves and loads the modules that are included via one of the following:
- `moduleExports`: map of fully instantiated module exports that have been obtained either by
`require` ing each module previously or by having them snapshotted into the application
- `moduleDefinitions`: similar to `moduleExports` except that these are functions that need to
be invoked in order to obtain the `module.exports`, thus incurring some overhead
Since packherd cannot know how the modules are keyed inside the maps, you should pass a `getModuleKey`
function of [this type][GetModuleKey] in order to resolve those keys.
For example in the case of [v8-snapshot][v8-snapshot] (TODO: Update this link when snapshot module is added) the [getModuleKey
implementation][v8-snapshot module key] (TODO: Update this link when snapshot module is added) relies on a resolver map that is
embedded inside the app's snapshot. Additionally it knows how modules are keyed via the
[modified esbuild][cypress esbuild] bundler it uses.
Once the module key has been resolved (or even if not) packherd tries its best to resolve
and/or load the module from the most efficient source. It attempts to avoid accessing the file
system until no more options remain and only loads it via the Node.js resolution/loader
mechanism when all else fails.
For more details on the module resolve/load steps refer to [PackherdModuleLoader][packherd
module loader], in particular [`tryLoad`][try load] and [`tryResolve`][try resolve] including
the relevant code sections which include detailed comments for each step.
## Transpiling TypeScript Modules on Demand
To enable this feature the [packherdRequire][require fn] has to be invoked in order to
have it hook into Node.js `require` calls via a `Module._extensions` entry. Particularly the
[`transpileOpts`][transpile opts] field of the [opts][require opts] needs to be configured as follows.
- `supportTS`: `true`
- `initTranspileCache`: needs to be a function matching [InitTranspileCache][init transpile cache fn]
### Transpile Cache
We recommend using the [dirt-simple-file-cache][dirt-simple-file-cache] module to provide the
transpile cache, as it has been developed alongside packherd for just this purpose.
Here is an example of how that option field could be set up with this module.
```js
const DirtSimpleFileCache = require('dirt-simple-file-cache')
const initTranspileCache = () =>
DirtSimpleFileCache.initSync(projectBaseDir, { keepInMemoryCache: true })
```
### Sourcemap Support
In order to show original locations for errors logged to the console, packherd hooks into the
generation of error stack traces and maps locations to TypeScript.
For more information please read the [sourcemap docs][sourcemap docs]
### Implementation
Please find more implementation details regarding transpilation inside
[./src/transpile-ts.ts][transpile-ts].
### Import Caveats
Since esbuild enforces the behaviour of imports being static this caused problems
with tests that relied on being able to patch/`sinon.stub` modules even after they were
imported.
In general we would recommend doing this _properly_ via a tool like
[proxyquire][proxyquire].
## Env Vars
- `PACKHERD_CODE_FRAMES` if set will include code snippets for error messages that have been
sourcemapped
[require fn]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/require.ts#L71
[require opts]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/require.ts#L23-L32
[transpile opts]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/types.ts#L187-L195
[init transpile cache fn]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/types.ts#L177-L185
[transpile-ts]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/transpile-ts.ts
[GetModuleKey]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/loader.ts#L35-L45
[packherd module loader]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/loader.ts#L226
[try load]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/loader.ts#L536
[try resolve]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/loader.ts#L458
[sourcemap docs]:https://github.com/cypress-io/cypress/blob/develop/packages/packherd/src/sourcemap-support.ts
[v8-snapshot]:https://github.com/thlorenz/v8-snapshot
[v8-snapshot module key]:https://github.com/thlorenz/v8-snapshot/blob/master/src/loading/snapshot-require.ts#L20
[proxyquire]:https://github.com/thlorenz/proxyquire
[dirt-simple-file-cache]:https://github.com/thlorenz/dirt-simple-file-cache
[cypress esbuild]:https://github.com/cypress-io/esbuild/tree/thlorenz/snap

View File

@@ -0,0 +1,33 @@
{
"name": "@packages/packherd-require",
"version": "0.0.0-development",
"description": "Loads modules that have been bundled by `@tooling/packherd`.",
"private": true,
"main": "dist/require.js",
"scripts": {
"build": "tsc",
"build-prod": "yarn build",
"check-ts": "tsc --noEmit && yarn -s tslint",
"clean-deps": "rimraf node_modules",
"clean": "rimraf dist",
"test": "yarn test-unit",
"test-unit": "mocha --config ./test/.mocharc.js",
"tslint": "tslint --config ../ts/tslint.json --project .",
"watch": "tsc --watch"
},
"dependencies": {
"convert-source-map": "^1.7.0",
"debug": "^4.3.1",
"source-map-js": "^0.6.2"
},
"devDependencies": {
"@packages/ts": "0.0.0-development",
"esbuild": "^0.15.3",
"mocha": "7.0.1"
},
"files": [
"dist",
"src/require.ts"
],
"types": "src/require.ts"
}

View File

@@ -0,0 +1,27 @@
import type { TranspileCache } from './types'
/**
* In memory transpile cache that is used if none was provided to `packherd:require`.
*/
export class DefaultTranspileCache implements TranspileCache {
  /** Backing store mapping full module paths to their transpiled content. */
  private readonly _store = new Map<string, string>()

  /** Looks up previously transpiled content for the given path. */
  get (fullPath: string): string | undefined {
    // Purely in-memory, so entries can never go stale.
    return this._store.get(fullPath)
  }

  /** Async variant of `add`; resolves once the entry has been stored. */
  addAsync (origFullPath: string, convertedContent: string): Promise<void> {
    this.add(origFullPath, convertedContent)
    return Promise.resolve()
  }

  /** Stores transpiled content keyed by the original module's full path. */
  add (origFullPath: string, convertedContent: string): void {
    this._store.set(origFullPath, convertedContent)
  }

  /** Drops all cached entries. */
  clearSync (): void {
    this._store.clear()
  }
}

File diff suppressed because it is too large Load Diff

View File

@@ -0,0 +1,224 @@
import debug from 'debug'
import { DefaultTranspileCache } from './default-transpile-cache'
import {
GetModuleKeyOpts,
ModuleLoaderOpts,
PackherdModuleLoader,
} from './loader'
import type {
ModuleNeedsReload,
PackherdTranspileOpts,
} from './types'
import path from 'path'
// Loggers split by severity; the verbose ones only show up when the
// `cypress-verbose:packherd:*` DEBUG namespaces are enabled.
const logInfo = debug('cypress-verbose:packherd:info')
const logDebug = debug('cypress-verbose:packherd:debug')
const logTrace = debug('cypress-verbose:packherd:trace')
const logError = debug('cypress:packherd:error')
// Re-export the loader and its types so consumers only need this entry point
export * from './loader'
export * from './types'
/**
* Configures how packherd require works.
*
* @property requireStatsFile: specifies where to write benchmarking stats if diagnose is active
* @property transpileOpts: configures if/how TypeScript files are transpiled
* @property sourceMapLookup: if provided it will be used to find sourcemaps by module URI
* @property moduleNeedsReload: allows to override how packherd determines if a
* module needs to be reloaded even if found in a cache
* @category Loader
*/
export type PackherdRequireOpts = ModuleLoaderOpts & {
  requireStatsFile?: string
  transpileOpts?: Partial<PackherdTranspileOpts>
  moduleNeedsReload?: ModuleNeedsReload
}
// Applied when the caller provides no `transpileOpts`; TypeScript
// transpilation is opt-in.
const DEFAULT_TRANSPILE_OPTS = {
  supportTS: false,
}
/**
* Patches Node.js require chain in order to load modules from different sources
* and/or transpile TypeScript modules on the fly.
*
* Hooks into `Module_.load` if either {@link ModuleLoaderOpts} `moduleExports`
* or `moduleDefinitions` or both are provided.
* It will then try to load modules from either of those two before falling
* back to the default Node.js behavior and loading them from the file system.
*
* Optionally hooks into `Module._extension` in order to transpile TypeScript files as
* they are required/imported.
*
* @returns a variety of functions which allow to communicate with the loader:
*
* - resolve: function to resolve a module from it's URI
* - shouldBypassCache: returns `true` if a cache, i.e. exports embedded in the
* snapshot cannot by used
* - registerModuleLoad: allows registering modules being loaded even if that
* occurs from inside a snapshot
* - registerModuleLoad: needs to be called to track loaded modules which is
* necessary to determine if cache should be bypassed or not
*
* These are used by [v8-snapshot](https://github.com/thlorenz/v8-snapshot)
* from the `require` embedded in its snapshot, see [custom-require](https://github.com/thlorenz/v8-snapshot/blob/master/src/blueprint/custom-require.js).
*
* @category Loader
*/
export function packherdRequire (
  projectBaseDir: string,
  opts: PackherdRequireOpts,
) {
  const Module = require('module')
  // Merge transpile defaults with whatever the caller provided
  const { supportTS, initTranspileCache, tsconfig } = Object.assign(
    {},
    DEFAULT_TRANSPILE_OPTS,
    opts.transpileOpts,
  )
  const diagnosticsEnabled = opts.diagnosticsEnabled ?? false
  // Fall back to the in-memory cache when no initializer was provided or
  // when the initializer opted out by returning null/undefined
  const cache =
    initTranspileCache == null
      ? new DefaultTranspileCache()
      : initTranspileCache(projectBaseDir, {
        // even though we pass `cacheDir` here other end may store the cache wherever it wants
        cacheDir: '/tmp/packherd-cache',
      }) ?? new DefaultTranspileCache()
  if (supportTS) {
    logInfo('Enabling TS support')
    logDebug({ supportTS, initTranspileCache, tsconfig })
    const { hookTranspileTs } = require('./transpile-ts')
    hookTranspileTs(
      Module,
      projectBaseDir,
      logInfo,
      diagnosticsEnabled,
      cache,
      tsconfig,
    )
  }
  const exportKeysLen =
    opts.moduleExports != null ? Object.keys(opts.moduleExports).length : 0
  const definitionKeysLen =
    opts.moduleDefinitions != null
      ? Object.keys(opts.moduleDefinitions).length
      : 0
  logInfo(
    'packherd defining %d exports and %d definitions!',
    exportKeysLen,
    definitionKeysLen,
  )
  logInfo({ projectBaseDir })
  // Even though packherd is designed to support loading from these caches we
  // also support using it for on the fly TypeScript transpilation only.
  // In that case the necessary extensions hook was applied above and no
  // further work is needed.
  if (exportKeysLen === 0 && definitionKeysLen === 0) {
    logInfo(
      'No moduleExports nor moduleDefinitions provided, not hooking Module._load',
    )
    return { resolve: require.resolve.bind(require) }
  }
  const origLoad = Module._load
  const moduleLoader = new PackherdModuleLoader(
    Module,
    origLoad,
    projectBaseDir,
    opts,
  )
  //
  // Module._load override
  //
  Module._load = function (
    moduleUri: string,
    parent: typeof Module,
    isMain: boolean,
  ) {
    logTrace('Module._load "%s"', moduleUri)
    // Builtins (fs, path, ...) always go through the original loader
    if (Module.builtinModules.includes(moduleUri)) {
      return origLoad(moduleUri, parent, isMain)
    }
    try {
      const { resolved, origin, exports, fullPath } = moduleLoader.tryLoad(
        moduleUri,
        parent,
        isMain,
      )
      const moduleRelativePath = path.relative(projectBaseDir, fullPath)
      // The two switches below only emit trace logs describing how the
      // module was resolved and loaded; no behavior depends on them
      switch (resolved) {
        case 'module:node':
        case 'module-uri:node':
        case 'module-fullpath:node':
        case 'module-key:node':
        case 'cache:node': {
          logTrace(
            'Resolved "%s" via %s (%s | %s)',
            moduleUri,
            resolved,
            moduleRelativePath,
            fullPath,
          )
          break
        }
        default:
          // No need to do anything
      }
      switch (origin) {
        case 'Module._load': {
          logTrace(
            'Loaded "%s" via %s resolved as (%s | %s)',
            moduleUri,
            origin,
            moduleRelativePath,
            fullPath,
          )
          break
        }
        case 'packherd:export':
        case 'packherd:definition': {
          logTrace('Loaded "%s" via (%s | %s)', moduleUri, origin, resolved)
          break
        }
        default:
          // No need to do anything
      }
      return exports
    } catch (err) {
      if (diagnosticsEnabled && !moduleUri.endsWith('hook-require')) {
        logError(err)
        // eslint-disable-next-line no-debugger
        debugger
      }
      // Always rethrow so callers see the original load failure
      throw err
    }
  }
  return {
    resolve (uri: string, opts?: GetModuleKeyOpts) {
      return moduleLoader.tryResolve(uri, opts).fullPath
    },
    shouldBypassCache: moduleLoader.shouldBypassCache.bind(moduleLoader),
    registerModuleLoad: moduleLoader.registerModuleLoad.bind(moduleLoader),
    tryLoad: moduleLoader.tryLoad.bind(moduleLoader),
  }
}

View File

@@ -0,0 +1,490 @@
import debug from 'debug'
import path from 'path'
import { MappedPosition, RawSourceMap, SourceMapConsumer } from 'source-map-js'
import type {
MapAndSourceContent,
TranspileCache,
UrlAndMap,
} from './types'
import convertSourceMap from 'convert-source-map'
import { DefaultTranspileCache } from './default-transpile-cache'
const logError = debug('cypress:packherd:error')
const logDebug = debug('cypress-verbose:packherd:debug')
const logTrace = debug('cypress-verbose:packherd:trace')
// How many source lines to include before/after a mapped location when
// rendering code frames, and how wide the line-number gutter is
const INCLUDE_CODE_BEFORE = 2
const INCLUDE_CODE_AFTER = 2
const CODE_FRAME_LINE_GUTTER_WIDTH = 4
// Code frames in stack traces are opt-in via the PACKHERD_CODE_FRAMES env var
const INCLUDE_CODE_FRAMES = process.env.PACKHERD_CODE_FRAMES != null
// -----------------
// types
// -----------------
// Tracks the current/next mapped position while walking a stack trace
type StackPosition = {
  nextPos?: MappedPosition
  curPos?: MappedPosition
}
type FullScriptPath = string
// A raw (generated) position inside a script, before sourcemap translation
type SourcePosition = {
  script: FullScriptPath
  line: number
  column: number
}
// Sentinel returned whenever no sourcemap could be found or parsed
const EMPTY_URL_AND_MAP = { url: null, map: null }
// V8 CallSite widened with an index signature (so accessors can be cloned
// generically) and the code frames we attach to each frame
type CallSite = NodeJS.CallSite & {
  [index: string]: Function
} & {
  codeFrames: string[]
}
type MappedPositionWithCodeFrames = MappedPosition & { codeFrames: string[] }
// -----------------
// Config
// -----------------
// Fix position in Node where some (internal) code is prepended.
// See https://github.com/evanw/node-source-map-support/issues/36
// Header removed in node at ^10.16 || >=11.11.0
// v11 is not an LTS candidate, we can just test the one version with it.
// Test node versions for: 10.16-19, 10.20+, 12-19, 20-99, 100+, or 11.11
const noHeader = /^v(10\.1[6-9]|10\.[2-9][0-9]|10\.[0-9]{3,}|1[2-9]\d*|[2-9]\d|\d{3,}|11\.11)/
const headerLength = noHeader.test(process.version) ? 0 : 62
// -----------------
// Expose uri to map + content mapping
// -----------------
/**
 * Retrieves the sourcemap for the provided bundle uri via the sourcemap support instance.
 *
 * @param projectBaseDir the root of the project for which the bundled code was generated
 * @param bundleUri the path of the generated bundle
 * @param cache when provided will be used to look for sourcemaps from transpiled modules
 *
 * @category Sourcemap
 */
export function getSourceMap (
  projectBaseDir: string,
  bundleUri: string,
  cache: TranspileCache = new DefaultTranspileCache(),
): UrlAndMap {
  // NOTE: singleton — the cache/baseDir from the first call in this process
  // stick for its lifetime; later arguments are ignored
  const sourcemapSupport = SourcemapSupport.createSingletonInstance(
    cache,
    projectBaseDir,
  )
  return sourcemapSupport.retrieveSourceMap(bundleUri)
}
/**
 * Retrieves the sourcemap for the provided bundle uri via the sourcemap support instance
 * and extracts the source of the specified @see fileUri when found.
 *
 * @param projectBaseDir the root of the project for which the bundled code was generated
 * @param bundleUri the path of the generated bundle
 * @param fileUri the path for the original file we want to extract the source content for
 * @param cache when provided will be used to look for sourcemaps from transpiled modules
 *
 * @category Sourcemap
 */
export function getSourceMapAndContent (
  projectBaseDir: string,
  bundleUri: string,
  fileUri: string,
  cache: TranspileCache = new DefaultTranspileCache(),
): MapAndSourceContent | undefined {
  const { map, url } = getSourceMap(
    projectBaseDir,
    bundleUri,
    cache,
  )
  // No sourcemap could be found for the bundle
  if (map == null || url == null) return undefined
  const sourceContent = map.sourceContentFor(fileUri, true)
  return { map, url, sourceContent }
}
// -----------------
// Install
// -----------------
/**
 * Creates an instance of @see SourcemapSupport and installs a hook for
 * @see Error.prepareStackTrace in order to map stack traces using the source maps
 * it discovers.
 *
 * @param cache used to look up script content from which to extract source maps
 * @param projectBaseDir directory that is the root of relative source map sources
 */
export function installSourcemapSupport (
  cache: TranspileCache,
  projectBaseDir: string,
) {
  // NOTE: this is a noop if an instance was created previously
  const sourcemapSupport = SourcemapSupport.createSingletonInstance(
    cache,
    projectBaseDir,
  )
  // Hook already installed — nothing to do
  if (Error.prepareStackTrace === sourcemapSupport.prepareStackTrace) return
  logDebug('Installing sourcemap')
  Error.prepareStackTrace = sourcemapSupport.prepareStackTrace
}
// -----------------
// SourcemapSupport
// -----------------
class SourcemapSupport {
  // Parsed sourcemaps keyed by full script path, so each is parsed only once
  private readonly _sourcemapCache: Map<FullScriptPath, UrlAndMap> = new Map()
  // Private: instances are only created via `createSingletonInstance`
  private constructor (
    private readonly _cache: TranspileCache,
    private readonly _projectBaseDir: string,
  ) {}
  // This function is part of the V8 stack trace API, for more info see:
  // https://v8.dev/docs/stack-trace-api
  prepareStackTrace = (err: Error, stack: NodeJS.CallSite[]) => {
    const name = err.name ?? 'Error'
    const message = err.message ?? ''
    const errorString = `${name }: ${ message}`
    const state: StackPosition = {}
    const processedStack: string[] = []
    let includeCodeFrames = INCLUDE_CODE_FRAMES
    // Walk the stack bottom-up; `state` carries the previous frame's mapped
    // position into the next iteration via `nextPos`
    for (let i = stack.length - 1; i >= 0; i--) {
      const c = this.wrapCallSite(
        stack[i] as CallSite,
        state,
        includeCodeFrames,
      )
      if (includeCodeFrames) {
        // Keep trying to include some code until we succeeded once
        includeCodeFrames = c.codeFrames.length === 0
      }
      if (c.codeFrames != null) {
        for (const codeFrame of c.codeFrames.reverse()) {
          processedStack.push(`\n ${codeFrame}`)
        }
      }
      processedStack.push(`\n at ${ c}`)
      state.nextPos = state.curPos
    }
    state.curPos = state.nextPos = undefined
    return `${errorString}${processedStack.reverse().join('')}\n`
  }
  /**
   * Returns a clone of the call site whose file/line/column getters report
   * the mapped (original source) position when a sourcemap entry is found;
   * returns the frame unchanged when it has no file name.
   */
  wrapCallSite (
    frame: CallSite,
    state: StackPosition,
    includeCodeFrames: boolean,
  ): CallSite {
    const script = frame.getFileName()
    if (script != null) {
      const line = frame.getLineNumber()
      let column = frame.getColumnNumber() ?? 0
      // Compensate for the header older Node versions prepend to scripts
      // (see `headerLength` above)
      if (line === 1 && column > headerLength && !frame.isEval()) {
        column -= headerLength
      }
      // Special case which is impossible to map to anything
      if (line == null) return frame
      const pos = this.mapSourcePosition(
        { script, line, column },
        includeCodeFrames,
      )
      state.curPos = pos
      frame = cloneCallSite(frame)
      frame.getFileName = function getFileName () {
        return pos.source || pos.name || null
      }
      frame.getLineNumber = function getLineNumber () {
        return pos.line
      }
      frame.getColumnNumber = function getColumnNumber () {
        return pos.column + 1
      }
      frame.getScriptNameOrSourceURL = function getScriptNameOrSourceURL () {
        return pos.source || pos.name
      }
      frame.codeFrames = pos.codeFrames
      return frame
    }
    return frame
  }
  /**
   * Maps a generated position to its original source position via the
   * script's sourcemap, attaching code frames when requested.
   */
  mapSourcePosition (
    pos: SourcePosition,
    includeCodeFrames: boolean,
  ): MappedPositionWithCodeFrames {
    const sourceMap = this.retrieveSourceMap(pos.script)
    if (typeof sourceMap?.map?.originalPositionFor === 'function') {
      const origPos = sourceMap.map.originalPositionFor(pos)
      const codeFrames = includeCodeFrames
        ? extractCodeFrames(sourceMap.map, origPos)
        : []
      if (origPos.source != null) {
        origPos.source = this._ensureFullPath(origPos.source)
        return Object.assign(origPos, { codeFrames })
      }
    }
    // return generated position if we couldn't find the original
    const { line, column, script } = pos
    return {
      line,
      column,
      source: '',
      name: script,
      codeFrames: [],
    }
  }
  /**
   * Extracts an inlined sourcemap from the script's (cached) source and
   * caches the parsed result; returns the empty sentinel on any failure.
   */
  mapFromInlined (script: string): UrlAndMap {
    const scriptSource = this._cache.get(script)
    if (scriptSource == null) return EMPTY_URL_AND_MAP
    try {
      const converter = convertSourceMap.fromSource(scriptSource)
      if (converter == null) return EMPTY_URL_AND_MAP
      const map: RawSourceMap = converter.sourcemap
      const urlAndMap = { url: script, map: new SourceMapConsumer(map) }
      this._sourcemapCache.set(script, urlAndMap)
      return urlAndMap
    } catch (err) {
      // Invalid maps are tolerated — we log and fall back to the sentinel
      logError('Encountered invalid source map %s', script)
      logError(err)
      return EMPTY_URL_AND_MAP
    }
  }
  retrieveSourceMap (script: FullScriptPath) {
    // 1. Try to load previously cached source map
    const fromMemory = this._sourcemapCache.get(script)
    if (fromMemory != null) {
      logTrace('from memory sourcemap for "%s"', script)
      return fromMemory
    }
    // 2. Try to parse a source map out of the script
    // Only supporting our own TypeScript modules for now
    if (path.extname(script) !== '.ts') return EMPTY_URL_AND_MAP
    logTrace('retrieving sourcemap for %s', script)
    return this.mapFromInlined(script)
  }
  // Resolves relative sourcemap sources against the project base dir
  _ensureFullPath (p: string) {
    return path.isAbsolute(p) ? p : path.join(this._projectBaseDir, p)
  }
  private static _instance?: SourcemapSupport
  /**
   * Creates a [SourcemapSupport] instance unless one was created previously.
   * NOTE: that it is impossible for a process to have two instances and the
   * parameters the first one was created with will remain active for the process lifetime.
   */
  static createSingletonInstance (
    cache: TranspileCache,
    projectBaseDir: string,
  ): SourcemapSupport {
    if (SourcemapSupport._instance == null) {
      SourcemapSupport._instance = new SourcemapSupport(
        cache,
        projectBaseDir,
      )
    }
    return SourcemapSupport._instance
  }
}
// -----------------
// Utility Methods
// -----------------
//
// Creates a shallow copy of the given call site, wrapping every `is*`/`get*`
// accessor so it keeps operating on the original frame, and swapping in our
// own `toString` so mapped frames stringify like native ones.
function cloneCallSite (frame: CallSite): CallSite {
  const clone: Partial<CallSite> = {}
  const proto = Object.getPrototypeOf(frame)
  for (const key of Object.getOwnPropertyNames(proto)) {
    if (/^(?:is|get)/.test(key)) {
      clone[key] = function () {
        return frame[key].call(frame)
      }
    } else {
      clone[key] = frame[key]
    }
  }
  clone.toString = CallSiteToString
  return clone as CallSite
}
// Via source-map-support module
// This is copied almost verbatim from the V8 source code at
// https://code.google.com/p/v8/source/browse/trunk/src/messages.js. The
// implementation of wrapCallSite() used to just forward to the actual source
// code of CallSite.prototype.toString but unfortunately a new release of V8
// did something to the prototype chain and broke the shim. The only fix I
// could find was copy/paste.
/**
 * Renders a (wrapped) call site the way V8 renders stack trace lines.
 * Copied almost verbatim from the V8 source (see comment above) so that
 * mapped frames stringify exactly like native ones.
 */
function CallSiteToString (this: CallSite) {
  let fileName: string | undefined
  let fileLocation: string | undefined = ''
  if (this.isNative()) {
    fileLocation = 'native'
  } else {
    // TODO(thlorenz): may not be needed as this is for in browser callsites
    // @ts-ignore getScriptNameOrSourceURL exists only in the browser
    fileName = this.getScriptNameOrSourceURL()
    if (fileName == null && this.isEval()) {
      fileLocation = this.getEvalOrigin()
      fileLocation += ', ' // Expecting source position to follow.
    }
    if (fileName) {
      fileLocation += fileName
    } else {
      // Source code does not originate from a file and is not native, but we
      // can still get the source position inside the source string, e.g. in
      // an eval string.
      fileLocation += '<anonymous>'
    }
    const lineNumber = this.getLineNumber()
    if (lineNumber != null) {
      fileLocation += `:${ lineNumber}`
      let columnNumber = this.getColumnNumber()
      if (columnNumber) {
        fileLocation += `:${ columnNumber}`
      }
    }
  }
  let line = ''
  let addSuffix = true
  const functionName = this.getFunctionName()
  const isConstructor = this.isConstructor()
  const isMethodCall = !(this.isToplevel() || isConstructor)
  if (isMethodCall) {
    let typeName = this.getTypeName()
    // Fixes shim to be backward compatible with Node v0 to v4
    if (typeName === '[object Object]') {
      typeName = 'null'
    }
    const methodName = this.getMethodName()
    if (functionName) {
      if (typeName && functionName.indexOf(typeName) !== 0) {
        line += `${typeName }.`
      }
      line += functionName
      // Only append the `[as ...]` alias when the method name is not already
      // the suffix of the function name
      if (
        methodName &&
        functionName.indexOf(`.${ methodName}`) !==
          functionName.length - methodName.length - 1
      ) {
        line += ` [as ${ methodName }]`
      }
    } else {
      line += `${typeName }.${ methodName || '<anonymous>'}`
    }
  } else if (isConstructor) {
    line += `new ${ functionName || '<anonymous>'}`
  } else if (functionName) {
    line += functionName
  } else {
    // No name at all — the location alone is the whole line
    line += fileLocation
    addSuffix = false
  }
  if (addSuffix) {
    line += ` (${ fileLocation })`
  }
  return line
}
// Builds a small code frame — a few gutter-numbered source lines around
// `pos` plus a caret marking the column — from the sourcemap's embedded
// source content. Returns an empty array when no content is available.
function extractCodeFrames (
  map: SourceMapConsumer,
  pos: MappedPosition,
): string[] {
  const sourceContent = map.sourceContentFor(pos.source, true)
  if (sourceContent == null) return []
  // We adjusted lines to be 1 based (see mapSourcePosition)
  const lineIdx = pos.line - 1
  const allLines = sourceContent.split('\n')
  const beforeStart = Math.max(0, lineIdx - INCLUDE_CODE_BEFORE)
  const beforeEnd = Math.min(allLines.length, lineIdx + 1)
  const afterStart = Math.min(allLines.length, beforeEnd)
  const afterEnd = Math.min(allLines.length, afterStart + INCLUDE_CODE_AFTER)
  // Renders one source line prefixed with its right-aligned line number
  const renderLine = (start: number) => (x: string, idx: number) => {
    const lineGutter = (start + idx + 1)
      .toString()
      .padStart(CODE_FRAME_LINE_GUTTER_WIDTH)
    return `${lineGutter}: ${x}`
  }
  const frames = allLines
    .slice(beforeStart, beforeEnd)
    .map(renderLine(beforeStart))
  if (pos.column >= 0) {
    // Caret pointing at the column, offset past the gutter prefix
    frames.push(
      `${' '.repeat(CODE_FRAME_LINE_GUTTER_WIDTH + 1 + pos.column) }^`,
    )
  }
  return frames.concat(
    allLines.slice(afterStart, afterEnd).map(renderLine(afterStart)),
  )
}

View File

@@ -0,0 +1,121 @@
import type { Debugger } from 'debug'
import { TransformOptions, transformSync } from 'esbuild'
import type { TranspileCache } from './types'
import path from 'path'
import { installSourcemapSupport } from './sourcemap-support'
// Node's Module widened with the private APIs we rely on for hooking `.ts` loading
type EnhancedModule = NodeModule & {
  _extensions: Record<string, (mod: EnhancedModule, filename: string) => void>
  _compile: (code: string, filename: string) => unknown
  _cache: Record<string, NodeModule>
}
// esbuild options used for every transpile; callers can only override the
// tsconfig (merged in as `tsconfigRaw` at call time)
const DEFAULT_TRANSFORM_OPTS: TransformOptions = {
  target: ['node14.5'],
  loader: 'ts',
  format: 'cjs',
  sourcemap: 'inline',
  minify: false,
  supported: {
    // We won't be supporting dynamic imports since everything we're doing gets bundled into one snapshot anyway
    'dynamic-import': false,
  },
}
/**
 * Transpiles TypeScript source to CommonJS JavaScript via esbuild, serving
 * previous results from the transpile cache whenever possible and storing
 * fresh results back into it.
 */
function transpileTsCode (
  fullModuleUri: string,
  ts: string,
  cache: TranspileCache,
  projectBaseDir: string,
  // TODO: consider 'error' for importsNotUsedAsValues (maybe) to add some type checking
  tsconfig?: TransformOptions['tsconfigRaw'],
): string {
  installSourcemapSupport(cache, projectBaseDir)
  // Serve a previous (non-empty) transpile result from the cache if we can
  let cached: string | null = null
  if (cache != null) {
    cached = cache.get(fullModuleUri) || null
  }
  if (cached != null) return cached
  // Cache miss: transpile now
  const result = transformSync(ts, {
    ...DEFAULT_TRANSFORM_OPTS,
    tsconfigRaw: tsconfig,
    sourcefile: fullModuleUri,
  })
  // Remember the result for the next require of this module
  if (cache != null) {
    cache.add(fullModuleUri, result.code)
  }
  return result.code
}
/**
 * Hooks into `Module._extensions` in order to transpile TypeScript modules on the fly.
 *
 * @param Module the Node.js Module
 * @param projectBaseDir root of the project
 * @param log `debug` module logger to use
 * @param diagnosticsEnabled if `true` in case of a transpile/compile error the app breaks when run in the debugger
 * @param cache used to avoid re-transpiling modules that haven't changed since last transpile
 * @param tsconfig overrides tsconfig passed to esbuild
 *
 * @category Transpilation
 */
export function hookTranspileTs (
  Module: EnhancedModule,
  projectBaseDir: string,
  log: Debugger,
  diagnosticsEnabled: boolean,
  cache: TranspileCache,
  tsconfig?: TransformOptions['tsconfigRaw'],
) {
  installSourcemapSupport(cache, projectBaseDir)
  const defaultLoader = Module._extensions['.js']
  Module._extensions['.ts'] = function (mod: EnhancedModule, filename: string) {
    const origCompile = mod._compile
    // NOTE: I benchmarked bypassing the loader to avoid reading `code`
    // that goes unused in case the transpiled version is already in the cache.
    // That optimization does not make a notable difference and thus we opt for
    // the more robust approach of using the Node.js builtin compile which also
    // provides internal Node.js cache checks.
    mod._compile = (code: string) => {
      // Restore the original compile before invoking it on the transpiled code
      mod._compile = origCompile
      try {
        log('transpiling %s', path.relative(projectBaseDir, filename))
        const transpiled = transpileTsCode(
          filename,
          code,
          cache,
          projectBaseDir,
          tsconfig,
        )
        const compiled: NodeModule = mod._compile(
          transpiled,
          filename,
        ) as NodeModule
        return compiled
      } catch (err) {
        // eslint-disable-next-line no-console
        console.error(err)
        if (diagnosticsEnabled) {
          // eslint-disable-next-line no-debugger
          debugger
        }
        // Transpilation failed — fall back to compiling the raw code
        return mod._compile(code, filename)
      }
    }
    // Delegate to the default `.js` loader, which reads the file and ends up
    // calling our patched `_compile` above
    defaultLoader(mod, filename)
  }
}

View File

@@ -0,0 +1,168 @@
import type {
TransformOptions,
} from 'esbuild'
import type { SourceMapConsumer } from 'source-map-js'
// -----------------
// Loading/Require
// -----------------
// Signature of the `require` function handed to module definitions
type NodeRequireFunction = (id: string) => any
/**
 * The function that needs to be called in order to instantiate a Node.js module definition.
 * Invoking it results in a `NodeModule`. The `exports` and `module` will be initialized as if the module was being
 * `require`d.
 *
 * module definitions need to be provided to {import('./require').packherdRequire}
 *
 * @category Loader
 */
export type ModuleDefinition = (
  exports: NodeModule['exports'],
  module: { exports: NodeModule['exports'] },
  __filename: string,
  __dirname: string,
  require: NodeRequireFunction
) => NodeModule
/**
 * The result of attempting a requested `URI` to a full path.
 * @property resolved: indicates how the module was resolved
 * - 'module:node': resolved via Node.js module resolution which requires I/O
 * - 'module-uri:node': uri was already a full path
 * - 'module-fullpath:node': resolved relative to parent
 * - 'module-key:node': resolved via the module key derived for the uri
 * - 'cache:direct': resolved directly from `packherd` module export cache
 * - 'cache:node': resolved from cache after determining full path via Node.js
 * @property fullPath: full path to the resolved module
 *
 * @private
 * @category Loader
 */
export type ModuleResolveResult = {
  resolved:
    | 'module:node'
    | 'module-uri:node'
    | 'module-fullpath:node'
    | 'module-key:node'
    | 'cache:direct'
    | 'cache:node'
  fullPath: string
}
/**
 * Result of loading a module via packherd.
 *
 * @property exports: the `exports` of the module
 * @property origin: indicates how the exports were retrieved
 * - 'packherd:export': directly from the fully instantiated exports provided to packherd
 * - 'packherd:definition': by invoking one of the provided definitions
 * - 'Module._cache' loaded from Node.js module cache
 * - 'Module._load' by calling Node.js module load method which requires I/O
 *
 * @private
 * @category Loader
 */
export type ModuleLoadResult = ModuleResolveResult & {
  exports: NodeModule['exports']
  origin:
    | 'packherd:export'
    | 'packherd:definition'
    | 'Module._cache'
    | 'Module._load'
}
/**
 * The Node.js Module builtin including some private methods that we depend on in packherd.
 * @category Loader
 */
export type ModuleBuiltin = typeof import('module') & {
  // Resolves a module uri to a full path (private Node.js API)
  _resolveFilename(
    moduleUri: string,
    parent: NodeModule | undefined,
    isMain: boolean
  ): string
  // Loads a module (private Node.js API)
  _load(
    request: string,
    parent: NodeModule | undefined,
    isMain: boolean
  ): NodeModule
  // Node's module cache (private Node.js API)
  _cache: Record<string, NodeModule>
}
/**
* Predicate part of loader opts which specifies how to determine if a module should be reloaded
* even though it could be loaded from a cache.
*
* @category Loader
*/
export type ModuleNeedsReload = (
moduleId: string,
loadedModules: Set<string>,
moduleCache: Record<string, NodeModule>
) => boolean
// -----------------
// Transpilation
// -----------------
/**
* Interface to the cache used to store/retrieve transpiled TypeScript
*
* This interface matches DirtSimpleFileCache
*
* @category Transpilation */
export interface TranspileCache {
get(fullPath: string): string | undefined
addAsync(origFullPath: string, convertedContent: string): Promise<void>
add(origFullPath: string, convertedContent: string): void
clearSync(): void
}
/** @category Transpilation */
export type TranspileCacheOpts = {
cacheDir: string
keepInMemoryCache: boolean
}
/**
* Function that packherd calls in order to initialize the {@link TranspileCache}.
*
* @category Transpilation
*/
export type InitTranspileCache = (
projectBasedir: string,
opts?: Partial<TranspileCacheOpts>
) => TranspileCache | undefined
/**
* Transpile options
*
* @property tsconfig: passed to esbuild
* @property supportTS: when `true` Typescript will be transpiled, otherwise not
* @property initTranspileCache: called by packherd to init transpile cache
*
* @category Transpilation
*/
export type PackherdTranspileOpts = {
tsconfig?: TransformOptions['tsconfigRaw']
supportTS?: boolean
initTranspileCache?: InitTranspileCache
}
// -----------------
// SourcemapSupport
// -----------------
/**
* @private
* @category Sourcemap
*/
export type UrlAndMap = { url: string | null, map: SourceMapConsumer | null }
/**
* @category Sourcemap
*/
export type MapAndSourceContent = {
url: string
map: SourceMapConsumer
sourceContent: string
}

View File

@@ -0,0 +1,9 @@
// Mocha configuration: register the @packages/ts transpile hook, report through
// mocha-multi-reporters (config shared at the repo root), and in watch mode
// re-run when either the tests or the sources change.
module.exports = {
  require: '@packages/ts/register',
  reporter: 'mocha-multi-reporters',
  reporterOptions: {
    configFile: '../../mocha-reporter-config.json',
  },
  spec: 'test/**/*.spec.ts',
  watchFiles: ['test/**/*.ts', 'src/**/*.ts'],
}

View File

@@ -0,0 +1,12 @@
import { expect } from 'chai'

// NOTE: these relative paths only work from the ./dist folder
require('../test/fixtures/circular-deps/hook-require')
const result = require('../test/fixtures/circular-deps/lib/entry')

// Verifies that packherd served the entry module from the bundled definitions
// (origin 'definitions', not 'file') and that the circular foo/bar cycle in the
// fixture still produces sum = 2, so result = sum + sum = 4.
describe('Circular Dependency', () => {
  it('is properly processed', () => {
    expect(result.origin).to.equal('definitions')
    expect(result.result).to.equal(4)
  })
})

View File

@@ -0,0 +1,26 @@
// Test fixture: packherd module definitions exercising a circular dependency
// (foo requires bar, and bar requires foo back while foo is still initializing).
'use strict'

// Definition for './foo.js': requiring './bar' here runs bar's definition, which
// requires './foo' again, closing the cycle before `value`/`sum` are assigned.
function foo(exports, module, __filename, __dirname, require) {
  const bar = require('./bar')
  exports.value = 1
  exports.sum = bar + exports.value
}

// Definition for './bar.js': during the cycle it observes foo's
// partially-initialized exports, so `value` is still undefined here.
function bar(exports, module, __filename, __dirname, require) {
  const { value } = require('./foo')
  if (value > 1) {
    console.log('value of foo is too high')
  }
  module.exports = 1
}

// Definition for './entry.js': runs after the cycle has resolved, so
// foo.sum === 2 and result === 4. `origin` marks that this copy came from
// the definitions (the on-disk copies report origin 'file').
function entry(exports, module, __filename, __dirname, require) {
  const { sum } = require('./foo')
  module.exports = { result: sum + sum, origin: 'definitions' }
}

// Keyed by module path relative to the project base dir.
module.exports = {
  './foo.js': foo,
  './bar.js': bar,
  './entry.js': entry,
}

View File

@@ -0,0 +1,26 @@
const debug = require('debug')
const path = require('path')

const definitions = require('./definitions')
const { packherdRequire } = require('../../../')

const entryFile = require.resolve('./lib/entry')
const logDebug = debug('packherd:debug')

// Maps a requested module URI to the key used to look it up in `definitions`.
// Absolute URIs are additionally reported relative to the base dir; the key
// itself is just the URI as requested — TODO(review): confirm how this matches
// the './foo.js'-style keys in definitions.js.
function getModuleKey({ moduleUri, baseDir }) {
  const moduleRelativePath = path.isAbsolute(moduleUri)
    ? path.relative(baseDir, moduleUri)
    : moduleUri

  logDebug({ baseDir, moduleUri, moduleRelativePath })

  return {
    moduleKey: moduleUri,
    moduleRelativePath,
  }
}

// Hook Node's require so that modules under lib/ are served from `definitions`.
const projectBaseDir = path.dirname(entryFile)

packherdRequire(projectBaseDir, {
  diagnosticsEnabled: true,
  moduleDefinitions: definitions,
  getModuleKey,
})

View File

@@ -0,0 +1,5 @@
// Circular-dependency fixture: './foo' requires this module back while foo is
// still initializing, so `value` may be undefined here during the cycle.
const { value } = require('./foo')

if (value > 1) {
  console.log('value of foo is too high')
}

module.exports = 1

View File

@@ -0,0 +1,2 @@
// Fixture entry point: doubles foo's sum. `origin: 'file'` marks that this copy
// was loaded from disk (the packherd definitions copy reports 'definitions').
const { sum } = require('./foo')

module.exports = { result: sum + sum, origin: 'file' }

View File

@@ -0,0 +1,3 @@
// Requires './bar' (which requires this module back, forming the cycle),
// then exposes `value` and the derived `sum`.
const bar = require('./bar')

exports.value = 1
exports.sum = bar + exports.value

View File

@@ -0,0 +1 @@
// Manual smoke test: prints the fixture entry module's exports.
console.log(require('./lib/entry'))

View File

@@ -0,0 +1,12 @@
{
"name": "circular-deps",
"private": true,
"version": "0.0.0",
"description": "Example with circular module dependencies.",
"scripts": {
"start": "node -r ./hook-require.js main",
"debug": "node --inspect-brk -r ./hook-require.js main",
"ndb": "ndb -r ./hook-require.js main"
},
"license": "MIT"
}

View File

@@ -0,0 +1,22 @@
// Test fixture: packherd module definitions with a straightforward (acyclic)
// dependency graph: entry -> foo -> bar.
'use strict'

// Definition for './foo.js': combines bar's export with its own value.
function foo(exports, module, __filename, __dirname, require) {
  const bar = require('./bar')
  exports.value = 1
  exports.sum = bar + exports.value
}

// Definition for './bar.js': a leaf module with no dependencies.
function bar(exports, module, __filename, __dirname, require) {
  module.exports = 1
}

// Definition for './entry.js': sum === 2, so result === 4. `origin` marks that
// this copy came from the definitions (on-disk copies report origin 'file').
function entry(exports, module, __filename, __dirname, require) {
  const { sum } = require('./foo')
  module.exports = { result: sum + sum, origin: 'definitions' }
}

// Keyed by module path relative to the project base dir.
module.exports = {
  './foo.js': foo,
  './bar.js': bar,
  './entry.js': entry,
}

View File

@@ -0,0 +1,26 @@
const debug = require('debug')
const path = require('path')

const definitions = require('./definitions')
const { packherdRequire } = require('../../../')

const entryFile = require.resolve('./lib/entry')
const logDebug = debug('packherd:debug')

// Maps a requested module URI to the key used to look it up in `definitions`.
// Absolute URIs are additionally reported relative to the base dir; the key
// itself is just the URI as requested — TODO(review): confirm how this matches
// the './foo.js'-style keys in definitions.js.
function getModuleKey({ moduleUri, baseDir }) {
  const moduleRelativePath = path.isAbsolute(moduleUri)
    ? path.relative(baseDir, moduleUri)
    : moduleUri

  logDebug({ baseDir, moduleUri, moduleRelativePath })

  return {
    moduleKey: moduleUri,
    moduleRelativePath,
  }
}

// Hook Node's require so that modules under lib/ are served from `definitions`.
const projectBaseDir = path.dirname(entryFile)

packherdRequire(projectBaseDir, {
  diagnosticsEnabled: true,
  moduleDefinitions: definitions,
  getModuleKey,
})

View File

@@ -0,0 +1,5 @@
// NOTE(review): this mirrors the circular-deps bar.js — requiring './foo' from
// here makes the graph circular, which looks out of place in a "normal-deps"
// fixture (its definitions.js bar has no foo dependency). Confirm intent.
const { value } = require('./foo')

if (value > 1) {
  console.log('value of foo is too high')
}

module.exports = 1

View File

@@ -0,0 +1,2 @@
// Fixture entry point: doubles foo's sum. `origin: 'file'` marks that this copy
// was loaded from disk (the packherd definitions copy reports 'definitions').
const { sum } = require('./foo')

module.exports = { result: sum + sum, origin: 'file' }

View File

@@ -0,0 +1,3 @@
// Combines bar's export with this module's own value.
const bar = require('./bar')

exports.value = 1
exports.sum = bar + exports.value

View File

@@ -0,0 +1 @@
// Manual smoke test: prints the fixture entry module's exports.
console.log(require('./lib/entry'))

View File

@@ -0,0 +1,12 @@
{
"name": "normal-deps",
"private": true,
"version": "0.0.0",
"description": "Example with normal (non-circular) module dependencies.",
"scripts": {
"start": "node -r ./hook-require.js main",
"debug": "node --inspect-brk -r ./hook-require.js main",
"ndb": "ndb -r ./hook-require.js main"
},
"license": "MIT"
}

View File

@@ -0,0 +1,12 @@
import { expect } from 'chai'

// NOTE: these relative paths only work from the ./dist folder
require('../test/fixtures/normal-deps/hook-require')
const result = require('../test/fixtures/normal-deps/lib/entry')

// Verifies that packherd served the entry module from the bundled definitions
// (origin 'definitions', not 'file') and that sum = 2, so result = 4.
describe('Normal Dependency', () => {
  it('is properly processed', () => {
    expect(result.origin).to.equal('definitions')
    expect(result.result).to.equal(4)
  })
})

View File

@@ -0,0 +1,8 @@
{
"extends": "../ts/tsconfig.json",
"include": ["src"],
"compilerOptions": {
"lib": ["esnext"],
"outDir": "./dist",
},
}

View File

@@ -8,9 +8,9 @@
"clean": "rimraf 'lib/**/*.js'",
"clean-deps": "rimraf node_modules",
"run-mocha": "mocha -r @packages/ts/register -r test/pretest.ts --reporter mocha-multi-reporters --reporter-options configFile=../../mocha-reporter-config.json",
"test": "yarn run-mocha \"test/integration/*.spec.ts\" \"test/unit/**/*.spec.ts\"",
"test-integration": "yarn run-mocha \"test/integration/*.spec.ts\"",
"test-unit": "yarn run-mocha \"test/unit/**/*.spec.ts\""
"test": "CYPRESS_INTERNAL_ENV=test yarn run-mocha \"test/integration/*.spec.ts\" \"test/unit/**/*.spec.ts\"",
"test-integration": "CYPRESS_INTERNAL_ENV=test yarn run-mocha \"test/integration/*.spec.ts\"",
"test-unit": "CYPRESS_INTERNAL_ENV=test yarn run-mocha \"test/unit/**/*.spec.ts\""
},
"dependencies": {
"bluebird": "3.5.3",

View File

@@ -3,5 +3,13 @@ if (process.env.CYPRESS_INTERNAL_ENV === 'production') {
throw new Error(`${__filename} should only run outside of prod`)
}
require('@packages/ts/register')
if (require.name !== 'customRequire') {
// Purposefully make this a dynamic require so that it doesn't have the potential to get picked up by snapshotting mechanism
const hook = './hook'
const { hookRequire } = require(`@packages/server/${hook}-require`)
hookRequire(true)
}
require('../lib/threads/worker.ts')

View File

@@ -10,7 +10,7 @@
"clean": "rimraf ./src/*.js ./src/**/*.js ./src/**/**/*.js ./test/**/*.js || echo 'cleaned'",
"clean-deps": "rimraf node_modules",
"test": "yarn test-unit",
"test-unit": "mocha -r @packages/ts/register 'test/unit/**' --config ./test/.mocharc.js --exit"
"test-unit": "mocha -r @packages/ts/register 'test/unit/**' --config ./test/.mocharc.js --exit --reporter mocha-multi-reporters --reporter-options configFile=../../mocha-reporter-config.json"
},
"dependencies": {
"compare-versions": "4.1.3",

View File

@@ -76,3 +76,9 @@ Prepend `SNAPSHOT_UPDATE=1` to any test command. See [`snap-shot-it` instruction
SNAPSHOT_UPDATE=1 yarn test test/unit/api_spec.js
SNAPSHOT_UPDATE=1 yarn test test/integration/cli_spec.js
```
### V8 Snapshots
In order to improve start up time, Cypress uses [electron mksnapshot](https://github.com/electron/mksnapshot) for generating [v8 snapshots](https://v8.dev/blog/custom-startup-snapshots) for both development and production.
Cypress code is automatically set up to run using snapshots. If you want to run Cypress in development without the v8 snapshot (for debugging purposes or to see if there's a problem with the snapshot or the code itself) you can set the environment variable `DISABLE_SNAPSHOT_REQUIRE` to 1 or true.

View File

@@ -0,0 +1,14 @@
{
"development": {
"api_url": "http://localhost:1234/"
},
"test": {
"api_url": "http://localhost:1234/"
},
"staging": {
"api_url": "https://api-staging.cypress.io/"
},
"production": {
"api_url": "https://api.cypress.io/"
}
}

View File

@@ -1,21 +0,0 @@
default:
cdn_url: "https://cdn.cypress.io"
desktop_url: "https://download.cypress.io/desktop"
desktop_manifest_url: https://download.cypress.io/desktop.json
chromium_url: https://download.cypress.io/chromium
chromium_manifest_url: https://download.cypress.io/chromium.json
development: &development
api_url: "http://localhost:1234/"
on_url: "http://localhost:8080/"
test:
<<: *development
staging:
api_url: "https://api-staging.cypress.io/"
on_url: "https://on.cypress.io/"
production:
api_url: "https://api.cypress.io/"
on_url: "https://on.cypress.io/"

View File

@@ -0,0 +1,43 @@
// Installs the require hook for the Cypress server: either the v8-snapshot-backed
// require (fast path) or the plain @packages/ts register hook (fallback).
const path = require('path')

// 'prod' only when explicitly running the production build; anything else is 'dev'.
const env = process.env.CYPRESS_INTERNAL_ENV === 'production' ? 'prod' : 'dev'

// Default the project base dir to two directories up from this file.
process.env.PROJECT_BASE_DIR = process.env.PROJECT_BASE_DIR ?? path.join(__dirname, '..', '..')

const isDev = env === 'dev'

// Installs the snapshot-backed require hook.
// forceTypeScript: force TS transpilation even when the snapshot would not
// otherwise require it (see `supportTS` below).
function runWithSnapshot (forceTypeScript) {
  const { snapshotRequire } = require('@packages/v8-snapshot-require')
  const projectBaseDir = process.env.PROJECT_BASE_DIR
  // Transpile TS when forced, when no snapshot is loaded (`global.snapshotResult`
  // is injected by the v8 snapshot), or when the snapshot flags TS support.
  const supportTS = forceTypeScript || typeof global.snapshotResult === 'undefined' || global.supportTypeScript

  snapshotRequire(projectBaseDir, {
    diagnosticsEnabled: isDev,
    useCache: true,
    transpileOpts: {
      supportTS,
      // Cache transpiled output (on disk under node_modules/.dsfc and in memory),
      // only when TS transpilation is actually enabled.
      initTranspileCache: supportTS
        ? () => require('dirt-simple-file-cache').DirtSimpleFileCache.initSync(projectBaseDir, { cacheDir: path.join(projectBaseDir, 'node_modules', '.dsfc'), keepInMemoryCache: true })
        : undefined,
      tsconfig: {
        compilerOptions: {
          useDefineForClassFields: false, // default
          importsNotUsedAsValues: 'remove', // default
        },
      },
    },
  })
}

// Entry point: fall back to the plain TS register hook when snapshotting is
// explicitly disabled via DISABLE_SNAPSHOT_REQUIRE, or when no snapshot is
// present (`snapshotResult` global missing); otherwise use the snapshot.
const hookRequire = (forceTypeScript) => {
  if (['1', 'true'].includes(process.env.DISABLE_SNAPSHOT_REQUIRE) || typeof snapshotResult === 'undefined') {
    require('@packages/ts/register')
  } else {
    runWithSnapshot(forceTypeScript)
  }
}

module.exports = {
  hookRequire,
}

View File

@@ -1,46 +1,18 @@
// if running in production mode (CYPRESS_INTERNAL_ENV)
// all transpile should have been done already
// and these calls should do nothing
require('@packages/ts/register')
const { initializeStartTime } = require('./lib/util/performance_benchmark')
const { patchFs } = require('./lib/util/patch-fs')
const fs = require('fs')
const run = async () => {
initializeStartTime()
// prevent EMFILE errors
patchFs(fs)
if (require.name !== 'customRequire') {
// Purposefully make this a dynamic require so that it doesn't have the potential to get picked up by snapshotting mechanism
const hook = './hook'
// override tty if we're being forced to
require('./lib/util/tty').override()
const { hookRequire } = require(`${hook}-require`)
const electronApp = require('./lib/util/electron-app')
hookRequire(false)
}
// are we in the main node process or the electron process?
const isRunningElectron = electronApp.isRunning()
if (process.env.CY_NET_PROFILE && isRunningElectron) {
const netProfiler = require('./lib/util/net_profiler')()
process.stdout.write(`Network profiler writing to ${netProfiler.logPath}\n`)
await require('./server-entry')
}
require('./lib/unhandled_exceptions').handle()
process.env.UV_THREADPOOL_SIZE = 128
if (isRunningElectron) {
require('./lib/util/process_profiler').start()
}
// warn when deprecated callback apis are used in electron
// https://github.com/electron/electron/blob/master/docs/api/process.md#processenablepromiseapis
process.enablePromiseAPIs = process.env.CYPRESS_INTERNAL_ENV !== 'production'
// don't show any electron deprecation warnings in prod
process.noDeprecation = process.env.CYPRESS_INTERNAL_ENV === 'production'
// always show stack traces for Electron deprecation warnings
process.traceDeprecation = true
require('./lib/util/suppress_warnings').suppress()
module.exports = require('./lib/cypress').start(process.argv)
module.exports = run()

View File

@@ -19,9 +19,7 @@ const THIRTY_SECONDS = humanInterval('30 seconds')
const SIXTY_SECONDS = humanInterval('60 seconds')
const TWO_MINUTES = humanInterval('2 minutes')
let intervals
let DELAYS = [
const DELAYS = process.env.API_RETRY_INTERVALS ? process.env.API_RETRY_INTERVALS.split(',').map(_.toNumber) : [
THIRTY_SECONDS,
SIXTY_SECONDS,
TWO_MINUTES,
@@ -34,16 +32,6 @@ const runnerCapabilities = {
let responseCache = {}
intervals = process.env.API_RETRY_INTERVALS
if (intervals) {
DELAYS = _
.chain(intervals)
.split(',')
.map(_.toNumber)
.value()
}
const rp = request.defaults((params, callback) => {
let resp

View File

@@ -1,9 +1,9 @@
import _ from 'lodash'
import UrlParse from 'url-parse'
import konfig from '../konfig'
const apiUrl = konfig('api_url')
const app_config = require('../../config/app.json')
const apiUrl = app_config[process.env.CYPRESS_CONFIG_ENV || process.env.CYPRESS_INTERNAL_ENV || 'development'].api_url
const DASHBOARD_ENDPOINTS = {
api: '',
auth: 'auth',

View File

@@ -0,0 +1,27 @@
<html>
  <body>
    <script>
      // Gathers and sends localStorage and sessionStorage via postMessage to the Cypress frame.
      // Detect existence of local/session storage with JSON.stringify(...).length since
      // localStorage.length may not be accurate: an empty storage serializes to '{}'
      // (length 2), so anything longer means there is data to forward.
      const _localStorageStr = JSON.stringify(window.localStorage)
      const _localStorage = _localStorageStr.length > 2 && JSON.parse(_localStorageStr)
      const _sessionStorageStr = JSON.stringify(window.sessionStorage)
      // Parse the string captured above. (Previously this re-serialized
      // window.sessionStorage a second time — redundant work that could also
      // diverge from the length check if storage mutated between the two reads.)
      const _sessionStorage = _sessionStorageStr.length > 2 && JSON.parse(_sessionStorageStr)

      const value = {}

      if (_localStorage) {
        value.localStorage = _localStorage
      }

      if (_sessionStorage) {
        value.sessionStorage = _sessionStorage
      }

      // Hand the collected storage back to the parent (Cypress) frame.
      window.parent.postMessage({
        value,
        type: 'localStorage',
      }, '*')
    </script>
  </body>
</html>

View File

@@ -0,0 +1,36 @@
<html>
  <body>
    <script>
      // Receives local/session storage data from the parent (Cypress) frame via
      // postMessage, writes it into this origin's storage, then reports completion.
      window.onmessage = function (event) {
        const msg = event.data

        if (msg.type === 'set:storage:data') {
          const { data } = msg

          // Applies one storage payload ({ clear, value }) to the storage named
          // by `type` ('localStorage' or 'sessionStorage').
          const setData = (storageData, type) => {
            if (!storageData) return

            const { clear, value } = storageData

            if (clear) {
              window[type].clear()
            }

            if (value) {
              Object.keys(value).forEach((key) => {
                window[type].setItem(key, value[key])
              })
            }
          }

          setData(data.localStorage, 'localStorage')
          setData(data.sessionStorage, 'sessionStorage')

          window.parent.postMessage({ type: 'set:storage:complete' }, '*')
        }
      }

      // Signal the parent frame that this page is ready to receive storage data.
      window.parent.postMessage({ type: 'set:storage:load' }, '*')
    </script>
  </body>
</html>

View File

@@ -1,36 +0,0 @@
const path = require('path')
require('./environment')
const konfig = require('konfig')
const pathToConfigDir = path.resolve(__dirname, '..', 'config')
const getConfig = function () {
const { env } = process
// backup previous env
const previousNodeEnv = env.NODE_ENV
const previousNodeEnvExisted = env.hasOwnProperty('NODE_ENV')
// we want to set node env to cypress env
// and then restore it back to the previous
env.NODE_ENV = env.CYPRESS_KONFIG_ENV || env.CYPRESS_INTERNAL_ENV
// get the config values
const config = konfig({ path: pathToConfigDir }).app
// restore NODE_ENV to previous state
if (previousNodeEnvExisted) {
env.NODE_ENV = previousNodeEnv
} else {
delete env.NODE_ENV
}
// return the config getter function
return (getter) => {
return config[getter]
}
}
module.exports = getConfig()

View File

@@ -13,6 +13,8 @@ import { globalPubSub, getCtx, clearCtx } from '@packages/data-context'
import type { WebContents } from 'electron'
import type { LaunchArgs, Preferences } from '@packages/types'
import { debugElapsedTime } from '../util/performance_benchmark'
import debugLib from 'debug'
import { getPathToDesktopIndex } from '@packages/resolve-dist'
@@ -192,6 +194,8 @@ export = {
})
})
debugElapsedTime('open mode ready')
return this.ready(options, port)
},
}

View File

@@ -24,6 +24,7 @@ import * as objUtils from '../util/obj_utils'
import type { SpecWithRelativeRoot, SpecFile, TestingType, OpenProjectLaunchOpts, FoundBrowser, BrowserVideoController, VideoRecording, ProcessOptions } from '@packages/types'
import type { Cfg } from '../project-base'
import type { Browser } from '../browsers/types'
import { debugElapsedTime } from '../util/performance_benchmark'
import * as printResults from '../util/print-run'
type SetScreenshotMetadata = (data: TakeScreenshotProps) => void
@@ -1058,10 +1059,12 @@ export async function run (options, loading: Promise<void>) {
debug('all BrowserWindows closed, not exiting')
})
debugElapsedTime('run mode ready')
await app.whenReady()
}
await loading
try {
return ready(options)
} catch (e) {

View File

@@ -237,11 +237,6 @@ const events = {
'test:before:run': mergeRunnable('test:before:run'), // our own custom event
}
const reporters = {
teamcity: 'mocha-teamcity-reporter',
junit: 'mocha-junit-reporter',
}
class Reporter {
constructor (reporterName = 'spec', reporterOptions = {}, projectRoot) {
if (!(this instanceof Reporter)) {
@@ -498,16 +493,22 @@ class Reporter {
}
static loadReporter (reporterName, projectRoot) {
let p; let r
let p
debug('trying to load reporter:', reporterName)
r = reporters[reporterName]
if (r) {
// Explicitly require this here (rather than dynamically) so that it gets included in the v8 snapshot
if (reporterName === 'teamcity') {
debug(`${reporterName} is built-in reporter`)
return require(r)
return require('mocha-teamcity-reporter')
}
// Explicitly require this here (rather than dynamically) so that it gets included in the v8 snapshot
if (reporterName === 'junit') {
debug(`${reporterName} is built-in reporter`)
return require('mocha-junit-reporter')
}
if (mochaReporters[reporterName]) {

View File

@@ -58,72 +58,16 @@ export const createRoutesE2E = ({
})
routesE2E.get(`/${config.namespace}/automation/getLocalStorage`, (req, res) => {
// gathers and sends localStorage and sessionStorage via postMessage to the Cypress frame
// detect existence of local/session storage with JSON.stringify(...).length since localStorage.length may not be accurate
res.send(`<html><body><script>(${(function () {
const _localStorageStr = JSON.stringify(window.localStorage)
const _localStorage = _localStorageStr.length > 2 && JSON.parse(_localStorageStr)
const _sessionStorageStr = JSON.stringify(window.sessionStorage)
const _sessionStorage = _sessionStorageStr.length > 2 && JSON.parse(JSON.stringify(window.sessionStorage))
const value = {} as any
if (_localStorage) {
value.localStorage = _localStorage
}
if (_sessionStorage) {
value.sessionStorage = _sessionStorage
}
window.parent.postMessage({
value,
type: 'localStorage',
}, '*')
}).toString()})()</script></body></html>`)
res.sendFile(path.join(__dirname, './html/get-local-storage.html'))
})
/* eslint-disable no-undef */
routesE2E.get(`/${config.namespace}/automation/setLocalStorage`, (req, res) => {
const origin = req.originalUrl.slice(req.originalUrl.indexOf('?') + 1)
networkProxy.http.getRenderedHTMLOrigins()[origin] = true
res.send(`<html><body><script>(${(function () {
window.onmessage = function (event) {
const msg = event.data
if (msg.type === 'set:storage:data') {
const { data } = msg
const setData = (storageData, type) => {
if (!storageData) return
const { clear, value } = storageData
if (clear) {
// @ts-ignore
window[type].clear()
}
if (value) {
Object.keys(value).forEach((key) => {
// @ts-ignore
window[type].setItem(key, value[key])
})
}
}
setData(data.localStorage, 'localStorage')
setData(data.sessionStorage, 'sessionStorage')
window.parent.postMessage({ type: 'set:storage:complete' }, '*')
}
}
window.parent.postMessage({ type: 'set:storage:load' }, '*')
}).toString()})()</script></body></html>`)
res.sendFile(path.join(__dirname, './html/set-local-storage.html'))
})
/* eslint-enable no-undef */
routesE2E.get(`/${config.namespace}/source-maps/:id.map`, (req, res) => {
networkProxy.handleSourceMapRequest(req, res)

View File

@@ -552,8 +552,11 @@ export class SocketBase {
})
if (this.supportsRunEvents) {
socket.on('plugins:before:spec', async (spec) => {
await runEvents.execute('before:spec', {}, spec)
socket.on('plugins:before:spec', (spec) => {
runEvents.execute('before:spec', {}, spec).catch((error) => {
socket.disconnect()
throw error
})
})
}

View File

@@ -92,7 +92,7 @@ module.exports = {
'expected CYPRESS_INTERNAL_ENV, found', env.CYPRESS_INTERNAL_ENV)
// allow overriding the app_data folder
let folder = env.CYPRESS_KONFIG_ENV || env.CYPRESS_INTERNAL_ENV
let folder = env.CYPRESS_CONFIG_ENV || env.CYPRESS_INTERNAL_ENV
if (process.env.CYPRESS_INTERNAL_E2E_TESTING_SELF) {
folder = `${folder}-e2e-test`

View File

@@ -0,0 +1,24 @@
const Debug = require('debug')
const debug = Debug('cypress:server:performance-benchmark')
function threeDecimals (n) {
return Math.round(n * 1000) / 1000
}
const initializeStartTime = () => {
// This needs to be a global since this file is included inside of and outside of the v8 snapshot
global.cypressServerStartTime = performance.now()
}
const debugElapsedTime = (event) => {
const now = performance.now()
const delta = now - global.cypressServerStartTime
debug(`elapsed time at ${event}: ${threeDecimals(delta)}ms`)
}
module.exports = {
initializeStartTime,
debugElapsedTime,
}

View File

@@ -38,7 +38,6 @@
"black-hole-stream": "0.0.1",
"bluebird": "3.7.2",
"bundle-require": "3.0.4",
"chai": "1.10.0",
"chalk": "2.4.2",
"check-more-types": "2.24.0",
"chokidar": "3.5.1",
@@ -55,6 +54,7 @@
"data-uri-to-buffer": "2.0.1",
"dayjs": "^1.9.3",
"debug": "^4.3.2",
"dirt-simple-file-cache": "^0.4.0",
"duplexify": "4.1.1",
"electron-context-menu": "3.1.1",
"errorhandler": "1.5.1",
@@ -77,7 +77,6 @@
"is-html": "2.0.0",
"jimp": "0.14.0",
"jsonlint": "1.6.3",
"konfig": "0.2.1",
"launch-editor": "2.3.0",
"lazy-ass": "1.6.0",
"lockfile": "1.0.4",
@@ -154,6 +153,7 @@
"@types/mime": "3.0.1",
"@types/node": "14.14.31",
"babel-loader": "8.1.0",
"chai": "1.10.0",
"chai-as-promised": "7.1.1",
"chai-subset": "1.6.0",
"chai-uuid": "1.0.6",
@@ -193,7 +193,9 @@
"files": [
"config",
"lib",
"patches"
"patches",
"server-entry.js",
"hook-require.js"
],
"types": "index.d.ts",
"productName": "Cypress",

View File

@@ -0,0 +1,41 @@
// Server process entry: applies process-level patches and global settings before
// starting Cypress. Statement order matters (e.g. fs must be patched before
// anything else opens files), so keep these side effects in this sequence.
const { patchFs } = require('./lib/util/patch-fs')
const fs = require('fs')

// prevent EMFILE errors
patchFs(fs)

// override tty if we're being forced to
require('./lib/util/tty').override()

const electronApp = require('./lib/util/electron-app')

// are we in the main node process or the electron process?
const isRunningElectron = electronApp.isRunning()

// Optional network profiling, only meaningful inside the Electron process.
if (process.env.CY_NET_PROFILE && isRunningElectron) {
  const netProfiler = require('./lib/util/net_profiler')()

  process.stdout.write(`Network profiler writing to ${netProfiler.logPath}\n`)
}

require('./lib/unhandled_exceptions').handle()

// Set the libuv threadpool size to 128 for this process and its children.
process.env.UV_THREADPOOL_SIZE = 128

if (isRunningElectron) {
  require('./lib/util/process_profiler').start()
}

// warn when deprecated callback apis are used in electron
// https://github.com/electron/electron/blob/master/docs/api/process.md#processenablepromiseapis
process.enablePromiseAPIs = process.env.CYPRESS_INTERNAL_ENV !== 'production'

// don't show any electron deprecation warnings in prod
process.noDeprecation = process.env.CYPRESS_INTERNAL_ENV === 'production'

// always show stack traces for Electron deprecation warnings
process.traceDeprecation = true

require('./lib/util/suppress_warnings').suppress()

// Kick off Cypress; the module's export is the startup promise.
module.exports = require('./lib/cypress').start(process.argv)

View File

@@ -1,9 +1,15 @@
require('../../spec_helper')
const { apiRoutes } = require('../../../lib/cloud/routes')
describe('lib/cloud/routes', () => {
const routes = () => {
delete require.cache[require.resolve(`../../../lib/cloud/routes`)]
return require(`../../../lib/cloud/routes`)
}
describe('api routes', () => {
const apiRoutes = routes().apiRoutes
it('api', () => {
expect(apiRoutes.api()).to.eq('http://localhost:1234/')
})
@@ -44,4 +50,54 @@ describe('lib/cloud/routes', () => {
expect(apiRoutes.exceptions()).to.eq('http://localhost:1234/exceptions')
})
})
describe('api url', () => {
  // Snapshots of the env vars the tests below mutate; restored in afterEach.
  let oldCypressInternalEnv
  let oldCypressConfigEnv

  // Restores an env var to its previous state. Assigning `undefined` to
  // `process.env` would store the string 'undefined', so delete instead.
  const restoreEnv = (key, oldValue) => {
    if (oldValue === undefined) {
      delete process.env[key]
    } else {
      process.env[key] = oldValue
    }
  }

  beforeEach(() => {
    oldCypressInternalEnv = process.env.CYPRESS_INTERNAL_ENV
    oldCypressConfigEnv = process.env.CYPRESS_CONFIG_ENV
  })

  afterEach(() => {
    restoreEnv('CYPRESS_INTERNAL_ENV', oldCypressInternalEnv)
    // CYPRESS_CONFIG_ENV is also mutated below; restore it so it cannot leak
    // into subsequent tests.
    restoreEnv('CYPRESS_CONFIG_ENV', oldCypressConfigEnv)
  })

  it('supports development environment', () => {
    process.env.CYPRESS_INTERNAL_ENV = 'development'

    expect(routes().apiRoutes.api()).to.eq('http://localhost:1234/')
  })

  it('supports staging environment', () => {
    process.env.CYPRESS_INTERNAL_ENV = 'staging'

    expect(routes().apiRoutes.api()).to.eq('https://api-staging.cypress.io/')
  })

  it('supports production environment', () => {
    process.env.CYPRESS_INTERNAL_ENV = 'production'

    expect(routes().apiRoutes.api()).to.eq('https://api.cypress.io/')
  })

  it('supports test environment', () => {
    process.env.CYPRESS_INTERNAL_ENV = 'test'

    expect(routes().apiRoutes.api()).to.eq('http://localhost:1234/')
  })

  it('defaults to development', () => {
    // `delete` rather than `= undefined`: assigning undefined stores the
    // truthy string 'undefined', which would be used as a (missing) config
    // key instead of falling back to 'development'.
    delete process.env.CYPRESS_CONFIG_ENV
    delete process.env.CYPRESS_INTERNAL_ENV

    expect(routes().apiRoutes.api()).to.eq('http://localhost:1234/')
  })

  it('honors CYPRESS_CONFIG_ENV', () => {
    process.env.CYPRESS_CONFIG_ENV = 'staging'
    process.env.CYPRESS_INTERNAL_ENV = 'test'

    expect(routes().apiRoutes.api()).to.eq('https://api-staging.cypress.io/')
  })
})
})

View File

@@ -1,84 +0,0 @@
require('../spec_helper')
describe('lib/konfig', () => {
beforeEach(function () {
this.env = process.env['CYPRESS_INTERNAL_ENV']
this.setup = (env) => {
process.env['CYPRESS_INTERNAL_ENV'] = env
this.konfig = require(`../../lib/konfig`)
this.eq = (key, val) => {
expect(this.konfig(key)).to.eq(val)
}
}
})
afterEach(function () {
process.env['CYPRESS_INTERNAL_ENV'] = this.env
return delete require.cache[require.resolve(`../../lib/konfig`)]
})
it('does not set global.config', () => {
delete global.config
delete require.cache[require.resolve(`../../lib/konfig`)]
require(`../../lib/konfig`)
expect(global.config).not.to.be.ok
})
it('memoizes the result', () => {
process.env['NODE_ENV'] = 'development'
const config = require(`../../lib/konfig`)
process.env['NODE_ENV'] = 'test'
const config2 = require(`../../lib/konfig`)
expect(config).to.eq(config2)
})
it('does not add NODE_ENV to process env if input env did not contain one', () => {
const env = process.env['NODE_ENV']
delete process.env['NODE_ENV']
delete require.cache[require.resolve(`../../lib/konfig`)]
expect(process.env.hasOwnProperty('NODE_ENV')).to.eq(false)
require(`../../lib/konfig`)
expect(process.env.hasOwnProperty('NODE_ENV')).to.eq(false)
process.env['NODE_ENV'] = env
})
context('development', () => {
beforeEach(function () {
return this.setup('development')
})
it('api_url', function () {
return this.eq('api_url', 'http://localhost:1234/')
})
})
context('test', () => {
beforeEach(function () {
return this.setup('test')
})
it('api_url', function () {
return this.eq('api_url', 'http://localhost:1234/')
})
})
context('production', () => {
beforeEach(function () {
return this.setup('production')
})
it('api_url', function () {
return this.eq('api_url', 'https://api.cypress.io/')
})
})
})

View File

@@ -7,34 +7,38 @@ const path = require('path')
// built Electron app without ts-node hook. Assume the
// build has been done correctly
module.exports = function (scopeDir) {
try {
// Only set up ts-node if we're not using the snapshot
// @ts-ignore snapshotResult is a global defined in the v8 snapshot
if (['1', 'true'].includes(process.env.DISABLE_SNAPSHOT_REQUIRE) || typeof snapshotResult === 'undefined') {
try {
// Prevent double-compiling if we're testing the app and already have ts-node hook installed
// TODO(tim): e2e testing does not like this, I guess b/c it's currently using the tsconfig
// for the app project?
if (!process.env.CYPRESS_INTERNAL_E2E_TESTING_SELF) {
debug('registering ts-node on directory', scopeDir)
const tsNode = require('ts-node')
// register TypeScript Node require hook
// https://github.com/TypeStrong/ts-node#programmatic-usage
const project = require('path').join(__dirname, 'tsconfig.json')
if (!process.env.CYPRESS_INTERNAL_E2E_TESTING_SELF) {
debug('registering ts-node on directory', scopeDir)
const tsNode = require('ts-node')
// register TypeScript Node require hook
// https://github.com/TypeStrong/ts-node#programmatic-usage
const project = require('path').join(__dirname, 'tsconfig.json')
process.env.TS_CACHED_TRANSPILE_CACHE = path.join(__dirname, 'node_modules', '.ts-cache')
process.env.TS_CACHED_TRANSPILE_CACHE = path.join(__dirname, 'node_modules', '.ts-cache')
tsNode.register({
compiler: 'typescript-cached-transpile',
project,
transpileOnly: true,
preferTsExts: true, // Helps when the files are compiled locally, resolves the TS file
scope: Boolean(scopeDir),
scopeDir,
})
} else {
debug('skipping ts-node registration while testing the app')
tsNode.register({
compiler: 'typescript-cached-transpile',
project,
transpileOnly: true,
preferTsExts: true, // Helps when the files are compiled locally, resolves the TS file
scope: Boolean(scopeDir),
scopeDir,
})
} else {
debug('skipping ts-node registration while testing the app')
}
// do we need to prevent any other TypeScript hooks?
} catch (e) {
// continue running without TypeScript require hook
debug('Running without ts-node hook in environment "%s"', process.env.CYPRESS_INTERNAL_ENV)
}
// do we need to prevent any other TypeScript hooks?
} catch (e) {
// continue running without TypeScript require hook
debug('Running without ts-node hook in environment "%s"', process.env.CYPRESS_INTERNAL_ENV)
}
}

View File

@@ -0,0 +1 @@
cache/

View File

@@ -0,0 +1,89 @@
## @packages/v8-snapshot-require
Tool to load a snapshot for Electron applications that was created by `@tooling/v8-snapshot`.
**Table of Contents**
- [Features](#features)
- [Loading From Snapshot](#loading-from-snapshot)
- [Resolver Map](#resolver-map)
- [Examples](#examples)
- [Debugging and Diagnosing](#debugging-and-diagnosing)
## Features
`@packages/v8-snapshot-require` provides modules snapshotted by `@tooling/v8-snapshot` to `@packages/packherd-require`
and helps in locating modules to load from the snapshot by deriving their keys from information about each
module provided by packherd.
## Loading From Snapshot
In order to facilitate loading from the snapshot, `@packages/v8-snapshot-require` ties into the
`@packages/packherd-require` resolution mechanism in order to help it obtain the _key_ to locate
a fully initialized module _exports_ or its _definition_ from the snapshotted Object that
`@packages/v8-snapshot-require` also provides during `@packages/packherd-require` initialization
inside the [snapshot-require][snapshot-require] setup.
It uses the [resolver-map][resolver-map] in order to resolve modules without querying the file
system.
Once v8-snapshot provides this key to packherd it then tries to first load a fully initialized
version of the module, aka _exports_, falling back to a function which will initialize it, aka
_definitions_ and only finally loads it from the file system via Node.js.
Most of that logic lives inside `@packages/packherd-require` and it is recommended to read its
documentation.
### Resolver Map
The resolver map is constructed from metadata that [esbuild-snap][esbuild-snap] produces as
a side effect of bundling the application's dependencies and optionally the app's modules.
The keys of this map are the directory relative to the project base dir, from which a module
was resolved, concatenated with the import request string (separated by `'***'`). The value
is the fully resolved path relative to the project base dir.
This map is embedded into the snapshot and used for fast module key resolution: a module's
key is resolved via the [getModuleKey function][getModuleKey-code].
## Examples
In order to learn how to orchestrate snapshot creation and loading please have a look at the
examples provided with this app, for instance:
- [example-express/snapshot/install-snapshot.js](https://github.com/cypress-io/cypress/blob/develop/system-tests/projects/v8-snapshot/example-express/snapshot/install-snapshot.js)
- [example-express/app/hook-require.js](https://github.com/cypress-io/cypress/blob/develop/system-tests/projects/v8-snapshot/example-express/app/hook-require.js)
## Debugging and Diagnosing
In order to gain insight into how the modules are loaded please set the
`DEBUG=(cypress:pack|cypress:snap)*` which will cause the tool to emit a wealth of
information part of which will provide insight into how many modules were initialized
from the snapshot and which weren't.
```js
cypress:packherd:debug { exportHits: 20, definitionHits: 8, misses: 3 }
```
It will also provide information about what it encountered inside the snapshot, namely the
number of:
- `exports` modules that are fully initialized inside the snapshot
- `definitions` functions that will return `module.exports` when invoked
NOTE: that `definitions` and `exports` overlap as a module's definition is always included even
if its export is included as well.
Thus the below means that we have `12` modules that are included fully initialized and `6 (18 - 12)` that aren't.
```
exports: 12
definitions: 18
```
[getModuleKey-code]:https://github.com/cypress-io/cypress/blob/develop/packages/v8-snapshot-require/src/snapshot-require.ts#L45
[resolver-map]:https://github.com/cypress-io/cypress/blob/develop/tooling/v8-snapshot/src/snapshot-generator.ts#L126
[snapshot-require]:https://github.com/cypress-io/cypress/blob/develop/packages/v8-snapshot-require/src/snapshot-require.ts#L187
[esbuild-snap]:https://github.com/cypress-io/esbuild/tree/thlorenz/snap

View File

@@ -0,0 +1,32 @@
{
"name": "@packages/v8-snapshot-require",
"version": "0.0.0-development",
"description": "Various utils related to creating v8 snapshots for electron apps.",
"private": true,
"main": "dist/snapshot-require.js",
"scripts": {
"build": "tsc",
"build-prod": "yarn build",
"check-ts": "tsc --noEmit && yarn -s tslint",
"clean-deps": "rimraf node_modules",
"clean": "rimraf dist",
"test": "yarn test-unit",
"test-unit": "mocha --config ./test/.mocharc.js",
"tslint": "tslint --config ../ts/tslint.json --project .",
"watch": "tsc --watch"
},
"dependencies": {
"@packages/packherd-require": "0.0.0-development",
"debug": "^4.1.1",
"source-map-js": "^0.6.2"
},
"devDependencies": {
"mocha": "7.0.1",
"rimraf": "^3.0.2"
},
"files": [
"dist",
"src/snapshot-require.ts"
],
"types": "src/snapshot-require.ts"
}

View File

@@ -0,0 +1,4 @@
/**
 * Name under which the snapshotted file will appear in tools like DevTools.
 *
 * NOTE(review): presumably used as the display filename when the snapshot
 * script is evaluated — confirm at the usage sites of this constant.
 */
export const EMBEDDED = '<embedded>'

View File

@@ -0,0 +1,220 @@
import path from 'path'
import type { DependencyMapArray, DependencyNode } from './types'
import { strict as assert } from 'assert'
// Memoizes path.resolve results; keyed by base dir AND key (see below).
const resolvedPathCache = new Map()

/**
 * Resolves `key` against `projectBaseDir`, memoizing the result.
 *
 * The cache key incorporates the base dir so that resolutions performed for
 * different project roots never collide. (Previously the cache was keyed on
 * `key` alone, which would return a stale path if this helper were ever
 * invoked with two different base dirs in the same process.)
 *
 * @param projectBaseDir the root of the project the map is for
 * @param key the (possibly relative) path to resolve
 * @returns the absolute, native-slashed resolved path
 */
const getResolvedPathForKey = (projectBaseDir: string, key: string) => {
  // '\0' cannot occur in a path, so this compound key is unambiguous
  const cacheKey = `${projectBaseDir}\u0000${key}`
  let resolvedPath = resolvedPathCache.get(cacheKey)

  if (resolvedPath == null) {
    resolvedPath = path.resolve(projectBaseDir, key)
    resolvedPathCache.set(cacheKey, resolvedPath)
  }

  return resolvedPath
}
/**
 * Rebuilds the dependency map from the array representation that was embedded
 * into the snapshot.
 *
 * @param arr array representation of the dependency map
 * @param projectBaseDir the root of the project the map is for
 * @returns a {@link Map} from resolved module path to its {@link DependencyNode}
 */
function dependencyArrayToResolvedMap (
  arr: DependencyMapArray,
  projectBaseDir: string,
) {
  // NOTE: path.resolve guarantees that map keys/values are native slashed
  // even though the embedded dependency map array always uses forward slashes
  const resolve = (p: string) => getResolvedPathForKey(projectBaseDir, p)
  const map: Map<string, DependencyNode> = new Map()

  for (const [key, deps] of arr) {
    map.set(resolve(key), {
      directDeps: new Set(deps.directDeps.map(resolve)),
      allDeps: new Set(deps.allDeps.map(resolve)),
    })
  }

  return map
}
/**
 * Wraps a {@link Map} of dependencies and adds methods to query it.
 *
 * Keys and values are absolute, native-slashed module paths (produced via
 * `path.resolve` in `dependencyArrayToResolvedMap`).
 */
export class DependencyMap {
  /**
   * Creates an instance of a {@link DependencyMap}.
   *
   * @param dependencyMap the mapped dependencies
   */
  constructor (private readonly dependencyMap: Map<string, DependencyNode>) {}

  /**
   * Get all dependencies of a particular module.
   *
   * @param nodeId the id of the module
   * @throws if the module is not in the map
   */
  allDepsOf (nodeId: string) {
    const node = this.dependencyMap.get(nodeId)

    assert(node != null, `Node with ${nodeId} needs to be in map`)

    return Array.from(node.allDeps)
  }

  /**
   * Get all direct dependencies of a particular module, meaning the `import`
   * or `require` for those deps are found inside the module.
   *
   * @param nodeId the id of the module
   * @throws if the module is not in the map
   */
  directDepsOf (nodeId: string) {
    const node = this.dependencyMap.get(nodeId)

    assert(node != null, `Node with ${nodeId} needs to be in map`)

    return Array.from(node.directDeps)
  }

  /**
   * Determines if a particular module is inside the set of loaded modules, but
   * not inside the Node.js module cache.
   *
   * When this returns `true` the module was removed from the cache (e.g. to
   * force a reload) after we tracked it as loaded.
   *
   * @param id the module id
   * @param loaded the set of loaded modules
   * @param cache the Node.js module cache
   */
  loadedButNotCached (
    id: string,
    loaded: Set<string>,
    cache: Record<string, NodeModule>,
  ) {
    if (!loaded.has(id)) return false

    return cache[id] == null
  }

  /**
   * Determines if a critical dependency of the given module is inside the
   * Node.js module cache, but not inside the loaded set.
   *
   * @param id the module id
   * @param loaded the set of loaded modules
   * @param cache the Node.js module cache
   */
  criticalDependencyLoadedButNotCached (
    id: string,
    loaded: Set<string>,
    cache: Record<string, NodeModule>,
  ) {
    assert(cache[id] == null, 'Should not query for modules that are in cache')
    const node = this.dependencyMap.get(id)

    // Shouldn't be invoked for with a module that isn't in the snapshot, since then it wouldn't
    // be in snapshot exports either
    assert(
      node != null,
      `should not check dependencies that are not inside the snapshot: ${id}`,
    )

    // 1. Determine if any of direct deps should be reloaded
    for (const childId of node.directDeps) {
      if (this.loadedButNotCached(childId, loaded, cache)) return true
    }

    // 2. Determine if any of the indirect deps of the module should be reloaded
    // Unfortunately this most likely case is also the most expensive.
    const indirectsToReach: Set<string> = new Set()

    for (const childId of node.allDeps) {
      if (this.loadedButNotCached(childId, loaded, cache)) {
        indirectsToReach.add(childId)
      }
    }

    if (indirectsToReach.size > 0) {
      const visited: Set<string> = new Set()

      return this._reachableWithoutHittingCache(
        node,
        indirectsToReach,
        loaded,
        cache,
        visited,
      )
    }

    // 3. We determined that the module does not need to be reloaded
    // This is the most common case as we only return `true` above if the
    // Node.js module cache has been modified.
    // But unfortunately we're required to traverse all of a module's
    // dependencies EVERY time to detect those few edge cases.
    return false
  }

  /**
   * Determines if we can walk to a module following the dependency tree
   * without hitting a module that is inside the Node.js module cache.
   *
   * Depth-first search over `directDeps`; `visited` guards against cycles in
   * the dependency graph.
   */
  private _reachableWithoutHittingCache (
    node: DependencyNode,
    toReach: Set<string>,
    loaded: Set<string>,
    cache: Record<string, NodeModule>,
    visited: Set<string>,
  ) {
    // Walk the tree until we either hit a module that is cached or is one of the modules we try to reach
    for (const child of node.directDeps) {
      if (visited.has(child)) continue

      visited.add(child)

      if (toReach.has(child)) return true

      // A cached module terminates this path; only recurse through
      // cache-free children
      if (cache[child] == null) {
        const childNode = this.dependencyMap.get(child)

        if (
          childNode != null &&
          this._reachableWithoutHittingCache(
            childNode,
            toReach,
            loaded,
            cache,
            visited,
          )
        ) {
          return true
        }
      }
    }

    return false
  }

  /**
   * Creates an instance of a {@link DependencyMap} from a dependency map
   * {@link Array} representation that was embedded in the snapshot.
   *
   * @param arr the dependency map
   * @param projectBaseDir the root of the project the map is for
   */
  static fromDepArrayAndBaseDir (
    arr: DependencyMapArray,
    projectBaseDir: string,
  ) {
    const map = dependencyArrayToResolvedMap(arr, projectBaseDir)

    return new DependencyMap(map)
  }
}

View File

@@ -0,0 +1,338 @@
import debug from 'debug'
import path from 'path'
import type {
GetModuleKey,
GetModuleKeyOpts,
ModuleNeedsReload,
PackherdTranspileOpts,
} from '@packages/packherd-require'
import { packherdRequire } from '@packages/packherd-require'
import type { Snapshot, DependencyMapArray } from './types'
import { forwardSlash } from './utils'
import Module from 'module'
import { DependencyMap } from './dependency-map'
export * from './types'
// Namespaced debug loggers (enable via DEBUG=cypress:snapshot:*)
const logInfo = debug('cypress:snapshot:info')
const logError = debug('cypress:snapshot:error')
const logDebug = debug('cypress:snapshot:debug')
// Separator between the importing module's relative dir and the import
// request string in resolver-map keys (matches the key format produced
// during snapshotting)
const RESOLVER_MAP_KEY_SEP = '***'
/**
 * Creates the function which tries to obtain the module key for a given
 * module uri.
 *
 * @param resolverMap the {@link Map} of a map from directory to module key
 * which was prepared during snapshotting and embedded into it
 * @private
 */
function createGetModuleKey (resolverMap?: Record<string, string>) {
  /**
   * Attempts to find the module key from the resolver map if we can find a
   * key for the relative dir of the module importing the module uri.
   *
   * Requires `opts.path` or `opts.relPath` (the location of the importing
   * module) to be provided.
   *
   * @param moduleUri expected to be forward slashed regardless of which OS
   * we're running on as the resolver map also only includes forward slashed paths
   * @param baseDir project base dir
   * @param opts {@link GetModuleKeyOpts}
   */
  const getModuleKey: GetModuleKey = ({ moduleUri, baseDir, opts }) => {
    // Without the resolver map (or info about the importing module) we cannot
    // resolve reliably outside the Node.js machinery: `./util` could mean
    // `util.js`, `util.json` or `util/index.js`, and even an absolute path
    // like `/Volumes/dev/util.js` could be a file or a directory — probing
    // the file system is exactly what we are avoiding here.
    if (resolverMap == null || opts == null) {
      return { moduleKey: undefined, moduleRelativePath: undefined }
    }

    // Resolver map keys are always forward slashed, regardless of OS
    const relParentDir = forwardSlash(
      opts.relPath ?? path.relative(baseDir, opts.path),
    )
    const resolved = resolverMap[`${relParentDir}${RESOLVER_MAP_KEY_SEP}${moduleUri}`]

    if (resolved == null) {
      return { moduleKey: undefined, moduleRelativePath: undefined }
    }

    // Module cache prefixes with `./` while the resolver map doesn't
    const moduleKey = `./${resolved}`

    return { moduleKey, moduleRelativePath: moduleKey }
  }

  return getModuleKey
}
/**
 * Creates the predicate that determines if a module needs to be reloaded or if
 * it can be pulled from either the Node.js module cache or our exports cache,
 * embedded in the snapshot.
 *
 * @param dependencyMapArray the dependency map embedded in the snapshot
 * @param projectBaseDir the root of the project
 * @private
 */
function createModuleNeedsReload (
  dependencyMapArray: DependencyMapArray,
  projectBaseDir: string,
) {
  const map = DependencyMap.fromDepArrayAndBaseDir(
    dependencyMapArray,
    projectBaseDir,
  )

  // NOTE: that all keys as well as moduleId are native slashed in order to normalize
  // on Node.js Module._cache which is provided here as the `moduleCache`
  /**
   * Determines if a module needs to be reloaded.
   *
   * @param moduleId the id of the module
   * @param loadedModules modules that we tracked as loaded
   * @param moduleCache the Node.js module cache
   */
  const moduleNeedsReload: ModuleNeedsReload = (
    moduleId: string,
    loadedModules: Set<string>,
    moduleCache: Record<string, NodeModule>,
  ) => {
    // A module sitting in the Node.js cache never needs a reload
    if (moduleCache[moduleId] != null) return false

    if (map.loadedButNotCached(moduleId, loadedModules, moduleCache)) {
      return true
    }

    return map.criticalDependencyLoadedButNotCached(
      moduleId,
      loadedModules,
      moduleCache,
    )
  }

  return moduleNeedsReload
}
/**
 * Configures the setup of the require hook.
 *
 * @property useCache if `true` we use the cached module exports and definitions embedded in the snapshot
 * @property diagnosticsEnabled toggles diagnosticsEnabled
 * @property snapshotOverride if set overrides the exports and definitions
 * embedded in the snapshot
 * @property requireStatsFile if set require stats are written to this file
 * @property transpileOpts configures {@link
 * https://github.com/thlorenz/packherd | packherd} TypeScript transpilation
 * @property alwaysHook if `true` we hook `Module._load` even if no embedded snapshot is found
 */
export type SnapshotRequireOpts = {
  useCache?: boolean
  diagnosticsEnabled?: boolean
  snapshotOverride?: Snapshot
  requireStatsFile?: string
  transpileOpts?: PackherdTranspileOpts
  alwaysHook?: boolean
}

// Defaults merged into {@link SnapshotRequireOpts} by `snapshotRequire`:
// cache is used, diagnostics are off, and the require hook is always installed.
const DEFAULT_SNAPSHOT_REQUIRE_OPTS = {
  useCache: true,
  diagnosticsEnabled: false,
  alwaysHook: true,
}
/**
 * Attempts to extract the exports and definitions from the snapshot.
 *
 * Definitions are always taken from the snapshot when present; fully
 * initialized exports are only handed out when `useCache` is enabled.
 * Without a snapshot both come back as empty objects.
 */
function getCaches (sr: Snapshot | undefined, useCache: boolean) {
  if (sr === undefined) {
    return { moduleExports: {}, moduleDefinitions: {} }
  }

  const moduleExports = useCache ? sr.customRequire.exports : undefined

  return { moduleExports, moduleDefinitions: sr.customRequire.definitions }
}
/**
 * Sets up the require hook to use assets embedded in the snapshot.
 *
 * @param projectBaseDir project root
 * @param opts configure how the hook is setup and how it behaves
 */
export function snapshotRequire (
  projectBaseDir: string,
  opts: SnapshotRequireOpts = {},
) {
  const { useCache, diagnosticsEnabled, alwaysHook } = Object.assign(
    {},
    DEFAULT_SNAPSHOT_REQUIRE_OPTS,
    opts,
  )

  // 1. Assign snapshot which is a global if it was embedded
  const sr: Snapshot =
    opts.snapshotOverride ||
    // @ts-ignore global snapshotResult
    (typeof snapshotResult !== 'undefined' ? snapshotResult : undefined)

  // If we have no snapshot we don't need to hook anything
  if (sr != null || alwaysHook) {
    // 2. Pull out our exports and definitions embedded inside the snapshot
    const { moduleExports, moduleDefinitions } = getCaches(sr, useCache)

    // 3. Provide some info about what we found
    const cacheKeys = Object.keys(moduleExports || {})
    const defKeys = Object.keys(moduleDefinitions)

    logInfo(
      'Caching %d, defining %d modules! %s cache',
      cacheKeys.length,
      defKeys.length,
      useCache ? 'Using' : 'Not using',
    )

    logDebug('initializing packherd require')

    // 4. Attempt to pull out the resolver map as well as the dependency map
    let resolverMap: Record<string, string> | undefined
    let moduleNeedsReload: ModuleNeedsReload | undefined

    // @ts-ignore global snapshotAuxiliaryData
    if (typeof snapshotAuxiliaryData !== 'undefined') {
      // @ts-ignore global snapshotAuxiliaryData
      resolverMap = snapshotAuxiliaryData.resolverMap
      const dependencyMapArray: DependencyMapArray =
        // @ts-ignore global snapshotAuxiliaryData
        snapshotAuxiliaryData.dependencyMapArray

      // 5. Setup the module needs reload predicate with the dependency map
      if (dependencyMapArray != null) {
        moduleNeedsReload = createModuleNeedsReload(
          dependencyMapArray,
          projectBaseDir,
        )
      }
    }

    // 6. Setup the module key resolver with the resolver map
    const getModuleKey = createGetModuleKey(resolverMap)

    // 7. Use packherd to hook Node.js require and get hold of some callbacks
    // to interact with packherd's module loading mechanism
    const { resolve, shouldBypassCache, registerModuleLoad, tryLoad } =
      packherdRequire(projectBaseDir, {
        diagnosticsEnabled,
        moduleExports,
        moduleDefinitions,
        getModuleKey,
        requireStatsFile: opts.requireStatsFile,
        transpileOpts: opts.transpileOpts,
        moduleNeedsReload,
      })

    // 8. Ensure that the user passed the project base dir since the loader
    // cannot resolve modules without it
    // @ts-ignore global snapshotResult
    if (typeof snapshotResult !== 'undefined') {
      // NOTE(review): this shadows the `projectBaseDir` parameter — inside
      // this branch the env var takes precedence over the argument passed by
      // the caller; confirm that is intentional
      const projectBaseDir = process.env.PROJECT_BASE_DIR

      if (projectBaseDir == null) {
        throw new Error(
          'Please provide the \'PROJECT_BASE_DIR\' env var.\n' +
          'This is the same used when creating the snapshot.\n' +
          'Example: PROJECT_BASE_DIR=`pwd` yarn dev',
        )
      }

      // 9. Setup the path resolver that is used from inside the snapshot in
      // order to resolve full paths of modules
      const pathResolver = {
        resolve (p: string) {
          try {
            return path.resolve(projectBaseDir, p)
          } catch (err) {
            logError(err)
            // eslint-disable-next-line no-debugger
            debugger
          }

          return
        },
      }

      // -----------------
      // Snapshot Globals
      // -----------------
      // While creating the snapshot we use stubs for globals like process.
      // When we execute code that is inside the snapshot we need to ensure
      // that it is using the actual instances. We do this by swapping out the
      // stubs with those instances.
      // For more info see ../blueprint/set-globals.js

      // 10. Prepare the globals we need to inject into the snapshot
      // The below aren't available in all environments
      const checked_process: any =
        typeof process !== 'undefined' ? process : undefined
      const checked_window: any =
        // @ts-ignore ignore window as it's something that will only be available at runtime
        typeof window !== 'undefined' ? window : undefined
      const checked_document: any =
        // @ts-ignore ignore document as it's something that will only be available at runtime
        typeof document !== 'undefined' ? document : undefined

      // 11. Inject those globals
      // @ts-ignore global snapshotResult
      snapshotResult.setGlobals(
        global,
        checked_process,
        checked_window,
        checked_document,
        console,
        pathResolver,
        require,
      )

      // 12. Setup the customRequire inside the snapshot
      // @ts-ignore private module var
      require.cache = Module._cache
      // @ts-ignore global snapshotResult
      snapshotResult.customRequire.cache = require.cache

      // 13. Add some 'magic' functions that we can use from inside the
      // snapshot in order to integrate module loading
      // See ../blueprint/custom-require.js
      // @ts-ignore custom method on require
      require._tryLoad = tryLoad

      const oldRequireResolve = require.resolve

      // @ts-ignore opts not exactly matching
      require.resolve = function (id: string, opts: GetModuleKeyOpts & { paths?: string[] | undefined } | undefined) {
        // `fromSnapshot` marks resolve requests that originate inside the snapshot
        if (opts?.fromSnapshot) {
          return resolve(id, opts)
        }

        return oldRequireResolve(id, opts)
      }

      // @ts-ignore custom method on require
      require.shouldBypassCache = shouldBypassCache
      // @ts-ignore custom method on require
      require.registerModuleLoad = registerModuleLoad
      // @ts-ignore custom property on require
      require.builtInModules = new Set(Module.builtinModules)
    }
  }
}

View File

@@ -0,0 +1,206 @@
import type { CreateBundleResult } from '@tooling/packherd'
import type { RawSourceMap } from 'source-map-js'
type NodeRequireFunction = typeof require
export type Entries<T> = {
[K in keyof T]: [K, T[K]]
}[keyof T][]
/**
* esbuild metadata {@link https://esbuild.github.io/api/#metafile} with extra
* properties that are included by the snapshot esbuild
*
* Namely it includes a `resolverMap` property which is embedded into the
* snapshot in order to resolve modules without having to query the file system
*
* @category snapshot
*/
export type Metadata = CreateBundleResult['metafile'] & {
inputs: Record<
string,
{
bytes: number
fileInfo: {
fullPath: string
}
imports: {
path: string
kind: 'require-call'
}[]
}
>
resolverMap: Record<string, string>
}
/**
* Configures how the bundle to be snapshotted is generated.
*
* @property baseDirPath root of the project which we are snapshotting
*
* @property entryFilePath file we use as the entry-point and from which all
* modules to be snapshotted are reachable
*
* @property bundlerPath the esbuild bundler binary to use, if not provided it
* falls back to the installed one
*
* @property nodeModulesOnly if `true` only node_modules are included in the
* snapshot, i.e. application files are not
*
* @property deferred you should provide any modules here that you know need
* to be deferred in order to speed up the doctor step
*
* @property norewrite you should provide any modules here that you know
* should not be rewritten in order to speed up the doctor step and to work
* around issues due to invalid rewrites
*
* @property includeStrictVerifiers if `true` the bundle will be more strictly
* checked when validated inside the Node.js VM
* This should be set when running the doctor and unset when building the
* bundle to be snapshotted
*
* @property sourcemap if `true` then a sourcemap will be generated for the
* bundled files
*
* @property sourcemapExternalPath the file to write the generated sourcemap
* to if that is desired
*
* @property sourcemapEmbed when `true` the sourcemap is embedded in the
* snapshot
*
* @property sourcemapInline when `true` the sourcemap is inlined at the
* bottom of the bundled file that is snapshotted
*
* @category snapshot
*/
export type CreateBundleOpts = {
baseDirPath: string
entryFilePath: string
bundlerPath: string
nodeModulesOnly: boolean
deferred?: string[]
norewrite?: string[]
includeStrictVerifiers?: boolean
sourcemap?: boolean
sourcemapExternalPath?: string
sourcemapEmbed: boolean
sourcemapInline: boolean
}
/**
* Adds Snapshot specific opts to the {@link CreateBundleOpts}.
*
* @property resolverMap the map that should be embedded in the snapshot in
* order to resolve module import requests without querying the file system
*
* @property auxiliaryData any extra data that should be embedded in the
* snapshot
*
* @property nodeEnv `process.env.NODE_ENV` will be set to this value during
* snapshot creation, see src/blueprint.ts:88
*
* @category snapshot
*/
export type CreateSnapshotScriptOpts = CreateBundleOpts & {
resolverMap?: Record<string, string>
auxiliaryData?: Record<string, any>
nodeEnv: string
}
/**
* Used to configure the workers that are processing a snapshot script in parallel
* @category snapshot
*/
export type ProcessScriptOpts = {
bundleHash: string
bundlePath: string
baseDirPath: string
entryFilePath: string
entryPoint: string
nodeEnv: string
}
/**
* Possible outcomes of processing a snapshot script.
*
* - 'failed:assembleScript' means that the bundler generated a proper bundle
* but it couldn't be included in the snapshot script
* - 'failed:verifyScript' means that the script was assembled fine, but some
* violation was detected during the verification phase which indicates that it
* couldn't be snapshotted as is
* - 'completed' all went fine the script could be snapshotted as is
*
* @category snapshot
*/
export type ProcessScriptResult = {
outcome: 'failed:assembleScript' | 'failed:verifyScript' | 'completed'
error?: Error
}
/** Specifies the signature of the function that represents a module definition
* and when invoked returns a Node.js `module`.
*
* Note that an `exports` parameter which is the same instance as the
* `module.exports` field. The module either mutates the `exports` directly or
* reassigns `module.exports`. After invoking it, the `module.exports` are
* considered the exports of the module.
*
* These definitions are embedded into the snapshot and invoked at runtime.
* They are used instead of full-fledged exports when snapshotting would fail
* were we to initialize them during the snapshot phase.
*
* @category snapshot
* @category loader
*/
export type ModuleDefinition = (
exports: NodeModule['exports'],
module: {
exports: NodeModule['exports']
},
__filename: string,
__dirname: string,
require: NodeRequireFunction
) => NodeModule
/**
* The result of snapshotting a snapshot script. Namely it has the
* `customRequire` function which also references the `exports` containing fully
* initialized modules as well as `definitions` ({@link ModuleDefinition}).
*
* @category snapshot
*/
export type Snapshot = {
customRequire: {
definitions: Record<string, NodeRequireFunction>
exports: Record<string, NodeModule>
// Module._cache === require.cache
cache: Record<string, NodeModule>
}
}
/**
* Extra data we include in the snapshot, namely the embedded `sourceMap`.
* @category snapshot
*/
export type SnapshotAuxiliaryData = {
sourceMap?: RawSourceMap
}
/**
* Represents dependencies of a module.
*
* @property directDeps are all dependencies which are directly imported by the module
* @property allDeps are all dependencies imported by the module as well as by
* its dependencies transitively
*/
export type DependencyNode = { directDeps: Set<string>, allDeps: Set<string> }
/**
* The array representation of the dependency map which is used to embed it
* into the snapshot.
*/
export type DependencyMapArray = Array<
[string, { directDeps: string[], allDeps: string[] }]
>

View File

@@ -0,0 +1,12 @@
import path from 'path'
/**
 * Normalizes the given path to have forward slashes at all times.
 * This is used to resolve modules from the snapshot as they are always stored
 * with forward slashes there.
 * @category loader
 */
export const forwardSlash =
  path.sep === '/'
    ? (p: string) => p
    : (p: string) => p.replace(/\\+/g, '/')

View File

@@ -0,0 +1,9 @@
// Mocha configuration for this package's unit tests
module.exports = {
  // Transpile TypeScript sources/tests on the fly
  require: '@packages/ts/register',
  reporter: 'mocha-multi-reporters',
  reporterOptions: {
    configFile: '../../mocha-reporter-config.json',
  },
  spec: 'test/**/*.spec.ts',
  // Re-run when either tests or sources change (used with `mocha --watch`)
  watchFiles: ['test/**/*.ts', 'src/**/*.ts'],
}

View File

@@ -0,0 +1,101 @@
import { buildDependencyMap } from '@tooling/v8-snapshot'
import { DependencyMap } from '../src/dependency-map'
import type { Metadata } from '../src/types'
import { expect } from 'chai'
const ROOT = 'lib/root.js'
const FOO = 'lib/foo.js'
const BAR = 'lib/bar.js'
const BAZ = 'lib/baz.js'
const FOZ = 'lib/foz.js'
/*
* + ROOT
* |
* +---- FOO
* |
* + --- BAR
* |
* +--- BAZ
* |
* + --- FOZ
* |
* |
* + --- FOO (circular ref)
*
*/
const ALL_ROOT = [FOO, BAR, BAZ, FOZ]
const ALL_FOO = [BAR, BAZ, FOZ]
const ALL_BAR = [BAZ, FOZ, FOO]
const ALL_BAZ = [FOZ, FOO, BAR]
const ALL_FOZ: string[] = []
const DIRECT_ROOT = [FOO]
const DIRECT_FOO = [BAR]
const DIRECT_BAR = [BAZ]
const DIRECT_BAZ = [FOZ, FOO]
const DIRECT_FOZ: string[] = []
const inputs: Metadata['inputs'] = {
[ROOT]: {
imports: [
{
path: FOO,
kind: 'require-call',
},
],
},
[FOO]: {
imports: [
{
path: BAR,
kind: 'require-call',
},
],
},
[BAR]: {
imports: [
{
path: BAZ,
kind: 'require-call',
},
],
},
[BAZ]: {
imports: [
{
path: FOZ,
kind: 'require-call',
},
{
path: FOO,
kind: 'require-call',
},
],
},
[FOZ]: {
imports: [],
},
} as unknown as Metadata['inputs']
const map = buildDependencyMap(inputs)
const dp = new DependencyMap(map)
describe('dependency map: circular', () => {
  it('creates a map with circular dep - all deps ', () => {
    // Transitive deps follow the BAZ -> FOO cycle, so every module except the
    // leaf FOZ reaches all the others
    expect(dp.allDepsOf(ROOT)).to.deep.equal(ALL_ROOT)
    expect(dp.allDepsOf(FOO)).to.deep.equal(ALL_FOO)
    expect(dp.allDepsOf(BAR)).to.deep.equal(ALL_BAR)
    expect(dp.allDepsOf(BAZ)).to.deep.equal(ALL_BAZ)
    expect(dp.allDepsOf(FOZ)).to.deep.equal(ALL_FOZ)
  })

  it('creates a map with circular dep - direct deps ', () => {
    // Direct deps are exactly the imports declared inside each module
    expect(dp.directDepsOf(ROOT)).to.deep.equal(DIRECT_ROOT)
    expect(dp.directDepsOf(FOO)).to.deep.equal(DIRECT_FOO)
    expect(dp.directDepsOf(BAR)).to.deep.equal(DIRECT_BAR)
    expect(dp.directDepsOf(BAZ)).to.deep.equal(DIRECT_BAZ)
    expect(dp.directDepsOf(FOZ)).to.deep.equal(DIRECT_FOZ)
  })
})

View File

@@ -0,0 +1,126 @@
import { buildDependencyMap } from '@tooling/v8-snapshot'
import { DependencyMap } from '../src/dependency-map'
import type { Metadata } from '../src/types'
import { expect } from 'chai'
const NO_DEPS = 'lib/fixtures/no-deps.js'
const SYNC_DEPS = 'lib/fixtures/sync-deps.js'
const DEEP_SYNC_DEPS = 'lib/fixtures/deep-sync-deps.js'
const KEEP_JS = 'lib/keep.js'
const allIds = [NO_DEPS, SYNC_DEPS, DEEP_SYNC_DEPS, KEEP_JS]
/*
* + KEEP_JS
* |
* +---- DEEP_SYNC_DEPS
* | |
* | |
* | + --- SYNC_DEPS
* | |
* | +--- NO_DEPS
* |
* +--- SYNC_DEPS
* |
* +--- NO_DEPS
*/
const inputs: Metadata['inputs'] = {
'lib/fixtures/no-deps.js': {
imports: [],
},
'lib/fixtures/sync-deps.js': {
imports: [
{
path: 'lib/fixtures/no-deps.js',
kind: 'require-call',
},
],
},
'lib/fixtures/deep-sync-deps.js': {
imports: [
{
path: 'lib/fixtures/sync-deps.js',
kind: 'require-call',
},
],
},
'lib/keep.js': {
imports: [
{
path: 'lib/fixtures/deep-sync-deps.js',
kind: 'require-call',
},
{
path: 'lib/fixtures/sync-deps.js',
kind: 'require-call',
},
],
},
} as unknown as Metadata['inputs']
const map = buildDependencyMap(inputs)
const dp = new DependencyMap(map)
describe('dependency map', () => {
  it('creates a map that is loaded but not cached', () => {
    const loaded: Set<string> = new Set()
    const cache: Record<string, NodeModule> = {}

    // Nothing loaded yet -> nothing can be 'loaded but not cached'
    for (const id of allIds) {
      expect(dp.loadedButNotCached(id, loaded, cache), `${id} not 'loaded but not cached'`).to.be.false
    }

    // Simulate loading (and caching) every module
    for (const id of allIds) {
      cache[id] = {} as NodeModule
      loaded.add(id)
    }

    for (const id of allIds) {
      expect(dp.loadedButNotCached(id, loaded, cache), `${id} not 'loaded but not cached'`).to.be.false
    }

    // Evicting a module from the cache flags exactly that module
    delete cache[NO_DEPS]

    for (const id of allIds) {
      const res = id === NO_DEPS

      expect(dp.loadedButNotCached(id, loaded, cache)).to.equal(res, `${id} ${res ? '' : 'not '} 'loaded but not cached'`)
    }

    delete cache[SYNC_DEPS]

    for (const id of allIds) {
      const res = id === NO_DEPS || id === SYNC_DEPS

      expect(dp.loadedButNotCached(id, loaded, cache)).to.equal(res, `${id} ${res ? '' : 'not '} 'loaded but not cached'`)
    }
  })

  it('creates a map with a critical dependency loaded but not cached', () => {
    const loaded: Set<string> = new Set()
    const cache: Record<string, NodeModule> = {}
    // Marks a module as both loaded and cached
    const load = (id: string) => {
      cache[id] = {} as NodeModule
      loaded.add(id)
    }

    load(NO_DEPS)
    expect(dp.criticalDependencyLoadedButNotCached(SYNC_DEPS, loaded, cache), 'SYNC_DEPS needs no reload').to.be.false

    // Evicting NO_DEPS opens a cache-free path to it from all its dependents
    delete cache[NO_DEPS]
    expect(dp.criticalDependencyLoadedButNotCached(SYNC_DEPS, loaded, cache), 'SYNC_DEPS needs reload since not in cache and NO_DEPS is direct dep').to.be.true
    expect(dp.criticalDependencyLoadedButNotCached(DEEP_SYNC_DEPS, loaded, cache), 'DEEP_SYNC_DEPS needs reload since a cache free path to NO_DEPS exists').to.be.true
    expect(dp.criticalDependencyLoadedButNotCached(KEEP_JS, loaded, cache), 'KEEP_JS needs reload since a cache free path to NO_DEPS exists').to.be.true

    // Re-caching SYNC_DEPS blocks that cache-free path again
    load(SYNC_DEPS)
    expect(dp.criticalDependencyLoadedButNotCached(DEEP_SYNC_DEPS, loaded, cache), 'DEEP_SYNC_DEPS needs no reload since no cache free path to NO_DEPS exists').to.be.false
    expect(dp.criticalDependencyLoadedButNotCached(KEEP_JS, loaded, cache), 'KEEP_JS needs no reload since no cache free path to NO_DEPS exists').to.be.false
  })
})

View File

@@ -0,0 +1,9 @@
{
"extends": "../ts/tsconfig.json",
"include": ["src", "scripts/snapshot-generate-entry-via-deps.ts"],
"compilerOptions": {
"lib": ["esnext"],
"outDir": "./dist",
"allowJs": true,
},
}

View File

@@ -4,6 +4,9 @@ const { join } = require('path')
const glob = require('glob')
const os = require('os')
const path = require('path')
const { setupV8Snapshots } = require('@tooling/v8-snapshot')
const { flipFuses, FuseVersion, FuseV1Options } = require('@electron/fuses')
const { cleanup } = require('./binary/binary-cleanup')
module.exports = async function (params) {
console.log('****************************')
@@ -44,4 +47,23 @@ module.exports = async function (params) {
await fs.copy(distNodeModules, appNodeModules)
console.log('all node_modules subfolders copied to', outputFolder)
const exePathPerPlatform = {
darwin: join(params.appOutDir, 'Cypress.app', 'Contents', 'MacOS', 'Cypress'),
linux: join(params.appOutDir, 'Cypress'),
win32: join(params.appOutDir, 'Cypress.exe'),
}
if (!['1', 'true'].includes(process.env.DISABLE_SNAPSHOT_REQUIRE)) {
await flipFuses(
exePathPerPlatform[os.platform()],
{
version: FuseVersion.V1,
[FuseV1Options.LoadBrowserProcessSpecificV8Snapshot]: true,
},
)
await setupV8Snapshots(params.appOutDir)
await cleanup(outputFolder)
}
}

View File

@@ -0,0 +1,191 @@
const fs = require('fs-extra')
const path = require('path')
const { consolidateDeps } = require('@tooling/v8-snapshot')
const del = require('del')
const esbuild = require('esbuild')
const snapshotMetadata = require('@tooling/v8-snapshot/cache/prod-darwin/snapshot-meta.cache.json')
const tempDir = require('temp-dir')
const workingDir = path.join(tempDir, 'binary-cleanup-workdir')
fs.ensureDirSync(workingDir)
/**
 * Recursively prunes empty directories under (and including) `directory`.
 * A non-directory path is left untouched. Directories whose children all end
 * up removed are removed themselves, bottom-up.
 *
 * @param {string} directory - path to examine
 * @returns {Promise<void>}
 */
async function removeEmptyDirectories (directory) {
  // Use lstat so symlinks are never followed (stat would traverse them).
  const stats = await fs.lstat(directory)

  if (!stats.isDirectory()) {
    return
  }

  let entries = await fs.readdir(directory)

  if (entries.length !== 0) {
    await Promise.all(
      entries.map((entry) => removeEmptyDirectories(path.join(directory, entry))),
    )

    // Children may have been deleted above, so list the directory again —
    // the parent might be empty now.
    entries = await fs.readdir(directory)
  }

  if (entries.length === 0) {
    await fs.rmdir(directory)
  }
}
/**
 * Computes the set of dependency file paths that must remain on disk inside
 * the binary, i.e. code that runs before (or outside of) the v8 snapshot and
 * therefore cannot be served from it.
 *
 * Starting from known snapshot-free entry points, esbuild traces their
 * dependency graphs. Any `require.resolve` warning esbuild emits points at a
 * module it could not inline; that module becomes an additional entry point
 * and the trace is re-run until no new entry points are discovered.
 *
 * @returns {Promise<string[]>} metafile input paths plus the entry points
 * themselves — everything that must be kept in the binary
 */
const getDependencyPathsToKeep = async () => {
  const entryPoints = new Set([
    // This is the entry point for the server bundle. It will not have access to the snapshot yet. It needs to be kept in the binary
    require.resolve('@packages/server/index.js'),
    // This is a dynamic import that is used to load the snapshot require logic. It will not have access to the snapshot yet. It needs to be kept in the binary
    require.resolve('@packages/server/hook-require.js'),
    // These dependencies are started in a new process or thread and will not have access to the snapshot. They need to be kept in the binary
    require.resolve('@packages/server/lib/plugins/child/require_async_child.js'),
    require.resolve('@packages/server/lib/plugins/child/register_ts_node.js'),
    require.resolve('@packages/rewriter/lib/threads/worker.ts'),
    // These dependencies use the `require.resolve(<dependency>, { paths: [<path>] })` pattern where <path> is a path within the cypress monorepo. These will not be
    // pulled in by esbuild but still need to be kept in the binary.
    require.resolve('webpack'),
    require.resolve('webpack-dev-server', { paths: [path.join(__dirname, '..', '..', 'npm', 'webpack-dev-server')] }),
    require.resolve('html-webpack-plugin-4', { paths: [path.join(__dirname, '..', '..', 'npm', 'webpack-dev-server')] }),
    require.resolve('html-webpack-plugin-5', { paths: [path.join(__dirname, '..', '..', 'npm', 'webpack-dev-server')] }),
    // These dependencies are completely dynamic using the pattern `require(`./${name}`)` and will not be pulled in by esbuild but still need to be kept in the binary.
    // ('sunos' was previously listed twice; the Set deduplicated it anyway, so listing it once is equivalent.)
    ...[
      'ibmi',
      'android',
      'darwin',
      'freebsd',
      'linux',
      'openbsd',
      'sunos',
      'win32',
    ].map((platform) => require.resolve(`default-gateway/${platform}`)),
  ])

  let esbuildResult
  let newEntryPointsFound = true

  // The general idea here is to run esbuild on entry points that are used outside of the snapshot. If, during the process,
  // we find places where we do a require.resolve on a module, that should be treated as an additional entry point and we run
  // esbuild again. We do this until we no longer find any new entry points. The resulting metafile inputs are
  // the dependency paths that we need to ensure stay in the snapshot.
  while (newEntryPointsFound) {
    esbuildResult = await esbuild.build({
      entryPoints: [...entryPoints],
      bundle: true,
      outdir: workingDir,
      platform: 'node',
      metafile: true,
      external: [
        './packages/server/server-entry',
        'fsevents',
        'pnpapi',
        '@swc/core',
        'emitter',
      ],
    })

    newEntryPointsFound = false
    esbuildResult.warnings.forEach((warning) => {
      const matches = warning.text.match(/"(.*)" should be marked as external for use with "require.resolve"/)
      const warningSubject = matches && matches[1]

      if (warningSubject) {
        let entryPoint

        if (warningSubject.startsWith('.')) {
          // Relative specifier: resolve it against the file that produced the
          // warning (paths in the metafile are repo-root relative).
          entryPoint = path.join(__dirname, '..', '..', path.dirname(warning.location.file), warningSubject)
        } else {
          entryPoint = require.resolve(warningSubject)
        }

        // Only file-like resolutions (with an extension) become new entry
        // points; skip anything we have already traced.
        if (path.extname(entryPoint) !== '' && !entryPoints.has(entryPoint)) {
          newEntryPointsFound = true
          entryPoints.add(entryPoint)
        }
      }
    })
  }

  return [...Object.keys(esbuildResult.metafile.inputs), ...entryPoints]
}
/**
 * Strips the unpacked Electron app directory down to what the binary still
 * needs at runtime once the v8 snapshot is in place: traces which files must
 * stay, deletes snapshotted modules that are no longer needed on disk, then
 * removes assorted dead weight (tests, ESM duplicates, docs, build leftovers).
 *
 * @param {string} buildAppDir - root of the unpacked app (electron-builder output)
 * @returns {Promise<void>}
 */
const cleanup = async (buildAppDir) => {
  // 1. Retrieve all dependencies that still need to be kept in the binary. In theory, we could use the bundles generated here as single files within the binary,
  // but for now, we just track the dependencies that get pulled in.
  const keptDependencies = [...await getDependencyPathsToKeep(), 'package.json', 'packages/server/server-entry.js']

  // 2. Gather the dependencies that could potentially be removed from the binary due to being in the snapshot.
  const potentiallyRemovedDependencies = [...snapshotMetadata.healthy, ...snapshotMetadata.deferred, ...snapshotMetadata.norewrite]

  // 3. Remove all dependencies that are in the snapshot but not in the list of kept dependencies from the binary.
  // NOTE(review): `dependency.slice(2)` appears to strip a leading './' from the metadata
  // paths so they compare against `keptDependencies` — confirm against the cache file format.
  await Promise.all(potentiallyRemovedDependencies.map(async (dependency) => {
    // marionette-client requires all of its dependencies in a very non-standard dynamic way. We will keep anything in marionette-client
    if (!keptDependencies.includes(dependency.slice(2)) && !dependency.includes('marionette-client')) {
      // The snapshot metadata may reference a TS source; the binary ships the compiled .js next to it.
      await fs.remove(path.join(buildAppDir, dependency.replace(/.ts$/, '.js')))
    }
  }))

  // 4. Consolidate dependencies that are safe to consolidate (`lodash` and `bluebird`)
  await consolidateDeps({ projectBaseDir: buildAppDir })

  // 5. Remove various unnecessary files from the binary to further clean things up. Likely, there is additional work that can be done here
  await del([
    // Remove test files
    path.join(buildAppDir, '**', 'test'),
    path.join(buildAppDir, '**', 'tests'),
    // What we need of prettier is entirely encapsulated within the v8 snapshot, but has a few leftover large files
    path.join(buildAppDir, '**', 'prettier', 'esm'),
    path.join(buildAppDir, '**', 'prettier', 'standalone.js'),
    path.join(buildAppDir, '**', 'prettier', 'bin-prettier.js'),
    // ESM files are mostly not needed currently
    path.join(buildAppDir, '**', '@babel', '**', 'esm'),
    path.join(buildAppDir, '**', 'ramda', 'es'),
    path.join(buildAppDir, '**', 'jimp', 'es'),
    path.join(buildAppDir, '**', '@jimp', '**', 'es'),
    path.join(buildAppDir, '**', 'nexus', 'dist-esm'),
    path.join(buildAppDir, '**', '@graphql-tools', '**', '*.mjs'),
    path.join(buildAppDir, '**', 'graphql', '**', '*.mjs'),
    // We currently do not use any map files
    path.join(buildAppDir, '**', '*js.map'),
    // Markdown files are removed, except LICENSE files, which need to be kept
    path.join(buildAppDir, '**', '!(LICENSE|license|License).md'),
    // These are type related files that are not used within the binary
    path.join(buildAppDir, '**', '*.d.ts'),
    path.join(buildAppDir, '**', 'ajv', 'lib', '**', '*.ts'),
    path.join(buildAppDir, '**', '*.flow'),
    // Example files are not needed
    path.join(buildAppDir, '**', 'jimp', 'browser', 'examples'),
    // Documentation files are not needed
    path.join(buildAppDir, '**', 'JSV', 'jsdoc-toolkit'),
    path.join(buildAppDir, '**', 'JSV', 'docs'),
    path.join(buildAppDir, '**', 'fluent-ffmpeg', 'doc'),
    // Files used as part of prebuilding are not necessary
    path.join(buildAppDir, '**', 'registry-js', 'prebuilds'),
    path.join(buildAppDir, '**', '*.cc'),
    path.join(buildAppDir, '**', '*.o'),
    path.join(buildAppDir, '**', '*.c'),
    path.join(buildAppDir, '**', '*.h'),
    // Remove distributions that are not needed in the binary
    path.join(buildAppDir, '**', 'ramda', 'dist'),
    path.join(buildAppDir, '**', 'jimp', 'browser'),
    path.join(buildAppDir, '**', '@jimp', '**', 'src'),
    path.join(buildAppDir, '**', 'nexus', 'src'),
    path.join(buildAppDir, '**', 'source-map', 'dist'),
    path.join(buildAppDir, '**', 'source-map-js', 'dist'),
    path.join(buildAppDir, '**', 'pako', 'dist'),
    path.join(buildAppDir, '**', 'node-forge', 'dist'),
    path.join(buildAppDir, '**', 'pngjs', 'browser.js'),
    path.join(buildAppDir, '**', 'plist', 'dist'),
    // Remove yarn locks
    path.join(buildAppDir, '**', 'yarn.lock'),
  ], { force: true })

  // 6. Remove any empty directories left behind by the rest of the cleanup.
  await removeEmptyDirectories(buildAppDir)
}
module.exports = {
cleanup,
}

View File

@@ -18,6 +18,7 @@ import { transformRequires } from './util/transform-requires'
import execa from 'execa'
import { testStaticAssets } from './util/testStaticAssets'
import performanceTracking from '../../system-tests/lib/performance'
import verify from '../../cli/lib/tasks/verify'
const globAsync = promisify(glob)
@@ -173,6 +174,10 @@ export async function buildCypressApp (options: BuildCypressAppOpts) {
}, { spaces: 2 })
fs.writeFileSync(meta.distDir('index.js'), `\
${!['1', 'true'].includes(process.env.DISABLE_SNAPSHOT_REQUIRE) ?
`if (!global.snapshotResult && process.versions?.electron) {
throw new Error('global.snapshotResult is not defined. This binary has been built incorrectly.')
}` : ''}
process.env.CYPRESS_INTERNAL_ENV = process.env.CYPRESS_INTERNAL_ENV || 'production'
require('./packages/server')\
`)
@@ -196,8 +201,8 @@ require('./packages/server')\
await transformRequires(meta.distDir())
log(`#testVersion ${meta.distDir()}`)
await testVersion(meta.distDir(), version)
log(`#testDistVersion ${meta.distDir()}`)
await testDistVersion(meta.distDir(), version)
log('#testStaticAssets')
await testStaticAssets(meta.distDir())
@@ -249,9 +254,18 @@ require('./packages/server')\
console.log('electron-builder arguments:')
console.log(args.join(' '))
// Update the root package.json with the next app version so that it is snapshot properly
fs.writeJSONSync(path.join(CY_ROOT_DIR, 'package.json'), {
...jsonRoot,
version,
}, { spaces: 2 })
try {
await execa('electron-builder', args, {
stdio: 'inherit',
env: {
NODE_OPTIONS: '--max_old_space_size=8192',
},
})
} catch (e) {
if (!skipSigning) {
@@ -259,6 +273,9 @@ require('./packages/server')\
}
}
// Revert the root package.json so that subsequent steps will work properly
fs.writeJSONSync(path.join(CY_ROOT_DIR, 'package.json'), jsonRoot, { spaces: 2 })
await checkMaxPathLength()
// lsDistFolder
@@ -268,9 +285,6 @@ require('./packages/server')\
console.log(stdout)
// testVersion(buildAppDir)
await testVersion(meta.buildAppDir(), version)
// runSmokeTests
let usingXvfb = xvfb.isNeeded()
@@ -279,6 +293,9 @@ require('./packages/server')\
await xvfb.start()
}
log(`#testExecutableVersion ${meta.buildAppExecutable()}`)
await testExecutableVersion(meta.buildAppExecutable(), version)
const executablePath = meta.buildAppExecutable()
await smoke.test(executablePath)
@@ -358,23 +375,46 @@ function getIconFilename () {
return iconFilename
}
async function testVersion (dir: string, version: string) {
async function testDistVersion (distDir: string, version: string) {
log('#testVersion')
console.log('testing dist package version')
console.log('by calling: node index.js --version')
console.log('in the folder %s', dir)
console.log('in the folder %s', distDir)
const result = await execa('node', ['index.js', '--version'], {
cwd: dir,
cwd: distDir,
})
la(result.stdout, 'missing output when getting built version', result)
console.log('app in %s', dir)
console.log('app in %s', distDir)
console.log('built app version', result.stdout)
la(result.stdout === version, 'different version reported',
la(result.stdout.trim() === version.trim(), 'different version reported',
result.stdout, 'from input version to build', version)
console.log('✅ using node --version works')
}
/**
 * Verifies that the packaged Cypress executable reports the expected version
 * when invoked with `--version`.
 */
async function testExecutableVersion (buildAppExecutable: string, version: string) {
  log('#testVersion')
  console.log('testing built app executable version')
  console.log(`by calling: ${buildAppExecutable} --version`)

  const cliArgs = verify.needsSandbox()
    ? ['--version', '--no-sandbox']
    : ['--version']

  const result = await execa(buildAppExecutable, cliArgs)

  la(result.stdout, 'missing output when getting built version', result)
  console.log('built app version', result.stdout)

  // Trim both sides: the executable's stdout may carry a trailing newline.
  la(result.stdout.trim() === version.trim(), 'different version reported',
    result.stdout, 'from input version to build', version)

  console.log('✅ using --version on the Cypress binary works')
}

View File

@@ -1,22 +0,0 @@
/**
 * Loads `konfig` without leaking a working-directory change into the caller.
 *
 * Requiring `packages/server/lib/konfig` changes `process.cwd()` (it pulls in
 * `lib/cwd`), which would make path-based assertions in test code unreliable.
 * This wrapper records the current working directory, performs the require,
 * then restores the directory before handing back the konfig function.
 *
 * Test code should obtain konfig through this module:
 *
 * @example
 * const konfig = require('../binary/get-config')()
 */
const getConfig = () => {
  const previousCwd = process.cwd()
  const konfig = require('../../packages/server/lib/konfig')

  // Loading konfig may have chdir'd elsewhere — undo that side effect.
  process.chdir(previousCwd)

  return konfig
}

module.exports = getConfig

View File

@@ -160,6 +160,47 @@ const runFailingProjectTest = function (buildAppExecutable, e2e) {
.then(verifyScreenshots)
}
/**
 * Runs the packaged Cypress binary against the `e2e` fixture project with a
 * spec that verifies the v8 snapshot was embedded correctly.
 *
 * @param {string} buildAppExecutable - path to the built Cypress executable
 * @param {string} e2e - path to the scaffolded `e2e` fixture project
 * @returns {Promise<void>} resolves when the child exits with code 0
 */
const runV8SnapshotProjectTest = function (buildAppExecutable, e2e) {
  if (shouldSkipProjectTest()) {
    // Fixed copy/paste: this previously logged 'skipping failing project test'.
    console.log('skipping v8 snapshot project test')

    return Promise.resolve()
  }

  console.log('running v8 snapshot project test')

  const spawn = () => {
    return new Promise((resolve, reject) => {
      // Drop CYPRESS_INTERNAL_ENV so the binary runs with its baked-in
      // production environment rather than inheriting ours.
      const env = _.omit(process.env, 'CYPRESS_INTERNAL_ENV')

      const args = [
        `--run-project=${e2e}`,
        `--spec=${e2e}/cypress/e2e/simple_v8_snapshot.cy.js`,
      ]

      if (verify.needsSandbox()) {
        args.push('--no-sandbox')
      }

      const options = {
        stdio: 'inherit',
        env,
      }

      cp.spawn(buildAppExecutable, args, options)
      // Without an 'error' handler a spawn failure (e.g. missing executable)
      // would leave this Promise pending forever.
      .on('error', reject)
      .on('exit', (code) => {
        if (code === 0) {
          return resolve()
        }

        return reject(new Error(`running project tests failed with: '${code}' errors.`))
      })
    })
  }

  return spawn()
}
const test = async function (buildAppExecutable) {
await scaffoldCommonNodeModules()
await Fixtures.scaffoldProject('e2e')
@@ -168,6 +209,10 @@ const test = async function (buildAppExecutable) {
await runSmokeTest(buildAppExecutable)
await runProjectTest(buildAppExecutable, e2e)
await runFailingProjectTest(buildAppExecutable, e2e)
if (!['1', 'true'].includes(process.env.DISABLE_SNAPSHOT_REQUIRE)) {
await runV8SnapshotProjectTest(buildAppExecutable, e2e)
}
Fixtures.remove()
}

View File

@@ -7,11 +7,12 @@ const os = require('os')
const Promise = require('bluebird')
const { fromSSO, fromEnv } = require('@aws-sdk/credential-providers')
const konfig = require('../get-config')()
const { purgeCloudflareCache } = require('./purge-cloudflare-cache')
const CDN_URL = 'https://cdn.cypress.io'
const getUploadUrl = function () {
const url = konfig('cdn_url')
const url = CDN_URL
la(check.url(url), 'could not get CDN url', url)

View File

@@ -11,10 +11,10 @@ declare global {
/**
* Gulp is only used for running the application during development. At this point of starting the app,
* process.env.CYPRESS_INTERNAL_ENV has not been set yet unless explicitly set on the command line. If not
* set on the command line, it is set to 'development' [here](https://github.com/cypress-io/cypress/blob/a5ec234005fead97f6cfdf611abf8d9f4ad0565d/packages/server/lib/environment.js#L22)
* set on the command line, it is set to 'development' [here](https://github.com/cypress-io/cypress/blob/develop/packages/server/lib/environment.js#L22)
*
* When running in a production build, a file is written out to set CYPRESS_INTERNAL_ENV to 'production'
* [here](https://github.com/cypress-io/cypress/blob/a5ec234005fead97f6cfdf611abf8d9f4ad0565d/scripts/binary/build.ts#L176).
* [here](https://github.com/cypress-io/cypress/blob/develop/scripts/binary/build.ts#L176).
* However, running in production will not use the code in this file.
*/
@@ -34,14 +34,14 @@ export const ENV_VARS = {
// Uses the "built" vite assets, not the served ones
DEV_OPEN: {
CYPRESS_KONFIG_ENV: DEFAULT_INTERNAL_CLOUD_ENV, // TODO: Change this / remove konfig
CYPRESS_CONFIG_ENV: DEFAULT_INTERNAL_CLOUD_ENV, // TODO: Change this / remove config
CYPRESS_INTERNAL_CLOUD_ENV: DEFAULT_INTERNAL_CLOUD_ENV,
CYPRESS_INTERNAL_EVENT_COLLECTOR_ENV: DEFAULT_INTERNAL_EVENT_COLLECTOR_ENV,
},
// Used when we're running Cypress in true "development" mode
DEV: {
CYPRESS_KONFIG_ENV: DEFAULT_INTERNAL_CLOUD_ENV, // TODO: Change this / remove konfig
CYPRESS_CONFIG_ENV: DEFAULT_INTERNAL_CLOUD_ENV, // TODO: Change this / remove config
CYPRESS_INTERNAL_CLOUD_ENV: DEFAULT_INTERNAL_CLOUD_ENV,
CYPRESS_INTERNAL_EVENT_COLLECTOR_ENV: DEFAULT_INTERNAL_EVENT_COLLECTOR_ENV,
},

View File

@@ -21,6 +21,7 @@ export const monorepoPaths = {
pkgLaunchpad: path.join(__dirname, '../../packages/launchpad'),
pkgNetStubbing: path.join(__dirname, '../../packages/net-stubbing'),
pkgNetwork: path.join(__dirname, '../../packages/network'),
pkgPackherdRequire: path.join(__dirname, '../../packages/packherd-require'),
pkgProxy: path.join(__dirname, '../../packages/proxy'),
pkgReporter: path.join(__dirname, '../../packages/reporter'),
pkgResolveDist: path.join(__dirname, '../../packages/resolve-dist'),
@@ -32,5 +33,6 @@ export const monorepoPaths = {
pkgSocket: path.join(__dirname, '../../packages/socket'),
pkgTs: path.join(__dirname, '../../packages/ts'),
pkgTypes: path.join(__dirname, '../../packages/types'),
pkgV8SnapshotRequire: path.join(__dirname, '../../packages/v8-snapshot-require'),
pkgWebConfig: path.join(__dirname, '../../packages/web-config')
} as const

View File

@@ -3,7 +3,7 @@ const { execSync } = require('child_process')
const executionEnv = process.env.CI ? 'ci' : 'local'
const postInstallCommands = {
local: 'patch-package && yarn-deduplicate --strategy=highest && yarn clean && gulp postinstall && yarn build',
local: 'patch-package && yarn-deduplicate --strategy=highest && yarn clean && gulp postinstall && yarn build && yarn build-v8-snapshot-dev',
ci: 'patch-package && yarn clean && gulp postinstall',
}

View File

@@ -1,46 +0,0 @@
const la = require('lazy-ass')
const is = require('check-more-types')
const { join } = require('path')
/* eslint-env mocha */
// Guards against scripts accidentally changing the process-wide working
// directory by requiring `lib/konfig` directly.
describe('konfig check', () => {
  /*
  script tests should NOT suddenly change the current working directory to
  packages/server - otherwise the local path filenames might be all wrong
  and unexpected. The current working directory changes when we
  require `packages/server/lib/konfig` which in turn requires
  `lib/cwd` which changes CWD.
  From the scripts unit tests we should not use `lib/konfig` directly,
  instead we should use `binary/get-config` script to get the konfig function.
  */
  // CWD captured before any konfig loading happens, for comparison afterwards.
  let cwd
  before(() => {
    cwd = process.cwd()
    // Sanity check: if CWD is already inside packages/server, something
    // loaded konfig before this suite ran.
    la(
      !cwd.includes(join('packages', 'server')),
      'process CWD is set to',
      cwd,
      'for some reason',
    )
    // if the above assertion breaks, it means some script in binary scripts
    // loads "lib/konfig" directly, which unexpectedly changes the CWD.
  })
  it('does not change CWD on load', () => {
    // get-config is expected to restore CWD after requiring konfig.
    const konfig = require('../binary/get-config')()
    const cwdAfter = process.cwd()
    la(
      cwd === cwdAfter,
      'previous cwd',
      cwd,
      'differs after loading konfig',
      cwdAfter,
    )
    la(is.fn(konfig), 'expected konfig to be a function', konfig)
  })
})

Some files were not shown because too many files have changed in this diff Show More