Merge branch 'develop' into e0bf81111e-master-into-develop

This commit is contained in:
Barthélémy Ledoux
2022-02-08 21:38:26 -06:00
committed by GitHub
67 changed files with 1123 additions and 1310 deletions

View File

@@ -1,2 +0,0 @@
# Always validate the PR title, and ignore the commits
titleOnly: true

View File

@@ -1,78 +0,0 @@
name: Merge develop into 10.0-release
on:
push:
branches:
- develop
jobs:
merge-develop-into-10-0-release:
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v2
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
- name: Set committer info
run: |
git config --local user.email "$(git log --format='%ae' HEAD^!)"
git config --local user.name "$(git log --format='%an' HEAD^!)"
- name: Checkout 10.0-release branch
run: git checkout 10.0-release
- name: Check for merge conflict
id: check-conflict
run: echo "::set-output name=merge_conflict::$(git merge-tree $(git merge-base HEAD develop) develop HEAD | egrep '<<<<<<<')"
- name: Merge develop into 10.0-release
id: merge-develop
run: git merge develop
if: ${{ !steps.check-conflict.outputs.merge_conflict }}
- name: Failed merge, set merged status as failed
run: echo "::set-output name=merge_conflict::'failed merge'"
if: ${{ steps.merge-develop.outcome != 'success' }}
- name: Push
run: git push
if: ${{ !steps.check-conflict.outputs.merge_conflict }}
- name: Checkout develop
run: git checkout develop
if: ${{ steps.check-conflict.outputs.merge_conflict }}
- name: Determine name of new branch
id: gen-names
run: |
echo "::set-output name=sha::$(git rev-parse --short HEAD)"
echo "::set-output name=branch_name::$(git rev-parse --short HEAD)-develop-into-10.0-release"
if: ${{ steps.check-conflict.outputs.merge_conflict }}
- name: Create a copy of develop on a new branch
run: git checkout -b ${{ steps.gen-names.outputs.branch_name }} develop
if: ${{ steps.check-conflict.outputs.merge_conflict }}
- name: Push branch to remote
run: git push origin ${{ steps.gen-names.outputs.branch_name }}
if: ${{ steps.check-conflict.outputs.merge_conflict }}
- name: Create Pull Request
uses: actions/github-script@v3
with:
script: |
const pull = await github.pulls.create({
owner: context.repo.owner,
repo: context.repo.repo,
base: '10.0-release',
head: '${{ steps.gen-names.outputs.branch_name }}',
title: 'chore: merge develop (${{ steps.gen-names.outputs.sha }}) into 10.0-release',
body: `There was a merge conflict when trying to automatically merge develop into 10.0-release. Please resolve the conflict and complete the merge.
DO NOT SQUASH AND MERGE
@${context.actor}`,
maintainer_can_modify: true,
})
await github.pulls.requestReviewers({
owner: context.repo.owner,
repo: context.repo.repo,
pull_number: pull.data.number,
reviewers: [context.actor],
})
await github.issues.addLabels({
owner: context.repo.owner,
repo: context.repo.repo,
issue_number: pull.data.number,
labels: ['auto-merge'],
})
if: ${{ steps.check-conflict.outputs.merge_conflict }}

View File

@@ -11,7 +11,8 @@ jobs:
uses: actions/checkout@v2
with:
fetch-depth: 0
token: ${{ secrets.GITHUB_TOKEN }}
# the default `GITHUB_TOKEN` cannot push to protected branches, so use `cypress-app-bot`'s token instead
token: ${{ secrets.BOT_GITHUB_TOKEN }}
- name: Set committer info
run: |
git config --local user.email "$(git log --format='%ae' HEAD^!)"

View File

@@ -0,0 +1,21 @@
name: "Semantic Pull Request"
on:
pull_request_target:
types:
- opened
- edited
- synchronize
jobs:
main:
name: Lint Title
runs-on: ubuntu-latest
steps:
# use a fork of the GitHub action - we cannot pull in untrusted third party actions
# see https://github.com/cypress-io/cypress/pull/20091#discussion_r801799647
- uses: cypress-io/action-semantic-pull-request@v4
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
with:
validateSingleCommit: true

View File

@@ -1,9 +0,0 @@
exports['getCDN npm package returns CDN s3 path 1'] = {
"input": {
"platform": "darwin-x64",
"filename": "cypress.tgz",
"version": "3.3.0",
"hash": "ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123"
},
"result": "https://cdn.cypress.io/beta/npm/3.3.0/ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123/cypress.tgz"
}

View File

@@ -1,26 +0,0 @@
exports['getCDN for binary'] = {
"input": {
"platform": "darwin-x64",
"filename": "cypress.zip",
"version": "3.3.0",
"hash": "ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123"
},
"result": "https://cdn.cypress.io/beta/binary/3.3.0/darwin-x64/ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123/cypress.zip"
}
exports['upload binary folder'] = {
"input": {
"platformArch": "darwin-x64",
"version": "3.3.0",
"hash": "ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123"
},
"result": "beta/binary/3.3.0/darwin-x64/ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123/"
}
exports['upload binary folder for platform'] = {
"input": {
"platformArch": "darwin-x64",
"version": "3.3.0"
},
"result": "beta/binary/3.3.0/darwin-x64"
}

Binary file not shown.

After

Width:  |  Height:  |  Size: 165 KiB

View File

@@ -1,4 +1,4 @@
{
"chrome:beta": "98.0.4758.74",
"chrome:stable": "97.0.4692.99"
"chrome:beta": "98.0.4758.80",
"chrome:stable": "98.0.4758.80"
}

View File

@@ -29,7 +29,7 @@ mainBuildFilters: &mainBuildFilters
only:
- develop
- 10.0-release
- node-17-maybe
- fix-ci-artifact-uploads
# usually we don't build Mac app - it takes a long time
# but sometimes we want to really confirm we are doing the right thing
@@ -525,10 +525,6 @@ commands:
post-install-comment:
description: Post GitHub comment with a blurb on how to install pre-release version
steps:
- run: ls -la
- run: ls -la binary-url.json npm-package-url.json
- run: cat binary-url.json
- run: cat npm-package-url.json
- run:
name: Post pre-release install comment
command: |
@@ -862,6 +858,7 @@ commands:
echo "Not code signing for this platform"
fi
- run:
name: Build the Cypress binary
environment:
DEBUG: electron-builder,electron-osx-sign*
# notarization on Mac can take a while
@@ -871,29 +868,13 @@ commands:
yarn binary-build --platform $PLATFORM --version $(node ./scripts/get-next-version.js)
- run:
name: Zip the binary
command: |
yarn binary-zip --platform $PLATFORM
command: yarn binary-zip --platform $PLATFORM
- store-npm-logs
- persist_to_workspace:
root: ~/
paths:
- cypress/cypress.zip
upload-binary:
steps:
- run:
name: upload unique binary
command: |
node scripts/binary.js upload-unique-binary \
--file cypress.zip \
--version $(node -p "require('./package.json').version")
- run: cat binary-url.json
- store-npm-logs
- persist_to_workspace:
root: ~/
paths:
- cypress/binary-url.json
build-cypress-npm-package:
parameters:
executor:
@@ -901,12 +882,11 @@ commands:
default: cy-doc
steps:
- run:
name: bump NPM version
name: Bump NPM version
command: yarn get-next-version --npm
- run:
name: build NPM package
command: |
yarn build --scope cypress
name: Build NPM package
command: yarn build --scope cypress
- run:
command: ls -la types
working_directory: cli/build
@@ -925,27 +905,36 @@ commands:
name: list created NPM package
command: ls -l
- store-npm-logs
- run: pwd
- persist_to_workspace:
root: ~/
paths:
- cypress/cypress.tgz
upload-npm-package:
upload-build-artifacts:
steps:
- run: ls -l
- run:
name: upload NPM package
name: Upload unique binary to S3
command: |
node scripts/binary.js upload-npm-package \
node scripts/binary.js upload-build-artifact \
--type binary \
--file cypress.zip \
--version $(node -p "require('./package.json').version")
- run:
name: Upload NPM package to S3
command: |
node scripts/binary.js upload-build-artifact \
--type npm-package \
--file cypress.tgz \
--version $(node -p "require('./package.json').version")
- store-npm-logs
- run: ls -l
- run: cat binary-url.json
- run: cat npm-package-url.json
- persist_to_workspace:
root: ~/
paths:
- cypress/binary-url.json
- cypress/npm-package-url.json
jobs:
@@ -1587,12 +1576,11 @@ jobs:
- run:
name: Check current branch to persist artifacts
command: |
if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "node-17-maybe" ]]; then
if [[ "$CIRCLE_BRANCH" != "develop" && "$CIRCLE_BRANCH" != "fix-ci-artifact-uploads" ]]; then
echo "Not uploading artifacts or posting install comment for this branch."
circleci-agent step halt
fi
- upload-binary
- upload-npm-package
- upload-build-artifacts
- post-install-comment
test-kitchensink:
@@ -2128,7 +2116,6 @@ linux-workflow: &linux-workflow
- runner-ct-integration-tests-chrome:
requires:
- build
- desktop-gui-integration-tests-7x:
requires:
- build
@@ -2190,7 +2177,6 @@ linux-workflow: &linux-workflow
- npm-react
- npm-mount-utils
- npm-vue
- npm-design-system
- npm-webpack-batteries-included-preprocessor
- npm-webpack-preprocessor
- npm-vite-dev-server

View File

@@ -85,7 +85,7 @@ const getVersionSpecifier = (startDir = path.resolve(__dirname, '../..')) => {
})
}
const betaNpmUrlRe = /^\/beta\/npm\/(?<version>[0-9.]+)\/(?<artifactSlug>.+?)\/cypress\.tgz$/
const betaNpmUrlRe = /^\/beta\/npm\/(?<version>[0-9.]+)\/(?<platformSlug>.+?)\/(?<artifactSlug>.+?)\/cypress\.tgz$/
// convert a prerelease NPM package .tgz URL to the corresponding binary .zip URL
const getBinaryUrlFromPrereleaseNpmUrl = (npmUrl) => {

View File

@@ -93,7 +93,7 @@
"postinstall-postinstall": "2.1.0",
"proxyquire": "2.1.3",
"resolve-pkg": "2.0.0",
"shelljs": "0.8.4",
"shelljs": "0.8.5",
"sinon": "7.2.2",
"snap-shot-it": "7.9.6",
"spawn-mock": "1.0.0",

View File

@@ -467,13 +467,13 @@ describe('/lib/tasks/install', function () {
})
it('returns binary url for prerelease npm url', function () {
expect(install._getBinaryUrlFromPrereleaseNpmUrl('https://cdn.cypress.io/beta/npm/5.1.1/ciprovider-branchname-sha/cypress.tgz'))
expect(install._getBinaryUrlFromPrereleaseNpmUrl('https://cdn.cypress.io/beta/npm/5.1.1/linux-x64/ciprovider-branchname-sha/cypress.tgz'))
.to.eq('https://cdn.cypress.io/beta/binary/5.1.1/linux-x64/ciprovider-branchname-sha/cypress.zip')
expect(install._getBinaryUrlFromPrereleaseNpmUrl('https://cdn.cypress.io/beta/npm/5.1.1/circle-develop-3fdfc3b453eb38ad3c0b079531e4dde6668e3dd0-436710/cypress.tgz'))
expect(install._getBinaryUrlFromPrereleaseNpmUrl('https://cdn.cypress.io/beta/npm/5.1.1/linux-x64/circle-develop-3fdfc3b453eb38ad3c0b079531e4dde6668e3dd0-436710/cypress.tgz'))
.to.eq('https://cdn.cypress.io/beta/binary/5.1.1/linux-x64/circle-develop-3fdfc3b453eb38ad3c0b079531e4dde6668e3dd0-436710/cypress.zip')
expect(install._getBinaryUrlFromPrereleaseNpmUrl('https://cdn.cypress.io/beta/npm/5.1.1/circle-develop/some/branch-3fdfc3b453eb38ad3c0b079531e4dde6668e3dd0-436710/cypress.tgz'))
expect(install._getBinaryUrlFromPrereleaseNpmUrl('https://cdn.cypress.io/beta/npm/5.1.1/linux-x64/circle-develop/some/branch-3fdfc3b453eb38ad3c0b079531e4dde6668e3dd0-436710/cypress.tgz'))
.to.eq('https://cdn.cypress.io/beta/binary/5.1.1/linux-x64/circle-develop/some/branch-3fdfc3b453eb38ad3c0b079531e4dde6668e3dd0-436710/cypress.zip')
})

View File

@@ -67,13 +67,7 @@ of Cypress. You can see the progress of the test projects by opening the status
![Screenshot of status checks](https://i.imgur.com/AsQwzgO.png)
#### :bangbang: Important :bangbang:
The `linux x64`, `win32 x64`, and `darwin x64` artifacts produced by CI are all placed in the same directory on the CDN. The version that was built last will overwrite the other versions in the directory. Until work is done to complete [#19771](https://github.com/cypress-io/cypress/issues/19771), you must ensure that the `linux` workflow publishes its artifacts **after** the `windows`/`mac` workflows. To guarantee this, you can re-run the `create-build-artifacts` job for the `linux` workflow within CircleCI after the initial builds have completed.
<img src="https://user-images.githubusercontent.com/1711637/150612076-ac1d233b-519a-443b-9fd4-950a8f0439ef.png" width="250" height="auto">
Once the `develop` branch for all test projects are reliably passing with the new changes and the `linux` binary is present at `https://cdn.cypress.io/beta/npm/X.Y.Z/<sha>/cypress.tgz`, publishing can proceed.
Once the `develop` branches for all test projects are reliably passing with the new changes and the `linux-x64` binary is present at `https://cdn.cypress.io/beta/binary/X.Y.Z/linux-x64/<sha>/cypress.zip`, and the `linux-x64` cypress npm package is present at `https://cdn.cypress.io/beta/npm/X.Y.Z/linux-x64/<sha>/cypress.tgz`, publishing can proceed.
### Steps to Publish a New Version
@@ -93,14 +87,15 @@ In the following instructions, "X.Y.Z" is used to denote the [next version of Cy
- To find the link to the package file `cypress.tgz`:
1. In GitHub, go to the latest commit (the one whose sha you used in the last step).
![commit-link](https://user-images.githubusercontent.com/1157043/80608728-33fe6100-8a05-11ea-8b53-375303757b67.png)
2. Scroll down past the changes to the comments. The first comment should be a `cypress-bot` comment that includes a line beginning `npm install ...`. Grab the `https://cdn.../npm/X.Y.Z/<long sha>/cypress.tgz` link.
![cdn-tgz-link](https://user-images.githubusercontent.com/1157043/80608736-3791e800-8a05-11ea-8d75-e4f80128e857.png)
- Make sure the linux binaries are present at that location. See [Before Publishing a New Version](#before-publishing-a-new-version).
- Publish to the npm registry straight from the URL:
2. Scroll down past the changes to the comments. The first comment should be a `cypress-bot` comment that includes a line beginning `npm install ...`. Grab the `https://cdn.../npm/X.Y.Z/<platform>/<long sha>/cypress.tgz` link.
![commit-bot-comment](../assets/cypress-bot-pre-release-comment.png)
- Make sure the `linux-x64` binary and npm package are present at the commented locations. See [Before Publishing a New Version](#before-publishing-a-new-version).
- Publish the `linux-x64` distribution to the npm registry straight from the URL:
```shell
npm publish https://cdn.cypress.io/beta/npm/X.Y.Z/linux-x64/<long sha>/cypress.tgz --tag dev
```
:bangbang: Important :bangbang: Be sure to release the `linux-x64` distribution.
5. Double-check that the new version has been published under the `dev` tag using `npm info cypress` or [available-versions](https://github.com/bahmutov/available-versions). `latest` should still point to the previous version. Example output:

View File

@@ -14,7 +14,7 @@
"chalk": "^2.4.2",
"eslint-rule-composer": "^0.3.0",
"lodash": "^4.17.15",
"shelljs": "^0.8.3"
"shelljs": "0.8.5"
},
"devDependencies": {
"eslint": "^7.22.0",

View File

@@ -175,7 +175,7 @@
"semantic-release": "17.2.3",
"semantic-release-monorepo": "7.0.3",
"semver": "7.3.2",
"shelljs": "0.8.3",
"shelljs": "0.8.5",
"shx": "0.3.3",
"sinon": "7.3.2",
"snap-shot-it": "7.9.3",

View File

@@ -185,6 +185,18 @@ describe('src/cy/commands/actions/selectFile', () => {
})
})
it('uses the AUT\'s File constructor', () => {
cy.window().then(($autWindow) => {
cy.get('#basic').selectFile('@foo', { action: 'select' }).then((input) => {
expect(input[0].files[0]).to.be.instanceOf($autWindow.File)
})
cy.get('#basic').selectFile('@foo', { action: 'drag-drop' }).then((input) => {
expect(input[0].files[0]).to.be.instanceOf($autWindow.File)
})
})
})
describe('shorthands', () => {
const validJsonString = `{
"foo": 1,

View File

@@ -1,4 +1,4 @@
const { assertLogLength } = require('../../support/utils')
const { assertLogLength } = require('../../../support/utils')
const { _, $, Promise } = Cypress
@@ -225,265 +225,6 @@ describe('src/cy/commands/querying', () => {
})
})
context('#within', () => {
it('invokes callback function with runnable.ctx', function () {
const ctx = this
cy.get('div:first').within(function () {
expect(ctx === this).to.be.true
})
})
it('scopes additional GET finders to the subject', () => {
const input = cy.$$('#by-name input:first')
cy.get('#by-name').within(() => {
cy.get('input:first').then(($input) => {
expect($input.get(0)).to.eq(input.get(0))
})
})
})
it('scopes additional CONTAINS finders to the subject', () => {
const span = cy.$$('#nested-div span:contains(foo)')
cy.contains('foo').then(($span) => {
expect($span.get(0)).not.to.eq(span.get(0))
})
cy.get('#nested-div').within(() => {
cy.contains('foo').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
it('does not change the subject', () => {
const form = cy.$$('#by-name')
cy.get('#by-name').within(() => {}).then(($form) => {
expect($form.get(0)).to.eq(form.get(0))
})
})
it('can call child commands after within on the same subject', () => {
const input = cy.$$('#by-name input:first')
cy.get('#by-name').within(() => {}).find('input:first').then(($input) => {
expect($input.get(0)).to.eq(input.get(0))
})
})
it('supports nested withins', () => {
const span = cy.$$('#button-text button span')
cy.get('#button-text').within(() => {
cy.get('button').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
})
it('supports complicated nested withins', () => {
const span1 = cy.$$('#button-text a span')
const span2 = cy.$$('#button-text button span')
cy.get('#button-text').within(() => {
cy.get('a').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span1.get(0))
})
})
cy.get('button').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span2.get(0))
})
})
})
})
it('clears withinSubject after within is over', () => {
const input = cy.$$('input:first')
const span = cy.$$('#button-text button span')
cy.get('#button-text').within(() => {
cy.get('button').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
cy.get('input:first').then(($input) => {
expect($input.get(0)).to.eq(input.get(0))
})
})
it('removes command:start listeners after within is over', () => {
cy.get('#button-text').within(() => {
cy.get('button').within(() => {
cy.get('span')
})
})
cy.then(() => {
expect(cy._events).not.to.have.property('command:start')
})
})
it('clears withinSubject even if next is null', (done) => {
const span = cy.$$('#button-text button span')
// should be defined here because next would have been
// null and withinSubject would not have been cleared
cy.once('command:queue:before:end', () => {
expect(cy.state('withinSubject')).not.to.be.undefined
})
cy.once('command:queue:end', () => {
expect(cy.state('withinSubject')).to.be.null
done()
})
cy.get('#button-text').within(() => {
cy.get('button span').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
// https://github.com/cypress-io/cypress/issues/4757
it('subject is restored after within() call', () => {
cy.get('#wrapper').within(() => {
cy.get('#upper').should('contain.text', 'New York')
})
.should('have.id', 'wrapper')
})
// https://github.com/cypress-io/cypress/issues/5183
it('contains() works after within() call', () => {
cy.get(`#wrapper`).within(() => cy.get(`#upper`)).should(`contain.text`, `New York`)
cy.contains(`button`, `button`).should(`exist`)
})
describe('.log', () => {
beforeEach(function () {
this.logs = []
cy.on('log:added', (attrs, log) => {
if (attrs.name === 'within') {
this.lastLog = log
this.logs.push(log)
}
})
return null
})
it('can silence logging', () => {
cy.get('div:first').within({ log: false }, () => {}).then(function () {
assertLogLength(this.logs, 0)
})
})
it('logs immediately before resolving', (done) => {
const div = cy.$$('div:first')
cy.on('log:added', (attrs, log) => {
if (log.get('name') === 'within') {
expect(log.get('state')).to.eq('pending')
expect(log.get('message')).to.eq('')
expect(log.get('$el').get(0)).to.eq(div.get(0))
done()
}
})
cy.get('div:first').within(() => {})
})
it('snapshots after clicking', () => {
cy.get('div:first').within(() => {})
.then(function () {
const { lastLog } = this
expect(lastLog.get('snapshots').length).to.eq(1)
expect(lastLog.get('snapshots')[0]).to.be.an('object')
})
})
})
describe('errors', {
defaultCommandTimeout: 100,
}, () => {
beforeEach(function () {
this.logs = []
cy.on('log:added', (attrs, log) => {
this.lastLog = log
this.logs.push(log)
})
return null
})
it('logs once when not dom subject', function (done) {
cy.on('fail', (err) => {
const { lastLog } = this
assertLogLength(this.logs, 1)
expect(lastLog.get('error')).to.eq(err)
done()
})
cy.noop().within(() => {})
})
it('throws when not a DOM subject', (done) => {
cy.on('fail', (err) => {
done()
})
cy.noop().within(() => {})
})
_.each(['', [], {}, 1, null, undefined], (value) => {
it(`throws if passed anything other than a function, such as: ${value}`, (done) => {
cy.on('fail', (err) => {
expect(err.message).to.include('`cy.within()` must be called with a function.')
expect(err.docsUrl).to.eq('https://on.cypress.io/within')
done()
})
cy.get('body').within(value)
})
})
it('throws when subject is not in the document', (done) => {
cy.on('command:end', () => {
cy.$$('#list').remove()
})
cy.on('fail', (err) => {
expect(err.message).to.include('`cy.within()` failed because this element')
done()
})
cy.get('#list').within(() => {})
})
})
})
context('#root', () => {
it('returns html', () => {
const html = cy.$$('html')

View File

@@ -1,4 +1,4 @@
const helpers = require('../../support/helpers')
const helpers = require('../../../support/helpers')
const { _ } = Cypress
@@ -7,34 +7,6 @@ describe('src/cy/commands/querying - shadow dom', () => {
cy.visit('/fixtures/shadow-dom.html')
})
context('#within', () => {
it('finds element within shadow dom with includeShadowDom option', () => {
cy.get('#parent-of-shadow-container-0').within(() => {
cy
.get('p', { includeShadowDom: true })
.should('have.length', 1)
.should('have.text', 'Shadow Content 3')
})
})
it('when within subject is shadow root, finds element without needing includeShadowDom option', () => {
cy.get('#shadow-element-1').shadow().within(() => {
cy
.get('p')
.should('have.length', 1)
.should('have.text', 'Shadow Content 1')
})
})
it('when within subject is already in shadow dom, finds element without needing includeShadowDom option', () => {
cy.get('.shadow-8-nested-1', { includeShadowDom: true }).within(() => {
cy
.get('.shadow-8-nested-5')
.should('have.text', '8')
})
})
})
context('#get', () => {
it('finds elements within shadow roots', () => {
cy.get('.shadow-1', { includeShadowDom: true })

View File

@@ -0,0 +1,300 @@
const { assertLogLength } = require('../../../support/utils')
const { _ } = Cypress
describe('src/cy/commands/querying/within', () => {
context('#within', () => {
beforeEach(() => {
cy.visit('/fixtures/dom.html')
})
it('invokes callback function with runnable.ctx', function () {
const ctx = this
cy.get('div:first').within(function () {
expect(ctx === this).to.be.true
})
})
it('scopes additional GET finders to the subject', () => {
const input = cy.$$('#by-name input:first')
cy.get('#by-name').within(() => {
cy.get('input:first').then(($input) => {
expect($input.get(0)).to.eq(input.get(0))
})
})
})
it('scopes additional CONTAINS finders to the subject', () => {
const span = cy.$$('#nested-div span:contains(foo)')
cy.contains('foo').then(($span) => {
expect($span.get(0)).not.to.eq(span.get(0))
})
cy.get('#nested-div').within(() => {
cy.contains('foo').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
it('does not change the subject', () => {
const form = cy.$$('#by-name')
cy.get('#by-name').within(() => {}).then(($form) => {
expect($form.get(0)).to.eq(form.get(0))
})
})
it('can call child commands after within on the same subject', () => {
const input = cy.$$('#by-name input:first')
cy.get('#by-name').within(() => {}).find('input:first').then(($input) => {
expect($input.get(0)).to.eq(input.get(0))
})
})
it('supports nested withins', () => {
const span = cy.$$('#button-text button span')
cy.get('#button-text').within(() => {
cy.get('button').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
})
it('supports complicated nested withins', () => {
const span1 = cy.$$('#button-text a span')
const span2 = cy.$$('#button-text button span')
cy.get('#button-text').within(() => {
cy.get('a').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span1.get(0))
})
})
cy.get('button').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span2.get(0))
})
})
})
})
it('clears withinSubject after within is over', () => {
const input = cy.$$('input:first')
const span = cy.$$('#button-text button span')
cy.get('#button-text').within(() => {
cy.get('button').within(() => {
cy.get('span').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
cy.get('input:first').then(($input) => {
expect($input.get(0)).to.eq(input.get(0))
})
})
it('removes command:start listeners after within is over', () => {
cy.get('#button-text').within(() => {
cy.get('button').within(() => {
cy.get('span')
})
})
cy.then(() => {
expect(cy._events).not.to.have.property('command:start')
})
})
it('clears withinSubject even if next is null', (done) => {
const span = cy.$$('#button-text button span')
// should be defined here because next would have been
// null and withinSubject would not have been cleared
cy.once('command:queue:before:end', () => {
expect(cy.state('withinSubject')).not.to.be.undefined
})
cy.once('command:queue:end', () => {
expect(cy.state('withinSubject')).to.be.null
done()
})
cy.get('#button-text').within(() => {
cy.get('button span').then(($span) => {
expect($span.get(0)).to.eq(span.get(0))
})
})
})
// https://github.com/cypress-io/cypress/issues/4757
it('subject is restored after within() call', () => {
cy.get('#wrapper').within(() => {
cy.get('#upper').should('contain.text', 'New York')
})
.should('have.id', 'wrapper')
})
// https://github.com/cypress-io/cypress/issues/5183
it('contains() works after within() call', () => {
cy.get(`#wrapper`).within(() => cy.get(`#upper`)).should(`contain.text`, `New York`)
cy.contains(`button`, `button`).should(`exist`)
})
describe('.log', () => {
beforeEach(function () {
this.logs = []
cy.on('log:added', (attrs, log) => {
if (attrs.name === 'within') {
this.lastLog = log
this.logs.push(log)
}
})
return null
})
it('can silence logging', () => {
cy.get('div:first').within({ log: false }, () => {}).then(function () {
assertLogLength(this.logs, 0)
})
})
it('logs immediately before resolving', (done) => {
const div = cy.$$('div:first')
cy.on('log:added', (attrs, log) => {
if (log.get('name') === 'within') {
expect(log.get('state')).to.eq('pending')
expect(log.get('message')).to.eq('')
expect(log.get('$el').get(0)).to.eq(div.get(0))
done()
}
})
cy.get('div:first').within(() => {})
})
it('snapshots after clicking', () => {
cy.get('div:first').within(() => {})
.then(function () {
const { lastLog } = this
expect(lastLog.get('snapshots').length).to.eq(1)
expect(lastLog.get('snapshots')[0]).to.be.an('object')
})
})
})
describe('errors', {
defaultCommandTimeout: 100,
}, () => {
beforeEach(function () {
this.logs = []
cy.on('log:added', (attrs, log) => {
this.lastLog = log
this.logs.push(log)
})
return null
})
it('logs once when not dom subject', function (done) {
cy.on('fail', (err) => {
const { lastLog } = this
assertLogLength(this.logs, 1)
expect(lastLog.get('error')).to.eq(err)
done()
})
cy.noop().within(() => {})
})
it('throws when not a DOM subject', (done) => {
cy.on('fail', (err) => {
done()
})
cy.noop().within(() => {})
})
_.each(['', [], {}, 1, null, undefined], (value) => {
it(`throws if passed anything other than a function, such as: ${value}`, (done) => {
cy.on('fail', (err) => {
expect(err.message).to.include('`cy.within()` must be called with a function.')
expect(err.docsUrl).to.eq('https://on.cypress.io/within')
done()
})
cy.get('body').within(value)
})
})
it('throws when subject is not in the document', (done) => {
cy.on('command:end', () => {
cy.$$('#list').remove()
})
cy.on('fail', (err) => {
expect(err.message).to.include('`cy.within()` failed because this element')
done()
})
cy.get('#list').within(() => {})
})
})
})
context('#within - shadow dom', () => {
beforeEach(() => {
cy.visit('/fixtures/shadow-dom.html')
})
it('finds element within shadow dom with includeShadowDom option', () => {
cy.get('#parent-of-shadow-container-0').within(() => {
cy
.get('p', { includeShadowDom: true })
.should('have.length', 1)
.should('have.text', 'Shadow Content 3')
})
})
it('when within subject is shadow root, finds element without needing includeShadowDom option', () => {
cy.get('#shadow-element-1').shadow().within(() => {
cy
.get('p')
.should('have.length', 1)
.should('have.text', 'Shadow Content 1')
})
})
it('when within subject is already in shadow dom, finds element without needing includeShadowDom option', () => {
cy.get('.shadow-8-nested-1', { includeShadowDom: true }).within(() => {
cy
.get('.shadow-8-nested-5')
.should('have.text', '8')
})
})
})
})

View File

@@ -759,6 +759,18 @@ describe('src/cy/commands/request', () => {
})
})
describe('when request origin equals browsers origin', () => {
it('sends correct message', () => {
Cypress.backend
.withArgs('http:request')
.resolves({ isOkStatusCode: true, status: 201 })
cy.request(`${window.location.origin}/foo`).then(function () {
expect(this.lastLog.invoke('renderProps').message).to.equal('GET 201 /foo')
})
})
})
describe('when response is successful', () => {
it('sends correct indicator', () => {
Cypress.backend

View File

@@ -1,11 +1,17 @@
// @ts-nocheck
import $ from 'jquery'
import JQuery from 'jquery'
import _ from 'lodash'
import { scrollTo } from './jquery.scrollto'
import $dom from '../dom'
// Add missing types.
interface ExtendedJQueryStatic extends JQueryStatic {
find: any
expr: JQuery.Selectors & { filters: any }
}
const $: ExtendedJQueryStatic = JQuery as any
// force jquery to have the same visible
// and hidden logic as cypress

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
@@ -7,7 +6,7 @@ import $utils from '../../../cypress/utils'
import $errUtils from '../../../cypress/error_utils'
import $elements from '../../../dom/elements'
const checkOrUncheck = (Cypress, cy, type, subject, values = [], userOptions = {}) => {
const checkOrUncheck = (Cypress, cy, type, subject, values: any[] = [], userOptions = {}) => {
// we're not handling conversion of values to strings
// in case we've received numbers
@@ -18,15 +17,15 @@ const checkOrUncheck = (Cypress, cy, type, subject, values = [], userOptions = {
values = []
} else {
// make sure we're an array of values
values = [].concat(values)
values = ([] as any[]).concat(values)
}
// keep an array of subjects which
// are potentially reduced down
// to new filtered subjects
const matchingElements = []
const matchingElements: HTMLElement[] = []
const options = _.defaults({}, userOptions, {
const options: Record<string, any> = _.defaults({}, userOptions, {
$el: subject,
log: true,
force: false,
@@ -75,7 +74,7 @@ const checkOrUncheck = (Cypress, cy, type, subject, values = [], userOptions = {
matchingElements.push(el)
}
const consoleProps = {
const consoleProps: Record<string, any> = {
'Applied To': $dom.getElements($el),
'Elements': $el.length,
}

View File

@@ -35,8 +35,10 @@ const tryMockWebkit = (item) => {
return item
}
const createDataTransfer = (files: Cypress.FileReferenceObject[]): DataTransfer => {
const dataTransfer = new DataTransfer()
const createDataTransfer = (files: Cypress.FileReferenceObject[], eventTarget: JQuery<any>): DataTransfer => {
// obtain a reference to the `targetWindow` so we can use the right instances of the `File` and `DataTransfer` classes
const targetWindow = (eventTarget[0] as HTMLElement).ownerDocument.defaultView || window
const dataTransfer = new targetWindow.DataTransfer()
files.forEach(({
contents,
@@ -44,7 +46,7 @@ const createDataTransfer = (files: Cypress.FileReferenceObject[]): DataTransfer
mimeType = mime.lookup(fileName) || '',
lastModified = Date.now(),
}) => {
const file = new File([contents], fileName, { lastModified, type: mimeType })
const file = new targetWindow.File([contents], fileName, { lastModified, type: mimeType })
dataTransfer.items.add(file)
})
@@ -302,7 +304,7 @@ export default (Commands, Cypress, cy, state, config) => {
})
}
const dataTransfer = createDataTransfer(filesArray)
const dataTransfer = createDataTransfer(filesArray, eventTarget)
ACTIONS[options.action as string](eventTarget.get(0), dataTransfer, coords, state)

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
@@ -59,7 +57,7 @@ export default (Commands, Cypress, cy, state, config) => {
({ options: userOptions, position, x, y } = $actionability.getPositionFromArguments(positionOrX, y, userOptions))
const options = _.defaults({}, userOptions, {
const options: Record<string, any> = _.defaults({}, userOptions, {
log: true,
$el: subject,
bubbles: true,

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
@@ -15,7 +14,11 @@ const debug = debugFn('cypress:driver:command:type')
export default function (Commands, Cypress, cy, state, config) {
const { keyboard } = cy.devices
function type (subject, chars, options = {}) {
// Note: These "change type of `any` to X" comments are written instead of changing them directly
// because Cypress extends user-given options with Cypress internal options.
// These comments will be removed after removing `// @ts-nocheck` comments in `packages/driver`.
// TODO: change the type of `any` to `Partial<Cypress.TypeOptions>`
function type (subject, chars, options: any = {}) {
const userOptions = options
let updateTable
@@ -366,7 +369,7 @@ export default function (Commands, Cypress, cy, state, config) {
// Firefox sends a click event automatically.
if (!Cypress.isBrowser('firefox')) {
const ctor = $dom.getDocumentFromElement(el).defaultView?.PointerEvent
const event = new ctor('click')
const event = new ctor!('click')
el.dispatchEvent(event)
}
@@ -510,7 +513,8 @@ export default function (Commands, Cypress, cy, state, config) {
})
}
function clear (subject, options = {}) {
// TODO: change the type of `any` to `Partial<ClearOptions>`
function clear (subject, options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {

View File

@@ -1,13 +1,11 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
import $dom from '../../dom'
import $utils from '../../cypress/utils'
import $errUtils from '../../cypress/error_utils'
import $errUtils, { CypressError } from '../../cypress/error_utils'
const returnFalseIfThenable = (key, ...args) => {
const returnFalseIfThenable = (key, ...args): boolean => {
if ((key === 'then') && _.isFunction(args[0]) && _.isFunction(args[1])) {
// https://github.com/cypress-io/cypress/issues/111
// if we're inside of a promise then the promise lib will naturally
@@ -22,6 +20,8 @@ const returnFalseIfThenable = (key, ...args) => {
return false
}
return true
}
const primitiveToObject = (memo) => {
@@ -181,7 +181,7 @@ export default function (Commands, Cypress, cy, state) {
const invokeFn = (subject, userOptionsOrStr, ...args) => {
const userOptionsPassed = _.isObject(userOptionsOrStr) && !_.isFunction(userOptionsOrStr)
let userOptions = null
let userOptions: Record<string, any> | null = null
let str = null
if (!userOptionsPassed) {
@@ -219,7 +219,7 @@ export default function (Commands, Cypress, cy, state) {
const message = getMessage()
let traversalErr = null
let traversalErr: CypressError | null = null
// copy userOptions because _log is added below.
const options = _.extend({}, userOptions)
@@ -568,7 +568,7 @@ export default function (Commands, Cypress, cy, state) {
return ret
}
return thenFn(el, userOptions, callback, state)
return thenFn(el, userOptions, callback)
}
// generate a real array since bluebird is finicky and
@@ -586,9 +586,9 @@ export default function (Commands, Cypress, cy, state) {
// cy.resolve + cy.wrap are upgraded to handle
// promises
Commands.addAll({ prevSubject: 'optional' }, {
then () {
then (subject, userOptions, fn) {
// eslint-disable-next-line prefer-rest-params
return thenFn.apply(this, arguments)
return thenFn.apply(this, [subject, userOptions, fn])
},
})

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
@@ -110,7 +108,7 @@ export default function (Commands, Cypress, cy, state, config) {
})
}
const getAndClear = (log, timeout, options = {}) => {
const getAndClear = (log?, timeout?, options = {}) => {
return automateCookies('get:cookies', options, log, timeout)
.then((resp) => {
// bail early if we got no cookies!
@@ -166,7 +164,8 @@ export default function (Commands, Cypress, cy, state, config) {
})
return Commands.addAll({
getCookie (name, options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.Loggable & Cypress.Timeoutable>`
getCookie (name, options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {
@@ -212,7 +211,8 @@ export default function (Commands, Cypress, cy, state, config) {
.catch(handleBackendError('getCookie', 'reading the requested cookie from', onFail))
},
getCookies (options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.Loggable & Cypress.Timeoutable>`
getCookies (options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {
@@ -250,7 +250,8 @@ export default function (Commands, Cypress, cy, state, config) {
.catch(handleBackendError('getCookies', 'reading cookies from', options._log))
},
setCookie (name, value, options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.SetCookieOptions>`
setCookie (name, value, options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {
@@ -331,7 +332,8 @@ export default function (Commands, Cypress, cy, state, config) {
}).catch(handleBackendError('setCookie', 'setting the requested cookie in', onFail))
},
clearCookie (name, options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.Loggable & Cypress.Timeoutable>`
clearCookie (name, options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {
@@ -380,7 +382,8 @@ export default function (Commands, Cypress, cy, state, config) {
.catch(handleBackendError('clearCookie', 'clearing the requested cookie in', onFail))
},
clearCookies (options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.Loggable & Cypress.Timeoutable>`
clearCookies (options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
@@ -7,7 +5,8 @@ import $errUtils from '../../cypress/error_utils'
export default (Commands, Cypress, cy) => {
Commands.addAll({
exec (cmd, options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.ExecOptions>`
exec (cmd, options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import { basename } from 'path'
@@ -6,7 +5,8 @@ import $errUtils from '../../cypress/error_utils'
export default (Commands, Cypress, cy, state) => {
Commands.addAll({
readFile (file, encoding, options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.Loggable & Cypress.Timeoutable>`
readFile (file, encoding, options: any = {}) {
let userOptions = options
if (_.isObject(encoding)) {
@@ -109,7 +109,8 @@ export default (Commands, Cypress, cy, state) => {
return verifyAssertions()
},
writeFile (fileName, contents, encoding, options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.WriteFileOptions & Cypress.Timeoutable>`
writeFile (fileName, contents, encoding, options: any = {}) {
let userOptions = options
if (_.isObject(encoding)) {

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
import { basename } from 'path'
@@ -44,7 +42,7 @@ export default (Commands, Cypress, cy, state, config) => {
return Promise.resolve(clone(resp))
}
let options = {}
let options: Record<string, any> = {}
if (_.isObject(args[0])) {
options = args[0]

View File

@@ -71,7 +71,7 @@ export const allCommands = {
Misc,
Popups,
Navigation,
Querying,
...Querying,
Request,
Sessions,
Screenshot,

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
@@ -8,7 +6,8 @@ const { throwErrByPath } = $errUtils
export default (Commands, Cypress, cy) => {
Commands.addAll({
url (options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.UrlOptions>`
url (options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, { log: true })
@@ -39,7 +38,8 @@ export default (Commands, Cypress, cy) => {
return resolveHref()
},
hash (options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.Loggable & Cypress.Timeoutable>`
hash (options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, { log: true })

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Promise from 'bluebird'
@@ -50,7 +49,8 @@ export default (Commands, Cypress, cy, state) => {
return null
},
wrap (arg, options = {}) {
// TODO: change the type of `any` to `Partial<Cypress.Loggable & Cypress.Timeoutable>`
wrap (arg, options: any = {}) {
const userOptions = options
options = _.defaults({}, userOptions, {

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
/* global cy, Cypress */
import _ from 'lodash'
import whatIsCircular from '@cypress/what-is-circular'
import UrlParse from 'url-parse'
@@ -15,10 +13,10 @@ import debugFn from 'debug'
const debug = debugFn('cypress:driver:navigation')
let id = null
let previousDomainVisited = null
let hasVisitedAboutBlank = null
let currentlyVisitingAboutBlank = null
let knownCommandCausedInstability = null
let previousDomainVisited: boolean = false
let hasVisitedAboutBlank: boolean = false
let currentlyVisitingAboutBlank: boolean = false
let knownCommandCausedInstability: boolean = false
const REQUEST_URL_OPTS = 'auth failOnStatusCode retryOnNetworkFailure retryOnStatusCodeFailure retryIntervals method body headers'
.split(' ')
@@ -27,7 +25,7 @@ const VISIT_OPTS = 'url log onBeforeLoad onLoad timeout requestTimeout'
.split(' ')
.concat(REQUEST_URL_OPTS)
const reset = (test = {}) => {
const reset = (test: any = {}) => {
knownCommandCausedInstability = false
// continuously reset this
@@ -62,7 +60,7 @@ const timedOutWaitingForPageLoad = (ms, log) => {
}
const cannotVisitDifferentOrigin = (origin, previousUrlVisited, remoteUrl, existingUrl, log) => {
const differences = []
const differences: string[] = []
if (remoteUrl.protocol !== existingUrl.protocol) {
differences.push('protocol')
@@ -171,7 +169,7 @@ const navigationChanged = (Cypress, cy, state, source, arg) => {
end: true,
snapshot: true,
consoleProps () {
const obj = {
const obj: Record<string, any> = {
'New Url': url,
}
@@ -265,7 +263,7 @@ const stabilityChanged = (Cypress, state, config, stable) => {
return
}
const options = {}
const options: Record<string, any> = {}
_.defaults(options, {
timeout: config('pageLoadTimeout'),
@@ -404,6 +402,14 @@ const normalizeTimeoutOptions = (options) => {
.value()
}
type NotOkResponseError = Error & {
gotResponse: boolean
}
type InvalidContentTypeError = Error & {
invalidContentType: boolean
}
export default (Commands, Cypress, cy, state, config) => {
reset()
@@ -420,7 +426,7 @@ export default (Commands, Cypress, cy, state, config) => {
Cypress.on('stability:changed', (bool, event) => {
// only send up page loading events when we're
// not stable!
stabilityChanged(Cypress, state, config, bool, event)
stabilityChanged(Cypress, state, config, bool)
})
Cypress.on('navigation:changed', (source, arg) => {
@@ -445,11 +451,11 @@ export default (Commands, Cypress, cy, state, config) => {
url,
normalizeTimeoutOptions(options),
)
.then((resp = {}) => {
.then((resp: any = {}) => {
if (!resp.isOkStatusCode) {
// if we didn't even get an OK response
// then immediately die
const err = new Error
const err: NotOkResponseError = new Error as any
err.gotResponse = true
_.extend(err, resp)
@@ -459,7 +465,7 @@ export default (Commands, Cypress, cy, state, config) => {
if (!resp.isHtml) {
// throw invalid contentType error
const err = new Error
const err: InvalidContentTypeError = new Error as any
err.invalidContentType = true
_.extend(err, resp)
@@ -525,7 +531,7 @@ export default (Commands, Cypress, cy, state, config) => {
// clear the current timeout
cy.clearTimeout('reload')
let cleanup = null
let cleanup: (() => any) | null = null
const options = _.defaults({}, userOptions, {
log: true,
timeout: config('pageLoadTimeout'),
@@ -579,7 +585,7 @@ export default (Commands, Cypress, cy, state, config) => {
},
go (numberOrString, userOptions = {}) {
const options = _.defaults({}, userOptions, {
const options: Record<string, any> = _.defaults({}, userOptions, {
log: true,
timeout: config('pageLoadTimeout'),
})
@@ -595,7 +601,7 @@ export default (Commands, Cypress, cy, state, config) => {
$errUtils.throwErrByPath('go.invalid_number', { onFail: options._log })
}
let cleanup = null
let cleanup: (() => any) | null = null
if (options._log) {
options._log.snapshot('before', { next: 'after' })
@@ -669,7 +675,7 @@ export default (Commands, Cypress, cy, state, config) => {
case 'forward': return goNumber(1)
case 'back': return goNumber(-1)
default:
$errUtils.throwErrByPath('go.invalid_direction', {
return $errUtils.throwErrByPath('go.invalid_direction', {
onFail: options._log,
args: { str },
})
@@ -684,10 +690,11 @@ export default (Commands, Cypress, cy, state, config) => {
return goString(numberOrString)
}
$errUtils.throwErrByPath('go.invalid_argument', { onFail: options._log })
return $errUtils.throwErrByPath('go.invalid_argument', { onFail: options._log })
},
visit (url, options = {}) {
// TODO: Change the type of `any` to `Partial<Cypress.VisitOptions>`.
visit (url, options: any = {}) {
if (options.url && url) {
$errUtils.throwErrByPath('visit.no_duplicate_url', { args: { optionsUrl: options.url, url } })
}
@@ -778,7 +785,7 @@ export default (Commands, Cypress, cy, state, config) => {
url = $Location.mergeUrlWithParams(url, qs)
}
let cleanup = null
let cleanup: (() => any) | null = null
// clear the current timeout
cy.clearTimeout('visit')
@@ -801,7 +808,7 @@ export default (Commands, Cypress, cy, state, config) => {
})
options.onBeforeLoad?.call(runnable.ctx, contentWindow)
} catch (err) {
} catch (err: any) {
err.isCallbackError = true
onBeforeLoadError = err
}
@@ -847,7 +854,10 @@ export default (Commands, Cypress, cy, state, config) => {
})
}
const onLoad = ({ runOnLoadCallback, totalTime }) => {
const onLoad = ({ runOnLoadCallback, totalTime }: {
runOnLoadCallback?: boolean
totalTime?: number
}) => {
// reset window on load
win = state('window')
@@ -855,7 +865,7 @@ export default (Commands, Cypress, cy, state, config) => {
if (runOnLoadCallback !== false) {
try {
options.onLoad?.call(runnable.ctx, win)
} catch (err) {
} catch (err: any) {
// mark these as user callback errors, so they're treated differently
// than Node.js errors when caught below
err.isCallbackError = true
@@ -930,7 +940,9 @@ export default (Commands, Cypress, cy, state, config) => {
}
return changeIframeSrc(remote.href, 'hashchange')
.then(onLoad)
.then(() => {
return onLoad({})
})
}
if (existingHash) {
@@ -945,7 +957,7 @@ export default (Commands, Cypress, cy, state, config) => {
}
return requestUrl(url, options)
.then((resp = {}) => {
.then((resp: any = {}) => {
let { url, originalUrl, cookies, redirects, filePath } = resp
// reapply the existing hash
@@ -1000,7 +1012,7 @@ export default (Commands, Cypress, cy, state, config) => {
// tell our backend we're changing domains
// TODO: add in other things we want to preserve
// state for like scrollTop
let s = {
let s: Record<string, any> = {
currentId: id,
tests: Cypress.runner.getTestsState(),
startTime: Cypress.runner.getStartTime(),

View File

@@ -0,0 +1,7 @@
import * as Querying from './querying'
import * as Within from './within'
export {
Querying,
Within,
}

View File

@@ -1,12 +1,11 @@
import _ from 'lodash'
import Promise from 'bluebird'
import { $Command } from '../../cypress/command'
import $dom from '../../dom'
import $elements from '../../dom/elements'
import $errUtils from '../../cypress/error_utils'
import { resolveShadowDomInclusion } from '../../cypress/shadow_dom_utils'
import { getAliasedRequests, isDynamicAliasingPossible } from '../net-stubbing/aliasing'
import $dom from '../../../dom'
import $elements from '../../../dom/elements'
import $errUtils from '../../../cypress/error_utils'
import { resolveShadowDomInclusion } from '../../../cypress/shadow_dom_utils'
import { getAliasedRequests, isDynamicAliasingPossible } from '../../net-stubbing/aliasing'
export default (Commands, Cypress, cy, state) => {
Commands.addAll({
@@ -603,105 +602,6 @@ export default (Commands, Cypress, cy, state) => {
},
})
Commands.addAll({ prevSubject: ['element', 'document'] }, {
within (subject, options, fn) {
let userOptions = options
const ctx = this
if (_.isUndefined(fn)) {
fn = userOptions
userOptions = {}
}
options = _.defaults({}, userOptions, { log: true })
if (options.log) {
options._log = Cypress.log({
$el: subject,
message: '',
timeout: options.timeout,
})
}
if (!_.isFunction(fn)) {
$errUtils.throwErrByPath('within.invalid_argument', { onFail: options._log })
}
// reference the next command after this
// within. when that command runs we'll
// know to remove withinSubject
const next = state('current').get('next')
// backup the current withinSubject
// this prevents a bug where we null out
// withinSubject when there are nested .withins()
// we want the inner within to restore the outer
// once its done
const prevWithinSubject = state('withinSubject')
state('withinSubject', subject)
// https://github.com/cypress-io/cypress/pull/8699
// An internal command is inserted to create a divider between
// commands inside within() callback and commands chained to it.
const restoreCmdIndex = state('index') + 1
cy.queue.insert(restoreCmdIndex, $Command.create({
args: [subject],
name: 'within-restore',
fn: (subject) => subject,
}))
state('index', restoreCmdIndex)
fn.call(ctx, subject)
const cleanup = () => cy.removeListener('command:start', setWithinSubject)
// we need a mechanism to know when we should remove
// our withinSubject so we dont accidentally keep it
// around after the within callback is done executing
// so when each command starts, check to see if this
// is the command which references our 'next' and
// if so, remove the within subject
const setWithinSubject = (obj) => {
if (obj !== next) {
return
}
// okay so what we're doing here is creating a property
// which stores the 'next' command which will reset the
// withinSubject. If two 'within' commands reference the
// exact same 'next' command, then this prevents accidentally
// resetting withinSubject more than once. If they point
to different 'next's then it's okay
if (next !== state('nextWithinSubject')) {
state('withinSubject', prevWithinSubject || null)
state('nextWithinSubject', next)
}
// regardless, nuke these listeners
cleanup()
}
// if next is defined then we know we'll eventually
// unbind these listeners
if (next) {
cy.on('command:start', setWithinSubject)
} else {
// remove our listener if we happen to reach the end
// event which will finalize cleanup if there was no next obj
cy.once('command:queue:before:end', () => {
cleanup()
state('withinSubject', null)
})
}
return subject
},
})
Commands.add('shadow', { prevSubject: 'element' }, (subject, options) => {
const userOptions = options || {}

View File

@@ -0,0 +1,105 @@
import _ from 'lodash'
import { $Command } from '../../../cypress/command'
import $errUtils from '../../../cypress/error_utils'
export default (Commands, Cypress, cy, state) => {
Commands.addAll({ prevSubject: ['element', 'document'] }, {
within (subject, options, fn) {
let userOptions = options
const ctx = this
if (_.isUndefined(fn)) {
fn = userOptions
userOptions = {}
}
options = _.defaults({}, userOptions, { log: true })
if (options.log) {
options._log = Cypress.log({
$el: subject,
message: '',
timeout: options.timeout,
})
}
if (!_.isFunction(fn)) {
$errUtils.throwErrByPath('within.invalid_argument', { onFail: options._log })
}
// reference the next command after this
// within. when that command runs we'll
// know to remove withinSubject
const next = state('current').get('next')
// backup the current withinSubject
// this prevents a bug where we null out
// withinSubject when there are nested .withins()
// we want the inner within to restore the outer
// once its done
const prevWithinSubject = state('withinSubject')
state('withinSubject', subject)
// https://github.com/cypress-io/cypress/pull/8699
// An internal command is inserted to create a divider between
// commands inside within() callback and commands chained to it.
const restoreCmdIndex = state('index') + 1
cy.queue.insert(restoreCmdIndex, $Command.create({
args: [subject],
name: 'within-restore',
fn: (subject) => subject,
}))
state('index', restoreCmdIndex)
fn.call(ctx, subject)
const cleanup = () => cy.removeListener('command:start', setWithinSubject)
// we need a mechanism to know when we should remove
// our withinSubject so we dont accidentally keep it
// around after the within callback is done executing
// so when each command starts, check to see if this
// is the command which references our 'next' and
// if so, remove the within subject
const setWithinSubject = (obj) => {
if (obj !== next) {
return
}
// okay so what we're doing here is creating a property
// which stores the 'next' command which will reset the
// withinSubject. If two 'within' commands reference the
// exact same 'next' command, then this prevents accidentally
// resetting withinSubject more than once. If they point
to different 'next's then it's okay
if (next !== state('nextWithinSubject')) {
state('withinSubject', prevWithinSubject || null)
state('nextWithinSubject', next)
}
// regardless, nuke these listeners
cleanup()
}
// if next is defined then we know we'll eventually
// unbind these listeners
if (next) {
cy.on('command:start', setWithinSubject)
} else {
// remove our listener if we happen to reach the end
// event which will finalize cleanup if there was no next obj
cy.once('command:queue:before:end', () => {
cleanup()
state('withinSubject', null)
})
}
return subject
},
})
}

View File

@@ -51,6 +51,14 @@ const whichAreOptional = (val, key) => {
return (val === null) && OPTIONAL_OPTS.includes(key)
}
const getDisplayUrl = (url: string) => {
if (url.startsWith(window.location.origin)) {
return url.slice(window.location.origin.length)
}
return url
}
const needsFormSpecified = (options: any = {}) => {
const { body, json, headers } = options
@@ -273,7 +281,7 @@ export default (Commands, Cypress, cy, state, config) => {
}
return {
message: `${options.method} ${status} ${options.url}`,
message: `${options.method} ${status} ${getDisplayUrl(options.url)}`,
indicator,
}
},

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import $utils from './utils'
import $errUtils from './error_utils'
@@ -36,12 +35,16 @@ const _isBrowser = (browser, matcher, errPrefix) => {
}
}
const isBrowser = (config, obj = '', errPrefix = '`Cypress.isBrowser()`') => {
// TODO: change the type of `any` to `IsBrowserMatcher`
const isBrowser = (config, obj: any = '', errPrefix: string = '`Cypress.isBrowser()`') => {
return _
.chain(obj)
.concat([])
.map((matcher) => _isBrowser(config.browser, matcher, errPrefix))
.reduce((a, b) => {
.reduce((
a: null | { isMatch: boolean, exclusive: boolean },
b: { isMatch: boolean, exclusive: boolean },
) => {
if (!a) return b
if (a.exclusive && b.exclusive) {

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import $ from 'jquery'
import $dom from '../dom'
@@ -33,7 +32,12 @@ const maybeCastNumberToString = (num) => {
return _.isFinite(num) ? `${num}` : num
}
export const $chaiJquery = (chai, chaiUtils, callbacks = {}) => {
interface Callbacks {
onInvalid: (method, obj) => void
onError: (err, method, obj, negated) => void
}
export const $chaiJquery = (chai, chaiUtils, callbacks: Callbacks) => {
const { inspect, flag } = chaiUtils
const assertDom = (ctx, method, ...args) => {

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import $errUtils from './error_utils'
@@ -9,6 +7,8 @@ import { allCommands } from '../cy/commands'
import { addCommand } from '../cy/net-stubbing'
const builtInCommands = [
// `default` is necessary if a file uses `export default` syntax.
// @ts-ignore
..._.toArray(allCommands).map((c) => c.default || c),
addCommand,
]
@@ -74,7 +74,7 @@ export default {
const overridden = _.clone(original)
overridden.fn = function (...args) {
args = [].concat(originalFn, args)
args = ([] as any).concat(originalFn, args)
return fn.apply(this, args)
}

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
import _ from 'lodash'
import Cookies from 'js-cookie'
@@ -10,13 +8,13 @@ let isDebuggingVerbose = false
const preserved = {}
const defaults = {
const defaults: any = {
preserve: null,
}
const warnOnWhitelistRenamed = (obj, type) => {
if (obj.whitelist) {
return $errUtils.throwErrByPath('cookies.whitelist_renamed', { args: { type } })
$errUtils.throwErrByPath('cookies.whitelist_renamed', { args: { type } })
}
}
@@ -53,10 +51,12 @@ export const $Cookies = (namespace, domain) => {
if (preserved[name]) {
return delete preserved[name]
}
return false
}
const API = {
debug (bool = true, options = {}) {
debug (bool = true, options: any = {}) {
_.defaults(options, {
verbose: true,
})
@@ -82,7 +82,7 @@ export const $Cookies = (namespace, domain) => {
return console[m].apply(console, args)
},
getClearableCookies (cookies = []) {
getClearableCookies (cookies: any[] = []) {
return _.filter(cookies, (cookie) => {
return !isAllowed(cookie) && !removePreserved(cookie.name)
})

View File

@@ -1,5 +1,3 @@
// @ts-nocheck
// See: ./errorScenarios.md for details about error messages and stack traces
import _ from 'lodash'
@@ -7,7 +5,7 @@ import chai from 'chai'
import $dom from '../dom'
import $utils from './utils'
import $stackUtils from './stack_utils'
import $stackUtils, { StackAndCodeFrameIndex } from './stack_utils'
import $errorMessages from './error_messages'
const ERROR_PROPS = 'message type name stack sourceMappedStack parsedStack fileName lineNumber columnNumber host uncaught actual expected showDiff isPending docsUrl codeFrame'.split(' ')
@@ -17,9 +15,9 @@ const crossOriginScriptRe = /^script error/i
if (!Error.captureStackTrace) {
Error.captureStackTrace = (err, fn) => {
const stack = (new Error()).stack
const stack = (new Error()).stack;
err.stack = $stackUtils.stackWithLinesDroppedFromMarker(stack, fn.name)
(err as Error).stack = $stackUtils.stackWithLinesDroppedFromMarker(stack, fn?.name)
}
}
@@ -63,15 +61,15 @@ const wrapErr = (err) => {
return $utils.reduceProps(err, ERROR_PROPS)
}
const isAssertionErr = (err = {}) => {
const isAssertionErr = (err: Error) => {
return err.name === 'AssertionError'
}
const isChaiValidationErr = (err = {}) => {
const isChaiValidationErr = (err: Error) => {
return _.startsWith(err.message, 'Invalid Chai property')
}
const isCypressErr = (err = {}) => {
const isCypressErr = (err: Error): boolean => {
return err.name === 'CypressError'
}
@@ -79,7 +77,7 @@ const isSpecError = (spec, err) => {
return _.includes(err.stack, spec.relative)
}
const mergeErrProps = (origErr: Error, ...newProps) => {
const mergeErrProps = (origErr: Error, ...newProps): Error => {
return _.extend(origErr, ...newProps)
}
@@ -197,7 +195,7 @@ const makeErrFromObj = (obj) => {
return err2
}
const throwErr = (err, options = {}) => {
const makeErrFromErr = (err, options: any = {}) => {
if (_.isString(err)) {
err = cypressErr({ message: err })
}
@@ -205,12 +203,12 @@ const throwErr = (err, options = {}) => {
let { onFail, errProps } = options
// assume onFail is a command if
//# onFail is present and isn't a function
// onFail is present and isn't a function
if (onFail && !_.isFunction(onFail)) {
const command = onFail
//# redefine onFail and automatically
//# hook this into our command
// redefine onFail and automatically
// hook this into our command
onFail = (err) => {
return command.error(err)
}
@@ -224,10 +222,14 @@ const throwErr = (err, options = {}) => {
_.extend(err, errProps)
}
throw err
return err
}
const throwErrByPath = (errPath, options = {}) => {
const throwErr = (err, options: any = {}): never => {
throw makeErrFromErr(err, options)
}
const throwErrByPath = (errPath, options: any = {}): never => {
const err = errByPath(errPath, options.args)
if (options.stack) {
@@ -237,15 +239,16 @@ const throwErrByPath = (errPath, options = {}) => {
Error.captureStackTrace(err, throwErrByPath)
}
throwErr(err, options)
throw makeErrFromErr(err, options)
}
const warnByPath = (errPath, options = {}) => {
const warnByPath = (errPath, options: any = {}) => {
const errObj = errByPath(errPath, options.args)
let err = errObj.message
const docsUrl = (errObj as CypressError).docsUrl
if (errObj.docsUrl) {
err += `\n\n${errObj.docsUrl}`
if (docsUrl) {
err += `\n\n${docsUrl}`
}
$utils.warning(err)
@@ -266,6 +269,7 @@ export class InternalCypressError extends Error {
export class CypressError extends Error {
docsUrl?: string
retry?: boolean
userInvocationStack?: any
constructor (message) {
super(message)
@@ -297,10 +301,10 @@ const internalErr = (err): InternalCypressError => {
const cypressErr = (err): CypressError => {
const newErr = new CypressError(err.message)
return mergeErrProps(newErr, err)
return mergeErrProps(newErr, err) as CypressError
}
const cypressErrByPath = (errPath, options = {}) => {
const cypressErrByPath = (errPath, options: any = {}) => {
const errObj = errByPath(errPath, options.args)
return cypressErr(errObj)
@@ -376,7 +380,7 @@ const createUncaughtException = ({ frameType, handlerType, state, err }) => {
let uncaughtErr = errByPath(errPath, {
errMsg: err.message,
promiseAddendum: handlerType === 'unhandledrejection' ? ' It was caused by an unhandled promise rejection.' : '',
})
}) as CypressError
modifyErrMsg(err, uncaughtErr.message, () => uncaughtErr.message)
@@ -394,7 +398,7 @@ const createUncaughtException = ({ frameType, handlerType, state, err }) => {
// stacks from command failures and assertion failures have the right message
// but the stack points to cypress internals. here we replace the internal
// cypress stack with the invocation stack, which points to the user's code
const stackAndCodeFrameIndex = (err, userInvocationStack) => {
const stackAndCodeFrameIndex = (err, userInvocationStack): StackAndCodeFrameIndex => {
if (!userInvocationStack) return { stack: err.stack }
if (isCypressErr(err) || isChaiValidationErr(err)) {
@@ -427,7 +431,7 @@ const enhanceStack = ({ err, userInvocationStack, projectRoot }) => {
// all errors flow through this function before they're finally thrown
// or used to reject promises
const processErr = (errObj = {}, config) => {
const processErr = (errObj: CypressError, config) => {
let docsUrl = errObj.docsUrl
if (config('isInteractive') || !docsUrl) {
@@ -482,7 +486,7 @@ const errorFromErrorEvent = (event): ErrorFromErrorEvent => {
// reset the message on a cross origin script error
// since no details are accessible
if (crossOriginScriptRe.test(message)) {
const crossOriginErr = errByPath('uncaught.cross_origin_script')
const crossOriginErr = errByPath('uncaught.cross_origin_script') as CypressError
message = crossOriginErr.message
docsUrl = crossOriginErr.docsUrl
@@ -490,9 +494,9 @@ const errorFromErrorEvent = (event): ErrorFromErrorEvent => {
// it's possible the error was thrown as a string (throw 'some error')
// so create it in the case it's not already an object
const err = _.isObject(error) ? error : convertErrorEventPropertiesToObject({
const err = (_.isObject(error) ? error : convertErrorEventPropertiesToObject({
message, filename, lineno, colno,
})
})) as CypressError
err.docsUrl = docsUrl

View File

@@ -1,4 +1,3 @@
// @ts-nocheck
// TODO:
// 1. test these method implementations using encoded characters
// look at the spec to figure out whether we SHOULD be decoding them
@@ -18,6 +17,8 @@ const reLocalHost = /^(localhost|0\.0\.0\.0|127\.0\.0\.1)/
const reQueryParam = /\?[^/]+/
export class $Location {
remote: UrlParse
constructor (remote) {
this.remote = new UrlParse(remote)
}
@@ -38,6 +39,8 @@ export class $Location {
password,
}
}
return
}
getHash () {

View File

@@ -66,7 +66,12 @@ const stackWithReplacementMarkerLineRemoved = (stack) => {
})
}
const stackWithUserInvocationStackSpliced = (err, userInvocationStack) => {
export type StackAndCodeFrameIndex = {
stack: string
index?: number
}
const stackWithUserInvocationStackSpliced = (err, userInvocationStack): StackAndCodeFrameIndex => {
const stack = _.trim(err.stack, '\n') // trim newlines from end
const [messageLines, stackLines] = splitStack(stack)
const userInvocationStackWithoutMessage = stackWithoutMessage(userInvocationStack)

View File

@@ -98,7 +98,7 @@ export default {
throw new Error(`The switch/case value: '${value}' did not match any cases: ${keys.join(', ')}.`)
},
reduceProps (obj, props = []) {
reduceProps (obj, props: string[] = []) {
if (!obj) {
return null
}
@@ -355,7 +355,7 @@ export default {
// normalize more than {maxNewLines} new lines into
// exactly {replacementNumLines} new lines
normalizeNewLines (str, maxNewLines, replacementNumLines) {
normalizeNewLines (str, maxNewLines, replacementNumLines?) {
const moreThanMaxNewLinesRe = new RegExp(`\\n{${maxNewLines},}`)
const replacementWithNumLines = replacementNumLines ?? maxNewLines

View File

@@ -24,7 +24,7 @@
},
"devDependencies": {
"electron": "15.3.4",
"electron-packager": "15.1.0",
"electron-packager": "15.4.0",
"execa": "4.1.0",
"mocha": "3.5.3"
},

View File

@@ -35,6 +35,6 @@
"gulp-rev-all": "2.0.2",
"mocha": "2.5.3",
"resolve-pkg": "2.0.0",
"shelljs": "0.8.4"
"shelljs": "0.8.5"
}
}

View File

@@ -26,7 +26,7 @@
"chai-as-promised": "7.1.1",
"cross-env": "6.0.3",
"mocha": "3.5.3",
"shelljs": "0.8.3",
"shelljs": "0.8.5",
"sinon": "^10.0.0",
"sinon-chai": "3.4.0",
"typescript": "^4.2.3"

View File

@@ -1,5 +0,0 @@
# we do not explicitly ignore JavaScript files in "lib/browsers" folder
# because when we add TS files we do not transpile them as a build step
# instead always use require hooks to transpile TS files on the fly
.http-mitm-proxy

View File

@@ -274,30 +274,53 @@ export function start (name, options: StartOptions = {}) {
type OnProgress = (p: number) => void
export async function process (name, cname, videoCompression, ffmpegchaptersConfig, onProgress: OnProgress = function () {}) {
const metaFileName = `${name}.meta`
const maybeGenerateMetaFile = Bluebird.method(() => {
if (!ffmpegchaptersConfig) {
return false
}
// Writing the metadata to filesystem is necessary because fluent-ffmpeg is just a wrapper of ffmpeg command.
return fs.writeFile(metaFileName, ffmpegchaptersConfig).then(() => true)
})
const addChaptersMeta = await maybeGenerateMetaFile()
let total = null
const metaFileName = `${name}.meta`
const addChaptersMeta = ffmpegchaptersConfig && await fs.writeFile(metaFileName, ffmpegchaptersConfig).then(() => true)
return new Bluebird((resolve, reject) => {
debug('processing video from %s to %s video compression %o',
name, cname, videoCompression)
const command = ffmpeg()
.addOptions([
// These flags all serve to reduce initial buffering, especially important
// when dealing with very short videos (such as during component tests).
// See https://ffmpeg.org/ffmpeg-formats.html#Format-Options for details.
'-avioflags direct',
// Because we're passing in a slideshow of still frames, there's no
// fps metadata to be found in the video stream. This ensures that ffmpeg
// isn't buffering a lot of data waiting for information that's not coming.
'-fpsprobesize 0',
// Tells ffmpeg to read only the first 32 bytes of the stream for information
// (resolution, stream format, etc).
// Some videos can have long metadata (eg, lots of chapters) or spread out,
// but our streams are always predictable; No need to wait / buffer data before
// starting encoding
'-probesize 32',
// By default ffmpeg buffers the first 5 seconds of video to analyze it before
// it starts encoding. We're basically telling it "there is no metadata coming,
// start encoding as soon as we give you frames."
'-analyzeduration 0',
])
// See https://trac.ffmpeg.org/wiki/Encode/H.264 for details about h264 options.
const outputOptions = [
// Preset is a tradeoff between encoding speed and filesize. It does not determine video
// quality; It's just a tradeoff between CPU vs size.
'-preset fast',
`-crf ${videoCompression}`,
'-pix_fmt yuv420p',
// Compression Rate Factor is essentially the quality dial; 0 would be lossless
// (big files), while 51 (the maximum) would lead to low quality (and small files).
`-crf ${videoCompression}`,
// Discussion of pixel formats is beyond the scope of these comments. See
// https://en.wikipedia.org/wiki/Chroma_subsampling if you want the gritty details.
// Short version: yuv420p is a standard video format supported everywhere.
'-pix_fmt yuv420p',
]
if (addChaptersMeta) {

View File

@@ -163,7 +163,6 @@
"eventsource": "1.0.7",
"express-session": "1.16.1",
"express-useragent": "1.0.15",
"http-mitm-proxy": "0.7.0",
"https-proxy-agent": "3.0.1",
"istanbul": "0.4.5",
"mocha": "7.1.0",

View File

@@ -21,8 +21,7 @@ const meta = require('./meta')
const build = require('./build')
const upload = require('./upload')
const uploadUtils = require('./util/upload')
const { uploadNpmPackage } = require('./upload-npm-package')
const { uploadUniqueBinary } = require('./upload-unique-binary')
const { uploadArtifactToS3 } = require('./upload-build-artifact')
const { moveBinaries } = require('./move-binaries')
// initialize on existing repo
@@ -252,18 +251,11 @@ const deploy = {
})
},
// upload Cypress NPM package file
'upload-npm-package' (args = process.argv) {
console.log('#packageUpload')
// upload Cypress binary or NPM Package zip file under unique hash
'upload-build-artifact' (args = process.argv) {
console.log('#uploadBuildArtifact')
return uploadNpmPackage(args)
},
// upload Cypress binary zip file under unique hash
'upload-unique-binary' (args = process.argv) {
console.log('#uniqueBinaryUpload')
return uploadUniqueBinary(args)
return uploadArtifactToS3(args)
},
// uploads a single built Cypress binary ZIP file
@@ -288,10 +280,21 @@ const deploy = {
console.log('for platform %s version %s',
options.platform, options.version)
return upload.toS3({
zipFile: options.zip,
const uploadPath = upload.getFullUploadPath({
version: options.version,
platform: options.platform,
name: upload.zipName,
})
return upload.toS3({
file: options.zip,
uploadPath,
}).then(() => {
return uploadUtils.purgeDesktopAppFromCache({
version: options.version,
platform: options.platform,
zipName: options.zip,
})
})
})
},

View File

@@ -18,9 +18,9 @@ import confirm from 'inquirer-confirm'
import uploadUtils from './util/upload'
// @ts-ignore
import { getUploadDirForPlatform } from './upload-unique-binary'
import { getUploadDirForPlatform } from './upload-build-artifact'
// @ts-ignore
import { zipName, getFullUploadName } from './upload'
import { zipName, getFullUploadPath } from './upload'
/**
* 40 character full sha commit string
@@ -160,7 +160,9 @@ export const moveBinaries = async (args = []) => {
const uploadDir = getUploadDirForPlatform({
version: releaseOptions.version,
}, platformArch)
uploadFolder: 'binary',
platformArch,
})
console.log('finding binary for %s in %s', platformArch, uploadDir)
@@ -216,7 +218,7 @@ export const moveBinaries = async (args = []) => {
platformArch: lastBuild.platformArch,
name: zipName,
}
const destinationPath = getFullUploadName(options)
const destinationPath = getFullUploadPath(options)
console.log('copying test runner %s to %s', lastBuild.platformArch, destinationPath)

View File

@@ -0,0 +1,145 @@
const minimist = require('minimist')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const hasha = require('hasha')
const _ = require('lodash')
const upload = require('./upload')
const uploadUtils = require('./util/upload')
const { s3helpers } = require('./s3-api')
const uploadTypes = {
binary: {
uploadFolder: 'binary',
uploadFileName: 'cypress.zip',
},
'npm-package': {
uploadFolder: 'npm',
uploadFileName: 'cypress.tgz',
},
}
// Build the public CDN URL for an uploaded artifact by joining the
// configured CDN base URL with the artifact's S3 key.
const getCDN = function (uploadPath) {
  const baseUrl = uploadUtils.getUploadUrl()

  return `${baseUrl}/${uploadPath}`
}
// S3 key prefix holding beta artifacts of one type, for one version and
// one platform/arch combination, e.g. `beta/binary/9.4.2/darwin-x64`.
const getUploadDirForPlatform = function (options) {
  return ['beta', options.uploadFolder, options.version, options.platformArch].join('/')
}
// Full S3 key for an artifact: every artifact is uploaded, per platform,
// into a unique hash-named folder:
//   beta/(binary|npm)/<version>/<platformArch>/<unique hash>/<file name>
// For the binary:
//   beta/binary/9.4.2/win32-x64/circle-develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.zip
// For the NPM package:
//   beta/npm/9.4.2/<platformArch>/circle-develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.tgz
const getUploadPath = function (options) {
  const dir = getUploadDirForPlatform(options)

  return `${dir}/${options.hash}/${options.uploadFileName}`
}
// Computes a SHA-512 checksum and byte size of the uploaded file and
// stores them as user metadata on the already-uploaded S3 object.
//
// @param {string} filename - local path of the file that was uploaded
// @param {string} key - S3 object key the file was uploaded to
// @returns {Promise} resolves once the S3 object's metadata is updated
const setChecksum = (filename, key) => {
  console.log('setting checksum for file %s', filename)
  console.log('on s3 object %s', key)

  la(check.unemptyString(filename), 'expected filename', filename)
  la(check.unemptyString(key), 'expected uploaded S3 key', key)

  const checksum = hasha.fromFileSync(filename, { algorithm: 'sha512' })
  const {
    size,
  } = fs.statSync(filename)

  // fix: the algorithm above is sha512 — the old log line mislabeled
  // this value as a SHA256 checksum
  console.log('SHA-512 checksum %s', checksum)
  console.log('size', size)

  const aws = uploadUtils.getS3Credentials()
  const s3 = s3helpers.makeS3(aws)

  // S3 object metadata can only have string values
  const metadata = {
    checksum,
    size: String(size),
  }

  // by default s3.copyObject does not preserve ACL when copying
  // thus we need to reset it for our public files
  return s3helpers.setUserMetadata(aws.bucket, key, metadata,
    'application/zip', 'public-read', s3)
}
// Validates the parsed CLI options for an artifact upload and normalizes
// them IN PLACE: resolves the type-specific upload folder/file name, fills
// in a commit hash from the CI environment when one wasn't passed, and
// derives the platform-arch name. Mutates and returns `options`.
// Throws (via lazy-ass) on any invalid or missing option.
const validateOptions = (options) => {
  const supportedUploadTypes = Object.keys(uploadTypes)
  const { type, version, platform } = options

  la(check.defined(type) && supportedUploadTypes.includes(type),
    `specify which upload type you\'d like to upload. One of ${supportedUploadTypes.join(',')}`, type)

  // copy the type-specific settings (uploadFolder, uploadFileName) onto options
  Object.assign(options, uploadTypes[type])

  la(check.unemptyString(version) && check.semver(version), 'invalid version', version)

  // a unique hash keeps every CI upload in its own folder
  options.hash = options.hash || uploadUtils.formHashFromEnvironment()
  la(check.unemptyString(options.hash), 'missing hash to give', options)

  options.platformArch = uploadUtils.getUploadNameByOsAndArch(platform || process.platform)

  return options
}
// Entry point for the `upload-build-artifact` script.
//
// Parses CLI args, uploads the given binary / npm-package file to a unique
// S3 path, stores its checksum as S3 object metadata, prints the CDN
// download URL and saves it to `<type>-url.json`.
//
// @param {string[]} args - process.argv-style arguments
// @returns {Promise} always resolves; on failure the error is logged and
//   the process exit code is set to 1 so CI reports the failure
const uploadArtifactToS3 = function (args = []) {
  const supportedOptions = ['type', 'version', 'file', 'hash', 'platform']
  const options = minimist(args, {
    string: supportedOptions,
  })

  console.log('Upload options')
  console.log(_.pick(options, supportedOptions))
  // throws synchronously on invalid/missing options; also fills in
  // uploadFolder, uploadFileName, hash and platformArch
  validateOptions(options)

  const uploadPath = getUploadPath(options)

  return upload.toS3({ file: options.file, uploadPath })
  .then(() => {
    return setChecksum(options.file, uploadPath)
  })
  .then(() => {
    const cdnUrl = getCDN(uploadPath)

    if (options.type === 'binary') {
      console.log('Binary can be downloaded using URL')
      console.log(cdnUrl)
    } else {
      console.log('NPM package can be installed using URL')
      console.log('npm install %s', cdnUrl)
    }

    return cdnUrl
  })
  .then(uploadUtils.saveUrl(`${options.type}-url.json`))
  .catch((e) => {
    console.error('There was an issue uploading the artifact.')
    console.error(e)
    // fix: previously the error was swallowed entirely, so a failed
    // upload exited 0 and CI reported success. Keep the promise resolved
    // (callers and tests rely on that) but make the process fail.
    process.exitCode = 1
  })
}
// public API — consumed by binary/move-binaries, binary/index and the unit tests
module.exports = {
getCDN,
getUploadDirForPlatform,
getUploadPath,
setChecksum,
uploadArtifactToS3,
}
// allow running this file directly: `node upload-build-artifact.js --type ...`
if (!module.parent) {
uploadArtifactToS3(process.argv)
}

View File

@@ -1,122 +0,0 @@
const minimist = require('minimist')
const Promise = require('bluebird')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const path = require('path')
const awspublish = require('gulp-awspublish')
const rename = require('gulp-rename')
const gulpDebug = require('gulp-debug')
const gulp = require('gulp')
const uploadUtils = require('./util/upload')
const npmPackageExtension = '.tgz'
const uploadFileName = 'cypress.tgz'
const isNpmPackageFile = check.extension(npmPackageExtension)
// the package tgz file will be uploaded into unique folder
// in our case something like this
// https://cdn.cypress.io/beta/npm/<version>/<some unique hash>/cypress.tgz
const rootFolder = 'beta'
const npmFolder = 'npm'
const getCDN = function ({ version, hash, filename }) {
la(check.semver(version), 'invalid version', version)
la(check.unemptyString(hash), 'missing hash', hash)
la(check.unemptyString(filename), 'missing filename', filename)
la(isNpmPackageFile(filename), 'wrong extension for file', filename)
const url = uploadUtils.getUploadUrl()
la(check.url(url), 'could not get upload url', url)
return [url, rootFolder, npmFolder, version, hash, filename].join('/')
}
const getUploadDirName = function (options) {
la(check.unemptyString(options.version), 'missing version', options)
la(check.unemptyString(options.hash), 'missing hash', options)
const dir = [rootFolder, npmFolder, options.version, options.hash, null].join('/')
return dir
}
const uploadFile = (options) => {
return new Promise((resolve, reject) => {
const publisher = uploadUtils.getPublisher()
const headers = {}
headers['Cache-Control'] = 'no-cache'
return gulp.src(options.file)
.pipe(rename((p) => {
p.basename = path.basename(uploadFileName, npmPackageExtension)
p.dirname = getUploadDirName(options)
console.log('renaming upload to', p.dirname, p.basename)
la(check.unemptyString(p.basename), 'missing basename')
la(check.unemptyString(p.dirname), 'missing dirname')
return p
})).pipe(gulpDebug())
.pipe(publisher.publish(headers))
.pipe(awspublish.reporter())
.on('error', reject)
.on('end', resolve)
})
}
const uploadNpmPackage = function (args = []) {
console.log(args)
const options = minimist(args, {
string: ['version', 'file', 'hash'],
alias: {
version: 'v',
file: 'f',
hash: 'h',
},
})
console.log('Upload NPM package options')
console.log(options)
la(check.unemptyString(options.file), 'missing file to upload', options)
la(isNpmPackageFile(options.file),
'invalid file to upload extension', options.file)
if (!options.hash) {
options.hash = uploadUtils.formHashFromEnvironment()
}
la(check.unemptyString(options.hash), 'missing hash to give', options)
la(check.unemptyString(options.version), 'missing version', options)
la(fs.existsSync(options.file), 'cannot find file', options.file)
return uploadFile(options)
.then(() => {
const cdnUrl = getCDN({
version: options.version,
hash: options.hash,
filename: uploadFileName,
})
console.log('NPM package can be installed using URL')
console.log('npm install %s', cdnUrl)
return cdnUrl
}).then(uploadUtils.saveUrl('npm-package-url.json'))
}
// for now disable purging from CDN cache
// because each upload should be unique by hash
// .then R.tap(uploadUtils.purgeCache)
module.exports = {
uploadNpmPackage,
getCDN,
}
if (!module.parent) {
uploadNpmPackage(process.argv)
}

View File

@@ -1,197 +0,0 @@
const minimist = require('minimist')
const Promise = require('bluebird')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const path = require('path')
const awspublish = require('gulp-awspublish')
const rename = require('gulp-rename')
const gulpDebug = require('gulp-debug')
const gulp = require('gulp')
const hasha = require('hasha')
const _ = require('lodash')
const uploadUtils = require('./util/upload')
const {
s3helpers,
} = require('./s3-api')
// we zip the binary on every platform and upload under same name
const binaryExtension = '.zip'
const uploadFileName = 'cypress.zip'
const isBinaryFile = check.extension(binaryExtension)
const rootFolder = 'beta'
const folder = 'binary'
// the binary will be uploaded into unique folder
// in our case something like this
// https://cdn.cypress.io/desktop/binary/0.20.2/<platform>/<some unique version info>/cypress.zip
const getCDN = function ({ version, hash, filename, platform }) {
la(check.semver(version), 'invalid version', version)
la(check.unemptyString(hash), 'missing hash', hash)
la(check.unemptyString(filename), 'missing filename', filename)
la(isBinaryFile(filename), 'wrong extension for file', filename)
la(check.unemptyString(platform), 'missing platform', platform)
const cdnUrl = uploadUtils.getUploadUrl()
la(check.url(cdnUrl), 'could not get cdn url', cdnUrl)
return [cdnUrl, rootFolder, folder, version, platform, hash, filename].join('/')
}
// returns folder that contains beta (unreleased) binaries for given version
//
const getUploadVersionDirName = function (options) {
la(check.unemptyString(options.version), 'missing version', options)
const dir = [rootFolder, folder, options.version].join('/')
return dir
}
const getUploadDirForPlatform = function (options, platformArch) {
la(uploadUtils.isValidPlatformArch(platformArch),
'missing or invalid platformArch', platformArch)
const versionDir = getUploadVersionDirName(options)
la(check.unemptyString(versionDir), 'could not form folder from', options)
const dir = [versionDir, platformArch].join('/')
return dir
}
const getUploadDirName = function (options) {
la(check.unemptyString(options.hash), 'missing hash', options)
const uploadFolder = getUploadDirForPlatform(options, options.platformArch)
la(check.unemptyString(uploadFolder), 'could not form folder from', options)
const dir = [uploadFolder, options.hash, null].join('/')
return dir
}
const uploadFile = (options) => {
return new Promise((resolve, reject) => {
const publisher = uploadUtils.getPublisher()
const headers = {}
headers['Cache-Control'] = 'no-cache'
let key = null
return gulp.src(options.file)
.pipe(rename((p) => {
p.basename = path.basename(uploadFileName, binaryExtension)
p.dirname = getUploadDirName(options)
console.log('renaming upload to', p.dirname, p.basename)
la(check.unemptyString(p.basename), 'missing basename')
la(check.unemptyString(p.dirname), 'missing dirname')
key = p.dirname + uploadFileName
return p
})).pipe(gulpDebug())
.pipe(publisher.publish(headers))
.pipe(awspublish.reporter())
.on('error', reject)
.on('end', () => {
return resolve(key)
})
})
}
const setChecksum = (filename, key) => {
console.log('setting checksum for file %s', filename)
console.log('on s3 object %s', key)
la(check.unemptyString(filename), 'expected filename', filename)
la(check.unemptyString(key), 'expected uploaded S3 key', key)
const checksum = hasha.fromFileSync(filename, { algorithm: 'sha512' })
const {
size,
} = fs.statSync(filename)
console.log('SHA256 checksum %s', checksum)
console.log('size', size)
const aws = uploadUtils.getS3Credentials()
const s3 = s3helpers.makeS3(aws)
// S3 object metadata can only have string values
const metadata = {
checksum,
size: String(size),
}
// by default s3.copyObject does not preserve ACL when copying
// thus we need to reset it for our public files
return s3helpers.setUserMetadata(aws.bucket, key, metadata,
'application/zip', 'public-read', s3)
}
const uploadUniqueBinary = function (args = []) {
const options = minimist(args, {
string: ['version', 'file', 'hash', 'platform'],
alias: {
version: 'v',
file: 'f',
hash: 'h',
},
})
console.log('Upload unique binary options')
console.log(_.pick(options, ['file', 'version', 'hash']))
la(check.unemptyString(options.file), 'missing file to upload', options)
la(isBinaryFile(options.file),
'invalid file to upload extension', options.file)
if (!options.hash) {
options.hash = uploadUtils.formHashFromEnvironment()
}
la(check.unemptyString(options.hash), 'missing hash to give', options)
la(check.unemptyString(options.version), 'missing version', options)
la(fs.existsSync(options.file), 'cannot find file', options.file)
const platform = options.platform != null ? options.platform : process.platform
options.platformArch = uploadUtils.getUploadNameByOsAndArch(platform)
return uploadFile(options)
.then((key) => {
return setChecksum(options.file, key)
}).then(() => {
const cdnUrl = getCDN({
version: options.version,
hash: options.hash,
filename: uploadFileName,
platform: options.platformArch,
})
console.log('Binary can be downloaded using URL')
console.log(cdnUrl)
return cdnUrl
}).then(uploadUtils.saveUrl('binary-url.json'))
}
module.exports = {
getUploadDirName,
getUploadDirForPlatform,
uploadUniqueBinary,
getCDN,
}
if (!module.parent) {
uploadUniqueBinary(process.argv)
}

View File

@@ -5,9 +5,9 @@ let fs = require('fs-extra')
const path = require('path')
const gulp = require('gulp')
const Promise = require('bluebird')
const meta = require('./meta')
const la = require('lazy-ass')
const check = require('check-more-types')
const uploadUtils = require('./util/upload')
fs = Promise.promisifyAll(fs)
@@ -30,17 +30,25 @@ module.exports = {
// returns desktop folder for a given folder without platform
// something like desktop/0.20.1
getUploadeVersionFolder (aws, version) {
getUploadVersionFolder (aws, version) {
la(check.unemptyString(aws.folder), 'aws object is missing desktop folder', aws.folder)
const dirName = [aws.folder, version].join('/')
return dirName
},
getFullUploadName ({ folder, version, platformArch, name }) {
la(check.unemptyString(folder), 'missing folder', folder)
la(check.semver(version), 'missing or invalid version', version)
la(check.unemptyString(name), 'missing file name', name)
// store uploaded application in subfolders by version and platform
// something like desktop/0.20.1/darwin-x64/
getFullUploadPath (options) {
let { folder, version, platformArch, name } = options
if (!folder) {
folder = this.getAwsObj().folder
}
la(check.unemptyString(folder), 'missing folder', options)
la(check.semver(version), 'missing or invalid version', options)
la(check.unemptyString(name), 'missing file name', options)
la(uploadUtils.isValidPlatformArch(platformArch),
'invalid platform and arch', platformArch)
@@ -49,20 +57,6 @@ module.exports = {
return fileName
},
// store uploaded application in subfolders by platform and version
// something like desktop/0.20.1/darwin-x64/
getUploadDirName ({ version, platform }) {
const aws = this.getAwsObj()
const platformArch = uploadUtils.getUploadNameByOsAndArch(platform)
const versionFolder = this.getUploadeVersionFolder(aws, version)
const dirName = [versionFolder, platformArch, null].join('/')
console.log('target directory %s', dirName)
return dirName
},
getManifestUrl (folder, version, uploadOsName) {
const url = uploadUtils.getUploadUrl()
@@ -141,48 +135,35 @@ module.exports = {
})
},
toS3 ({ zipFile, version, platform }) {
toS3 ({ file, uploadPath }) {
console.log('#uploadToS3 ⏳')
console.log('uploading', file, 'to', uploadPath)
la(check.unemptyString(version), 'expected version string', version)
la(check.unemptyString(zipFile), 'expected zip filename', zipFile)
la(check.extension('zip', zipFile),
'zip filename should end with .zip', zipFile)
la(check.unemptyString(file), 'missing file to upload', file)
la(fs.existsSync(file), 'cannot find file', file)
la(check.extension(path.extname(uploadPath))(file),
'invalid file to upload extension', file)
la(meta.isValidPlatform(platform), 'invalid platform', platform)
return new Promise((resolve, reject) => {
const publisher = this.getPublisher()
console.log(`zip filename ${zipFile}`)
const headers = {}
if (!fs.existsSync(zipFile)) {
throw new Error(`Cannot find zip file ${zipFile}`)
}
headers['Cache-Control'] = 'no-cache'
const upload = () => {
return new Promise((resolve, reject) => {
const publisher = this.getPublisher()
return gulp.src(file)
.pipe(rename((p) => {
// rename to standard filename for upload
p.basename = path.basename(uploadPath, path.extname(uploadPath))
p.dirname = path.dirname(uploadPath)
const headers = {}
headers['Cache-Control'] = 'no-cache'
return gulp.src(zipFile)
.pipe(rename((p) => {
// rename to standard filename zipName
p.basename = path.basename(zipName, p.extname)
p.dirname = this.getUploadDirName({ version, platform })
return p
})).pipe(gulpDebug())
.pipe(publisher.publish(headers))
.pipe(awspublish.reporter())
.on('error', reject)
.on('end', resolve)
})
}
return upload()
.then(() => {
return uploadUtils.purgeDesktopAppFromCache({ version, platform, zipName })
return p
}))
.pipe(gulpDebug())
.pipe(publisher.publish(headers))
.pipe(awspublish.reporter())
.on('error', reject)
.on('end', resolve)
})
},
}

View File

@@ -15,7 +15,6 @@ const getUploadUrl = function () {
const url = konfig('cdn_url')
la(check.url(url), 'could not get CDN url', url)
console.log('upload url', url)
return url
}

View File

@@ -0,0 +1,140 @@
const { sinon } = require('@packages/https-proxy/test/spec_helper')
const { expect } = require('chai')
const hasha = require('hasha')
const fs = require('fs')
const {
getCDN,
getUploadDirForPlatform,
getUploadPath,
uploadArtifactToS3,
} = require('../../binary/upload-build-artifact')
const upload = require('../../binary/upload')
const uploadUtils = require('../../binary/util/upload')
const { s3helpers } = require('../../binary/s3-api')
/* eslint-env mocha */
// Unit tests for scripts/binary/upload-build-artifact.js
// NOTE(review): the suite name says "release" while the file under test is
// "build-artifact" — presumably historical; confirm before renaming.
describe('upload-release-artifact', () => {
describe('.getCDN', () => {
it('returns CDN s3 url', () => {
const uploadUrl = 'dir/path/file'
const result = getCDN(uploadUrl)
expect(result).to.eq('https://cdn.cypress.io/dir/path/file')
})
})
describe('.getUploadDirForPlatform', () => {
it('returns folder for given version and platform', () => {
const options = {
uploadFolder: 'binary',
platformArch: 'darwin-x64',
version: '3.3.0',
}
const result = getUploadDirForPlatform(options)
expect(result).to.eq('beta/binary/3.3.0/darwin-x64')
})
})
describe('.getUploadPath', () => {
it('returns s3 upload path', () => {
const options = {
uploadFolder: 'binary',
platformArch: 'darwin-x64',
version: '3.3.0',
hash: 'hash',
uploadFileName: 'file',
}
const result = getUploadPath(options)
expect(result).to.eq('beta/binary/3.3.0/darwin-x64/hash/file')
})
})
describe('.uploadArtifactToS3', () => {
let sandbox
// stub out all filesystem / S3 / CI-environment access so these tests
// exercise only argument validation and upload-path construction
beforeEach(function () {
sandbox = sinon.sandbox.create()
sandbox.stub(hasha, 'fromFileSync').returns('checksum')
sandbox.stub(fs, 'statSync').returns('size')
sandbox.stub(s3helpers, 'makeS3').returns('size')
sandbox.stub(s3helpers, 'setUserMetadata')
sandbox.stub(upload, 'toS3')
sandbox.stub(uploadUtils, 'formHashFromEnvironment')
sandbox.stub(uploadUtils, 'getS3Credentials').returns({ bucket: 'beta' })
sandbox.stub(uploadUtils, 'getUploadNameByOsAndArch')
sandbox.stub(uploadUtils, 'saveUrl')
})
afterEach(function () {
sandbox.restore()
})
// the next five tests assert the synchronous validation throws from
// validateOptions (bad --type / --version / missing hash)
it('throws error if type argument is missing', () => {
expect(() => uploadArtifactToS3()).to.throw(/specify which upload type you'd like to upload/)
})
it('throws error if type argument is not binary or npm-package', () => {
expect(() => uploadArtifactToS3(['--type', 'npm'])).to.throw(/specify which upload type you'd like to upload/)
})
it('throws error if version argument is missing', () => {
expect(() => uploadArtifactToS3(['--type', 'binary'])).to.throw(/invalid version/)
})
it('throws error if version argument is not a semver', () => {
expect(() => uploadArtifactToS3(['--type', 'npm-package', '--version', '.1'])).to.throw(/invalid version/)
})
it('throws error if not ran in CircleCI to generate unique hash', () => {
uploadUtils.formHashFromEnvironment.throws()
expect(() => uploadArtifactToS3(['--type', 'npm-package', '--version', '1.0.0'])).to.throw()
})
// happy path: verifies the constructed S3 upload path and the saved url file name
it('uploads binary to s3 and saves url to json', () => {
uploadUtils.formHashFromEnvironment.returns('hash')
uploadUtils.getUploadNameByOsAndArch.returns('darwin-x64')
upload.toS3.resolves(true)
const args = ['--file', 'my.zip', '--type', 'binary', '--version', '1.0.0']
uploadArtifactToS3(args)
expect(uploadUtils.formHashFromEnvironment).to.have.calledOnce
expect(uploadUtils.getUploadNameByOsAndArch).to.have.calledOnce
expect(upload.toS3).to.have.been.calledOnce
expect(upload.toS3.lastCall.args).to.have.lengthOf(1)
expect(upload.toS3.lastCall.args[0]).to.have.property('file', 'my.zip')
expect(upload.toS3.lastCall.args[0]).to.have.property('uploadPath', 'beta/binary/1.0.0/darwin-x64/hash/cypress.zip')
expect(uploadUtils.saveUrl).to.have.calledOnce
expect(uploadUtils.saveUrl.lastCall.args).to.have.lengthOf(1)
expect(uploadUtils.saveUrl.lastCall.args[0]).to.eq('binary-url.json')
})
it('uploads npm-package to s3 and saves url to json', () => {
uploadUtils.formHashFromEnvironment.returns('hash')
uploadUtils.getUploadNameByOsAndArch.returns('darwin-x64')
upload.toS3.resolves(true)
const args = ['--file', 'my.zip', '--type', 'npm-package', '--version', '1.0.0']
uploadArtifactToS3(args)
expect(uploadUtils.formHashFromEnvironment).to.have.calledOnce
expect(uploadUtils.getUploadNameByOsAndArch).to.have.calledOnce
expect(upload.toS3).to.have.been.calledOnce
expect(upload.toS3.lastCall.args).to.have.lengthOf(1)
expect(upload.toS3.lastCall.args[0]).to.have.property('file', 'my.zip')
expect(upload.toS3.lastCall.args[0]).to.have.property('uploadPath', 'beta/npm/1.0.0/darwin-x64/hash/cypress.tgz')
expect(uploadUtils.saveUrl).to.have.calledOnce
expect(uploadUtils.saveUrl.lastCall.args).to.have.lengthOf(1)
expect(uploadUtils.saveUrl.lastCall.args[0]).to.eq('npm-package-url.json')
})
})
})

View File

@@ -1,23 +0,0 @@
const snapshot = require('snap-shot-it')
/* eslint-env mocha */
describe('getCDN', () => {
context('npm package', () => {
const { getCDN } = require('../../binary/upload-npm-package')
it('returns CDN s3 path', () => {
const options = {
platform: 'darwin-x64',
filename: 'cypress.tgz',
version: '3.3.0',
// ci name + commit sha + build number
hash: 'ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123',
}
snapshot({
input: options,
result: getCDN(options),
})
})
})
})

View File

@@ -2,10 +2,8 @@ require('../../spec-helper')
const snapshot = require('snap-shot-it')
const la = require('lazy-ass')
const os = require('os')
/* eslint-env mocha */
/* global sinon */
describe('upload', () => {
const upload = require('../../binary/upload')
@@ -19,36 +17,14 @@ describe('upload', () => {
})
})
context('getUploadeVersionFolder', () => {
context('getUploadVersionFolder', () => {
it('returns folder', () => {
const aws = {
folder: 'desktop',
}
const folder = upload.getUploadeVersionFolder(aws, '3.3.0')
const folder = upload.getUploadVersionFolder(aws, '3.3.0')
la(folder === 'desktop/3.3.0', 'wrong desktop folder', folder)
})
})
context('getUploadDirName', () => {
it('returns folder with platform', () => {
const aws = {
folder: 'desktop',
}
sinon.stub(upload, 'getAwsObj').returns(aws)
sinon.stub(os, 'arch').returns('x64')
const folder = upload.getUploadDirName({
platform: 'darwin',
version: '3.3.0',
})
la(
folder === 'desktop/3.3.0/darwin-x64/',
'wrong upload desktop folder',
folder,
)
})
})
})

View File

@@ -1,62 +0,0 @@
const snapshot = require('snap-shot-it')
/* eslint-env mocha */
describe('upload-unique-binary', () => {
describe('getUploadDirName', () => {
const { getUploadDirName } = require('../../binary/upload-unique-binary')
it('returns folder for given version', () => {
const options = {
platformArch: 'darwin-x64',
version: '3.3.0',
// ci name + commit sha + build number
hash: 'ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123',
}
snapshot('upload binary folder', {
input: options,
result: getUploadDirName(options),
})
})
})
describe('getUploadDirForPlatform', () => {
const {
getUploadDirForPlatform,
} = require('../../binary/upload-unique-binary')
it('returns folder for given version and platform', () => {
const options = {
platformArch: 'darwin-x64',
version: '3.3.0',
}
const result = getUploadDirForPlatform(options, options.platformArch)
snapshot('upload binary folder for platform', {
input: options,
result,
})
})
})
describe('getCDN', () => {
context('binary', () => {
const { getCDN } = require('../../binary/upload-unique-binary')
it('returns CDN s3 path', () => {
const options = {
platform: 'darwin-x64',
filename: 'cypress.zip',
version: '3.3.0',
// ci name + commit sha + build number
hash: 'ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123',
}
snapshot('getCDN for binary', {
input: options,
result: getCDN(options),
})
})
})
})
})

View File

@@ -50,7 +50,6 @@
"fluent-ffmpeg": "2.1.2",
"fs-extra": "8.1.0",
"glob": "7.2.0",
"http-mitm-proxy": "0.7.0",
"https-proxy-agent": "3.0.1",
"human-interval": "1.0.0",
"image-size": "0.8.3",

View File

@@ -1,7 +1,5 @@
const _ = require('lodash')
const express = require('express')
const http = require('http')
const https = require('https')
const path = require('path')
const debug = require('debug')('cypress:server:network-error-handling-spec')
const Promise = require('bluebird')
@@ -12,7 +10,6 @@ const chrome = require('@packages/server/lib/browsers/chrome')
const systemTests = require('../lib/system-tests').default
const random = require('@packages/server/lib/util/random')
const Fixtures = require('../lib/fixtures')
let mitmProxy = require('http-mitm-proxy')
const PORT = 13370
const PROXY_PORT = 13371
@@ -348,19 +345,19 @@ describe('e2e network error handling', function () {
})
context('Cypress', () => {
let debugProxy
beforeEach(() => {
delete process.env.HTTP_PROXY
delete process.env.HTTPS_PROXY
return delete process.env.NO_PROXY
delete process.env.NO_PROXY
})
afterEach(function () {
if (this.debugProxy) {
return this.debugProxy.stop()
.then(() => {
this.debugProxy = null
})
afterEach(async function () {
if (debugProxy) {
await debugProxy.stop()
debugProxy = null
}
})
@@ -415,11 +412,11 @@ describe('e2e network error handling', function () {
return true
})
this.debugProxy = new DebugProxy({
debugProxy = new DebugProxy({
onConnect,
})
return this.debugProxy
return debugProxy
.start(PROXY_PORT)
.then(() => {
process.env.HTTP_PROXY = `http://localhost:${PROXY_PORT}`
@@ -465,9 +462,9 @@ describe('e2e network error handling', function () {
})
it('behind a proxy', function () {
this.debugProxy = new DebugProxy()
debugProxy = new DebugProxy()
return this.debugProxy
return debugProxy
.start(PROXY_PORT)
.then(() => {
process.env.HTTP_PROXY = `http://localhost:${PROXY_PORT}`
@@ -485,27 +482,22 @@ describe('e2e network error handling', function () {
})
})
it('behind a proxy with transfer-encoding: chunked', function () {
mitmProxy = mitmProxy()
mitmProxy.onRequest((ctx, callback) => {
return callback()
})
mitmProxy.listen({
host: '127.0.0.1',
port: PROXY_PORT,
keepAlive: true,
httpAgent: http.globalAgent,
httpsAgent: https.globalAgent,
forceSNI: false,
forceChunkedRequest: true,
it('behind a proxy with transfer-encoding: chunked', async function () {
debugProxy = new DebugProxy({
onRequest: (reqUrl, req, res) => {
expect(req.headers).to.have.property('content-length')
// delete content-length to force te: chunked
delete req.headers['content-length']
debugProxy._onRequest(reqUrl, req, res)
},
})
process.env.HTTP_PROXY = `http://localhost:${PROXY_PORT}`
process.env.NO_PROXY = ''
return systemTests.exec(this, {
await debugProxy.start(PROXY_PORT)
await systemTests.exec(this, {
spec: 'network_error_304_handling_spec.js',
video: false,
config: {

View File

@@ -10791,7 +10791,7 @@ asap@^2.0.0, asap@~2.0.3, asap@~2.0.6:
resolved "https://registry.yarnpkg.com/asap/-/asap-2.0.6.tgz#e50347611d7e690943208bbdafebcbc2fb866d46"
integrity sha1-5QNHYR1+aQlDIIu9r+vLwvuGbUY=
asar@^3.0.0, asar@^3.0.3:
asar@^3.0.3, asar@^3.1.0:
version "3.1.0"
resolved "https://registry.yarnpkg.com/asar/-/asar-3.1.0.tgz#70b0509449fe3daccc63beb4d3c7d2e24d3c6473"
integrity sha512-vyxPxP5arcAqN4F/ebHd/HhwnAiZtwhglvdmc7BR2f0ywbVNTOpSeyhLDbGXtE/y58hv1oC75TaNIXutnsOZsQ==
@@ -10983,7 +10983,7 @@ async@>=0.2.9, async@^3.2.0:
resolved "https://registry.yarnpkg.com/async/-/async-3.2.0.tgz#b3a2685c5ebb641d3de02d161002c60fc9f85720"
integrity sha512-TR2mEZFVOj2pLStYxLht7TyfuRzaydfpxr3k9RpHIzMgw7A64dzsdqCxH1WJyQdoe8T10nDXd9wnEigmiuHIZw==
async@^2.1.4, async@^2.4.1, async@^2.5.0, async@^2.6.2:
async@^2.1.4, async@^2.4.1, async@^2.6.2:
version "2.6.3"
resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==
@@ -15314,6 +15314,15 @@ cross-spawn-async@^2.1.1:
lru-cache "^4.0.0"
which "^1.2.8"
cross-spawn-windows-exe@^1.1.0, cross-spawn-windows-exe@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/cross-spawn-windows-exe/-/cross-spawn-windows-exe-1.2.0.tgz#46253b0f497676e766faf4a7061004618b5ac5ec"
integrity sha512-mkLtJJcYbDCxEG7Js6eUnUNndWjyUZwJ3H7bErmmtOYU/Zb99DyUkpamuIZE0b3bhmJyZ7D90uS6f+CGxRRjOw==
dependencies:
"@malept/cross-spawn-promise" "^1.1.0"
is-wsl "^2.2.0"
which "^2.0.2"
cross-spawn@6.0.5, cross-spawn@^6.0.0, cross-spawn@^6.0.5:
version "6.0.5"
resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
@@ -17378,7 +17387,7 @@ electron-is-dev@^2.0.0:
resolved "https://registry.npmjs.org/electron-is-dev/-/electron-is-dev-2.0.0.tgz#833487a069b8dad21425c67a19847d9064ab19bd"
integrity sha512-3X99K852Yoqu9AcW50qz3ibYBWY79/pBhlMCab8ToEWS48R0T9tyxRiQhwylE7zQdXrMnx2JKqUJyMPmt5FBqA==
electron-notarize@^1.0.0, electron-notarize@^1.1.1:
electron-notarize@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/electron-notarize/-/electron-notarize-1.1.1.tgz#3ed274b36158c1beb1dbef14e7faf5927e028629"
integrity sha512-kufsnqh86CTX89AYNG3NCPoboqnku/+32RxeJ2+7A4Rbm4bbOx0Nc7XTy3/gAlBfpj9xPAxHfhZLOHgfi6cJVw==
@@ -17386,18 +17395,6 @@ electron-notarize@^1.0.0, electron-notarize@^1.1.1:
debug "^4.1.1"
fs-extra "^9.0.1"
electron-osx-sign@^0.4.11:
version "0.4.17"
resolved "https://registry.yarnpkg.com/electron-osx-sign/-/electron-osx-sign-0.4.17.tgz#2727ca0c79e1e4e5ccd3861fb3da9c3c913b006c"
integrity sha512-wUJPmZJQCs1zgdlQgeIpRcvrf7M5/COQaOV68Va1J/SgmWx5KL2otgg+fAae7luw6qz9R8Gvu/Qpe9tAOu/3xQ==
dependencies:
bluebird "^3.5.0"
compare-version "^0.1.2"
debug "^2.6.8"
isbinaryfile "^3.0.2"
minimist "^1.2.0"
plist "^3.0.1"
electron-osx-sign@^0.5.0:
version "0.5.0"
resolved "https://registry.yarnpkg.com/electron-osx-sign/-/electron-osx-sign-0.5.0.tgz#fc258c5e896859904bbe3d01da06902c04b51c3a"
@@ -17410,16 +17407,17 @@ electron-osx-sign@^0.5.0:
minimist "^1.2.0"
plist "^3.0.1"
electron-packager@15.1.0:
version "15.1.0"
resolved "https://registry.yarnpkg.com/electron-packager/-/electron-packager-15.1.0.tgz#16a3733e4cad26112a2ac36f0b0f35c3b0170eff"
integrity sha512-THNm4bz1DfvR9f0g51+NjuAYELflM8+1vhQ/iv/G8vyZNKzSMuFd5doobngQKq3rRsLdPNZVnGqDdgS884d7Og==
electron-packager@15.4.0:
version "15.4.0"
resolved "https://registry.yarnpkg.com/electron-packager/-/electron-packager-15.4.0.tgz#07ea036b70cde2062d4c8dce4d907d793b303998"
integrity sha512-JrrLcBP15KGrPj0cZ/ALKGmaQ4gJkn3mocf0E3bRKdR3kxKWYcDRpCvdhksYDXw/r3I6tMEcZ7XzyApWFXdVpw==
dependencies:
"@electron/get" "^1.6.0"
asar "^3.0.0"
asar "^3.1.0"
cross-spawn-windows-exe "^1.2.0"
debug "^4.0.1"
electron-notarize "^1.0.0"
electron-osx-sign "^0.4.11"
electron-notarize "^1.1.1"
electron-osx-sign "^0.5.0"
extract-zip "^2.0.0"
filenamify "^4.1.0"
fs-extra "^9.0.0"
@@ -17428,10 +17426,10 @@ electron-packager@15.1.0:
junk "^3.1.0"
parse-author "^2.0.0"
plist "^3.0.0"
rcedit "^2.0.0"
rcedit "^3.0.1"
resolve "^1.1.6"
semver "^7.1.3"
yargs-parser "^19.0.1"
yargs-parser "^20.0.0"
electron-publish@22.13.1:
version "22.13.1"
@@ -21993,19 +21991,6 @@ http-errors@~1.6.2:
setprototypeof "1.1.0"
statuses ">= 1.4.0 < 2"
http-mitm-proxy@0.7.0:
version "0.7.0"
resolved "https://registry.yarnpkg.com/http-mitm-proxy/-/http-mitm-proxy-0.7.0.tgz#82933137ae1c06713961afe50f38ca84cf80bb0c"
integrity sha512-rRMRfQCVwEO31Q6GFiQHfECdMn3Z0ddWWLNgmeyIUDMf0gr/Ek+lhZ17gWzKL4NXZkMc1h982BYl8blRXv7/og==
dependencies:
async "^2.5.0"
debug "^4.1.0"
mkdirp "^0.5.1"
node-forge "^0.8.0"
optimist "^0.6.1"
semaphore "^1.1.0"
ws "^3.2.0"
http-parser-js@>=0.5.1:
version "0.5.3"
resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.5.3.tgz#01d2709c79d41698bb01d4decc5e9da4e4a033d9"
@@ -28363,11 +28348,6 @@ node-forge@^0.10.0:
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.10.0.tgz#32dea2afb3e9926f02ee5ce8794902691a676bf3"
integrity sha512-PPmu8eEeG9saEUvI97fm4OYxXVB6bFvyNTyiUOBichBpFG8A1Ljw3bY62+5oOjDEMHRnd0Y7HQ+x7uzxOzC6JA==
node-forge@^0.8.0:
version "0.8.5"
resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.8.5.tgz#57906f07614dc72762c84cef442f427c0e1b86ee"
integrity sha512-vFMQIWt+J/7FLNyKouZ9TazT74PRV3wgv9UT4cRjC8BffxFbKXkgIWR42URCPSnHm/QDz6BOlb2Q0U4+VQT67Q==
node-gyp@^5.0.2, node-gyp@^5.1.0:
version "5.1.1"
resolved "https://registry.yarnpkg.com/node-gyp/-/node-gyp-5.1.1.tgz#eb915f7b631c937d282e33aed44cb7a025f62a3e"
@@ -32679,10 +32659,12 @@ rc@^1.0.1, rc@^1.1.6, rc@^1.2.7, rc@^1.2.8:
minimist "^1.2.0"
strip-json-comments "~2.0.1"
rcedit@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/rcedit/-/rcedit-2.3.0.tgz#951685a079db98a4cc8c21ebab75e374d5a0b108"
integrity sha512-h1gNEl9Oai1oijwyJ1WYqYSXTStHnOcv1KYljg/8WM4NAg3H1KBK3azIaKkQ1WQl+d7PoJpcBMscPfLXVKgCLQ==
rcedit@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/rcedit/-/rcedit-3.0.1.tgz#ae21b43e49c075f4d84df1929832a12c302f3c90"
integrity sha512-XM0Jv40/y4hVAqj/MO70o/IWs4uOsaSoo2mLyk3klFDW+SStLnCtzuQu+1OBTIMGlM8CvaK9ftlYCp6DJ+cMsw==
dependencies:
cross-spawn-windows-exe "^1.1.0"
"react-15.6.1@npm:react@15.6.1":
version "15.6.1"
@@ -35188,7 +35170,7 @@ semantic-release@17.4.2:
signale "^1.2.1"
yargs "^16.2.0"
semaphore@1.1.0, semaphore@^1.1.0:
semaphore@1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/semaphore/-/semaphore-1.1.0.tgz#aaad8b86b20fe8e9b32b16dc2ee682a8cd26a8aa"
integrity sha512-O4OZEaNtkMd/K0i6js9SL+gqy0ZCBMgUvlSqHKi4IBdjhe7wB8pwztUk1BbZ1fmrvpwFrPbHzqd2w5pTcJH6LA==
@@ -35577,16 +35559,16 @@ shell-quote@1.7.2, shell-quote@^1.4.2, shell-quote@^1.6.1:
resolved "https://registry.yarnpkg.com/shell-quote/-/shell-quote-1.7.2.tgz#67a7d02c76c9da24f99d20808fcaded0e0e04be2"
integrity sha512-mRz/m/JVscCrkMyPqHc/bczi3OQHkLTqXHEFu0zDhK/qfv3UcOA4SVmRCLmos4bhjr9ekVQubj/R7waKapmiQg==
shelljs@0.8.3:
version "0.8.3"
resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.3.tgz#a7f3319520ebf09ee81275b2368adb286659b097"
integrity sha512-fc0BKlAWiLpwZljmOvAOTE/gXawtCoNrP5oaY7KIaQbbyHeQVg01pSEuEGvGh3HEdBU4baCD7wQBwADmM/7f7A==
shelljs@0.8.5:
version "0.8.5"
resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.5.tgz#de055408d8361bed66c669d2f000538ced8ee20c"
integrity sha512-TiwcRcrkhHvbrZbnRcFYMLl30Dfov3HKqzp5tO5b4pt6G/SezKcYhmDg15zXVBswHmctSAQKznqNW2LO5tTDow==
dependencies:
glob "^7.0.0"
interpret "^1.0.0"
rechoir "^0.6.2"
shelljs@0.8.4, shelljs@^0.8.3, shelljs@^0.8.4:
shelljs@^0.8.4:
version "0.8.4"
resolved "https://registry.yarnpkg.com/shelljs/-/shelljs-0.8.4.tgz#de7684feeb767f8716b326078a8a00875890e3c2"
integrity sha512-7gk3UZ9kOfPLIAbslLzyWeGiEqx9e3rxwZM0KE6EL8GlGwjym9Mrlx5/p33bWTu9YG6vcS4MBxYZDHYr5lr8BQ==
@@ -41508,7 +41490,7 @@ write@1.0.3:
dependencies:
mkdirp "^0.5.1"
ws@3.3.x, ws@^3.2.0:
ws@3.3.x:
version "3.3.3"
resolved "https://registry.yarnpkg.com/ws/-/ws-3.3.3.tgz#f1cf84fe2d5e901ebce94efaece785f187a228f2"
integrity sha512-nnWLa/NwZSt4KQJu51MYlCcSQ5g7INpOrOMt4XV8j4dqTXdmlUmSHQ8/oLC069ckre0fRsgfvsKwbTdtKLCDkA==
@@ -41812,12 +41794,7 @@ yargs-parser@^18.1.2:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-parser@^19.0.1:
version "19.0.4"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-19.0.4.tgz#99183a3a59268b205c6b04177f2a5bfb46e79ba7"
integrity sha512-eXeQm7yXRjPFFyf1voPkZgXQZJjYfjgQUmGPbD2TLtZeIYzvacgWX7sQ5a1HsRgVP+pfKAkRZDNtTGev4h9vhw==
yargs-parser@^20.2.2, yargs-parser@^20.2.3:
yargs-parser@^20.0.0, yargs-parser@^20.2.2, yargs-parser@^20.2.3:
version "20.2.9"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-20.2.9.tgz#2eb7dc3b0289718fc295f362753845c41a0c94ee"
integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w==