Merge branch 'develop' into 10.0-release

* develop:
  chore: fix cypress npm package artifact upload path (#20023)
  chore(driver): move cy.within logic into its own file (#20036)
  chore: update automerge workflows (#19982)
This commit is contained in:
Tim Griesser
2022-02-06 20:41:57 -05:00
27 changed files with 799 additions and 1061 deletions
+18 -15
View File
@@ -21,8 +21,7 @@ const meta = require('./meta')
const build = require('./build')
const upload = require('./upload')
const uploadUtils = require('./util/upload')
const { uploadNpmPackage } = require('./upload-npm-package')
const { uploadUniqueBinary } = require('./upload-unique-binary')
const { uploadArtifactToS3 } = require('./upload-build-artifact')
const { moveBinaries } = require('./move-binaries')
// initialize on existing repo
@@ -252,18 +251,11 @@ const deploy = {
})
},
// upload Cypress NPM package file
'upload-npm-package' (args = process.argv) {
console.log('#packageUpload')
// upload Cypress binary or NPM Package zip file under unique hash
'upload-build-artifact' (args = process.argv) {
console.log('#uploadBuildArtifact')
return uploadNpmPackage(args)
},
// upload Cypress binary zip file under unique hash
'upload-unique-binary' (args = process.argv) {
console.log('#uniqueBinaryUpload')
return uploadUniqueBinary(args)
return uploadArtifactToS3(args)
},
// uploads a single built Cypress binary ZIP file
@@ -288,10 +280,21 @@ const deploy = {
console.log('for platform %s version %s',
options.platform, options.version)
return upload.toS3({
zipFile: options.zip,
const uploadPath = upload.getFullUploadPath({
version: options.version,
platform: options.platform,
name: upload.zipName,
})
return upload.toS3({
file: options.zip,
uploadPath,
}).then(() => {
return uploadUtils.purgeDesktopAppFromCache({
version: options.version,
platform: options.platform,
zipName: options.zip,
})
})
})
},
+6 -4
View File
@@ -18,9 +18,9 @@ import confirm from 'inquirer-confirm'
import uploadUtils from './util/upload'
// @ts-ignore
import { getUploadDirForPlatform } from './upload-unique-binary'
import { getUploadDirForPlatform } from './upload-build-artifact'
// @ts-ignore
import { zipName, getFullUploadName } from './upload'
import { zipName, getFullUploadPath } from './upload'
/**
* 40 character full sha commit string
@@ -160,7 +160,9 @@ export const moveBinaries = async (args = []) => {
const uploadDir = getUploadDirForPlatform({
version: releaseOptions.version,
}, platformArch)
uploadFolder: 'binary',
platformArch,
})
console.log('finding binary for %s in %s', platformArch, uploadDir)
@@ -216,7 +218,7 @@ export const moveBinaries = async (args = []) => {
platformArch: lastBuild.platformArch,
name: zipName,
}
const destinationPath = getFullUploadName(options)
const destinationPath = getFullUploadPath(options)
console.log('copying test runner %s to %s', lastBuild.platformArch, destinationPath)
+145
View File
@@ -0,0 +1,145 @@
const minimist = require('minimist')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const hasha = require('hasha')
const _ = require('lodash')
const upload = require('./upload')
const uploadUtils = require('./util/upload')
const { s3helpers } = require('./s3-api')
// supported artifact types mapped to their S3 folder and canonical file name
const uploadTypes = {
  binary: {
    uploadFolder: 'binary',
    uploadFileName: 'cypress.zip',
  },
  'npm-package': {
    uploadFolder: 'npm',
    uploadFileName: 'cypress.tgz',
  },
}
// Builds the public CDN url for an already-uploaded artifact path.
const getCDN = function (uploadPath) {
  const parts = [uploadUtils.getUploadUrl(), uploadPath]

  return parts.join('/')
}
// Returns the S3 directory for a given upload type / version / platform,
// e.g. beta/binary/9.4.2/win32-x64
const getUploadDirForPlatform = function (options) {
  const segments = ['beta', options.uploadFolder, options.version, options.platformArch]

  return segments.join('/')
}
// the artifact will be uploaded for every platform and uploaded into under a unique folder
// https://cdn.cypress.io/beta/(binary|npm)/<version>/<platform>/<some unique version info>/cypress.zip
// For binary:
// beta/binary/9.4.2/win32-x64/circle-develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.zip
// For NPM package:
// beta/npm/9.4.2/circle-develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.tgz
// Full S3 object path for the artifact:
// <platform dir>/<unique hash>/<canonical file name>
const getUploadPath = function (options) {
  const dir = getUploadDirForPlatform(options)

  return [dir, options.hash, options.uploadFileName].join('/')
}
// Stores the artifact's checksum and file size as S3 user metadata on the
// already-uploaded object `key`, so downloaders can verify their copy.
// Returns the promise from the S3 metadata update.
const setChecksum = (filename, key) => {
  console.log('setting checksum for file %s', filename)
  console.log('on s3 object %s', key)

  la(check.unemptyString(filename), 'expected filename', filename)
  la(check.unemptyString(key), 'expected uploaded S3 key', key)

  const checksum = hasha.fromFileSync(filename, { algorithm: 'sha512' })
  const {
    size,
  } = fs.statSync(filename)

  // NOTE: the log previously claimed SHA256 — the algorithm above is sha512
  console.log('SHA512 checksum %s', checksum)
  console.log('size', size)

  const aws = uploadUtils.getS3Credentials()
  const s3 = s3helpers.makeS3(aws)

  // S3 object metadata can only have string values
  const metadata = {
    checksum,
    size: String(size),
  }

  // by default s3.copyObject does not preserve ACL when copying
  // thus we need to reset it for our public files
  return s3helpers.setUserMetadata(aws.bucket, key, metadata,
    'application/zip', 'public-read', s3)
}
// Validates and normalizes the parsed CLI options IN PLACE:
// - asserts `type` is one of the supported upload types
// - fills in `uploadFolder` / `uploadFileName` from the chosen type
// - defaults `hash` from CI environment variables when not given
// - resolves `platformArch` from the given (or current) platform
// Mutates and returns the same `options` object.
const validateOptions = (options) => {
  const { type, version, platform } = options
  const supportedUploadTypes = Object.keys(uploadTypes)

  // no escape needed for a single quote inside a template literal
  la(check.defined(type) && supportedUploadTypes.includes(type),
    `specify which upload type you'd like to upload. One of ${supportedUploadTypes.join(',')}`, type)

  const { uploadFolder, uploadFileName } = uploadTypes[type]

  options.uploadFolder = uploadFolder
  options.uploadFileName = uploadFileName

  la(check.unemptyString(version) && check.semver(version), 'invalid version', version)

  // in CI the unique hash is derived from environment variables
  if (!options.hash) {
    options.hash = uploadUtils.formHashFromEnvironment()
  }

  la(check.unemptyString(options.hash), 'missing hash to give', options)

  options.platformArch = uploadUtils.getUploadNameByOsAndArch(platform || process.platform)

  return options
}
// CLI entry: parses args and uploads the built artifact (binary zip or npm
// tgz) to S3 under a unique hash-based path, sets its checksum metadata,
// then prints and saves the resulting CDN url to `<type>-url.json`.
// Returns a promise that now REJECTS on failure (previously errors were
// swallowed, so a failed deploy upload looked successful to CI).
const uploadArtifactToS3 = function (args = []) {
  const supportedOptions = ['type', 'version', 'file', 'hash', 'platform']
  const options = minimist(args, {
    string: supportedOptions,
  })

  console.log('Upload options')
  console.log(_.pick(options, supportedOptions))

  // mutates `options` in place (fills uploadFolder, uploadFileName, ...)
  validateOptions(options)

  const uploadPath = getUploadPath(options)

  return upload.toS3({ file: options.file, uploadPath })
  .then(() => {
    return setChecksum(options.file, uploadPath)
  })
  .then(() => {
    const cdnUrl = getCDN(uploadPath)

    if (options.type === 'binary') {
      console.log('Binary can be downloaded using URL')
      console.log(cdnUrl)
    } else {
      console.log('NPM package can be installed using URL')
      console.log('npm install %s', cdnUrl)
    }

    return cdnUrl
  })
  .then(uploadUtils.saveUrl(`${options.type}-url.json`))
  .catch((e) => {
    console.error('There was an issue uploading the artifact.')
    console.error(e)

    // re-throw so callers (and the CLI entry point) see a rejected promise
    // and CI exits non-zero instead of silently reporting success
    throw e
  })
}
// public API — getUploadDirForPlatform is also consumed by move-binaries
module.exports = {
  getCDN,
  getUploadDirForPlatform,
  getUploadPath,
  setChecksum,
  uploadArtifactToS3,
}

// allow invoking directly: `node upload-build-artifact.js --type ...`
if (!module.parent) {
  uploadArtifactToS3(process.argv)
}
-122
View File
@@ -1,122 +0,0 @@
const minimist = require('minimist')
const Promise = require('bluebird')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const path = require('path')
const awspublish = require('gulp-awspublish')
const rename = require('gulp-rename')
const gulpDebug = require('gulp-debug')
const gulp = require('gulp')
const uploadUtils = require('./util/upload')
const npmPackageExtension = '.tgz'
const uploadFileName = 'cypress.tgz'
const isNpmPackageFile = check.extension(npmPackageExtension)
// the package tgz file will be uploaded into unique folder
// in our case something like this
// https://cdn.cypress.io/beta/npm/<version>/<some unique hash>/cypress.tgz
const rootFolder = 'beta'
const npmFolder = 'npm'
// Returns the full CDN download url for an uploaded npm package tgz:
// <cdn>/beta/npm/<version>/<hash>/<filename>
const getCDN = function ({ version, hash, filename }) {
  la(check.semver(version), 'invalid version', version)
  la(check.unemptyString(hash), 'missing hash', hash)
  la(check.unemptyString(filename), 'missing filename', filename)
  la(isNpmPackageFile(filename), 'wrong extension for file', filename)

  const uploadUrl = uploadUtils.getUploadUrl()

  la(check.url(uploadUrl), 'could not get upload url', uploadUrl)

  const segments = [uploadUrl, rootFolder, npmFolder, version, hash, filename]

  return segments.join('/')
}
// Returns the unique S3 directory (with trailing slash) for this package:
// beta/npm/<version>/<hash>/
const getUploadDirName = function (options) {
  la(check.unemptyString(options.version), 'missing version', options)
  la(check.unemptyString(options.hash), 'missing hash', options)

  // the trailing `null` serializes to an empty segment when joined,
  // producing the trailing "/" on the path
  return [rootFolder, npmFolder, options.version, options.hash, null].join('/')
}
// Streams the package file to S3 via gulp-awspublish, renaming it to the
// canonical upload name inside the unique upload directory.
// Resolves when the publish stream ends; rejects on stream error.
const uploadFile = (options) => {
  return new Promise((resolve, reject) => {
    const publisher = uploadUtils.getPublisher()
    const headers = {}

    // do not let the CDN cache this object
    headers['Cache-Control'] = 'no-cache'

    return gulp.src(options.file)
    .pipe(rename((p) => {
      // upload as "cypress" (extension stripped) under the unique dir
      p.basename = path.basename(uploadFileName, npmPackageExtension)
      p.dirname = getUploadDirName(options)
      console.log('renaming upload to', p.dirname, p.basename)
      la(check.unemptyString(p.basename), 'missing basename')
      la(check.unemptyString(p.dirname), 'missing dirname')

      return p
    })).pipe(gulpDebug())
    .pipe(publisher.publish(headers))
    .pipe(awspublish.reporter())
    .on('error', reject)
    .on('end', resolve)
  })
}
// CLI entry: validates args (--version/-v, --file/-f, --hash/-h), uploads
// the tgz to a unique S3 folder, then prints the `npm install` url and
// saves it to npm-package-url.json.
const uploadNpmPackage = function (args = []) {
  console.log(args)

  const options = minimist(args, {
    string: ['version', 'file', 'hash'],
    alias: {
      version: 'v',
      file: 'f',
      hash: 'h',
    },
  })

  console.log('Upload NPM package options')
  console.log(options)

  la(check.unemptyString(options.file), 'missing file to upload', options)
  la(isNpmPackageFile(options.file),
    'invalid file to upload extension', options.file)

  // in CI, derive the unique hash from environment variables
  if (!options.hash) {
    options.hash = uploadUtils.formHashFromEnvironment()
  }

  la(check.unemptyString(options.hash), 'missing hash to give', options)
  la(check.unemptyString(options.version), 'missing version', options)
  la(fs.existsSync(options.file), 'cannot find file', options.file)

  return uploadFile(options)
  .then(() => {
    const cdnUrl = getCDN({
      version: options.version,
      hash: options.hash,
      filename: uploadFileName,
    })

    console.log('NPM package can be installed using URL')
    console.log('npm install %s', cdnUrl)

    return cdnUrl
  }).then(uploadUtils.saveUrl('npm-package-url.json'))
}
// for now disable purging from CDN cache
// because each upload should be unique by hash
// .then R.tap(uploadUtils.purgeCache)
module.exports = {
  uploadNpmPackage,
  getCDN,
}

// allow running directly from the command line
if (!module.parent) {
  uploadNpmPackage(process.argv)
}
-196
View File
@@ -1,196 +0,0 @@
const minimist = require('minimist')
const Promise = require('bluebird')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const path = require('path')
const awspublish = require('gulp-awspublish')
const rename = require('gulp-rename')
const gulpDebug = require('gulp-debug')
const gulp = require('gulp')
const hasha = require('hasha')
const _ = require('lodash')
const uploadUtils = require('./util/upload')
const {
s3helpers,
} = require('./s3-api')
// we zip the binary on every platform and upload under same name
const binaryExtension = '.zip'
const uploadFileName = 'cypress.zip'
const isBinaryFile = check.extension(binaryExtension)
const rootFolder = 'beta'
const folder = 'binary'
// the binary will be uploaded into unique folder
// in our case something like this
// https://cdn.cypress.io/desktop/binary/0.20.2/<platform>/<some unique version info>/cypress.zip
// Full CDN download url for an uploaded binary zip:
// <cdn>/beta/binary/<version>/<platform>/<hash>/<filename>
const getCDN = function ({ version, hash, filename, platform }) {
  la(check.semver(version), 'invalid version', version)
  la(check.unemptyString(hash), 'missing hash', hash)
  la(check.unemptyString(filename), 'missing filename', filename)
  la(isBinaryFile(filename), 'wrong extension for file', filename)
  la(check.unemptyString(platform), 'missing platform', platform)

  const cdnUrl = uploadUtils.getUploadUrl()

  la(check.url(cdnUrl), 'could not get cdn url', cdnUrl)

  const segments = [cdnUrl, rootFolder, folder, version, platform, hash, filename]

  return segments.join('/')
}
// returns folder that contains beta (unreleased) binaries for given version
// Folder holding all beta (unreleased) binaries for a version,
// e.g. beta/binary/1.2.3
const getUploadVersionDirName = function (options) {
  la(check.unemptyString(options.version), 'missing version', options)

  return [rootFolder, folder, options.version].join('/')
}
// Per-platform folder inside the version folder,
// e.g. beta/binary/1.2.3/linux-x64
const getUploadDirForPlatform = function (options, platformArch) {
  la(uploadUtils.isValidPlatformArch(platformArch),
    'missing or invalid platformArch', platformArch)

  const versionDir = getUploadVersionDirName(options)

  la(check.unemptyString(versionDir), 'could not form folder from', options)

  return [versionDir, platformArch].join('/')
}
// Unique upload directory (with trailing slash) for one binary build:
// beta/binary/<version>/<platformArch>/<hash>/
const getUploadDirName = function (options) {
  la(check.unemptyString(options.hash), 'missing hash', options)

  const uploadFolder = getUploadDirForPlatform(options, options.platformArch)

  la(check.unemptyString(uploadFolder), 'could not form folder from', options)

  // the trailing `null` joins as an empty segment, yielding a trailing "/"
  const dir = [uploadFolder, options.hash, null].join('/')

  return dir
}
// Streams the binary zip to S3 via gulp-awspublish under the unique upload
// directory, renamed to the canonical upload name. Resolves with the S3
// key of the uploaded object; rejects on stream error.
const uploadFile = (options) => {
  return new Promise((resolve, reject) => {
    const publisher = uploadUtils.getPublisher()
    const headers = {}

    // do not let the CDN cache this object
    headers['Cache-Control'] = 'no-cache'

    // captured inside the rename callback so we can resolve with it
    let key = null

    return gulp.src(options.file)
    .pipe(rename((p) => {
      p.basename = path.basename(uploadFileName, binaryExtension)
      p.dirname = getUploadDirName(options)
      console.log('renaming upload to', p.dirname, p.basename)
      la(check.unemptyString(p.basename), 'missing basename')
      la(check.unemptyString(p.dirname), 'missing dirname')

      // dirname already ends with "/" (see getUploadDirName), so plain
      // concatenation forms a valid S3 key
      key = p.dirname + uploadFileName

      return p
    })).pipe(gulpDebug())
    .pipe(publisher.publish(headers))
    .pipe(awspublish.reporter())
    .on('error', reject)
    .on('end', () => {
      return resolve(key)
    })
  })
}
// Stores the binary's checksum and file size as S3 user metadata on the
// already-uploaded object `key`, so downloaders can verify their copy.
// Returns the promise from the S3 metadata update.
const setChecksum = (filename, key) => {
  console.log('setting checksum for file %s', filename)
  console.log('on s3 object %s', key)

  la(check.unemptyString(filename), 'expected filename', filename)
  la(check.unemptyString(key), 'expected uploaded S3 key', key)

  const checksum = hasha.fromFileSync(filename, { algorithm: 'sha512' })
  const {
    size,
  } = fs.statSync(filename)

  // NOTE: the log previously claimed SHA256 — the algorithm above is sha512
  console.log('SHA512 checksum %s', checksum)
  console.log('size', size)

  const aws = uploadUtils.getS3Credentials()
  const s3 = s3helpers.makeS3(aws)

  // S3 object metadata can only have string values
  const metadata = {
    checksum,
    size: String(size),
  }

  // by default s3.copyObject does not preserve ACL when copying
  // thus we need to reset it for our public files
  return s3helpers.setUserMetadata(aws.bucket, key, metadata,
    'application/zip', 'public-read', s3)
}
// CLI entry: validates args (--version/-v, --file/-f, --hash/-h,
// --platform), uploads the binary zip under a unique folder, stores
// checksum metadata, then prints the download url and saves it to
// binary-url.json.
const uploadUniqueBinary = function (args = []) {
  const options = minimist(args, {
    string: ['version', 'file', 'hash', 'platform'],
    alias: {
      version: 'v',
      file: 'f',
      hash: 'h',
    },
  })

  console.log('Upload unique binary options')
  console.log(_.pick(options, ['file', 'version', 'hash']))

  la(check.unemptyString(options.file), 'missing file to upload', options)
  la(isBinaryFile(options.file),
    'invalid file to upload extension', options.file)

  // in CI, derive the unique hash from environment variables
  if (!options.hash) {
    options.hash = uploadUtils.formHashFromEnvironment()
  }

  la(check.unemptyString(options.hash), 'missing hash to give', options)
  la(check.unemptyString(options.version), 'missing version', options)
  la(fs.existsSync(options.file), 'cannot find file', options.file)

  // fall back to the current process platform when none was given
  const platform = options.platform != null ? options.platform : process.platform

  options.platformArch = uploadUtils.getUploadNameByOsAndArch(platform)

  return uploadFile(options)
  .then((key) => {
    return setChecksum(options.file, key)
  }).then(() => {
    const cdnUrl = getCDN({
      version: options.version,
      hash: options.hash,
      filename: uploadFileName,
      platform: options.platformArch,
    })

    console.log('Binary can be downloaded using URL')
    console.log(cdnUrl)

    return cdnUrl
  }).then(uploadUtils.saveUrl('binary-url.json'))
}
// public API — getUploadDirForPlatform is also consumed by move-binaries
module.exports = {
  getUploadDirName,
  getUploadDirForPlatform,
  uploadUniqueBinary,
  getCDN,
}

// allow running directly from the command line
if (!module.parent) {
  uploadUniqueBinary(process.argv)
}
+36 -55
View File
@@ -5,9 +5,9 @@ let fs = require('fs-extra')
const path = require('path')
const gulp = require('gulp')
const Promise = require('bluebird')
const meta = require('./meta')
const la = require('lazy-ass')
const check = require('check-more-types')
const uploadUtils = require('./util/upload')
fs = Promise.promisifyAll(fs)
@@ -30,17 +30,25 @@ module.exports = {
// returns desktop folder for a given folder without platform
// something like desktop/0.20.1
getUploadeVersionFolder (aws, version) {
getUploadVersionFolder (aws, version) {
la(check.unemptyString(aws.folder), 'aws object is missing desktop folder', aws.folder)
const dirName = [aws.folder, version].join('/')
return dirName
},
getFullUploadName ({ folder, version, platformArch, name }) {
la(check.unemptyString(folder), 'missing folder', folder)
la(check.semver(version), 'missing or invalid version', version)
la(check.unemptyString(name), 'missing file name', name)
// store uploaded application in subfolders by version and platform
// something like desktop/0.20.1/darwin-x64/
getFullUploadPath (options) {
let { folder, version, platformArch, name } = options
if (!folder) {
folder = this.getAwsObj().folder
}
la(check.unemptyString(folder), 'missing folder', options)
la(check.semver(version), 'missing or invalid version', options)
la(check.unemptyString(name), 'missing file name', options)
la(uploadUtils.isValidPlatformArch(platformArch),
'invalid platform and arch', platformArch)
@@ -49,20 +57,6 @@ module.exports = {
return fileName
},
// store uploaded application in subfolders by platform and version
// something like desktop/0.20.1/darwin-x64/
getUploadDirName ({ version, platform }) {
const aws = this.getAwsObj()
const platformArch = uploadUtils.getUploadNameByOsAndArch(platform)
const versionFolder = this.getUploadeVersionFolder(aws, version)
const dirName = [versionFolder, platformArch, null].join('/')
console.log('target directory %s', dirName)
return dirName
},
getManifestUrl (folder, version, uploadOsName) {
const url = uploadUtils.getUploadUrl()
@@ -141,48 +135,35 @@ module.exports = {
})
},
toS3 ({ zipFile, version, platform }) {
toS3 ({ file, uploadPath }) {
console.log('#uploadToS3 ⏳')
console.log('uploading', file, 'to', uploadPath)
la(check.unemptyString(version), 'expected version string', version)
la(check.unemptyString(zipFile), 'expected zip filename', zipFile)
la(check.extension('zip', zipFile),
'zip filename should end with .zip', zipFile)
la(check.unemptyString(file), 'missing file to upload', file)
la(fs.existsSync(file), 'cannot find file', file)
la(check.extension(path.extname(uploadPath))(file),
'invalid file to upload extension', file)
la(meta.isValidPlatform(platform), 'invalid platform', platform)
return new Promise((resolve, reject) => {
const publisher = this.getPublisher()
console.log(`zip filename ${zipFile}`)
const headers = {}
if (!fs.existsSync(zipFile)) {
throw new Error(`Cannot find zip file ${zipFile}`)
}
headers['Cache-Control'] = 'no-cache'
const upload = () => {
return new Promise((resolve, reject) => {
const publisher = this.getPublisher()
return gulp.src(file)
.pipe(rename((p) => {
// rename to standard filename for upload
p.basename = path.basename(uploadPath, path.extname(uploadPath))
p.dirname = path.dirname(uploadPath)
const headers = {}
headers['Cache-Control'] = 'no-cache'
return gulp.src(zipFile)
.pipe(rename((p) => {
// rename to standard filename zipName
p.basename = path.basename(zipName, p.extname)
p.dirname = this.getUploadDirName({ version, platform })
return p
})).pipe(gulpDebug())
.pipe(publisher.publish(headers))
.pipe(awspublish.reporter())
.on('error', reject)
.on('end', resolve)
})
}
return upload()
.then(() => {
return uploadUtils.purgeDesktopAppFromCache({ version, platform, zipName })
return p
}))
.pipe(gulpDebug())
.pipe(publisher.publish(headers))
.pipe(awspublish.reporter())
.on('error', reject)
.on('end', resolve)
})
},
}
-1
View File
@@ -15,7 +15,6 @@ const getUploadUrl = function () {
const url = konfig('cdn_url')
la(check.url(url), 'could not get CDN url', url)
console.log('upload url', url)
return url
}