merge in develop to 10.0-release

Lachlan Miller
2022-03-07 10:44:26 +10:00
parent de4cb66050
commit 76129e26af
60 changed files with 2191 additions and 859 deletions

View File

@@ -114,6 +114,7 @@ export async function buildCypressApp (options: BuildCypressAppOpts) {
     meta.distDir('**', 'esprima', 'test'),
     meta.distDir('**', 'bmp-js', 'test'),
     meta.distDir('**', 'exif-parser', 'test'),
+    meta.distDir('**', 'app-module-path', 'test'),
   ], { force: true })

   console.log('Deleted excess directories')
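
Note: the list above feeds a glob-based delete; the { force: true } option matches the del package's API, though the helper itself sits outside this hunk. A minimal sketch of the same pruning pattern, assuming del and with illustrative paths:

    const del = require('del')

    // prune bundled test folders from third-party packages before packaging;
    // force: true permits deleting outside the current working directory
    del([
      'dist/**/esprima/test',
      'dist/**/app-module-path/test',
    ], { force: true }).then(() => console.log('Deleted excess directories'))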

View File

@@ -113,6 +113,8 @@ export const prompts = {
 export const moveBinaries = async (args = []) => {
   debug('moveBinaries with args %o', args)
   const options = arg({
+    '--s3bucket': String,
+    '--s3folder': String,
     '--commit': String,
     '--version': String,
     // optional, if passed, only the binary for that platform will be moved
@@ -136,8 +138,13 @@ export const moveBinaries = async (args = []) => {
     version: options['--version'],
   }

-  const aws = uploadUtils.getS3Credentials()
-  const s3 = s3helpers.makeS3(aws)
+  const credentials = await uploadUtils.getS3Credentials()
+  const aws = {
+    'bucket': options['--s3bucket'] || uploadUtils.S3Configuration.bucket,
+    'folder': options['--s3folder'] || uploadUtils.S3Configuration.releaseFolder,
+  }
+  const s3 = s3helpers.makeS3(credentials)

   // found s3 paths with last build for same commit for all platforms
   const lastBuilds: Desktop[] = []
@@ -164,12 +171,12 @@ export const moveBinaries = async (args = []) => {
       platformArch,
     })

-    console.log('finding binary for %s in %s', platformArch, uploadDir)
+    console.log('finding binary in %s for %s in %s', aws.bucket, platformArch, uploadDir)

     const list: string[] = await s3helpers.listS3Objects(uploadDir, aws.bucket, s3)

     if (debug.enabled) {
-      console.log('all found subfolders')
+      console.log('all found sub-folders')
       console.log(list.join('\n'))
     }
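
Note: the two new flags let a release override the source bucket and folder, falling back to the S3Configuration defaults. A hypothetical invocation (entry point and flag values are illustrative; the commit hash is the sample one used in the path comments later in this diff):

    node scripts/binary.js move-binaries \
      --s3bucket cdn-test.cypress.io \
      --s3folder desktop \
      --version 9.4.2 \
      --commit 219138ca4e952edc4af831f2ae16ce659ebdb50b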

View File

@@ -13,12 +13,18 @@ export const hasOnlyStringValues = (o) => {
  */
 export const s3helpers = {
   makeS3 (aws) {
-    la(is.unemptyString(aws.key), 'missing aws key')
-    la(is.unemptyString(aws.secret), 'missing aws secret')
+    la(is.unemptyString(aws.accessKeyId), 'missing aws accessKeyId')
+    la(is.unemptyString(aws.secretAccessKey), 'missing aws secretAccessKey')
+
+    if (!process.env.CIRCLECI) {
+      // sso is not required for CircleCI
+      la(is.unemptyString(aws.sessionToken), 'missing aws sessionToken')
+    }

     return new S3({
-      accessKeyId: aws.key,
-      secretAccessKey: aws.secret,
+      accessKeyId: aws.accessKeyId,
+      secretAccessKey: aws.secretAccessKey,
+      sessionToken: aws.sessionToken,
     })
   },
@@ -40,7 +46,7 @@ export const s3helpers = {
       debug('s3 data for %s', zipFile)
       debug(data)

-      resolve()
+      resolve(null)
     })
   })
 },
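
Note: makeS3 now asserts the AWS SDK credential field names instead of the old key/secret pair, and sessionToken is only required outside CircleCI because SSO issues it locally. A minimal sketch of the shape it expects (require path and values illustrative; the keys are the standard AWS documentation examples):

    const { s3helpers } = require('./s3-api')

    const credentials = {
      accessKeyId: 'AKIAIOSFODNN7EXAMPLE', // always required
      secretAccessKey: 'wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY', // always required
      sessionToken: 'FwoGZXIvYXdzEXAMPLE...', // required outside CircleCI (SSO-issued)
    }

    const s3 = s3helpers.makeS3(credentials)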

View File

@@ -9,16 +9,7 @@ const upload = require('./upload')
 const uploadUtils = require('./util/upload')
 const { s3helpers } = require('./s3-api')

-const uploadTypes = {
-  binary: {
-    uploadFolder: 'binary',
-    uploadFileName: 'cypress.zip',
-  },
-  'npm-package': {
-    uploadFolder: 'npm',
-    uploadFileName: 'cypress.tgz',
-  },
-}
+const uploadTypes = uploadUtils.S3Configuration.betaUploadTypes

 const getCDN = function (uploadPath) {
   return [uploadUtils.getUploadUrl(), uploadPath].join('/')
@@ -32,16 +23,16 @@ const getUploadDirForPlatform = function (options) {
 // the artifact will be uploaded for every platform under a unique folder
 // https://cdn.cypress.io/beta/(binary|npm)/<version>/<platform>/<some unique version info>/cypress.zip
 // For binary:
-//   beta/binary/9.4.2/win32-x64/circle-develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.zip
+//   beta/binary/9.4.2/win32-x64/develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.zip
 // For NPM package:
-//   beta/npm/9.4.2/circle-develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.tgz
+//   beta/npm/9.4.2/develop-219138ca4e952edc4af831f2ae16ce659ebdb50b/cypress.tgz
 const getUploadPath = function (options) {
   const { hash, uploadFileName } = options

   return [getUploadDirForPlatform(options), hash, uploadFileName].join('/')
 }

-const setChecksum = (filename, key) => {
+const setChecksum = async (filename, key) => {
   console.log('setting checksum for file %s', filename)
   console.log('on s3 object %s', key)
@@ -56,7 +47,7 @@ const setChecksum = (filename, key) => {
   console.log('SHA256 checksum %s', checksum)
   console.log('size', size)

-  const aws = uploadUtils.getS3Credentials()
+  const aws = await uploadUtils.getS3Credentials()
   const s3 = s3helpers.makeS3(aws)

   // S3 object metadata can only have string values
   const metadata = {
@@ -66,7 +57,7 @@ const setChecksum = (filename, key) => {
   // by default s3.copyObject does not preserve ACL when copying
   // thus we need to reset it for our public files
-  return s3helpers.setUserMetadata(aws.bucket, key, metadata,
+  return s3helpers.setUserMetadata(uploadUtils.S3Configuration.bucket, key, metadata,
     'application/zip', 'public-read', s3)
 }
@@ -128,7 +119,7 @@ const uploadArtifactToS3 = function (args = []) {
   .then(uploadUtils.saveUrl(`${options.type}-url.json`))
   .catch((e) => {
     console.error('There was an issue uploading the artifact.')
-    console.error(e)
+    throw e
   })
 }
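
Note: rethrowing inside the .catch keeps the returned promise rejected, so a failed upload now fails the calling step instead of being logged and swallowed. A minimal sketch of the difference:

    Promise.reject(new Error('upload failed'))
    .catch((e) => {
      console.error('There was an issue uploading the artifact.')
      throw e // without the rethrow, the chain would resolve and mask the failure
    })
    .catch(() => {
      process.exitCode = 1 // downstream consumers still observe the rejection
    })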

View File

@@ -15,17 +15,13 @@ fs = Promise.promisifyAll(fs)
 // TODO: refactor this
 // system expects desktop application to be inside a file
 // with this name
-const zipName = 'cypress.zip'
+const zipName = uploadUtils.S3Configuration.binaryZipName

 module.exports = {
   zipName,

-  getPublisher () {
-    return uploadUtils.getPublisher(this.getAwsObj)
-  },
-
-  getAwsObj () {
-    return uploadUtils.getS3Credentials()
+  async getPublisher () {
+    return uploadUtils.getPublisher()
   },

   // returns desktop folder for a given folder without platform
@@ -43,7 +39,7 @@ module.exports = {
   let { folder, version, platformArch, name } = options

   if (!folder) {
-    folder = this.getAwsObj().folder
+    folder = uploadUtils.S3Configuration.releaseFolder
   }

   la(check.unemptyString(folder), 'missing folder', options)
@@ -104,34 +100,34 @@ module.exports = {
   },

   s3Manifest (version) {
-    const publisher = this.getPublisher()
-    const aws = this.getAwsObj()
-    const headers = {}
-
-    headers['Cache-Control'] = 'no-cache'
-
-    let manifest = null
-
-    return new Promise((resolve, reject) => {
-      return this.createRemoteManifest(aws.folder, version)
-      .then((src) => {
-        manifest = src
-
-        return gulp.src(src)
-        .pipe(rename((p) => {
-          p.dirname = `${aws.folder}/${p.dirname}`
-
-          return p
-        })).pipe(gulpDebug())
-        .pipe(publisher.publish(headers))
-        .pipe(awspublish.reporter())
-        .on('error', reject)
-        .on('end', resolve)
-      })
-    }).finally(() => {
-      return fs.removeAsync(manifest)
-    })
+    return this.getPublisher()
+    .then((publisher) => {
+      const { releaseFolder } = uploadUtils.S3Configuration
+      const headers = {
+        'Cache-Control': 'no-cache',
+      }
+      let manifest = null
+
+      return new Promise((resolve, reject) => {
+        return this.createRemoteManifest(releaseFolder, version)
+        .then((src) => {
+          manifest = src
+
+          return gulp.src(src)
+          .pipe(rename((p) => {
+            p.dirname = `${releaseFolder}/${p.dirname}`
+
+            return p
+          })).pipe(gulpDebug())
+          .pipe(publisher.publish(headers))
+          .pipe(awspublish.reporter())
+          .on('error', reject)
+          .on('end', resolve)
+        })
+      }).finally(() => {
+        return fs.removeAsync(manifest)
+      })
+    })
   },
@@ -144,26 +140,27 @@ module.exports = {
     la(check.extension(path.extname(uploadPath))(file),
       'invalid file to upload extension', file)

-    return new Promise((resolve, reject) => {
-      const publisher = this.getPublisher()
-      const headers = {}
-
-      headers['Cache-Control'] = 'no-cache'
-
-      return gulp.src(file)
-      .pipe(rename((p) => {
-        // rename to standard filename for upload
-        p.basename = path.basename(uploadPath, path.extname(uploadPath))
-        p.dirname = path.dirname(uploadPath)
-
-        return p
-      }))
-      .pipe(gulpDebug())
-      .pipe(publisher.publish(headers))
-      .pipe(awspublish.reporter())
-      .on('error', reject)
-      .on('end', resolve)
-    })
+    return this.getPublisher()
+    .then((publisher) => {
+      const headers = {
+        'Cache-Control': 'no-cache',
+      }
+
+      return new Promise((resolve, reject) => {
+        return gulp.src(file)
+        .pipe(rename((p) => {
+          // rename to standard filename for upload
+          p.basename = path.basename(uploadPath, path.extname(uploadPath))
+          p.dirname = path.dirname(uploadPath)
+
+          return p
+        }))
+        .pipe(gulpDebug())
+        .pipe(publisher.publish(headers))
+        .pipe(awspublish.reporter())
+        .on('error', reject)
+        .on('end', resolve)
+      })
+    })
   },
 }
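
Note: because getPublisher is now async, both call sites above move the gulp pipeline inside a .then, keeping the Promise wrapper only around the stream events. A minimal sketch of the pattern with a hypothetical helper (same gulp/awspublish setup assumed; require paths illustrative):

    const gulp = require('gulp')
    const uploadUtils = require('./util/upload')

    // resolve the async publisher first, then wrap only the stream in a Promise
    async function publishFile (file, headers) {
      const publisher = await uploadUtils.getPublisher()

      return new Promise((resolve, reject) => {
        gulp.src(file)
        .pipe(publisher.publish(headers))
        .on('error', reject)
        .on('end', resolve)
      })
    }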

View File

@@ -1,5 +1,4 @@
 const _ = require('lodash')
-const path = require('path')
 const awspublish = require('gulp-awspublish')
 const human = require('human-interval')
 const la = require('lazy-ass')
@@ -7,7 +6,8 @@ const check = require('check-more-types')
 const fse = require('fs-extra')
 const os = require('os')
 const Promise = require('bluebird')
-const { configFromEnvOrJsonFile, filenameToShellVariable } = require('@cypress/env-or-json-file')
+const { fromSSO, fromEnv } = require('@aws-sdk/credential-providers')
+
 const konfig = require('../get-config')()
 const { purgeCloudflareCache } = require('./purge-cloudflare-cache')
@@ -25,47 +25,50 @@ const formHashFromEnvironment = function () {
   } = process

   if (env.CIRCLECI) {
-    return `circle-${env.CIRCLE_BRANCH}-${env.CIRCLE_SHA1}`
+    return `${env.CIRCLE_BRANCH}-${env.CIRCLE_SHA1}`
   }

   throw new Error('Do not know how to form unique build hash on this CI')
 }

-const getS3Credentials = function () {
-  const key = path.join('scripts', 'support', 'aws-credentials.json')
-  const config = configFromEnvOrJsonFile(key)
-
-  if (!config) {
-    console.error('⛔️ Cannot find AWS credentials')
-    console.error('Using @cypress/env-or-json-file module')
-    console.error('and filename', key)
-    console.error('which is environment variable', filenameToShellVariable(key))
-    console.error('available environment variable keys')
-    console.error(Object.keys(process.env))
-    throw new Error('AWS config not found')
-  }
-
-  la(check.unemptyString(config.bucket), 'missing AWS config bucket')
-  la(check.unemptyString(config.folder), 'missing AWS config folder')
-  la(check.unemptyString(config.key), 'missing AWS key')
-  la(check.unemptyString(config.secret), 'missing AWS secret key')
-
-  return config
+const S3Configuration = {
+  bucket: 'cdn.cypress.io',
+  releaseFolder: 'desktop',
+  binaryZipName: 'cypress.zip',
+  betaUploadTypes: {
+    binary: {
+      uploadFolder: 'binary',
+      uploadFileName: 'cypress.zip',
+    },
+    'npm-package': {
+      uploadFolder: 'npm',
+      uploadFileName: 'cypress.tgz',
+    },
+  },
 }

-const getPublisher = function (getAwsObj = getS3Credentials) {
-  const aws = getAwsObj()
+const getS3Credentials = async function () {
+  // sso is not required for CircleCI
+  if (process.env.CIRCLECI) {
+    return fromEnv()()
+  }
+
+  return fromSSO({ profile: process.env.AWS_PROFILE || 'production' })()
+}
+
+const getPublisher = async function () {
+  const aws = await getS3Credentials()
+  // console.log("aws.bucket", aws.bucket)

   return awspublish.create({
     httpOptions: {
       timeout: human('10 minutes'),
     },
     params: {
-      Bucket: aws.bucket,
+      Bucket: S3Configuration.bucket,
     },
-    accessKeyId: aws.key,
-    secretAccessKey: aws.secret,
+    accessKeyId: aws.accessKeyId,
+    secretAccessKey: aws.secretAccessKey,
+    sessionToken: aws.sessionToken,
   })
 }
@@ -156,6 +159,7 @@ const saveUrl = (filename) => {
 }

 module.exports = {
+  S3Configuration,
   getS3Credentials,
   getPublisher,
   purgeDesktopAppFromCache,
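
Note: the providers from @aws-sdk/credential-providers are lazy factories; calling the returned function yields a promise of { accessKeyId, secretAccessKey, sessionToken, ... }, and an expired SSO session makes fromSSO reject, so running aws sso login --profile production first may be needed locally. A minimal end-to-end sketch under the same assumptions as this commit (helper name, require paths, and the key prefix are illustrative; the listS3Objects signature matches its use in move-binaries earlier in this diff):

    const uploadUtils = require('./util/upload')
    const { s3helpers } = require('./s3-api')

    async function listReleaseObjects (version) {
      const credentials = await uploadUtils.getS3Credentials()
      const s3 = s3helpers.makeS3(credentials)
      const { releaseFolder, bucket } = uploadUtils.S3Configuration

      // list everything uploaded for this version in the release folder
      return s3helpers.listS3Objects(`${releaseFolder}/${version}`, bucket, s3)
    }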