Merge branch 'develop' into chore/merge_develop

This commit is contained in:
Bill Glesias
2023-07-25 12:28:35 -04:00
42 changed files with 928 additions and 672 deletions
+6 -1
View File
@@ -3,7 +3,7 @@
// "afterSign": "./scripts/after-sign-hook.js"
const fs = require('fs')
const path = require('path')
let electron_notarize = require('electron-notarize')
let electron_notarize = require('@electron/notarize')
module.exports = async function (params) {
// Only notarize the app on Mac OS.
@@ -40,12 +40,17 @@ module.exports = async function (params) {
throw new Error('Missing Apple password for notarization: NOTARIZE_APP_PASSWORD')
}
if (!process.env.NOTARIZE_APP_TEAM_ID) {
throw new Error('Missing Apple team id for notarization: NOTARIZE_APP_TEAM_ID')
}
try {
await electron_notarize.notarize({
appBundleId: appId,
appPath,
appleId: process.env.NOTARIZE_APP_APPLE_ID,
appleIdPassword: process.env.NOTARIZE_APP_PASSWORD,
teamId: process.env.NOTARIZE_APP_TEAM_ID,
})
} catch (error) {
console.error('could not notarize application')
+1 -1
View File
@@ -1,4 +1,4 @@
require('@packages/ts/register')
require('../packages/ts/register')
const command = process.argv[2]
+18 -4
View File
@@ -4,10 +4,11 @@ import path from 'path'
import _ from 'lodash'
import del from 'del'
import chalk from 'chalk'
import electron from '@packages/electron'
import electron from '../../packages/electron'
import la from 'lazy-ass'
import { promisify } from 'util'
import glob from 'glob'
import tar from 'tar'
import * as packages from './util/packages'
import * as meta from './meta'
@@ -25,6 +26,8 @@ const globAsync = promisify(glob)
const CY_ROOT_DIR = path.join(__dirname, '..', '..')
const jsonRoot = fs.readJSONSync(path.join(CY_ROOT_DIR, 'package.json'))
const log = function (msg) {
const time = new Date()
const timeStamp = time.toLocaleTimeString()
@@ -37,6 +40,7 @@ interface BuildCypressAppOpts {
version: string
skipSigning?: boolean
keepBuild?: boolean
createTar?: boolean
}
/**
@@ -74,7 +78,7 @@ async function checkMaxPathLength () {
// For debugging the flow without rebuilding each time
export async function buildCypressApp (options: BuildCypressAppOpts) {
const { platform, version, skipSigning = false, keepBuild = false } = options
const { platform, version, keepBuild = false, createTar } = options
log('#checkPlatform')
if (platform !== os.platform()) {
@@ -109,8 +113,6 @@ export async function buildCypressApp (options: BuildCypressAppOpts) {
await packages.copyAllToDist(DIST_DIR)
fs.copySync(path.join(CY_ROOT_DIR, 'patches'), path.join(DIST_DIR, 'patches'))
const jsonRoot = fs.readJSONSync(path.join(CY_ROOT_DIR, 'package.json'))
const packageJsonContents = _.omit(jsonRoot, [
'devDependencies',
'lint-staged',
@@ -200,11 +202,21 @@ require('./packages/server/index.js')
log('#transformSymlinkRequires')
await transformRequires(meta.distDir())
// optionally create a tar of the `cypress-build` directory. This is used in CI.
if (createTar) {
log('#create tar from dist dir')
await tar.c({ file: 'cypress-dist.tgz', gzip: true, cwd: os.tmpdir() }, ['cypress-build'])
}
log(`#testDistVersion ${meta.distDir()}`)
await testDistVersion(meta.distDir(), version)
log('#testStaticAssets')
await testStaticAssets(meta.distDir())
}
export async function packageElectronApp (options: BuildCypressAppOpts) {
const { platform, version, skipSigning = false } = options
log('#removeCyAndBinFolders')
await del([
@@ -229,6 +241,8 @@ require('./packages/server/index.js')
// to learn how to get the right Mac certificate for signing and notarizing
// the built Test Runner application
const electronVersion = electron.getElectronVersion()
const appFolder = meta.distDir()
const outputFolder = meta.buildRootDir()
+163
View File
@@ -0,0 +1,163 @@
const fs = require('fs-extra')
const rp = require('@cypress/request-promise')
const util = require('util')
const exec = require('child_process').exec
const minimist = require('minimist')
const chalk = require('chalk')
const execPromise = util.promisify(exec)
const artifactJobName = 'publish-binary'
const urlPaths = [
'~/cypress/binary-url.json',
'~/cypress/npm-package-url.json',
]
const archivePaths = [
'~/cypress/cypress.zip',
'~/cypress/cypress.tgz',
]
// Build the common CircleCI v2 API options for a GET request to `url`,
// authenticated via the CIRCLE_TOKEN environment variable.
function getRequestOptions (url) {
  const headers = { 'Circle-Token': process.env.CIRCLE_TOKEN }

  return { method: 'GET', url, headers }
}
// Read the triggered-pipeline info file and return its `id` field.
// Throws when the file does not contain a pipeline id.
function getPipelineId (pipelineInfoFilePath) {
  const contents = fs.readFileSync(pipelineInfoFilePath)
  const { id } = JSON.parse(contents)

  if (!id) {
    throw new Error(`error retrieving pipeline id from ${pipelineInfoFilePath}`)
  }

  return id
}
// Fetch the workflows of a pipeline from the CircleCI API.
// The triggering contract is one workflow per pipeline, so zero or
// more-than-one workflows are both treated as errors.
async function getWorkflows (pipelineId) {
  const raw = await rp(getRequestOptions(`https://circleci.com/api/v2/pipeline/${pipelineId}/workflow`))
  const { items } = JSON.parse(raw)

  if (!items.length) {
    throw new Error(`did not find any workflows in pipeline ${pipelineId}`)
  }

  if (items.length > 1) {
    console.log(items)
    throw new Error(`expected pipeline ${pipelineId} to only have one workflow, but it had many`)
  }

  return items
}
// Fetch every job belonging to a workflow; throws when the workflow is empty.
async function getWorkflowJobs (workflowId) {
  const raw = await rp(getRequestOptions(`https://circleci.com/api/v2/workflow/${workflowId}/job`))
  const { items } = JSON.parse(raw)

  if (!items.length) {
    throw new Error(`did not find any jobs in workflow ${workflowId}`)
  }

  return items
}
// Fetch the artifact list published by a job in the
// cypress-publish-binary project; throws when the job produced none.
async function getJobArtifacts (jobNumber) {
  const raw = await rp(getRequestOptions(`https://circleci.com/api/v2/project/github/cypress-io/cypress-publish-binary/${jobNumber}/artifacts`))
  const { items } = JSON.parse(raw)

  if (!items.length) {
    throw new Error(`did not find any artifacts for job ${jobNumber}`)
  }

  return items
}
// Download a single CircleCI artifact to a local path by shelling out to curl.
// -L follows the CDN redirect; the Circle-Token header authenticates.
// NOTE(review): `url` and `path` are interpolated into the shell command
// unquoted — fine for CircleCI-generated values, but fragile/injectable if
// either ever contains shell metacharacters. `path` appears deliberately
// unquoted so `~` expands to the home directory — confirm before quoting.
async function downloadArtifact (url, path) {
try {
console.log(`Downloading artifact from ${chalk.cyan(url)} \n to path ${chalk.cyan(path)}...`)
await execPromise(`curl -L --url ${url} --header 'Circle-Token: ${process.env.CIRCLE_TOKEN}' --header 'content-type: application/json' -o ${path}`)
} catch (error) {
throw new Error(`failed to fetch artifact from URL ${url}: ${error}`)
}
}
// Orchestrates downloading published build artifacts:
// pipeline-info file -> pipeline id -> the pipeline's single workflow ->
// the `publish-binary` job -> that job's artifacts -> local downloads.
// Every collaborator is invoked through `module.exports.*` so tests can
// stub the individual steps.
async function run (args) {
const options = minimist(args)
const pipelineInfoFilePath = options.pipelineInfo
if (!pipelineInfoFilePath) {
throw new Error('--pipelineInfo must be provided as a parameter')
}
console.log(`Parsing pipeline info from ${chalk.cyan(pipelineInfoFilePath)}...`)
const pipelineId = module.exports.getPipelineId(pipelineInfoFilePath)
console.log(`Getting workflows from pipeline ${chalk.cyan(pipelineId)}...`)
const workflows = await module.exports.getWorkflows(pipelineId)
// getWorkflows guarantees exactly one workflow, so take the first.
const workflow = workflows[0]
// A failed upstream workflow is reported via a non-zero exit code rather
// than a throw, so the log message below is the last output.
if (workflow.status !== 'success') {
console.error(chalk.red(`\nThe ${chalk.cyan(workflow.name)} workflow that we triggered in the ${chalk.cyan('cypress-publish-binary')} project did not succeed.\n
Status: ${chalk.red(workflow.status)} \n
Check the workflow logs to see why it failed
${chalk.cyan.underline(`https://app.circleci.com/pipelines/workflows/${workflow.id}`)}
`))
process.exitCode = 1
return
}
console.log(`Getting jobs from workflow ${chalk.cyan(workflow.name)}...`)
const jobs = await module.exports.getWorkflowJobs(workflow.id)
const job = jobs.find((job) => job.name === artifactJobName)
if (!job) {
throw new Error(`unable to find job in workflow ${workflow.name} named ${artifactJobName}`)
}
const artifacts = await module.exports.getJobArtifacts(job.job_number)
let artifactPaths
// Only download the URL files when they were actually persisted upstream.
if (process.env.SHOULD_PERSIST_ARTIFACTS) {
artifactPaths = [...urlPaths, ...archivePaths]
} else {
// If we didn't persist the artifacts to the registry, then we only want the build artifacts, no URLs.
artifactPaths = [...archivePaths]
}
const filteredArtifacts = artifacts.filter((artifact) => artifactPaths.includes(artifact.path))
// Downloads are independent, so run them concurrently.
await Promise.all(filteredArtifacts.map(({ url, path }) => {
return module.exports.downloadArtifact(url, path)
}))
console.log('Artifacts successfully downloaded ✅')
}
// Export each step individually — run() calls its collaborators through
// `module.exports.*` precisely so tests can stub them here.
module.exports = {
getPipelineId,
getWorkflows,
getWorkflowJobs,
getJobArtifacts,
downloadArtifact,
run,
}
// When executed directly (not require()d), kick off the download flow.
if (!module.parent) {
run(process.argv)
}
+42
View File
@@ -22,6 +22,7 @@ const upload = require('./upload')
const uploadUtils = require('./util/upload')
const { uploadArtifactToS3 } = require('./upload-build-artifact')
const { moveBinaries } = require('./move-binaries')
const { exec } = require('child_process')
const success = (str) => {
return console.log(chalk.bgGreen(` ${chalk.black(str)} `))
@@ -197,6 +198,22 @@ const deploy = {
})
},
// Packages the Electron app for the resolved platform/version.
// Options default to those parsed from process.argv; any missing required
// options (version, platform) are prompted for before packaging.
package (options) {
console.log('#package')
if (options == null) {
options = this.parseOptions(process.argv)
}
debug('parsed build options %o', options)
return askMissingOptions(['version', 'platform'])(options)
.then(() => {
console.log('packaging binary: platform %s version %s', options.platform, options.version)
return build.packageElectronApp(options)
})
},
zip (options) {
console.log('#zip')
if (!options) {
@@ -302,6 +319,31 @@ const deploy = {
})
})
},
async checkIfBinaryExistsOnCdn (args = process.argv) {
console.log('#checkIfBinaryExistsOnCdn')
const url = await uploadArtifactToS3([...args, '--dry-run', 'true'])
console.log(`Checking if ${url} exists...`)
const binaryExists = await rp.head(url)
.then(() => true)
.catch(() => false)
if (binaryExists) {
console.log('A binary was already built for this operating system and commit hash. Skipping binary build process...')
exec('circleci-agent step halt', (_, __, stdout) => {
console.log(stdout)
})
return
}
console.log('Binary does not yet exist. Continuing to build binary...')
return binaryExists
},
}
module.exports = _.bindAll(deploy, _.functions(deploy))
@@ -0,0 +1,50 @@
const fs = require('fs-extra')
const os = require('os')
const path = require('path')
const fetch = require('node-fetch')
const { getNextVersionForBinary } = require('../get-next-version')
// Triggers a pipeline in the cypress-publish-binary project and records the
// triggered pipeline's metadata to ~/triggered_pipeline.json so later jobs
// can look it up.
;(async () => {
  const pipelineInfoFilePath = path.join(os.homedir(), 'triggered_pipeline.json')
  const { nextVersion } = await getNextVersionForBinary()

  // Artifacts uploaded by this job are addressable at a deterministic URL
  // derived from the workflow job id and node index.
  function getArtifactUrl (fileName) {
    return `https://output.circle-artifacts.com/output/job/${process.env.CIRCLE_WORKFLOW_JOB_ID}/artifacts/${process.env.CIRCLE_NODE_INDEX}/${fileName}`
  }

  const body = JSON.stringify({
    parameters: {
      temp_dir: os.tmpdir(),
      sha: process.env.CIRCLE_SHA1,
      job_name: process.env.CIRCLE_JOB,
      binary_artifact_url: getArtifactUrl('cypress-dist.tgz'),
      built_source_artifact_url: getArtifactUrl('cypress-built-source.tgz'),
      triggered_workflow_id: process.env.CIRCLE_WORKFLOW_ID,
      triggered_job_url: process.env.CIRCLE_BUILD_URL,
      branch: process.env.CIRCLE_BRANCH,
      should_persist_artifacts: Boolean(process.env.SHOULD_PERSIST_ARTIFACTS),
      binary_version: nextVersion,
    },
  })

  let pipeline

  try {
    console.log('Triggering new pipeline in cypress-publish-binary project...')
    const response = await fetch('https://circleci.com/api/v2/project/github/cypress-io/cypress-publish-binary/pipeline', { method: 'POST', headers: { 'Circle-Token': process.env.CIRCLE_TOKEN, 'content-type': 'application/json' }, body })

    pipeline = await response.json()
    console.log(pipeline)
    console.log(`Triggered pipeline: https://app.circleci.com/pipelines/github/cypress-io/cypress-publish-binary/${pipeline.number}`)
  } catch (error) {
    throw new Error(`error triggering new pipeline ${error}`)
  }

  // The write happens outside the trigger try/catch: the original nested it
  // inside, so a write failure was re-caught by the outer catch and
  // mis-reported as "error triggering new pipeline".
  try {
    console.log(`Saving pipeline info in ${pipelineInfoFilePath} ...`)
    await fs.writeFile(path.resolve(pipelineInfoFilePath), JSON.stringify(pipeline))
  } catch (error) {
    throw new Error(`error writing triggered pipeline info ${error}`)
  }
})()
+7 -3
View File
@@ -87,7 +87,7 @@ const validateOptions = (options) => {
}
const uploadArtifactToS3 = function (args = []) {
const supportedOptions = ['type', 'version', 'file', 'hash', 'platform']
const supportedOptions = ['type', 'version', 'file', 'hash', 'platform', 'dry-run']
let options = minimist(args, {
string: supportedOptions,
})
@@ -99,13 +99,17 @@ const uploadArtifactToS3 = function (args = []) {
const uploadPath = getUploadPath(options)
const cdnUrl = getCDN(uploadPath)
if (options['dry-run']) {
return new Promise((resolve) => resolve(cdnUrl))
}
return upload.toS3({ file: options.file, uploadPath })
.then(() => {
return setChecksum(options.file, uploadPath)
})
.then(() => {
const cdnUrl = getCDN(uploadPath)
if (options.type === 'binary') {
console.log('Binary can be downloaded using URL')
console.log(cdnUrl)
+5 -2
View File
@@ -2,6 +2,7 @@ const path = require('path')
const semver = require('semver')
const bumpCb = require('conventional-recommended-bump')
const { promisify } = require('util')
const minimist = require('minimist')
const checkedInBinaryVersion = require('../package.json').version
const { changeCatagories } = require('./semantic-commits/change-categories')
@@ -102,9 +103,11 @@ if (require.main !== module) {
(async () => {
process.chdir(path.join(__dirname, '..'))
const { nextVersion } = await getNextVersionForBinary()
const args = minimist(process.argv.slice(2))
if (process.argv.includes('--npm') && checkedInBinaryVersion !== nextVersion) {
const nextVersion = args.nextVersion || (await getNextVersionForBinary()).nextVersion
if (args.npm && checkedInBinaryVersion !== nextVersion) {
const cmd = `npm --no-git-tag-version version ${nextVersion}`
console.log(`Running '${cmd}'...`)
@@ -0,0 +1,58 @@
const getPublishedArtifactsModule = require('../../binary/get-published-artifacts')
const sinon = require('sinon')
const { expect } = require('chai')
const mockArtifacts = [
{ url: '/', path: '~/cypress/binary-url.json' },
{ url: '/', path: '~/cypress/npm-package-url.json' },
{ url: '/', path: '~/cypress/cypress.zip' },
{ url: '/', path: '~/cypress/cypress.tgz' },
]
describe('get-published-artifacts', () => {
  // Stub every collaborator that run() reaches through `module.exports.*`
  // so the flow can be exercised without touching the CircleCI API.
  const stubModule = () => {
    return {
      getPipelineId: sinon.stub(getPublishedArtifactsModule, 'getPipelineId').returns('abc123'),
      getWorkflows: sinon.stub(getPublishedArtifactsModule, 'getWorkflows').returns([{ id: 'my-workflow', name: 'linux-x64', status: 'success' }]),
      getWorkflowJobs: sinon.stub(getPublishedArtifactsModule, 'getWorkflowJobs').returns([{ name: 'publish-binary', job_number: 2 }]),
      getJobArtifacts: sinon.stub(getPublishedArtifactsModule, 'getJobArtifacts').returns(mockArtifacts),
      downloadArtifact: sinon.stub(getPublishedArtifactsModule, 'downloadArtifact'),
    }
  }

  afterEach(() => {
    // sinon.reset() only clears stub history/behavior and leaves the module's
    // methods wrapped, so re-stubbing them in the next test throws
    // "already wrapped". restore() unwraps them between tests.
    sinon.restore()
  })

  it('downloads artifacts', async () => {
    process.env.SHOULD_PERSIST_ARTIFACTS = 'true'
    const stubs = stubModule()

    await getPublishedArtifactsModule.run(['--pipelineInfo', 'foo'])

    expect(stubs.getPipelineId).to.have.been.calledWith('foo')
    expect(stubs.getWorkflows).to.have.been.calledWith('abc123')
    expect(stubs.getWorkflowJobs).to.have.been.calledWith('my-workflow')
    expect(stubs.getJobArtifacts).to.have.been.calledWith(2)
    // URL files + archives are all downloaded when artifacts were persisted.
    expect(stubs.downloadArtifact.getCalls()).to.have.length(4)
    expect(stubs.downloadArtifact).to.have.been.calledWith('/', '~/cypress/binary-url.json')
    expect(stubs.downloadArtifact).to.have.been.calledWith('/', '~/cypress/npm-package-url.json')
    expect(stubs.downloadArtifact).to.have.been.calledWith('/', '~/cypress/cypress.zip')
    expect(stubs.downloadArtifact).to.have.been.calledWith('/', '~/cypress/cypress.tgz')
  })

  it('URLs are not fetched if SHOULD_PERSIST_ARTIFACTS is false', async () => {
    process.env.SHOULD_PERSIST_ARTIFACTS = ''
    const stubs = stubModule()

    await getPublishedArtifactsModule.run(['--pipelineInfo', 'foo'])

    expect(stubs.getPipelineId).to.have.been.calledWith('foo')
    expect(stubs.getWorkflows).to.have.been.calledWith('abc123')
    expect(stubs.getWorkflowJobs).to.have.been.calledWith('my-workflow')
    expect(stubs.getJobArtifacts).to.have.been.calledWith(2)
    // Only the two archive artifacts are downloaded — no URL files.
    expect(stubs.downloadArtifact.getCalls()).to.have.length(2)
    expect(stubs.downloadArtifact).to.have.been.calledWith('/', '~/cypress/cypress.zip')
    expect(stubs.downloadArtifact).to.have.been.calledWith('/', '~/cypress/cypress.tgz')
  })
})
@@ -93,6 +93,29 @@ describe('upload-release-artifact', () => {
expect(() => uploadArtifactToS3(['--type', 'npm-package', '--version', '1.0.0'])).to.throw()
})
it('does not call s3 methods and returns url when --dry-run is passed', async () => {
uploadUtils.formHashFromEnvironment.returns('hash')
uploadUtils.getUploadNameByOsAndArch.returns('darwin-x64')
const binaryArgs = ['--file', 'my.zip', '--type', 'binary', '--version', '1.0.0', '--dry-run', 'true']
const binaryUrl = await uploadArtifactToS3(binaryArgs)
expect(uploadUtils.formHashFromEnvironment).to.have.calledOnce
expect(uploadUtils.getUploadNameByOsAndArch).to.have.calledOnce
expect(upload.toS3).not.to.have.been.called
expect(binaryUrl).to.equal('https://cdn.cypress.io/beta/binary/1.0.0/darwin-x64/hash/cypress.zip')
const packageArgs = ['--file', 'cypress.tgz', '--type', 'npm-package', '--version', '1.0.0', '--dry-run', 'true']
const packageUrl = await uploadArtifactToS3(packageArgs)
expect(uploadUtils.formHashFromEnvironment).to.have.calledTwice
expect(uploadUtils.getUploadNameByOsAndArch).to.have.calledTwice
expect(upload.toS3).not.to.have.been.called
expect(packageUrl).to.equal('https://cdn.cypress.io/beta/npm/1.0.0/darwin-x64/hash/cypress.tgz')
})
it('uploads binary to s3 and saves url to json', () => {
uploadUtils.formHashFromEnvironment.returns('hash')
uploadUtils.getUploadNameByOsAndArch.returns('darwin-x64')