Merge branch 'develop' into chore/merge_develop

This commit is contained in:
Bill Glesias
2023-07-25 12:28:35 -04:00
42 changed files with 928 additions and 672 deletions

View File

@@ -4,10 +4,11 @@ import path from 'path'
import _ from 'lodash'
import del from 'del'
import chalk from 'chalk'
import electron from '@packages/electron'
import electron from '../../packages/electron'
import la from 'lazy-ass'
import { promisify } from 'util'
import glob from 'glob'
import tar from 'tar'
import * as packages from './util/packages'
import * as meta from './meta'
@@ -25,6 +26,8 @@ const globAsync = promisify(glob)
const CY_ROOT_DIR = path.join(__dirname, '..', '..')
const jsonRoot = fs.readJSONSync(path.join(CY_ROOT_DIR, 'package.json'))
const log = function (msg) {
const time = new Date()
const timeStamp = time.toLocaleTimeString()
@@ -37,6 +40,7 @@ interface BuildCypressAppOpts {
version: string
skipSigning?: boolean
keepBuild?: boolean
createTar?: boolean
}
/**
@@ -74,7 +78,7 @@ async function checkMaxPathLength () {
// For debugging the flow without rebuilding each time
export async function buildCypressApp (options: BuildCypressAppOpts) {
const { platform, version, skipSigning = false, keepBuild = false } = options
const { platform, version, keepBuild = false, createTar } = options
log('#checkPlatform')
if (platform !== os.platform()) {
@@ -109,8 +113,6 @@ export async function buildCypressApp (options: BuildCypressAppOpts) {
await packages.copyAllToDist(DIST_DIR)
fs.copySync(path.join(CY_ROOT_DIR, 'patches'), path.join(DIST_DIR, 'patches'))
const jsonRoot = fs.readJSONSync(path.join(CY_ROOT_DIR, 'package.json'))
const packageJsonContents = _.omit(jsonRoot, [
'devDependencies',
'lint-staged',
@@ -200,11 +202,21 @@ require('./packages/server/index.js')
log('#transformSymlinkRequires')
await transformRequires(meta.distDir())
// optionally create a tar of the `cypress-build` directory. This is used in CI.
if (createTar) {
log('#create tar from dist dir')
await tar.c({ file: 'cypress-dist.tgz', gzip: true, cwd: os.tmpdir() }, ['cypress-build'])
}
log(`#testDistVersion ${meta.distDir()}`)
await testDistVersion(meta.distDir(), version)
log('#testStaticAssets')
await testStaticAssets(meta.distDir())
}
export async function packageElectronApp (options: BuildCypressAppOpts) {
const { platform, version, skipSigning = false } = options
log('#removeCyAndBinFolders')
await del([
@@ -229,6 +241,8 @@ require('./packages/server/index.js')
// to learn how to get the right Mac certificate for signing and notarizing
// the built Test Runner application
const electronVersion = electron.getElectronVersion()
const appFolder = meta.distDir()
const outputFolder = meta.buildRootDir()

View File

@@ -0,0 +1,163 @@
const fs = require('fs-extra')
const rp = require('@cypress/request-promise')
const util = require('util')
const exec = require('child_process').exec
const minimist = require('minimist')
const chalk = require('chalk')
// Promisified child_process.exec so curl downloads can be awaited.
const execPromise = util.promisify(exec)

// Name of the downstream CircleCI job whose artifacts we download.
const artifactJobName = 'publish-binary'

// Artifact paths that (presumably) contain the published binary / npm-package
// URLs — only wanted when artifacts were persisted (see run()).
const urlPaths = [
  '~/cypress/binary-url.json',
  '~/cypress/npm-package-url.json',
]

// Artifact paths of the built binary and npm package archives themselves.
const archivePaths = [
  '~/cypress/cypress.zip',
  '~/cypress/cypress.tgz',
]
/**
 * Builds request-promise options for an authenticated GET against the
 * CircleCI v2 API, sending the CIRCLE_TOKEN env var as the auth header.
 */
function getRequestOptions (url) {
  const headers = { 'Circle-Token': process.env.CIRCLE_TOKEN }

  return { url, method: 'GET', headers }
}
/**
 * Reads the triggered-pipeline JSON written by trigger script and returns
 * its `id`. Throws when the file has no (truthy) id.
 */
function getPipelineId (pipelineInfoFilePath) {
  const raw = fs.readFileSync(pipelineInfoFilePath, 'utf8')
  const { id } = JSON.parse(raw)

  if (!id) {
    throw new Error(`error retrieving pipeline id from ${pipelineInfoFilePath}`)
  }

  return id
}
/**
 * Fetches the workflows of a CircleCI pipeline. The triggered pipeline is
 * expected to have exactly one workflow: throws on zero or on more than one.
 */
async function getWorkflows (pipelineId) {
  const raw = await rp(getRequestOptions(`https://circleci.com/api/v2/pipeline/${pipelineId}/workflow`))
  const { items } = JSON.parse(raw)

  if (!items.length) {
    throw new Error(`did not find any workflows in pipeline ${pipelineId}`)
  }

  if (items.length > 1) {
    console.log(items)
    throw new Error(`expected pipeline ${pipelineId} to only have one workflow, but it had many`)
  }

  return items
}
/**
 * Lists the jobs of a CircleCI workflow; throws when the workflow has none.
 */
async function getWorkflowJobs (workflowId) {
  const raw = await rp(getRequestOptions(`https://circleci.com/api/v2/workflow/${workflowId}/job`))
  const { items } = JSON.parse(raw)

  if (!items.length) {
    throw new Error(`did not find any jobs in workflow ${workflowId}`)
  }

  return items
}
/**
 * Lists the artifacts of a job in the cypress-publish-binary project;
 * throws when the job produced no artifacts.
 */
async function getJobArtifacts (jobNumber) {
  const raw = await rp(getRequestOptions(`https://circleci.com/api/v2/project/github/cypress-io/cypress-publish-binary/${jobNumber}/artifacts`))
  const { items } = JSON.parse(raw)

  if (!items.length) {
    throw new Error(`did not find any artifacts for job ${jobNumber}`)
  }

  return items
}
/**
 * Downloads a single CircleCI artifact to `path` via curl, authenticating
 * with CIRCLE_TOKEN. Rejects with a contextual Error on any curl failure.
 */
async function downloadArtifact (url, path) {
  try {
    console.log(`Downloading artifact from ${chalk.cyan(url)} \n to path ${chalk.cyan(path)}...`)
    // Quote the interpolated url/path so shell metacharacters in them
    // (&, ?, spaces in artifact URLs or paths) are passed to curl as single
    // arguments instead of being interpreted by the shell.
    await execPromise(`curl -L --url '${url}' --header 'Circle-Token: ${process.env.CIRCLE_TOKEN}' --header 'content-type: application/json' -o '${path}'`)
  } catch (error) {
    throw new Error(`failed to fetch artifact from URL ${url}: ${error}`)
  }
}
/**
 * Entry point. Given `--pipelineInfo <path>` pointing at the JSON saved when
 * the downstream cypress-publish-binary pipeline was triggered, finds that
 * pipeline's single workflow, verifies it succeeded, locates the
 * `publish-binary` job, and downloads its artifacts to the recorded paths.
 *
 * Helpers are invoked through `module.exports.*` so tests can stub them.
 */
async function run (args) {
  const options = minimist(args)

  const pipelineInfoFilePath = options.pipelineInfo

  if (!pipelineInfoFilePath) {
    throw new Error('--pipelineInfo must be provided as a parameter')
  }

  console.log(`Parsing pipeline info from ${chalk.cyan(pipelineInfoFilePath)}...`)
  const pipelineId = module.exports.getPipelineId(pipelineInfoFilePath)

  console.log(`Getting workflows from pipeline ${chalk.cyan(pipelineId)}...`)
  const workflows = await module.exports.getWorkflows(pipelineId)

  // getWorkflows guarantees exactly one workflow (throws otherwise).
  const workflow = workflows[0]

  // A failed downstream workflow is reported and sets a non-zero exit code
  // instead of throwing, so the formatted message below renders cleanly.
  if (workflow.status !== 'success') {
    console.error(chalk.red(`\nThe ${chalk.cyan(workflow.name)} workflow that we triggered in the ${chalk.cyan('cypress-publish-binary')} project did not succeed.\n
Status: ${chalk.red(workflow.status)} \n
Check the workflow logs to see why it failed
${chalk.cyan.underline(`https://app.circleci.com/pipelines/workflows/${workflow.id}`)}
`))
    process.exitCode = 1

    return
  }

  console.log(`Getting jobs from workflow ${chalk.cyan(workflow.name)}...`)
  const jobs = await module.exports.getWorkflowJobs(workflow.id)

  const job = jobs.find((job) => job.name === artifactJobName)

  if (!job) {
    throw new Error(`unable to find job in workflow ${workflow.name} named ${artifactJobName}`)
  }

  const artifacts = await module.exports.getJobArtifacts(job.job_number)

  let artifactPaths

  if (process.env.SHOULD_PERSIST_ARTIFACTS) {
    artifactPaths = [...urlPaths, ...archivePaths]
  } else {
    // If we didn't persist the artifacts to the registry, then we only want the build artifacts, no URLs.
    artifactPaths = [...archivePaths]
  }

  // Only download the artifacts we care about; downloads run concurrently and
  // any single failure rejects the whole batch.
  const filteredArtifacts = artifacts.filter((artifact) => artifactPaths.includes(artifact.path))

  await Promise.all(filteredArtifacts.map(({ url, path }) => {
    return module.exports.downloadArtifact(url, path)
  }))

  console.log('Artifacts successfully downloaded ✅')
}
module.exports = {
getPipelineId,
getWorkflows,
getWorkflowJobs,
getJobArtifacts,
downloadArtifact,
run,
}
if (!module.parent) {
run(process.argv)
}

View File

@@ -22,6 +22,7 @@ const upload = require('./upload')
const uploadUtils = require('./util/upload')
const { uploadArtifactToS3 } = require('./upload-build-artifact')
const { moveBinaries } = require('./move-binaries')
const { exec } = require('child_process')
const success = (str) => {
return console.log(chalk.bgGreen(` ${chalk.black(str)} `))
@@ -197,6 +198,22 @@ const deploy = {
})
},
package (options) {
console.log('#package')
if (options == null) {
options = this.parseOptions(process.argv)
}
debug('parsed build options %o', options)
return askMissingOptions(['version', 'platform'])(options)
.then(() => {
console.log('packaging binary: platform %s version %s', options.platform, options.version)
return build.packageElectronApp(options)
})
},
zip (options) {
console.log('#zip')
if (!options) {
@@ -302,6 +319,31 @@ const deploy = {
})
})
},
async checkIfBinaryExistsOnCdn (args = process.argv) {
console.log('#checkIfBinaryExistsOnCdn')
const url = await uploadArtifactToS3([...args, '--dry-run', 'true'])
console.log(`Checking if ${url} exists...`)
const binaryExists = await rp.head(url)
.then(() => true)
.catch(() => false)
if (binaryExists) {
console.log('A binary was already built for this operating system and commit hash. Skipping binary build process...')
exec('circleci-agent step halt', (_, __, stdout) => {
console.log(stdout)
})
return
}
console.log('Binary does not yet exist. Continuing to build binary...')
return binaryExists
},
}
module.exports = _.bindAll(deploy, _.functions(deploy))

View File

@@ -0,0 +1,50 @@
const fs = require('fs-extra')
const os = require('os')
const path = require('path')
const fetch = require('node-fetch')
const { getNextVersionForBinary } = require('../get-next-version')
// Triggers a downstream `cypress-publish-binary` pipeline via the CircleCI v2
// API, passing along artifact URLs and metadata from the current job, then
// persists the triggered pipeline's info to disk so a later job can poll it.
;(async () => {
  // Where the triggered pipeline's info is saved for later consumption.
  const pipelineInfoFilePath = path.join(os.homedir(), 'triggered_pipeline.json')
  const { nextVersion } = await getNextVersionForBinary()

  // Builds a CircleCI artifact URL for a file produced by the current job.
  function getArtifactUrl (fileName) {
    return `https://output.circle-artifacts.com/output/job/${process.env.CIRCLE_WORKFLOW_JOB_ID}/artifacts/${process.env.CIRCLE_NODE_INDEX}/${fileName}`
  }

  // Pipeline parameters consumed by the cypress-publish-binary config.
  const body = JSON.stringify({
    parameters: {
      temp_dir: os.tmpdir(),
      sha: process.env.CIRCLE_SHA1,
      job_name: process.env.CIRCLE_JOB,
      binary_artifact_url: getArtifactUrl('cypress-dist.tgz'),
      built_source_artifact_url: getArtifactUrl('cypress-built-source.tgz'),
      triggered_workflow_id: process.env.CIRCLE_WORKFLOW_ID,
      triggered_job_url: process.env.CIRCLE_BUILD_URL,
      branch: process.env.CIRCLE_BRANCH,
      // NOTE(review): Boolean('false') is true — any non-empty string here is
      // truthy. Assumes the env var is either unset or set to a real value;
      // confirm callers never set it to 'false'/'0'.
      should_persist_artifacts: Boolean(process.env.SHOULD_PERSIST_ARTIFACTS),
      binary_version: nextVersion,
    },
  })

  try {
    console.log('Triggering new pipeline in cypress-publish-binary project...')
    const response = await fetch('https://circleci.com/api/v2/project/github/cypress-io/cypress-publish-binary/pipeline', { method: 'POST', headers: { 'Circle-Token': process.env.CIRCLE_TOKEN, 'content-type': 'application/json' }, body })
    const pipeline = await response.json()

    console.log(pipeline)
    console.log(`Triggered pipeline: https://app.circleci.com/pipelines/github/cypress-io/cypress-publish-binary/${pipeline.number}`)

    try {
      console.log(`Saving pipeline info in ${pipelineInfoFilePath} ...`)
      await fs.writeFile(path.resolve(pipelineInfoFilePath), JSON.stringify(pipeline))
    } catch (error) {
      // NOTE(review): this throw is caught by the outer catch below and
      // re-wrapped as "error triggering new pipeline", which obscures that the
      // trigger succeeded and only the local write failed.
      throw new Error(`error writing triggered pipeline info ${error}`)
    }
  } catch (error) {
    throw new Error(`error triggering new pipeline ${error}`)
  }
})()

View File

@@ -87,7 +87,7 @@ const validateOptions = (options) => {
}
const uploadArtifactToS3 = function (args = []) {
const supportedOptions = ['type', 'version', 'file', 'hash', 'platform']
const supportedOptions = ['type', 'version', 'file', 'hash', 'platform', 'dry-run']
let options = minimist(args, {
string: supportedOptions,
})
@@ -99,13 +99,17 @@ const uploadArtifactToS3 = function (args = []) {
const uploadPath = getUploadPath(options)
const cdnUrl = getCDN(uploadPath)
if (options['dry-run']) {
return new Promise((resolve) => resolve(cdnUrl))
}
return upload.toS3({ file: options.file, uploadPath })
.then(() => {
return setChecksum(options.file, uploadPath)
})
.then(() => {
const cdnUrl = getCDN(uploadPath)
if (options.type === 'binary') {
console.log('Binary can be downloaded using URL')
console.log(cdnUrl)