chore: decaffeinate server tests, scripts, other files (#7591)

This commit is contained in:
Zach Bloomquist
2020-06-05 15:12:53 -04:00
committed by GitHub
parent c98eab2933
commit 1875027073
376 changed files with 29789 additions and 24985 deletions
-213
View File
@@ -1,213 +0,0 @@
_ = require("lodash")
fs = require("fs-extra")
glob = require("glob")
Promise = require("bluebird")
inquirer = require("inquirer")
la = require("lazy-ass")
check = require("check-more-types")
path = require("path")
glob = Promise.promisify(glob)
prompt = (questions) ->
Promise.resolve(inquirer.prompt(questions))
fs = Promise.promisifyAll(fs)
getZipFile = ->
[{
name: "zipFile"
type: "string"
default: "cypress.zip"
message: "Which zip file should we upload?"
}]
getPlatformQuestion = ->
[{
name: "platform"
type: "list"
message: "Which OS should we deploy?"
choices: [{
name: "Mac"
value: "darwin"
},{
name: "Linux"
value: "linux"
}, {
name: "Windows",
value: "win32"
}]
}]
getQuestions = (version) ->
[{
name: "publish"
type: "list"
message: "Publish a new version? (currently: #{version})"
choices: [{
name: "Yes: set a new version and deploy new version."
value: true
},{
name: "No: just override the current deployed version."
value: false
}]
},{
name: "version"
type: "input"
message: "Bump version to...? (currently: #{version})"
default: ->
a = version.split(".")
v = a[a.length - 1]
v = Number(v) + 1
a.splice(a.length - 1, 1, v)
a.join(".")
when: (answers) ->
answers.publish
}]
getReleases = (releases) ->
[{
name: "release"
type: "list"
message: "Release which version?"
choices: _.map releases, (r) ->
{
name: r
value: r
}
}]
getNextVersion = ({ version } = {}) ->
if not version
version = require(path.join(__dirname, "..", "..", "package.json")).version
message = "Bump next version to...? (currently: #{version})"
defaultVersion = () ->
a = version.split(".")
v = a[a.length - 1]
v = Number(v) + 1
a.splice(a.length - 1, 1, v)
a.join(".")
[{
name: "nextVersion"
type: "input"
message: message
default: defaultVersion
}]
getVersions = (releases) ->
[{
name: "version"
type: "list"
message: "Bump to which version?"
choices: _.map releases, (r) ->
{
name: r
value: r
}
}]
getBumpTasks = ->
[{
name: "task"
type: "list"
message: "Which bump task?"
choices: [{
name: "Bump Cypress Binary Version for all CI providers"
value: "version"
},{
name: "Run All Projects for all CI providers"
value: "run"
}]
}]
getCommitVersion = (version) ->
[{
name: "commit"
type: "list"
message: "Commit this new version to git? (currently: #{version})"
choices: [{
name: "Yes: commit and push this new release version."
value: true
},{
name: "No: do not commit."
value: false
}]
}]
deployNewVersion = ->
fs.readJsonAsync("./package.json")
.then (json) =>
prompt(getQuestions(json.version))
.then (answers) ->
## set the new version if we're publishing!
## update our own local package.json as well
if answers.publish
# @updateLocalPackageJson(answers.version, json).then ->
answers.version
else
json.version
whichZipFile = ->
prompt(getZipFile())
.get("zipFile")
whichVersion = (distDir) ->
## realpath returns the absolute full path
glob("*/package.json", {cwd: distDir, realpath: true})
.map (pkg) ->
fs.readJsonAsync(pkg)
.get("version")
.then (versions) ->
versions = _.uniq(versions)
prompt(getVersions(versions))
.get("version")
whichRelease = (distDir) ->
## realpath returns the absolute full path
glob("*/package.json", {cwd: distDir, realpath: true})
.map (pkg) =>
fs.readJsonAsync(pkg)
.get("version")
.then (versions) =>
versions = _.uniq(versions)
prompt(getReleases(versions))
.get("release")
whichPlatform = ->
prompt(getPlatformQuestion())
.get("platform")
whichBumpTask = ->
prompt(getBumpTasks())
.get("task")
nextVersion = (version) ->
prompt(getNextVersion(version))
.get("nextVersion")
toCommit = ({ version }) ->
prompt(getCommitVersion(version))
.get("commit")
module.exports = {
toCommit
getZipFile
getPlatformQuestion
getQuestions
getReleases
getVersions
getBumpTasks
deployNewVersion
nextVersion
whichZipFile
whichVersion
whichRelease
whichPlatform
whichBumpTask
}
+242
View File
@@ -0,0 +1,242 @@
const _ = require('lodash')
let fs = require('fs-extra')
let glob = require('glob')
const Promise = require('bluebird')
const inquirer = require('inquirer')
const path = require('path')
glob = Promise.promisify(glob)
// Run inquirer with the given question set, wrapped in a Bluebird promise
// so callers can chain Bluebird helpers like .get() on the answers object.
const prompt = function (questions) {
  return Promise.resolve(inquirer.prompt(questions))
}
fs = Promise.promisifyAll(fs)
// Inquirer question set: which zip archive should be uploaded.
const getZipFile = function () {
  const question = {
    name: 'zipFile',
    type: 'string',
    default: 'cypress.zip',
    message: 'Which zip file should we upload?',
  }

  return [question]
}
// Inquirer question set: which OS to deploy for.
const getPlatformQuestion = function () {
  const choices = [
    { name: 'Mac', value: 'darwin' },
    { name: 'Linux', value: 'linux' },
    { name: 'Windows', value: 'win32' },
  ]

  return [{
    name: 'platform',
    type: 'list',
    message: 'Which OS should we deploy?',
    choices,
  }]
}
// Inquirer question pair for deploying: first a yes/no "publish?" choice,
// then (only when publishing) an input for the new version string.
const getQuestions = function (version) {
  const publishQuestion = {
    name: 'publish',
    type: 'list',
    message: `Publish a new version? (currently: ${version})`,
    choices: [
      { name: 'Yes: set a new version and deploy new version.', value: true },
      { name: 'No: just override the current deployed version.', value: false },
    ],
  }

  const versionQuestion = {
    name: 'version',
    type: 'input',
    message: `Bump version to...? (currently: ${version})`,
    // default suggestion: current version with its last segment incremented
    default () {
      const parts = version.split('.')
      const bumped = Number(parts[parts.length - 1]) + 1

      parts.splice(parts.length - 1, 1, bumped)

      return parts.join('.')
    },
    // only ask for a version when the user chose to publish
    when: (answers) => answers.publish,
  }

  return [publishQuestion, versionQuestion]
}
// Inquirer question set: pick one release from the given version strings.
// `releases` is always an array (see whichRelease), so the lodash `_.map`
// was unnecessary — native Array#map is equivalent and dependency-free here.
const getReleases = (releases) => {
  return [{
    name: 'release',
    type: 'list',
    message: 'Release which version?',
    choices: releases.map((r) => {
      return {
        name: r,
        value: r,
      }
    }),
  }]
}
// Inquirer question set: what the next version should be.
// When no version is passed, falls back to the repo root package.json version.
const getNextVersion = function ({ version } = {}) {
  if (!version) {
    ({ version } = require(path.join(__dirname, '..', '..', 'package.json')))
  }

  // default suggestion: current version with its last segment incremented
  const defaultVersion = () => {
    const parts = version.split('.')
    const bumped = Number(parts[parts.length - 1]) + 1

    parts.splice(parts.length - 1, 1, bumped)

    return parts.join('.')
  }

  return [{
    name: 'nextVersion',
    type: 'input',
    message: `Bump next version to...? (currently: ${version})`,
    default: defaultVersion,
  }]
}
// Inquirer question set: pick which version to bump to.
// `releases` is always an array (see whichVersion), so the lodash `_.map`
// was unnecessary — native Array#map is equivalent and dependency-free here.
const getVersions = (releases) => {
  return [{
    name: 'version',
    type: 'list',
    message: 'Bump to which version?',
    choices: releases.map((r) => {
      return {
        name: r,
        value: r,
      }
    }),
  }]
}
// Inquirer question set: which CI bump task to run.
const getBumpTasks = function () {
  const choices = [
    { name: 'Bump Cypress Binary Version for all CI providers', value: 'version' },
    { name: 'Run All Projects for all CI providers', value: 'run' },
  ]

  return [{
    name: 'task',
    type: 'list',
    message: 'Which bump task?',
    choices,
  }]
}
// Inquirer question set: commit the new release version to git, yes or no.
const getCommitVersion = function (version) {
  const choices = [
    { name: 'Yes: commit and push this new release version.', value: true },
    { name: 'No: do not commit.', value: false },
  ]

  return [{
    name: 'commit',
    type: 'list',
    message: `Commit this new version to git? (currently: ${version})`,
    choices,
  }]
}
// Read ./package.json, ask the publish questions, and resolve with the
// version to deploy: the newly entered one when publishing, otherwise
// the version currently in package.json.
const deployNewVersion = function () {
  return fs.readJsonAsync('./package.json')
  .then((pkg) => {
    return prompt(getQuestions(pkg.version))
    .then((answers) => {
      // set the new version if we're publishing!
      // update our own local package.json as well
      if (answers.publish) {
        // @updateLocalPackageJson(answers.version, json).then ->
        return answers.version
      }

      return pkg.version
    })
  })
}
// Prompt for and resolve the zip file name to upload.
const whichZipFile = () => prompt(getZipFile()).get('zipFile')
// Read the version from every built package.json under distDir,
// dedupe them, then prompt the user to pick one.
const whichVersion = function (distDir) {
  // realpath returns the absolute full path
  return glob('*/package.json', { cwd: distDir, realpath: true })
  .map((pkgPath) => fs.readJsonAsync(pkgPath).get('version'))
  .then((foundVersions) => {
    return prompt(getVersions(_.uniq(foundVersions)))
    .get('version')
  })
}
// Read the version from every built package.json under distDir,
// dedupe them, then prompt the user to pick one to release.
const whichRelease = function (distDir) {
  // realpath returns the absolute full path
  return glob('*/package.json', { cwd: distDir, realpath: true })
  .map((pkgPath) => fs.readJsonAsync(pkgPath).get('version'))
  .then((foundVersions) => {
    return prompt(getReleases(_.uniq(foundVersions)))
    .get('release')
  })
}
// Prompt for and resolve the OS to deploy ('darwin' | 'linux' | 'win32').
const whichPlatform = () => prompt(getPlatformQuestion()).get('platform')
// Prompt for and resolve the CI bump task to run ('version' | 'run').
const whichBumpTask = () => prompt(getBumpTasks()).get('task')
// Prompt for and resolve the next version string.
const nextVersion = (version) => prompt(getNextVersion(version)).get('nextVersion')
// Prompt for and resolve whether to commit the new version to git.
const toCommit = ({ version }) => prompt(getCommitVersion(version)).get('commit')
// Public API: pure question builders (get*) plus the interactive prompt
// helpers (which*, nextVersion, toCommit, deployNewVersion) built on them.
module.exports = {
  toCommit,
  getZipFile,
  getPlatformQuestion,
  getQuestions,
  getReleases,
  getVersions,
  getBumpTasks,
  deployNewVersion,
  nextVersion,
  whichZipFile,
  whichVersion,
  whichRelease,
  whichPlatform,
  whichBumpTask,
}
-404
View File
@@ -1,404 +0,0 @@
_ = require("lodash")
fse = require("fs-extra")
os = require("os")
del = require("del")
path = require("path")
cp = require("child_process")
gulp = require("gulp")
chalk = require("chalk")
Promise = require("bluebird")
gulpDebug = require("gulp-debug")
gulpCoffee = require("gulp-coffee")
pluralize = require("pluralize")
vinylPaths = require("vinyl-paths")
coffee = require("@packages/coffee")
execa = require("execa")
electron = require("@packages/electron")
debug = require("debug")("cypress:binary")
R = require("ramda")
la = require("lazy-ass")
check = require("check-more-types")
humanInterval = require("human-interval")
meta = require("./meta")
smoke = require("./smoke")
packages = require("./util/packages")
xvfb = require("../../cli/lib/exec/xvfb")
{ transformRequires } = require('./util/transform-requires')
{ testStaticAssets } = require('./util/testStaticAssets')
performanceTracking = require('../../packages/server/test/support/helpers/performance.js')
rootPackage = require("@packages/root")
fs = Promise.promisifyAll(fse)
logger = (msg, platform) ->
time = new Date()
timeStamp = time.toLocaleTimeString()
console.log(timeStamp, chalk.yellow(msg), chalk.blue(platform))
logBuiltAllPackages = () ->
console.log("built all packages")
# can pass options to better control the build
# for example
# skipClean - do not delete "dist" folder before build
buildCypressApp = (platform, version, options = {}) ->
la(check.unemptyString(version), "missing version to build", version)
distDir = _.partial(meta.distDir, platform)
buildDir = _.partial(meta.buildDir, platform)
buildAppDir = _.partial(meta.buildAppDir, platform)
log = _.partialRight(logger, platform)
testVersion = (folderNameFn) -> () ->
log("#testVersion")
dir = folderNameFn()
la(check.unemptyString(dir), "missing folder for platform", platform)
console.log("testing dist package version")
console.log("by calling: node index.js --version")
console.log("in the folder %s", dir)
execa("node", ["index.js", "--version"], {
cwd: dir
}).then (result) ->
la(check.unemptyString(result.stdout),
'missing output when getting built version', result)
console.log('app in %s', dir)
console.log('built app version', result.stdout)
la(result.stdout == version, "different version reported",
result.stdout, "from input version to build", version)
console.log('✅ using node --version works')
testBuiltStaticAssets = ->
log('#testBuiltStaticAssets')
testStaticAssets(distDir())
canBuildInDocker = ->
platform is "linux" and os.platform() is "darwin"
badPlatformMismatch = ->
console.error("⛔️ cannot build #{platform} from #{os.platform()}")
console.error("⛔️ should use matching platform to build it")
console.error("program arguments")
console.error(process.argv)
checkPlatform = ->
log("#checkPlatform")
if platform is os.platform()
return
console.log("trying to build #{platform} from #{os.platform()}")
if platform is "linux" and os.platform() is "darwin"
console.log("npm run binary-build-linux")
Promise.reject(new Error("Build platform mismatch"))
cleanupPlatform = ->
log("#cleanupPlatform")
if options.skipClean
log("skipClean")
return
cleanup = ->
dir = distDir()
la(check.unemptyString(dir), "empty dist dir", dir, "for platform", platform)
fs.removeAsync(distDir())
cleanup()
.catch(cleanup)
buildPackages = ->
log("#buildPackages")
packages.runAllBuild()
# Promise.resolve()
.then(R.tap(logBuiltAllPackages))
copyPackages = ->
log("#copyPackages")
packages.copyAllToDist(distDir())
transformSymlinkRequires = ->
log("#transformSymlinkRequires")
transformRequires(distDir())
.then (replaceCount) ->
la(replaceCount > 5, 'expected to replace more than 5 symlink requires, but only replaced', replaceCount)
npmInstallPackages = ->
log("#npmInstallPackages")
pathToPackages = distDir("packages", "*")
packages.npmInstallAll(pathToPackages)
createRootPackage = ->
log("#createRootPackage #{platform} #{version}")
fs.outputJsonAsync(distDir("package.json"), {
name: "cypress"
productName: "Cypress",
description: rootPackage.description
version: version
main: "index.js"
scripts: {}
env: "production"
})
.then =>
str = """
process.env.CYPRESS_INTERNAL_ENV = process.env.CYPRESS_INTERNAL_ENV || 'production'
require('./packages/server')
"""
fs.outputFileAsync(distDir("index.js"), str)
removeTypeScript = ->
## remove the .ts files in our packages
log("#removeTypeScript")
del([
## include ts files of packages
distDir("**", "*.ts")
## except those in node_modules
"!" + distDir("**", "node_modules", "**", "*.ts")
])
.then (paths) ->
console.log(
"deleted %d TS %s",
paths.length,
pluralize("file", paths.length)
)
console.log(paths)
# we also don't need ".bin" links inside Electron application
# thus we can go through dist/packages/*/node_modules and remove all ".bin" folders
removeBinFolders = ->
log("#removeBinFolders")
searchMask = distDir("packages", "*", "node_modules", ".bin")
console.log("searching for", searchMask)
del([searchMask])
.then (paths) ->
console.log(
"deleted %d .bin %s",
paths.length,
pluralize("folder", paths.length)
)
console.log(paths)
removeCyFolders = ->
log("#removeCyFolders")
searchMask = distDir("packages", "server", ".cy")
console.log("searching", searchMask)
del([searchMask])
.then (paths) ->
console.log(
"deleted %d .cy %s",
paths.length,
pluralize("file", paths.length)
)
console.log(paths)
cleanJs = ->
log("#cleanJs")
packages.runAllCleanJs()
convertCoffeeToJs = ->
log("#convertCoffeeToJs")
## grab everything in src
## convert to js
new Promise (resolve, reject) =>
gulp.src([
## include coffee files of packages
distDir("**", "*.coffee")
## except those in node_modules
"!" + distDir("**", "node_modules", "**", "*.coffee")
], { sourcemaps: true })
.pipe vinylPaths(del)
.pipe(gulpDebug())
.pipe gulpCoffee({
coffee: coffee
})
.pipe gulp.dest(distDir())
.on("end", resolve)
.on("error", reject)
getIconFilename = (platform) ->
filenames = {
darwin: "cypress.icns"
win32: "cypress.ico"
linux: "icon_512x512.png"
}
iconFilename = electron.icons().getPathToIcon(filenames[platform])
console.log("For platform #{platform} using icon #{iconFilename}")
iconFilename
electronPackAndSign = ->
log("#electronPackAndSign")
# See the internal wiki document "Signing Test Runner on MacOS"
# to learn how to get the right Mac certificate for signing and notarizing
# the built Test Runner application
appFolder = distDir()
outputFolder = meta.buildRootDir(platform)
electronVersion = electron.getElectronVersion()
la(check.unemptyString(electronVersion), "missing Electron version to pack", electronVersion)
iconFilename = getIconFilename(platform)
console.log("output folder: #{outputFolder}")
args = [
"--publish=never",
"--c.electronVersion=#{electronVersion}",
"--c.directories.app=#{appFolder}",
"--c.directories.output=#{outputFolder}",
"--c.icon=#{iconFilename}",
# for now we cannot pack source files in asar file
# because electron-builder does not copy nested folders
# from packages/*/node_modules
# see https://github.com/electron-userland/electron-builder/issues/3185
# so we will copy those folders later ourselves
"--c.asar=false"
]
opts = {
stdio: "inherit"
}
console.log("electron-builder arguments:")
console.log(args.join(' '))
execa('electron-builder', args, opts)
removeDevElectronApp = ->
log("#removeDevElectronApp")
# when we copy packages/electron, we get the "dist" folder with
# empty Electron app, symlinked to our server folder
# in production build, we do not need this link, and it
# would not work anyway with code signing
# hint: you can see all symlinks in the build folder
# using "find build/darwin/Cypress.app/ -type l -ls"
console.log("platform", platform)
electronDistFolder = distDir("packages", "electron", "dist")
la(check.unemptyString(electronDistFolder),
"empty electron dist folder for platform", platform)
console.log("Removing unnecessary folder '#{electronDistFolder}'")
fs.removeAsync(electronDistFolder) # .catch(_.noop) why are we ignoring an error here?!
lsDistFolder = ->
log('#lsDistFolder')
buildFolder = buildDir()
console.log("in build folder %s", buildFolder)
execa('ls', ['-la', buildFolder])
.then R.prop("stdout")
.then console.log
runSmokeTests = ->
log("#runSmokeTests")
run = ->
# make sure to use a longer timeout - on Mac the first
# launch of a built application invokes gatekeeper check
# which takes a couple of seconds
executablePath = meta.buildAppExecutable(platform)
smoke.test(executablePath)
if xvfb.isNeeded()
xvfb.start()
.then(run)
.finally(xvfb.stop)
else
run()
verifyAppCanOpen = ->
if (platform != "darwin") then return Promise.resolve()
appFolder = meta.zipDir(platform)
log("#verifyAppCanOpen #{appFolder}")
new Promise (resolve, reject) =>
args = ["-a", "-vvvv", appFolder]
debug("cmd: spctl #{args.join(' ')}")
sp = cp.spawn "spctl", args, {stdio: "inherit"}
sp.on "exit", (code) ->
if code is 0
resolve()
else
reject new Error("Verifying App via GateKeeper failed")
printPackageSizes = ->
appFolder = meta.buildAppDir(platform, "packages")
log("#printPackageSizes #{appFolder}")
if (platform == "win32") then return Promise.resolve()
# "du" - disk usage utility
# -d -1 depth of 1
# -h human readable sizes (K and M)
args = ["-d", "1", appFolder]
parseDiskUsage = (result) ->
lines = result.stdout.split(os.EOL)
# will store {package name: package size}
data = {}
lines.forEach (line) ->
parts = line.split('\t')
packageSize = parseFloat(parts[0])
folder = parts[1]
packageName = path.basename(folder)
if packageName is "packages"
return # root "packages" information
data[packageName] = packageSize
return data
printDiskUsage = (sizes) ->
bySize = R.sortBy(R.prop('1'))
console.log(bySize(R.toPairs(sizes)))
execa("du", args)
.then(parseDiskUsage)
.then(R.tap(printDiskUsage))
.then((sizes) ->
performanceTracking.track('test runner size', sizes)
)
Promise.resolve()
.then(checkPlatform)
.then(cleanupPlatform)
.then(buildPackages)
.then(copyPackages)
.then(npmInstallPackages)
.then(createRootPackage)
.then(convertCoffeeToJs)
.then(removeTypeScript)
.then(cleanJs)
.then(transformSymlinkRequires)
.then(testVersion(distDir))
.then(testBuiltStaticAssets)
.then(removeBinFolders)
.then(removeCyFolders)
.then(removeDevElectronApp)
.then(electronPackAndSign)
.then(lsDistFolder)
.then(testVersion(buildAppDir))
.then(runSmokeTests)
.then(verifyAppCanOpen)
.then(printPackageSizes)
.return({
buildDir: buildDir()
})
module.exports = buildCypressApp
+474
View File
@@ -0,0 +1,474 @@
const _ = require('lodash')
const fse = require('fs-extra')
const os = require('os')
const del = require('del')
const path = require('path')
const cp = require('child_process')
const gulp = require('gulp')
const chalk = require('chalk')
const Promise = require('bluebird')
const gulpDebug = require('gulp-debug')
const gulpCoffee = require('gulp-coffee')
const pluralize = require('pluralize')
const vinylPaths = require('vinyl-paths')
const coffee = require('@packages/coffee')
const execa = require('execa')
const electron = require('@packages/electron')
const debug = require('debug')('cypress:binary')
const R = require('ramda')
const la = require('lazy-ass')
const check = require('check-more-types')
const meta = require('./meta')
const smoke = require('./smoke')
const packages = require('./util/packages')
const xvfb = require('../../cli/lib/exec/xvfb')
const { transformRequires } = require('./util/transform-requires')
const { testStaticAssets } = require('./util/testStaticAssets')
const performanceTracking = require('../../packages/server/test/support/helpers/performance.js')
const rootPackage = require('@packages/root')
const fs = Promise.promisifyAll(fse)
// Log `msg` (yellow) and `platform` (blue) prefixed with a local timestamp.
const logger = (msg, platform) => {
  const stamp = new Date().toLocaleTimeString()

  return console.log(stamp, chalk.yellow(msg), chalk.blue(platform))
}
// Announce that every package finished building.
const logBuiltAllPackages = function () {
  return console.log('built all packages')
}
// can pass options to better control the build
// for example
// skipClean - do not delete "dist" folder before build
// Build the Cypress binary for `platform` at `version`:
// check the platform, clean dist, build/copy/npm-install all packages,
// convert remaining CoffeeScript, prune TS/.bin/.cy artifacts, pack & sign
// with electron-builder, then smoke-test the built app. Resolves with
// { buildDir }. `options.skipClean` skips deleting dist before building.
const buildCypressApp = function (platform, version, options = {}) {
  la(check.unemptyString(version), 'missing version to build', version)

  // path helpers pre-bound to the target platform
  const distDir = _.partial(meta.distDir, platform)
  const buildDir = _.partial(meta.buildDir, platform)
  const buildAppDir = _.partial(meta.buildAppDir, platform)
  const log = _.partialRight(logger, platform)

  // returns a task that runs "node index.js --version" inside the folder
  // produced by folderNameFn and asserts it reports the version we built
  const testVersion = (folderNameFn) => {
    return (function () {
      log('#testVersion')
      const dir = folderNameFn()

      la(check.unemptyString(dir), 'missing folder for platform', platform)
      console.log('testing dist package version')
      console.log('by calling: node index.js --version')
      console.log('in the folder %s', dir)

      return execa('node', ['index.js', '--version'], {
        cwd: dir,
      }).then((result) => {
        la(check.unemptyString(result.stdout),
          'missing output when getting built version', result)
        console.log('app in %s', dir)
        console.log('built app version', result.stdout)
        la(result.stdout === version, 'different version reported',
          result.stdout, 'from input version to build', version)

        return console.log('✅ using node --version works')
      })
    })
  }

  // sanity-check the static assets copied into dist
  const testBuiltStaticAssets = function () {
    log('#testBuiltStaticAssets')

    return testStaticAssets(distDir())
  }

  // rejects when the build platform does not match the host OS
  const checkPlatform = function () {
    log('#checkPlatform')

    if (platform === os.platform()) {
      return
    }

    console.log(`trying to build ${platform} from ${os.platform()}`)

    // suggest the docker-based build when targeting linux from a mac
    if ((platform === 'linux') && (os.platform() === 'darwin')) {
      console.log('npm run binary-build-linux')
    }

    return Promise.reject(new Error('Build platform mismatch'))
  }

  // removes the dist folder (unless options.skipClean); retried once on failure
  const cleanupPlatform = function () {
    log('#cleanupPlatform')

    if (options.skipClean) {
      log('skipClean')

      return
    }

    const cleanup = function () {
      const dir = distDir()

      la(check.unemptyString(dir), 'empty dist dir', dir, 'for platform', platform)

      return fs.removeAsync(distDir())
    }

    return cleanup()
    .catch(cleanup)
  }

  const buildPackages = function () {
    log('#buildPackages')

    return packages.runAllBuild()
    // Promise.resolve()
    .then(R.tap(logBuiltAllPackages))
  }

  const copyPackages = function () {
    log('#copyPackages')

    return packages.copyAllToDist(distDir())
  }

  // rewrites symlinked requires in dist so the packed app resolves them
  const transformSymlinkRequires = function () {
    log('#transformSymlinkRequires')

    return transformRequires(distDir())
    .then((replaceCount) => {
      return la(replaceCount > 5, 'expected to replace more than 5 symlink requires, but only replaced', replaceCount)
    })
  }

  const npmInstallPackages = function () {
    log('#npmInstallPackages')

    const pathToPackages = distDir('packages', '*')

    return packages.npmInstallAll(pathToPackages)
  }

  // writes dist/package.json and a small dist/index.js entry point
  // that boots the packaged server in production mode
  const createRootPackage = function () {
    log(`#createRootPackage ${platform} ${version}`)

    return fs.outputJsonAsync(distDir('package.json'), {
      name: 'cypress',
      productName: 'Cypress',
      description: rootPackage.description,
      version,
      main: 'index.js',
      scripts: {},
      env: 'production',
    })
    .then(() => {
      const str = `\
process.env.CYPRESS_INTERNAL_ENV = process.env.CYPRESS_INTERNAL_ENV || 'production'
require('./packages/server')\
`

      return fs.outputFileAsync(distDir('index.js'), str)
    })
  }

  const removeTypeScript = function () {
    // remove the .ts files in our packages
    log('#removeTypeScript')

    return del([
      // include ts files of packages
      distDir('**', '*.ts'),
      // except those in node_modules
      `!${distDir('**', 'node_modules', '**', '*.ts')}`,
    ])
    .then((paths) => {
      console.log(
        'deleted %d TS %s',
        paths.length,
        pluralize('file', paths.length),
      )

      return console.log(paths)
    })
  }

  // we also don't need ".bin" links inside Electron application
  // thus we can go through dist/packages/*/node_modules and remove all ".bin" folders
  const removeBinFolders = function () {
    log('#removeBinFolders')

    const searchMask = distDir('packages', '*', 'node_modules', '.bin')

    console.log('searching for', searchMask)

    return del([searchMask])
    .then((paths) => {
      console.log(
        'deleted %d .bin %s',
        paths.length,
        pluralize('folder', paths.length),
      )

      return console.log(paths)
    })
  }

  const removeCyFolders = function () {
    log('#removeCyFolders')

    const searchMask = distDir('packages', 'server', '.cy')

    console.log('searching', searchMask)

    return del([searchMask])
    .then((paths) => {
      console.log(
        'deleted %d .cy %s',
        paths.length,
        pluralize('file', paths.length),
      )

      return console.log(paths)
    })
  }

  const cleanJs = function () {
    log('#cleanJs')

    return packages.runAllCleanJs()
  }

  // compiles every remaining .coffee file in dist to .js in place
  // (the gulp stream deletes the source .coffee via vinylPaths(del))
  const convertCoffeeToJs = function () {
    log('#convertCoffeeToJs')

    // grab everything in src
    // convert to js
    return new Promise((resolve, reject) => {
      return gulp.src([
        // include coffee files of packages
        distDir('**', '*.coffee'),
        // except those in node_modules
        `!${distDir('**', 'node_modules', '**', '*.coffee')}`,
      ], { sourcemaps: true })
      .pipe(vinylPaths(del))
      .pipe(gulpDebug())
      .pipe(gulpCoffee({
        coffee,
      })).pipe(gulp.dest(distDir()))
      .on('end', resolve)
      .on('error', reject)
    })
  }

  // resolves the platform-specific app icon path from @packages/electron
  const getIconFilename = function (platform) {
    const filenames = {
      darwin: 'cypress.icns',
      win32: 'cypress.ico',
      linux: 'icon_512x512.png',
    }
    const iconFilename = electron.icons().getPathToIcon(filenames[platform])

    console.log(`For platform ${platform} using icon ${iconFilename}`)

    return iconFilename
  }

  const electronPackAndSign = function () {
    log('#electronPackAndSign')

    // See the internal wiki document "Signing Test Runner on MacOS"
    // to learn how to get the right Mac certificate for signing and notarizing
    // the built Test Runner application
    const appFolder = distDir()
    const outputFolder = meta.buildRootDir(platform)
    const electronVersion = electron.getElectronVersion()

    la(check.unemptyString(electronVersion), 'missing Electron version to pack', electronVersion)
    const iconFilename = getIconFilename(platform)

    console.log(`output folder: ${outputFolder}`)

    const args = [
      '--publish=never',
      `--c.electronVersion=${electronVersion}`,
      `--c.directories.app=${appFolder}`,
      `--c.directories.output=${outputFolder}`,
      `--c.icon=${iconFilename}`,
      // for now we cannot pack source files in asar file
      // because electron-builder does not copy nested folders
      // from packages/*/node_modules
      // see https://github.com/electron-userland/electron-builder/issues/3185
      // so we will copy those folders later ourselves
      '--c.asar=false',
    ]
    const opts = {
      stdio: 'inherit',
    }

    console.log('electron-builder arguments:')
    console.log(args.join(' '))

    return execa('electron-builder', args, opts)
  }

  const removeDevElectronApp = function () {
    log('#removeDevElectronApp')

    // when we copy packages/electron, we get the "dist" folder with
    // empty Electron app, symlinked to our server folder
    // in production build, we do not need this link, and it
    // would not work anyway with code signing
    // hint: you can see all symlinks in the build folder
    // using "find build/darwin/Cypress.app/ -type l -ls"
    console.log('platform', platform)
    const electronDistFolder = distDir('packages', 'electron', 'dist')

    la(check.unemptyString(electronDistFolder),
      'empty electron dist folder for platform', platform)

    console.log(`Removing unnecessary folder '${electronDistFolder}'`)

    return fs.removeAsync(electronDistFolder) // .catch(_.noop) why are we ignoring an error here?!
  }

  // prints "ls -la" of the build folder for debugging
  const lsDistFolder = function () {
    log('#lsDistFolder')
    const buildFolder = buildDir()

    console.log('in build folder %s', buildFolder)

    return execa('ls', ['-la', buildFolder])
    .then(R.prop('stdout'))
    .then(console.log)
  }

  // launches the built executable once (under xvfb when no display is available)
  const runSmokeTests = function () {
    log('#runSmokeTests')

    const run = function () {
      // make sure to use a longer timeout - on Mac the first
      // launch of a built application invokes gatekeeper check
      // which takes a couple of seconds
      const executablePath = meta.buildAppExecutable(platform)

      return smoke.test(executablePath)
    }

    if (xvfb.isNeeded()) {
      return xvfb.start()
      .then(run)
      .finally(xvfb.stop)
    }

    return run()
  }

  // macOS only: runs "spctl -a" to verify GateKeeper accepts the signed app
  const verifyAppCanOpen = function () {
    if (platform !== 'darwin') {
      return Promise.resolve()
    }

    const appFolder = meta.zipDir(platform)

    log(`#verifyAppCanOpen ${appFolder}`)

    return new Promise((resolve, reject) => {
      const args = ['-a', '-vvvv', appFolder]

      debug(`cmd: spctl ${args.join(' ')}`)
      const sp = cp.spawn('spctl', args, { stdio: 'inherit' })

      return sp.on('exit', (code) => {
        if (code === 0) {
          return resolve()
        }

        return reject(new Error('Verifying App via GateKeeper failed'))
      })
    })
  }

  // prints per-package disk usage of the built app (skipped on win32 — no "du")
  const printPackageSizes = function () {
    const appFolder = meta.buildAppDir(platform, 'packages')

    log(`#printPackageSizes ${appFolder}`)

    if (platform === 'win32') {
      return Promise.resolve()
    }

    // "du" - disk usage utility
    // -d -1 depth of 1
    // -h human readable sizes (K and M)
    const args = ['-d', '1', appFolder]

    // turns "du" output lines into {packageName: size} pairs
    const parseDiskUsage = function (result) {
      const lines = result.stdout.split(os.EOL)
      // will store {package name: package size}
      const data = {}

      lines.forEach((line) => {
        const parts = line.split('\t')
        const packageSize = parseFloat(parts[0])
        const folder = parts[1]
        const packageName = path.basename(folder)

        if (packageName === 'packages') {
          return // root "packages" information
        }

        data[packageName] = packageSize
      })

      return data
    }

    const printDiskUsage = function (sizes) {
      const bySize = R.sortBy(R.prop('1'))

      return console.log(bySize(R.toPairs(sizes)))
    }

    return execa('du', args)
    .then(parseDiskUsage)
    .then(R.tap(printDiskUsage))
    .then((sizes) => {
      return performanceTracking.track('test runner size', sizes)
    })
  }

  // the ordered build pipeline — each step runs only after the previous resolves
  return Promise.resolve()
  .then(checkPlatform)
  .then(cleanupPlatform)
  .then(buildPackages)
  .then(copyPackages)
  .then(npmInstallPackages)
  .then(createRootPackage)
  .then(convertCoffeeToJs)
  .then(removeTypeScript)
  .then(cleanJs)
  .then(transformSymlinkRequires)
  .then(testVersion(distDir))
  .then(testBuiltStaticAssets)
  .then(removeBinFolders)
  .then(removeCyFolders)
  .then(removeDevElectronApp)
  .then(electronPackAndSign)
  .then(lsDistFolder)
  .then(testVersion(buildAppDir))
  .then(runSmokeTests)
  .then(verifyAppCanOpen)
  .then(printPackageSizes)
  .return({
    buildDir: buildDir(),
  })
}
// single entry point used by the binary build scripts
module.exports = buildCypressApp
-302
View File
@@ -1,302 +0,0 @@
_ = require("lodash")
fs = require("fs-extra")
Promise = require("bluebird")
bumpercar = require("@cypress/bumpercar")
path = require("path")
la = require('lazy-ass')
check = require('check-more-types')
R = require("ramda")
os = require("os")
{configFromEnvOrJsonFile, filenameToShellVariable} = require('@cypress/env-or-json-file')
makeEmptyGithubCommit = require("make-empty-github-commit")
parse = require("parse-github-repo-url")
{setCommitStatus} = require("@cypress/github-commit-status-check")
fs = Promise.promisifyAll(fs)
car = null
# all the projects to trigger / run / change environment variables for
_PROVIDERS = {
appVeyor: {
main: "cypress-io/cypress"
win32: [
"cypress-io/cypress-test-tiny"
"cypress-io/cypress-test-example-repos"
]
}
circle: {
main: "cypress-io/cypress"
linux: [
"cypress-io/cypress-test-tiny"
"cypress-io/cypress-test-module-api"
"cypress-io/cypress-test-node-versions"
"cypress-io/cypress-test-nested-projects"
"cypress-io/cypress-test-ci-environments"
"cypress-io/cypress-test-example-repos"
]
darwin: [
"cypress-io/cypress-test-tiny"
"cypress-io/cypress-test-example-repos"
]
}
}
remapProjects = (projectsByProvider) ->
list = []
_.mapValues projectsByProvider, (provider, name) ->
remapPlatform = (platform, repos) ->
repos.forEach (repo) ->
list.push({
repo
provider: name
platform
})
if provider.win32 then remapPlatform("win32", provider.win32)
if provider.linux then remapPlatform("linux", provider.linux)
if provider.darwin then remapPlatform("darwin", provider.darwin)
list
remapMain = (projectsByProvider) ->
list = []
_.mapValues projectsByProvider, (provider, name) ->
list.push({
repo: provider.main
provider: name
})
list
# make flat list of objects
# {repo, provider, platform}
PROJECTS = remapProjects(_PROVIDERS)
getCiConfig = ->
key = path.join("scripts", "support", "ci.json")
config = configFromEnvOrJsonFile(key)
if !config
console.error('⛔️ Cannot find CI credentials')
console.error('Using @cypress/env-or-json-file module')
console.error('and filename', key)
console.error('which is environment variable', filenameToShellVariable(key))
throw new Error('CI config not found')
config
awaitEachProjectAndProvider = (projects, fn, filter = R.identity) ->
creds = getCiConfig()
## configure a new Bumpercar
providers = {}
if check.unemptyString(creds.githubToken)
providers.travis = {
githubToken: creds.githubToken
}
if check.unemptyString(creds.circleToken)
providers.circle = {
circleToken: creds.circleToken
}
if check.unemptyString(creds.appVeyorToken)
providers.appVeyor = {
appVeyorToken: creds.appVeyorToken
}
if check.unemptyString(creds.buildkiteToken)
providers.buildkite = {
buildkiteToken: creds.buildkiteToken
}
providerNames = Object.keys(providers)
console.log("configured providers", providerNames)
la(check.not.empty(providerNames), "empty list of providers")
car = bumpercar.create({providers})
filteredProjects = R.filter(filter, projects)
if check.empty(filteredProjects)
console.log("⚠️ zero filtered projects left after filtering")
console.log("filtered projects:")
console.table(filteredProjects)
Promise.mapSeries filteredProjects, (project) ->
fn(project.repo, project.provider, creds)
# do not trigger all projects if there is specific provider
# for example appVeyor should be used for Windows testing
getFilterByProvider = (providerName, platformName) ->
if providerName
console.log("only allow projects for provider", providerName)
providerFilter = R.propEq("provider", providerName)
else
providerFilter = R.identity
if platformName
console.log("only allow projects for platform", platformName)
platformFilter = R.propEq("platform", platformName)
else
platformFilter = R.identity
# combined filter is when both filters pass
projectFilter = R.allPass([providerFilter, platformFilter])
projectFilter
module.exports = {
_PROVIDERS,
remapProjects,
getFilterByProvider,
nextVersion: (version) ->
MAIN_PROJECTS = remapMain(_PROVIDERS)
console.log("Setting next version to build", version)
console.log("In these projects:")
console.table(MAIN_PROJECTS)
la(check.unemptyString(version),
"missing next version to set", version)
setNextDevVersion = (project, provider) ->
console.log("setting env var NEXT_DEV_VERSION to %s on %s in project %s",
version, provider, project)
car.updateProjectEnv(project, provider, {
NEXT_DEV_VERSION: version,
})
awaitEachProjectAndProvider(MAIN_PROJECTS, setNextDevVersion)
# in each project, set a couple of environment variables
version: (nameOrUrl, binaryVersionOrUrl, platform, providerName) ->
console.log("All possible projects:")
console.table(PROJECTS)
la(check.unemptyString(nameOrUrl),
"missing cypress name or url to set", nameOrUrl)
if check.semver(nameOrUrl)
console.log("for version", nameOrUrl)
nameOrUrl = "cypress@#{nameOrUrl}"
console.log("full NPM install name is", nameOrUrl)
la(check.unemptyString(binaryVersionOrUrl),
"missing binary version or url", binaryVersionOrUrl)
result = {
versionName: nameOrUrl,
binary: binaryVersionOrUrl
}
projectFilter = getFilterByProvider(providerName)
updateProject = (project, provider) ->
console.log("setting environment variables in", project)
car.updateProjectEnv(project, provider, {
CYPRESS_NPM_PACKAGE_NAME: nameOrUrl,
CYPRESS_INSTALL_BINARY: binaryVersionOrUrl
})
awaitEachProjectAndProvider(PROJECTS, updateProject, projectFilter)
.then R.always(result)
# triggers test projects on multiple CIs
# the test projects will exercise the new version of
# the Cypress test runner we just built
runTestProjects: (getStatusAndMessage, providerName, version, platform) ->
projectFilter = getFilterByProvider(providerName, platform)
makeCommit = (project, provider, creds) ->
## make empty commit to trigger CIs
## project is owner/repo string like cypress-io/cypress-test-tiny
console.log("making commit to project", project)
# print if we have a few github variables present
console.log("do we have GH_APP_ID?", Boolean(process.env.GH_APP_ID))
console.log("do we have GH_INSTALLATION_ID?", Boolean(process.env.GH_INSTALLATION_ID))
console.log("do we have GH_PRIVATE_KEY?", Boolean(process.env.GH_PRIVATE_KEY))
parsedRepo = parse(project)
owner = parsedRepo[0]
repo = parsedRepo[1]
{ status, message } = getStatusAndMessage(repo)
if not message
message =
"""
Testing new Cypress version #{version}
"""
if process.env.CIRCLE_BUILD_URL
message += "\n"
message += "Circle CI build url #{process.env.CIRCLE_BUILD_URL}"
if process.env.APPVEYOR
slug = process.env.APPVEYOR_PROJECT_SLUG
build = process.env.APPVEYOR_BUILD_ID
message += "\n"
message += "AppVeyor CI #{slug} #{build}"
defaultOptions = {
owner,
repo,
message,
token: creds.githubToken,
}
createGithubCommitStatusCheck = ({ sha }) ->
return if not status
# status is {owner, repo, sha} and maybe a few other properties
isStatus = check.schema({
owner: check.unemptyString,
repo: check.unemptyString,
sha: check.commitId,
context: check.unemptyString,
platform: check.unemptyString,
arch: check.unemptyString
})
if not isStatus(status)
console.error("Invalid status object %o", status)
targetUrl = "https://github.com/#{owner}/#{repo}/commit/#{sha}"
commitStatusOptions = {
targetUrl,
owner: status.owner,
repo: status.repo,
sha: status.sha,
context: status.context,
state: 'pending',
description: "#{owner}/#{repo}",
}
console.log(
'creating commit status check',
commitStatusOptions.description,
commitStatusOptions.context
)
setCommitStatus(commitStatusOptions)
if not version
return makeEmptyGithubCommit(defaultOptions).then(createGithubCommitStatusCheck)
# first try to commit to branch for next upcoming version
specificBranchOptions = {
owner: owner,
repo: repo,
token: creds.githubToken,
message,
branch: version
}
makeEmptyGithubCommit(specificBranchOptions)
.catch () ->
# maybe there is no branch for next version
# try default branch
makeEmptyGithubCommit(defaultOptions)
.then(createGithubCommitStatusCheck)
awaitEachProjectAndProvider(PROJECTS, makeCommit, projectFilter)
}
+358
View File
@@ -0,0 +1,358 @@
const _ = require('lodash')
const Promise = require('bluebird')
const bumpercar = require('@cypress/bumpercar')
const path = require('path')
const la = require('lazy-ass')
const check = require('check-more-types')
const R = require('ramda')
const { configFromEnvOrJsonFile, filenameToShellVariable } = require('@cypress/env-or-json-file')
const makeEmptyGithubCommit = require('make-empty-github-commit')
const parse = require('parse-github-repo-url')
const { setCommitStatus } = require('@cypress/github-commit-status-check')
// bumpercar instance; created lazily inside awaitEachProjectAndProvider
// once the CI credentials have been loaded
let car = null

// all the projects to trigger / run / change environment variables for
// "main" is the provider's primary repo (its env gets the next version);
// the per-platform arrays list downstream test repos triggered per OS
const _PROVIDERS = {
  appVeyor: {
    main: 'cypress-io/cypress',
    win32: [
      'cypress-io/cypress-test-tiny',
      'cypress-io/cypress-test-example-repos',
    ],
  },
  circle: {
    main: 'cypress-io/cypress',
    linux: [
      'cypress-io/cypress-test-tiny',
      'cypress-io/cypress-test-module-api',
      'cypress-io/cypress-test-node-versions',
      'cypress-io/cypress-test-nested-projects',
      'cypress-io/cypress-test-ci-environments',
      'cypress-io/cypress-test-example-repos',
    ],
    darwin: [
      'cypress-io/cypress-test-tiny',
      'cypress-io/cypress-test-example-repos',
    ],
  },
}
// Flattens {provider: {platform: [repos]}} into a list of
// {repo, provider, platform} objects.
//
// Provider key order is preserved and platforms are emitted in the
// original win32 -> linux -> darwin order, matching the previous
// implementation. Rewritten with plain Object.entries iteration:
// the old version used _.mapValues purely for side effects (its
// mapped result was discarded), which is both misleading and an
// unnecessary lodash dependency for simple iteration.
const remapProjects = function (projectsByProvider) {
  const list = []

  for (const [providerName, provider] of Object.entries(projectsByProvider)) {
    for (const platform of ['win32', 'linux', 'darwin']) {
      const repos = provider[platform]

      if (repos) {
        for (const repo of repos) {
          list.push({
            repo,
            provider: providerName,
            platform,
          })
        }
      }
    }
  }

  return list
}
// Collects each provider's "main" repo as {repo, provider} pairs,
// preserving provider key order.
//
// The previous version used _.mapValues only for its side effect of
// pushing into a list; a direct Object.entries().map() expresses the
// transformation without lodash.
const remapMain = function (projectsByProvider) {
  return Object.entries(projectsByProvider).map(([name, provider]) => {
    return {
      repo: provider.main,
      provider: name,
    }
  })
}
// make flat list of objects
// {repo, provider, platform}
const PROJECTS = remapProjects(_PROVIDERS)
// Loads the CI credentials object, either from the shell variable derived
// from "scripts/support/ci.json" or from that JSON file itself
// (via @cypress/env-or-json-file). Throws when neither is available so
// callers fail fast instead of triggering CI builds with missing tokens.
const getCiConfig = function () {
  const key = path.join('scripts', 'support', 'ci.json')
  const config = configFromEnvOrJsonFile(key)

  if (!config) {
    // print every lookup detail before throwing to make the failure debuggable
    console.error('⛔️ Cannot find CI credentials')
    console.error('Using @cypress/env-or-json-file module')
    console.error('and filename', key)
    console.error('which is environment variable', filenameToShellVariable(key))
    throw new Error('CI config not found')
  }

  return config
}
// Runs `fn(repo, providerName, creds)` sequentially (one at a time) for
// every project that passes `filter`. As a side effect, (re)assigns the
// module-level `car` with a bumpercar instance configured from whichever
// CI tokens are present in the credentials — callbacks passed as `fn`
// rely on `car` being set by the time they run.
const awaitEachProjectAndProvider = function (projects, fn, filter = R.identity) {
  const creds = getCiConfig()

  // configure a new Bumpercar
  const providers = {}

  // NOTE(review): the github token enables the "travis" provider here —
  // presumably a historical mapping; confirm before changing.
  if (check.unemptyString(creds.githubToken)) {
    providers.travis = {
      githubToken: creds.githubToken,
    }
  }

  if (check.unemptyString(creds.circleToken)) {
    providers.circle = {
      circleToken: creds.circleToken,
    }
  }

  if (check.unemptyString(creds.appVeyorToken)) {
    providers.appVeyor = {
      appVeyorToken: creds.appVeyorToken,
    }
  }

  if (check.unemptyString(creds.buildkiteToken)) {
    providers.buildkite = {
      buildkiteToken: creds.buildkiteToken,
    }
  }

  const providerNames = Object.keys(providers)

  console.log('configured providers', providerNames)
  la(check.not.empty(providerNames), 'empty list of providers')

  car = bumpercar.create({ providers })

  const filteredProjects = R.filter(filter, projects)

  // an empty result is only warned about, not fatal — mapSeries over an
  // empty list simply resolves with []
  if (check.empty(filteredProjects)) {
    console.log('⚠️ zero filtered projects left after filtering')
  }

  console.log('filtered projects:')
  console.table(filteredProjects)

  return Promise.mapSeries(filteredProjects, (project) => {
    return fn(project.repo, project.provider, creds)
  })
}
// do not trigger all projects if there is specific provider
// for example appVeyor should be used for Windows testing
// do not trigger all projects if there is specific provider
// for example appVeyor should be used for Windows testing
//
// Builds a predicate over {repo, provider, platform} entries: when a
// provider and/or platform name is given, only matching entries pass;
// with no arguments everything passes. Rewritten with plain comparisons
// instead of R.propEq/R.allPass — the predicates are trivial, and
// R.propEq's argument order changed in ramda 0.29, making the old code
// fragile across ramda upgrades. Returns a boolean, as R.allPass did.
const getFilterByProvider = function (providerName, platformName) {
  if (providerName) {
    console.log('only allow projects for provider', providerName)
  }

  if (platformName) {
    console.log('only allow projects for platform', platformName)
  }

  // combined filter is when both filters pass
  return (project) => {
    if (providerName && project.provider !== providerName) {
      return false
    }

    if (platformName && project.platform !== platformName) {
      return false
    }

    return true
  }
}
module.exports = {
_PROVIDERS,
remapProjects,
getFilterByProvider,
// Sets the NEXT_DEV_VERSION environment variable on every provider's
// "main" project so upcoming CI builds produce the given version.
// Returns a promise resolving when all projects have been updated.
nextVersion (version) {
  const MAIN_PROJECTS = remapMain(_PROVIDERS)

  console.log('Setting next version to build', version)
  console.log('In these projects:')
  console.table(MAIN_PROJECTS)
  la(check.unemptyString(version),
    'missing next version to set', version)

  const setNextDevVersion = function (project, provider) {
    console.log('setting env var NEXT_DEV_VERSION to %s on %s in project %s',
      version, provider, project)

    // `car` is assigned inside awaitEachProjectAndProvider before this
    // callback runs
    return car.updateProjectEnv(project, provider, {
      NEXT_DEV_VERSION: version,
    })
  }

  return awaitEachProjectAndProvider(MAIN_PROJECTS, setNextDevVersion)
},
// in each project, set a couple of environment variables
// in each project, set a couple of environment variables
// (CYPRESS_NPM_PACKAGE_NAME and CYPRESS_INSTALL_BINARY) so the test
// projects install the NPM package / binary under test. Resolves with
// {versionName, binary} describing what was set.
// NOTE(review): the `platform` parameter is not used in this body;
// presumably kept for call-site compatibility — confirm before removing.
version (nameOrUrl, binaryVersionOrUrl, platform, providerName) {
  console.log('All possible projects:')
  console.table(PROJECTS)
  la(check.unemptyString(nameOrUrl),
    'missing cypress name or url to set', nameOrUrl)

  // a bare semver like "4.1.0" becomes the installable "cypress@4.1.0"
  if (check.semver(nameOrUrl)) {
    console.log('for version', nameOrUrl)
    nameOrUrl = `cypress@${nameOrUrl}`
    console.log('full NPM install name is', nameOrUrl)
  }

  la(check.unemptyString(binaryVersionOrUrl),
    'missing binary version or url', binaryVersionOrUrl)

  const result = {
    versionName: nameOrUrl,
    binary: binaryVersionOrUrl,
  }

  const projectFilter = getFilterByProvider(providerName)

  const updateProject = function (project, provider) {
    console.log('setting environment variables in', project)

    // `car` is assigned inside awaitEachProjectAndProvider before this runs
    return car.updateProjectEnv(project, provider, {
      CYPRESS_NPM_PACKAGE_NAME: nameOrUrl,
      CYPRESS_INSTALL_BINARY: binaryVersionOrUrl,
    })
  }

  return awaitEachProjectAndProvider(PROJECTS, updateProject, projectFilter)
  .then(R.always(result))
},
// triggers test projects on multiple CIs
// the test projects will exercise the new version of
// the Cypress test runner we just built
// triggers test projects on multiple CIs
// the test projects will exercise the new version of
// the Cypress test runner we just built
//
// For each matching project: makes an empty GitHub commit (preferring a
// branch named after `version`, falling back to the default branch), then
// optionally creates a pending commit status check from the status object
// returned by `getStatusAndMessage(repo)`.
runTestProjects (getStatusAndMessage, providerName, version, platform) {
  const projectFilter = getFilterByProvider(providerName, platform)

  const makeCommit = function (project, provider, creds) {
    // make empty commit to trigger CIs
    // project is owner/repo string like cypress-io/cypress-test-tiny
    console.log('making commit to project', project)

    // print if we have a few github variables present
    console.log('do we have GH_APP_ID?', Boolean(process.env.GH_APP_ID))
    console.log('do we have GH_INSTALLATION_ID?', Boolean(process.env.GH_INSTALLATION_ID))
    console.log('do we have GH_PRIVATE_KEY?', Boolean(process.env.GH_PRIVATE_KEY))

    const parsedRepo = parse(project)
    const owner = parsedRepo[0]
    const repo = parsedRepo[1]

    let { status, message } = getStatusAndMessage(repo)

    // build a default commit message when the caller supplied none,
    // appending links to the CI build that produced this binary
    // NOTE(review): the escaped-newline template below is a decaffeinate
    // artifact of a CoffeeScript """block"""; it yields the message plus
    // a trailing newline — confirm before "cleaning it up"
    if (!message) {
      message =
`\
Testing new Cypress version ${version}
\
`

      if (process.env.CIRCLE_BUILD_URL) {
        message += '\n'
        message += `Circle CI build url ${process.env.CIRCLE_BUILD_URL}`
      }

      if (process.env.APPVEYOR) {
        const slug = process.env.APPVEYOR_PROJECT_SLUG
        const build = process.env.APPVEYOR_BUILD_ID

        message += '\n'
        message += `AppVeyor CI ${slug} ${build}`
      }
    }

    const defaultOptions = {
      owner,
      repo,
      message,
      token: creds.githubToken,
    }

    // marks the freshly made commit as "pending" on GitHub; no-op when
    // the caller returned no status object
    const createGithubCommitStatusCheck = function ({ sha }) {
      if (!status) {
        return
      }

      // status is {owner, repo, sha} and maybe a few other properties
      const isStatus = check.schema({
        owner: check.unemptyString,
        repo: check.unemptyString,
        sha: check.commitId,
        context: check.unemptyString,
        platform: check.unemptyString,
        arch: check.unemptyString,
      })

      // NOTE(review): an invalid status is only logged — execution still
      // proceeds to setCommitStatus below; confirm that is intended
      if (!isStatus(status)) {
        console.error('Invalid status object %o', status)
      }

      const targetUrl = `https://github.com/${owner}/${repo}/commit/${sha}`
      const commitStatusOptions = {
        targetUrl,
        owner: status.owner,
        repo: status.repo,
        sha: status.sha,
        context: status.context,
        state: 'pending',
        description: `${owner}/${repo}`,
      }

      console.log(
        'creating commit status check',
        commitStatusOptions.description,
        commitStatusOptions.context,
      )

      return setCommitStatus(commitStatusOptions)
    }

    // without a version there is no version branch to try — commit
    // straight to the default branch
    if (!version) {
      return makeEmptyGithubCommit(defaultOptions).then(createGithubCommitStatusCheck)
    }

    // first try to commit to branch for next upcoming version
    const specificBranchOptions = {
      owner,
      repo,
      token: creds.githubToken,
      message,
      branch: version,
    }

    return makeEmptyGithubCommit(specificBranchOptions)
    .catch(() => {
      // maybe there is no branch for next version
      // try default branch
      return makeEmptyGithubCommit(defaultOptions)
    }).then(createGithubCommitStatusCheck)
  }

  return awaitEachProjectAndProvider(PROJECTS, makeCommit, projectFilter)
},
}
-225
View File
@@ -1,225 +0,0 @@
## store the cwd
cwd = process.cwd()
path = require("path")
_ = require("lodash")
os = require("os")
gift = require("gift")
chalk = require("chalk")
Promise = require("bluebird")
minimist = require("minimist")
la = require("lazy-ass")
check = require("check-more-types")
debug = require("debug")("cypress:binary")
questionsRemain = require("@cypress/questions-remain")
R = require("ramda")
zip = require("./zip")
ask = require("./ask")
bump = require("./bump")
meta = require("./meta")
build = require("./build")
upload = require("./upload")
uploadUtils = require("./util/upload")
{uploadNpmPackage} = require("./upload-npm-package")
{uploadUniqueBinary} = require("./upload-unique-binary")
{moveBinaries} = require('./move-binaries')
## initialize on existing repo
repo = Promise.promisifyAll(gift(cwd))
success = (str) ->
console.log chalk.bgGreen(" " + chalk.black(str) + " ")
fail = (str) ->
console.log chalk.bgRed(" " + chalk.black(str) + " ")
zippedFilename = R.always(upload.zipName)
# goes through the list of properties and asks relevant question
# resolves with all relevant options set
# if the property already exists, skips the question
askMissingOptions = (properties = []) ->
questions = {
platform: ask.whichPlatform,
version: ask.deployNewVersion,
# note: zip file might not be absolute
zip: ask.whichZipFile
nextVersion: ask.nextVersion
commit: ask.toCommit
}
pickedQuestions = _.pick(questions, properties)
questionsRemain(pickedQuestions)
## hack for @packages/server modifying cwd
process.chdir(cwd)
commitVersion = (version) ->
msg = "release #{version} [skip ci]"
repo.commitAsync(msg, {
'allow-empty': true,
})
deploy = {
meta: meta
parseOptions: (argv) ->
opts = minimist(argv, {
boolean: ["skip-clean"]
default: {
"skip-clean": false
}
alias: {
skipClean: "skip-clean",
zip: ["zipFile", "zip-file", "filename"]
}
})
opts.runTests = false if opts["skip-tests"]
if not opts.platform and os.platform() == meta.platforms.linux
# only can build Linux on Linux
opts.platform = meta.platforms.linux
# windows aliases
if opts.platform == "win32" or opts.platform == "win" or opts.platform == "windows"
opts.platform = meta.platforms.windows
if not opts.platform and os.platform() == meta.platforms.windows
# only can build Windows binary on Windows platform
opts.platform = meta.platforms.windows
# be a little bit user-friendly and allow aliased values
if opts.platform == "mac"
opts.platform = meta.platforms.darwin
debug("parsed command line options")
debug(opts)
opts
bump: ->
ask.whichBumpTask()
.then (task) ->
switch task
when "run"
bump.runTestProjects()
when "version"
ask.whichVersion(meta.distDir(""))
.then (v) ->
bump.version(v)
## sets environment variable on each CI provider
## to NEXT version to build
setNextVersion: ->
options = @parseOptions(process.argv)
askMissingOptions(['nextVersion'])(options)
.then ({nextVersion}) ->
bump.nextVersion(nextVersion)
release: ->
## read off the argv
options = @parseOptions(process.argv)
release = ({ version, commit, nextVersion }) =>
upload.s3Manifest(version)
.then ->
if commit
commitVersion(version)
.then ->
bump.nextVersion(nextVersion)
.then ->
success("Release Complete")
.catch (err) ->
fail("Release Failed")
throw err
askMissingOptions(['version', 'nextVersion'])(options)
.then(release)
build: (options) ->
console.log('#build')
options ?= @parseOptions(process.argv)
debug("parsed build options %o", options)
askMissingOptions(['version', 'platform'])(options)
.then ->
debug("building binary: platform %s version %s", options.platform, options.version)
build(options.platform, options.version, options)
zip: (options) ->
console.log('#zip')
if !options then options = @parseOptions(process.argv)
askMissingOptions(['platform'])(options)
.then (options) ->
zipDir = meta.zipDir(options.platform)
console.log("directory to zip %s", zipDir)
options.zip = path.resolve(zippedFilename(options.platform))
zip.ditto(zipDir, options.zip)
# upload Cypress NPM package file
"upload-npm-package": (args = process.argv) ->
console.log('#packageUpload')
uploadNpmPackage(args)
# upload Cypress binary zip file under unique hash
"upload-unique-binary": (args = process.argv) ->
console.log('#uniqueBinaryUpload')
uploadUniqueBinary(args)
# uploads a single built Cypress binary ZIP file
# usually a binary is built on CI and is uploaded
upload: (options) ->
console.log('#upload')
if not options
options = @parseOptions(process.argv)
askMissingOptions(['version', 'platform', 'zip'])(options)
.then (options) ->
la(check.unemptyString(options.zip),
"missing zipped filename", options)
options.zip = path.resolve(options.zip)
options
.then (options) ->
console.log("Need to upload file %s", options.zip)
console.log("for platform %s version %s",
options.platform, options.version)
upload.toS3({
zipFile: options.zip,
version: options.version,
platform: options.platform,
})
"move-binaries": (args = process.argv) ->
console.log('#moveBinaries')
moveBinaries(args)
# purge all platforms of a desktop app for specific version
"purge-version": (args = process.argv) ->
console.log('#purge-version')
options = minimist(args, {
string: 'version',
alias: {
version: 'v'
}
})
la(check.unemptyString(options.version), "missing app version to purge", options)
uploadUtils.purgeDesktopAppAllPlatforms(options.version, upload.zipName)
# goes through the entire pipeline:
# - build
# - zip
# - upload
deploy: ->
options = @parseOptions(process.argv)
askMissingOptions(['version', 'platform'])(options)
.then (options) =>
@build(options)
.then => @zip(options)
# assumes options.zip contains the zipped filename
.then => @upload(options)
}
module.exports = _.bindAll(deploy, _.functions(deploy))
+285
View File
@@ -0,0 +1,285 @@
// store the cwd
const cwd = process.cwd()
const path = require('path')
const _ = require('lodash')
const os = require('os')
const gift = require('gift')
const chalk = require('chalk')
const Promise = require('bluebird')
const minimist = require('minimist')
const la = require('lazy-ass')
const check = require('check-more-types')
const debug = require('debug')('cypress:binary')
const questionsRemain = require('@cypress/questions-remain')
const R = require('ramda')
const zip = require('./zip')
const ask = require('./ask')
const bump = require('./bump')
const meta = require('./meta')
const build = require('./build')
const upload = require('./upload')
const uploadUtils = require('./util/upload')
const { uploadNpmPackage } = require('./upload-npm-package')
const { uploadUniqueBinary } = require('./upload-unique-binary')
const { moveBinaries } = require('./move-binaries')
// initialize on existing repo
const repo = Promise.promisifyAll(gift(cwd))
// print a message as black text on a green badge (step succeeded)
const success = (message) => console.log(chalk.bgGreen(` ${chalk.black(message)} `))
// print a message as black text on a red badge (step failed)
const fail = (message) => console.log(chalk.bgRed(` ${chalk.black(message)} `))
const zippedFilename = R.always(upload.zipName)
// goes through the list of properties and asks relevant question
// resolves with all relevant options set
// if the property already exists, skips the question
// goes through the list of properties and asks relevant question
// resolves with all relevant options set
// if the property already exists, skips the question
const askMissingOptions = function (properties = []) {
  const questionByProperty = {
    platform: ask.whichPlatform,
    version: ask.deployNewVersion,
    // note: zip file might not be absolute
    zip: ask.whichZipFile,
    nextVersion: ask.nextVersion,
    commit: ask.toCommit,
  }

  return questionsRemain(_.pick(questionByProperty, properties))
}
// hack for @packages/server modifying cwd
process.chdir(cwd)
// Makes a git commit "release <version> [skip ci]" in the current repo.
// --allow-empty because a release may not change any tracked files;
// "[skip ci]" prevents the commit itself from triggering CI.
const commitVersion = function (version) {
  const msg = `release ${version} [skip ci]`

  return repo.commitAsync(msg, {
    'allow-empty': true,
  })
}
const deploy = {
meta,
parseOptions (argv) {
const opts = minimist(argv, {
boolean: ['skip-clean'],
default: {
'skip-clean': false,
},
alias: {
skipClean: 'skip-clean',
zip: ['zipFile', 'zip-file', 'filename'],
},
})
if (opts['skip-tests']) {
opts.runTests = false
}
if (!opts.platform && (os.platform() === meta.platforms.linux)) {
// only can build Linux on Linux
opts.platform = meta.platforms.linux
}
// windows aliases
if ((opts.platform === 'win32') || (opts.platform === 'win') || (opts.platform === 'windows')) {
opts.platform = meta.platforms.windows
}
if (!opts.platform && (os.platform() === meta.platforms.windows)) {
// only can build Windows binary on Windows platform
opts.platform = meta.platforms.windows
}
// be a little bit user-friendly and allow aliased values
if (opts.platform === 'mac') {
opts.platform = meta.platforms.darwin
}
debug('parsed command line options')
debug(opts)
return opts
},
// Interactive "bump" entry point: asks which bump task to perform, then
// either re-triggers the test projects or asks for a version and sets it
// across CI projects.
bump () {
  return ask.whichBumpTask()
  .then((task) => {
    switch (task) {
      case 'run':
        return bump.runTestProjects()
      case 'version':
        return ask.whichVersion(meta.distDir(''))
        .then((v) => {
          return bump.version(v)
        })
      default:
        // NOTE(review): unreachable as long as ask.whichBumpTask only
        // offers "run"/"version" — confirm against ./ask
        throw new Error('unknown task')
    }
  })
},
// sets environment variable on each CI provider
// to NEXT version to build
setNextVersion () {
const options = this.parseOptions(process.argv)
return askMissingOptions(['nextVersion'])(options)
.then(({ nextVersion }) => {
return bump.nextVersion(nextVersion)
})
},
// Publishes a release: uploads the S3 manifest for the version,
// optionally commits "release <version> [skip ci]", then points CI
// projects at the next version. Rethrows on failure after printing
// a red failure badge.
release () {
  // read off the argv
  const options = this.parseOptions(process.argv)

  const release = ({ version, commit, nextVersion }) => {
    return upload.s3Manifest(version)
    .then(() => {
      // the commit step is opt-in via the "commit" answer/flag
      if (commit) {
        return commitVersion(version)
      }
    }).then(() => {
      return bump.nextVersion(nextVersion)
    }).then(() => {
      return success('Release Complete')
    }).catch((err) => {
      fail('Release Failed')
      throw err
    })
  }

  return askMissingOptions(['version', 'nextVersion'])(options)
  .then(release)
},
build (options) {
console.log('#build')
if (options == null) {
options = this.parseOptions(process.argv)
}
debug('parsed build options %o', options)
return askMissingOptions(['version', 'platform'])(options)
.then(() => {
debug('building binary: platform %s version %s', options.platform, options.version)
return build(options.platform, options.version, options)
})
},
zip (options) {
console.log('#zip')
if (!options) {
options = this.parseOptions(process.argv)
}
return askMissingOptions(['platform'])(options)
.then((options) => {
const zipDir = meta.zipDir(options.platform)
console.log('directory to zip %s', zipDir)
options.zip = path.resolve(zippedFilename(options.platform))
return zip.ditto(zipDir, options.zip)
})
},
// upload Cypress NPM package file
'upload-npm-package' (args = process.argv) {
console.log('#packageUpload')
return uploadNpmPackage(args)
},
// upload Cypress binary zip file under unique hash
'upload-unique-binary' (args = process.argv) {
console.log('#uniqueBinaryUpload')
return uploadUniqueBinary(args)
},
// uploads a single built Cypress binary ZIP file
// usually a binary is built on CI and is uploaded
// uploads a single built Cypress binary ZIP file
// usually a binary is built on CI and is uploaded
//
// Resolves options.zip to an absolute path, then pushes the file to S3
// under the given platform/version.
upload (options) {
  console.log('#upload')

  if (!options) {
    options = this.parseOptions(process.argv)
  }

  return askMissingOptions(['version', 'platform', 'zip'])(options)
  .then((options) => {
    la(check.unemptyString(options.zip),
      'missing zipped filename', options)

    // the answer may be a relative path — normalize before uploading
    options.zip = path.resolve(options.zip)

    return options
  }).then((options) => {
    console.log('Need to upload file %s', options.zip)
    console.log('for platform %s version %s',
      options.platform, options.version)

    return upload.toS3({
      zipFile: options.zip,
      version: options.version,
      platform: options.platform,
    })
  })
},
'move-binaries' (args = process.argv) {
console.log('#moveBinaries')
return moveBinaries(args)
},
// purge all platforms of a desktop app for specific version
'purge-version' (args = process.argv) {
console.log('#purge-version')
const options = minimist(args, {
string: 'version',
alias: {
version: 'v',
},
})
la(check.unemptyString(options.version), 'missing app version to purge', options)
return uploadUtils.purgeDesktopAppAllPlatforms(options.version, upload.zipName)
},
// goes through the entire pipeline:
// - build
// - zip
// - upload
// goes through the entire pipeline:
//  - build
//  - zip
//  - upload
deploy () {
  const options = this.parseOptions(process.argv)

  return askMissingOptions(['version', 'platform'])(options)
  .then((options) => {
    return this.build(options)
    .then(() => {
      return this.zip(options)
    })
    // assumes options.zip contains the zipped filename
    .then(() => {
      return this.upload(options)
    })
  })
},
}
module.exports = _.bindAll(deploy, _.functions(deploy))
-6
View File
@@ -1,6 +0,0 @@
chalk = require("chalk")
module.exports = (msg, color = "yellow") ->
return if process.env["NODE_ENV"] is "test"
console.log chalk[color](msg), chalk.bgWhite(chalk.black(@osName))
+9
View File
@@ -0,0 +1,9 @@
const chalk = require('chalk')
module.exports = function (msg, color = 'yellow') {
if (process.env['NODE_ENV'] === 'test') {
return
}
return console.log(chalk[color](msg), chalk.bgWhite(chalk.black(this.osName)))
}
-93
View File
@@ -1,93 +0,0 @@
path = require("path")
la = require("lazy-ass")
check = require("check-more-types")
R = require("ramda")
os = require("os")
# canonical platform names
platforms = {
darwin: "darwin"
linux: "linux",
windows: "win32"
}
isValidPlatform = check.oneOf(R.values(platforms))
checkPlatform = (platform) ->
la(isValidPlatform(platform),
"invalid build platform", platform, "valid choices", R.values(platforms))
buildRootDir = () ->
path.resolve("build")
## returns a path into the /build directory
## the output folder should look something like this
## build/
## <platform>/ = linux or darwin
## ... platform-specific files
buildDir = (platform, args...) ->
checkPlatform(platform)
root = buildRootDir()
switch platform
when "darwin"
# the new electron-builder for some reason adds its own platform
# subfolder and it is NOT "darwin" but "mac"
path.resolve(root, "mac", args...)
when "linux"
path.resolve(root, "linux-unpacked", args...)
when "win32"
if os.arch() == "x64"
path.resolve(root, "win-unpacked", args...)
else
# x86 32bit architecture
path.resolve(root, "win-ia32-unpacked", args...)
## returns a path into the /dist directory
distDir = (platform, args...) ->
checkPlatform(platform)
path.resolve("dist", platform, args...)
## returns folder to zip before uploading
zipDir = (platform) ->
checkPlatform(platform)
switch platform
when "darwin"
buildDir(platform, "Cypress.app")
when "linux"
buildDir(platform)
when "win32"
buildDir(platform)
## returns a path into the /build/*/app directory
## specific to each platform
buildAppDir = (platform, args...) ->
checkPlatform(platform)
switch platform
when "darwin"
buildDir(platform, "Cypress.app", "Contents", "resources", "app", args...)
when "linux"
buildDir(platform, "resources", "app", args...)
when "win32"
buildDir(platform, "resources", "app", args...)
buildAppExecutable = (platform) ->
checkPlatform(platform)
switch platform
when "darwin"
buildDir(platform, "Cypress.app", "Contents", "MacOS", "Cypress")
when "linux"
buildDir(platform, "Cypress")
when "win32"
buildDir(platform, "Cypress")
module.exports = {
isValidPlatform
buildRootDir
buildDir
distDir
zipDir
buildAppDir
buildAppExecutable
cacheDir: path.join(process.cwd(), "cache"),
platforms
}
+115
View File
@@ -0,0 +1,115 @@
const path = require('path')
const la = require('lazy-ass')
const check = require('check-more-types')
const R = require('ramda')
const os = require('os')
// canonical platform names
// values match Node's os.platform() identifiers; "windows" is the
// friendly key for the "win32" identifier
const platforms = {
  darwin: 'darwin',
  linux: 'linux',
  windows: 'win32',
}

// predicate: is the value one of the canonical platform identifiers?
const isValidPlatform = check.oneOf(R.values(platforms))
// asserts that the given value is a canonical platform identifier
const checkPlatform = (buildPlatform) => {
  return la(isValidPlatform(buildPlatform),
    'invalid build platform', buildPlatform, 'valid choices', R.values(platforms))
}
// absolute path of the top-level "build" output folder
const buildRootDir = () => path.resolve('build')
// Returns a path into the /build directory for the given platform.
// electron-builder writes each platform into its own subfolder:
//   darwin -> build/mac (electron-builder uses "mac", not "darwin")
//   linux  -> build/linux-unpacked
//   win32  -> build/win-unpacked, or build/win-ia32-unpacked on 32-bit
const buildDir = function (platform, ...args) {
  checkPlatform(platform)

  const root = buildRootDir()

  if (platform === 'darwin') {
    // the new electron-builder for some reason adds its own platform
    // subfolder and it is NOT "darwin" but "mac"
    return path.resolve(root, 'mac', ...args)
  }

  if (platform === 'linux') {
    return path.resolve(root, 'linux-unpacked', ...args)
  }

  if (platform === 'win32') {
    // x86 32bit architecture gets its own folder
    const folder = os.arch() === 'x64' ? 'win-unpacked' : 'win-ia32-unpacked'

    return path.resolve(root, folder, ...args)
  }

  throw new Error('unexpected platform')
}
// Returns a path into the /dist directory for the given platform.
const distDir = function (platform, ...args) {
  checkPlatform(platform)

  return path.resolve('dist', platform, ...args)
}

// Returns the folder that gets zipped before uploading. On macOS only the
// .app bundle is zipped; on linux/win32 the whole unpacked build folder is.
const zipDir = function (platform) {
  checkPlatform(platform)

  if (platform === 'darwin') {
    return buildDir(platform, 'Cypress.app')
  }

  if (platform === 'linux' || platform === 'win32') {
    return buildDir(platform)
  }

  throw new Error('unexpected platform')
}
// Returns a path into the /build/*/app directory, whose location is
// platform-specific (macOS nests it inside the .app bundle).
const buildAppDir = function (platform, ...args) {
  checkPlatform(platform)

  if (platform === 'darwin') {
    return buildDir(platform, 'Cypress.app', 'Contents', 'resources', 'app', ...args)
  }

  if (platform === 'linux' || platform === 'win32') {
    return buildDir(platform, 'resources', 'app', ...args)
  }

  throw new Error('unexpected platform')
}

// Returns the path of the Cypress executable inside the build folder.
const buildAppExecutable = function (platform) {
  checkPlatform(platform)

  if (platform === 'darwin') {
    return buildDir(platform, 'Cypress.app', 'Contents', 'MacOS', 'Cypress')
  }

  if (platform === 'linux' || platform === 'win32') {
    return buildDir(platform, 'Cypress')
  }

  throw new Error('unexpected platform')
}
// public API: path helpers shared by the build/zip/upload scripts
module.exports = {
  isValidPlatform,
  buildRootDir,
  buildDir,
  distDir,
  zipDir,
  buildAppDir,
  buildAppExecutable,
  // cache folder under the current working directory
  cacheDir: path.join(process.cwd(), 'cache'),
  platforms,
}
-140
View File
@@ -1,140 +0,0 @@
_ = require("lodash")
fse = require("fs-extra")
cp = require("child_process")
execa = require('execa')
path = require("path")
Promise = require("bluebird")
os = require("os")
verify = require("../../cli/lib/tasks/verify")
Fixtures = require("../../packages/server/test/support/helpers/fixtures")
fs = Promise.promisifyAll(fse)
## video recording is not yet supported on Windows
canRecordVideo = () ->
  os.platform() != "win32"

## the e2e project tests are skipped entirely on Windows
shouldSkipProjectTest = () ->
  os.platform() == "win32"

## launches the binary with --smoke-test --ping=<rand> and verifies the
## process echoes the random number back on stdout
runSmokeTest = (buildAppExecutable, timeoutSeconds = 30) ->
  rand = String(_.random(0, 1000))
  console.log("executable path #{buildAppExecutable}")
  console.log("timeout #{timeoutSeconds} seconds")

  hasRightResponse = (stdout) ->
    # there could be more debug lines in the output, so find 1 line with
    # expected random value
    lines = stdout.split('\n').map((s) -> s.trim())
    return lines.includes(rand)

  args = []
  if verify.needsSandbox()
    args.push("--no-sandbox")
  # separate any Electron command line arguments from Cypress args
  args.push("--")
  args.push("--smoke-test")
  args.push("--ping=#{rand}")

  options = {
    timeout: timeoutSeconds * 1000
  }
  execa "#{buildAppExecutable}", args, options
  .catch (err) ->
    console.error("smoke test failed with error %s", err.message)
    throw err
  .then ({stdout}) ->
    # NOTE(review): no /g flag — only the first whitespace char is removed
    stdout = stdout.replace(/\s/, "")
    if !hasRightResponse(stdout)
      throw new Error("Stdout: '#{stdout}' did not match the random number: '#{rand}'")
    console.log("smoke test response", stdout)
    console.log("smokeTest passes")

## runs the passing e2e spec against the binary; resolves on exit code 0
runProjectTest = (buildAppExecutable, e2e) ->
  if shouldSkipProjectTest()
    console.log("skipping project test")
    return Promise.resolve()

  new Promise (resolve, reject) ->
    env = _.omit(process.env, "CYPRESS_INTERNAL_ENV")
    if !canRecordVideo()
      console.log("cannot record video on this platform yet, disabling")
      env.CYPRESS_VIDEO_RECORDING = "false"
    args = [
      "--run-project=#{e2e}",
      "--spec=#{e2e}/cypress/integration/simple_passing_spec.coffee"
    ]
    if verify.needsSandbox()
      args.push("--no-sandbox")
    options = {
      stdio: "inherit", env: env
    }
    console.log("running project test")
    console.log(buildAppExecutable, args.join(" "))
    cp.spawn(buildAppExecutable, args, options)
    .on "exit", (code) ->
      if code is 0
        resolve()
      else
        reject(new Error("running project tests failed with: '#{code}' errors."))

## runs the intentionally failing e2e spec; expects exit code 2 and a
## screenshot for each of the two failing tests
runFailingProjectTest = (buildAppExecutable, e2e) ->
  if shouldSkipProjectTest()
    console.log("skipping failing project test")
    return Promise.resolve()

  console.log("running failing project test")

  verifyScreenshots = ->
    screenshot1 = path.join(e2e, "cypress", "screenshots", "simple_failing_spec.coffee", "simple failing spec -- fails1 (failed).png")
    screenshot2 = path.join(e2e, "cypress", "screenshots", "simple_failing_spec.coffee", "simple failing spec -- fails2 (failed).png")
    Promise.all([
      fs.statAsync(screenshot1)
      fs.statAsync(screenshot2)
    ])

  spawn = ->
    new Promise (resolve, reject) ->
      env = _.omit(process.env, "CYPRESS_INTERNAL_ENV")
      args = [
        "--run-project=#{e2e}",
        "--spec=#{e2e}/cypress/integration/simple_failing_spec.coffee"
      ]
      if verify.needsSandbox()
        args.push("--no-sandbox")
      options = {
        stdio: "inherit",
        env
      }
      cp.spawn(buildAppExecutable, args, options)
      .on "exit", (code) ->
        if code is 2
          resolve()
        else
          reject(new Error("running project tests failed with: '#{code}' errors."))

  spawn()
  .then(verifyScreenshots)

## entry point: scaffold fixtures, run all three checks, clean up
test = (buildAppExecutable) ->
  Fixtures.scaffold()
  e2e = Fixtures.projectPath("e2e")
  runSmokeTest(buildAppExecutable)
  .then ->
    runProjectTest(buildAppExecutable, e2e)
  .then ->
    runFailingProjectTest(buildAppExecutable, e2e)
  .then ->
    Fixtures.remove()

module.exports = {
  test
}
+179
View File
@@ -0,0 +1,179 @@
const _ = require('lodash')
const fse = require('fs-extra')
const cp = require('child_process')
const execa = require('execa')
const path = require('path')
const Promise = require('bluebird')
const os = require('os')
const verify = require('../../cli/lib/tasks/verify')
const Fixtures = require('../../packages/server/test/support/helpers/fixtures')
const fs = Promise.promisifyAll(fse)
// Video recording is not yet supported on Windows.
const canRecordVideo = () => os.platform() !== 'win32'

// The e2e project tests are skipped entirely on Windows.
const shouldSkipProjectTest = () => os.platform() === 'win32'
// Smoke-tests the built binary: launches it with `--smoke-test --ping=<rand>`
// and expects the process to echo the random number back on stdout within
// `timeoutSeconds`. Rejects when the spawn fails or the echo does not match.
const runSmokeTest = function (buildAppExecutable, timeoutSeconds = 30) {
  const rand = String(_.random(0, 1000))

  console.log(`executable path ${buildAppExecutable}`)
  console.log(`timeout ${timeoutSeconds} seconds`)

  const hasRightResponse = function (stdout) {
    // there could be more debug lines in the output, so find 1 line with
    // expected random value
    const lines = stdout.split('\n').map((s) => {
      return s.trim()
    })

    return lines.includes(rand)
  }

  const args = []

  // add --no-sandbox when the verify task says this environment requires it
  if (verify.needsSandbox()) {
    args.push('--no-sandbox')
  }

  // separate any Electron command line arguments from Cypress args
  args.push('--')
  args.push('--smoke-test')
  args.push(`--ping=${rand}`)

  const options = {
    timeout: timeoutSeconds * 1000,
  }

  return execa(`${buildAppExecutable}`, args, options)
  .catch((err) => {
    console.error('smoke test failed with error %s', err.message)
    throw err
  }).then(({ stdout }) => {
    // NOTE(review): no /g flag, so only the FIRST whitespace character is
    // removed — presumably intentional, since hasRightResponse still splits
    // the output on newlines; confirm before "fixing".
    stdout = stdout.replace(/\s/, '')
    if (!hasRightResponse(stdout)) {
      throw new Error(`Stdout: '${stdout}' did not match the random number: '${rand}'`)
    }

    console.log('smoke test response', stdout)

    return console.log('smokeTest passes')
  })
}
// Runs the passing e2e project spec against the built binary.
// Resolves on exit code 0, rejects otherwise. Skipped on Windows.
const runProjectTest = function (buildAppExecutable, e2e) {
  if (shouldSkipProjectTest()) {
    console.log('skipping project test')

    return Promise.resolve()
  }

  return new Promise((resolve, reject) => {
    // drop CYPRESS_INTERNAL_ENV so the binary runs like a real install
    const env = _.omit(process.env, 'CYPRESS_INTERNAL_ENV')

    if (!canRecordVideo()) {
      console.log('cannot record video on this platform yet, disabling')
      env.CYPRESS_VIDEO_RECORDING = 'false'
    }

    const args = [
      `--run-project=${e2e}`,
      `--spec=${e2e}/cypress/integration/simple_passing_spec.coffee`,
    ]

    if (verify.needsSandbox()) {
      args.push('--no-sandbox')
    }

    console.log('running project test')
    console.log(buildAppExecutable, args.join(' '))

    return cp.spawn(buildAppExecutable, args, { stdio: 'inherit', env })
    .on('exit', (code) => {
      return code === 0
        ? resolve()
        : reject(new Error(`running project tests failed with: '${code}' errors.`))
    })
  })
}
// Runs the intentionally failing e2e spec and verifies that:
//   1. the binary exits with code 2, and
//   2. a screenshot was saved for each of the two failing tests.
// Skipped on Windows.
const runFailingProjectTest = function (buildAppExecutable, e2e) {
  if (shouldSkipProjectTest()) {
    console.log('skipping failing project test')

    return Promise.resolve()
  }

  console.log('running failing project test')

  // both failures must have produced a screenshot under this folder
  const shotsDir = path.join(e2e, 'cypress', 'screenshots', 'simple_failing_spec.coffee')

  const verifyScreenshots = () => {
    return Promise.all([
      fs.statAsync(path.join(shotsDir, 'simple failing spec -- fails1 (failed).png')),
      fs.statAsync(path.join(shotsDir, 'simple failing spec -- fails2 (failed).png')),
    ])
  }

  const spawn = () => {
    return new Promise((resolve, reject) => {
      // drop CYPRESS_INTERNAL_ENV so the binary runs like a real install
      const env = _.omit(process.env, 'CYPRESS_INTERNAL_ENV')

      const args = [
        `--run-project=${e2e}`,
        `--spec=${e2e}/cypress/integration/simple_failing_spec.coffee`,
      ]

      if (verify.needsSandbox()) {
        args.push('--no-sandbox')
      }

      return cp.spawn(buildAppExecutable, args, { stdio: 'inherit', env })
      .on('exit', (code) => {
        return code === 2
          ? resolve()
          : reject(new Error(`running project tests failed with: '${code}' errors.`))
      })
    })
  }

  return spawn().then(verifyScreenshots)
}
const test = function (buildAppExecutable) {
Fixtures.scaffold()
const e2e = Fixtures.projectPath('e2e')
return runSmokeTest(buildAppExecutable)
.then(() => {
return runProjectTest(buildAppExecutable, e2e)
}).then(() => {
return runFailingProjectTest(buildAppExecutable, e2e)
}).then(() => {
return Fixtures.remove()
})
}
module.exports = {
test,
}
-109
View File
@@ -1,109 +0,0 @@
minimist = require("minimist")
Promise = require("bluebird")
la = require("lazy-ass")
check = require("check-more-types")
fs = require("fs")
path = require("path")
awspublish = require('gulp-awspublish')
rename = require('gulp-rename')
gulpDebug = require('gulp-debug')
gulp = require("gulp")
human = require("human-interval")
R = require("ramda")
uploadUtils = require("./util/upload")
## the package is always uploaded under a fixed tgz name
npmPackageExtension = ".tgz"
uploadFileName = "cypress.tgz"

isNpmPackageFile = check.extension(npmPackageExtension)

# the package tgz file will be uploaded into unique folder
# in our case something like this
# https://cdn.cypress.io/beta/npm/<version>/<some unique hash>/cypress.tgz
rootFolder = "beta"
npmFolder = "npm"

## public CDN url for the uploaded package tgz
getCDN = ({version, hash, filename}) ->
  la(check.semver(version), 'invalid version', version)
  la(check.unemptyString(hash), 'missing hash', hash)
  la(check.unemptyString(filename), 'missing filename', filename)
  la(isNpmPackageFile(filename), 'wrong extension for file', filename)
  url = uploadUtils.getUploadUrl()
  la(check.url(url), "could not get upload url", url)
  [url, rootFolder, npmFolder, version, hash, filename].join("/")

## upload folder; the trailing null in join produces a trailing slash
getUploadDirName = (options) ->
  la(check.unemptyString(options.version), 'missing version', options)
  la(check.unemptyString(options.hash), 'missing hash', options)
  dir = [rootFolder, npmFolder, options.version, options.hash, null].join("/")
  dir

## renames and publishes the tgz to S3 with a no-cache header
uploadFile = (options) ->
  new Promise (resolve, reject) ->
    publisher = uploadUtils.getPublisher()
    headers = {}
    headers["Cache-Control"] = "no-cache"

    gulp.src(options.file)
    .pipe rename (p) =>
      p.basename = path.basename(uploadFileName, npmPackageExtension)
      p.dirname = getUploadDirName(options)
      console.log("renaming upload to", p.dirname, p.basename)
      la(check.unemptyString(p.basename), "missing basename")
      la(check.unemptyString(p.dirname), "missing dirname")
      p
    .pipe gulpDebug()
    .pipe publisher.publish(headers)
    .pipe awspublish.reporter()
    .on "error", reject
    .on "end", resolve

## CLI entry: parses --file/--version/--hash, uploads, prints the CDN url
uploadNpmPackage = (args = []) ->
  console.log(args)
  options = minimist(args, {
    string: ["version", "file", "hash"],
    alias: {
      version: "v",
      file: "f",
      hash: "h"
    }
  })
  console.log("Upload NPM package options")
  console.log(options)
  la(check.unemptyString(options.file), "missing file to upload", options)
  la(isNpmPackageFile(options.file),
    "invalid file to upload extension", options.file)

  if not options.hash
    options.hash = uploadUtils.formHashFromEnvironment()

  la(check.unemptyString(options.hash), "missing hash to give", options)
  la(check.unemptyString(options.version), "missing version", options)
  la(fs.existsSync(options.file), "cannot find file", options.file)

  uploadFile(options)
  .then () ->
    cdnUrl = getCDN({
      version: options.version,
      hash: options.hash,
      filename: uploadFileName
    })
    console.log("NPM package can be installed using URL")
    console.log("npm install %s", cdnUrl)
    cdnUrl
  .then uploadUtils.saveUrl("npm-package-url.json")
  # for now disable purging from CDN cache
  # because each upload should be unique by hash
  # .then R.tap(uploadUtils.purgeCache)

module.exports = {
  uploadNpmPackage,
  getCDN
}

## allow running directly from the command line
if not module.parent
  uploadNpmPackage(process.argv)
+122
View File
@@ -0,0 +1,122 @@
const minimist = require('minimist')
const Promise = require('bluebird')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const path = require('path')
const awspublish = require('gulp-awspublish')
const rename = require('gulp-rename')
const gulpDebug = require('gulp-debug')
const gulp = require('gulp')
const uploadUtils = require('./util/upload')
// the package is always uploaded under this fixed tgz name
const npmPackageExtension = '.tgz'
const uploadFileName = 'cypress.tgz'

// predicate: filename has the .tgz extension
const isNpmPackageFile = check.extension(npmPackageExtension)

// the package tgz file will be uploaded into unique folder
// in our case something like this
// https://cdn.cypress.io/beta/npm/<version>/<some unique hash>/cypress.tgz
const rootFolder = 'beta'
const npmFolder = 'npm'
// Builds the public CDN url for an uploaded npm package tgz:
//   <uploadUrl>/beta/npm/<version>/<hash>/cypress.tgz
const getCDN = function ({ version, hash, filename }) {
  la(check.semver(version), 'invalid version', version)
  la(check.unemptyString(hash), 'missing hash', hash)
  la(check.unemptyString(filename), 'missing filename', filename)
  la(isNpmPackageFile(filename), 'wrong extension for file', filename)

  const url = uploadUtils.getUploadUrl()

  la(check.url(url), 'could not get upload url', url)

  return [url, rootFolder, npmFolder, version, hash, filename].join('/')
}

// Upload folder for a given version+hash; the trailing `null` in the join
// serializes as an empty string, yielding a trailing slash:
//   beta/npm/<version>/<hash>/
const getUploadDirName = function (options) {
  la(check.unemptyString(options.version), 'missing version', options)
  la(check.unemptyString(options.hash), 'missing hash', options)

  return [rootFolder, npmFolder, options.version, options.hash, null].join('/')
}
// Publishes options.file to S3 via gulp-awspublish, renaming it on the fly
// to <getUploadDirName(options)>/cypress.tgz with a no-cache header.
// Resolves when the stream ends, rejects on stream error.
const uploadFile = (options) => {
  return new Promise((resolve, reject) => {
    const publisher = uploadUtils.getPublisher()
    const headers = {}

    // never serve a stale copy of the artifact
    headers['Cache-Control'] = 'no-cache'

    return gulp.src(options.file)
    .pipe(rename((p) => {
      p.basename = path.basename(uploadFileName, npmPackageExtension)
      p.dirname = getUploadDirName(options)
      console.log('renaming upload to', p.dirname, p.basename)
      la(check.unemptyString(p.basename), 'missing basename')
      la(check.unemptyString(p.dirname), 'missing dirname')

      return p
    })).pipe(gulpDebug())
    .pipe(publisher.publish(headers))
    .pipe(awspublish.reporter())
    .on('error', reject)
    .on('end', resolve)
  })
}
// CLI entry point: parses --file/--version/--hash (aliases -f/-v/-h),
// uploads the tgz, then prints the resulting CDN url and persists it
// via uploadUtils.saveUrl. When --hash is absent it falls back to
// uploadUtils.formHashFromEnvironment().
const uploadNpmPackage = function (args = []) {
  console.log(args)

  const options = minimist(args, {
    string: ['version', 'file', 'hash'],
    alias: {
      version: 'v',
      file: 'f',
      hash: 'h',
    },
  })

  console.log('Upload NPM package options')
  console.log(options)

  la(check.unemptyString(options.file), 'missing file to upload', options)
  la(isNpmPackageFile(options.file),
    'invalid file to upload extension', options.file)

  if (!options.hash) {
    options.hash = uploadUtils.formHashFromEnvironment()
  }

  la(check.unemptyString(options.hash), 'missing hash to give', options)
  la(check.unemptyString(options.version), 'missing version', options)
  la(fs.existsSync(options.file), 'cannot find file', options.file)

  return uploadFile(options)
  .then(() => {
    const cdnUrl = getCDN({
      version: options.version,
      hash: options.hash,
      filename: uploadFileName,
    })

    console.log('NPM package can be installed using URL')
    console.log('npm install %s', cdnUrl)

    return cdnUrl
  }).then(uploadUtils.saveUrl('npm-package-url.json'))
}
// NOTE: purging the CDN cache after upload is intentionally disabled,
// because each upload lands under a unique hash folder. If re-enabled, the
// step belongs at the end of the promise chain inside `uploadNpmPackage`:
// .then(R.tap(uploadUtils.purgeCache))
module.exports = {
  uploadNpmPackage,
  getCDN,
}

// allow running directly: `node upload-npm-package.js --file ... --version ...`
if (!module.parent) {
  uploadNpmPackage(process.argv)
}
-168
View File
@@ -1,168 +0,0 @@
minimist = require("minimist")
Promise = require("bluebird")
la = require("lazy-ass")
check = require("check-more-types")
fs = require("fs")
path = require("path")
awspublish = require('gulp-awspublish')
rename = require('gulp-rename')
gulpDebug = require('gulp-debug')
gulp = require("gulp")
human = require("human-interval")
R = require("ramda")
hasha = require('hasha')
uploadUtils = require("./util/upload")
s3helpers = require("./s3-api").s3helpers
# we zip the binary on every platform and upload under same name
binaryExtension = ".zip"
uploadFileName = "cypress.zip"

isBinaryFile = check.extension(binaryExtension)

rootFolder = "beta"
folder = "binary"

# the binary will be uploaded into unique folder
# in our case something like this
# https://cdn.cypress.io/desktop/binary/0.20.2/<platform>/<some unique version info>/cypress.zip
getCDN = ({version, hash, filename, platform}) ->
  la(check.semver(version), 'invalid version', version)
  la(check.unemptyString(hash), 'missing hash', hash)
  la(check.unemptyString(filename), 'missing filename', filename)
  la(isBinaryFile(filename), 'wrong extension for file', filename)
  la(check.unemptyString(platform), 'missing platform', platform)
  cdnUrl = uploadUtils.getUploadUrl()
  la(check.url(cdnUrl), "could not get cdn url", cdnUrl)
  [cdnUrl, rootFolder, folder, version, platform, hash, filename].join("/")

# returns folder that contains beta (unreleased) binaries for given version
#
getUploadVersionDirName = (options) ->
  la(check.unemptyString(options.version), 'missing version', options)
  dir = [rootFolder, folder, options.version].join("/")
  dir

## version folder narrowed to one platform-arch
getUploadDirForPlatform = (options, platformArch) ->
  la(uploadUtils.isValidPlatformArch(platformArch),
    'missing or invalid platformArch', platformArch)
  versionDir = getUploadVersionDirName(options)
  la(check.unemptyString(versionDir), 'could not form folder from', options)
  dir = [versionDir, platformArch].join("/")
  dir

## full upload folder; trailing null in join yields a trailing slash
getUploadDirName = (options) ->
  la(check.unemptyString(options.hash), 'missing hash', options)
  uploadFolder = getUploadDirForPlatform(options, options.platformArch)
  la(check.unemptyString(uploadFolder), 'could not form folder from', options)
  dir = [uploadFolder, options.hash, null].join("/")
  dir

## publishes options.file to S3 as cypress.zip; resolves with the S3 key
uploadFile = (options) ->
  new Promise (resolve, reject) ->
    publisher = uploadUtils.getPublisher()
    headers = {}
    headers["Cache-Control"] = "no-cache"
    key = null

    gulp.src(options.file)
    .pipe rename (p) =>
      p.basename = path.basename(uploadFileName, binaryExtension)
      p.dirname = getUploadDirName(options)
      console.log("renaming upload to", p.dirname, p.basename)
      la(check.unemptyString(p.basename), "missing basename")
      la(check.unemptyString(p.dirname), "missing dirname")
      key = p.dirname + uploadFileName
      p
    .pipe gulpDebug()
    .pipe publisher.publish(headers)
    .pipe awspublish.reporter()
    .on "error", reject
    .on "end", () -> resolve(key)

## stores the file's checksum and size as S3 user metadata on the object
## NOTE(review): the log line says SHA256 but the algorithm is sha512
setChecksum = (filename, key) =>
  console.log('setting checksum for file %s', filename)
  console.log('on s3 object %s', key)
  la(check.unemptyString(filename), 'expected filename', filename)
  la(check.unemptyString(key), 'expected uploaded S3 key', key)
  checksum = hasha.fromFileSync(filename, { algorithm: 'sha512' })
  size = fs.statSync(filename).size
  console.log('SHA256 checksum %s', checksum)
  console.log('size', size)
  aws = uploadUtils.getS3Credentials()
  s3 = s3helpers.makeS3(aws)
  # S3 object metadata can only have string values
  metadata = {
    checksum,
    size: String(size)
  }
  # by default s3.copyObject does not preserve ACL when copying
  # thus we need to reset it for our public files
  s3helpers.setUserMetadata(aws.bucket, key, metadata,
    'application/zip', 'public-read', s3)

## CLI entry: upload binary, attach checksum metadata, print + save CDN url
uploadUniqueBinary = (args = []) ->
  options = minimist(args, {
    string: ["version", "file", "hash", "platform"],
    alias: {
      version: "v",
      file: "f",
      hash: "h"
    }
  })
  console.log("Upload unique binary options")
  pickOptions = R.pick(["file", "version", "hash"])
  console.log(pickOptions(options))
  la(check.unemptyString(options.file), "missing file to upload", options)
  la(isBinaryFile(options.file),
    "invalid file to upload extension", options.file)
  if not options.hash
    options.hash = uploadUtils.formHashFromEnvironment()
  la(check.unemptyString(options.hash), "missing hash to give", options)
  la(check.unemptyString(options.version), "missing version", options)
  la(fs.existsSync(options.file), "cannot find file", options.file)
  platform = options.platform ? process.platform
  options.platformArch = uploadUtils.getUploadNameByOsAndArch(platform)
  uploadFile(options)
  .then (key) ->
    setChecksum(options.file, key)
  .then () ->
    cdnUrl = getCDN({
      version: options.version,
      hash: options.hash,
      filename: uploadFileName
      platform: options.platformArch
    })
    console.log("Binary can be downloaded using URL")
    console.log(cdnUrl)
    cdnUrl
  .then uploadUtils.saveUrl("binary-url.json")

module.exports = {
  getUploadDirName,
  getUploadDirForPlatform,
  uploadUniqueBinary,
  getCDN
}

## allow running directly from the command line
if not module.parent
  uploadUniqueBinary(process.argv)
+198
View File
@@ -0,0 +1,198 @@
const minimist = require('minimist')
const Promise = require('bluebird')
const la = require('lazy-ass')
const check = require('check-more-types')
const fs = require('fs')
const path = require('path')
const awspublish = require('gulp-awspublish')
const rename = require('gulp-rename')
const gulpDebug = require('gulp-debug')
const gulp = require('gulp')
const R = require('ramda')
const hasha = require('hasha')
const uploadUtils = require('./util/upload')
const {
s3helpers,
} = require('./s3-api')
// we zip the binary on every platform and upload under same name
const binaryExtension = '.zip'
const uploadFileName = 'cypress.zip'

// predicate: filename has the .zip extension
const isBinaryFile = check.extension(binaryExtension)

const rootFolder = 'beta'
const folder = 'binary'

// the binary will be uploaded into unique folder
// in our case something like this
// https://cdn.cypress.io/desktop/binary/0.20.2/<platform>/<some unique version info>/cypress.zip
// Public CDN url of an uploaded binary:
//   <uploadUrl>/beta/binary/<version>/<platform>/<hash>/cypress.zip
const getCDN = function ({ version, hash, filename, platform }) {
  la(check.semver(version), 'invalid version', version)
  la(check.unemptyString(hash), 'missing hash', hash)
  la(check.unemptyString(filename), 'missing filename', filename)
  la(isBinaryFile(filename), 'wrong extension for file', filename)
  la(check.unemptyString(platform), 'missing platform', platform)

  const cdnUrl = uploadUtils.getUploadUrl()

  la(check.url(cdnUrl), 'could not get cdn url', cdnUrl)

  return [cdnUrl, rootFolder, folder, version, platform, hash, filename].join('/')
}

// Folder holding beta (unreleased) binaries for a version:
//   beta/binary/<version>
const getUploadVersionDirName = function (options) {
  la(check.unemptyString(options.version), 'missing version', options)

  return [rootFolder, folder, options.version].join('/')
}

// Version folder narrowed to one platform-arch:
//   beta/binary/<version>/<platformArch>
const getUploadDirForPlatform = function (options, platformArch) {
  la(uploadUtils.isValidPlatformArch(platformArch),
    'missing or invalid platformArch', platformArch)

  const versionDir = getUploadVersionDirName(options)

  la(check.unemptyString(versionDir), 'could not form folder from', options)

  return [versionDir, platformArch].join('/')
}

// Full upload folder; the trailing `null` in the join serializes as an
// empty string, yielding a trailing slash:
//   beta/binary/<version>/<platformArch>/<hash>/
const getUploadDirName = function (options) {
  la(check.unemptyString(options.hash), 'missing hash', options)

  const uploadFolder = getUploadDirForPlatform(options, options.platformArch)

  la(check.unemptyString(uploadFolder), 'could not form folder from', options)

  return [uploadFolder, options.hash, null].join('/')
}
// Publishes options.file to S3 as <getUploadDirName(options)>/cypress.zip
// with a no-cache header. Resolves with the uploaded S3 key (captured in
// the rename callback) so the caller can attach checksum metadata to it.
const uploadFile = (options) => {
  return new Promise((resolve, reject) => {
    const publisher = uploadUtils.getPublisher()
    const headers = {}

    headers['Cache-Control'] = 'no-cache'

    // set inside the rename callback; getUploadDirName ends with a slash,
    // so plain concatenation forms the full key
    let key = null

    return gulp.src(options.file)
    .pipe(rename((p) => {
      p.basename = path.basename(uploadFileName, binaryExtension)
      p.dirname = getUploadDirName(options)
      console.log('renaming upload to', p.dirname, p.basename)
      la(check.unemptyString(p.basename), 'missing basename')
      la(check.unemptyString(p.dirname), 'missing dirname')
      key = p.dirname + uploadFileName

      return p
    })).pipe(gulpDebug())
    .pipe(publisher.publish(headers))
    .pipe(awspublish.reporter())
    .on('error', reject)
    .on('end', () => {
      return resolve(key)
    })
  })
}
// Stores the file's sha512 checksum and byte size as S3 user metadata on
// the already-uploaded object at `key`, so downloads can later be verified.
// Returns the promise from s3helpers.setUserMetadata.
const setChecksum = (filename, key) => {
  console.log('setting checksum for file %s', filename)
  console.log('on s3 object %s', key)

  la(check.unemptyString(filename), 'expected filename', filename)
  la(check.unemptyString(key), 'expected uploaded S3 key', key)

  const checksum = hasha.fromFileSync(filename, { algorithm: 'sha512' })
  const {
    size,
  } = fs.statSync(filename)

  // fix: the algorithm above is sha512 — the previous log message
  // incorrectly said "SHA256"
  console.log('SHA512 checksum %s', checksum)
  console.log('size', size)

  const aws = uploadUtils.getS3Credentials()
  const s3 = s3helpers.makeS3(aws)

  // S3 object metadata can only have string values
  const metadata = {
    checksum,
    size: String(size),
  }

  // by default s3.copyObject does not preserve ACL when copying
  // thus we need to reset it for our public files
  return s3helpers.setUserMetadata(aws.bucket, key, metadata,
    'application/zip', 'public-read', s3)
}
// CLI entry point: parses --file/--version/--hash/--platform, uploads the
// zipped binary into its unique folder, stores checksum metadata on the S3
// object, then prints the CDN url and persists it via uploadUtils.saveUrl.
const uploadUniqueBinary = function (args = []) {
  const options = minimist(args, {
    string: ['version', 'file', 'hash', 'platform'],
    alias: {
      version: 'v',
      file: 'f',
      hash: 'h',
    },
  })

  console.log('Upload unique binary options')

  // log only the interesting options
  const pickOptions = R.pick(['file', 'version', 'hash'])

  console.log(pickOptions(options))

  la(check.unemptyString(options.file), 'missing file to upload', options)
  la(isBinaryFile(options.file),
    'invalid file to upload extension', options.file)

  // hash falls back to uploadUtils.formHashFromEnvironment() when not given
  if (!options.hash) {
    options.hash = uploadUtils.formHashFromEnvironment()
  }

  la(check.unemptyString(options.hash), 'missing hash to give', options)
  la(check.unemptyString(options.version), 'missing version', options)
  la(fs.existsSync(options.file), 'cannot find file', options.file)

  // default to the current platform (translated from CoffeeScript's `?`)
  const platform = options.platform != null ? options.platform : process.platform

  options.platformArch = uploadUtils.getUploadNameByOsAndArch(platform)

  return uploadFile(options)
  .then((key) => {
    return setChecksum(options.file, key)
  }).then(() => {
    const cdnUrl = getCDN({
      version: options.version,
      hash: options.hash,
      filename: uploadFileName,
      platform: options.platformArch,
    })

    console.log('Binary can be downloaded using URL')
    console.log(cdnUrl)

    return cdnUrl
  }).then(uploadUtils.saveUrl('binary-url.json'))
}
module.exports = {
  getUploadDirName,
  getUploadDirForPlatform,
  uploadUniqueBinary,
  getCDN,
}

// allow running directly: `node upload-unique-binary.js --file ... --version ...`
if (!module.parent) {
  uploadUniqueBinary(process.argv)
}
-166
View File
@@ -1,166 +0,0 @@
awspublish = require('gulp-awspublish')
rename = require('gulp-rename')
gulpDebug = require('gulp-debug')
fs = require("fs-extra")
cp = require("child_process")
path = require("path")
gulp = require("gulp")
human = require("human-interval")
Promise = require("bluebird")
meta = require("./meta")
la = require("lazy-ass")
check = require("check-more-types")
uploadUtils = require("./util/upload")
fs = Promise.promisifyAll(fs)

## TODO: refactor this
# system expects desktop application to be inside a file
# with this name
zipName = "cypress.zip"

module.exports = {
  zipName

  getPublisher: ->
    uploadUtils.getPublisher(@getAwsObj)

  getAwsObj: ->
    uploadUtils.getS3Credentials()

  # returns desktop folder for a given folder without platform
  # something like desktop/0.20.1
  getUploadeVersionFolder: (aws, version) ->
    la(check.unemptyString(aws.folder), 'aws object is missing desktop folder', aws.folder)
    dirName = [aws.folder, version].join("/")
    dirName

  ## full S3 object name: <folder>/<version>/<platformArch>/<name>
  getFullUploadName: ({folder, version, platformArch, name}) ->
    la(check.unemptyString(folder), 'missing folder', folder)
    la(check.semver(version), 'missing or invalid version', version)
    la(check.unemptyString(name), 'missing file name', name)
    la(uploadUtils.isValidPlatformArch(platformArch),
      'invalid platform and arch', platformArch)
    fileName = [folder, version, platformArch, name].join("/")
    fileName

  # store uploaded application in subfolders by platform and version
  # something like desktop/0.20.1/darwin-x64/
  getUploadDirName: ({version, platform}) ->
    aws = @getAwsObj()
    platformArch = uploadUtils.getUploadNameByOsAndArch(platform)
    versionFolder = @getUploadeVersionFolder(aws, version)
    dirName = [versionFolder, platformArch, null].join("/")
    console.log("target directory %s", dirName)
    dirName

  getManifestUrl: (folder, version, uploadOsName) ->
    url = uploadUtils.getUploadUrl()
    la(check.url(url), "could not get upload url", url)
    {
      url: [url, folder, version, uploadOsName, zipName].join("/")
    }

  ## manifest consumed by the desktop app updater
  getRemoteManifest: (folder, version) ->
    la(check.unemptyString(folder), 'missing manifest folder', folder)
    la(check.semver(version), 'invalid manifest version', version)
    getUrl = @getManifestUrl.bind(null, folder, version)
    {
      name: "Cypress"
      version: version
      packages: {
        ## keep these for compatibility purposes
        ## although they are now deprecated
        mac: getUrl("darwin-x64")
        win: getUrl("win32-ia32")
        linux64: getUrl("linux-x64")

        ## start adding the new ones
        ## using node's platform
        darwin: getUrl("darwin-x64")
        win32: getUrl("win32-ia32")
        linux: getUrl("linux-x64")

        ## the new-new names that use platform and arch as is
        "darwin-x64": getUrl("darwin-x64")
        "linux-x64": getUrl("linux-x64")
        "win32-ia32": getUrl("win32-ia32")
        "win32-x64": getUrl("win32-x64")
      }
    }

  createRemoteManifest: (folder, version) ->
    obj = @getRemoteManifest(folder, version)
    src = path.resolve("manifest.json")
    fs.outputJsonAsync(src, obj).return(src)

  ## writes manifest.json locally, publishes it to S3, removes the local copy
  s3Manifest: (version) ->
    publisher = @getPublisher()
    aws = @getAwsObj()
    headers = {}
    headers["Cache-Control"] = "no-cache"
    manifest = null

    new Promise (resolve, reject) =>
      @createRemoteManifest(aws.folder, version)
      .then (src) ->
        manifest = src
        gulp.src(src)
        .pipe rename (p) ->
          p.dirname = aws.folder + "/" + p.dirname
          p
        .pipe gulpDebug()
        .pipe publisher.publish(headers)
        .pipe awspublish.reporter()
        .on "error", reject
        .on "end", resolve
    .finally ->
      fs.removeAsync(manifest)

  ## uploads the zip to S3 and purges the old copy from the CDN cache
  toS3: ({zipFile, version, platform}) ->
    console.log("#uploadToS3 ⏳")
    la(check.unemptyString(version), "expected version string", version)
    la(check.unemptyString(zipFile), "expected zip filename", zipFile)
    la(check.extension("zip", zipFile),
      "zip filename should end with .zip", zipFile)
    la(meta.isValidPlatform(platform), "invalid platform", platform)
    console.log("zip filename #{zipFile}")

    if !fs.existsSync(zipFile)
      throw new Error("Cannot find zip file #{zipFile}")

    upload = =>
      new Promise (resolve, reject) =>
        publisher = @getPublisher()
        headers = {}
        headers["Cache-Control"] = "no-cache"

        gulp.src(zipFile)
        .pipe rename (p) =>
          # rename to standard filename zipName
          p.basename = path.basename(zipName, p.extname)
          p.dirname = @getUploadDirName({version, platform})
          p
        # NOTE(review): `debug` is not defined in this file — presumably it
        # should be gulpDebug(); confirm before reviving this code
        .pipe debug()
        .pipe publisher.publish(headers)
        .pipe awspublish.reporter()
        .on "error", reject
        .on "end", resolve

    upload()
    .then ->
      uploadUtils.purgeDesktopAppFromCache({version, platform, zipName})
}
+191
View File
@@ -0,0 +1,191 @@
const awspublish = require('gulp-awspublish')
const rename = require('gulp-rename')
const gulpDebug = require('gulp-debug')
let fs = require('fs-extra')
const path = require('path')
const gulp = require('gulp')
const Promise = require('bluebird')
const meta = require('./meta')
const la = require('lazy-ass')
const check = require('check-more-types')
const uploadUtils = require('./util/upload')
fs = Promise.promisifyAll(fs)
// TODO: refactor this
// system expects desktop application to be inside a file
// with this name
const zipName = 'cypress.zip'

// Uploads the zipped desktop application to S3 under
// <folder>/<version>/<platform-arch>/cypress.zip and maintains the
// manifest.json that maps platform names to download urls.
module.exports = {
  zipName,

  // gulp-awspublish publisher configured with our S3 credentials
  getPublisher () {
    return uploadUtils.getPublisher(this.getAwsObj)
  },

  getAwsObj () {
    return uploadUtils.getS3Credentials()
  },

  // returns desktop folder for a given folder without platform
  // something like desktop/0.20.1
  // NOTE(review): the "Uploade" typo is part of the public name —
  // renaming would break external callers
  getUploadeVersionFolder (aws, version) {
    la(check.unemptyString(aws.folder), 'aws object is missing desktop folder', aws.folder)

    const dirName = [aws.folder, version].join('/')

    return dirName
  },

  // full S3 key: <folder>/<version>/<platformArch>/<name>
  getFullUploadName ({ folder, version, platformArch, name }) {
    la(check.unemptyString(folder), 'missing folder', folder)
    la(check.semver(version), 'missing or invalid version', version)
    la(check.unemptyString(name), 'missing file name', name)
    la(uploadUtils.isValidPlatformArch(platformArch),
      'invalid platform and arch', platformArch)

    const fileName = [folder, version, platformArch, name].join('/')

    return fileName
  },

  // store uploaded application in subfolders by platform and version
  // something like desktop/0.20.1/darwin-x64/
  getUploadDirName ({ version, platform }) {
    const aws = this.getAwsObj()
    const platformArch = uploadUtils.getUploadNameByOsAndArch(platform)
    const versionFolder = this.getUploadeVersionFolder(aws, version)
    // the trailing null serializes to an empty segment, producing the
    // trailing "/" that marks this as a directory prefix
    const dirName = [versionFolder, platformArch, null].join('/')

    console.log('target directory %s', dirName)

    return dirName
  },

  // returns { url } pointing at the zip for the given os name on the CDN
  getManifestUrl (folder, version, uploadOsName) {
    const url = uploadUtils.getUploadUrl()

    la(check.url(url), 'could not get upload url', url)

    return {
      url: [url, folder, version, uploadOsName, zipName].join('/'),
    }
  },

  // builds the manifest.json contents: legacy, node-platform, and
  // platform-arch keys all pointing at their download urls
  getRemoteManifest (folder, version) {
    la(check.unemptyString(folder), 'missing manifest folder', folder)
    la(check.semver(version), 'invalid manifest version', version)

    const getUrl = this.getManifestUrl.bind(null, folder, version)

    return {
      name: 'Cypress',
      version,
      packages: {
        // keep these for compatibility purposes
        // although they are now deprecated
        mac: getUrl('darwin-x64'),
        win: getUrl('win32-ia32'),
        linux64: getUrl('linux-x64'),
        // start adding the new ones
        // using node's platform
        darwin: getUrl('darwin-x64'),
        win32: getUrl('win32-ia32'),
        linux: getUrl('linux-x64'),
        // the new-new names that use platform and arch as is
        'darwin-x64': getUrl('darwin-x64'),
        'linux-x64': getUrl('linux-x64'),
        'win32-ia32': getUrl('win32-ia32'),
        'win32-x64': getUrl('win32-x64'),
      },
    }
  },

  // writes manifest.json into the current working directory and
  // resolves with its absolute path
  createRemoteManifest (folder, version) {
    const obj = this.getRemoteManifest(folder, version)

    const src = path.resolve('manifest.json')

    return fs.outputJsonAsync(src, obj).return(src)
  },

  // builds manifest.json locally, publishes it to S3 with no-cache
  // headers, then removes the local temp file (even on failure)
  s3Manifest (version) {
    const publisher = this.getPublisher()

    const aws = this.getAwsObj()

    const headers = {}

    headers['Cache-Control'] = 'no-cache'

    let manifest = null

    return new Promise((resolve, reject) => {
      return this.createRemoteManifest(aws.folder, version)
      .then((src) => {
        manifest = src

        return gulp.src(src)
        .pipe(rename((p) => {
          p.dirname = `${aws.folder}/${p.dirname}`

          return p
        })).pipe(gulpDebug())
        .pipe(publisher.publish(headers))
        .pipe(awspublish.reporter())
        .on('error', reject)
        .on('end', resolve)
      })
    }).finally(() => {
      // clean up the temporary local manifest.json
      return fs.removeAsync(manifest)
    })
  },

  // uploads one platform's zip to S3 (renamed to the standard
  // cypress.zip) and purges its CDN cache entry afterwards
  toS3 ({ zipFile, version, platform }) {
    console.log('#uploadToS3 ⏳')
    la(check.unemptyString(version), 'expected version string', version)
    la(check.unemptyString(zipFile), 'expected zip filename', zipFile)
    la(check.extension('zip', zipFile),
      'zip filename should end with .zip', zipFile)
    la(meta.isValidPlatform(platform), 'invalid platform', platform)

    console.log(`zip filename ${zipFile}`)

    if (!fs.existsSync(zipFile)) {
      throw new Error(`Cannot find zip file ${zipFile}`)
    }

    const upload = () => {
      return new Promise((resolve, reject) => {
        const publisher = this.getPublisher()

        const headers = {}

        headers['Cache-Control'] = 'no-cache'

        return gulp.src(zipFile)
        .pipe(rename((p) => {
          // rename to standard filename zipName
          p.basename = path.basename(zipName, p.extname)
          p.dirname = this.getUploadDirName({ version, platform })

          return p
        })).pipe(gulpDebug())
        .pipe(publisher.publish(headers))
        .pipe(awspublish.reporter())
        .on('error', reject)
        .on('end', resolve)
      })
    }

    return upload()
    .then(() => {
      return uploadUtils.purgeDesktopAppFromCache({ version, platform, zipName })
    })
  },
}
-249
View File
@@ -1,249 +0,0 @@
_ = require("lodash")
fs = require("fs-extra")
cp = require("child_process")
path = require("path")
# we wrap glob to handle EMFILE error
glob = require("glob")
Promise = require("bluebird")
retry = require("bluebird-retry")
la = require("lazy-ass")
check = require("check-more-types")
execa = require("execa")
R = require("ramda")
os = require("os")
prettyMs = require("pretty-ms")
pluralize = require('pluralize')
debug = require("debug")("cypress:binary")
externalUtils = require("./3rd-party")
fs = Promise.promisifyAll(fs)
glob = Promise.promisify(glob)
# paths copied into dist for every package, in addition to its "files" globs
DEFAULT_PATHS = "package.json".split(" ")

# returns the path to the package.json inside the given package folder
pathToPackageJson = (packageFolder) ->
  la(check.unemptyString(packageFolder), "expected package path", packageFolder)
  path.join(packageFolder, "package.json")

# returns a function that runs the given CLI command via execa,
# echoing the invocation and normalizing failures into an Error
createCLIExecutable = (command) ->
  (args, cwd, env = {}) ->
    commandToExecute = "#{command} " + args.join(" ")
    console.log(commandToExecute)
    if cwd
      console.log("in folder:", cwd)
    la(check.maybe.string(cwd), "invalid CWD string", cwd)
    execa(command, args, { stdio: "inherit", cwd, env })
    # if everything is ok, resolve with nothing
    .then R.always(undefined)
    .catch (result) ->
      msg = "#{commandToExecute} failed with exit code: #{result.code}"
      throw new Error(msg)

yarn = createCLIExecutable('yarn')
npx = createCLIExecutable('npx')

# builds all packages (except the CLI) via lerna
runAllBuild = _.partial(npx, ["lerna", "run", "build-prod", "--ignore", "cli"])

# removes transpiled JS files in the original package folders
runAllCleanJs = _.partial(npx, ["lerna", "run", "clean-js", "--ignore", "cli"])

## @returns string[] with names of packages, e.g. ['runner', 'driver', 'server']
# NOTE(review): scriptName is never used — the check is hard-coded to
# json.scripts.build; confirm whether callers expect the parameter to matter
getPackagesWithScript = (scriptName) ->
  Promise.resolve(glob('./packages/*/package.json'))
  .map (pkgPath) ->
    fs.readJsonAsync(pkgPath)
    .then (json) ->
      if json.scripts?.build
        return path.basename(path.dirname(pkgPath))
  .filter(Boolean)

# copies every package folder (its default paths plus any "files"/"main"
# globs from package.json) into distDir, one file at a time
copyAllToDist = (distDir) ->
  # copies a single repo-relative path into dist, retrying on transient errors
  copyRelativePathToDist = (relative) ->
    dest = path.join(distDir, relative)
    retry ->
      console.log(relative, "->", dest)
      fs.copyAsync(relative, dest)

  copyPackage = (pkg) ->
    console.log('** copy package: %s **', pkg)
    ## copies the package to dist
    ## including the default paths
    ## and any specified in package.json files
    Promise.resolve(fs.readJsonAsync(pathToPackageJson(pkg)))
    .then (json) ->
      ## grab all the files that match "files" wildcards
      ## but without all negated files ("!src/**/*.spec.js" for example)
      ## and default included paths
      ## and convert to relative paths
      DEFAULT_PATHS
      .concat(json.files or [])
      .concat(json.main or [])
    .then (pkgFileMasks) ->
      debug("for pkg %s have the following file masks %o", pkg, pkgFileMasks)
      globOptions = {
        cwd: pkg, # search in the package folder
        absolute: false # and return relative file paths
        followSymbolicLinks: false # do not follow symlinks
      }
      externalUtils.globby(pkgFileMasks, globOptions)
    # we find paths like "src/main.js" wrt "packages/foo"
    # now we need to get the file path wrt current working directory
    # like "packages/foo/src/main.js" so when we copy
    # into the dist folder we get "<dist?/packages/foo/src/main.js"
    .map (foundFileRelativeToPackageFolder) ->
      path.join(pkg, foundFileRelativeToPackageFolder)
    .tap(debug)
    .map(copyRelativePathToDist, {concurrency: 1})

  ## fs-extra concurrency tests (copyPackage / copyRelativePathToDist)
  ## 1/1 41688
  ## 1/5 42218
  ## 1/10 42566
  ## 2/1 45041
  ## 2/2 43589
  ## 3/3 51399

  ## cp -R concurrency tests
  ## 1/1 65811

  started = new Date()

  fs.ensureDirAsync(distDir)
  .then ->
    glob("./packages/*")
    .map(copyPackage, {concurrency: 1})
  .then ->
    console.log("Finished Copying %dms", new Date() - started)
    console.log("")

# force-installs a single dependency inside the given package folder
forceNpmInstall = (packagePath, packageToInstall) ->
  console.log("Force installing %s", packageToInstall)
  console.log("in %s", packagePath)
  la(check.unemptyString(packageToInstall), "missing package to install")
  yarn(["install", "--force", packageToInstall], packagePath)

# strips the devDependencies section from the package.json on disk
removeDevDependencies = (packageFolder) ->
  packagePath = pathToPackageJson(packageFolder)
  console.log("removing devDependencies from %s", packagePath)
  fs.readJsonAsync(packagePath)
  .then (json) ->
    delete json.devDependencies
    fs.writeJsonAsync(packagePath, json, {spaces: 2})

# globs the given path, retrying after `delay` ms whenever we hit
# EMFILE (too many open files)
retryGlobbing = (pathToPackages, delay = 1000) ->
  retryGlob = ->
    glob(pathToPackages)
    .catch {code: "EMFILE"}, ->
      ## wait, then retry
      Promise
      .delay(delay)
      .then(retryGlob)

  retryGlob()

# installs all packages given a wildcard
# pathToPackages would be something like "C:\projects\cypress\dist\win32\packages\*"
npmInstallAll = (pathToPackages) ->
  console.log("npmInstallAll packages in #{pathToPackages}")

  started = new Date()

  # yarn-installs production deps for one package, retrying on EMFILE
  retryNpmInstall = (pkg) ->
    console.log("installing %s", pkg)
    console.log("NODE_ENV is %s", process.env.NODE_ENV)
    # force installing only PRODUCTION dependencies
    # https://docs.npmjs.com/cli/install
    npmInstall = _.partial(yarn, ["install", "--production"])
    npmInstall(pkg, {NODE_ENV: "production"})
    .catch {code: "EMFILE"}, ->
      Promise
      .delay(1000)
      .then ->
        retryNpmInstall(pkg)
    .catch (err) ->
      console.log(err, err.code)
      throw err

  printFolders = (folders) ->
    console.log("found %s", pluralize("folder", folders.length, true))

  ## only installs production dependencies
  retryGlobbing(pathToPackages)
  .tap(printFolders)
  .mapSeries (packageFolder) ->
    removeDevDependencies(packageFolder)
    .then ->
      retryNpmInstall(packageFolder)
  .then ->
    end = new Date()
    console.log("Finished NPM Installing", prettyMs(end - started))

# "foo/package.json" -> "foo"; any other path passes through unchanged
removePackageJson = (filename) ->
  if filename.endsWith("/package.json") then path.dirname(filename) else filename

# throws unless the glob found at least one file
ensureFoundSomething = (files) ->
  if files.length == 0
    throw new Error("Could not find any files")
  files

# "junction" links on Windows (no admin rights needed), "dir" elsewhere
symlinkType = () ->
  if os.platform() == "win32"
    "junction"
  else
    "dir"

# symlinks every dist package into node_modules/@packages/<name>
symlinkAll = (pathToDistPackages, pathTo) ->
  console.log("symlink these packages", pathToDistPackages)
  la(check.unemptyString(pathToDistPackages),
    "missing paths to dist packages", pathToDistPackages)

  # NOTE(review): baseDir and toBase are computed but never used
  baseDir = path.dirname(pathTo())
  toBase = path.relative.bind(null, baseDir)

  symlink = (pkg) ->
    # console.log(pkg, dist)
    ## strip off the initial './'
    ## ./packages/foo -> node_modules/@packages/foo
    pkg = removePackageJson(pkg)
    dest = pathTo("node_modules", "@packages", path.basename(pkg))
    relativeDest = path.relative(dest + '/..', pkg)
    type = symlinkType()
    console.log(relativeDest, "link ->", dest, "type", type)
    # FIXME(review): this passes the symlinkType *function* as the type
    # argument; the computed "type" above is unused — likely should pass type
    fs.ensureSymlinkAsync(relativeDest, dest, symlinkType)
    .catch((err) ->
      if not err.message.includes "EEXIST"
        throw err
    )

  glob(pathToDistPackages)
  .then(ensureFoundSomething)
  .map(symlink)

module.exports = {
  runAllBuild
  copyAllToDist
  npmInstallAll
  symlinkAll
  runAllCleanJs
  forceNpmInstall
  getPackagesWithScript
}

# demo entry point when this file is run directly
if not module.parent
  console.log("demo force install")
  forceNpmInstall("packages/server", "@ffmpeg-installer/win32-x64")
+293
View File
@@ -0,0 +1,293 @@
const _ = require('lodash')
let fs = require('fs-extra')
const path = require('path')
// we wrap glob to handle EMFILE error
let glob = require('glob')
const Promise = require('bluebird')
const retry = require('bluebird-retry')
const la = require('lazy-ass')
const check = require('check-more-types')
const execa = require('execa')
const R = require('ramda')
const os = require('os')
const prettyMs = require('pretty-ms')
const pluralize = require('pluralize')
const debug = require('debug')('cypress:binary')
const externalUtils = require('./3rd-party')
fs = Promise.promisifyAll(fs)
glob = Promise.promisify(glob)
// paths copied into dist for every package, in addition to its "files" globs
const DEFAULT_PATHS = 'package.json'.split(' ')
// Returns the path to the package.json inside the given package folder.
const pathToPackageJson = (packageFolder) => {
  la(check.unemptyString(packageFolder), 'expected package path', packageFolder)

  return path.join(packageFolder, 'package.json')
}
// Returns a function that runs `command args...` via execa, echoing the
// invocation (and working directory, if any) and turning a non-zero exit
// into a rejected Error.
const createCLIExecutable = (command) => {
  return (args, cwd, env = {}) => {
    const commandToExecute = `${command} ${args.join(' ')}`

    console.log(commandToExecute)
    if (cwd) {
      console.log('in folder:', cwd)
    }

    la(check.maybe.string(cwd), 'invalid CWD string', cwd)

    return execa(command, args, { stdio: 'inherit', cwd, env })
    // if everything is ok, resolve with nothing
    .then(R.always(undefined))
    .catch((result) => {
      throw new Error(`${commandToExecute} failed with exit code: ${result.code}`)
    })
  }
}
// CLI runners used by the build helpers below
const yarn = createCLIExecutable('yarn')
const npx = createCLIExecutable('npx')

// builds all packages (except the CLI) via lerna
const runAllBuild = _.partial(npx, ['lerna', 'run', 'build-prod', '--ignore', 'cli'])

// removes transpiled JS files in the original package folders
const runAllCleanJs = _.partial(npx, ['lerna', 'run', 'clean-js', '--ignore', 'cli'])
// @returns string[] with names of packages, e.g. ['runner', 'driver', 'server']
// that declare the given npm script in their package.json
const getPackagesWithScript = (scriptName) => {
  return Promise.resolve(glob('./packages/*/package.json'))
  .map((pkgPath) => {
    return fs.readJsonAsync(pkgPath)
    .then((json) => {
      // bug fix: previously hard-coded to json.scripts.build, silently
      // ignoring the scriptName argument
      if (json.scripts && json.scripts[scriptName]) {
        return path.basename(path.dirname(pkgPath))
      }
    })
  }).filter(Boolean)
}
// Copies every package folder (its default paths plus any "files"/"main"
// globs from package.json) into distDir, one file at a time.
const copyAllToDist = function (distDir) {
  // copies a single repo-relative path into dist, retrying on transient errors
  const copyRelativePathToDist = function (relative) {
    const dest = path.join(distDir, relative)

    return retry(() => {
      console.log(relative, '->', dest)

      return fs.copyAsync(relative, dest)
    })
  }

  const copyPackage = function (pkg) {
    console.log('** copy package: %s **', pkg)

    // copies the package to dist
    // including the default paths
    // and any specified in package.json files
    return Promise.resolve(fs.readJsonAsync(pathToPackageJson(pkg)))
    .then((json) => {
      // grab all the files that match "files" wildcards
      // but without all negated files ("!src/**/*.spec.js" for example)
      // and default included paths
      // and convert to relative paths
      return DEFAULT_PATHS
      .concat(json.files || [])
      .concat(json.main || [])
    }).then((pkgFileMasks) => {
      debug('for pkg %s have the following file masks %o', pkg, pkgFileMasks)

      const globOptions = {
        cwd: pkg, // search in the package folder
        absolute: false, // and return relative file paths
        followSymbolicLinks: false, // do not follow symlinks
      }

      return externalUtils.globby(pkgFileMasks, globOptions)
    }).map((foundFileRelativeToPackageFolder) => {
      // found paths are relative to the package folder; rebase onto the
      // repo root so the copy lands at <dist>/packages/foo/...
      return path.join(pkg, foundFileRelativeToPackageFolder)
    })
    .tap(debug)
    .map(copyRelativePathToDist, { concurrency: 1 })
  }

  // fs-extra concurrency tests (copyPackage / copyRelativePathToDist)
  // 1/1 41688
  // 1/5 42218
  // 1/10 42566
  // 2/1 45041
  // 2/2 43589
  // 3/3 51399

  // cp -R concurrency tests
  // 1/1 65811

  const started = new Date()

  return fs.ensureDirAsync(distDir)
  .then(() => {
    return glob('./packages/*')
    .map(copyPackage, { concurrency: 1 })
  }).then(() => {
    console.log('Finished Copying %dms', new Date() - started)

    return console.log('')
  })
}
// Force-installs a single dependency inside the given package folder.
const forceNpmInstall = (packagePath, packageToInstall) => {
  console.log('Force installing %s', packageToInstall)
  console.log('in %s', packagePath)

  la(check.unemptyString(packageToInstall), 'missing package to install')

  return yarn(['install', '--force', packageToInstall], packagePath)
}
// Strips the devDependencies section from a package folder's package.json
// on disk (written back with 2-space indentation).
const removeDevDependencies = (packageFolder) => {
  const packagePath = pathToPackageJson(packageFolder)

  console.log('removing devDependencies from %s', packagePath)

  return fs.readJsonAsync(packagePath)
  .then((json) => {
    const { devDependencies, ...rest } = json

    return fs.writeJsonAsync(packagePath, rest, { spaces: 2 })
  })
}
// Globs pathToPackages, retrying after `delay` ms whenever the glob fails
// with EMFILE (too many open files).
const retryGlobbing = (pathToPackages, delay = 1000) => {
  const attempt = () => {
    return glob(pathToPackages)
    .catch({ code: 'EMFILE' }, () => {
      // wait, then retry
      return Promise.delay(delay).then(attempt)
    })
  }

  return attempt()
}
// installs all packages given a wildcard
// pathToPackages would be something like "C:\projects\cypress\dist\win32\packages\*"
const npmInstallAll = function (pathToPackages) {
  console.log(`npmInstallAll packages in ${pathToPackages}`)

  const started = new Date()

  // yarn-installs production dependencies for one package,
  // retrying after a delay whenever we hit EMFILE
  const retryNpmInstall = function (pkg) {
    console.log('installing %s', pkg)
    console.log('NODE_ENV is %s', process.env.NODE_ENV)

    // force installing only PRODUCTION dependencies
    // https://docs.npmjs.com/cli/install
    const npmInstall = _.partial(yarn, ['install', '--production'])

    // second arg is the cwd, third is extra env for the yarn child process
    return npmInstall(pkg, { NODE_ENV: 'production' })
    .catch({ code: 'EMFILE' }, () => {
      return Promise
      .delay(1000)
      .then(() => {
        return retryNpmInstall(pkg)
      })
    }).catch((err) => {
      // log and rethrow any other failure
      console.log(err, err.code)
      throw err
    })
  }

  const printFolders = (folders) => {
    return console.log('found %s', pluralize('folder', folders.length, true))
  }

  // only installs production dependencies
  return retryGlobbing(pathToPackages)
  .tap(printFolders)
  .mapSeries((packageFolder) => {
    return removeDevDependencies(packageFolder)
    .then(() => {
      return retryNpmInstall(packageFolder)
    })
  }).then(() => {
    const end = new Date()

    return console.log('Finished NPM Installing', prettyMs(end - started))
  })
}
// "foo/package.json" -> "foo"; any other path passes through unchanged.
const removePackageJson = (filename) => {
  return filename.endsWith('/package.json') ? path.dirname(filename) : filename
}
// Passes the file list through, throwing if the glob found nothing.
const ensureFoundSomething = (files) => {
  if (!files.length) {
    throw new Error('Could not find any files')
  }

  return files
}
// "junction" links on Windows (creatable without admin rights),
// plain directory symlinks everywhere else.
const symlinkType = () => (os.platform() === 'win32' ? 'junction' : 'dir')
// Symlinks every dist package into node_modules/@packages/<name> so the
// packaged app can resolve @packages/* requires. `pathTo` builds paths
// inside the dist folder.
const symlinkAll = function (pathToDistPackages, pathTo) {
  console.log('symlink these packages', pathToDistPackages)
  la(check.unemptyString(pathToDistPackages),
    'missing paths to dist packages', pathToDistPackages)

  const symlink = function (pkg) {
    // strip off the initial './'
    // ./packages/foo -> node_modules/@packages/foo
    pkg = removePackageJson(pkg)

    const dest = pathTo('node_modules', '@packages', path.basename(pkg))
    const relativeDest = path.relative(`${dest}/..`, pkg)
    const type = symlinkType()

    console.log(relativeDest, 'link ->', dest, 'type', type)

    // bug fix: previously the symlinkType *function* was passed as the
    // third argument; fs-extra expects the type string ('dir'/'junction')
    return fs.ensureSymlinkAsync(relativeDest, dest, type)
    .catch((err) => {
      // a link that already exists is fine; rethrow anything else
      if (!err.message.includes('EEXIST')) {
        throw err
      }
    })
  }

  return glob(pathToDistPackages)
  .then(ensureFoundSomething)
  .map(symlink)
}
// public api used by the binary build scripts
module.exports = {
  runAllBuild,
  copyAllToDist,
  npmInstallAll,
  symlinkAll,
  runAllCleanJs,
  forceNpmInstall,
  getPackagesWithScript,
}

// demo entry point when this file is run directly: node packages.js
if (!module.parent) {
  console.log('demo force install')
  forceNpmInstall('packages/server', '@ffmpeg-installer/win32-x64')
}
-147
View File
@@ -1,147 +0,0 @@
_ = require("lodash")
path = require("path")
awspublish = require('gulp-awspublish')
human = require("human-interval")
la = require("lazy-ass")
check = require("check-more-types")
cp = require("child_process")
fse = require("fs-extra")
os = require("os")
Promise = require("bluebird")
{configFromEnvOrJsonFile, filenameToShellVariable} = require('@cypress/env-or-json-file')
konfig = require('../get-config')()
{ purgeCloudflareCache } = require('./purge-cloudflare-cache')
# resolves the CDN base url from config ("cdn_url"); throws if missing/invalid
getUploadUrl = () ->
  url = konfig("cdn_url")
  la(check.url(url), "could not get CDN url", url)
  console.log("upload url", url)
  url

# forms a unique build hash from the current CI provider's
# branch + commit + build-number environment variables
formHashFromEnvironment = () ->
  env = process.env
  if env.BUILDKITE
    return "buildkite-#{env.BUILDKITE_BRANCH}-#{env.BUILDKITE_COMMIT}-#{env.BUILDKITE_BUILD_NUMBER}"
  if env.CIRCLECI
    return "circle-#{env.CIRCLE_BRANCH}-#{env.CIRCLE_SHA1}-#{env.CIRCLE_BUILD_NUM}"
  if env.APPVEYOR
    return "appveyor-#{env.APPVEYOR_REPO_BRANCH}-#{env.APPVEYOR_REPO_COMMIT}-#{env.APPVEYOR_BUILD_ID}"
  throw new Error("Do not know how to form unique build hash on this CI")

# loads AWS credentials from the environment or from
# scripts/support/aws-credentials.json, validating required fields
getS3Credentials = () ->
  key = path.join('scripts', 'support', 'aws-credentials.json')
  config = configFromEnvOrJsonFile(key)

  if !config
    console.error('⛔️ Cannot find AWS credentials')
    console.error('Using @cypress/env-or-json-file module')
    console.error('and filename', key)
    console.error('which is environment variable', filenameToShellVariable(key))
    console.error('available environment variable keys')
    console.error(Object.keys(process.env))
    throw new Error('AWS config not found')

  la(check.unemptyString(config.bucket), 'missing AWS config bucket')
  la(check.unemptyString(config.folder), 'missing AWS config folder')
  la(check.unemptyString(config.key), 'missing AWS key')
  la(check.unemptyString(config.secret), 'missing AWS secret key')

  config

# creates a gulp-awspublish publisher for our S3 bucket;
# the credential loader can be injected for testing
getPublisher = (getAwsObj = getS3Credentials) ->
  aws = getAwsObj()
  # console.log("aws.bucket", aws.bucket)
  awspublish.create {
    httpOptions: {
      timeout: human("10 minutes")
    }
    params: {
      Bucket: aws.bucket
    }
    accessKeyId: aws.key
    secretAccessKey: aws.secret
  }

# full CDN url where the desktop zip for one platform lives
getDesktopUrl = (version, osName, zipName) ->
  url = getUploadUrl()
  [url, "desktop", version, osName, zipName].join("/")

# purges desktop application url from Cloudflare cache
purgeDesktopAppFromCache = ({version, platform, zipName}) ->
  la(check.unemptyString(version), "missing desktop version", version)
  la(check.unemptyString(platform), "missing platform", platform)
  la(check.unemptyString(zipName), "missing zip filename")
  la(check.extension("zip", zipName),
    "zip filename should end with .zip", zipName)

  osName = getUploadNameByOsAndArch(platform)
  la(check.unemptyString(osName), "missing osName", osName)
  url = getDesktopUrl(version, osName, zipName)
  purgeCloudflareCache(url)

# purges links to desktop app for all platforms
# for a given version
purgeDesktopAppAllPlatforms = (version, zipName) ->
  la(check.unemptyString(version), "missing desktop version", version)
  la(check.unemptyString(zipName), "missing zipName", zipName)

  platforms = ["darwin", "linux", "win32"]
  console.log("purging all desktop links for version #{version} from Cloudflare")
  Promise.mapSeries platforms, (platform) ->
    purgeDesktopAppFromCache({version, platform, zipName})

# all architectures we are building test runner for
validPlatformArchs = ["darwin-x64", "linux-x64", "win32-ia32", "win32-x64"]
# simple check for platform-arch string
# example: isValidPlatformArch("darwin") // FALSE
isValidPlatformArch = check.oneOf(validPlatformArchs)

getValidPlatformArchs = () -> validPlatformArchs

# maps node's platform plus the *current* os.arch() to our
# "<platform>-<arch>" upload name; throws on unknown combinations
getUploadNameByOsAndArch = (platform) ->
  ## just hard code for now...
  arch = os.arch()

  uploadNames = {
    darwin: {
      "x64": "darwin-x64"
    },
    linux: {
      "x64": "linux-x64"
    },
    win32: {
      "x64": "win32-x64",
      "ia32": "win32-ia32"
    }
  }
  name = _.get(uploadNames[platform], arch)
  if not name
    throw new Error("Cannot find upload name for OS: '#{platform}' with arch: '#{arch}'")
  la(isValidPlatformArch(name), "formed invalid platform", name, "from", platform, arch)
  name

# curried: saveUrl(filename)(url) writes {url} as JSON into the file
saveUrl = (filename) -> (url) ->
  la(check.unemptyString(filename), "missing filename", filename)
  la(check.url(url), "invalid url to save", url)
  s = JSON.stringify({url})
  fse.writeFile(filename, s)
  .then =>
    console.log("saved url", url, "into file", filename)

module.exports = {
  getS3Credentials,
  getPublisher,
  purgeDesktopAppFromCache,
  purgeDesktopAppAllPlatforms,
  getUploadNameByOsAndArch,
  validPlatformArchs,
  getValidPlatformArchs,
  isValidPlatformArch,
  saveUrl,
  formHashFromEnvironment,
  getUploadUrl
}
+180
View File
@@ -0,0 +1,180 @@
const _ = require('lodash')
const path = require('path')
const awspublish = require('gulp-awspublish')
const human = require('human-interval')
const la = require('lazy-ass')
const check = require('check-more-types')
const fse = require('fs-extra')
const os = require('os')
const Promise = require('bluebird')
const { configFromEnvOrJsonFile, filenameToShellVariable } = require('@cypress/env-or-json-file')
const konfig = require('../get-config')()
const { purgeCloudflareCache } = require('./purge-cloudflare-cache')
// Resolves the CDN base url from config ("cdn_url"); throws if missing
// or not a valid url.
const getUploadUrl = function () {
  const url = konfig('cdn_url')

  la(check.url(url), 'could not get CDN url', url)
  console.log('upload url', url)

  return url
}
// Forms a unique, human-readable build hash from the current CI provider's
// branch, commit, and build-number environment variables. Throws when no
// known CI provider is detected.
const formHashFromEnvironment = () => {
  const { env } = process

  if (env.BUILDKITE) {
    return ['buildkite', env.BUILDKITE_BRANCH, env.BUILDKITE_COMMIT, env.BUILDKITE_BUILD_NUMBER].join('-')
  }

  if (env.CIRCLECI) {
    return ['circle', env.CIRCLE_BRANCH, env.CIRCLE_SHA1, env.CIRCLE_BUILD_NUM].join('-')
  }

  if (env.APPVEYOR) {
    return ['appveyor', env.APPVEYOR_REPO_BRANCH, env.APPVEYOR_REPO_COMMIT, env.APPVEYOR_BUILD_ID].join('-')
  }

  throw new Error('Do not know how to form unique build hash on this CI')
}
// Loads AWS credentials from the environment or from
// scripts/support/aws-credentials.json, validating the required fields.
const getS3Credentials = function () {
  const key = path.join('scripts', 'support', 'aws-credentials.json')
  const config = configFromEnvOrJsonFile(key)

  if (!config) {
    // print everything needed to diagnose the missing configuration
    console.error('⛔️ Cannot find AWS credentials')
    console.error('Using @cypress/env-or-json-file module')
    console.error('and filename', key)
    console.error('which is environment variable', filenameToShellVariable(key))
    console.error('available environment variable keys')
    console.error(Object.keys(process.env))
    throw new Error('AWS config not found')
  }

  la(check.unemptyString(config.bucket), 'missing AWS config bucket')
  la(check.unemptyString(config.folder), 'missing AWS config folder')
  la(check.unemptyString(config.key), 'missing AWS key')
  la(check.unemptyString(config.secret), 'missing AWS secret key')

  return config
}
// Creates a gulp-awspublish publisher for our S3 bucket; the credential
// loader can be injected for testing.
const getPublisher = function (getAwsObj = getS3Credentials) {
  const aws = getAwsObj()

  // console.log("aws.bucket", aws.bucket)
  return awspublish.create({
    httpOptions: {
      // uploads of the full binary can be slow
      timeout: human('10 minutes'),
    },
    params: {
      Bucket: aws.bucket,
    },
    accessKeyId: aws.key,
    secretAccessKey: aws.secret,
  })
}
// Full CDN url where the desktop zip for one platform lives.
const getDesktopUrl = (version, osName, zipName) => {
  const base = getUploadUrl()

  return `${base}/desktop/${version}/${osName}/${zipName}`
}
// purges desktop application url from Cloudflare cache
const purgeDesktopAppFromCache = ({ version, platform, zipName }) => {
  la(check.unemptyString(version), 'missing desktop version', version)
  la(check.unemptyString(platform), 'missing platform', platform)
  la(check.unemptyString(zipName), 'missing zip filename')
  la(check.extension('zip', zipName),
    'zip filename should end with .zip', zipName)

  const osName = getUploadNameByOsAndArch(platform)

  la(check.unemptyString(osName), 'missing osName', osName)

  return purgeCloudflareCache(getDesktopUrl(version, osName, zipName))
}
// purges links to desktop app for all platforms
// for a given version
const purgeDesktopAppAllPlatforms = (version, zipName) => {
  la(check.unemptyString(version), 'missing desktop version', version)
  la(check.unemptyString(zipName), 'missing zipName', zipName)

  console.log(`purging all desktop links for version ${version} from Cloudflare`)

  // purge one platform at a time, in order
  return Promise.mapSeries(['darwin', 'linux', 'win32'], (platform) => {
    return purgeDesktopAppFromCache({ version, platform, zipName })
  })
}
// all architectures we are building test runner for
const validPlatformArchs = ['darwin-x64', 'linux-x64', 'win32-ia32', 'win32-x64']
// simple check for platform-arch string
// example: isValidPlatformArch("darwin") // FALSE
const isValidPlatformArch = check.oneOf(validPlatformArchs)

// accessor kept for the public api surface
const getValidPlatformArchs = () => {
  return validPlatformArchs
}
// Maps node's platform plus the *current* process architecture to our
// "<platform>-<arch>" upload name; throws on unknown combinations.
const getUploadNameByOsAndArch = function (platform) {
  // just hard code for now...
  const arch = os.arch()

  const uploadNames = {
    darwin: { x64: 'darwin-x64' },
    linux: { x64: 'linux-x64' },
    win32: { x64: 'win32-x64', ia32: 'win32-ia32' },
  }

  const byArch = uploadNames[platform]
  const name = byArch && byArch[arch]

  if (!name) {
    throw new Error(`Cannot find upload name for OS: '${platform}' with arch: '${arch}'`)
  }

  la(isValidPlatformArch(name), 'formed invalid platform', name, 'from', platform, arch)

  return name
}
// Curried: saveUrl(filename)(url) writes { url } as JSON into the file.
const saveUrl = (filename) => {
  return (url) => {
    la(check.unemptyString(filename), 'missing filename', filename)
    la(check.url(url), 'invalid url to save', url)

    return fse.writeFile(filename, JSON.stringify({ url }))
    .then(() => {
      return console.log('saved url', url, 'into file', filename)
    })
  }
}
// public api used by the binary build/upload scripts
module.exports = {
  getS3Credentials,
  getPublisher,
  purgeDesktopAppFromCache,
  purgeDesktopAppAllPlatforms,
  getUploadNameByOsAndArch,
  validPlatformArchs,
  getValidPlatformArchs,
  isValidPlatformArch,
  saveUrl,
  formHashFromEnvironment,
  getUploadUrl,
}
-176
View File
@@ -1,176 +0,0 @@
Promise = require("bluebird")
os = require("os")
execa = require("execa")
path = require("path")
la = require("lazy-ass")
fs = require("fs")
R = require("ramda")
filesize = require("filesize")
# prints disk usage numbers using "du" utility
# available on Linux and Mac
printFileSizes = (folder) ->
  console.log("File sizes in #{folder}")
  paths = path.join(folder, "*")
  options = {
    stdio: "inherit",
    shell: true
  }
  execa("du -hs #{paths}", options)

# resolves with zipped filename
# Mac-only: shells out to `ditto` to preserve resource forks
macZip = (src, dest) ->
  printFileSizes(src)
  .then () ->
    if os.platform() != "darwin"
      throw new Error("Can only zip on Mac platform")

    # Ditto (Mac) options
    # http://www.unix.com/man-page/OSX/1/ditto/
    # -c create archive
    # -k set archive format to PKZip
    # --sequesterRsrc When creating a PKZip archive, preserve resource
    #   forks and HFS meta-data in the subdirectory __MACOSX
    # --keepParent when zipping folder "foo", makes the folder
    #   the top level in the archive
    #     foo.zip
    #       foo/
    #         ...
    zip = "ditto -c -k --sequesterRsrc --keepParent #{src} #{dest}"
    options = {
      stdio: "inherit",
      shell: true
    }
    console.log(zip)

    onZipFinished = () ->
      console.log("✅ ditto finished")

    onError = (err) ->
      console.error("⛔️ could not zip #{src} into #{dest}")
      console.error(err.message)
      throw err

    execa(zip, options)
    .then onZipFinished
    .then R.always(dest)
    .catch onError

# converts megabytes to bytes
megaBytes = (bytes) ->
  1024 * 1024 * bytes

# throws if the produced zip exceeds the per-platform size limit
checkZipSize = (zipPath) ->
  stats = fs.statSync(zipPath)
  zipSize = filesize(stats.size, {round: 0})
  console.log("zip file size #{zipSize}")
  MAX_ALLOWED_SIZE_MB = if os.platform() == "win32" then 245 else 170
  MAX_ZIP_FILE_SIZE = megaBytes(MAX_ALLOWED_SIZE_MB)
  if stats.size > MAX_ZIP_FILE_SIZE
    throw new Error("Zip file is too large: #{zipSize} (#{stats.size} bytes) exceeds #{MAX_ZIP_FILE_SIZE} bytes")

# zips relativeSource (inside parentFolder) into dest using the "zip" CLI
linuxZipAction = (parentFolder, dest, relativeSource) ->
  console.log("zipping #{parentFolder}")
  cmd = "cd #{parentFolder} && zip -r9 #{dest} #{relativeSource}"
  console.log("linux zip: #{cmd}")

  onZipFinished = () ->
    console.log("✅ zip finished")

  onError = (err) ->
    console.error("⛔️ could not zip #{relativeSource} in folder #{parentFolder}")
    console.error("to produce #{dest}")
    console.error(err.message)
    throw err

  execa(cmd, {shell: true})
  .then onZipFinished
  .then R.always(dest)
  .then R.tap(checkZipSize)
  .catch onError

# src is built folder with packed Cypress application
# like /root/app/build/linux-unpacked or build/win-unpacked
# and we want to always have /root/app/build/Cypress
renameFolder = (src) ->
  parentFolder = path.dirname(src)
  folderName = path.basename(src)
  if folderName is "Cypress"
    console.log('nothing to rename, folder "%s" ends with Cypress', src)
    return Promise.resolve(src)
  renamed = path.join(parentFolder, "Cypress")
  console.log("renaming #{src} to #{renamed}")
  fs.promises.rename(src, renamed)
  .then R.always(renamed)

# resolves with zipped filename
linuxZip = (src, dest) ->
  # in Linux switch to the folder containing source folder
  la(path.isAbsolute(src), "source path should be absolute", src)
  la(path.isAbsolute(dest), "destination path should be absolute", dest)

  # on Linux, make sure the folder name is "Cypress" first
  renameFolder(src)
  .then (renamedSource) ->
    printFileSizes(renamedSource)
    .then R.always(renamedSource)
  .then (renamedSource) ->
    console.log("will zip folder #{renamedSource}")
    parentFolder = path.dirname(renamedSource)
    relativeSource = path.basename(renamedSource)
    linuxZipAction(parentFolder, dest, relativeSource)

# resolves with zipped filename
windowsZipAction = (src, dest) ->
  # use 7Zip to zip
  # http://www.7-zip.org/
  # zips entire source directory including top level folder name
  #   Cypress/
  #     foo.txt
  # creates cypress.zip for example
  # unzip cypress.zip to get back the folder
  #   Cypress/
  #     foo.txt
  cmd = "7z a #{dest} #{src}"
  console.log("windows zip: #{cmd}")

  onZipFinished = () ->
    console.log("✅ 7z finished")

  onError = (err) ->
    console.error("⛔️ could not zip #{src} into #{dest}")
    console.error(err.message)
    throw err

  execa(cmd, {shell: true})
  .then onZipFinished
  .then R.always(dest)
  .then R.tap(checkZipSize)
  .catch onError

# renames the build folder to "Cypress" then 7-zips it
windowsZip = (src, dest) ->
  renameFolder(src)
  .then (renamedSource) ->
    windowsZipAction(renamedSource, dest)

# per-platform zip implementations
zippers = {
  linux: linuxZip
  darwin: macZip
  win32: windowsZip
}

module.exports = {
  # zip Cypress folder to create destination zip file
  # uses tool depending on the platform
  ditto: (src, dest) ->
    platform = os.platform()
    console.log("#zip", platform)
    console.log("Zipping %s into %s", src, dest)

    zipper = zippers[platform]
    if !zipper
      throw new Error("Missing zip function for platform #{platform}")

    zipper(src, dest)

  checkZipSize
}
+213
View File
@@ -0,0 +1,213 @@
const Promise = require('bluebird')
const os = require('os')
const execa = require('execa')
const path = require('path')
const la = require('lazy-ass')
const fs = require('fs')
const R = require('ramda')
const filesize = require('filesize')
// prints disk usage numbers using "du" utility
// available on Linux and Mac
const printFileSizes = function (folder) {
  console.log(`File sizes in ${folder}`)

  const everything = path.join(folder, '*')

  // inherit stdio so "du" output streams straight to the console
  return execa(`du -hs ${everything}`, {
    stdio: 'inherit',
    shell: true,
  })
}
// resolves with zipped filename
const macZip = function (src, dest) {
  return printFileSizes(src)
  .then(() => {
    // "ditto" only exists on macOS — refuse to run anywhere else
    if (os.platform() !== 'darwin') {
      throw new Error('Can only zip on Mac platform')
    }

    // Ditto (Mac) options
    // http://www.unix.com/man-page/OSX/1/ditto/
    // -c create archive
    // -k set archive format to PKZip
    // --sequesterRsrc When creating a PKZip archive, preserve resource
    //      forks and HFS meta-data in the subdirectory __MACOSX
    // --keepParent when zipping folder "foo", makes the folder
    //      the top level in the archive
    //      foo.zip
    //        foo/
    //          ...
    const cmd = `ditto -c -k --sequesterRsrc --keepParent ${src} ${dest}`

    console.log(cmd)

    return execa(cmd, {
      stdio: 'inherit',
      shell: true,
    })
    .then(() => console.log('✅ ditto finished'))
    .then(R.always(dest))
    .catch((err) => {
      console.error(`⛔️ could not zip ${src} into ${dest}`)
      console.error(err.message)
      throw err
    })
  })
}
// converts a count of megabytes to the equivalent number of bytes
const megaBytes = function (mb) {
  const BYTES_PER_MB = 1024 * 1024

  return BYTES_PER_MB * mb
}
// checks the produced zip file against a per-platform size budget
// and throws when the archive is suspiciously large
const checkZipSize = function (zipPath) {
  const { size } = fs.statSync(zipPath)
  const zipSize = filesize(size, { round: 0 })

  console.log(`zip file size ${zipSize}`)

  // Windows builds are larger, so they get a bigger allowance
  const limitMb = os.platform() === 'win32' ? 245 : 170
  const limitBytes = megaBytes(limitMb)

  if (size > limitBytes) {
    throw new Error(`Zip file is too large: ${zipSize} (${size} bytes) exceeds ${limitBytes} bytes`)
  }
}
// zips relativeSource (a folder name) from inside parentFolder into dest
// resolves with the destination zip path
const linuxZipAction = function (parentFolder, dest, relativeSource) {
  console.log(`zipping ${parentFolder}`)

  // cd into the parent first so the archive stores the top-level
  // folder name rather than an absolute path
  const cmd = `cd ${parentFolder} && zip -r9 ${dest} ${relativeSource}`

  console.log(`linux zip: ${cmd}`)

  return execa(cmd, { shell: true })
  .then(() => console.log('✅ zip finished'))
  .then(R.always(dest))
  .then(R.tap(checkZipSize))
  .catch((err) => {
    console.error(`⛔️ could not zip ${relativeSource} in folder ${parentFolder}`)
    console.error(`to produce ${dest}`)
    console.error(err.message)
    throw err
  })
}
// src is built folder with packed Cypress application
// like /root/app/build/linux-unpacked or build/win-unpacked
// and we want to always have /root/app/build/Cypress
const renameFolder = function (src) {
  if (path.basename(src) === 'Cypress') {
    // already named correctly — nothing to do
    console.log('nothing to rename, folder "%s" ends with Cypress', src)

    return Promise.resolve(src)
  }

  const renamed = path.join(path.dirname(src), 'Cypress')

  console.log(`renaming ${src} to ${renamed}`)

  return fs.promises.rename(src, renamed)
  .then(() => renamed)
}
// resolves with zipped filename
const linuxZip = function (src, dest) {
  // in Linux switch to the folder containing source folder
  la(path.isAbsolute(src), 'source path should be absolute', src)
  la(path.isAbsolute(dest), 'destination path should be absolute', dest)

  // on Linux, make sure the folder name is "Cypress" first
  return renameFolder(src)
  .then((renamedSource) => {
    // log sizes for debugging, then pass the renamed path along
    return printFileSizes(renamedSource).then(() => renamedSource)
  })
  .then((renamedSource) => {
    console.log(`will zip folder ${renamedSource}`)

    // zip relative to the parent so the archive keeps the top-level folder
    return linuxZipAction(path.dirname(renamedSource), dest, path.basename(renamedSource))
  })
}
// resolves with zipped filename
const windowsZipAction = function (src, dest) {
  // use 7Zip to zip
  // http://www.7-zip.org/
  // zips entire source directory including top level folder name
  //   Cypress/
  //     foo.txt
  // creates cypress.zip for example
  // unzip cypress.zip to get back the folder
  //   Cypress/
  //     foo.txt
  const cmd = `7z a ${dest} ${src}`

  console.log(`windows zip: ${cmd}`)

  // resolve with the destination path; checkZipSize throws (failing
  // the chain) when the archive exceeds the size budget
  return execa(cmd, { shell: true })
  .then(() => console.log('✅ 7z finished'))
  .then(R.always(dest))
  .then(R.tap(checkZipSize))
  .catch((err) => {
    console.error(`⛔️ could not zip ${src} into ${dest}`)
    console.error(err.message)
    throw err
  })
}
// resolves with zipped filename
// normalizes the folder name to "Cypress" before archiving with 7z
const windowsZip = function (src, dest) {
  return renameFolder(src)
  .then((renamedSource) => windowsZipAction(renamedSource, dest))
}
// maps os.platform() result to the zip implementation for that OS
const zippers = {
  linux: linuxZip,
  darwin: macZip,
  win32: windowsZip,
}
module.exports = {
// zip Cypress folder to create destination zip file
// uses tool depending on the platform
ditto (src, dest) {
const platform = os.platform()
console.log('#zip', platform)
console.log('Zipping %s into %s', src, dest)
const zipper = zippers[platform]
if (!zipper) {
throw new Error(`Missing zip function for platform ${platform}`)
}
return zipper(src, dest)
},
checkZipSize,
}