Mirror of https://github.com/cypress-io/cypress.git, synced 2026-01-05 22:19:46 -06:00
Copy test runner binaries before releasing new version (#4082)
* adding S3 sdk
* test binary folder
* linting
* before searching for binary
* linting
* grab folders in the given S3 prefix
* grab folders in the given S3 prefix
* find the last build
* found last builds for commit
* refactoring
* add tests for upload dir name
* create destination zip filename
* copying S3 files
* move s3 helpers into own object, prepare for testing
* add realistic test
* linting
* chore: add documentation to DEPLOY.md file
DEPLOY.md (36 lines changed)
@@ -95,12 +95,36 @@ npm i https://cdn.../npm/<new version>/hash/cypress.tgz
 also contains the original commit SHA from which it was built.
 - Build the Mac binary and upload (see above) to the CDN. Make sure to build it from the
 same commit as the binaries built by CI.
-- The upload from Mac binary will create new folder on CDN like
-`https://cdn.../desktop/1.0.5/darwin-x64`. We need to create parallel subfolders for
-Windows and Linux binaries. Go to the AWS console and create them. In this case you would create
-folders `desktop/1.0.5/linux-x64` and `desktop/1.0.5/win32-x64`.
-- Copy _the tested binaries_ from the unique `binary` folder into `desktop/1.0.5` subfolders for each
-platform.
+
+**next steps are about to change**
+
+*old way*
+
+- The upload from the Mac binary will create a new folder on the CDN like `https://cdn.../desktop/1.0.5/darwin-x64`. We need to create parallel subfolders for the Windows and Linux binaries. Go to the AWS console and create them. In this case you would create the folders `desktop/1.0.5/linux-x64` and `desktop/1.0.5/win32-x64`.
+- Copy _the tested binaries_ from the unique `binary` folder into the `desktop/1.0.5` subfolders for each platform.
+
+*new way*
+
+- Run the script that [finds and copies the built binaries](https://github.com/cypress-io/cypress/pull/4082) for each platform into the final desktop download folder:
+
+```shell
+$ npm run move-binaries -- --sha <commit sha> --version <new target version>
+```
+
+This requires AWS access information in an environment variable that looks like this:
+
+```text
+aws_credentials_json={"bucket": "cdn.cypress.io","folder": "desktop","key": "...","secret":"..."}
+```
+
+If you need verbose logs, run with the `DEBUG=cypress:binary` environment variable. If you use the [as-a](https://github.com/bahmutov/as-a) utility, the command for a particular commit and version probably looks like this:
+
+```shell
+$ as-a move-binaries npm run move-binaries -- --sha 455046b928c861d4457b2ec5426a51de1fda74fd --version 3.3.0
+```
+
+**continue**
+
 - Publish the new NPM package under the dev tag. The unique link to the package file `cypress.tgz`
 is the one already tested above. You can publish to the NPM registry straight from the URL:
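For reference, a minimal sketch of how a script could read and validate this variable. The variable name and JSON shape come from the documented example above; the helper itself is illustrative, not the actual implementation in the deploy scripts:

```ts
// illustrative only: parse the aws_credentials_json variable documented above
interface S3Credentials {
  bucket: string
  folder: string
  key: string
  secret: string
}

function getS3CredentialsFromEnv (): S3Credentials {
  const raw = process.env.aws_credentials_json

  if (!raw) {
    throw new Error('missing aws_credentials_json environment variable')
  }

  const config = JSON.parse(raw) as S3Credentials

  // all four fields are needed to find and copy binaries in the CDN bucket
  for (const field of ['bucket', 'folder', 'key', 'secret'] as const) {
    if (!config[field]) {
      throw new Error(`aws_credentials_json is missing "${field}"`)
    }
  }

  return config
}
```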
__snapshots__/move-binaries-spec.js (new file, 23 lines)
@@ -0,0 +1,23 @@
exports['collected builds and copied desktop'] = {
  "lastBuilds": [
    {
      "platformArch": "darwin-x64",
      "s3zipPath": "beta/binary/3.3.0/darwin-x64/circle-develop-455046b928c861d4457b2ec5426a51de1fda74fd-102457/cypress.zip"
    }
  ],
  "testRunners": [
    {
      "platformArch": "darwin-x64",
      "s3zipPath": "desktop/3.3.0/darwin-x64/cypress.zip"
    }
  ]
}

exports['move-binaries parseBuildPath parses into SHA and build 1'] = {
  "path": "beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/",
  "parsed": {
    "commit": "47e98fa1d0b18867a74da91a719d0f1ae73fcbc7",
    "build": 101843,
    "s3path": "beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/"
  }
}
__snapshots__/upload-unique-binary-spec.js
@@ -1,4 +1,4 @@
-exports['getCDN binary returns CDN s3 path 1'] = {
+exports['getCDN for binary'] = {
   "input": {
     "platform": "darwin-x64",
     "filename": "cypress.zip",
@@ -7,3 +7,20 @@ exports['getCDN binary returns CDN s3 path 1'] = {
   },
   "result": "https://cdn.cypress.io/beta/binary/3.3.0/darwin-x64/ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123/cypress.zip"
 }
+
+exports['upload binary folder'] = {
+  "input": {
+    "platformArch": "darwin-x64",
+    "version": "3.3.0",
+    "hash": "ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123"
+  },
+  "result": "beta/binary/3.3.0/darwin-x64/ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123/"
+}
+
+exports['upload binary folder for platform'] = {
+  "input": {
+    "platformArch": "darwin-x64",
+    "version": "3.3.0"
+  },
+  "result": "beta/binary/3.3.0/darwin-x64"
+}
__snapshots__/util-upload-spec.js (new file, 41 lines)
@@ -0,0 +1,41 @@
exports['upload util isValidPlatformArch checks given strings second 1'] = {
  "name": "second",
  "behavior": [
    {
      "given": "darwin-x64",
      "expect": true
    },
    {
      "given": "linux-x64",
      "expect": true
    },
    {
      "given": "win32-ia32",
      "expect": true
    },
    {
      "given": "win32-x64",
      "expect": true
    },
    {
      "given": "darwin",
      "expect": false
    },
    {
      "given": "win32",
      "expect": false
    },
    {
      "given": "windows",
      "expect": false
    },
    {
      "given": "linux",
      "expect": false
    },
    {
      "given": "linux64",
      "expect": false
    }
  ]
}
package.json
@@ -49,8 +49,9 @@
   "binary-deploy": "node ./scripts/binary.js deploy",
   "binary-purge": "node ./scripts/binary.js purge-version",
   "binary-deploy-linux": "./scripts/build-linux-binary.sh",
+  "move-binaries": "node ./scripts/binary.js move-binaries",
   "binary-release": "node ./scripts/binary.js release",
-  "test-scripts": "mocha -r packages/coffee/register --reporter spec 'scripts/unit/**/*spec.js'",
+  "test-scripts": "mocha -r packages/coffee/register -r packages/ts/register --reporter spec 'scripts/unit/**/*spec.js'",
   "test-mocha": "mocha --reporter spec scripts/spec.js",
   "test-mocha-snapshot": "mocha scripts/mocha-snapshot-spec.js",
   "check-node-version": "node scripts/check-node-version.js",
@@ -71,7 +72,7 @@
   "@cypress/set-commit-status": "1.3.4",
   "@types/bluebird": "3.5.21",
   "@types/chai": "3.5.2",
-  "@types/debug": "0.0.31",
+  "@types/debug": "4.1.4",
   "@types/execa": "0.7.2",
   "@types/fs-extra": "3.0.0",
   "@types/lodash": "4.14.122",
@@ -81,7 +82,9 @@
   "@types/request-promise": "4.1.42",
   "@types/sinon-chai": "3.2.2",
   "ansi-styles": "3.2.1",
+  "arg": "4.1.0",
   "ascii-table": "0.0.9",
+  "aws-sdk": "2.445.0",
   "babel-eslint": "10.0.1",
   "bluebird": "3.5.3",
   "bluebird-retry": "0.11.0",
@@ -118,6 +121,7 @@
   "human-interval": "0.1.6",
   "husky": "0.14.3",
   "inquirer": "3.3.0",
+  "inquirer-confirm": "2.0.3",
   "js-codemod": "cpojer/js-codemod#29dafed",
   "jscodemods": "cypress-io/jscodemods#01b546e",
   "jscodeshift": "0.6.3",
@@ -139,6 +143,7 @@
   "print-arch": "1.0.0",
   "ramda": "0.24.1",
   "shelljs": "0.8.3",
+  "sinon": "7.3.2",
   "snap-shot-it": "6.3.5",
   "stop-only": "3.0.1",
   "strip-ansi": "4.0.0",
scripts/binary.js
@@ -1,5 +1,6 @@
 /* eslint-disable no-console */
 require('@packages/coffee/register')
+require('@packages/ts/register')
 
 const command = process.argv[2]
 
@@ -23,6 +23,7 @@ upload = require("./upload")
 uploadUtils = require("./util/upload")
 {uploadNpmPackage} = require("./upload-npm-package")
 {uploadUniqueBinary} = require("./upload-unique-binary")
+{moveBinaries} = require('./move-binaries')
 
 ## initialize on existing repo
 repo = Promise.promisifyAll(gift(cwd))
@@ -162,7 +163,8 @@ deploy = {
     console.log('#uniqueBinaryUpload')
     uploadUniqueBinary(args)
 
-  # upload Cypress binary ZIP file
+  # uploads a single built Cypress binary ZIP file
+  # usually a binary is built on CI and is uploaded
   upload: (options) ->
     console.log('#upload')
 
@@ -186,6 +188,10 @@ deploy = {
       platform: options.platform,
     })
 
+  "move-binaries": (args = process.argv) ->
+    console.log('#moveBinaries')
+    moveBinaries(args)
+
   # purge all platforms of a desktop app for specific version
   "purge-version": (args = process.argv) ->
     console.log('#purge-version')
scripts/binary/move-binaries.ts (new file, 290 lines)
@@ -0,0 +1,290 @@
const debug = require("debug")("cypress:binary")
import la from 'lazy-ass'
import is from 'check-more-types'
// using "arg" module for parsing CLI arguments
// because it plays really nicely with TypeScript
import arg from 'arg'
import S3 from 'aws-sdk/clients/s3'
import {prop, sortBy, last} from 'ramda'
import pluralize from 'pluralize'

// inquirer-confirm is missing type definition
// @ts-ignore
import confirm from 'inquirer-confirm'

// ignore TS errors - we are importing from CoffeeScript files
// @ts-ignore
import uploadUtils from './util/upload'

// @ts-ignore
import {getUploadDirForPlatform} from './upload-unique-binary'
// @ts-ignore
import {zipName, getFullUploadName} from './upload'

/**
 * 40 character full sha commit string
 */
type commit = string
/**
 * semver string, like "3.3.0"
 */
type semver = string

/**
 * Platform plus architecture string like "darwin-x64"
 */
type platformArch = "darwin-x64" | "linux-x64" | "win32-ia32" | "win32-x64"

interface ReleaseInformation {
  commit: commit,
  version: semver
}

interface CommitAndBuild {
  commit: commit,
  build: number,
  s3path: string
}

interface Desktop {
  s3zipPath: string
  platformArch: platformArch
}

/**
 * Parses a binary S3 path like
 * "beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-100/"
 * and returns object with SHA string and build number
 */
export const parseBuildPath = (s3path: string): CommitAndBuild | null => {
  const shaAndBuild = /([0-9a-f]{40})-(\d+)\/?$/i
  const found = s3path.match(shaAndBuild)
  if (!found) {
    return null
  }
  const [, commit, build] = found
  return {
    commit,
    build: parseInt(build, 10),
    s3path
  }
}
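A quick usage sketch of `parseBuildPath`; the input path and resulting values are taken from the `move-binaries parseBuildPath` snapshot above, while the relative import path is an assumption:

```ts
import { parseBuildPath } from './move-binaries'

const parsed = parseBuildPath(
  'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/'
)
// parsed.commit -> '47e98fa1d0b18867a74da91a719d0f1ae73fcbc7'
// parsed.build  -> 101843

// a path without a trailing "<40-char sha>-<build>/" segment yields null
console.log(parseBuildPath('beta/binary/3.3.0/darwin-x64/')) // null
```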
export const findBuildByCommit = (commit: commit, s3paths: string[]) => {
  const matching = s3paths.filter(s => s.includes(commit))
  if (!matching.length) {
    // could not find path with commit SHA
    return null
  }

  if (matching.length === 1) {
    return matching[0]
  }

  // each path includes commit SHA and build number, let's pick the last build
  const parsedBuilds = matching.map(parseBuildPath)
  const sortedBuilds = sortBy(prop('build'))(parsedBuilds)
  return prop('s3path', last(sortedBuilds))
}
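A sketch of how `findBuildByCommit` picks among several uploads of the same commit; the data mirrors the "finds last matching build" unit test later in this diff, and the import path is again an assumption:

```ts
import { findBuildByCommit } from './move-binaries'

const sha = '47e98fa1d0b18867a74da91a719d0f1ae73fcbc7'
const paths = [
  `beta/binary/3.3.0/darwin-x64/circle-develop-${sha}-50/`,
  `beta/binary/3.3.0/darwin-x64/circle-develop-${sha}-100/`,
  'beta/binary/3.3.0/darwin-x64/circle-develop-ffff8fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
]

// two paths match the commit; the higher build number (100) wins
console.log(findBuildByCommit(sha, paths))
// -> 'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-100/'
```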
/**
 * An object of all confirm prompts to the user.
 * Useful for stubbing the confirmation prompts during testing.
 */
export const prompts = {
  async shouldCopy () {
    await confirm({
      question: 'Would you like to proceed? This will overwrite existing files',
      default: false,
    })
  }
}

/**
 * Utility object with methods that deal with S3.
 * Useful for testing our code that calls S3 methods.
 */
export const s3helpers = {
  makeS3 (aws) {
    la(is.unemptyString(aws.key), 'missing aws key')
    la(is.unemptyString(aws.secret), 'missing aws secret')

    return new S3({
      accessKeyId: aws.key,
      secretAccessKey: aws.secret
    })
  },

  verifyZipFileExists (zipFile: string, bucket: string, s3: S3): Promise<void> {
    debug('checking S3 file %s', zipFile)
    debug('bucket %s', bucket)

    return new Promise((resolve, reject) => {
      s3.headObject({
        Bucket: bucket,
        Key: zipFile
      }, (err, data) => {
        if (err) {
          debug('error getting object %s', zipFile)
          debug(err)

          return reject(err)
        }
        debug('s3 data for %s', zipFile)
        debug(data)
        resolve()
      })
    })
  },

  /**
   * Returns list of prefixes in a given folder
   */
  listS3Objects (uploadDir: string, bucket: string, s3: S3): Promise<string[]> {
    la(is.unemptyString(uploadDir), 'invalid upload dir', uploadDir)

    return new Promise((resolve, reject) => {
      const prefix = uploadDir + '/'
      s3.listObjectsV2({
        Bucket: bucket,
        Prefix: prefix,
        Delimiter: '/'
      }, (err, result) => {
        if (err) {
          return reject(err)
        }

        debug('AWS result in %s %s', bucket, prefix)
        debug('%o', result)

        resolve(result.CommonPrefixes.map(prop('Prefix')))
      })
    })
  },

  async copyS3 (sourceKey: string, destinationKey: string, bucket: string, s3: S3) {
    return new Promise((resolve, reject) => {
      debug('copying %s in bucket %s to %s', sourceKey, bucket, destinationKey)

      s3.copyObject({
        Bucket: bucket,
        CopySource: bucket + '/' + sourceKey,
        Key: destinationKey
      }, (err, data) => {
        if (err) {
          return reject(err)
        }

        debug('result of copying')
        debug('%o', data)

        // resolve once the copy completes, otherwise the returned promise
        // never settles and moveBinaries hangs awaiting it
        resolve(data)
      })
    })
  }
}

/**
 * Moves binaries built for different platforms into a single
 * folder on S3 before officially releasing as a new version.
 */
export const moveBinaries = async (args = []) => {
  debug('moveBinaries with args %o', args)
  const options = arg({
    '--commit': String,
    '--version': String,
    // aliases
    '--sha': '--commit',
    '-v': '--version'
  }, {
    argv: args.slice(2)
  })
  debug('moveBinaries with options %o', options)

  // @ts-ignore
  la(is.commitId(options['--commit']), 'missing commit SHA', options)
  // @ts-ignore
  la(is.semver(options['--version']), 'missing version to collect', options)

  const releaseOptions: ReleaseInformation = {
    commit: options['--commit'],
    version: options['--version']
  }

  const aws = uploadUtils.getS3Credentials()
  const s3 = s3helpers.makeS3(aws)

  // S3 paths to the last build of each platform for the given commit
  const lastBuilds: Desktop[] = []

  const platforms: platformArch[] = uploadUtils.getValidPlatformArchs()

  for (const platformArch of platforms) {
    la(uploadUtils.isValidPlatformArch(platformArch),
      'invalid platform arch', platformArch)

    const uploadDir = getUploadDirForPlatform({
      version: releaseOptions.version
    }, platformArch)
    console.log('finding binary for %s in %s', platformArch, uploadDir)

    const list: string[] = await s3helpers.listS3Objects(uploadDir, aws.bucket, s3)

    if (debug.enabled) {
      console.log('all found subfolders')
      console.log(list.join('\n'))
    }

    const lastBuildPath = findBuildByCommit(releaseOptions.commit, list)
    if (!lastBuildPath) {
      throw new Error(`Cannot find build with commit ${releaseOptions.commit} for platform ${platformArch}`)
    }
    console.log('found %s for commit %s on platform %s',
      lastBuildPath,
      releaseOptions.commit, platformArch)

    const s3zipPath = lastBuildPath + zipName

    await s3helpers.verifyZipFileExists(s3zipPath, aws.bucket, s3)

    lastBuilds.push({
      platformArch,
      s3zipPath
    })
  }

  console.log('Copying %s for commit %s',
    pluralize('last build', lastBuilds.length, true), releaseOptions.commit)
  console.log(lastBuilds.map(prop('s3zipPath')).join('\n'))

  try {
    await prompts.shouldCopy()
  } catch (e) {
    console.log('Copying has been cancelled')
    return
  }

  console.log('Copying ...')

  // final test runners that we have copied
  const testRunners: Desktop[] = []

  for (const lastBuild of lastBuilds) {
    const options = {
      folder: aws.folder,
      version: releaseOptions.version,
      platformArch: lastBuild.platformArch,
      name: zipName
    }
    const destinationPath = getFullUploadName(options)
    console.log('copying test runner %s to %s', lastBuild.platformArch, destinationPath)

    await s3helpers.copyS3(lastBuild.s3zipPath, destinationPath, aws.bucket, s3)

    testRunners.push({
      platformArch: lastBuild.platformArch,
      s3zipPath: destinationPath
    })
  }

  // return all available information
  return {lastBuilds, testRunners}
}
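Beyond the `npm run move-binaries` entry point, the exported function can be driven programmatically; a minimal sketch, mirroring how the unit test later in this diff calls it (the first two argv entries are sliced off, so placeholders are passed for them):

```ts
import { moveBinaries } from './move-binaries'

// the first two argv entries (node binary and script name) are sliced off
const args: any[] = [
  null, null,
  '--sha', '455046b928c861d4457b2ec5426a51de1fda74fd',
  '--version', '3.3.0',
]

moveBinaries(args).then((result) => {
  // result is undefined when the user cancels the confirmation prompt
  if (result) {
    // S3 paths of the found beta builds and of the copied test runners
    console.log(result.lastBuilds)
    console.log(result.testRunners)
  }
})
```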
scripts/binary/upload-unique-binary.coffee
@@ -32,14 +32,35 @@ getCDN = ({version, hash, filename, platform}) ->
   la(check.unemptyString(filename), 'missing filename', filename)
   la(isBinaryFile(filename), 'wrong extension for file', filename)
   la(check.unemptyString(platform), 'missing platform', platform)
-  [konfig("cdn_url"), rootFolder, folder, version, platform, hash, filename].join("/")
+
+  cdnUrl = konfig("cdn_url")
+  [cdnUrl, rootFolder, folder, version, platform, hash, filename].join("/")
 
+# returns folder that contains beta (unreleased) binaries for given version
+#
+getUploadVersionDirName = (options) ->
+  la(check.unemptyString(options.version), 'missing version', options)
+
+  dir = [rootFolder, folder, options.version].join("/")
+  dir
+
+getUploadDirForPlatform = (options, platformArch) ->
+  la(uploadUtils.isValidPlatformArch(platformArch),
+    'missing or invalid platformArch', platformArch)
+
+  versionDir = getUploadVersionDirName(options)
+  la(check.unemptyString(versionDir), 'could not form folder from', options)
+
+  dir = [versionDir, platformArch].join("/")
+  dir
+
 getUploadDirName = (options) ->
   la(check.unemptyString(options.version), 'missing version', options)
   la(check.unemptyString(options.hash), 'missing hash', options)
   la(check.unemptyString(options.platformArch), 'missing platformArch', options)
 
-  dir = [rootFolder, folder, options.version, options.platformArch, options.hash, null].join("/")
+  uploadFolder = getUploadDirForPlatform(options, options.platformArch)
+  la(check.unemptyString(uploadFolder), 'could not form folder from', options)
+
+  dir = [uploadFolder, options.hash, null].join("/")
   dir
 
 uploadFile = (options) ->
@@ -106,6 +127,8 @@ uploadUniqueBinary = (args = []) ->
   .then uploadUtils.saveUrl("binary-url.json")
 
 module.exports = {
   getUploadDirName,
+  getUploadDirForPlatform,
   uploadUniqueBinary,
   getCDN
 }
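For a concrete sense of what these helpers produce, a sketch with outputs pinned by the `upload binary folder` snapshots earlier in this diff; the require path and the registered CoffeeScript loader (as used by scripts/binary.js) are assumptions:

```ts
// assumes require('@packages/coffee/register') has already run,
// as in scripts/binary.js
const {
  getUploadDirName,
  getUploadDirForPlatform,
} = require('./scripts/binary/upload-unique-binary')

// per-platform folder holding all beta builds of a version
getUploadDirForPlatform({ version: '3.3.0' }, 'darwin-x64')
// -> 'beta/binary/3.3.0/darwin-x64'

// unique folder for one CI build: version + platform + hash
getUploadDirName({
  version: '3.3.0',
  platformArch: 'darwin-x64',
  hash: 'ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123',
})
// -> 'beta/binary/3.3.0/darwin-x64/ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123/'
```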
scripts/binary/upload.coffee
@@ -29,12 +29,32 @@ module.exports = {
   getAwsObj: ->
     uploadUtils.getS3Credentials()
 
+  # returns desktop folder for a given version, without platform
+  # something like desktop/0.20.1
+  getUploadeVersionFolder: (aws, version) ->
+    la(check.unemptyString(aws.folder), 'aws object is missing desktop folder', aws.folder)
+    dirName = [aws.folder, version].join("/")
+    dirName
+
+  getFullUploadName: ({folder, version, platformArch, name}) ->
+    la(check.unemptyString(folder), 'missing folder', folder)
+    la(check.semver(version), 'missing or invalid version', version)
+    la(check.unemptyString(name), 'missing file name', name)
+    la(uploadUtils.isValidPlatformArch(platformArch),
+      'invalid platform and arch', platformArch)
+
+    fileName = [folder, version, platformArch, name].join("/")
+    fileName
+
   # store uploaded application in subfolders by platform and version
   # something like desktop/0.20.1/darwin-x64/
   getUploadDirName: ({version, platform}) ->
     aws = @getAwsObj()
     platformArch = uploadUtils.getUploadNameByOsAndArch(platform)
-    dirName = [aws.folder, version, platformArch, null].join("/")
+
+    versionFolder = @getUploadeVersionFolder(aws, version)
+    dirName = [versionFolder, platformArch, null].join("/")
 
     console.log("target directory %s", dirName)
     dirName
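`getFullUploadName` is what move-binaries.ts uses to compute the final destination of each copied zip; the result below is pinned by the `collected builds and copied desktop` snapshot, while the require path and loader registration are assumptions:

```ts
// assumes require('@packages/coffee/register') has already run
const { getFullUploadName } = require('./scripts/binary/upload')

// final CDN destination of a released test runner zip
getFullUploadName({
  folder: 'desktop', // aws.folder from the S3 credentials
  version: '3.3.0',
  platformArch: 'darwin-x64',
  name: 'cypress.zip',
})
// -> 'desktop/3.3.0/darwin-x64/cypress.zip'
```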
scripts/binary/util/upload.coffee
@@ -37,6 +37,9 @@ getS3Credentials = () ->
 
+  la(check.unemptyString(config.bucket), 'missing AWS config bucket')
+  la(check.unemptyString(config.folder), 'missing AWS config folder')
   la(check.unemptyString(config.key), 'missing AWS key')
   la(check.unemptyString(config.secret), 'missing AWS secret key')
 
   config
 
 getPublisher = (getAwsObj = getS3Credentials) ->
@@ -117,6 +120,14 @@ purgeDesktopAppAllPlatforms = (version, zipName) ->
   Promise.mapSeries platforms, (platform) ->
     purgeDesktopAppFromCache({version, platform, zipName})
 
+# all architectures we are building test runner for
+validPlatformArchs = ["darwin-x64", "linux-x64", "win32-ia32", "win32-x64"]
+# simple check for platform-arch string
+# example: isValidPlatformArch("darwin") // FALSE
+isValidPlatformArch = check.oneOf(validPlatformArchs)
+
+getValidPlatformArchs = () -> validPlatformArchs
+
 getUploadNameByOsAndArch = (platform) ->
   ## just hard code for now...
   arch = os.arch()
@@ -136,6 +147,8 @@ getUploadNameByOsAndArch = (platform) ->
   name = _.get(uploadNames[platform], arch)
   if not name
     throw new Error("Cannot find upload name for OS: '#{platform}' with arch: '#{arch}'")
+  la(isValidPlatformArch(name), "formed invalid platform", name, "from", platform, arch)
+
   name
 
 saveUrl = (filename) -> (url) ->
@@ -153,6 +166,9 @@ module.exports = {
   purgeDesktopAppFromCache,
   purgeDesktopAppAllPlatforms,
   getUploadNameByOsAndArch,
+  validPlatformArchs,
+  getValidPlatformArchs,
+  isValidPlatformArch,
   saveUrl,
   formHashFromEnvironment
 }
scripts/spec-helper.js (new file, 11 lines)
@@ -0,0 +1,11 @@
/* eslint-env mocha */
const sinon = require('sinon')
const Promise = require('bluebird')

global.sinon = sinon

sinon.usingPromise(Promise)

afterEach(function () {
  sinon.restore()
})
scripts/unit/binary/move-binaries-spec.js (new file, 158 lines)
@@ -0,0 +1,158 @@
const snapshot = require('snap-shot-it')
const la = require('lazy-ass')
const is = require('check-more-types')
const uploadUtils = require('../../binary/util/upload')

/* eslint-env mocha */
/* global sinon */
describe('move-binaries', () => {
  const moveBinaries = require('../../binary/move-binaries')

  context('parseBuildPath', () => {
    const parseBuildPath = moveBinaries.parseBuildPath

    it('parses into SHA and build', () => {
      const path =
        'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/'
      const parsed = parseBuildPath(path)

      la(is.commitId(parsed.commit), 'missing commit', parsed)
      la(is.positive(parsed.build), 'missing build', parsed)

      snapshot({
        path,
        parsed,
      })
    })
  })

  context('findBuildByCommit', () => {
    const findBuildByCommit = moveBinaries.findBuildByCommit
    const sha = '47e98fa1d0b18867a74da91a719d0f1ae73fcbc7'

    it('is a function', () => {
      la(is.fn(findBuildByCommit))
    })

    it('finds single matching path', () => {
      const paths = [
        'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
      ]
      const found = findBuildByCommit(sha, paths)

      la(found === paths[0], 'expected to find the only path', found)
    })

    it('finds single matching path among several', () => {
      const paths = [
        'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
        // these are not matching
        'beta/binary/3.3.0/darwin-x64/circle-develop-ffff8fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
        'beta/binary/3.3.0/darwin-x64/circle-develop-aaaa8fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
      ]
      const found = findBuildByCommit(sha, paths)

      la(found === paths[0], 'expected to find the only matching path', found)
    })

    it('finds last matching build', () => {
      const paths = [
        // matching, but not the last one
        'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-50/',
        // this is both matching and is the latest build (100)
        'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-100/',
        // these are not matching
        'beta/binary/3.3.0/darwin-x64/circle-develop-ffff8fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
        'beta/binary/3.3.0/darwin-x64/circle-develop-aaaa8fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
        // this one is matching, but not the latest one
        'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-2/',
      ]
      const found = findBuildByCommit(sha, paths)

      la(found === paths[1], 'expected to find the latest matching build', found)
    })
  })

  context('moveBinaries', () => {
    const move = moveBinaries.moveBinaries

    it('is a function', () => {
      la(is.fn(move))
    })

    it('finds and copies latest build for each platform', () => {
      // realistic end-to-end test:
      // stubs the S3 method calls
      // and lets our "moveBinaries" function collect builds,
      // find the latest build for each platform (for the same commit),
      // then call S3 to copy the desktop zip file to the final destination folder

      const sha = '455046b928c861d4457b2ec5426a51de1fda74fd'
      const version = '3.3.0'

      // limit ourselves to a single platform
      sinon.stub(uploadUtils, 'getValidPlatformArchs').returns(['darwin-x64'])

      // Mac builds for several commits in the beta folder;
      // below is the latest build matching the commit
      const latestMacBuild =
        'beta/binary/3.3.0/darwin-x64/circle-develop-455046b928c861d4457b2ec5426a51de1fda74fd-102457/'
      const darwinBuilds = [
        'beta/binary/3.3.0/darwin-x64/circle-develop-167934f0e45a07f03f6b1c5ddd6d8f201b5bb708-102287/',
        'beta/binary/3.3.0/darwin-x64/circle-develop-455046b928c861d4457b2ec5426a51de1fda74fd-102212/',
        'beta/binary/3.3.0/darwin-x64/circle-develop-47e98fa1d0b18867a74da91a719d0f1ae73fcbc7-101843/',
        latestMacBuild,
        'beta/binary/3.3.0/darwin-x64/circle-develop-5015cbbe876687deca571c221dfbc90715ad6d00-101982/',
        'beta/binary/3.3.0/darwin-x64/circle-develop-9372bc3f67a6a83bd5ec8a69d7350f5a9b52ddf9-102246/',
        'beta/binary/3.3.0/darwin-x64/circle-develop-455046b928c861d4457b2ec5426a51de1fda74fd-102359/',
        'beta/binary/3.3.0/darwin-x64/circle-develop-ec36bf013224942f6198bf831d62af64b9b16cf5-102729/',
        'beta/binary/3.3.0/darwin-x64/circle-issue-3996-6d539513e709ddd5aad866f6bf653280db6622cd-98450/',
      ]

      // fake AWS config
      const aws = {
        bucket: 'cdn.cypress.io',
        folder: 'desktop', // destination for test runner downloads
      }

      sinon.stub(uploadUtils, 'getS3Credentials').returns(aws)

      // fake S3 api
      const s3 = {}

      sinon.stub(moveBinaries.s3helpers, 'makeS3').returns(s3)
      sinon
        .stub(moveBinaries.s3helpers, 'listS3Objects')
        .withArgs('beta/binary/3.3.0/darwin-x64', aws.bucket)
        .resolves(darwinBuilds)

      sinon
        .stub(moveBinaries.s3helpers, 'verifyZipFileExists')
        .withArgs(`${latestMacBuild}cypress.zip`, aws.bucket)
        .resolves()

      // our method will ask the user to confirm copying
      sinon.stub(moveBinaries.prompts, 'shouldCopy').resolves()

      sinon
        .stub(moveBinaries.s3helpers, 'copyS3')
        .withArgs(
          `${latestMacBuild}cypress.zip`,
          'desktop/3.3.0/darwin-x64/cypress.zip',
          aws.bucket
        )
        .resolves()

      // first two arguments are sliced anyway
      const nodeName = null
      const scriptName = null
      const args = [nodeName, scriptName, '--sha', sha, '--version', version]

      return move(args).then((result) => {
        la(is.object(result), 'expected a result', result)

        snapshot('collected builds and copied desktop', result)
      })
    })
  })
})
scripts/unit/binary/upload-spec.js
@@ -1,6 +1,11 @@
+require('../../spec-helper')
+
 const snapshot = require('snap-shot-it')
+const la = require('lazy-ass')
+const os = require('os')
 
 /* eslint-env mocha */
+/* global sinon */
 describe('upload', () => {
   const upload = require('../../binary/upload')
 
@@ -13,4 +18,37 @@ describe('upload', () => {
       snapshot('test runner manifest', manifest)
     })
   })
+
+  context('getUploadeVersionFolder', () => {
+    it('returns folder', () => {
+      const aws = {
+        folder: 'desktop',
+      }
+      const folder = upload.getUploadeVersionFolder(aws, '3.3.0')
+
+      la(folder === 'desktop/3.3.0', 'wrong desktop folder', folder)
+    })
+  })
+
+  context('getUploadDirName', () => {
+    it('returns folder with platform', () => {
+      const aws = {
+        folder: 'desktop',
+      }
+
+      sinon.stub(upload, 'getAwsObj').returns(aws)
+      sinon.stub(os, 'arch').returns('x64')
+
+      const folder = upload.getUploadDirName({
+        platform: 'darwin',
+        version: '3.3.0',
+      })
+
+      la(
+        folder === 'desktop/3.3.0/darwin-x64/',
+        'wrong upload desktop folder',
+        folder
+      )
+    })
+  })
 })
scripts/unit/binary/upload-unique-binary-spec.js
@@ -1,22 +1,61 @@
 const snapshot = require('snap-shot-it')
 
 /* eslint-env mocha */
-describe('getCDN', () => {
-  context('binary', () => {
-    const { getCDN } = require('../../binary/upload-unique-binary')
+describe('upload-unique-binary', () => {
+  describe('getUploadDirName', () => {
+    const { getUploadDirName } = require('../../binary/upload-unique-binary')
 
-    it('returns CDN s3 path', () => {
+    it('returns folder for given version', () => {
       const options = {
-        platform: 'darwin-x64',
-        filename: 'cypress.zip',
+        platformArch: 'darwin-x64',
         version: '3.3.0',
         // ci name + commit sha + build number
         hash: 'ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123',
       }
 
-      snapshot({
+      snapshot('upload binary folder', {
         input: options,
-        result: getCDN(options),
+        result: getUploadDirName(options),
       })
     })
   })
+
+  describe('getUploadDirForPlatform', () => {
+    const {
+      getUploadDirForPlatform,
+    } = require('../../binary/upload-unique-binary')
+
+    it('returns folder for given version and platform', () => {
+      const options = {
+        platformArch: 'darwin-x64',
+        version: '3.3.0',
+      }
+      const result = getUploadDirForPlatform(options, options.platformArch)
+
+      snapshot('upload binary folder for platform', {
+        input: options,
+        result,
+      })
+    })
+  })
+
+  describe('getCDN', () => {
+    context('binary', () => {
+      const { getCDN } = require('../../binary/upload-unique-binary')
+
+      it('returns CDN s3 path', () => {
+        const options = {
+          platform: 'darwin-x64',
+          filename: 'cypress.zip',
+          version: '3.3.0',
+          // ci name + commit sha + build number
+          hash: 'ci-name-e154a40f3f76abd39a1d85c0ebc0ff9565015706-123',
+        }
+
+        snapshot('getCDN for binary', {
+          input: options,
+          result: getCDN(options),
+        })
+      })
+    })
+  })
 })
scripts/unit/binary/util/util-upload-spec.js (new file, 21 lines)
@@ -0,0 +1,21 @@
const snapshot = require('snap-shot-it')

// I named this file util-upload-spec
// to avoid snapshots being saved into the same file,
// since "snap-shot-it" v8.x saves all snapshots into a single folder

/* eslint-env mocha */
describe('upload util', () => {
  const upload = require('../../../binary/util/upload')

  context('isValidPlatformArch', () => {
    const { isValidPlatformArch } = upload

    it('checks given strings', () => {
      const valid = upload.validPlatformArchs
      const invalid = ['darwin', 'win32', 'windows', 'linux', 'linux64']

      snapshot(isValidPlatformArch, ...valid, ...invalid)
    })
  })
})