diff --git a/circle.yml b/circle.yml index b131e1c7b7..7c412c7cd2 100644 --- a/circle.yml +++ b/circle.yml @@ -216,6 +216,8 @@ jobs: at: ~/ # make sure mocha runs - run: npm run test-mocha + # test binary build code + - run: npm run test-scripts # make sure our snapshots are compared correctly - run: npm run test-mocha-snapshot # make sure packages with TypeScript can be transpiled to JS @@ -276,9 +278,9 @@ jobs: steps: - attach_workspace: at: ~/ - - run: + - run: environment: - DEBUG: test:proxy-performance + DEBUG: test:proxy-performance command: npm run all test-performance -- --package server - store_test_results: path: /tmp/cypress @@ -790,6 +792,7 @@ linux-workflow: &linux-workflow branches: only: - develop + - binary-metadata requires: - build - build-binary: @@ -797,6 +800,7 @@ linux-workflow: &linux-workflow branches: only: - develop + - binary-metadata requires: - build - test-binary-and-npm-against-other-projects: @@ -804,6 +808,7 @@ linux-workflow: &linux-workflow branches: only: - develop + - binary-metadata requires: - build-npm-package - build-binary @@ -863,6 +868,7 @@ mac-workflow: &mac-workflow branches: only: - develop + - binary-metadata requires: - Mac build diff --git a/package.json b/package.json index 2bbe5ddc49..8f166eb91f 100644 --- a/package.json +++ b/package.json @@ -52,6 +52,7 @@ "move-binaries": "node ./scripts/binary.js move-binaries", "binary-release": "node ./scripts/binary.js release", "test-scripts": "mocha -r packages/coffee/register -r packages/ts/register --reporter spec 'scripts/unit/**/*spec.js'", + "test-s3-api": "node -r ./packages/coffee/register -r ./packages/ts/register scripts/binary/s3-api-demo.ts", "test-mocha": "mocha --reporter spec scripts/spec.js", "test-mocha-snapshot": "mocha scripts/mocha-snapshot-spec.js", "check-node-version": "node scripts/check-node-version.js", @@ -118,6 +119,7 @@ "gulp-debug": "3.2.0", "gulp-rename": "1.4.0", "gulp-typescript": "3.2.4", + "hasha": "5.0.0", "human-interval": "0.1.6", 
"husky": "0.14.3", "inquirer": "3.3.0", diff --git a/scripts/binary/move-binaries.ts b/scripts/binary/move-binaries.ts index 604e42a986..06790d598e 100644 --- a/scripts/binary/move-binaries.ts +++ b/scripts/binary/move-binaries.ts @@ -1,11 +1,11 @@ +import { s3helpers } from './s3-api' const debug = require("debug")("cypress:binary") import la from 'lazy-ass' import is from 'check-more-types' // using "arg" module for parsing CLI arguments // because it plays really nicely with TypeScript import arg from 'arg' -import S3 from 'aws-sdk/clients/s3' -import {prop, sortBy, last} from 'ramda' +import {prop, sortBy, last, equals} from 'ramda' import pluralize from 'pluralize' // inquirer-confirm is missing type definition @@ -100,88 +100,6 @@ export const prompts = { } } -/** - * Utility object with methods that deal with S3. - * Useful for testing our code that calls S3 methods. - */ -export const s3helpers = { - makeS3 (aws) { - la(is.unemptyString(aws.key), 'missing aws key') - la(is.unemptyString(aws.secret), 'missing aws secret') - - return new S3({ - accessKeyId: aws.key, - secretAccessKey: aws.secret - }) - }, - - verifyZipFileExists (zipFile: string, bucket: string, s3: S3): Promise { - debug('checking S3 file %s', zipFile) - debug('bucket %s', bucket) - - return new Promise((resolve, reject) => { - s3.headObject({ - Bucket: bucket, - Key: zipFile - }, (err, data) => { - if (err) { - debug('error getting object %s', zipFile) - debug(err) - - return reject(err) - } - debug('s3 data for %s', zipFile) - debug(data) - resolve() - }) - }) - }, - - /** - * Returns list of prefixes in a given folder - */ - listS3Objects (uploadDir: string, bucket: string, s3: S3): Promise { - la(is.unemptyString(uploadDir), 'invalid upload dir', uploadDir) - - return new Promise((resolve, reject) => { - const prefix = uploadDir + '/' - s3.listObjectsV2({ - Bucket: bucket, - Prefix: prefix, - Delimiter: '/' - }, (err, result) => { - if (err) { - return reject(err) - } - - debug('AWS 
result in %s %s', bucket, prefix) - debug('%o', result) - - resolve(result.CommonPrefixes.map(prop('Prefix'))) - }) - }) - }, - - async copyS3 (sourceKey: string, destinationKey: string, bucket: string, s3: S3) { - return new Promise((resole, reject) => { - debug('copying %s in bucket %s to %s', sourceKey, bucket, destinationKey) - - s3.copyObject({ - Bucket: bucket, - CopySource: bucket + '/' + sourceKey, - Key: destinationKey - }, (err, data) => { - if (err) { - return reject(err) - } - - debug('result of copying') - debug('%o', data) - }) - }) - } -} - /** * Moves binaries built for different platforms into a single * folder on S3 before officially releasing as a new version. @@ -191,6 +109,8 @@ export const moveBinaries = async (args = []) => { const options = arg({ '--commit': String, '--version': String, + // optional, if passed, only the binary for that platform will be moved + '--platformArch': String, // aliases '--sha': '--commit', '-v': '--version' @@ -200,7 +120,7 @@ export const moveBinaries = async (args = []) => { debug('moveBinaries with options %o', options) // @ts-ignore - la(is.commitId(options['--commit']), 'missing commit SHA', options) + la(is.commitId(options['--commit']), 'missing or invalid commit SHA', options) // @ts-ignore la(is.semver(options['--version']), 'missing version to collect', options) @@ -215,7 +135,14 @@ export const moveBinaries = async (args = []) => { // found s3 paths with last build for same commit for all platforms const lastBuilds: Desktop[] = [] - const platforms: platformArch[] = uploadUtils.getValidPlatformArchs() + let platforms: platformArch[] = uploadUtils.getValidPlatformArchs() + if (options['--platformArch']) { + const onlyPlatform = options['--platformArch'] + console.log('only moving single platform %s', onlyPlatform) + la(uploadUtils.isValidPlatformArch(onlyPlatform), 'invalid platform-arch', onlyPlatform) + platforms = platforms.filter(equals(onlyPlatform)) + } + la(platforms.length, 'no platforms to 
move', platforms) for (const platformArch of platforms) { la(uploadUtils.isValidPlatformArch(platformArch), @@ -277,7 +204,8 @@ export const moveBinaries = async (args = []) => { const destinationPath = getFullUploadName(options) console.log('copying test runner %s to %s', lastBuild.platformArch, destinationPath) - await s3helpers.copyS3(lastBuild.s3zipPath, destinationPath, aws.bucket, s3) + await s3helpers.copyS3(lastBuild.s3zipPath, destinationPath, aws.bucket, + 'application/zip', 'public-read', s3) testRunners.push({ platformArch: lastBuild.platformArch, diff --git a/scripts/binary/s3-api-demo.ts b/scripts/binary/s3-api-demo.ts new file mode 100644 index 0000000000..4c6f52429c --- /dev/null +++ b/scripts/binary/s3-api-demo.ts @@ -0,0 +1,21 @@ +// ignore TS errors - we are importing from CoffeeScript files +// @ts-ignore +import uploadUtils from './util/upload' +import { s3helpers } from './s3-api' + +const aws = uploadUtils.getS3Credentials() +const s3 = s3helpers.makeS3(aws) + +const bucket = aws.bucket +const key = 'beta/binary/3.3.0/darwin-x64/circle-develop-455046b928c861d4457b2ec5426a51de1fda74fd-102212/cypress.zip' + +/* + a little demo showing how user metadata can be set and read on a S3 object. +*/ + +s3helpers.setUserMetadata(bucket, key, { + user: 'bar' +}, 'application/zip', 'public-read', s3) +.then(() => { + return s3helpers.getUserMetadata(bucket, key, s3) +}).then(console.log, console.error) diff --git a/scripts/binary/s3-api.ts b/scripts/binary/s3-api.ts new file mode 100644 index 0000000000..10f4966882 --- /dev/null +++ b/scripts/binary/s3-api.ts @@ -0,0 +1,161 @@ +const debug = require("debug")("cypress:binary") +import la from 'lazy-ass' +import is from 'check-more-types' +import S3 from 'aws-sdk/clients/s3' +import {prop, values, all} from 'ramda' + +export const hasOnlyStringValues = (o) => + all(is.unemptyString, values(o)) + +/** + * Utility object with methods that deal with S3. + * Useful for testing our code that calls S3 methods. 
+ */ +export const s3helpers = { + makeS3 (aws) { + la(is.unemptyString(aws.key), 'missing aws key') + la(is.unemptyString(aws.secret), 'missing aws secret') + + return new S3({ + accessKeyId: aws.key, + secretAccessKey: aws.secret + }) + }, + + verifyZipFileExists (zipFile: string, bucket: string, s3: S3): Promise { + debug('checking S3 file %s', zipFile) + debug('bucket %s', bucket) + + return new Promise((resolve, reject) => { + s3.headObject({ + Bucket: bucket, + Key: zipFile + }, (err, data) => { + if (err) { + debug('error getting object %s', zipFile) + debug(err) + + return reject(err) + } + debug('s3 data for %s', zipFile) + debug(data) + resolve() + }) + }) + }, + + /** + * Returns list of prefixes in a given folder + */ + listS3Objects (uploadDir: string, bucket: string, s3: S3): Promise { + la(is.unemptyString(uploadDir), 'invalid upload dir', uploadDir) + + return new Promise((resolve, reject) => { + const prefix = uploadDir + '/' + s3.listObjectsV2({ + Bucket: bucket, + Prefix: prefix, + Delimiter: '/' + }, (err, result) => { + if (err) { + return reject(err) + } + + debug('AWS result in %s %s', bucket, prefix) + debug('%o', result) + + resolve(result.CommonPrefixes.map(prop('Prefix'))) + }) + }) + }, + + /** + * Copies one S3 object into another key, metadata is copied. 
+ * For copying a public zip file use content 'application/zip' + * and ACL 'public-read' + */ + copyS3 (sourceKey: string, destinationKey: string, bucket: string, + contentType: S3.ContentType, acl: S3.ObjectCannedACL, + s3: S3): Promise { + return new Promise((resolve, reject) => { + debug('copying %s in bucket %s to %s', sourceKey, bucket, destinationKey) + + const params: S3.CopyObjectRequest = { + Bucket: bucket, + CopySource: bucket + '/' + sourceKey, + Key: destinationKey, + // when we copy S3 object, copy the original metadata, if any + MetadataDirective: 'COPY', + ContentType: contentType, + ACL: acl + } + s3.copyObject(params, (err, data) => { + if (err) { + return reject(err) + } + + debug('result of copying') + debug('%o', data) + resolve(data) + }) + }) + }, + + /** + * Returns user metadata for the given S3 object. + * Note: on S3 when adding user metadata, each key is prefixed with "x-amz-meta-" + * but the returned object has these prefixes stripped. Thus if we set + * a single "x-amz-meta-user: gleb", the resolved object will be simply {user: "gleb"} + */ + getUserMetadata (bucket: string, key: string, s3: S3): Promise { + return new Promise((resolve, reject) => { + debug('getting user metadata from %s %s', bucket, key) + + s3.headObject({ + Bucket: bucket, + Key: key + }, (err, data) => { + if (err) { + return reject(err) + } + + debug('user metadata') + debug('%o', data.Metadata) + resolve(data.Metadata) + }) + }) + }, + + /** + * Setting user metadata can be accomplished with copying the object back onto itself + * with replaced metadata object. 
+ */ + setUserMetadata (bucket: string, key: string, metadata: S3.Metadata, + contentType: S3.ContentType, acl: S3.ObjectCannedACL, s3: S3): Promise { + la(hasOnlyStringValues(metadata), + 'metadata object can only have string values', metadata) + + return new Promise((resolve, reject) => { + debug('setting metadata to %o for %s %s', metadata, bucket, key) + + const params: S3.CopyObjectRequest = { + Bucket: bucket, + CopySource: bucket + '/' + key, + Key: key, + Metadata: metadata, + MetadataDirective: 'REPLACE', + ContentType: contentType, + ACL: acl + } + s3.copyObject(params, (err, data) => { + if (err) { + return reject(err) + } + + debug('result of copying') + debug('%o', data) + resolve(data) + }) + }) + } +} diff --git a/scripts/binary/upload-unique-binary.coffee b/scripts/binary/upload-unique-binary.coffee index d47287c538..39361cddfd 100644 --- a/scripts/binary/upload-unique-binary.coffee +++ b/scripts/binary/upload-unique-binary.coffee @@ -10,9 +10,11 @@ debug = require('gulp-debug') gulp = require("gulp") human = require("human-interval") R = require("ramda") +hasha = require('hasha') konfig = require('../binary/get-config')() uploadUtils = require("./util/upload") +s3helpers = require("./s3-api").s3helpers # we zip the binary on every platform and upload under same name binaryExtension = ".zip" @@ -70,6 +72,8 @@ uploadFile = (options) -> headers = {} headers["Cache-Control"] = "no-cache" + key = null + gulp.src(options.file) .pipe rename (p) => p.basename = path.basename(uploadFileName, binaryExtension) @@ -77,12 +81,37 @@ uploadFile = (options) -> console.log("renaming upload to", p.dirname, p.basename) la(check.unemptyString(p.basename), "missing basename") la(check.unemptyString(p.dirname), "missing dirname") + key = p.dirname + uploadFileName p .pipe debug() .pipe publisher.publish(headers) .pipe awspublish.reporter() .on "error", reject - .on "end", resolve + .on "end", () -> resolve(key) + +setChecksum = (filename, key) => + console.log('setting 
checksum for file %s', filename) + console.log('on s3 object %s', key) + + la(check.unemptyString(filename), 'expected filename', filename) + la(check.unemptyString(key), 'expected uploaded S3 key', key) + + checksum = hasha.fromFileSync(filename) + size = fs.statSync(filename).size + console.log('SHA256 checksum %s', checksum) + console.log('size', size) + + aws = uploadUtils.getS3Credentials() + s3 = s3helpers.makeS3(aws) + # S3 object metadata can only have string values + metadata = { + checksum, + size: String(size) + } + # by default s3.copyObject does not preserve ACL when copying + # thus we need to reset it for our public files + s3helpers.setUserMetadata(aws.bucket, key, metadata, + 'application/zip', 'public-read', s3) uploadUniqueBinary = (args = []) -> options = minimist(args, { @@ -114,6 +143,8 @@ uploadUniqueBinary = (args = []) -> options.platformArch = uploadUtils.getUploadNameByOsAndArch(platform) uploadFile(options) + .then (key) -> + setChecksum(options.file, key) .then () -> cdnUrl = getCDN({ version: options.version, diff --git a/scripts/binary/upload.coffee b/scripts/binary/upload.coffee index f22d41d196..67407cd878 100644 --- a/scripts/binary/upload.coffee +++ b/scripts/binary/upload.coffee @@ -1,6 +1,6 @@ -awspublish = require('gulp-awspublish') -rename = require('gulp-rename') -debug = require('gulp-debug') +awspublish = require('gulp-awspublish') +rename = require('gulp-rename') +debug = require('gulp-debug') fs = require("fs-extra") cp = require("child_process") path = require("path") diff --git a/scripts/unit/binary/move-binaries-spec.js b/scripts/unit/binary/move-binaries-spec.js index d89dac60eb..f53cd81cd0 100644 --- a/scripts/unit/binary/move-binaries-spec.js +++ b/scripts/unit/binary/move-binaries-spec.js @@ -2,6 +2,7 @@ const snapshot = require('snap-shot-it') const la = require('lazy-ass') const is = require('check-more-types') const uploadUtils = require('../../binary/util/upload') +const s3helpers = 
require('../../binary/s3-api').s3helpers /* eslint-env mocha */ /* global sinon */ @@ -120,14 +121,14 @@ describe('move-binaries', () => { // fake S3 api const s3 = {} - sinon.stub(moveBinaries.s3helpers, 'makeS3').returns(s3) + sinon.stub(s3helpers, 'makeS3').returns(s3) sinon - .stub(moveBinaries.s3helpers, 'listS3Objects') + .stub(s3helpers, 'listS3Objects') .withArgs('beta/binary/3.3.0/darwin-x64', aws.bucket) .resolves(darwinBuilds) sinon - .stub(moveBinaries.s3helpers, 'verifyZipFileExists') + .stub(s3helpers, 'verifyZipFileExists') .withArgs(`${latestMacBuild}cypress.zip`, aws.bucket) .resolves() @@ -135,7 +136,7 @@ describe('move-binaries', () => { sinon.stub(moveBinaries.prompts, 'shouldCopy').resolves() sinon - .stub(moveBinaries.s3helpers, 'copyS3') + .stub(s3helpers, 'copyS3') .withArgs( `${latestMacBuild}cypress.zip`, 'desktop/3.3.0/darwin-x64/cypress.zip', diff --git a/scripts/unit/binary/s3-api-spec.js b/scripts/unit/binary/s3-api-spec.js new file mode 100644 index 0000000000..42e35e2296 --- /dev/null +++ b/scripts/unit/binary/s3-api-spec.js @@ -0,0 +1,26 @@ +const la = require('lazy-ass') + +/* eslint-env mocha */ +describe('s3-api', () => { + context('hasOnlyStringValues', () => { + const { hasOnlyStringValues } = require('../../binary/s3-api') + + it('returns true if object has only string values', () => { + const o = { + foo: 'bar', + baz: 'baz', + } + + la(hasOnlyStringValues(o)) + }) + + it('returns false if object has non-string value', () => { + const o = { + foo: 'bar', + baz: 42, + } + + la(!hasOnlyStringValues(o)) + }) + }) +})