From 4bc43eb358874aeb6881f89e9d555de3f2c307c6 Mon Sep 17 00:00:00 2001
From: Nathan Sobo
Date: Thu, 18 Apr 2019 10:25:50 -0600
Subject: [PATCH] On Azure DevOps, upload Windows crash dumps to S3 on release
 branches

On release branches, we can't publish crash dumps as build artifacts because
they would leak secret environment variables. So instead we upload them to
our S3 bucket with a 'private' ACL. They can then be retrieved manually via
the AWS CLI with our private credentials.
---
 script/vsts/lib/upload-to-s3.js     |  4 +--
 script/vsts/platforms/windows.yml   | 11 ++++++++
 script/vsts/upload-crash-reports.js | 39 +++++++++++++++++++++++++++++
 3 files changed, 52 insertions(+), 2 deletions(-)
 create mode 100644 script/vsts/upload-crash-reports.js

diff --git a/script/vsts/lib/upload-to-s3.js b/script/vsts/lib/upload-to-s3.js
index 91c50b384..fde60210c 100644
--- a/script/vsts/lib/upload-to-s3.js
+++ b/script/vsts/lib/upload-to-s3.js
@@ -4,7 +4,7 @@ const fs = require('fs')
 const path = require('path')
 const aws = require('aws-sdk')
 
-module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets) {
+module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets, acl = 'public-read') {
   const s3 = new aws.S3({
     accessKeyId: s3Key,
     secretAccessKey: s3Secret,
@@ -37,7 +37,7 @@ module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets) {
     console.info(`Uploading ${assetPath}`)
     const params = {
       Key: `${directory}${path.basename(assetPath)}`,
-      ACL: 'public-read',
+      ACL: acl,
       Body: fs.createReadStream(assetPath)
     }
 
diff --git a/script/vsts/platforms/windows.yml b/script/vsts/platforms/windows.yml
index 515daff90..8acedc752 100644
--- a/script/vsts/platforms/windows.yml
+++ b/script/vsts/platforms/windows.yml
@@ -116,6 +116,17 @@ jobs:
     displayName: Publish crash reports on non-release branch
     condition: and(failed(), eq(variables['IsReleaseBranch'], 'false'))
 
+  - script: >
+      node $(Build.SourcesDirectory)\script\vsts\upload-crash-reports.js --crash-report-path "%ARTIFACT_STAGING_DIR%\crash-reports" --s3-path "vsts-artifacts/%BUILD_ID%/"
+    env:
+      ATOM_RELEASES_S3_KEY: $(ATOM_RELEASES_S3_KEY)
+      ATOM_RELEASES_S3_SECRET: $(ATOM_RELEASES_S3_SECRET)
+      ATOM_RELEASES_S3_BUCKET: $(ATOM_RELEASES_S3_BUCKET)
+      ARTIFACT_STAGING_DIR: $(Build.ArtifactStagingDirectory)
+      BUILD_ID: $(Build.BuildId)
+    displayName: Upload crash reports to S3 on release branch
+    condition: and(failed(), ne(variables['ATOM_RELEASES_S3_KEY'], ''))
+
   - task: PublishBuildArtifacts@1
     inputs:
       PathtoPublish: $(Build.SourcesDirectory)/out/atom-x64-windows.zip

diff --git a/script/vsts/upload-crash-reports.js b/script/vsts/upload-crash-reports.js
new file mode 100644
index 000000000..0861afa1c
--- /dev/null
+++ b/script/vsts/upload-crash-reports.js
@@ -0,0 +1,39 @@
+'use strict'
+
+const glob = require('glob')
+const uploadToS3 = require('./lib/upload-to-s3')
+
+const yargs = require('yargs')
+const argv = yargs
+  .usage('Usage: $0 [options]')
+  .help('help')
+  .describe('crash-report-path', 'The local path of a directory containing crash reports to upload')
+  .describe('s3-path', 'Indicates the S3 path in which the crash reports should be uploaded')
+  .wrap(yargs.terminalWidth())
+  .argv
+
+async function uploadCrashReports () {
+  const crashesPath = argv.crashReportPath
+  const crashes = glob.sync('/*.dmp', { root: crashesPath })
+  const bucketPath = argv.s3Path
+
+  if (crashes && crashes.length > 0) {
+    console.log(`Uploading ${crashes.length} private crash reports to S3 under '${bucketPath}'`)
+
+    await uploadToS3(
+      process.env.ATOM_RELEASES_S3_KEY,
+      process.env.ATOM_RELEASES_S3_SECRET,
+      process.env.ATOM_RELEASES_S3_BUCKET,
+      bucketPath,
+      crashes,
+      'private'
+    )
+  }
+}
+
+// Wrap the call to the async function and catch errors from its promise because
+// Node.js doesn't yet allow use of await at the script scope
+uploadCrashReports().catch(err => {
+  console.error('An error occurred while uploading crash reports:\n\n', err)
+  process.exit(1)
+})
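
Note: because the dumps are uploaded with a 'private' ACL, they have to be fetched with the AWS CLI using credentials that can read the bucket. A minimal sketch, assuming standard `aws s3` commands; the bucket name and build id below are placeholders, with the real values coming from ATOM_RELEASES_S3_BUCKET and the --s3-path argument passed in windows.yml:

    # List the crash dumps uploaded for a given build (placeholder bucket and build id)
    aws s3 ls s3://<ATOM_RELEASES_S3_BUCKET>/vsts-artifacts/<BUILD_ID>/

    # Download them locally; the configured credentials must have read access to the private objects
    aws s3 cp s3://<ATOM_RELEASES_S3_BUCKET>/vsts-artifacts/<BUILD_ID>/ ./crash-reports --recursive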