On Azure DevOps, upload Windows crash dumps to S3 on release branches

On release branches, we can't publish crash dumps as build artifacts because
they may leak secret environment variables. So instead we upload them to our
S3 bucket with a 'private' ACL. They can then be manually retrieved via the
AWS CLI with our private credentials.
This commit is contained in:
Nathan Sobo
2019-04-18 10:25:50 -06:00
parent b03d49973e
commit 4bc43eb358
3 changed files with 52 additions and 2 deletions

View File

@@ -4,7 +4,7 @@ const fs = require('fs')
const path = require('path')
const aws = require('aws-sdk')
module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets) {
module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets, acl = 'public-read') {
const s3 = new aws.S3({
accessKeyId: s3Key,
secretAccessKey: s3Secret,
@@ -37,7 +37,7 @@ module.exports = function (s3Key, s3Secret, s3Bucket, directory, assets) {
console.info(`Uploading ${assetPath}`)
const params = {
Key: `${directory}${path.basename(assetPath)}`,
ACL: 'public-read',
ACL: acl,
Body: fs.createReadStream(assetPath)
}

View File

@@ -116,6 +116,17 @@ jobs:
displayName: Publish crash reports on non-release branch
condition: and(failed(), eq(variables['IsReleaseBranch'], 'false'))
- script: >
node $(Build.SourcesDirectory)\script\vsts\upload-crash-reports.js --crash-report-path "%ARTIFACT_STAGING_DIR%\crash-reports" --s3-path "vsts-artifacts/%BUILD_ID%/"
env:
ATOM_RELEASES_S3_KEY: $(ATOM_RELEASES_S3_KEY)
ATOM_RELEASES_S3_SECRET: $(ATOM_RELEASES_S3_SECRET)
ATOM_RELEASES_S3_BUCKET: $(ATOM_RELEASES_S3_BUCKET)
ARTIFACT_STAGING_DIR: $(Build.ArtifactStagingDirectory)
BUILD_ID: $(Build.BuildId)
displayName: Upload crash reports to S3 on release branch
condition: and(failed(), ne(variables['ATOM_RELEASES_S3_KEY'], ''))
- task: PublishBuildArtifacts@1
inputs:
PathtoPublish: $(Build.SourcesDirectory)/out/atom-x64-windows.zip

View File

@@ -0,0 +1,39 @@
'use strict'
const glob = require('glob')
const uploadToS3 = require('./lib/upload-to-s3')
const yargs = require('yargs')
const argv = yargs
.usage('Usage: $0 [options]')
.help('help')
.describe('crash-report-path', 'The local path of a directory containing crash reports to upload')
.describe('s3-path', 'Indicates the S3 path in which the crash reports should be uploaded')
.wrap(yargs.terminalWidth())
.argv
async function uploadCrashReports () {
const crashesPath = argv.crashReportPath
const crashes = glob.sync('/*.dmp', { root: crashesPath })
const bucketPath = argv.s3Path
if (crashes && crashes.length > 0) {
console.log(`Uploading ${crashes.length} private crash reports to S3 under '${bucketPath}'`)
await uploadToS3(
process.env.ATOM_RELEASES_S3_KEY,
process.env.ATOM_RELEASES_S3_SECRET,
process.env.ATOM_RELEASES_S3_BUCKET,
bucketPath,
crashes,
'private'
)
}
}
// Wrap the call the async function and catch errors from its promise because
// Node.js doesn't yet allow use of await at the script scope
uploadCrashReports().catch(err => {
console.error('An error occurred while uploading crash reports:\n\n', err)
process.exit(1)
})