mattermost-desktop/e2e/utils/artifacts.js
Trivikram Kamat a63bdd7cf2
chore: migrate AWS SDK for JavaScript v2 APIs to v3 (#2904)
* chore: convert s3.promise callback to async-await

* chore: run codemod on e2e/utils/artifacts.js

* chore: format

* chore(deps): replace AWS SDK for JavaScript v2 with v3

* chore: bump lockfile

* chore: changes to bracket spacing in imports

Co-authored-by: Devin Binnie <52460000+devinbinnie@users.noreply.github.com>
2023-11-16 16:39:55 -05:00
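
The commit above swaps the monolithic aws-sdk v2 package for the modular v3 clients used in this file. A rough before/after sketch of that API change follows; the v2 code is not shown here, so the pre-migration shape is assumed, and bucket, key, and body are placeholders.

// AWS SDK v2 (assumed pre-migration shape): request object turned into a promise via .promise()
const AWS = require('aws-sdk');

async function uploadV2(bucket, key, body) {
    const client = new AWS.S3();
    return client.upload({Bucket: bucket, Key: key, Body: body}).promise();
}

// AWS SDK v3 (what this file now uses): modular client plus the lib-storage Upload helper
const {S3} = require('@aws-sdk/client-s3');
const {Upload} = require('@aws-sdk/lib-storage');

async function uploadV3(bucket, key, body) {
    const client = new S3({});
    return new Upload({client, params: {Bucket: bucket, Key: key, Body: body}}).done();
}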

// Copyright (c) 2016-present Mattermost, Inc. All Rights Reserved.
// See LICENSE.txt for license information.

/* eslint-disable no-console,consistent-return */

const fs = require('fs');
const path = require('path');

const async = require('async');
const {Upload} = require('@aws-sdk/lib-storage');
const {S3} = require('@aws-sdk/client-s3');
const mime = require('mime-types');
const readdir = require('recursive-readdir');

const {MOCHAWESOME_REPORT_DIR} = require('./constants');

require('dotenv').config();

const {
    AWS_S3_BUCKET,
    AWS_ACCESS_KEY_ID,
    AWS_SECRET_ACCESS_KEY,
    BUILD_ID,
    BRANCH,
    BUILD_TAG,
} = process.env;

const s3 = new S3({
    credentials: {
        accessKeyId: AWS_ACCESS_KEY_ID,
        secretAccessKey: AWS_SECRET_ACCESS_KEY,
    },
});

// Recursively list report files; returns an empty list when the directory does not exist.
function getFiles(dirPath) {
    return fs.existsSync(dirPath) ? readdir(dirPath) : [];
}

async function saveArtifacts() {
    if (!AWS_S3_BUCKET || !AWS_ACCESS_KEY_ID || !AWS_SECRET_ACCESS_KEY) {
        console.log('No AWS credentials found. Test artifacts not uploaded to S3.');
        return;
    }

    // Build the destination folder name from CI metadata, replacing dots so it stays path-friendly.
    const s3Folder = `${BUILD_ID}-${BRANCH}-${BUILD_TAG}`.replace(/\./g, '-');
    const uploadPath = path.resolve(__dirname, `../../${MOCHAWESOME_REPORT_DIR}`);
    const filesToUpload = await getFiles(uploadPath);

    return new Promise((resolve, reject) => {
        // Upload at most 10 files concurrently.
        async.eachOfLimit(
            filesToUpload,
            10,
            async.asyncify(async (file) => {
                // Map the local path to an S3 key, normalizing Windows path separators.
                const Key = file.replace(uploadPath, s3Folder).replaceAll('\\', '/');
                const contentType = mime.lookup(file);
                const charset = mime.charset(contentType);

                try {
                    await new Upload({
                        client: s3,
                        params: {
                            Key,
                            Bucket: AWS_S3_BUCKET,
                            Body: fs.readFileSync(file),
                            ContentType: `${contentType}${charset ? '; charset=' + charset : ''}`,
                        },
                    }).done();
                    return {success: true};
                } catch (e) {
                    console.log('Failed to upload artifact:', file);
                    throw new Error(e);
                }
            }),
            (err) => {
                if (err) {
                    console.log('Failed to upload artifacts');
                    return reject(new Error(err));
                }

                const reportLink = `https://${AWS_S3_BUCKET}.s3.amazonaws.com/${s3Folder}/mochawesome.html`;
                resolve({success: true, reportLink});
            },
        );
    });
}

module.exports = {saveArtifacts};
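
On success, saveArtifacts resolves with the mochawesome report link. A hypothetical caller (not part of the file above; the relative require path assumes it sits next to artifacts.js) could consume it like this:

// Hypothetical post-test step that logs the uploaded report link.
const {saveArtifacts} = require('./artifacts');

saveArtifacts().then((result) => {
    if (result && result.reportLink) {
        console.log('Test report:', result.reportLink);
    }
}).catch((error) => {
    console.error('Failed to save artifacts:', error);
});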