obdfcascrape/debuglogs.js

// https://github.com/markcallen/snssqs
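// Archives local debug output and artefact files into timestamped zips and uploads them to S3.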
const AWS = require('aws-sdk');
const async = require('async');
const fs = require('fs-extra');
const path = require('path');
const archiver = require('archiver-promise');
const logger = require('log4js').getLogger('DebugUploader');
const dateFormat = require('dateformat');
const { promisify } = require('util');
const readFileAsync = promisify(fs.readFile);
require('dotenv').config({
  'path': `${__dirname}/.env`
});
logger.level = process.env.LOGGER_LEVEL || 'debug';
// configure AWS
AWS.config.update({
  'accessKeyId': process.env.AWS_ACCESS_KEY_ID,
  'secretAccessKey': process.env.AWS_SECRET_ACCESS_KEY,
  'region': process.env.AWS_REGION || 'eu-west-1'
});
const s3 = new AWS.S3();
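// Ensure the destination directory exists, creating it (and any parents) if necessary.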
async function _createDirectory(destPath = null) {
  if (!fs.existsSync(destPath)) {
    fs.ensureDirSync(destPath);
  }
  return destPath;
}
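// Zip either the contents of a directory or the files matched by a glob pattern into `filename`.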
async function _createArchive(destPath = null, filename = null, glob = false) {
  return new Promise((resolve, reject) => {
    if (!destPath || !filename) {
      const e = new Error('Missing paths');
      logger.error(e);
      return reject(e);
    }
    const archive = archiver(filename, {
      'zlib': { 'level': 9 } // Sets the compression level.
    });
    if (glob) {
      // Match every file beneath destPath rather than the bare directory path.
      archive.glob(`${destPath}/**`);
    } else {
      archive.directory(`${destPath}/`);
    }
    archive.finalize().then(function() {
      logger.debug('Archive finished');
      resolve();
    }).catch(reject);
  });
}
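// Read the finished archive from disk and upload it to the configured S3 bucket.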
async function _upload(filename) {
  logger.info('^^^ UPLOADING ^^^');
  const filePath = path.parse(filename);
  await readFileAsync(filename).then(async (data) => {
    const body = Buffer.from(data);
    const s3Obj = {
      'Bucket': process.env.S3_BUCKET,
      'Key': filePath.base,
      'Body': body,
      'ACL': 'public-read'
    };
    return await s3.upload(s3Obj).promise()
      .then((data) => {
        logger.info('Successfully uploaded file.');
        return data;
      })
      .catch((err) => {
        logger.error(err);
        return err;
      });
  }).catch((err) => {
    logger.error(err);
  });
}
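// Build timestamped zips of the debug and artefacts/PL directories, then push both to S3.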
async function _archive() {
  logger.info('>-< ARCHIVING >-<');
  try {
    const now = new Date();
    await _createDirectory('dist');
    const timestamp = dateFormat(now, process.env.FILE_DATE_FORMAT || 'yyyymmdd');
    const filename = `dist/debug-${process.env.HOSTNAME}-${timestamp}.zip`;
    const eFilename = `dist/pl-${process.env.HOSTNAME}-${timestamp}.zip`;
    await _createArchive('debug', filename);
    await _createArchive('artefacts/PL', eFilename, true);
    await _upload(filename);
    await _upload(eFilename);
  }
  catch (e) {
    logger.error(e);
  }
}
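// Entry point: log startup and run the archive-and-upload job once.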
logger.info('Debug Archiver');
async.series([_archive]);