/**
|
|
* @fileOverview Test morgan (activity) logging to mongodb
|
|
*/
|
|
/* eslint max-nested-callbacks: ["error", 7] */
|
|
'use strict';
|
|
|
|
const chai = require('chai');
|
|
const sinon = require('sinon');
|
|
const sinonChai = require('sinon-chai');
|
|
const {stdout, stderr} = require('test-console');
|
|
const rewire = require('rewire');
|
|
|
|
const initMorgan = rewire('../init_morgan');
|
|
const mainDBPStub = initMorgan.__get__('mainDBP');
|
|
|
|
const expect = chai.expect;
|
|
const sandbox = sinon.createSandbox();
|
|
chai.use(sinonChai);
|
|
|
|
// Sentinel collection handle; the tests only check it is passed through
// unchanged to the stubbed addMany call.
const FAKE_ACTIVITY_LOG_COLLECTION = 'FAKE AL COLLECTION';

// Fake morgan log lines written to the stream under test.
const FAKE_MORGAN_RECORD = 'Some morgan string';
const FAKE_MORGAN_RECORD_2 = 'A different message';
const FAKE_MORGAN_RECORD_3 = 'Another different message';

/**
 * Test responses as if from Mongo
 * (shape mirrors the driver's bulk-write result: ok flag + count)
 */
const MONGO_SUCCESS = {
  result: {
    ok: 1,
    n: 1 // eslint-disable-line id-length
  }
};

/**
 * Expected results
 * Each stream write should become a db document whose `request`
 * field holds the raw morgan record string.
 */
const EXPECTED_DB_ENTRY = {
  request: FAKE_MORGAN_RECORD
};
const EXPECTED_DB_ENTRY_2 = {
  request: FAKE_MORGAN_RECORD_2
};
const EXPECTED_DB_ENTRY_3 = {
  request: FAKE_MORGAN_RECORD_3
};
|
|
|
|
/**
|
|
* The tests
|
|
*/
|
|
describe('Morgan mongo stream', () => {
|
|
let streamUnderTest;
|
|
let inspectStdOut;
|
|
let inspectStdErr;
|
|
let output;
|
|
|
|
before(() => {
  /**
   * Stash the real mainDB so `after` can put it back, then install a
   * lightweight fake that reports the database online and exposes a
   * sentinel activity-log collection handle.
   */
  mainDBPStub._mainDB = mainDBPStub.mainDB;

  const fakeMainDB = {
    dbOnline: 1,
    collectionActivityLog: FAKE_ACTIVITY_LOG_COLLECTION
  };

  mainDBPStub.mainDB = fakeMainDB;
});
|
|
|
|
after(() => {
  /**
   * Reinstate the original mainDB that `before` stashed on `_mainDB`.
   */
  mainDBPStub.mainDB = mainDBPStub._mainDB;
});
|
|
|
|
beforeEach(() => {
  /**
   * Every test gets a brand-new stream under test, plus a fresh
   * addMany stub that resolves as if Mongo accepted the write.
   * (Stream is created before the stub, matching production order.)
   */
  streamUnderTest = initMorgan.writeableStream();

  sandbox
    .stub(mainDBPStub, 'addMany')
    .resolves(MONGO_SUCCESS);
});
|
|
|
|
// Undo all sandbox stubs so the next test starts clean.
afterEach(() => sandbox.restore());
|
|
|
|
it('is an object-mode stream', () => {
  // Morgan hands the stream whole record strings, so the writable
  // must be in object mode rather than byte mode.
  const {objectMode} = streamUnderTest._writableState;

  return expect(objectMode).to.be.true;
});
|
|
|
|
describe('with an online database', () => {
  beforeEach(() => {
    // Push a single record through the stream while capturing stdout.
    output = stdout.inspectSync(() => streamUnderTest.write(FAKE_MORGAN_RECORD));
  });

  it('logs the record to stdout', () => {
    return expect(output)
      .to.have.lengthOf(1)
      .to.include(FAKE_MORGAN_RECORD + '\n');
  });

  it('writes stream records to the database', () => {
    // One addMany call: (collection, [matching record], options)
    const expectedRecords = [sinon.match(EXPECTED_DB_ENTRY)];

    return expect(mainDBPStub.addMany).to.have.been
      .calledOnce
      .calledWithMatch(FAKE_ACTIVITY_LOG_COLLECTION, expectedRecords, {});
  });

  it('adds timestamp to the record stored in the database', () => {
    // The stored document must carry a Date-valued `timestamp` field.
    const hasDateTimestamp = sinon.match({timestamp: sinon.match.date});

    return expect(mainDBPStub.addMany).to.have.been
      .calledOnce
      .calledWithMatch(FAKE_ACTIVITY_LOG_COLLECTION, [hasDateTimestamp], {});
  });
});
|
|
|
|
//
// With dbOnline = 0 the stream should keep echoing records to stdout,
// skip the database entirely, and buffer records (up to 1000, per the
// assertions below) so they can be flushed when the db comes back.
//
describe('with an offline database', () => {
  beforeEach(() => {
    // Take the fake db offline, then write one record (captured on stdout).
    mainDBPStub.mainDB.dbOnline = 0;
    output = stdout.inspectSync(() => streamUnderTest.write(FAKE_MORGAN_RECORD));
  });

  afterEach(() => {
    // Put the fake db back online for the next test.
    mainDBPStub.mainDB.dbOnline = 1;
  });

  it('logs the record to stdout', () => {
    return expect(output)
      .to.have.lengthOf(1)
      .to.include(FAKE_MORGAN_RECORD + '\n');
  });

  it('does not attempt to stream records to the database', () => {
    return expect(mainDBPStub.addMany).to.not.have.been.called;
  });

  describe('that stays offline for 1000 records', () => {
    beforeEach(() => {
      // Write 999 more records ('0'..'998'); with the 1 from the parent
      // beforeEach that fills the buffer to exactly 1000.
      inspectStdErr = stderr.inspect();
      inspectStdOut = stdout.inspect();
      for (let i = 0; i < 999; ++i) {
        streamUnderTest.write(String(i));
      }
      inspectStdErr.restore();
      inspectStdOut.restore();
    });

    it('still logs the incoming records to stdout', () => {
      return expect(inspectStdOut.output)
        .to.have.lengthOf(999) // plus the 1 in the parent section = 1000
        .to.include('0\n')
        .to.include('345\n')
        .to.include('998\n');
    });

    it('doesn\'t report any errors', () => {
      // Buffer limit not yet exceeded, so nothing on stderr.
      return expect(inspectStdErr.output)
        .to.have.lengthOf(0);
    });

    describe('then recovers', () => {
      beforeEach(() => {
        // Bring the db back and write one more record; this write should
        // trigger the flush of the entire buffer plus the new record.
        mainDBPStub.mainDB.dbOnline = 1;
        output = stdout.inspectSync(() => streamUnderTest.write(FAKE_MORGAN_RECORD_2));
      });

      afterEach(() => {
        // Restore this suite's offline state for sibling tests.
        mainDBPStub.mainDB.dbOnline = 0;
      });

      it('logs the record to stdout', () => {
        return expect(output)
          .to.include(FAKE_MORGAN_RECORD_2 + '\n');
      });

      it('writes all records (including ones cached when offline) to the database', () => {
        // Expected flush order: first buffered record, the 999 numbered
        // ones, then the record that triggered the flush — 1001 in total.
        const expectedArray = [
          sinon.match(EXPECTED_DB_ENTRY)
        ];
        for (let i = 0; i < 999; ++i) {
          expectedArray.push(sinon.match({
            request: String(i)
          }));
        }
        expectedArray.push(sinon.match(EXPECTED_DB_ENTRY_2));

        return expect(mainDBPStub.addMany).to.have.been
          .calledOnce
          .calledWithMatch(
            FAKE_ACTIVITY_LOG_COLLECTION,
            expectedArray
          );
      });
    });

    describe('then receives a 1001st record with offline db', () => {
      beforeEach(() => {
        // Buffer already holds 1000 records; this one cannot be cached.
        inspectStdErr = stderr.inspect();
        inspectStdOut = stdout.inspect();
        streamUnderTest.write(FAKE_MORGAN_RECORD_2);
        inspectStdErr.restore();
        inspectStdOut.restore();
      });

      it('still logs the incoming record to stdout', () => {
        return expect(inspectStdOut.output)
          .to.have.lengthOf(1)
          .to.include(FAKE_MORGAN_RECORD_2 + '\n');
      });

      it('reports a data loss warning on stderr', () => {
        return expect(inspectStdErr.output)
          .to.have.lengthOf(1)
          .to.include('Activity log buffer exceeded. MESSAGES WILL BE LOST!\n');
      });

      describe('then recovers', () => {
        beforeEach(() => {
          mainDBPStub.mainDB.dbOnline = 1;
          output = stdout.inspectSync(() => streamUnderTest.write(FAKE_MORGAN_RECORD_3));
        });

        afterEach(() => {
          mainDBPStub.mainDB.dbOnline = 0;
        });

        it('logs the record to stdout', () => {
          return expect(output)
            .to.include(FAKE_MORGAN_RECORD_3 + '\n');
        });

        it('writes all cached records and the new record, but not the 1001st one, to the database', () => {
          // FAKE_MORGAN_RECORD_2 overflowed the buffer and was dropped,
          // so the flush is the original 1000 plus the new record only.
          const expectedArray = [
            sinon.match(EXPECTED_DB_ENTRY)
          ];
          for (let i = 0; i < 999; ++i) {
            expectedArray.push(sinon.match({
              request: String(i)
            }));
          }
          expectedArray.push(sinon.match(EXPECTED_DB_ENTRY_3));

          return expect(mainDBPStub.addMany).to.have.been
            .calledOnce
            .calledWithMatch(
              FAKE_ACTIVITY_LOG_COLLECTION,
              expectedArray
            );
        });
      });
    });
  });
});
|
|
|
|
//
// Records written while an addMany call is still in flight should be
// buffered and sent with the next flush rather than firing a second
// concurrent db call.
//
describe('with a pending database write', () => {
  let resolved = false;   // guards double-settling the controlled promise
  let addManyResolve;     // settles the in-flight addMany as success
  let addManyReject;      // settles the in-flight addMany as failure

  beforeEach(() => {
    resolved = false;

    //
    // Use a promise we control as the fake response to the mainDB call
    // so that we can check what happens with requests that happen while
    // a db call is pending.
    //
    const addManyP = new Promise((resolve, reject) => {
      addManyResolve = resolve;
      addManyReject = reject;
    });
    mainDBPStub.addMany.returns(addManyP);

    inspectStdOut = stdout.inspect();

    // First request to trigger the pending write
    streamUnderTest.write(FAKE_MORGAN_RECORD);

    // Second request while the first one is pending
    streamUnderTest.write(FAKE_MORGAN_RECORD_2);

    inspectStdOut.restore();
  });

  afterEach(() => {
    // Run the timer to ensure all requests are resolved before the next test
    if (!resolved) {
      addManyResolve(MONGO_SUCCESS);
      resolved = true;
    }
  });

  it('logs the pending and subsequent record to stdout', () => {
    // stdout logging must not wait on the db round-trip.
    return expect(inspectStdOut.output)
      .to.have.lengthOf(2)
      .to.include(FAKE_MORGAN_RECORD + '\n')
      .to.include(FAKE_MORGAN_RECORD_2 + '\n');
  });

  it('does not attempt to stream the subsequent record to the database', () => {
    return expect(mainDBPStub.addMany).to.have.been
      .calledOnce; // only the pending one
  });

  describe('with another write after the pending one resolves successfully', () => {
    it('logs the one buffered during the pending, and the new one', (cb) => {
      // Advance the timer to complete the previous request
      addManyResolve(MONGO_SUCCESS);
      resolved = true;

      //
      // Continue after a timeout to allow the thread of the
      // pending DB request to complete before we continue.
      //
      setTimeout(() => {
        // New record to trigger the sending of the cached one + the new one
        stdout.inspectSync(() => streamUnderTest.write(FAKE_MORGAN_RECORD_3));

        // The first record was written successfully, so the second call
        // carries only the buffered record and the fresh one.
        expect(mainDBPStub.addMany.secondCall).to.have.been
          .calledWithMatch(
            FAKE_ACTIVITY_LOG_COLLECTION,
            [sinon.match(EXPECTED_DB_ENTRY_2), sinon.match(EXPECTED_DB_ENTRY_3)]
          );
        cb();
      });
    });
  });

  describe('with another write after the pending one resolves unsuccessfully', () => {
    it('logs the pending record that failed + the one bufferd during pending, and the new one', (cb) => {
      // Fail the in-flight write; its record should be re-queued.
      addManyReject('Some mongo error');
      resolved = true;

      //
      // Continue after a timeout to allow the thread of the
      // pending DB request to complete before we continue.
      //
      setTimeout(() => {
        // New record to trigger the sending of the cached one + the new one
        stdout.inspectSync(() => streamUnderTest.write(FAKE_MORGAN_RECORD_3));

        // Failed record is retried, so all three appear in call two.
        expect(mainDBPStub.addMany.secondCall).to.have.been
          .calledWithMatch(
            FAKE_ACTIVITY_LOG_COLLECTION,
            [
              sinon.match(EXPECTED_DB_ENTRY),
              sinon.match(EXPECTED_DB_ENTRY_2),
              sinon.match(EXPECTED_DB_ENTRY_3)
            ]
          );
        cb();
      }, 0);
    });
  });
});
|
|
});
|