
Commit

Merge pull request #1234 from ParsePlatform/flovilmart.winston
Better logging with winston
drew-gross committed Mar 29, 2016
2 parents 9dd8609 + 247a06f commit ef08dcf
Showing 15 changed files with 180 additions and 255 deletions.
1 change: 1 addition & 0 deletions .gitignore
@@ -1,5 +1,6 @@
# Logs
logs
test_logs
*.log

# Runtime data
1 change: 1 addition & 0 deletions package.json
@@ -41,6 +41,7 @@
"request": "^2.65.0",
"tv4": "^1.2.7",
"winston": "^2.1.1",
"winston-daily-rotate-file": "^1.0.1",
"ws": "^1.0.1"
},
"devDependencies": {
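
The new winston-daily-rotate-file dependency takes over the date-based file rotation that FileLoggerAdapter.js used to manage by hand (the _renewTransports/_verifyTransports helpers removed further down). A minimal sketch of wiring such a transport into a winston 2.x logger; the folder, file name, and transport name below are illustrative assumptions, not parse-server's actual configuration:

var winston = require('winston');
var DailyRotateFile = require('winston-daily-rotate-file');

// Sketch only: names and paths are assumed, not taken from parse-server.
var logger = new (winston.Logger)({
  exitOnError: false,
  transports: [
    new DailyRotateFile({
      name: 'parse-server',                  // a named transport can be queried individually
      filename: './logs/parse-server.info',  // the date suffix is appended by the transport
      level: 'info'
    })
  ]
});

logger.info('testing info logs');

By default the transport appends the current date to the file name and starts a new file each day, which is the rollover behaviour the removed helpers implemented manually.
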
32 changes: 1 addition & 31 deletions spec/FileLoggerAdapter.spec.js
@@ -1,35 +1,10 @@
var FileLoggerAdapter = require('../src/Adapters/Logger/FileLoggerAdapter').FileLoggerAdapter;
var Parse = require('parse/node').Parse;
var request = require('request');
var fs = require('fs');

var LOGS_FOLDER = './test_logs/';

var deleteFolderRecursive = function(path) {
if( fs.existsSync(path) ) {
fs.readdirSync(path).forEach(function(file,index){
var curPath = path + "/" + file;
if(fs.lstatSync(curPath).isDirectory()) { // recurse
deleteFolderRecursive(curPath);
} else { // delete file
fs.unlinkSync(curPath);
}
});
fs.rmdirSync(path);
}
};

describe('info logs', () => {

afterEach((done) => {
deleteFolderRecursive(LOGS_FOLDER);
done();
});

it("Verify INFO logs", (done) => {
var fileLoggerAdapter = new FileLoggerAdapter({
logsFolder: LOGS_FOLDER
});
var fileLoggerAdapter = new FileLoggerAdapter();
fileLoggerAdapter.info('testing info logs', () => {
fileLoggerAdapter.query({
size: 1,
@@ -49,11 +24,6 @@ describe('info logs', () => {

describe('error logs', () => {

afterEach((done) => {
deleteFolderRecursive(LOGS_FOLDER);
done();
});

it("Verify ERROR logs", (done) => {
var fileLoggerAdapter = new FileLoggerAdapter();
fileLoggerAdapter.error('testing error logs', () => {
34 changes: 20 additions & 14 deletions spec/LoggerController.spec.js
@@ -10,36 +10,40 @@ describe('LoggerController', () => {

expect(() => {
loggerController.getLogs(query).then(function(res) {
expect(res.length).toBe(0);
expect(res.length).not.toBe(0);
done();
}).catch((err) => {
console.error(err);
fail("should not fail");
done();
})
}).not.toThrow();
});

it('properly validates dateTimes', (done) => {
expect(LoggerController.validDateTime()).toBe(null);
expect(LoggerController.validDateTime("String")).toBe(null);
expect(LoggerController.validDateTime(123456).getTime()).toBe(123456);
expect(LoggerController.validDateTime("2016-01-01Z00:00:00").getTime()).toBe(1451606400000);
done();
});

it('can set the proper default values', (done) => {
// Make mock request
var result = LoggerController.parseOptions();
expect(result.size).toEqual(10);
expect(result.order).toEqual('desc');
expect(result.level).toEqual('info');

done();
});

it('can process a query witout throwing', (done) => {
// Make mock request
var query = {
from: "2016-01-01Z00:00:00",
until: "2016-01-01Z00:00:00",
size: 5,
size: 5,
order: 'asc',
level: 'error'
};
@@ -51,16 +55,16 @@
expect(result.size).toEqual(5);
expect(result.order).toEqual('asc');
expect(result.level).toEqual('error');

done();
});

it('can check process a query witout throwing', (done) => {
// Make mock request
var query = {
from: "2015-01-01",
until: "2016-01-01",
size: 5,
from: "2016-01-01",
until: "2016-01-30",
size: 5,
order: 'desc',
level: 'error'
};
@@ -71,13 +75,15 @@
loggerController.getLogs(query).then(function(res) {
expect(res.length).toBe(0);
done();
}).catch((err) => {
console.error(err);
fail("should not fail");
done();
})
}).not.toThrow();
});

it('should throw without an adapter', (done) => {


it('should throw without an adapter', (done) => {
expect(() => {
var loggerController = new LoggerController();
}).toThrow();
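
The defaults asserted above come from LoggerController.parseOptions: with no argument it falls back to size 10, order 'desc' and level 'info', and validDateTime returns null for anything it cannot turn into a Date while accepting millisecond timestamps (1451606400000 ms is 2016-01-01T00:00:00Z). A quick illustrative call that mirrors the expectations in this spec; the require path is an assumption, since the spec's own import sits outside the hunks shown:

var LoggerController = require('../src/Controllers/LoggerController').LoggerController; // path assumed

// Explicit options override every default; the values mirror the spec above.
var parsed = LoggerController.parseOptions({
  from: "2016-01-01Z00:00:00",
  size: 5,
  order: 'asc',
  level: 'error'
});
// parsed.size === 5, parsed.order === 'asc', parsed.level === 'error'

// With no argument the defaults apply.
var defaults = LoggerController.parseOptions();
// defaults.size === 10, defaults.order === 'desc', defaults.level === 'info'
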
153 changes: 24 additions & 129 deletions src/Adapters/Logger/FileLoggerAdapter.js
@@ -1,14 +1,15 @@
// Logger
//
// Wrapper around Winston logging library with custom query
//
//
// expected log entry to be in the shape of:
// {"level":"info","message":"Your Message","timestamp":"2016-02-04T05:59:27.412Z"}
//
import { LoggerAdapter } from './LoggerAdapter';
import winston from 'winston';
import fs from 'fs';
import { Parse } from 'parse/node';
import { logger, configure } from '../../logger';

const MILLISECONDS_IN_A_DAY = 24 * 60 * 60 * 1000;
const CACHE_TIME = 1000 * 60;
@@ -57,24 +58,6 @@ let _hasValidCache = (from, until, level) => {
return false;
}

// renews transports to current date
let _renewTransports = ({infoLogger, errorLogger, logsFolder}) => {
if (infoLogger) {
infoLogger.add(winston.transports.File, {
filename: logsFolder + _getFileName() + '.info',
name: 'info-file',
level: 'info'
});
}
if (errorLogger) {
errorLogger.add(winston.transports.File, {
filename: logsFolder + _getFileName() + '.error',
name: 'error-file',
level: 'error'
});
}
};

// check that log entry has valid time stamp based on query
let _isValidLogEntry = (from, until, entry) => {
var _entry = JSON.parse(entry),
@@ -84,139 +67,51 @@ let _isValidLogEntry = (from, until, entry) => {
: false
};

// ensure that file name is up to date
let _verifyTransports = ({infoLogger, errorLogger, logsFolder}) => {
if (_getNearestDay(currentDate) !== _getNearestDay(new Date())) {
currentDate = new Date();
if (infoLogger) {
infoLogger.remove('info-file');
}
if (errorLogger) {
errorLogger.remove('error-file');
}
_renewTransports({infoLogger, errorLogger, logsFolder});
}
}

export class FileLoggerAdapter extends LoggerAdapter {
constructor(options = {}) {
super();
this._logsFolder = options.logsFolder || LOGS_FOLDER;

// check logs folder exists
if (!fs.existsSync(this._logsFolder)) {
fs.mkdirSync(this._logsFolder);
}

this._errorLogger = new (winston.Logger)({
exitOnError: false,
transports: [
new (winston.transports.File)({
filename: this._logsFolder + _getFileName() + '.error',
name: 'error-file',
level: 'error'
})
]
});

this._infoLogger = new (winston.Logger)({
exitOnError: false,
transports: [
new (winston.transports.File)({
filename: this._logsFolder + _getFileName() + '.info',
name: 'info-file',
level: 'info'
})
]
});
}

info() {
_verifyTransports({infoLogger: this._infoLogger, logsFolder: this._logsFolder});
return this._infoLogger.info.apply(undefined, arguments);
return logger.info.apply(undefined, arguments);
}

error() {
_verifyTransports({errorLogger: this._errorLogger, logsFolder: this._logsFolder});
return this._errorLogger.error.apply(undefined, arguments);
return logger.error.apply(undefined, arguments);
}

// custom query as winston is currently limited
query(options, callback) {
query(options, callback = () => {}) {
if (!options) {
options = {};
}
// defaults to 7 days prior
let from = options.from || new Date(Date.now() - (7 * MILLISECONDS_IN_A_DAY));
let until = options.until || new Date();
let size = options.size || 10;
let limit = options.size || 10;
let order = options.order || 'desc';
let level = options.level || 'info';
let roundedUntil = _getNearestDay(until);
let roundedFrom = _getNearestDay(from);

if (_hasValidCache(roundedFrom, roundedUntil, level)) {
let logs = [];
if (order !== simpleCache.order) {
// reverse order of data
simpleCache.data.forEach((entry) => {
logs.unshift(entry);
});
} else {
logs = simpleCache.data;
}
callback(logs.slice(0, size));
return;
}

let curDate = roundedUntil;
let curSize = 0;
let method = order === 'desc' ? 'push' : 'unshift';
let files = [];
let promises = [];

// current a batch call, all files with valid dates are read
while (curDate >= from) {
files[method](this._logsFolder + curDate.toISOString() + '.' + level);
curDate = _getPrevDay(curDate);
}
var options = {
from,
until,
limit,
order
};

// read each file and split based on newline char.
// limitation is message cannot contain newline
// TODO: strip out delimiter from logged message
files.forEach(function(file, i) {
let promise = new Parse.Promise();
fs.readFile(file, 'utf8', function(err, data) {
return new Promise((resolve, reject) => {
logger.query(options, (err, res) => {
if (err) {
promise.resolve([]);
} else {
let results = data.split('\n').filter((value) => {
return value.trim() !== '';
});
promise.resolve(results);
callback(err);
return reject(err);
}
});
promises[method](promise);
});

Parse.Promise.when(promises).then((results) => {
let logs = [];
results.forEach(function(logEntries, i) {
logEntries.forEach(function(entry) {
if (_isValidLogEntry(from, until, entry)) {
logs[method](JSON.parse(entry));
}
});
});
simpleCache = {
timestamp: new Date(),
from: roundedFrom,
until: roundedUntil,
data: logs,
order,
level,
};
callback(logs.slice(0, size));
if (level == 'error') {
callback(res['parse-server-error']);
resolve(res['parse-server-error']);
} else {
callback(res['parse-server']);
resolve(res['parse-server']);
}
})
});
}
}
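
After this rewrite FileLoggerAdapter no longer reads log files by hand: info() and error() delegate to the shared winston logger, and query() wraps winston's own query(), mapping the size option onto winston's limit, keying results by transport name ('parse-server-error' for error-level queries, 'parse-server' otherwise) and returning a Promise in addition to calling the optional callback. A hedged usage sketch; the option values are illustrative:

var FileLoggerAdapter = require('../src/Adapters/Logger/FileLoggerAdapter').FileLoggerAdapter; // same path the spec uses

var adapter = new FileLoggerAdapter();
adapter.info('testing info logs');
adapter.error('testing error logs');

// size maps onto winston's limit; order defaults to 'desc' (newest entries first).
adapter.query({ level: 'error', size: 5, order: 'desc' })
  .then(function(entries) {
    console.log(entries);   // entries come from the 'parse-server-error' transport
  })
  .catch(function(err) {
    console.error(err);
  });
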
