diff --git a/README.md b/README.md
index 08d69b0..0aae930 100644
--- a/README.md
+++ b/README.md
@@ -48,6 +48,8 @@ An opinionated boilerplate for Node web APIs focused on separation of concerns a
Logging
The Log4js logger is highly pluggable, being able to append the messages to a file during the development and send them to a logging service when on production. Even the requests (through morgan) and queries will be logged.
+A second layer of logging, called trace logging, is implemented through log4js and a custom memory appender. It acts as an error-only file appender: your actions are traced through
+operations, and if an error is detected, the trace is saved to a file.
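+
+For example, an operation can record its steps through the trace helpers defined on the base `Operation` class (illustrative sketch; `CreateOrder` and `ordersRepository` are hypothetical):
+
+```js
+const Operation = require('src/app/Operation');
+
+class CreateOrder extends Operation {
+  constructor({ ordersRepository }) {
+    super();
+    this.ordersRepository = ordersRepository;
+  }
+
+  async execute(orderData) {
+    super.execute(orderData); // logs the start of the operation to the trace
+    const { SUCCESS, ERROR } = this.outputs;
+
+    try {
+      this.logStep({ step: 'persisting order', data: orderData });
+      const order = await this.ordersRepository.add(orderData);
+      this.emit(SUCCESS, order);
+    } catch(error) {
+      this.logError({ error }); // the buffered trace can now be written to the trace file
+      this.emit(ERROR, error);
+    }
+  }
+}
+
+CreateOrder.setOutputs(['SUCCESS', 'ERROR']);
+```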
Linter
diff --git a/config/configUtils.js b/config/configUtils.js
new file mode 100644
index 0000000..b25b908
--- /dev/null
+++ b/config/configUtils.js
@@ -0,0 +1,124 @@
+
+
+module.exports={
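+ /**
+ * Reads a value out of a nested object using a dotted path.
+ * Illustrative sketch (values are assumed):
+ *   getdataWithString({ web: { port: 4000 } }, 'web.port') // => 4000
+ */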
+ getdataWithString(sourceObject, string){
+ if(sourceObject.constructor===Object ){
+ let strings = string.split('.');
+ let finalData=sourceObject;
+ strings.forEach((elem)=>{
+ if(elem!==''){
+ finalData=finalData[elem];
+ }
+ });
+
+ return finalData;
+ }else{
+ throw 'sourceObject must be an object';
+ }
+ },
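+
+ /**
+ * Writes a value into a nested object using a dotted path, creating
+ * the intermediate objects when they are missing.
+ * Illustrative sketch (values are assumed):
+ *   setdataWithString({ web: {} }, 'web.port', 5000) // web.port becomes 5000
+ */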
+ setdataWithString(sourceObject, string, data){
+ if(sourceObject.constructor===Object ){
+ let strings = string.split('.');
+ let finalDestination=sourceObject;
+ strings.forEach((elem, index)=>{
+ if(index===strings.length-1){
+ //set the value on the last key of the path
+ finalDestination[elem]=data;
+ }else{
+ //create the intermediate object when it is missing, then descend into it
+ if(!finalDestination[elem] || finalDestination[elem].constructor!==Object){
+ finalDestination[elem]={};
+ }
+ finalDestination=finalDestination[elem];
+ }
+ });
+
+ return data;
+ }else{
+ throw 'sourceObject must be an object';
+ }
+ },
+
+ /**
+ * will set data based on hierarchy to a destination object
+ * @param destinationObject
+ * @param hierarchy
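+ * @example
+ * //illustrative sketch (values are assumed):
+ * //setDatawithHierarchy(config, { logging: { appenders: { file: { filename: '/tmp/app.log' } } } })
+ * //copies every leaf value of the hierarchy onto the matching path of the destination object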
+ */
+ setDatawithHierarchy(destinationObject, hierarchy){
+
+ if(destinationObject && destinationObject.constructor === Object && hierarchy && hierarchy.constructor === Object){
+ let strings = this.getStringOutOfHierarchy(hierarchy);
+
+ strings.map((elem)=>{
+ this.setdataWithString(destinationObject, elem, this.getdataWithString(hierarchy, elem));
+ });
+ }else{
+ throw 'argument must be objects and not null';
+ }
+
+ },
+
+
+ /**
+ * Will return a list of hierarchy strings based on the parameter object
+ * @param obj
+ * @returns {Array}
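+ * @example
+ * //illustrative sketch (values are assumed):
+ * //getStringOutOfHierarchy({ logging: { appenders: { file: { filename: '/tmp/app.log' } } } })
+ * //=> ['logging.appenders.file.filename']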
+ */
+ getStringOutOfHierarchy(obj){
+ let finalString=[];
+ let lastString=[];
+ //recHierarchy walks the object and fills finalString with the dotted paths of the top level keys
+ recHierarchy(obj, Object.keys(obj));
+
+ finalString.forEach(elem=>{
+ if(elem.constructor===Array){
+ elem.forEach(
+ (subelem)=>{
+ lastString.push(subelem);
+ }
+ );
+ }else{
+ lastString.push(elem);
+ }
+ });
+ return lastString;
+
+
+ function recHierarchy(obj, initial){
+ if(Object.keys(obj).length!==0){
+ return Object.keys(obj).map((elem)=>{
+ let baseString= elem;
+ switch (typeof obj[elem]) {
+ case 'object':{
+ if(obj[elem]===null || obj[elem].constructor === Array){
+ //null values and arrays are treated as leaves
+ if(initial && initial.indexOf(elem)!==-1){
+ finalString.push(elem);
+ }
+ return [`${elem.toString()}`];
+ }
+ let returnedValue = recHierarchy(obj[elem]);
+ let value = returnedValue.map(
+ (subPath)=>{
+ if(subPath.constructor === Array){
+ return subPath.map(
+ (subelem)=>{
+ return `${baseString}.${subelem.toString()}`;
+ }
+ );
+ }
+ return `${baseString}.${subPath.toString()}`;
+ }
+ );
+ if(initial && initial.indexOf(elem)!==-1){
+ finalString=finalString.concat(value);
+ }
+ return value;
+ }
+ default :{
+ //strings, numbers, booleans and functions are leaves
+ if(initial && initial.indexOf(elem)!==-1){
+ finalString.push(elem);
+ }
+ return [`${elem.toString()}`];
+ }
+ }
+ });
+ }else{
+ return [''];
+ }
+
+ }
+ },
+};
diff --git a/config/environments/development.js b/config/environments/development.js
deleted file mode 100644
index 9baa31d..0000000
--- a/config/environments/development.js
+++ /dev/null
@@ -1,14 +0,0 @@
-const path = require('path');
-const logPath = path.join(__dirname, '../../logs/development.log');
-
-module.exports = {
- web: {
- port: 3000
- },
- logging: {
- appenders: [
- { type: 'console' },
- { type: 'file', filename: logPath }
- ]
- }
-};
diff --git a/config/environments/development/development.js b/config/environments/development/development.js
new file mode 100644
index 0000000..224cbb4
--- /dev/null
+++ b/config/environments/development/development.js
@@ -0,0 +1,189 @@
+const path = require('path');
+const logPath = path.join(__dirname, '../../../logs/development.log');
+const tracePath = path.join(__dirname, '../../../logs/trace.log');
+const memoryAppender = require('src/infra/logging/MemoryAppender');
+const fs = require('fs');
+const ENV = process.env.NODE_ENV || 'development';
+const configUtils = require('../../configUtils');
+const JSONFileHandlingService = require('src/infra/files').JSONFileHandler;
+//a module-level variable holding the configuration is not ideal since it can contain
+//sensitive information, so it should be moved elsewhere
+var cfg = {};
+var fileCfg ={};
+var extraConfig={
+ 'logging': {
+ 'appenders': {
+ 'file': {
+ 'filename': logPath
+ },
+ 'trace': {
+ 'type': {
+ 'configure': memoryAppender.config()
+ },
+ },
+ 'traceFile': {
+ 'filename': tracePath,
+ }
+ },
+
+ }
+};
+
+
+ //defining the file and setting the configuration
+const file = path.join(__dirname, ENV)+'.json';
+//loading the initial configuration
+load();
+//setting the listener for json file changes to reload the configuration
+fs.watchFile(file, load);
+
+//load() merges the JSON file values and the extra in-memory configuration using Object.assign
+
+function load() {
+ const parsed = JSON.parse(fs.readFileSync(file));
+ Object.assign(fileCfg, parsed);
+ //merge the file values into the existing cfg object so the exported reference stays current
+ Object.assign(cfg, fileCfg);
+ configUtils.setDatawithHierarchy(cfg, extraConfig);
+}
+
+
+function setConfig(hierarchy){
+
+ if(hierarchy){
+ if(hierarchy.constructor === Object){
+ let strings = configUtils.getStringOutOfHierarchy(hierarchy);
+
+ strings.forEach((elem)=>{
+ if(fileCfg[elem.split('.')[0]]){
+ JSONFileHandlingService.setToJsonFile(file, elem, configUtils.getdataWithString(hierarchy, elem));
+ }else{
+ configUtils.setdataWithString(extraConfig, elem, configUtils.getdataWithString(hierarchy, elem));
+ }
+ });
+ }
+
+ load();
+ }else{
+ throw 'the hierarchy argument is mandatory';
+ }
+}
+
+
+function getConfig(){
+ return cfg;
+}
+
+module.exports= {
+ config:cfg,
+ set:(hierarchy)=>{
+ return setConfig(hierarchy);
+ },
+ get:()=>{
+ return getConfig();
+ }
+};
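+
+/*
+ * Illustrative usage sketch (require path and values are assumed):
+ *
+ *   const envConfig = require('config/environments/development/development');
+ *   envConfig.set({ web: { port: 5000 } }); //persisted to development.json, then reloaded
+ *   envConfig.get().web.port;               //=> 5000
+ */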
+
+
+
+//deprecated
+//left for comparison
+/*
+module.exports = {
+ web: {
+ port: 4000
+ },
+ logging: {
+ appenders: {
+ console :{ type: 'console' },
+ file: { type: 'file', filename: logPath },
+ trace :{
+ type: {
+ configure:config.config()
+ },
+ layout: {
+ type: 'pattern',
+ pattern: '%d %p %c %x{user} %m%n',
+ }
+ },
+ traceFile: {
+ type: 'file',
+ filename: tracePath,
+ layout: {
+ type: 'pattern',
+ pattern: '%m%n',
+ }
+ },
+ },
+ categories: {
+ default:
+ {
+ appenders:
+ [
+ 'console',
+ 'file',
+
+ ],
+ level: 'debug'
+ },
+ trace: {
+ appenders:
+ [
+ 'trace'
+ ],
+ level:'TRACE'
+ },
+ traceFile:{
+ appenders:
+ [
+ 'traceFile'
+ ],
+ level:'ALL'
+ }
+
+ }
+ },
+ memoryloggerId:'x-test-req-ID',
+ appMetrics:{
+ replSocketURL:'localhost:4001',
+ userAppMetric:true,
+ tracking:{
+ cpu:true,
+ eventloop:true,
+ profiling:true,
+ http:{
+ use:true,
+ config:{
+ filters:{
+ pattern:'', //(String) a regular expression pattern to match HTTP method and URL against, eg. 'GET /favicon.ico$'
+ to:'' //(String) a conversion for the URL to allow grouping. A value of '' causes the URL to be ignored.
+ }
+ }
+ },
+ mongo:true,
+ socketio:true,
+ mqlight:true,
+ postgresql:true,
+ mqtt:true,
+ mysql:true,
+ redis:true,
+ riak:true,
+ memcached:true,
+ oracledb:true,
+ oracle:true,
+ 'strong-oracle':true,
+ requests:{
+ use:true,
+ config:{
+ excludeModules:[] //(Array) of String names of modules to exclude from request tracking.
+ }
+ },
+ trace:{
+ use:true,
+ config:{
+ includeModules:[] //Array) of String names for modules to include in function tracing. By default only non-module functions are traced when trace is enabled.
+ }
+ }
+ }
+ }
+};
+*/
diff --git a/config/environments/development/development.json b/config/environments/development/development.json
new file mode 100644
index 0000000..90b0154
--- /dev/null
+++ b/config/environments/development/development.json
@@ -0,0 +1,97 @@
+{
+ "web": {
+ "port": 4000
+ },
+ "logging": {
+ "appenders": {
+ "console": {
+ "type": "console"
+ },
+ "file": {
+ "type": "file",
+ "filename": ""
+ },
+ "trace": {
+ "type": {
+ "configure":""
+ },
+ "layout": {
+ "type": "pattern",
+ "pattern": "%d %p %c %x{user} %m%n"
+ }
+ },
+ "traceFile": {
+ "type": "file",
+ "filename": "",
+ "layout": {
+ "type": "pattern",
+ "pattern": "%m%n"
+ }
+ }
+ },
+ "categories": {
+ "default": {
+ "appenders": [
+ "console",
+ "file"
+ ],
+ "level": "debug"
+ },
+ "trace": {
+ "appenders": [
+ "trace"
+ ],
+ "level": "TRACE"
+ },
+ "traceFile": {
+ "appenders": [
+ "traceFile"
+ ],
+ "level": "ALL"
+ }
+ }
+ },
+ "memoryloggerId": "x-test-req-ID",
+ "appMetrics": {
+ "replSocketURL": "localhost:4001",
+ "userAppMetric": true,
+ "tracking": {
+ "cpu": true,
+ "eventloop": true,
+ "profiling": true,
+ "http": {
+ "use": true,
+ "config": {
+ "filters": {
+ "pattern": "",
+ "to": ""
+ }
+ }
+ },
+ "mongo": true,
+ "socketio": true,
+ "mqlight": true,
+ "postgresql": true,
+ "mqtt": true,
+ "mysql": true,
+ "redis": true,
+ "riak": true,
+ "memcached": true,
+ "oracledb": true,
+ "oracle": true,
+ "strong-oracle": true,
+ "requests": {
+ "use": true,
+ "config": {
+ "excludeModules": []
+ }
+ },
+ "trace": {
+ "use": true,
+ "config": {
+ "includeModules": []
+ }
+ }
+ }
+ }
+}
diff --git a/config/environments/production.js b/config/environments/production/production.js
similarity index 100%
rename from config/environments/production.js
rename to config/environments/production/production.js
diff --git a/config/environments/production/production.json b/config/environments/production/production.json
new file mode 100644
index 0000000..90b0154
--- /dev/null
+++ b/config/environments/production/production.json
@@ -0,0 +1,97 @@
+{
+ "web": {
+ "port": 4000
+ },
+ "logging": {
+ "appenders": {
+ "console": {
+ "type": "console"
+ },
+ "file": {
+ "type": "file",
+ "filename": ""
+ },
+ "trace": {
+ "type": {
+ "configure":""
+ },
+ "layout": {
+ "type": "pattern",
+ "pattern": "%d %p %c %x{user} %m%n"
+ }
+ },
+ "traceFile": {
+ "type": "file",
+ "filename": "",
+ "layout": {
+ "type": "pattern",
+ "pattern": "%m%n"
+ }
+ }
+ },
+ "categories": {
+ "default": {
+ "appenders": [
+ "console",
+ "file"
+ ],
+ "level": "debug"
+ },
+ "trace": {
+ "appenders": [
+ "trace"
+ ],
+ "level": "TRACE"
+ },
+ "traceFile": {
+ "appenders": [
+ "traceFile"
+ ],
+ "level": "ALL"
+ }
+ }
+ },
+ "memoryloggerId": "x-test-req-ID",
+ "appMetrics": {
+ "replSocketURL": "localhost:4001",
+ "userAppMetric": true,
+ "tracking": {
+ "cpu": true,
+ "eventloop": true,
+ "profiling": true,
+ "http": {
+ "use": true,
+ "config": {
+ "filters": {
+ "pattern": "",
+ "to": ""
+ }
+ }
+ },
+ "mongo": true,
+ "socketio": true,
+ "mqlight": true,
+ "postgresql": true,
+ "mqtt": true,
+ "mysql": true,
+ "redis": true,
+ "riak": true,
+ "memcached": true,
+ "oracledb": true,
+ "oracle": true,
+ "strong-oracle": true,
+ "requests": {
+ "use": true,
+ "config": {
+ "excludeModules": []
+ }
+ },
+ "trace": {
+ "use": true,
+ "config": {
+ "includeModules": []
+ }
+ }
+ }
+ }
+}
diff --git a/config/index.js b/config/index.js
index 7e248a5..e29940f 100644
--- a/config/index.js
+++ b/config/index.js
@@ -5,16 +5,21 @@ const path = require('path');
const ENV = process.env.NODE_ENV || 'development';
-const envConfig = require(path.join(__dirname, 'environments', ENV));
+//deprecated
+const envConfig = require(path.join(__dirname, 'environments', ENV, ENV)+'.js');
const dbConfig = loadDbConfig();
-const config = Object.assign({
+const config = Object.assign({}, {
[ENV]: true,
env: ENV,
db: dbConfig
-}, envConfig);
+});
-module.exports = config;
+
+//The injection used to be done based on the two environments; update this when adding more.
+//UPDATE : we got rid of the per-environment injection because multiple subsequent injections were needed.
+
+module.exports = Object.assign({}, config, envConfig.config);
function loadDbConfig() {
if(process.env.DATABASE_URL) {
diff --git a/package.json b/package.json
index dac12c7..283f6c2 100644
--- a/package.json
+++ b/package.json
@@ -17,6 +17,7 @@
"coverage": "cross-env NODE_PATH=. NODE_ENV=test nyc mocha --opts test/mocha.opts.unit",
"lint": "eslint {src,test,config}/**/*.js",
"sequelize": "cross-env NODE_PATH=. sequelize",
+ "makemigration": "NODE_PATH=. makemigration",
"console": "cross-env NODE_PATH=. node src/interfaces/console/index.js",
"heroku-postbuild": "NODE_ENV=production NODE_PATH=. sequelize db:migrate --url=$DATABASE_URL",
"pm2": "pm2",
@@ -26,6 +27,7 @@
"author": "Talysson ",
"license": "MIT",
"dependencies": {
+ "appmetrics": "^5.1.1",
"awilix": "^3.0.9",
"awilix-express": "^0.11.0",
"body-parser": "^1.17.1",
@@ -34,19 +36,24 @@
"cross-env": "^3.2.3",
"del": "^2.2.2",
"dotenv": "^4.0.0",
+ "edit-json-file": "^1.3.2",
"eslint": "^4.7.2",
"express": "^4.15.2",
"express-status-monitor": "^0.1.9",
"http-status": "^1.0.1",
- "log4js": "^1.1.1",
+ "log4js": "^5.1.0",
"method-override": "^2.3.7",
"morgan": "^1.8.1",
+ "mysql": "^2.17.1",
+ "net": "^1.0.2",
"pg": "^6.1.3",
"pm2": "^2.4.2",
"sequelize": "^3.30.4",
+ "sequelize-auto-migrations": "^1.0.3",
"sequelize-cli": "^3.0.0",
"structure": "^1.2.0",
- "swagger-ui-express": "^2.0.14"
+ "swagger-ui-express": "^2.0.14",
+ "uuidv4": "^5.0.1"
},
"devDependencies": {
"chai": "^4.1.2",
diff --git a/src/app/Operation.js b/src/app/Operation.js
index 5dbc3a7..fbf55b6 100644
--- a/src/app/Operation.js
+++ b/src/app/Operation.js
@@ -2,6 +2,45 @@ const EventEmitter = require('events');
const define = Object.defineProperty;
class Operation extends EventEmitter {
+
+ constructor(){
+ super();
+ this.steps=0;
+ }
+
+ logToTrace(data){
+ this.container = require('src/container');
+ this.trace = this.container.resolve('Tracelogger');
+ this.trace.trace(data);
+ }
+ logToError(data){
+ this.container = require('src/container');
+ this.trace = this.container.resolve('Tracelogger');
+ this.trace.error(data);
+ }
+
+ logStart(data=undefined){
+ this.logToTrace(`starting operation ${this.constructor.name}, with input data ${data ? (typeof data === 'string' ? data : JSON.stringify(data)) : 'No Data'}`);
+ }
+
+ logFinish(data=undefined){
+ this.logToTrace(`Ending operation ${this.constructor.name}, with output data : ${data ? (typeof data === 'string' ? data : JSON.stringify(data)) : 'No Data'}`);
+ }
+
+ logStep({step, data=undefined}){
+ this.steps++;
+ this.logToTrace(`Operation ${this.constructor.name} | Step ${this.steps} : ${step} , with data : ${data ? (typeof data === 'string' ? data : JSON.stringify(data)) : 'No Data'}`);
+ }
+
+ logError({error=undefined}){
+ this.steps++;
+ //Error instances do not survive JSON.stringify, so log their stack or message instead
+ this.logToError(`Error after step : ${this.steps} in operation ${this.constructor.name}, with error data : ${error ? (error instanceof Error ? (error.stack || error.message) : (typeof error === 'string' ? error : JSON.stringify(error))) : 'No Data'}`);
+ }
+
+
static setOutputs(outputs) {
define(this.prototype, 'outputs', {
value: createOutputs(outputs)
@@ -15,6 +54,10 @@ class Operation extends EventEmitter {
throw new Error(`Invalid output "${output}" to operation ${this.constructor.name}.`);
}
+
+ execute(){
+ this.logStart(arguments);
+ }
}
const createOutputs = (outputsArray) => {
diff --git a/src/app/appMetrics.js b/src/app/appMetrics.js
new file mode 100644
index 0000000..9752aad
--- /dev/null
+++ b/src/app/appMetrics.js
@@ -0,0 +1,71 @@
+const Operation = require('../app/Operation');
+
+class appMetrics extends Operation {
+
+
+ constructor({config, logger}){
+
+ super();
+ this.config=config;
+ this.logger=logger;
+ //find out if appMetrics is enabled :
+ if(this.config.appMetrics.userAppMetric===false){
+ this.logger.info('App metrics is disabled, no Application metrics will be recorded or available through API');
+ return undefined;
+ }
+
+ const appmetrics = require('appmetrics');
+
+ //enabling metrics types :
+
+ let numberOfMetricsEnabled =0;
+ let numberOfMetricsEnablingErrors=0;
+ Object.keys(config.appMetrics.tracking).forEach((elem)=>{
+ let type= config.appMetrics.tracking[elem];
+ try{
+ switch (typeof type) {
+ case 'boolean':{
+ //simple flags : enable the metric by its name when the flag is true
+ if(type===true){
+ appmetrics.enable(elem, {});
+ numberOfMetricsEnabled++;
+ }
+ break;
+ }
+ case 'object':{
+ //detailed entries : enable the metric by its name with its own config
+ if(type.use===true && type.config){
+ appmetrics.enable(elem, type.config);
+ numberOfMetricsEnabled++;
+ }
+ break;
+ }
+ }
+ }catch (e) {
+ numberOfMetricsEnablingErrors++;
+ }
+ });
+
+ this.logger.info(`${numberOfMetricsEnabled} metric(s) have been enabled`);
+ this.logger.info(`${numberOfMetricsEnablingErrors} metric(s) have errored or were not loaded`);
+
+ //initializing monitor app.
+ this.monitoring = appmetrics.monitor();
+
+
+ //setting the outputs and attaching events
+ appMetrics.setOutputs(Object.keys(config.appMetrics.tracking).map((elem)=> {
+
+ this.monitoring.on(elem.toLowerCase(), (data) => {
+ this.emit(elem.toUpperCase(),data);
+ });
+
+ return elem.toUpperCase();
+ }));
+
+ }
+
+}
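+
+/*
+ * Illustrative usage sketch (resolved through the awilix container; the emitted event names
+ * are the keys of config.appMetrics.tracking, upper-cased):
+ *
+ *   const metrics = container.resolve('appMetrics');
+ *   metrics.on('CPU', (data) => logger.info('cpu sample', data));
+ *   metrics.on('HTTP', (data) => logger.info('http request', data));
+ */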
+
+
+
+
+module.exports = appMetrics;
diff --git a/src/app/user/CreateUser.js b/src/app/user/CreateUser.js
index 5dff6e7..327fea0 100644
--- a/src/app/user/CreateUser.js
+++ b/src/app/user/CreateUser.js
@@ -8,19 +8,26 @@ class CreateUser extends Operation {
}
async execute(userData) {
+ super.execute();
const { SUCCESS, ERROR, VALIDATION_ERROR } = this.outputs;
const user = new User(userData);
try {
+ /**
+       * The logStep method is one of several predefined logging methods
+       * that build up the log trace.
+       * They are used here only for demonstration.
+ */
+ this.logStep({step:'adding user to repository', data:userData});
const newUser = await this.usersRepository.add(user);
-
+ this.logStep({step:'Emitting success event', data:newUser});
this.emit(SUCCESS, newUser);
} catch(error) {
if(error.message === 'ValidationError') {
return this.emit(VALIDATION_ERROR, error);
}
-
+ this.logError({error:error});
this.emit(ERROR, error);
}
}
diff --git a/src/app/user/GetAllUsers.js b/src/app/user/GetAllUsers.js
index e6c7f91..b545a4f 100644
--- a/src/app/user/GetAllUsers.js
+++ b/src/app/user/GetAllUsers.js
@@ -7,6 +7,7 @@ class GetAllUsers extends Operation {
}
async execute() {
+ super.execute();
const { SUCCESS, ERROR } = this.outputs;
try {
diff --git a/src/container.js b/src/container.js
index e3b8e84..4e15b9a 100644
--- a/src/container.js
+++ b/src/container.js
@@ -13,14 +13,22 @@ const {
const UserSerializer = require('./interfaces/http/user/UserSerializer');
+const LogAppender = require('../src/infra/logging/MemoryAppender.js');
const Server = require('./interfaces/http/Server');
+const appMetrics = require('./app/appMetrics');
const router = require('./interfaces/http/router');
const loggerMiddleware = require('./interfaces/http/logging/loggerMiddleware');
+const loggerIdInjectorMiddleware = require('./interfaces/http/MiddleWare/LogIdInjectorMiddleware');
const errorHandler = require('./interfaces/http/errors/errorHandler');
const devErrorHandler = require('./interfaces/http/errors/devErrorHandler');
const swaggerMiddleware = require('./interfaces/http/swagger/swaggerMiddleware');
+
+
const logger = require('./infra/logging/logger');
+const ControllerLogger = require('./infra/logging/ControllerLogger');
+const Tracelogger = require('./infra/logging/dataTraceLogger');
+const JSONFileHandlingService = require('./infra/files').JSONFileHandler;
const SequelizeUsersRepository = require('./infra/user/SequelizeUsersRepository');
const { database, User: UserModel } = require('./infra/database/models');
@@ -30,30 +38,39 @@ const container = createContainer();
container
.register({
app: asClass(Application).singleton(),
- server: asClass(Server).singleton()
+ server: asClass(Server).singleton(),
+ appMetrics: asClass(appMetrics)
})
.register({
router: asFunction(router).singleton(),
- logger: asFunction(logger).singleton()
+ logger: asFunction(logger).singleton(),
+ Tracelogger:asFunction(Tracelogger).singleton(),
+ ControllerLogger:asFunction(ControllerLogger).singleton()
})
.register({
- config: asValue(config)
+ config: asValue(config),
});
// Middlewares
container
.register({
- loggerMiddleware: asFunction(loggerMiddleware).singleton()
+ loggerMiddleware: asFunction(loggerMiddleware).singleton(),
+ JSONFileHandlingService:asValue(JSONFileHandlingService)
})
.register({
containerMiddleware: asValue(scopePerRequest(container)),
errorHandler: asValue(config.production ? errorHandler : devErrorHandler),
- swaggerMiddleware: asValue([swaggerMiddleware])
+ swaggerMiddleware: asValue([swaggerMiddleware]),
+ appenderConfig: asFunction(LogAppender.config).singleton(),
+ appenderBuffer: asValue(LogAppender.buffer)
+ })
+ .register({
+ loggerIdInjectorMiddleware:asFunction(loggerIdInjectorMiddleware).singleton()
});
// Repositories
container.register({
- usersRepository: asClass(SequelizeUsersRepository).singleton()
+ usersRepository: asClass(SequelizeUsersRepository).setLifetime('TRANSIENT')
});
// Database
diff --git a/src/infra/SequelizeBaseRepository.js b/src/infra/SequelizeBaseRepository.js
new file mode 100644
index 0000000..0b28519
--- /dev/null
+++ b/src/infra/SequelizeBaseRepository.js
@@ -0,0 +1,187 @@
+
+
+
+
+/**
+ * A superclass implementing the basic Sequelize operations for a given model and mapper.
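+ * @example
+ * //illustrative sketch (the model and mapper names are assumed):
+ * //class SequelizeProductsRepository extends SequelizeBaseRepository {
+ * //  constructor({ ProductModel }) { super(ProductModel, ProductMapper); }
+ * //}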
+ */
+class SequelizeBaseRepository {
+ constructor(Model, Mapper) {
+ this.localModel = Model;
+ this.localMapper = Mapper;
+
+ }
+
+ /**
+ * parameters are sequelize query parameters
+ * @param args
+ * will return a promise that resolves an object of the specified model
+ * @returns {Promise}
+ */
+ async getAll(...args) {
+ const objects = await this.localModel.findAll(...args);
+
+ return objects.map(this.localMapper.toEntity);
+ }
+
+ /**
+ *
+ * @param id
+ * will return a promise that resolves an object of the specified model
+ * @returns {Promise<*>}
+ */
+ async getById(id) {
+ const object = await this._getById(id);
+
+ return this.localMapper.toEntity(object);
+ }
+
+ /**
+ * queries by the given attribute and value
+ * @param attribute
+ * @param value
+ * will return a promise that resolves the matching entities of the specified model
+ * @returns {Promise<*>}
+ */
+ async getByAttribute(attribute, value){
+ const objects = await this._getBy(attribute, value);
+
+ return objects.map(this.localMapper.toEntity);
+ }
+
+ /**
+ * will add a new object record to storage
+ * object parameter should be a structure object and should have the
+ * validate method in its prototype. see structure on npmjs.
+ * @param object
+ * will return a promise that resolves an object of the specified model
+ * @returns {Promise<*>}
+ */
+ async add(object) {
+ const { valid, errors } = object.validate();
+
+ if(!valid) {
+ const error = new Error('ValidationError');
+ error.details = errors;
+
+ throw error;
+ }
+
+ const newobject = await this.localModel.create(this.localMapper.toDatabase(object));
+ return this.localMapper.toEntity(newobject);
+ }
+
+ /**
+ * Will remove a record from the storage
+ * @param id
+ * will return a promise that resolves when the action is complete
+ * @returns {Promise}
+ * @throws NotFoundError | any
+ */
+ async remove(id) {
+ const object = await this._getById(id);
+
+ await object.destroy();
+ return;
+ }
+
+ /**
+ *
+ * @param id
+ * @param newData
+ * will return a promise that resolves an object of the specified updated model
+ * @returns {Promise<*>}
+ * @throws NotFoundError | any
+ */
+ async update(id, newData) {
+ const object = await this._getById(id);
+
+ const transaction = await this.localModel.sequelize.transaction();
+
+ try {
+ const updatedobject = await object.update(newData, { transaction });
+ const objectEntity = this.localMapper.toEntity(updatedobject);
+
+ const { valid, errors } = objectEntity.validate();
+
+ if(!valid) {
+ const error = new Error('ValidationError');
+ error.details = errors;
+
+ throw error;
+ }
+
+ await transaction.commit();
+
+ return objectEntity;
+ } catch(error) {
+ await transaction.rollback();
+
+ throw error;
+ }
+ }
+
+ /**
+ * Will count the number of records of the specified model
+ * will return a promise that resolves to a number
+ * @returns {Promise<*>}
+ */
+ async count() {
+ return await this.localModel.count();
+ }
+
+ // Private
+
+ /**
+ * Private internal function
+ * @param id
+ * will return a promise that resolves an object of the specified model
+ * @returns {Promise}
+ * @private
+ */
+ async _getById(id) {
+ try {
+ return await this.localModel.findById(id, { rejectOnEmpty: true });
+ } catch(error) {
+ if(error.name === 'SequelizeEmptyResultError') {
+ const notFoundError = new Error('NotFoundError');
+ notFoundError.details = `object with id ${id} can't be found.`;
+
+ throw notFoundError;
+ }
+
+ throw error;
+ }
+ }
+
+
+ /**
+ * Private internal function
+ * @param attribute
+ * @param value
+ * will return a promise that resolves an object of the specified model
+ * @returns {Promise}
+ * @private
+ */
+ async _getBy(attribute, value) {
+ try {
+ let options = {};
+ options[attribute]=value;
+ return await this.localModel.findAll({
+ where:options,
+ rejectOnEmpty: true
+ });
+ } catch(error) {
+ if(error.name === 'SequelizeEmptyResultError') {
+ const notFoundError = new Error('NotFoundError');
+ notFoundError.details = `object with attribute ${attribute} and value ${value} can't be found.`;
+
+ throw notFoundError;
+ }
+
+ throw error;
+ }
+ }
+}
+
+module.exports = SequelizeBaseRepository;
diff --git a/src/infra/files/FileHandlingService.js b/src/infra/files/FileHandlingService.js
new file mode 100644
index 0000000..de27ce9
--- /dev/null
+++ b/src/infra/files/FileHandlingService.js
@@ -0,0 +1,19 @@
+const editJsonFile = require('edit-json-file');
+
+module.exports = {
+
+
+ setToJsonFile(file, key, data) {
+ let File = editJsonFile(file);
+ File.set(key, data);
+ File.save();
+ },
+
+
+ readFromJson(file, key) {
+ let File = editJsonFile(file);
+ return File.get(key);
+ }
+
+
+};
diff --git a/src/infra/files/index.js b/src/infra/files/index.js
new file mode 100644
index 0000000..b3f35d6
--- /dev/null
+++ b/src/infra/files/index.js
@@ -0,0 +1,3 @@
+module.exports = {
+ JSONFileHandler: require('./FileHandlingService')
+};
diff --git a/src/infra/logging/ControllerLogger.js b/src/infra/logging/ControllerLogger.js
new file mode 100644
index 0000000..3dfcb22
--- /dev/null
+++ b/src/infra/logging/ControllerLogger.js
@@ -0,0 +1,79 @@
+
+
+
+
+
+
+module.exports =({appenderBuffer, Tracelogger, config})=>{
+
+ return class ControllerLogger {
+
+ constructor(req, EventEmitterClass, {success, error}={} ){
+ this.appenderBuffer = appenderBuffer;
+ this.TraceLogger = Tracelogger;
+
+
+ if(!success){
+ success='SUCCESS';
+ }
+
+ if(!error){
+ error=['VALIDATION_ERROR', 'ERROR', 'NOT_FOUND'];
+ }
+ if(success instanceof Array){
+ success.forEach((elem)=>{
+ if(EventEmitterClass.outputs[elem]){
+ EventEmitterClass.on(elem, ()=>{
+ if(req.headers[config.memoryloggerId]){
+ this.appenderBuffer.deletemyLog(req.headers[config.memoryloggerId]);
+ }else{
+ this.TraceLogger.toFileLogger.trace('Couldn\'t find a header ID in the given request');
+ }
+ });
+ }
+ });
+ }else{
+ if(typeof success === 'string'){
+ if(EventEmitterClass.outputs[success]){
+ EventEmitterClass.on(success, ()=>{
+ if(req.headers[config.memoryloggerId]){
+ this.appenderBuffer.deletemyLog(req.headers[config.memoryloggerId]);
+ }else{
+ this.TraceLogger.toFileLogger.trace('Couldn\'t find a header ID in the given request');
+ }
+ });
+ }
+ }
+ }
+
+ if(error instanceof Array){
+ error.forEach((elem)=>{
+ if(EventEmitterClass.outputs[elem]){
+ EventEmitterClass.on(elem, ()=>{
+ if(req.headers[config.memoryloggerId]){
+ this.TraceLogger.toFileLogger.trace(this.appenderBuffer.gemyLog(req.headers[config.memoryloggerId]));
+ }else{
+ this.TraceLogger.toFileLogger.trace('Couldn\'t find a header ID in the given request');
+ }
+ });
+ }
+ });
+ }else{
+ if(typeof error === 'string'){
+ if(EventEmitterClass.outputs[error]){
+ EventEmitterClass.on(error, ()=>{
+ if(req.headers[config.memoryloggerId]){
+ this.TraceLogger.toFileLogger.trace(this.appenderBuffer.gemyLog(req.headers[config.memoryloggerId]));
+ }else{
+ this.TraceLogger.toFileLogger.trace('Couldn\'t find a header ID in the given request');
+ }
+ });
+ }
+
+ }
+ }
+ return EventEmitterClass;
+ }
+ };
+
+};
diff --git a/src/infra/logging/MemoryAppender.js b/src/infra/logging/MemoryAppender.js
new file mode 100644
index 0000000..e199114
--- /dev/null
+++ b/src/infra/logging/MemoryAppender.js
@@ -0,0 +1,75 @@
+/**
+ * The memory appender is an appender for log4js. It is not pulled in as a dependency
+ * but implemented inside our code. It handles trace logs by saving them in
+ * memory in a variable called bufferMap.
+ * The bufferMap variable holds all the traces for the currently running requests;
+ * once a request is closed its traces are either deleted, or saved to a file and then
+ * deleted from memory, all based on the outcome of the request.
+ *
+ * The memory appender follows the log4js custom appender specification, with
+ * a small alteration that lets us access the bufferMap variable from memory,
+ * as that is not natively supported by the log4js specification.
+ */
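+
+/*
+ * Illustrative usage sketch (wiring names follow the environment config; the request id is assumed):
+ *
+ *   //in the log4js config the appender is declared as { type: { configure: MemoryAppender.config() } }
+ *   const { buffer } = require('src/infra/logging/MemoryAppender');
+ *   buffer.gemyLog('some-request-id');     //returns the buffered trace lines for that request
+ *   buffer.deletemyLog('some-request-id'); //drops them once the request succeeds
+ */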
+
+//module level state shared between the configure function and the buffer accessors
+var oglayouts;
+var buffer = [];
+var bufferMap = {};
+var maxBufferSize = 100;
+
+
+
+module.exports.config = ()=>{
+
+ return (config, layouts, findAppender, levels) => {
+ oglayouts=layouts;
+ var layout = null;
+ if (config.layout) {
+ layout = layouts.layout(config.layout.type, config.layout);
+ }
+ if(config.maxBufferSize){
+ maxBufferSize = config.maxBufferSize;
+ }
+ return memoryAppender(layout, config.timezoneOffset);
+ };
+
+
+};
+
+var memoryAppender = function memoryAppender(layout, timezoneOffset) {
+ layout = layout || (oglayouts ? oglayouts.basicLayout : undefined);
+ return function(loggingEvent) {
+ if((buffer.length + 1) > maxBufferSize)
+ {
+ var numtoRemove = (buffer.length - maxBufferSize) + 1;
+ if(numtoRemove > 0){ buffer.splice(0, numtoRemove); }
+ }
+ let id = (loggingEvent.data[0] && loggingEvent.data[0].id) ? loggingEvent.data[0].id : 'na';
+
+ if(!bufferMap[id]){
+ bufferMap[id]=[];
+ }
+
+ bufferMap[id].push(layout(loggingEvent, timezoneOffset));
+
+ };
+};
+
+
+module.exports.buffer = {
+ bufferMap:bufferMap,
+ deletemyLog:(id)=>{
+ delete bufferMap[id];
+ return true;
+ },
+ gemyLog:(id)=>{
+ return bufferMap[id];
+ }
+
+};
+
+
diff --git a/src/infra/logging/dataTraceLogger.js b/src/infra/logging/dataTraceLogger.js
new file mode 100644
index 0000000..bb7b317
--- /dev/null
+++ b/src/infra/logging/dataTraceLogger.js
@@ -0,0 +1,42 @@
+const Log4js = require('log4js');
+
+/**
+ *
+ * The dataTraceLogger is a wrapper over the log4js loggers; it injects
+ * a specific id into every entry so the log can be identified and routed to the right logger.
+ *
+ * This can be used to inject any kind of data in the logs.
+ */
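+
+/*
+ * Illustrative usage sketch (resolved through the awilix container under the name
+ * registered in src/container.js; the request id is assumed):
+ *
+ *   const trace = container.resolve('Tracelogger');
+ *   trace.setuserID(requestId);               //tag the following entries with the request id
+ *   trace.trace('fetching user from the db'); //buffered by the memory appender
+ *   trace.error('user lookup failed');        //logged at error level under the same id
+ */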
+
+var TraceData ={
+ user_id:''
+};
+
+
+module.exports = ({ config }) => {
+ Log4js.configure(config.logging);
+
+ let logger = Log4js.getLogger('trace');
+ let toFileLogger = Log4js.getLogger('traceFile');
+ return {
+ logger:logger,
+ toFileLogger:toFileLogger,
+ trace:(data)=>{
+ logger.trace({
+ id:TraceData.user_id,
+ log:data
+ });
+ },
+ error:(data)=>{
+ logger.error({
+ id:TraceData.user_id,
+ log:data
+ });
+ },
+ setuserID:(id)=>{
+ TraceData.user_id=id;
+
+ },
+ traceStaticData:TraceData
+ };
+};
diff --git a/src/infra/user/SequelizeUsersRepository.js b/src/infra/user/SequelizeUsersRepository.js
index 046fa62..14993ac 100644
--- a/src/infra/user/SequelizeUsersRepository.js
+++ b/src/infra/user/SequelizeUsersRepository.js
@@ -1,90 +1,10 @@
const UserMapper = require('./SequelizeUserMapper');
+const BaseSequelizeRepository = require('../SequelizeBaseRepository');
-class SequelizeUsersRepository {
+class SequelizeUsersRepository extends BaseSequelizeRepository{
constructor({ UserModel }) {
- this.UserModel = UserModel;
- }
-
- async getAll(...args) {
- const users = await this.UserModel.findAll(...args);
-
- return users.map(UserMapper.toEntity);
- }
-
- async getById(id) {
- const user = await this._getById(id);
-
- return UserMapper.toEntity(user);
- }
-
- async add(user) {
- const { valid, errors } = user.validate();
-
- if(!valid) {
- const error = new Error('ValidationError');
- error.details = errors;
-
- throw error;
- }
-
- const newUser = await this.UserModel.create(UserMapper.toDatabase(user));
- return UserMapper.toEntity(newUser);
- }
-
- async remove(id) {
- const user = await this._getById(id);
-
- await user.destroy();
- return;
- }
-
- async update(id, newData) {
- const user = await this._getById(id);
-
- const transaction = await this.UserModel.sequelize.transaction();
-
- try {
- const updatedUser = await user.update(newData, { transaction });
- const userEntity = UserMapper.toEntity(updatedUser);
-
- const { valid, errors } = userEntity.validate();
-
- if(!valid) {
- const error = new Error('ValidationError');
- error.details = errors;
-
- throw error;
- }
-
- await transaction.commit();
-
- return userEntity;
- } catch(error) {
- await transaction.rollback();
-
- throw error;
- }
- }
-
- async count() {
- return await this.UserModel.count();
- }
-
- // Private
-
- async _getById(id) {
- try {
- return await this.UserModel.findById(id, { rejectOnEmpty: true });
- } catch(error) {
- if(error.name === 'SequelizeEmptyResultError') {
- const notFoundError = new Error('NotFoundError');
- notFoundError.details = `User with id ${id} can't be found.`;
-
- throw notFoundError;
- }
+ super(UserModel, UserMapper);
- throw error;
- }
}
}
diff --git a/src/interfaces/console/Console.js b/src/interfaces/console/Console.js
index 8f191ce..668dcf7 100644
--- a/src/interfaces/console/Console.js
+++ b/src/interfaces/console/Console.js
@@ -3,10 +3,12 @@ const vm = require('vm');
module.exports = {
start(options = {}) {
- const { expose } = options;
-
+ const { expose, socket } = options;
const repl = REPL.start({
- eval: promisableEval
+ eval: promisableEval,
+ terminal:true,
+ input: socket,
+ output: socket,
});
Object.assign(repl.context, expose);
diff --git a/src/interfaces/console/index.js b/src/interfaces/console/index.js
index 898ff63..0257b2a 100644
--- a/src/interfaces/console/index.js
+++ b/src/interfaces/console/index.js
@@ -4,5 +4,7 @@ const Console = require('./Console');
const container = require('src/container');
Console.start({
- expose: { container }
+ expose: { container},
});
+
+
diff --git a/src/interfaces/http/MiddleWare/LogIdInjectorMiddleware.js b/src/interfaces/http/MiddleWare/LogIdInjectorMiddleware.js
new file mode 100644
index 0000000..53fa4d3
--- /dev/null
+++ b/src/interfaces/http/MiddleWare/LogIdInjectorMiddleware.js
@@ -0,0 +1,17 @@
+const uuidv4 = require('uuid/v4');
+
+/**
+ * This will inject a request Id for every request and use that to store the trace related
+ * to that request. Using this id, the trace can then be identified and either stored
+ * in case of error or deleted in case of success.
+ * @param config
+ * @param Tracelogger
+ * @returns {Function}
+ */
+module.exports=({config, Tracelogger})=>{
+ return (req, res, next)=>{
+ req.headers[config.memoryloggerId] = uuidv4();
+ Tracelogger.setuserID(req.headers[config.memoryloggerId]);
+ next();
+ };
+};
diff --git a/src/interfaces/http/router.js b/src/interfaces/http/router.js
index d5485d4..c44cca3 100644
--- a/src/interfaces/http/router.js
+++ b/src/interfaces/http/router.js
@@ -5,8 +5,9 @@ const bodyParser = require('body-parser');
const compression = require('compression');
const methodOverride = require('method-override');
const controller = require('./utils/createControllerRoutes');
+const { inject } = require('awilix-express');
-module.exports = ({ config, containerMiddleware, loggerMiddleware, errorHandler, swaggerMiddleware }) => {
+module.exports = ({ config, loggerIdInjectorMiddleware, containerMiddleware, loggerMiddleware, errorHandler, swaggerMiddleware }) => {
const router = Router();
/* istanbul ignore if */
@@ -27,6 +28,8 @@ module.exports = ({ config, containerMiddleware, loggerMiddleware, errorHandler,
.use(bodyParser.json())
.use(compression())
.use(containerMiddleware)
+ .use(inject('ControllerLogger'))
+ .use(loggerIdInjectorMiddleware)
.use('/docs', swaggerMiddleware);
/*
diff --git a/src/interfaces/http/user/UsersController.js b/src/interfaces/http/user/UsersController.js
index ee4f935..f71e6d9 100644
--- a/src/interfaces/http/user/UsersController.js
+++ b/src/interfaces/http/user/UsersController.js
@@ -1,14 +1,21 @@
const { Router } = require('express');
const { inject } = require('awilix-express');
const Status = require('http-status');
-
+const loggerInjector = require('../utils/ControllerLoggerWrapper');
const UsersController = {
get router() {
const router = Router();
router.use(inject('userSerializer'));
- router.get('/', inject('getAllUsers'), this.index);
+ /**
+       * In the next route the loggerInjector is a wrapper over the
+       * awilix inject method. What it does is still inject the operation
+       * using the inject method from awilix, and then add the log handling by using
+       * the ControllerLogger module. See infra/logging/ControllerLogger.js.
+       * For demonstration it is only used on the first route.
+ */
+ router.get('/', loggerInjector('getAllUsers'), this.index);
router.get('/:id', inject('getUser'), this.show);
router.post('/', inject('createUser'), this.create);
router.put('/:id', inject('updateUser'), this.update);
diff --git a/src/interfaces/http/utils/ControllerLoggerWrapper.js b/src/interfaces/http/utils/ControllerLoggerWrapper.js
new file mode 100644
index 0000000..128f92e
--- /dev/null
+++ b/src/interfaces/http/utils/ControllerLoggerWrapper.js
@@ -0,0 +1,16 @@
+const { inject } = require('awilix-express');
+
+/**
+ * The controller logger wrapper injects an operation (module) and wires the
+ * ControllerLogger into it, listening for success and error events to
+ * respectively delete the temporary trace or store it in a file.
+ * @param module
+ * @returns {*[]}
+ */
+module.exports=(module)=>{
+
+ return [inject(module), (req, res, next)=>{
+ req[module]= new req.ControllerLogger(req, req[module]);
+ next();
+ }];
+};