Hashi Vault And NodeJs Application

node-vault

.env

NODE_TLS_REJECT_UNAUTHORIZED=0
SKIP_PREFLIGHT_CHECK=true

VAULT_SKIP_VERIFY=true
VAULT_ENDPOINT=https://enterprise.serer.org.com
VAULT_ROLE_ID=g1e85bfa-cwea-10b0-a91d-e505t255c6c0
VAULT_SECRET_ID=cd4cd268-dde4-1664-0e85-7a298555fd04
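
The samples below read these values from process.env; assuming the dotenv package is installed (npm i dotenv), load the file once at startup:

// Load .env into process.env before any Vault configuration is read.
// Assumes the dotenv package is installed.
require('dotenv').config();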

Asynchronous version (promise chaining)

const vault = require('node-vault')({
    apiVersion: 'v1',
    endpoint: process.env.VAULT_ENDPOINT,
    namespace: 'NS/PROD',
})

vault
    .approleLogin({
        role_id: process.env.VAULT_ROLE_ID,
        secret_id: process.env.VAULT_SECRET_ID,
    })
    .then((result) => {
        console.log('Start Reading', result)
        vault.token = result.auth.client_token

        vault
            .read('kv/data/mysql/webapp')
            .then((result) => {
                console.log('Read webapp', result)
            })
            .catch((err) => console.error('webapp error', err))

        vault
            .read('ad/creds/app_prod')
            .then((result) => {
                console.log('Read app_prod', result)
            })
            .catch((err) =>
                console.error('app_prod error', JSON.stringify(err))
            )
    })
    .catch((err) => console.error('Final error', JSON.stringify(err)))

const readVault = async (key) => {
    const { data } = await vault.read(key)
    return data
}

module.exports = {
    readVault,
}

Synchronous-style version (async/await)

const vault = require('node-vault')({
    apiVersion: 'v1',
    endpoint: process.env.VAULT_ENDPOINT,
    namespace: 'NS/PROD',
})

const init = async () => {
  const result = await vault.approleLogin({
    role_id: process.env.VAULT_ROLE_ID,
    secret_id: process.env.VAULT_SECRET_ID,
  });

  vault.token = result.auth.client_token; // Add token to vault object for subsequent requests.

  await sampleRuns(); // This is for testing purposes only; remove it
};

const readVault = async (key) => {
    const { data } = await vault.read(key);
    return data;
}

const sampleRuns = async () => {
    const db = await readVault("kv/data/mysql/webapp");

    const databaseName = db.data.db_name;
    const username = db.data.username;
    const password = db.data.password;
  
    console.log({
      databaseName,
      username,
      password,
    });
  
    const nas = await readVault("ad/creds/app_prod");
    const currentPassword = nas.current_password;
    const lastPassword = nas.last_password;
    const nasUsername = nas.username;
  
    console.log({
      nasUsername,
      currentPassword,
      lastPassword,
    });
}

init();

module.exports = {
    readVault,
}


hashi-vault-js

.env

NODE_TLS_REJECT_UNAUTHORIZED=0
SKIP_PREFLIGHT_CHECK=true

VAULT_SKIP_VERIFY=true
VAULT_ENDPOINT=https://enterprise.serer.org.com/v1
VAULT_ROLE_ID=g1e85bfa-cwea-10b0-a91d-e505t255c6c0
VAULT_SECRET_ID=cd4cd268-dde4-1664-0e85-7a298555fd04

Asynchronous version

const Vault = require('hashi-vault-js');

const vault = new Vault( {
    https: false,
    baseUrl: process.env.VAULT_ENDPOINT,
    //rootPath: 'PROD',
    timeout: 2000,
    proxy: false,
    // Only for Vault Enterprise
    namespace: 'NS/PROD'
});

let token;

vault.healthCheck().then(status => {
    if (!status.sealed) {
        console.log('Vault Status: ', status)
        vault.loginWithAppRole(process.env.VAULT_ROLE_ID, process.env.VAULT_SECRET_ID)
        .then(data => {
            token = data.client_token
            console.log('server token ', token)

            vault
                .readKVSecret(token, "mysql/webapp", null, 'kv')
                .then(data => {
                    console.log('webapp ', data)
                }).catch((error) => {
                    console.log('webapp error ', error)
                });

        }).catch((error) => {
            console.log('Login error ', error)
        });
    }
}).catch((error) => {
    console.log('Health Check error ', error)
})
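
The same flow reads more naturally with async/await; a minimal sketch using the same hashi-vault-js methods shown above:

const init = async () => {
    const status = await vault.healthCheck();
    if (status.sealed) {
        throw new Error('Vault is sealed');
    }
    console.log('Vault Status: ', status);
    const { client_token } = await vault.loginWithAppRole(
        process.env.VAULT_ROLE_ID,
        process.env.VAULT_SECRET_ID
    );
    // Same secret path and 'kv' mount as in the promise-chained version.
    const data = await vault.readKVSecret(client_token, 'mysql/webapp', null, 'kv');
    console.log('webapp ', data);
};

init().catch((error) => console.log('error ', error));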

Backend For Frontend (BFF) Pattern Implementation

In its simplest form, it acts as a proxy for other dependent APIs.

How Does it help?

  • Don’t have to expose all the APIs externally, thereby reducing security concerns
  • Caching or rate limiting requests outside of APIs
  • Batch calls to other APIs and return the aggregated data.

Implementation

It can be easily implemented in NodeJs with http-proxy-middleware

Install the following package

npm i http-proxy-middleware

Add the following file to your React application

app_server.js

const express = require('express');
const path = require('path');

const { createProxyMiddleware } = require('http-proxy-middleware');

const app = express();

const api1Service = process.env.API1_ROUTE || 'api1:5555';
const api1Url = `http://${api1Service}`;
console.log(api1Url);

const api2Service = process.env.API2_ROUTE || 'api2:5555';
const api2Url = `http://${api2Service}`;
console.log(api2Url);


app.use(express.static(path.join(__dirname, 'build')));
app.use('/api1', createProxyMiddleware({ target: api1Url, changeOrigin: false}));
app.use('/api2', createProxyMiddleware({ target: api2Url, changeOrigin: false}));


app.get('/status', function(req, res) {
    res.sendStatus(200);
    console.log('working!');
});

app.get('*', function(req, res) {
    res.sendFile(path.join(__dirname, 'build', 'index.html'));
});

app.listen(3000);


Dockerfile

FROM node:10.19.0-jessie

EXPOSE 3000

ENV APP_ROOT=/root/app-root \
    NODEJS_VERSION=8 \
    NPM_RUN=start \
    NAME=nodejs

ENV HOME=${APP_ROOT} \
    NPM_CONFIG_PREFIX=${APP_ROOT}/.npm 


COPY . ${APP_ROOT}


WORKDIR ${APP_ROOT}

RUN npm install && npm run build

CMD node app_server.js
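
To build and run the image (image name assumed):

docker build -t react-bff .
docker run -p 3000:3000 react-bff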

What Next?

With GraphQL we can implement a BFF very easily, and it brings lots of other benefits. If possible, switch to GraphQL.

Observability For NodeJs Applications Using Opentelemetry

A project demonstrating a complete observability stack for NodeJs based applications, utilizing Prometheus, Loki (for distributed logging), Tempo (for distributed tracing; it uses Jaeger internally) and Grafana, with OpenTelemetry auto / manual instrumentation, involving microservices with DB interactions.

https://github.com/mnadeem/nodejs-opentelemetry-tempo

Demo

Clone the project and run the following commands

docker-compose up --build

Access the api endpoint

View the log and trace in Grafana

Get the trace information Using Jaeger

View the metrics in Prometheus

View prometheus metrics in Grafana as well

Opentelemetry Components

Opentelemetry API: OpenTelemetry API, including all TypeScript interfaces, enums, and no-op implementations. It is intended for use both on the server and in the browser.
Opentelemetry Core: Provides default implementations of the OpenTelemetry API for trace and metrics. It is intended for use both on the server and in the browser.
Opentelemetry Node: Provides automated instrumentation and tracing for Node.js applications.
Opentelemetry Tracing: Used standalone, this module provides methods for manual instrumentation of code, offering full control over span creation for client-side JavaScript (browser) and Node.js. It does not provide automated instrumentation of known libraries, context propagation for asynchronous invocations or distributed context out of the box. Contains processors and exporters.
Opentelemetry Instrumentation: Instrumentation for web and node modules; provides the mechanism to register instrumentations.
Opentelemetry Js Contrib: A repository for OpenTelemetry JavaScript contributions that are not part of the core repository and core distribution of the API and SDK.
Opentelemetry Js Exporter Jaeger: Allows the user to send collected traces to Jaeger.

OpenTelemetry can collect tracing data automatically using plugins

@opentelemetry/plugin-express: Instruments Express.js
@opentelemetry/plugin-http and @opentelemetry/plugin-https: Instrument http and https calls
opentelemetry-plugin-aws-sdk: Instruments Amazon S3 API calls
opentelemetry-plugin-mssql: Instruments Microsoft SQL Server calls
@opentelemetry/plugin-mysql: Instruments MySQL DB calls

Opentelemetry is still in its early stages when it comes to metrics export, hence we would be using the Prometheus NodeJs client, which is pretty mature.

There is no parallel to Grafana Loki for distributed logging.

The docker-compose.yaml file takes care of bringing up the whole stack.

Install the following OpenTelemetry packages:

npm install --save @opentelemetry/api @opentelemetry/core @opentelemetry/node @opentelemetry/tracing @opentelemetry/instrumentation @opentelemetry/exporter-jaeger

Enabling Auto Instrumentation

npm install --save @opentelemetry/plugin-http @opentelemetry/plugin-https @opentelemetry/plugin-express opentelemetry-plugin-aws-sdk opentelemetry-plugin-mssql

The following would enable automatic tracing for express, http/https, aws and mssql

import log4js from 'log4js';
import opentelemetry, { context, getSpan, getSpanContext } from '@opentelemetry/api';
import {NodeTracerProvider} from '@opentelemetry/node'
import {registerInstrumentations} from '@opentelemetry/instrumentation'
import {JaegerExporter} from '@opentelemetry/exporter-jaeger'
import {SimpleSpanProcessor, BatchSpanProcessor, ConsoleSpanExporter} from '@opentelemetry/tracing'

const logger = log4js.getLogger("tracing");
logger.level = "debug";

// Enable OpenTelemetry exporters to export traces to Grafana Tempo.
const provider = new NodeTracerProvider ({
    plugins: {
        express: {
          enabled: true,
          path: '@opentelemetry/plugin-express',
        },
        http: {
            enabled: true,
            path: '@opentelemetry/plugin-http',
        },
        'aws-sdk': {
            enabled: true,
            // You may use a package name or absolute path to the file.
            path: "opentelemetry-plugin-aws-sdk",
        },
        mssql: {
            enabled: true,
            // You may use a package name or absolute path to the file.
            path: "opentelemetry-plugin-mssql",
        },
    },
});
// Register and load instrumentations and old plugins. Old plugins are loaded
// automatically as before, but instrumentations need to be added explicitly.
registerInstrumentations({
    tracerProvider: provider
});

// Initialize the exporter. 
const options = {
    serviceName: process.env.OTEL_SERVICE_NAME,
    tags: [], // optional
    // You can use the default UDPSender
    //host: 'localhost', // optional
    //port: 6832, // optional
    // OR you can use the HTTPSender as follows
    //14250 : model.proto not working 
    endpoint: process.env.OTEL_EXPORTER_JAEGER_ENDPOINT,
    maxPacketSize: 65000 // optional
}

/**
 * 
 * Configure the span processor to send spans to the exporter
 * The SimpleSpanProcessor does no batching and exports spans
 * immediately when they end. For most production use cases,
 * OpenTelemetry recommends use of the BatchSpanProcessor.
 */
provider.addSpanProcessor(new BatchSpanProcessor(new JaegerExporter(options)));
//provider.addSpanProcessor(new SimpleSpanProcessor(new ConsoleSpanExporter()));

/**
 * Registering the provider with the API allows it to be discovered
 * and used by instrumentation libraries. The OpenTelemetry API provides
 * methods to set global SDK implementations, but the default SDK provides
 * a convenience method named `register` which registers same defaults
 * for you.
 *
 * By default the NodeTracerProvider uses Trace Context for propagation
 * and AsyncHooksScopeManager for context management. To learn about
 * customizing this behavior, see API Registration Options below.
 */
// Initialize the OpenTelemetry APIs to use the NodeTracerProvider bindings
provider.register();

export const tracer = opentelemetry.trace.getTracer(process.env.OTEL_SERVICE_NAME);

export const addTraceId = (req, res, next) => {
    const spanContext = getSpanContext(context.active());
    req.traceId = spanContext && spanContext.traceId;    
    next();
};

logger.debug("tracing initialized for %s sending span to %s", options.serviceName, options.endpoint);

Manual Instrumentation

Here is an example of manual instrumentation:

import { tracer, addTraceId} from './tracing';
import { context, setSpan, getSpan } from '@opentelemetry/api';

:
:
:

app.get('/health', (req, res) => {
    const parentSpan = getSpan(context.active()); 
    doSomeWorkInNewSpan(parentSpan);

    return res.status(200).send({ message: "Health is good" });
});

const doSomeWorkInNewSpan = (parentSpan) => {

    //const ctx = setSpan(context.active(), parentSpan);
    //const childSpan = tracer.startSpan('doWork', undefined, ctx);
    const childSpan = tracer.startSpan('doSomeWorkInNewSpan', {
        attributes: { 'code.function' : 'doSomeWorkInNewSpan' }
    }, context.active());

    childSpan.setAttribute('code.filepath', "test");
    doSomeWorkInNewNestedSpan(childSpan);
    childSpan.end();
}
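
doSomeWorkInNewNestedSpan is not shown above; a minimal sketch, following the same pattern as the commented-out lines:

const doSomeWorkInNewNestedSpan = (parentSpan) => {
    // Make the parent span current, then start a child span under it.
    const ctx = setSpan(context.active(), parentSpan);
    const nestedSpan = tracer.startSpan('doSomeWorkInNewNestedSpan', undefined, ctx);
    // ... the actual nested work goes here ...
    nestedSpan.end();
}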

Publish a npm package locally for testing

To use local Node packages as project dependencies, we will use yalc.

npm install -g yalc

Let’s publish the package you are developing to your local yalc store

yalc publish

Add the package as a dependency from yalc store

yalc add <dependency name>

Install the newly added dependency

 npm install 

Push back the changes to local dependent projects

 yalc push 

Alternatively

publish the changes

yalc publish

and update individual projects

yalc update

Remove the yalc dependency

yalc remove opentelemetry-instrumentation-mssql

Writing A Plugin For Opentelemetry Automatic Instrumentation Of A NodeJs Library

Library Identification

The mssql npm package has a huge weekly download count.

With no OpenTelemetry implementation in core, contrib or extensions, it would be a great library to instrument, since lots of folks are using it.

Create a request with the OpenTelemetry team.

Analyzing The Existing Ecosystem

Currently there are lots of libraries which are already instrumented; explore their source code and understand the API, patterns, design, implementation and idioms.

While analyzing the code you would quickly notice that there are two ways to do it. Work with the OpenTelemetry team to understand the right approach to proceed; in this case you have to go ahead with Instrumentation.

One more thing you would notice is that specific libraries and transpilers are used: TypeScript, for example, Mocha for testing, shimmer for monkey patching and so on.

Analyzing the Library APIs

To start with, you would stick to the ConnectionPool and Request interfaces to implement the first use case:

ConnectionPool to grab the config, which is used as span attributes.

And Request to execute the actual queries.

Implementation

Let’s extend InstrumentationBase; you would be asked to override the init method (since it is TypeScript). An example implementation can be found here

import type * as mssql from 'mssql';

import {
    InstrumentationBase,
    InstrumentationConfig,
    InstrumentationModuleDefinition,
} from '@opentelemetry/instrumentation';

// MssqlInstrumentationConfig is defined in types.ts (sketched later in this post)
import { MssqlInstrumentationConfig } from './types';

type Config = InstrumentationConfig & MssqlInstrumentationConfig;

export class MssqlPlugin extends InstrumentationBase<typeof mssql> {

    protected init(): void | InstrumentationModuleDefinition<any> | InstrumentationModuleDefinition<any>[] {
        throw new Error('Method not implemented.');
    }
}

Add constructor.

import type * as mssql from 'mssql';

import {
    InstrumentationBase,
    InstrumentationConfig,
    InstrumentationModuleDefinition,
} from '@opentelemetry/instrumentation';

import { MssqlInstrumentationConfig } from './types';
import { VERSION } from './version';

type Config = InstrumentationConfig & MssqlInstrumentationConfig;

export class MssqlPlugin extends InstrumentationBase<typeof mssql> {

    static readonly COMPONENT = 'mssql';

    
    constructor(config: Config = {}) {
        super('opentelemetry-plugin-mssql', VERSION, Object.assign({}, config));
    }

    protected init(): void | InstrumentationModuleDefinition<any> | InstrumentationModuleDefinition<any>[] {
        throw new Error('Method not implemented.');
    }
    private _getConfig(): MssqlInstrumentationConfig {
        return this._config as MssqlInstrumentationConfig;
    }
}

Provide the instrumentation module definition(s), i.e., the patch and unpatch methods

// New Module Added
import {
    InstrumentationBase,
    InstrumentationConfig,
    InstrumentationModuleDefinition,
    InstrumentationNodeModuleDefinition,
} from '@opentelemetry/instrumentation';

// init() expanded for patch and unpatch
protected init(): void | InstrumentationModuleDefinition<any> | InstrumentationModuleDefinition<any>[] {
    const module = new InstrumentationNodeModuleDefinition<typeof mssql>(
        MssqlPlugin.COMPONENT,
        ['*'],
        this.patch.bind(this),
        this.unpatch.bind(this)
    );
    return module;
}

protected patch(moduleExports: typeof mssql): typeof mssql {
    if (moduleExports === undefined || moduleExports === null) {
        return moduleExports;
    }
    return moduleExports;
}

protected unpatch(moduleExports: typeof mssql): void {
}

Complete example

import { DatabaseAttribute } from '@opentelemetry/semantic-conventions';
import {
    InstrumentationBase,
    InstrumentationConfig,
    InstrumentationModuleDefinition,
    InstrumentationNodeModuleDefinition,
    isWrapped
} from '@opentelemetry/instrumentation';

import {
    SpanKind,
    SpanStatusCode,
    getSpan,
    context,
    diag
} from '@opentelemetry/api';

import type * as mssql from 'mssql';
import { MssqlInstrumentationConfig } from './types';
import { getConnectionAttributes, getSpanName } from './Spans';
import { VERSION } from './version';

type Config = InstrumentationConfig & MssqlInstrumentationConfig;

export class MssqlInstrumentation extends InstrumentationBase<typeof mssql> {

    static readonly COMPONENT = 'mssql';
    static readonly COMMON_ATTRIBUTES = {
        [DatabaseAttribute.DB_SYSTEM]: MssqlInstrumentation.COMPONENT,
    };

    constructor(config: Config = {}) {
        super('opentelemetry-instrumentation-mssql', VERSION, Object.assign({}, config));
    }

    private _getConfig(): MssqlInstrumentationConfig {
        return this._config as MssqlInstrumentationConfig;
    }

    protected init(): InstrumentationModuleDefinition<typeof mssql> | InstrumentationModuleDefinition<typeof mssql>[] | void {
        const module = new InstrumentationNodeModuleDefinition<typeof mssql>(
            MssqlInstrumentation.COMPONENT,
            ['*'],
            this.patch.bind(this),
            this.unpatch.bind(this)
        );

        return module;
    }

    protected patch(moduleExports: typeof mssql) {
        if (moduleExports === undefined || moduleExports === null) {
            return moduleExports;
        }
        diag.debug(`applying patch to ${MssqlInstrumentation.COMPONENT}`);
        this.unpatch(moduleExports);

        this._wrap(moduleExports, 'ConnectionPool', this._patchCreatePool() as any);
        this._wrap(moduleExports, 'Request', this._patchRequest() as any);

        return moduleExports;
    }

    // global export function
    private _patchCreatePool() {
        return (originalConnectionPool: any) => {
            const thisInstrumentation = this;
            diag.debug('MssqlPlugin#patch: patching mssql ConnectionPool');
            return function createPool(_config: string | mssql.config) {
                if (thisInstrumentation._getConfig()?.ignoreOrphanedSpans && !getSpan(context.active())) {
                    return new originalConnectionPool(...arguments);
                }
                const pool = new originalConnectionPool(...arguments);
                thisInstrumentation._wrap(pool, 'query', thisInstrumentation._patchPoolQuery(pool));
                return pool;
            };
        };
    }

    private _patchPoolQuery(pool: mssql.ConnectionPool) {
        return (originalQuery: Function) => {
            const thisInstrumentation = this;
            diag.debug('MssqlPlugin#patch: patching mssql pool request');
            return function request() {
                if (thisInstrumentation.shouldIgnoreOrphanSpans(thisInstrumentation._getConfig())) {
                    return originalQuery.apply(pool, arguments);
                }
                const args = arguments[0];
                const span = thisInstrumentation.tracer.startSpan(getSpanName(args[0]), {
                    kind: SpanKind.CLIENT
                });               
                return originalQuery.apply(pool, arguments)
                    .catch((error: { message: any; }) => {
                        span.setStatus({
                            code: SpanStatusCode.ERROR,
                            message: error.message,
                        })
                    }).finally(() => {
                        span.end();
                    });

            };
        };
    }

    private _patchRequest() {
        return (originalRequest: any) => {
            const thisInstrumentation = this;
            diag.debug('MssqlPlugin#patch: patching mssql pool request');
            return function request() {
                const request: mssql.Request = new originalRequest(...arguments);
                thisInstrumentation._wrap(request, 'query', thisInstrumentation._patchQuery(request));
                return request;
            };
        };
    }

    private _patchQuery(request: mssql.Request) {
        return (originalQuery: Function) => {
            const thisInstrumentation = this;

            diag.debug('MssqlPlugin#patch: patching mssql request query');
            return function query(command: string | TemplateStringsArray): Promise<mssql.IResult<any>> {
                if (thisInstrumentation.shouldIgnoreOrphanSpans(thisInstrumentation._getConfig())) {
                    return originalQuery.apply(request, arguments);
                }
                const span = thisInstrumentation.tracer.startSpan(getSpanName(command), {
                    kind: SpanKind.CLIENT,
                    attributes: {
                        ...MssqlInstrumentation.COMMON_ATTRIBUTES,
                        ...getConnectionAttributes((<any>request).parent!.config)
                    },
                });
                var interpolated = thisInstrumentation.formatDbStatement(command)
                for (const property in request.parameters) {
                    interpolated = interpolated.replace(`@${property}`, `${(request.parameters[property].value)}`);
                }
                span.setAttribute(DatabaseAttribute.DB_STATEMENT, interpolated);
                const result = originalQuery.apply(request, arguments);

                result
                    .catch((error: { message: any; }) => {
                        span.setStatus({
                            code: SpanStatusCode.ERROR,
                            message: error.message,
                        })
                    }).finally(() => {
                        span.end()
                    });

                return result;
            };
        };
    }

    private shouldIgnoreOrphanSpans(config: MssqlInstrumentationConfig) {
        return config?.ignoreOrphanedSpans && !getSpan(context.active())
    }

    private formatDbStatement(command: string | TemplateStringsArray) {
        if (typeof command === 'object') {
            return command[0];
        }
        return command;
    }

    protected unpatch(moduleExports: typeof mssql): void {
        if (isWrapped(moduleExports.ConnectionPool)) {
            this._unwrap(moduleExports, 'ConnectionPool');            
        }
        if (isWrapped(moduleExports.Request)) {
            this._unwrap(moduleExports, 'Request');
        }
    }
}

Make sure to create index.ts and types.ts as well.
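
A minimal sketch of the two files, assuming only the ignoreOrphanedSpans option used in the example (file names assumed):

// types.ts
export interface MssqlInstrumentationConfig {
    // If true, spans are not created when there is no active parent span.
    ignoreOrphanedSpans?: boolean;
}

// index.ts: re-export the public surface of the package
export * from './mssql';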

package.json

Test Cases

  • The instrumentation class (in this case MssqlInstrumentation) should be instantiated before requiring the library you are instrumenting in the test cases; otherwise calls will not be patched. In other words, load all instrumentations before any real usage (do not even import / require the library first) so it can be patched correctly. The instrumentation also needs to be loaded: either create a new instance and set things up manually, or use registerInstrumentations and pass the new instance of your instrumentation there (see the sketch after this list).
  • The instrumentation is enabled by default unless you call it with a config option to disable it.
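
A sketch of that ordering in a test file (import paths assumed):

import { NodeTracerProvider } from '@opentelemetry/node';
import { registerInstrumentations } from '@opentelemetry/instrumentation';
import { MssqlInstrumentation } from '../src';

// Create and register the instrumentation BEFORE the instrumented library
// is required; otherwise its calls will not be patched.
const provider = new NodeTracerProvider();
provider.register();
registerInstrumentations({
    tracerProvider: provider,
    instrumentations: [new MssqlInstrumentation()],
});

// Only now require mssql so the patched version is loaded.
const mssql = require('mssql');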

Plugin Version

The plugin version can be found here. Keep in mind that the plugin approach to instrumentation is deprecated and will be removed as soon as existing plugins are converted to the new instrumentation approach.

Plugin Limitations

  • The plugin version only supports traces; the end goal is to have a single auto-instrumentation that generates both traces and metrics.
  • Instrumentations can auto-instrument multiple packages at once. This makes sense for @opentelemetry/instrumentation-http, which handles both http and https, or @opentelemetry/instrumentation-grpc, which handles both grpc and @grpc/grpc-js; it allows related utils to be kept where they are used.
  • Instrumentations depend only on @opentelemetry/api, so they work with any SDK (not necessarily only the SDK we provide).
  • Not a technical point, but the name "instrumentation" was decided at the spec level to represent all auto-instrumentation packages, so all the plugins that already exist need to be renamed.
  • There are some known issues with the plugin approach: #1412 and #1315.
  • Instrumentation also allows you to patch more packages and individual files; this was not possible with plugins, and it is the biggest difference between the two classes.

Instrumenting NodeJs Express Applications For Prometheus Metrics

NodeJs Application

Create Folder

mkdir nodejs-prometheus
cd nodejs-prometheus

create package.json

npm init --yes

install dev dependencies

npm install -D  babel-cli babel-preset-env nodemon npm-run-all rimraf pino-pretty

install prod dependencies

npm install -P  cors dotenv express prom-client pino express-pino-logger

script

"scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "clean": "rimraf ./dist/",
    "build": "babel ./src/ --presets=babel-preset-env --out-dir dist --ignore ./node_modules,./.babelrc,./package.json,./npm-debug.log --copy-files",
    "server:dev": "nodemon ./src/server.js --exec babel-node --presets babel-preset-env",
    "server:prod": "node ./dist/server.js",
    "prod:build": "npm-run-all clean build",
    "prod": "npm-run-all clean prod:build server:prod",
    "dev": "npm-run-all server:dev | pino-pretty"
  }
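
The build and dev scripts pass the preset on the command line; equivalently, a .babelrc at the project root could declare it (a sketch matching the babel 6 dev dependencies below):

{
  "presets": ["env"]
}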

Complete package.json

{
  "name": "nodejs-prometheus",
  "version": "1.0.0",
  "description": "",
  "main": "index.js",
  "scripts": {
    "test": "echo \"Error: no test specified\" && exit 1",
    "clean": "rimraf ./dist/",
    "build": "babel ./src/ --presets=babel-preset-env --out-dir dist --ignore ./node_modules,./.babelrc,./package.json,./npm-debug.log --copy-files",
    "server:dev": "nodemon ./src/server.js --exec babel-node --presets babel-preset-env",
    "server:prod": "node ./dist/server.js",
    "prod:build": "npm-run-all clean build",
    "prod": "npm-run-all clean prod:build server:prod",
    "dev": "npm-run-all server:dev | pino-pretty"
  },
  "keywords": [],
  "author": "",
  "license": "ISC",
  "devDependencies": {
    "babel-cli": "^6.26.0",
    "babel-preset-env": "^1.7.0",
    "nodemon": "^2.0.7",
    "npm-run-all": "^4.1.5",
    "pino-pretty": "^4.5.0",
    "rimraf": "^3.0.2"
  },
  "dependencies": {
    "cors": "^2.8.5",
    "dotenv": "^8.2.0",
    "express": "^4.17.1",
    "express-pino-logger": "^6.0.0",
    "pino": "^6.11.1",
    "prom-client": "^13.1.0"
  }
}

server.js file

import express from 'express';
import pino from 'pino';
import expressPino from 'express-pino-logger';

const PORT = process.env.PORT || 5555;

const logger = pino({level:process.env.LOG_LEVEL || 'info'})
const expressLogger = expressPino({logger});

const app = express();
app.use(express.json(), expressLogger);

app.get('/health', (req, res) => {
    logger.debug('Calling res.send');
    return res.status(200).send({message: "Health is good"});
});

app.listen(PORT, () => {
    logger.info('App is listening for requests on port %d', PORT);
});

Dockerfile

FROM node:10.15.0-jessie

EXPOSE 5555

ENV APP_ROOT=/root/app-root \
    NODEJS_VERSION=8 \
    NPM_RUN=start \
    NAME=nodejs

ENV HOME=${APP_ROOT} \
    NPM_CONFIG_PREFIX=${APP_ROOT}/.npm 

COPY . ${APP_ROOT}

WORKDIR ${APP_ROOT}

RUN npm install && npm run prod:build
CMD node ./dist/server.js

start the application

npm run dev

looks good
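
To verify, hit the health endpoint (port 5555 from server.js):

curl http://localhost:5555/health
{"message":"Health is good"}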

Application Instrumentation Using prom-client

import

import promClient from 'prom-client';

add counter variable

const Counter = promClient.Counter;

create an instance, and keep incrementing

const c = new Counter({
	name: 'test_counter',
	help: 'Example of a counter',
	labelNames: ['code'],
});
setInterval(() => {
	c.inc({ code: 200 });
}, 500);

expose metric endpoint

// Setup server to Prometheus scrapes:
app.get('/metrics', async (req, res) => {
	try {
		res.set('Content-Type', promClient.register.contentType);
		res.end(await promClient.register.metrics());
	} catch (ex) {
		res.status(500).end(String(ex));
	}
});

metrics endpoint
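
Hitting the endpoint returns the counter in the Prometheus exposition format (value illustrative):

curl http://localhost:5555/metrics

# HELP test_counter Example of a counter
# TYPE test_counter counter
test_counter{code="200"} 42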

Let’s get real now and expose HTTP request durations.

server.js

import express from 'express';
import pino from 'pino';
import expressPino from 'express-pino-logger';
import promClient from 'prom-client';

const PORT = process.env.PORT || 5555;

const logger = pino({level:process.env.LOG_LEVEL || 'info'})
const expressLogger = expressPino({logger});

const app = express();
app.use(express.json(), expressLogger);

const collectDefaultMetrics = promClient.collectDefaultMetrics;

collectDefaultMetrics();
const Histogram = promClient.Histogram;
const requestDuration = new Histogram({
	name: 'http_request_duration_milliseconds',
	help: 'request duration histogram',
	labelNames: ['handler' , 'method', 'statuscode'],
});

const profilerMiddleware = (req, res, next) => {
    const start = Date.now();
    res.once('finish', () => {
        const duration = Date.now() - start;
        requestDuration.labels(req.url, req.method, res.statusCode).observe(duration);
    });

    next();
};
app.use(profilerMiddleware);


app.get('/health', (req, res) => {
    logger.debug('Calling res.send');    
    return res.status(200).send({message: "Health is good"});
});

app.listen(PORT, () => {
    logger.info('App is listening for requests on port %d', PORT);
});

// Setup server to Prometheus scrapes:
app.get('/metrics', async (req, res) => {
	try {
		res.set('Content-Type', promClient.register.contentType);
		res.end(await promClient.register.metrics());
	} catch (ex) {
		res.status(500).end(String(ex));
	}
});

Metrics exposed

An alternate implementation of server.js

import express from 'express';
import pino from 'pino';
import expressPino from 'express-pino-logger';
import promClient from 'prom-client';

const PORT = process.env.PORT || 5555;

const logger = pino({level:process.env.LOG_LEVEL || 'info'})
const expressLogger = expressPino({logger});

const app = express();
app.use(express.json(), expressLogger);

// Create a Registry which registers the metrics
const register = new promClient.Registry()
promClient.collectDefaultMetrics({ register });

const Histogram = promClient.Histogram;
const requestDuration = new Histogram({
	name: 'http_request_duration_seconds',
	help: 'request duration histogram',
    labelNames: ['handler' , 'method', 'statuscode'],
    //buckets: [0.5, 10, 25, 50, 100, 250, 500, 1000, 2500, 5000, 10000],
    buckets: [0.005, 0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1, 2.5, 5, 10],
});

// Register the histogram
register.registerMetric(requestDuration)

const profilerMiddleware = (req, res, next) => {
    //const start = Date.now();
    const end = requestDuration.startTimer()
    res.once('finish', () => {
      //const duration = Date.now() - start;
      //requestDuration.labels(req.url, req.method, res.statusCode).observe(duration);
      //requestDuration.observe({ handler:req.url, method: req.method, statuscode: res.statusCode }, duration);
      const duration = end({ handler:req.url, method: req.method, statuscode: res.statusCode });
      logger.info('Duration  %d', duration);
    });

  next();
};
app.use(profilerMiddleware);


app.get('/health', (req, res) => {
    logger.debug('Calling res.send');    
    return res.status(200).send({message: "Health is good"});
});

app.listen(PORT, () => {
    logger.info('App is listening for requests on port %d', PORT);
});

// Setup server to Prometheus scrapes:
app.get('/metrics', async (req, res) => {
	try {
		res.set('Content-Type', register.contentType);
		res.end(await register.metrics());
	} catch (ex) {
		res.status(500).end(String(ex));
	}
});

Prometheus Integration

scrape config

scrape_configs:  
  - job_name: 'nodeJsApp'
    static_configs:
      - targets: ['localhost:5555']   

Targets

Average request time, by dividing the sum by the count:

http_request_duration_seconds_sum / http_request_duration_seconds_count
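
This gives the average over the whole lifetime of the process; for a windowed average (the last 5 minutes, say) the usual pattern is to divide the rates:

rate(http_request_duration_seconds_sum[5m]) / rate(http_request_duration_seconds_count[5m])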

Calculating the 50th percentile (median) over the last 10 minutes:

histogram_quantile(.5, rate(http_request_duration_seconds_bucket[10m]))

Elegantly Consuming data In React Components Using Context API – Provider / Consumer Pattern


How do we make sure every component gets the data it needs?

Pass data using properties

Consuming data using React Context API

Create Context

A Context basically exposes state (data) and the API that operates on that data, for consumption by components. A Context should cover a specific functional area for a set of components: for example, AuthContext deals with authentication and DataContext provides data for the application. It can be fine-grained as well; for example, ToastContext provides a way to add a notification from anywhere in the application.

Refer to this as an example

Provide Context

Providers are basically singletons; they are responsible for interacting with the API and updating the state.

Step 1: Create Context

export const AuthContext = React.createContext();

Step 2: Populate Initial State

const initialState = {
  isAuthenticated: false,
  userId: "",
  roles: [],
  flash: ""
};

Step 3: Expose data and API

constructor(props) {
    super(props);

    this.state = {
      ...initialState,
      doAuthRedirect: (idp, realm) => {
        return this.doAuthRedirect(idp, realm);
      },
      getAuthToken: (code, idp, realm) => {
        return this.getAuthToken(code, idp, realm);
      },
      reAuth: () => {
        return this.reAuth();
      },
      logout: () => {
        return this.logout();
      }
    };
  }
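
The provider then exposes this state by wrapping its children in AuthContext.Provider; a minimal sketch of its render method:

render() {
    return (
      <AuthContext.Provider value={this.state}>
        {this.props.children}
      </AuthContext.Provider>
    );
}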

Consume Context Data

The following example shows how data can be consumed within a Component

export class Content extends Component {

  render() {
    
    return (<ToastConsumer>
      {({add}) => ( <div title="Notes" onClick={() => this.copyToClipboard(add)}>
        <NotesIcon />
      </div>)}
    </ToastConsumer>
    )
  }
}

In the following snippet, App is the provider for AuthContext, DataContext and ToastContext, and App is a consumer of DataContext and AuthContext, but not of ToastContext

import React from "react";

import "./App.css";
import { AuthContext, AuthProvider } from "../../providers/AuthProvider";
import { DataContext, DataProvider } from "../../providers/DataProvider";
import {ToastProvider} from 'react-toast-notifications';
import { Content } from "../Content/Content";
import {Timeout} from '../Timeout/Timeout'

function App() {
  return (
    <AuthProvider>
      <AuthContext.Consumer>
        {authContext => (
          <DataProvider authContext={authContext}>
            <DataContext.Consumer>
              {dataContext => (
                <ToastProvider>
                  <React.Fragment>
                    <div className="App">
                      <Content key={"app-1"} authContext={authContext} dataContext={dataContext}  />
                    </div>
                    <Timeout />
                  </React.Fragment>
                </ToastProvider>
              )}
            </DataContext.Consumer>
          </DataProvider>
        )}
      </AuthContext.Consumer>
    </AuthProvider>
  );
}
export default App;

Refer to this as an example

SVG Sprites with React Applications

Preliminary

https://icomoon.io/app/#/select

Create an SVG file sprites.svg under the public folder and paste the contents under the defs section one by one; each item goes in as a symbol.

<svg version="1.1" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
    <defs>
        <symbol id="icon-pacman" viewBox="0 0 32 32">
            <path d="M30.148 5.588c-2.934-3.42-7.288-5.588-12.148-5.588-8.837 0-16 7.163-16 16s7.163 16 16 16c4.86 0 9.213-2.167 12.148-5.588l-10.148-10.412 10.148-10.412zM22 3.769c1.232 0 2.231 0.999 2.231 2.231s-0.999 2.231-2.231 2.231-2.231-0.999-2.231-2.231c0-1.232 0.999-2.231 2.231-2.231z"></path>
        </symbol>
        <symbol id="icon-spades" viewBox="0 0 32 32">
            <path d="M25.549 10.88c-6.049-4.496-8.133-8.094-9.549-10.88v0c-0 0-0-0-0-0v0c-1.415 2.785-3.5 6.384-9.549 10.88-10.314 7.665-0.606 18.365 7.93 12.476-0.556 3.654-2.454 6.318-4.381 7.465v1.179h12.001v-1.179c-1.928-1.147-3.825-3.811-4.382-7.465 8.535 5.889 18.244-4.811 7.93-12.476z"></path>
        </symbol>
        <symbol id="icon-clubs" viewBox="0 0 32 32">
            <path d="M24.588 12.274c-1.845 0-3.503 0.769-4.683 2.022-0.5 0.531-1.368 1.16-2.306 1.713 0.441-1.683 1.834-3.803 2.801-4.733 1.239-1.193 2-2.87 2-4.734 0-3.59-2.859-6.503-6.4-6.541-3.541 0.038-6.4 2.951-6.4 6.541 0 1.865 0.761 3.542 2 4.734 0.967 0.93 2.36 3.050 2.801 4.733-0.939-0.553-1.806-1.182-2.306-1.713-1.18-1.253-2.838-2.022-4.683-2.022-3.575 0-6.471 2.927-6.471 6.541s2.897 6.542 6.471 6.542c1.845 0 3.503-0.792 4.683-2.045 0.525-0.558 1.451-1.254 2.447-1.832-0.094 4.615-2.298 8.005-4.541 9.341v1.179h12v-1.179c-2.244-1.335-4.448-4.726-4.541-9.341 0.995 0.578 1.922 1.274 2.447 1.832 1.18 1.253 2.838 2.045 4.683 2.045 3.575 0 6.471-2.928 6.471-6.542s-2.897-6.541-6.471-6.541z"></path>
        </symbol>
    </defs>
</svg>

Option #1 : One Back-end Call

Add to your SVG Component

<svg viewBox="0 0 28.3 28.3" className="icon icon-pacman">
    <use xlinkHref="/sprites.svg#icon-pacman" />
</svg>
<span className="name">icon-pacman</span>

Option #2 : Enhanced Previous Approach

Let’s create an Icons.svg file (with the same sprite contents as above) under the src folder

Let’s create an Icon.js file as follows

import React from "react";
import Icons from "./Icons.svg";
import PropTypes from 'prop-types';

const Icon = ({ name, color, size }) => (
    <svg className={`icon icon-${name}`} fill={color} width={size} height={size} >
      <use xlinkHref={`${Icons}#icon-${name}`} />
    </svg>
  );

Icon.propTypes = {
    name: PropTypes.string.isRequired,
    color: PropTypes.string,
    size: PropTypes.number
};

export default Icon;

Usage

import React from "react";
import "./App.css";
import Icon from "./Icon.js";

function App() {
  return (
    <div className="App">
      <div></div>
      <header className="App-header">
        <div>
          <Icon name="pacman" color="#FFFFFF" size={35} />
        </div>
        <div>
          <Icon name="spades" color="#FFFFFF" size={35} />
        </div>
        <div>
          <Icon name="clubs" color="#FFFFFF" size={35} />
        </div>
      </header>
    </div>
  );
}

export default App;

Option #3 : Zero Back-End Call

TODO

Multi IDP Support on React App For SSO Using OAuth2 / JWT

Okta Setup

Fusion Auth Setup

User

Add user to groups

Keycloak Setup

Refer to this on setting up Keycloak

Application Setup

Demo

Source Code

Download from GitHub
