This is my recipe for a Microservice implemented in NodeJS.
I’m fairly new to NodeJS as opposed to Java SpringBoot (see my recipe for SpringBoot Microservices here). I look forward to getting challenged on my chosen stack so I can refine and improve.
Ingredients:
Express is ubiquitous.
I’m going to see where Koa takes us; I think this will result in a cleaner implementation.
mkdir node-microservice-recipe
cd node-microservice-recipe
npm init -y
npm i koa koa-router koa-logger koa-combine-routers koa-bodyparser mongoose apollo-server-koa graphql graphql-tools lodash
npm install nodemon --save-dev
Edit package.json
...
"scripts": {
"dev": "npx nodemon app.js",
...
},
...
app.js
const Koa = require('koa')
const Logger = require('koa-logger')
const router = require('./routes')
const PORT = process.env.PORT || 3000
const app = new Koa()
app.use(Logger())
app.use(router())
app.listen(PORT, () => {
console.log('Server running on port ' + PORT)
})
/routes/root.js
const Router = require('koa-router')
const router = new Router()
router.get('/', async (ctx, next) => {
ctx.body = 'Hello'
})
module.exports = router
/routes/todo.js
const Router = require('koa-router')
const router = new Router({
prefix: '/todo'
})
router.get('/', async (ctx, next) => {
ctx.body = [{
description: 'Do the thing',
completed: false
},
{
description: 'Pickup the stuff',
completed: false
},
{
description: 'Meet with Team',
completed: true
},
]
})
module.exports = router
/routes/index.js
const combineRouters = require('koa-combine-routers')
const rootRouter = require('./root')
const todoRouter = require('./todo')
const router = combineRouters(rootRouter, todoRouter)
module.exports = router
npm run dev
Open a browser to http://localhost:3000/todo to see the hard-coded list (the root path / returns the 'Hello' healthcheck).
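You can also hit the endpoints from the command line as a quick sanity check; the list returned by /todo is the hard-coded array above.
curl http://localhost:3000/
curl http://localhost:3000/todo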
For a JavaScript implementation, a MongoDB NoSQL database seems like a natural fit. In a Java-based microservice, relational databases are more standard, and I would include a schema migration tool like Liquibase. With a NoSQL datastore this is not as crucial, so I will omit it from this solution. To add some structure around data access, I am including Mongoose.
# mongo
> use micro-test
> db.todos.insert({"description":"Do the thing","completed":false})
> db.todos.insert({"description":"Pickup the Stuff","completed":false})
> db.todos.insert({"description":"Meet the Team","completed":true})
> show dbs
> db.todos.find()
Let’s use the data.
/models/todo.js
const mongoose = require('mongoose')
// Declare Schema
const TodoSchema = new mongoose.Schema({
description: {
type: String
},
completed: {
type: Boolean
},
}, {
timestamps: true
})
// Declare Model to mongoose with Schema
const Todo = mongoose.model('Todo', TodoSchema)
// Export Model to be used in Node
module.exports = mongoose.model('Todo')
/controllers/todo.js
const Todo = require('../models/todo')
async function findAll(ctx) {
// Fetch all Todo’s from the database and return as payload
const todos = await Todo.find({})
ctx.body = todos
}
module.exports = {
findAll,
}
/routes/todo.js
const Router = require('koa-router')
const router = new Router({
prefix: '/todo'
})
const controller = require('../controllers/todo')
router.get('/', controller.findAll)
module.exports = router
/app.js
const Koa = require('koa')
const Logger = require('koa-logger')
const router = require('./routes')
const mongoose = require('mongoose')
const PORT = process.env.PORT || 3000
const db = mongoose.connection
db.on('error', console.error.bind(console, 'connection error:'))
db.once('open', function() {
console.log('we are connected!')
})
mongoose.connect(`mongodb://localhost:27017/micro-test`, {
useNewUrlParser: true,
})
const app = new Koa()
app.use(Logger())
app.use(router())
app.listen(PORT, () => {
console.log('Server running on port ' + PORT)
})
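Nodemon will restart the server, and GET /todo now returns the documents inserted through the mongo shell. Each item also carries Mongoose’s _id and __v fields plus the timestamps added by the schema; the shape looks roughly like this (the values are placeholders, not captured output):
[
  {
    "_id": "<generated ObjectId>",
    "description": "Do the thing",
    "completed": false,
    "createdAt": "<ISO timestamp>",
    "updatedAt": "<ISO timestamp>",
    "__v": 0
  },
  ...
]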
We can build out the remaining REST functionality now.
/controllers/todo.js
const Todo = require('../models/todo')
async function findAll(ctx) {
const todos = await Todo.find({})
ctx.body = todos
}
async function create(ctx) {
console.log('ctx.request.body', ctx.request.body)
const newTodo = new Todo(ctx.request.body)
const savedTodo = await newTodo.save()
ctx.body = savedTodo
}
async function destroy(ctx) {
const id = ctx.params.id
const todo = await Todo.findById(id)
const deletedTodo = await todo.remove()
ctx.body = deletedTodo
}
async function update(ctx) {
const id = ctx.params.id
const todo = await Todo.findById(id)
todo.completed = !todo.completed
const updatedTodo = await todo.save()
ctx.body = updatedTodo
}
module.exports = {
findAll,
create,
destroy,
update,
}
/routes/todo.js
const Router = require('koa-router')
const router = new Router({
prefix: '/todo'
})
const controller = require('../controllers/todo')
router.get('/', controller.findAll)
router.post('/', controller.create)
router.post('/:id', controller.update)
router.put('/:id', controller.update)
router.delete('/:id', controller.destroy)
module.exports = router
Add bodyParser
/app.js
const Koa = require('koa')
const Logger = require('koa-logger')
const router = require('./routes')
const bodyParser = require('koa-bodyparser')
const mongoose = require('mongoose')
const PORT = process.env.PORT || 3000
const db = mongoose.connection
db.on('error', console.error.bind(console, 'connection error:'))
db.once('open', function() {
console.log('we are connected!')
})
mongoose.connect(`mongodb://localhost:27017/micro-test`, {
useNewUrlParser: true,
})
const app = new Koa()
app.use(bodyParser())
app.use(Logger())
app.use(router())
app.listen(PORT, () => {
console.log('Server running on port ' + PORT)
})
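With the body parser in place, the full CRUD surface can be exercised from the command line. A rough sketch, where <id> stands in for an _id returned by GET /todo:
curl -X POST http://localhost:3000/todo -H 'Content-Type: application/json' -d '{"description": "Water the plants", "completed": false}'
curl -X PUT http://localhost:3000/todo/<id>
curl -X DELETE http://localhost:3000/todo/<id>
Note that PUT (or POST to /todo/:id) toggles the completed flag rather than accepting a payload, which matches the update controller above.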
Let’s add Swagger UI to our app so developers can discover the API. We need to create a Swagger spec and serve it through the koa2-swagger-ui middleware. Install koa2-swagger-ui, plus koa-mount and koa-static to serve the spec file:
npm install koa-mount koa-static koa2-swagger-ui --save
Update app.js. (This listing is the final version of app.js; it already includes the GraphQL server and Kafka wiring that are built later in this recipe.)
const Koa = require('koa')
const Logger = require('koa-logger')
const router = require('./routes')
const bodyParser = require('koa-bodyparser')
const mongoose = require('mongoose')
const graphqlServer = require('./graphql/graphqlServer')
const {
kafkaServer
} = require('./messaging/kafka')
const serve = require('koa-static')
const mount = require('koa-mount')
const koaSwagger = require('koa2-swagger-ui')
const PORT = process.env.PORT || 3000
const db = mongoose.connection
db.on('error', console.error.bind(console, 'connection error:'))
db.once('open', function() {
console.log('we are connected!')
})
mongoose.connect(`mongodb://localhost:27017/micro-test`, {
useNewUrlParser: true,
})
const app = new Koa()
app.use(bodyParser())
app.use(Logger())
app.use(mount('/static', serve('./static')))
app.use(router())
app.use(graphqlServer.getMiddleware())
app.use(
koaSwagger({
swaggerOptions: {
url: 'static/swagger.json',
},
})
)
kafkaServer.run().catch(console.error)
app.listen(PORT, () => {
console.log('Server running on port ' + PORT)
console.log(
`🚀 GraphQL Server ready at http://localhost:3000${graphqlServer.graphqlPath}`
)
})
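One note on the Swagger middleware above: koa2-swagger-ui serves the UI at /docs by default. If you prefer a different path, the middleware accepts a routePrefix option; this is a sketch of that variation (the /api-docs path is just an example):
app.use(
  koaSwagger({
    routePrefix: '/api-docs', // default is '/docs'
    swaggerOptions: {
      url: 'static/swagger.json',
    },
  })
)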
static/swagger.json
{
"swagger": "2.0",
"info": {
"description": "Node Microservice API",
"version": "0.1",
"title": "Node Microservice API"
},
"paths": {
"/": {
"get": {
"produces": ["text/plain"],
"parameters": [],
"responses": {
"200": {
"description": "Healthcheck"
}
}
}
},
"/todo": {
"get": {
"produces": ["application/json"],
"parameters": [],
"description": "Get all the Todos",
"responses": {
"200": {
"schema": {
"$ref": "#/definitions/Todos"
}
}
}
},
"post": {
"consumes": ["application/json"],
"produces": ["application/json"],
"description": "Create a new Todo",
"parameters": [{
"in": "body",
"name": "body",
"required": true,
"description": "New Todo text",
"schema": {
"$ref": "#/definitions/TodoCreate"
},
"x-examples": {
"application/json": "{\"description\": \"Don't forget\"}"
}
}],
"responses": {
"200": {
"description": "Definition generated from Swagger Inspector",
"schema": {
"$ref": "#/definitions/Todos"
}
}
}
}
},
"/todo/{id}": {
"post": {
"consumes": ["application/json"],
"produces": ["application/json"],
"description": "Toggle the Todo completed status",
"parameters": [{
"in": "path",
"name": "id",
"required": true,
"type": "string"
}],
"responses": {
"200": {
"description": "Updated Todo",
"schema": {
"$ref": "#/definitions/Todo"
}
}
}
},
"put": {
"consumes": ["application/json"],
"produces": ["application/json"],
"description": "Toggle the Todo completed status",
"parameters": [{
"in": "path",
"name": "id",
"required": true,
"type": "string"
}],
"responses": {
"200": {
"description": "Updated Todo",
"schema": {
"$ref": "#/definitions/Todo"
}
}
}
},
"delete": {
"consumes": ["application/json"],
"produces": ["application/json"],
"description": "Delete the Todo",
"parameters": [{
"in": "path",
"name": "id",
"required": true,
"type": "string"
}],
"responses": {
"200": {
"description": "Deleted Todo",
"schema": {
"$ref": "#/definitions/Todo"
}
}
}
}
},
"/message/todo": {
"post": {
"consumes": ["application/json"],
"produces": ["application/json"],
"description": "Insert a message in the Todo Queue",
"parameters": [{
"in": "body",
"name": "body",
"required": false,
"schema": {
"$ref": "#/definitions/TodoCreate"
},
"x-examples": {
"application/json": "{\"message\": \"Don't forget\"}"
}
}],
"responses": {
"200": {
"description": "Definition generated from Swagger Inspector",
"schema": {
"$ref": "#/definitions/Success"
}
}
}
}
}
},
"definitions": {
"Todo": {
"properties": {
"_id": {
"type": "string"
},
"__v": {
"type": "integer",
"format": "int32"
},
"description": {
"type": "string"
},
"completed": {
"type": "boolean"
},
"createdAt": {
"type": "string"
},
"updatedAt": {
"type": "string"
}
}
},
"Todos": {
"type": "array",
"items": {
"$ref": "#/definitions/Todo"
}
},
"TodoCreate": {
"properties": {
"description": {
"type": "string"
}
}
},
"Success": {
"properties": {
"success": {
"type": "boolean"
}
}
}
}
}
Open browser to http://localhost:3000/docs
/app.js
...
const graphqlServer = require('./graphql/graphqlServer')
...
const app = new Koa()
app.use(bodyParser())
app.use(Logger())
app.use(router())
app.use(graphqlServer.getMiddleware())
...
/graphql/graphqlServer.js
const {
ApolloServer,
gql
} = require('apollo-server-koa')
const {
makeExecutableSchema
} = require('graphql-tools')
const merge = require('lodash/merge')
const {
typeDef: Todo,
resolvers: TodoResolvers
} = require('./types/todo')
const Query = gql`
type Query {
hello: String
}
type Mutation {
null: Boolean
}
`
const SchemaDefinition = gql`
schema {
query: Query
mutation: Mutation
}
`
const resolvers = {
Query: {
hello: () => 'Hello world!',
},
}
const schema = makeExecutableSchema({
typeDefs: [SchemaDefinition, Query, Todo],
resolvers: merge(resolvers, TodoResolvers),
})
module.exports = new ApolloServer({
schema
})
/graphql/types/todo.js
const {
gql
} = require('apollo-server-koa')
const model = require('../../models/todo')
const typeDef = gql`
type Todo {
id: String
version: Int
description: String!
completed: Boolean!
}
extend type Query {
Todos: [Todo]
}
extend type Mutation {
createTodo(description: String): Todo
completeTodo(id: String): Todo
deleteTodo(id: String): Todo
}
`
const resolvers = {
Todo: {
id: (val) => val._id,
version: (val) => val.__v,
},
Query: {
Todos: async () => {
return await model.find({}).lean()
},
},
Mutation: {
createTodo: async (root, {
description
}) => {
const newTodo = new model({
description,
completed: false
})
return await newTodo.save()
},
completeTodo: async (root, {
id
}) => {
const todo = await model.findById(id)
todo.completed = !todo.completed
return await todo.save()
},
deleteTodo: async (root, {
id
}) => {
const todo = await model.findById(id)
if (todo) return await todo.remove()
return null
},
},
}
module.exports = {
typeDef,
resolvers,
}
Open a browser to http://localhost:3000/graphql
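Here are a few operations you can paste into the GraphQL playground. They match the schema above; the id argument is a placeholder for an id returned by the Todos query:
query {
  Todos {
    id
    description
    completed
  }
}

mutation {
  createTodo(description: "Write the GraphQL docs") {
    id
    description
    completed
  }
}

mutation {
  completeTodo(id: "<paste an id here>") {
    id
    completed
    version
  }
}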
Let’s use Kafka because it’s the new hotness, but we could just as easily use ActiveMQ or a Cloud offering. For debugging, we should install the Kafka command-line tools. This install is not a requirement, but it gives visibility into the queue. I suggest using Homebrew to install.
brew install kafka
Let’s use docker-compose to run Kafka.
kafka/docker-compose.yml
version: '3'
services:
  zookeeper:
    image: wurstmeister/zookeeper
  kafka:
    image: wurstmeister/kafka
    ports:
      - '9092:9092'
    environment:
      KAFKA_ADVERTISED_HOST_NAME: localhost
      KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
Start Kafka with this command.
docker-compose up -d
docker ps
Start an interactive producer with this command.
kafka-console-producer --broker-list localhost:9092 --topic test
Start a consumer to monitor the queue with this command.
kafka-console-consumer --bootstrap-server localhost:9092 --topic test
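The commands above use a throwaway test topic. Once the service below is wired up it publishes to the todo topic, so pointing a consumer at that topic is a handy way to watch the application’s traffic:
kafka-console-consumer --bootstrap-server localhost:9092 --topic todo --from-beginning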
npm i kafkajs uuid
/app.js
...
const {
kafkaServer
} = require('./messaging/kafka')
...
app.use(graphqlServer.getMiddleware())
kafkaServer.run().catch(console.error)
...
/routes/message.js
const Router = require('koa-router')
const router = new Router({
prefix: '/message'
})
const controller = require('../controllers/todo')
router.post('/todo', controller.sendMessage)
module.exports = router
/routes/index.js
const combineRouters = require('koa-combine-routers')
const rootRouter = require('./root')
const todoRouter = require('./todo')
const messageRouter = require('./message')
const router = combineRouters(rootRouter, todoRouter, messageRouter)
module.exports = router
/messaging/kafka.js
const {
Kafka
} = require('kafkajs')
const model = require('../models/todo')
const kafka = new Kafka({
clientId: 'my-app',
brokers: ['localhost:9092'],
})
const producer = kafka.producer()
const consumer = kafka.consumer({
groupId: 'group_id'
})
const kafkaServer = {
run: async () => {
// Connect the producer so sendMessage() can publish to the topic
await producer.connect()
// Consuming
await consumer.connect()
await consumer.subscribe({
topic: 'todo',
fromBeginning: true
})
await consumer.run({
eachMessage: async ({
topic,
partition,
message
}) => {
console.log({
partition,
offset: message.offset,
value: message.value.toString(),
})
const payload = JSON.parse(message.value.toString())
const newTodo = new model({
description: payload.description,
completed: false,
})
await newTodo.save()
},
})
},
}
module.exports = {
kafkaServer,
producer
}
/controllers/todo.js
const {
v4: uuidv4
} = require('uuid')
const {
producer
} = require('../messaging/kafka')
...
async function sendMessage(ctx) {
// place a new Todo message on the topic
const todoMessage = {
description: ctx.request.body.description,
transactionId: uuidv4(),
}
await producer.send({
topic: 'todo',
messages: [{
value: JSON.stringify(todoMessage)
}],
})
ctx.body = {
success: true
}
}
module.exports = {
findAll,
create,
destroy,
update,
sendMessage,
}
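With Kafka and the service running, you can drop a message on the queue through the new endpoint and watch the consumer persist it as a Todo (the description text is just an example):
curl -X POST http://localhost:3000/message/todo -H 'Content-Type: application/json' -d '{"description": "Buy milk"}'
A follow-up GET /todo, or the console consumer on the todo topic, should show the new item.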
Enough about the tooling. Whichever tooling you choose, you MUST LOG MESSAGE CONTEXT. Failing to log message context renders your aggregated logs useless, and I’ve seen this in so many organizations. What does it take to do this correctly? When an event enters your microservice network, it must be assigned a globally unique identifier. That identifier must be part of every message passed between microservices. A microservice that transforms a message must respond with the same identifier it received. All aggregated logging must consistently include the identifier. A unique identifier is the MINIMAL contextual information to include in every message and log consistently. Your organization will need to determine what additional fields are required to trace message processing within your microservice network.
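As a sketch of what this can look like in this stack (the header name and middleware file are my own convention, not something built earlier in this recipe), a small Koa middleware can assign or propagate the identifier and make it available to logging and to outgoing messages:
/middleware/correlation.js
const { v4: uuidv4 } = require('uuid')

// Reuse the caller's identifier if one arrived, otherwise mint a new one,
// then expose it to downstream middleware and echo it back to the caller.
module.exports = async (ctx, next) => {
  const transactionId = ctx.get('x-transaction-id') || uuidv4()
  ctx.state.transactionId = transactionId
  ctx.set('x-transaction-id', transactionId)
  console.log(`[${transactionId}] ${ctx.method} ${ctx.url}`)
  await next()
}
Registered with app.use() before the routers, the same ctx.state.transactionId can then be copied into the transactionId field of the Kafka messages produced in sendMessage(), so the identifier survives the hop through the queue.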
In my Java Microservice stack post, we use a SpringBoot Configuration Server to serve properties consistently across a server array. NodeJS has client modules for the SpringBoot Configuration Server; it’s another option.