We often start a project with a monolithic architecture. Over time, as the project scales, it may become necessary to transition to microservices. But what if the code you want to extract is already used in other parts of your application? In this article, I'll show you a way to introduce gRPC into your monolithic architecture.
Monolith
The first step is to create the main monolith server with the necessary logic.
server.js
const express = require("express");
const app = express();
const morgan = require("morgan");
const bodyParser = require("body-parser");
const { legacyController } = require("./controllers");
const PORT = process.env.PORT || 3001;
app.use(morgan("dev"));
app.use(bodyParser.json());
app.use(bodyParser.urlencoded({ extended: true }));
app.get("/", (req, res) => {
res.send("Hello World");
});
app.get("/api/users", async (req, res) => {
const { sort = "desc" } = req.query;
const users = await legacyController.getUsers(sort);
return res.json(users);
});
const server = app.listen(PORT, () => {
console.log(
`Server is running on port ${server.address().port}\nhttp://localhost:${
server.address().port
}`
);
});
controllers/legacyController.js
const getUsers = async (sort) => {
const users = [
{ name: "John", age: 21 },
{ name: "Jane", age: 22 },
];
if (sort === "desc")
return Promise.resolve(users.sort((a, b) => b.age - a.age));
return Promise.resolve(users.sort((a, b) => a.age - b.age));
};
module.exports = {
getUsers,
};
controllers/index.js
module.exports = {
legacyController: require("./legacyController")
}
The response to GET http://localhost:3001/api/users is:
[
{
"name": "Jane",
"age": 22
},
{
"name": "John",
"age": 21
}
]
This represents the legacy logic, and now we're going to implement it in our new gRPC server as a microservice.
gRPC server
server.js
const grpc = require("@grpc/grpc-js");
const { grpc: legacyController } = require("./legacyController/infrastructure");
function main() {
const PORT = process.env.PORT || 50051;
const server = new grpc.Server();
const services = [[legacyController.service, legacyController.methods]];
services.forEach(([service, methods]) => {
server.addService(service, methods);
});
const serverCredentials = grpc.ServerCredentials.createInsecure();
server.bindAsync(`0.0.0.0:${PORT}`, serverCredentials, (error) => {
if (error) throw error;
console.log(`Server running at http://localhost:${PORT}`);
server.start();
});
}
main();
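Note: newer versions of @grpc/grpc-js have deprecated server.start() (the server begins serving as soon as bindAsync succeeds), so you may see a deprecation warning here; the call is harmless and kept for compatibility with older versions.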
legacyController/application/index.js
In this part, it's important to use the same package name as specified in the model.proto file, and remember that package names are case-sensitive.
const path = require("path");
const protoPath = path.join(__dirname, "..", "domain", "model.proto");
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");
const methods = require("./senders");
const packageDefinition = protoLoader.loadSync(protoPath, {
keepCase: true,
longs: String,
enums: String,
defaults: true,
oneofs: true,
});
const service = grpc.loadPackageDefinition(packageDefinition).legacyController;
module.exports = {
methods,
service: service.Output.service,
};
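If the package name in the proto doesn't match the property we read here, loadPackageDefinition simply returns undefined for that key and the error only surfaces later. A minimal fail-fast guard (hypothetical, not part of the original code) placed right before the export makes the mismatch explicit at startup:
// Hypothetical sanity check: catch a package/service name mismatch
// at startup instead of at the first RPC call.
if (!service || !service.Output) {
  throw new Error(
    'Expected package "legacyController" with service "Output" in model.proto'
  );
}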
legacyController/application/methods.js
In this file, we use the same logic as in our legacy controller, with one difference: the function parameters arrive in a single object.
async function getUsers({ sort }) {
const users = [
{ name: "John", age: 21 },
{ name: "Jane", age: 22 },
];
let data = [];
if (sort === "desc") data = users.sort((a, b) => b.age - a.age);
else data = users.sort((a, b) => a.age - b.age);
return {
parse: true,
data: JSON.stringify(data),
};
}
module.exports = {
getUsers,
};
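A quick sanity check of this function in isolation (illustrative only, not part of the original code):
const { getUsers } = require("./methods");
getUsers({ sort: "desc" }).then(console.log);
// { parse: true, data: '[{"name":"Jane","age":22},{"name":"John","age":21}]' }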
legacyController/application/senders.js
In this file, we bind each function from methods.js to a wrapper. gRPC operates using callbacks, while our business logic is promise-based, so the wrapper bridges the two and returns the data (or the error) through the callback.
const methods = require("./methods");
// Wrapper used as a gRPC method: forwards the request to the main
// function, then returns its result (or error) through the callback.
async function functionParser({ request }, callback) {
  const { mainFunction } = this;
  mainFunction(request)
    .then((data) => callback(null, data))
    .catch((error) => callback(error));
}
// Build an object with every function from methods.js, each bound to
// functionParser so it can be registered as a gRPC method.
const senders = Object.entries(methods).reduce(
(acc, [functionName, functionVal]) => {
acc[functionName] = functionParser.bind({
mainFunction: functionVal,
});
return acc;
},
{}
);
module.exports = { ...senders };
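To see what the binding produces, this is roughly how the gRPC runtime will invoke a sender at runtime (illustrative only):
const senders = require("./senders");
senders.getUsers({ request: { sort: "asc" } }, (err, response) => {
  if (err) return console.error(err);
  console.log(response);
  // { parse: true, data: '[{"name":"John","age":21},{"name":"Jane","age":22}]' }
});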
legacyController/domain/model.proto
In this model, we declare the RPC parameters and return types with strict typing. It's crucial to use the same method names in the model, the server, and the client.
We define data as a string because our array has no fixed shape; modeling an arbitrary structure as a proto message would be cumbersome, so we serialize it to JSON instead.
syntax = "proto3";
package legacyController;
import "google/protobuf/any.proto";
service Output {
rpc getUsers (UserParams) returns (Response) {}
}
message UserParams {
string sort = 1;
}
message Response {
string data = 1;
bool parse = 2;
}
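For contrast, if the payload had a fixed shape we could type it fully instead of serializing to a string. A sketch of that alternative (not used in this article):
// Alternative: a fully typed response for a known shape.
message User {
  string name = 1;
  int32 age = 2;
}
message TypedUsersResponse {
  repeated User users = 1;
}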
legacyController/infrastructure/index.js
Finally, we export all the necessary methods to use in server.js.
const { methods, service } = require("../application");
module.exports = {
grpc: {
service,
methods,
},
};
Update legacy code
First, we copy the proto file into the monolith and ensure it stays updated on both sides whenever the contract changes.
protos/legacyController.proto
syntax = "proto3";
package legacyController;
import "google/protobuf/any.proto";
service Output {
rpc getUsers (UserParams) returns (Response) {}
}
message UserParams {
string sort = 1;
}
message Response {
string data = 1;
bool parse = 2;
}
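Keep in mind that both copies of this file describe the same contract; whenever the contract changes, update them together. In a larger setup you might publish the proto from a shared package instead of copying it by hand.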
Now we return to the monolith server and open controllers/legacyController.js. Here, we'll create the gRPC client that establishes the connection with the other server.
const { getGrpcClient } = require("../helpers");
const CLIENT_NAME = "legacyController";
const path = require("path");
const protoPath = path.join(
__dirname,
"..",
"protos",
"legacyController.proto"
);
const client = getGrpcClient(protoPath, CLIENT_NAME);
helpers/grpcClient.js
const grpc = require("@grpc/grpc-js");
const protoLoader = require("@grpc/proto-loader");
const LOCAL_HOST = "localhost:50051";
const loadProto = (protoPath) =>
protoLoader.loadSync(protoPath, {
keepCase: true,
longs: String,
enums: String,
defaults: true,
oneofs: true,
});
const getGrpcClient = (protoPath, serviceName, host = LOCAL_HOST) => {
const packageDefinition = loadProto(protoPath);
const service = grpc.loadPackageDefinition(packageDefinition)[serviceName];
const client = new service.Output(host, grpc.credentials.createInsecure());
return client;
};
module.exports = getGrpcClient;
helpers/index.js
module.exports = {
getGrpcClient: require("./grpcClient"),
};
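gRPC clients connect lazily, so a wrong host would only surface on the first call. If you want to fail fast, @grpc/grpc-js exposes waitForReady on the client; an optional check you could add in controllers/legacyController.js right after creating the client (a sketch, assuming a 5-second deadline):
// Optional: verify the channel is reachable shortly after startup.
client.waitForReady(Date.now() + 5000, (err) => {
  if (err) console.error("gRPC server unreachable:", err.message);
});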
Comment out the old code
/* const getUsers = async (sort) => {
const users = [
{ name: "John", age: 21 },
{ name: "Jane", age: 22 },
];
if (sort === "desc")
return Promise.resolve(users.sort((a, b) => b.age - a.age));
return Promise.resolve(users.sort((a, b) => a.age - b.age));
}; */
Next, we create a helper that maps each function's positional arguments onto a named-parameter object, keyed by parameter name:
const transformMethodParams = {
getUsers: ["sort"],
};
const passArgsToObj = (args, method) => {
const params = transformMethodParams[method];
const context = {};
if (!params) return context;
params.forEach((param, index) => {
context[param] = args[index];
});
return context;
};
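For example (illustrative only):
passArgsToObj(["desc"], "getUsers"); // => { sort: "desc" }
passArgsToObj(["desc"], "unknownMethod"); // => {} (no mapping registered)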
Since gRPC uses callbacks while our controller API is promise-based, we add an abstraction that intercepts every call to our functions, waits for the response, and propagates any error that occurs.
async function transformCall(...args) {
const { functionName } = this;
const context = passArgsToObj(args, functionName);
return new Promise((resolve, reject) => {
client[functionName](context, {}, (err, response) => {
if (err) return reject(err);
if (response?.parse) return resolve(JSON.parse(response?.data));
resolve(response.data);
});
});
}
const outputMethod = (method) => transformCall.bind({ functionName: method });
module.exports = {
getUsers: outputMethod("getUsers"),
};
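The payoff is that the route in the monolith's server.js doesn't change at all. It still awaits legacyController.getUsers(sort), which now transparently crosses the network:
// Unchanged route from server.js: the controller swap is invisible here.
app.get("/api/users", async (req, res) => {
  const { sort = "desc" } = req.query;
  const users = await legacyController.getUsers(sort);
  return res.json(users);
});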
Full code
const { getGrpcClient } = require("../helpers");
const CLIENT_NAME = "legacyController";
const path = require("path");
const protoPath = path.join(
__dirname,
"..",
"protos",
"legacyController.proto"
);
const client = getGrpcClient(protoPath, CLIENT_NAME);
/* const getUsers = async (sort) => {
const users = [
{ name: "John", age: 21 },
{ name: "Jane", age: 22 },
];
if (sort === "desc")
return Promise.resolve(users.sort((a, b) => b.age - a.age));
return Promise.resolve(users.sort((a, b) => a.age - b.age));
}; */
const transformMethodParams = {
getUsers: ["sort"],
};
const passArgsToObj = (args, method) => {
const params = transformMethodParams[method];
const context = {};
if (!params) return context;
params.forEach((param, index) => {
context[param] = args[index];
});
return context;
};
async function transformCall(...args) {
const { functionName } = this;
const context = passArgsToObj(args, functionName);
return new Promise((resolve, reject) => {
client[functionName](context, {}, (err, response) => {
if (err) return reject(err);
if (response?.parse) return resolve(JSON.parse(response?.data));
resolve(response.data);
});
});
}
const outputMethod = (method) => transformCall.bind({ functionName: method });
module.exports = {
getUsers: outputMethod("getUsers"),
};
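With both servers running (the monolith on port 3001 and the gRPC service on port 50051 by default), GET http://localhost:3001/api/users should return the same JSON as before, now produced by the microservice behind the scenes.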
Conclusions
gRPC is a powerful protocol for microservice communication, offering strong typing that protects the integrity of the data exchanged between clients and servers. Introducing it inside a monolithic system lets you strategically carve the monolith into smaller, more manageable parts.