I'm using the redis package, and I'm having a lot of connection issues: connections suddenly fail with ECONNREFUSED. I suspect it's because I've done the connection management wrong.
The issue with this project is that my app sends parameters to the API (an IP and a port), and the API must create a connection with those values, fetch some data, and return it.
I have hundreds of servers, so I don't know how to manage all those connections.
So far I'm managing everything through a single connection, and that's why I think it's failing.
It currently looks like this:
let redisClient;

const killRedis = () => {
    redisClient.quit()
    console.log("redis client shut down")
}

const createRedisClient = async (port, url) => {
    redisClient = require('redis').createClient(port, url, {
        no_ready_check: true,
        db: 1
    })
    redisClient.on('error', function (err) {
        console.log('Error ' + err);
        killRedis();
        return undefined;
    });
    redisClient.on('connect', function () {
        console.log('Connected to Redis');
    });
    return redisClient;
}

module.exports = { createRedisClient, }
It kind of works, but ends up failing with ECONNREFUSED from time to time.
I use it like the following in my routes:
const scanAll = async (port, url) => {
    const redisClient = await createRedisClient(port, url)
    if (!redisClient) return 500
    const scan = promisify(redisClient.scan).bind(redisClient);
    const found = [];
    let cursor = '0';
    do {
        const reply = await scan(cursor, 'MATCH', '*');
        cursor = reply[0];
        found.push(...reply[1]);
    } while (cursor !== '0');
    return found;
};

/* Return all the users id */
router.post('/user/list', async function (req, res, next) {
    const data = await scanAll(req.body.port, req.body.ip);
    console.log("data ", data)
    if (data === 500) {
        res.status(500).json({
            error: "Error, server connection refused"
        })
    } else if (data.length === 0) {
        res.status(204).json(data)
    } else {
        res.status(200).json(data);
    }
})
How can I do proper connection management?
EDIT: here is my new attempt, but I think my connections are overflowing when I make 2 simultaneous requests:
let connections = []

findConnection = (ip, port) => {
    let connection = connections.filter(i => i.ip == ip && i.port == port)
    console.log("pre")
    console.log(connection)
    if (connection && connection.connection) {
        console.log("opcion1: ", connection.ip)
        console.log("connection already exists")
        return connection[0].connection
    } else {
        console.log("opcion2")
        console.log(connections)
        connections.push({
            ip: ip,
            port: port,
            connection: require('redis').createClient(port, ip, {
                no_ready_check: true,
                db: 1
            })
        })
        return connections.filter(i => i.ip == ip && i.port == port)[0].connection
    }
}

const createRedisClient = async (port, url) => {
    let redisClient = findConnection(url, port)
    redisClient.on('error', function (err) {
        console.log('Error ' + err);
        redisClient.quit()
        return undefined;
    });
    redisClient.on('connect', function () {
        console.log('Connected to Redis');
    });
    return redisClient;
}

module.exports = { createRedisClient, }
I have noticed that I get the following warning:

MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 error listeners added. Use emitter.setMaxListeners() to increase limit
EDIT: problem with my latest implementation
My current implementation is the following:
let connections = []

const killRedis = (redisClient, ip, port) => {
    redisClient.quit()
    connections = connections.filter((i) => { return i.ip !== ip && i.port != port })
}

const subscribe = (redisClient, url, port) => {
    redisClient.on('error', function (err) {
        console.log('Error ' + err);
        killRedis(redisClient, url, port)
        return undefined;
    });
    redisClient.on('connect', function () {
        console.log('Connected to Redis');
        return redisClient;
    });
}

findConnection = (ip, port) => {
    let connection = connections.filter(i => i.ip == ip && i.port == port)
    if (connection && connection.length > 0) {
        subscribe(connection[0].connection)
        return connection[0].connection
    } else {
        connections.push({
            ip: ip,
            port: port,
            connection: require('redis').createClient(port, ip, {
                no_ready_check: true,
                db: 1
            })
        })
        subscribe(connections.filter(i => i.ip == ip && i.port == port)[0].connection, ip, port)
        return connections.filter(i => i.ip == ip && i.port == port)[0].connection
    }
}

const createRedisClient = async (port, url) => {
    let redisClient = findConnection(url, port)
    return redisClient
}

module.exports = { createRedisClient }
It is almost working; the problem is that I don't know how to handle the error event listener. If the connection fails, I should return undefined, but it doesn't seem to be doing so.
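As I understand it, a return value from inside the 'error' listener never reaches the caller of createRedisClient. One possible workaround (just an illustrative sketch, not part of my code above; waitForClient is a made-up name) would be to wrap the client's events in a Promise so the caller can actually receive undefined:

// Sketch only: resolve with the client once it is ready, or with undefined
// if the connection fails, so the caller gets a usable return value.
const waitForClient = (redisClient) =>
    new Promise((resolve) => {
        redisClient.once('ready', () => resolve(redisClient))
        redisClient.once('error', (err) => {
            console.log('Error ' + err)
            redisClient.quit()
            resolve(undefined)
        })
    })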
4 Answers
My final implementation that works as expected
You can use this npm package.
You can build a cache pool of existing connections and reuse them, rather than creating a new Redis connection for every request. This way you won't cross the limit of event listeners per Redis connection. You just need to create a connection if it doesn't already exist in the pool.
PS - For simplicity, my current implementation of the cache pool creates a single connection for every pair of host and port and stores them all. You could implement an LRU cache on top of the cache pool to evict unused Redis connections if need be. This way you should be able to solve your connection management problem, since each connection is created once and then reused (a minimal sketch of the idea follows the file list below).
cache-pool.js
cache.js
test-sever.js
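The answer's original files are not reproduced above; the following is only a minimal sketch of the cache-pool idea, written against the same node_redis v3-style API used in the question. The pool Map and the getClient name are assumptions for illustration, not the answer's actual cache-pool.js.

// cache-pool.js (illustrative sketch only)
const redis = require('redis')

// One client per "host:port" target, stored in a Map that acts as the pool.
const pool = new Map()

const getClient = (port, host) => {
    const key = `${host}:${port}`
    if (pool.has(key)) {
        // Reuse the existing connection instead of creating a new one.
        return pool.get(key)
    }
    const client = redis.createClient(port, host, {
        no_ready_check: true,
        db: 1
    })
    // Listeners are attached once per client, so the listener limit is never hit.
    client.on('error', (err) => {
        console.log('Error ' + err)
        client.quit()
        pool.delete(key)
    })
    client.on('connect', () => {
        console.log('Connected to Redis at ' + key)
    })
    pool.set(key, client)
    return client
}

module.exports = { getClient }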
Both the single connection solution and the cache pool solution can work; you have just hit a different problem with each of them.
I guess the error is triggered when the Redis maxclients limit is reached, because your single connection solution does not close the client when processing each request. So it kind of works while you stay under the maxclients limit, but eventually fails with ECONNREFUSED. You can fix that by adding a killRedis call before the request returns.
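As a rough sketch of that suggestion, applied to the question's original scanAll (the import path is an assumption, and the client is quit directly here rather than through an exported killRedis):

const { promisify } = require('util')
const { createRedisClient } = require('./redisClient')   // path is an assumption

const scanAll = async (port, url) => {
    const redisClient = await createRedisClient(port, url)
    if (!redisClient) return 500
    const scan = promisify(redisClient.scan).bind(redisClient)
    const found = []
    let cursor = '0'
    try {
        do {
            const reply = await scan(cursor, 'MATCH', '*')
            cursor = reply[0]
            found.push(...reply[1])
        } while (cursor !== '0')
    } finally {
        // Close the connection before the request returns, so the server
        // never accumulates clients up to its maxclients limit.
        redisClient.quit()
    }
    return found
}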
As for the warning, refer to the answer to MaxListenersExceededWarning: Possible EventEmitter memory leak detected. 11 message listeners added. Use emitter.setMaxListeners() to increase limit.
For each request you call redisClient.on() twice in the createRedisClient function, so two new listeners are added every time, and the default limit is 10. Because nothing ever removes those listeners, you end up with the MaxListenersExceededWarning. The solution is to move all of the redisClient.on() code from the createRedisClient function into the findConnection function, so the two listeners are attached only once when the connection is created and are not tied to individual user requests.
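A sketch of that change, based on the question's last attempt (it reuses the question's connections array and subscribe helper; the rest is a guess at the intended shape rather than tested code):

// Listeners are attached inside findConnection, only when a client is
// created, so repeated requests no longer add new listeners.
findConnection = (ip, port) => {
    const existing = connections.find(i => i.ip == ip && i.port == port)
    if (existing) {
        // Already subscribed when it was created; just reuse it.
        return existing.connection
    }
    const connection = require('redis').createClient(port, ip, {
        no_ready_check: true,
        db: 1
    })
    subscribe(connection, ip, port)   // exactly once per client
    connections.push({ ip: ip, port: port, connection: connection })
    return connection
}

const createRedisClient = async (port, url) => {
    // No redisClient.on(...) calls here any more.
    return findConnection(url, port)
}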