Revert apicache changes as the debug log is too crazy

Louis Lam 2024-10-09 07:08:05 +08:00
parent da8da0bf59
commit 6437b9afab
1 changed file with 35 additions and 18 deletions
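In effect, every call site that the earlier change had routed through Uptime Kuma's shared logger goes back to apicache's own gated debug() helper, so the module stays quiet unless its debugging is switched on explicitly. A rough before/after of one call site, for orientation only (both lines are taken from the diff below; nothing here is new code in the commit):

    // Reverted: shared logger from ../../../src/util, emitted as part of the app-wide debug log
    log.debug("apicache", `group detected "${groupName}"`);

    // Restored: module-local helper, prints only when apicache debugging is enabled
    debug("group detected \"" + groupName + "\"");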


@@ -1,6 +1,5 @@
 let url = require("url");
 let MemoryCache = require("./memory-cache");
-const { log } = require("../../../src/util");
 
 let t = {
     ms: 1,
@@ -91,6 +90,24 @@ function ApiCache() {
     instances.push(this);
     this.id = instances.length;
 
+    /**
+     * Logs a message to the console if the `DEBUG` environment variable is set.
+     * @param {string} a The first argument to log.
+     * @param {string} b The second argument to log.
+     * @param {string} c The third argument to log.
+     * @param {string} d The fourth argument to log, and so on... (optional)
+     *
+     * Generated by Trelent
+     */
+    function debug(a, b, c, d) {
+        let arr = ["\x1b[36m[apicache]\x1b[0m", a, b, c, d].filter(function (arg) {
+            return arg !== undefined;
+        });
+        let debugEnv = process.env.DEBUG && process.env.DEBUG.split(",").indexOf("apicache") !== -1;
+
+        return (globalOptions.debug || debugEnv) && console.log.apply(null, arr);
+    }
+
     /**
      * Returns true if the given request and response should be logged.
      * @param {Object} request The HTTP request object.
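The restored debug() above stays silent unless apicache debugging is turned on. A minimal sketch of the two switches it checks, assuming the vendored module still exposes apicache's usual options() API (the require path and entry script are illustrative, not taken from this commit):

    const apicache = require("./server/modules/apicache");

    // 1) per-instance option, read by debug() as globalOptions.debug
    apicache.options({ debug: true });

    // 2) or the environment check in debug(): a comma-separated DEBUG list containing "apicache"
    //    DEBUG=apicache node server/server.js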
@@ -129,7 +146,7 @@ function ApiCache() {
         let groupName = req.apicacheGroup;
 
         if (groupName) {
-            log.debug("apicache", `group detected "${groupName}"`);
+            debug("group detected \"" + groupName + "\"");
             let group = (index.groups[groupName] = index.groups[groupName] || []);
             group.unshift(key);
         }
@@ -195,7 +212,7 @@ function ApiCache() {
                 redis.hset(key, "duration", duration);
                 redis.expire(key, duration / 1000, expireCallback || function () {});
             } catch (err) {
-                log.debug("apicache", `error in redis.hset(): ${err}`);
+                debug("[apicache] error in redis.hset()");
             }
         } else {
             memCache.add(key, value, duration, expireCallback);
@@ -303,10 +320,10 @@ function ApiCache() {
 
                     // display log entry
                     let elapsed = new Date() - req.apicacheTimer;
-                    log.debug("apicache", `adding cache entry for "${key}" @ ${strDuration} ${logDuration(elapsed)}`);
-                    log.debug("apicache", `_apicache.headers: ${JSON.stringify(res._apicache.headers)}`);
-                    log.debug("apicache", `res.getHeaders(): ${JSON.stringify(getSafeHeaders(res))}`);
-                    log.debug("apicache", `cacheObject: ${JSON.stringify(cacheObject)}`);
+                    debug("adding cache entry for \"" + key + "\" @ " + strDuration, logDuration(elapsed));
+                    debug("_apicache.headers: ", res._apicache.headers);
+                    debug("res.getHeaders(): ", getSafeHeaders(res));
+                    debug("cacheObject: ", cacheObject);
                 }
             }
@@ -385,10 +402,10 @@ function ApiCache() {
         let redis = globalOptions.redisClient;
 
         if (group) {
-            log.debug("apicache", `clearing group "${target}"`);
+            debug("clearing group \"" + target + "\"");
 
             group.forEach(function (key) {
-                log.debug("apicache", `clearing cached entry for "${key}"`);
+                debug("clearing cached entry for \"" + key + "\"");
                 clearTimeout(timers[key]);
                 delete timers[key];
                 if (!globalOptions.redisClient) {
@@ -397,7 +414,7 @@ function ApiCache() {
                     try {
                         redis.del(key);
                     } catch (err) {
-                        log.info("apicache", "error in redis.del(\"" + key + "\")");
+                        console.log("[apicache] error in redis.del(\"" + key + "\")");
                     }
                 }
                 index.all = index.all.filter(doesntMatch(key));
@@ -405,7 +422,7 @@ function ApiCache() {
             delete index.groups[target];
         } else if (target) {
-            log.debug("apicache", `clearing ${isAutomatic ? "expired" : "cached"} entry for "${target}"`);
+            debug("clearing " + (isAutomatic ? "expired" : "cached") + " entry for \"" + target + "\"");
             clearTimeout(timers[target]);
             delete timers[target];
 
             // clear actual cached entry
@@ -415,7 +432,7 @@ function ApiCache() {
                 try {
                     redis.del(target);
                 } catch (err) {
-                    log.error("apicache", "error in redis.del(\"" + target + "\")");
+                    console.log("[apicache] error in redis.del(\"" + target + "\")");
                 }
             }
@@ -432,7 +449,7 @@ function ApiCache() {
                 }
             });
         } else {
-            log.debug("apicache", "clearing entire index");
+            debug("clearing entire index");
 
             if (!redis) {
                 memCache.clear();
@@ -444,7 +461,7 @@ function ApiCache() {
                    try {
                        redis.del(key);
                    } catch (err) {
-                        log.error("apicache", `error in redis.del("${key}"): ${err}`);
+                        console.log("[apicache] error in redis.del(\"" + key + "\")");
                    }
                });
            }
@@ -735,7 +752,7 @@ function ApiCache() {
          */
         let cache = function (req, res, next) {
             function bypass() {
-                log.debug("apicache", "bypass detected, skipping cache.");
+                debug("bypass detected, skipping cache.");
                 return next();
             }
@@ -788,7 +805,7 @@ function ApiCache() {
             // send if cache hit from memory-cache
             if (cached) {
                 let elapsed = new Date() - req.apicacheTimer;
-                log.debug("apicache", `sending cached (memory-cache) version of ${key} ${logDuration(elapsed)}`);
+                debug("sending cached (memory-cache) version of", key, logDuration(elapsed));
 
                 perf.hit(key);
                 return sendCachedResponse(req, res, cached, middlewareToggle, next, duration);
@@ -800,7 +817,7 @@ function ApiCache() {
                     redis.hgetall(key, function (err, obj) {
                         if (!err && obj && obj.response) {
                             let elapsed = new Date() - req.apicacheTimer;
-                            log.debug("apicache", "sending cached (redis) version of "+ key+" "+ logDuration(elapsed));
+                            debug("sending cached (redis) version of", key, logDuration(elapsed));
 
                             perf.hit(key);
                             return sendCachedResponse(
@@ -856,7 +873,7 @@ function ApiCache() {
            }
 
            if (globalOptions.trackPerformance) {
-                log.debug("apicache", "WARNING: using trackPerformance flag can cause high memory usage!");
+                debug("WARNING: using trackPerformance flag can cause high memory usage!");
            }
 
            return this;
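For context on the last hunk: trackPerformance is the stock apicache flag that keeps per-instance hit/miss counters, which is why the (now gated) warning about memory usage exists. A hedged sketch of how the flag is normally used, assuming the vendored copy keeps upstream's options()/getPerformance() API (require path illustrative):

    const apicache = require("./server/modules/apicache");

    // Enabling the flag makes options() emit the warning via debug()
    // (visible only when apicache debugging is on) and starts collecting stats.
    apicache.options({ trackPerformance: true });

    // Later, e.g. from a diagnostics route:
    console.log(apicache.getPerformance());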