Revert "Added support for "Refresh Ahead" caching strategy" (#19502)

Reverts TryGhost/Ghost#19499
Simon Backx 2024-01-17 14:12:58 +01:00 committed by GitHub
parent aaaa3ba797
commit a60704c588
13 changed files with 217 additions and 212 deletions

View File

@ -17,7 +17,6 @@ class AdapterCacheRedis extends BaseCacheAdapter {
* @param {Object} [config.clusterConfig] - redis cluster config used in case no cache instance provided
* @param {Object} [config.storeConfig] - extra redis client config used in case no cache instance provided
* @param {Number} [config.ttl] - default cached value Time To Live (expiration) in *seconds*
* @param {Number} [config.refreshAheadFactor] - number between 0 and 1 determining how old (as a fraction of the ttl) an entry may get before it is refreshed in the background
* @param {String} [config.keyPrefix] - prefix to use when building a unique cache key, e.g.: 'some_id:image-sizes:'
* @param {Boolean} [config.reuseConnection] - specifies if the redis store/connection should be reused within the process
*/
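For context, a minimal sketch of how these options were meant to combine when constructing the adapter (the wrapped cache-manager instance is a placeholder; the threshold follows the removed shouldRefresh logic further down):

```js
const adapter = new AdapterCacheRedis({
    cache: redisCacheInstance,        // placeholder: a cache-manager Redis store
    ttl: 100,                         // entries expire after 100 seconds
    refreshAheadFactor: 0.2,          // refresh once remaining TTL < 0.2 * 100 = 20 seconds
    keyPrefix: 'some_id:image-sizes:' // prefix used to build unique cache keys
});
```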
@ -58,9 +57,6 @@ class AdapterCacheRedis extends BaseCacheAdapter {
});
}
this.ttl = config.ttl;
this.refreshAheadFactor = config.refreshAheadFactor || 0;
this.currentlyExecutingBackgroundRefreshes = new Set();
this.keyPrefix = config.keyPrefix;
this._keysPattern = config.keyPrefix ? `${config.keyPrefix}*` : '';
this.redisClient = this.cache.store.getClient();
@ -140,66 +136,9 @@ class AdapterCacheRedis extends BaseCacheAdapter {
*
* @param {String} key
*/
async shouldRefresh(key) {
const internalKey = this._buildKey(key);
if (this.refreshAheadFactor === 0) {
debug(`shouldRefresh ${internalKey}: false - refreshAheadFactor = 0`);
return false;
}
if (this.refreshAheadFactor === 1) {
debug(`shouldRefresh ${internalKey}: true - refreshAheadFactor = 1`);
return true;
}
async get(key) {
try {
const ttlRemainingForEntry = await this.cache.ttl(internalKey);
const shouldRefresh = ttlRemainingForEntry < this.refreshAheadFactor * this.ttl;
debug(`shouldRefresh ${internalKey}: ${shouldRefresh} - TTL remaining = ${ttlRemainingForEntry}`);
return shouldRefresh;
} catch (err) {
logging.error(err);
return false;
}
}
/**
*
* @param {string} key
* @param {() => Promise<any>} [fetchData] An optional function to fetch the data, which will be used in the case of a cache MISS or a background refresh
*/
async get(key, fetchData) {
const internalKey = this._buildKey(key);
try {
const result = await this.cache.get(internalKey);
debug(`get ${internalKey}: Cache ${result ? 'HIT' : 'MISS'}`);
if (!fetchData) {
return result;
}
if (result) {
const shouldRefresh = await this.shouldRefresh(internalKey);
const isRefreshing = this.currentlyExecutingBackgroundRefreshes.has(internalKey);
if (isRefreshing) {
debug(`Background refresh already happening for ${internalKey}`);
}
if (!isRefreshing && shouldRefresh) {
debug(`Doing background refresh for ${internalKey}`);
this.currentlyExecutingBackgroundRefreshes.add(internalKey);
fetchData().then(async (data) => {
await this.set(key, data); // We don't use `internalKey` here because `set` handles it
this.currentlyExecutingBackgroundRefreshes.delete(internalKey);
}).catch((error) => {
this.currentlyExecutingBackgroundRefreshes.delete(internalKey);
logging.error({
message: 'There was an error refreshing cache data in the background',
error: error
});
});
}
return result;
} else {
const data = await fetchData();
await this.set(key, data); // We don't use `internalKey` here because `set` handles it
return data;
}
return await this.cache.get(this._buildKey(key));
} catch (err) {
logging.error(err);
}
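A sketch of how the reverted read-through signature was consumed, using the adapter constructed above (the fetch function and repository call are stand-ins; the behaviour follows the removed implementation):

```js
async function fetchPosts() {
    // stand-in for the expensive lookup run on a cache MISS
    // or during a background refresh
    return await postsRepository.findAll(); // hypothetical repository call
}

// MISS: fetchPosts runs inline, its result is cached and returned.
// HIT well inside the TTL: the cached value is returned untouched.
// HIT past the refresh-ahead threshold: the cached value is returned
// immediately while fetchPosts repopulates the entry in the background.
const posts = await adapter.get('posts:index', fetchPosts);
```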
@ -211,10 +150,9 @@ class AdapterCacheRedis extends BaseCacheAdapter {
* @param {*} value
*/
async set(key, value) {
const internalKey = this._buildKey(key);
debug('set', internalKey);
debug('set', key);
try {
return await this.cache.set(internalKey, value);
return await this.cache.set(this._buildKey(key), value);
} catch (err) {
logging.error(err);
}

View File

@ -59,127 +59,6 @@ describe('Adapter Cache Redis', function () {
assert.equal(value, 'value from cache');
});
it('can update the cache in the case of a cache miss', async function () {
const KEY = 'update-cache-on-miss';
let cachedValue = null;
const redisCacheInstanceStub = {
get: function (key) {
assert(key === KEY);
return cachedValue;
},
set: function (key, value) {
assert(key === KEY);
cachedValue = value;
},
store: {
getClient: sinon.stub().returns({
on: sinon.stub()
})
}
};
const cache = new RedisCache({
cache: redisCacheInstanceStub
});
const fetchData = sinon.stub().resolves('Da Value');
checkFirstRead: {
const value = await cache.get(KEY, fetchData);
assert.equal(fetchData.callCount, 1);
assert.equal(value, 'Da Value');
break checkFirstRead;
}
checkSecondRead: {
const value = await cache.get(KEY, fetchData);
assert.equal(fetchData.callCount, 1);
assert.equal(value, 'Da Value');
break checkSecondRead;
}
});
it('Can do a background update of the cache', async function () {
const KEY = 'update-cache-in-background';
let cachedValue = null;
let remainingTTL = 100;
const redisCacheInstanceStub = {
get: function (key) {
assert(key === KEY);
return cachedValue;
},
set: function (key, value) {
assert(key === KEY);
cachedValue = value;
},
ttl: function (key) {
assert(key === KEY);
return remainingTTL;
},
store: {
getClient: sinon.stub().returns({
on: sinon.stub()
})
}
};
const cache = new RedisCache({
cache: redisCacheInstanceStub,
ttl: 100,
refreshAheadFactor: 0.2
});
const fetchData = sinon.stub();
fetchData.onFirstCall().resolves('First Value');
fetchData.onSecondCall().resolves('Second Value');
checkFirstRead: {
const value = await cache.get(KEY, fetchData);
assert.equal(fetchData.callCount, 1);
assert.equal(value, 'First Value');
break checkFirstRead;
}
// We simulate having been in the cache for 15 seconds
remainingTTL = 85;
checkSecondRead: {
const value = await cache.get(KEY, fetchData);
assert.equal(fetchData.callCount, 1);
assert.equal(value, 'First Value');
break checkSecondRead;
}
// We simulate having been in the cache for 30 seconds
remainingTTL = 70;
checkThirdRead: {
const value = await cache.get(KEY, fetchData);
assert.equal(fetchData.callCount, 1);
assert.equal(value, 'First Value');
break checkThirdRead;
}
// We simulate having been in the cache for 85 seconds
// This should trigger a background refresh
remainingTTL = 15;
checkFourthRead: {
const value = await cache.get(KEY, fetchData);
assert.equal(fetchData.callCount, 2);
assert.equal(value, 'First Value');
break checkFourthRead;
}
// We reset the TTL to 100 for the most recent write
remainingTTL = 100;
checkFifthRead: {
const value = await cache.get(KEY, fetchData);
assert.equal(fetchData.callCount, 2);
assert.equal(value, 'Second Value');
break checkFifthRead;
}
});
});
describe('set', function () {

View File

@ -238,28 +238,35 @@ const pipeline = (apiController, apiUtils, apiType) => {
const cacheKey = stringify(cacheKeyData);
if (apiImpl.cache) {
const response = await apiImpl.cache.get(cacheKey, getResponse);
const response = await apiImpl.cache.get(cacheKey);
if (response) {
return Promise.resolve(response);
}
}
async function getResponse() {
await STAGES.validation.input(apiUtils, apiConfig, apiImpl, frame);
await STAGES.serialisation.input(apiUtils, apiConfig, apiImpl, frame);
await STAGES.permissions(apiUtils, apiConfig, apiImpl, frame);
const response = await STAGES.query(apiUtils, apiConfig, apiImpl, frame);
await STAGES.serialisation.output(response, apiUtils, apiConfig, apiImpl, frame);
return frame.response;
}
const response = await getResponse();
if (apiImpl.cache) {
await apiImpl.cache.set(cacheKey, response);
}
return response;
return Promise.resolve()
.then(() => {
return STAGES.validation.input(apiUtils, apiConfig, apiImpl, frame);
})
.then(() => {
return STAGES.serialisation.input(apiUtils, apiConfig, apiImpl, frame);
})
.then(() => {
return STAGES.permissions(apiUtils, apiConfig, apiImpl, frame);
})
.then(() => {
return STAGES.query(apiUtils, apiConfig, apiImpl, frame);
})
.then((response) => {
return STAGES.serialisation.output(response, apiUtils, apiConfig, apiImpl, frame);
})
.then(async () => {
if (apiImpl.cache) {
await apiImpl.cache.set(cacheKey, frame.response);
}
return frame.response;
});
};
Object.assign(obj[method], apiImpl);

View File

@ -8,18 +8,34 @@ class PostsPublicServiceWrapper {
// Wire up all the dependencies
const adapterManager = require('../adapter-manager');
const config = require('../../../shared/config');
const EventAwareCacheWrapper = require('@tryghost/event-aware-cache-wrapper');
const EventRegistry = require('../../lib/common/events');
let postsCache;
if (config.get('hostSettings:postsPublicCache:enabled')) {
postsCache = adapterManager.getAdapter('cache:postsPublic');
EventRegistry.on('site.changed', () => {
postsCache.reset();
const cache = adapterManager.getAdapter('cache:postsPublic');
postsCache = new EventAwareCacheWrapper({
cache: cache,
resetEvents: ['site.changed'],
eventRegistry: EventRegistry
});
}
let cache;
if (postsCache) {
// @NOTE: exposing cache through getter and setter to not lose the context of "this"
cache = {
get() {
return postsCache.get(...arguments);
},
set() {
return postsCache.set(...arguments);
}
};
}
this.api = {
cache: postsCache
cache: cache
};
}
}

View File

@ -8,18 +8,33 @@ class TagsPublicServiceWrapper {
// Wire up all the dependencies
const adapterManager = require('../adapter-manager');
const config = require('../../../shared/config');
const EventAwareCacheWrapper = require('@tryghost/event-aware-cache-wrapper');
const EventRegistry = require('../../lib/common/events');
let tagsCache;
if (config.get('hostSettings:tagsPublicCache:enabled')) {
tagsCache = adapterManager.getAdapter('cache:tagsPublic');
EventRegistry.on('site.changed', () => {
tagsCache.reset();
let tagsPublicCache = adapterManager.getAdapter('cache:tagsPublic');
tagsCache = new EventAwareCacheWrapper({
cache: tagsPublicCache,
resetEvents: ['site.changed'],
eventRegistry: EventRegistry
});
}
let cache;
if (tagsCache) {
// @NOTE: exposing cache through getter and setter to not lose the context of "this"
cache = {
get() {
return tagsCache.get(...arguments);
},
set() {
return tagsCache.set(...arguments);
}
};
}
this.api = {
cache: tagsCache
cache: cache
};
}
}

View File

@ -87,6 +87,7 @@
"@tryghost/email-service": "0.0.0",
"@tryghost/email-suppression-list": "0.0.0",
"@tryghost/errors": "1.2.26",
"@tryghost/event-aware-cache-wrapper": "0.0.0",
"@tryghost/express-dynamic-redirects": "0.0.0",
"@tryghost/external-media-inliner": "0.0.0",
"@tryghost/helpers": "1.1.88",

View File

@ -0,0 +1,6 @@
module.exports = {
plugins: ['ghost'],
extends: [
'plugin:ghost/node'
]
};

View File

@ -0,0 +1,23 @@
# Event Aware Cache Wrapper
A cache wrapper that resets the underlying cache when certain events fire
## Usage
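A minimal example, mirroring the wrapper's tests:

```js
const EventAwareCacheWrapper = require('@tryghost/event-aware-cache-wrapper');
const InMemoryCache = require('@tryghost/adapter-cache-memory-ttl');
const {EventEmitter} = require('events');

const eventRegistry = new EventEmitter();

const cache = new EventAwareCacheWrapper({
    cache: new InMemoryCache(),     // any adapter-base-cache compatible cache
    resetEvents: ['site.changed'],  // events that flush the cache
    eventRegistry: eventRegistry    // emitter the wrapper subscribes to
});

await cache.set('a', 'b');
await cache.get('a');               // => 'b'

eventRegistry.emit('site.changed');
await cache.get('a');               // => undefined (cache was reset)
```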
## Develop
This is a monorepo package.
Follow the instructions for the top-level repo.
1. `git clone` this repo & `cd` into it as usual
2. Run `yarn` to install top-level dependencies.
## Test
- `yarn lint` runs just ESLint
- `yarn test` runs lint and tests

View File

@ -0,0 +1 @@
module.exports = require('./lib/EventAwareCacheWrapper');

View File

@ -0,0 +1,38 @@
class EventAwareCacheWrapper {
#cache;
/**
* @param {Object} deps
* @param {Object} deps.cache - cache instance extending adapter-base-cache
* @param {Object} [deps.eventRegistry] - event registry instance
* @param {String[]} [deps.resetEvents] - events to listen to that trigger a cache reset
*/
constructor(deps) {
this.#cache = deps.cache;
if (deps.resetEvents && deps.eventRegistry) {
this.#initListeners(deps.eventRegistry, deps.resetEvents);
}
}
#initListeners(eventRegistry, eventsToResetOn) {
eventsToResetOn.forEach((event) => {
eventRegistry.on(event, () => {
this.reset();
});
});
}
async get(key) {
return this.#cache.get(key);
}
async set(key, value) {
return this.#cache.set(key, value);
}
reset() {
return this.#cache.reset();
}
}
module.exports = EventAwareCacheWrapper;

View File

@ -0,0 +1,27 @@
{
"name": "@tryghost/event-aware-cache-wrapper",
"version": "0.0.0",
"repository": "https://github.com/TryGhost/Ghost/tree/main/packages/event-aware-cache-wrapper",
"author": "Ghost Foundation",
"private": true,
"main": "index.js",
"scripts": {
"dev": "echo \"Implement me!\"",
"test:unit": "NODE_ENV=testing c8 --all --check-coverage --100 --reporter text --reporter cobertura -- mocha --reporter dot './test/**/*.test.js'",
"test": "yarn test:unit",
"lint:code": "eslint *.js lib/ --ext .js --cache",
"lint": "yarn lint:code && yarn lint:test",
"lint:test": "eslint -c test/.eslintrc.js test/ --ext .js --cache"
},
"files": [
"index.js",
"lib"
],
"devDependencies": {
"@tryghost/adapter-cache-memory-ttl": "0.0.0",
"c8": "8.0.1",
"mocha": "10.2.0",
"sinon": "15.2.0"
},
"dependencies": {}
}

View File

@ -0,0 +1,6 @@
module.exports = {
plugins: ['ghost'],
extends: [
'plugin:ghost/test'
]
};

View File

@ -0,0 +1,48 @@
const assert = require('assert/strict');
const InMemoryCache = require('@tryghost/adapter-cache-memory-ttl');
const EventAwareCacheWrapper = require('../index');
const {EventEmitter} = require('events');
describe('EventAwareCacheWrapper', function () {
it('Can initialize', function () {
const cache = new InMemoryCache();
const wrappedCache = new EventAwareCacheWrapper({
cache
});
assert.ok(wrappedCache);
});
describe('get', function () {
it('passes get and set through to the wrapped cache', async function () {
const cache = new InMemoryCache();
const wrapper = new EventAwareCacheWrapper({
cache: cache
});
await wrapper.set('a', 'b');
assert.equal(await wrapper.get('a'), 'b');
assert.equal(await cache.get('a'), 'b');
});
});
describe('listens to reset events', function () {
it('resets the cache when reset event is triggered', async function () {
const cache = new InMemoryCache();
const eventRegistry = new EventEmitter();
const wrapper = new EventAwareCacheWrapper({
cache: cache,
resetEvents: ['site.changed'],
eventRegistry: eventRegistry
});
await wrapper.set('a', 'b');
assert.equal(await wrapper.get('a'), 'b');
eventRegistry.emit('site.changed');
assert.equal(await wrapper.get('a'), undefined);
});
});
});