Added cache support to api-framework

refs https://github.com/TryGhost/Toolbox/issues/522

- API-level response caching allows to cache responses bypassing the "pipeline" processing
- The main use case for these caches is caching responses to expensive Content API GET requests
- To enable response caching, add a "cache" key with a cache instance as its value. For example, the public posts cache configuration can look like:
```
module.exports = {
    docName: 'posts',

    browse: {
        cache: postsPublicService.api.cache,
        options: [ ...
```
This commit is contained in:
Naz 2023-02-22 15:17:39 +08:00
parent 80ebd1c5be
commit 3cfe6d2cbb
No known key found for this signature in database
2 changed files with 97 additions and 2 deletions

View File

@@ -187,7 +187,7 @@ const pipeline = (apiController, apiUtils, apiType) => {
return keys.reduce((obj, method) => {
const apiImpl = _.cloneDeep(apiController)[method];
obj[method] = function wrapper() {
obj[method] = async function wrapper() {
const apiConfig = {docName, method};
let options;
let data;
@@ -229,6 +229,15 @@ const pipeline = (apiController, apiUtils, apiType) => {
frame.docName = docName;
frame.method = method;
let cacheKey = JSON.stringify(frame.options);
if (apiImpl.cache) {
const response = await apiImpl.cache.get(cacheKey);
if (response) {
return Promise.resolve(response);
}
}
return Promise.resolve()
.then(() => {
return STAGES.validation.input(apiUtils, apiConfig, apiImpl, frame);
@@ -245,7 +254,10 @@ const pipeline = (apiController, apiUtils, apiType) => {
.then((response) => {
return STAGES.serialisation.output(response, apiUtils, apiConfig, apiImpl, frame);
})
.then(() => {
.then(async () => {
if (apiImpl.cache) {
await apiImpl.cache.set(cacheKey, frame.response);
}
return frame.response;
});
};

View File

@@ -250,4 +250,87 @@ describe('Pipeline', function () {
});
});
});
describe('caching', function () {
    beforeEach(function () {
        // Stub every pipeline stage so each test fully controls stage behavior
        // and can assert whether the stages ran at all.
        sinon.stub(shared.pipeline.STAGES.validation, 'input');
        sinon.stub(shared.pipeline.STAGES.serialisation, 'input');
        sinon.stub(shared.pipeline.STAGES.serialisation, 'output');
        sinon.stub(shared.pipeline.STAGES, 'permissions');
        sinon.stub(shared.pipeline.STAGES, 'query');
    });

    it('should set a cache if configured on endpoint level', async function () {
        const apiController = {
            browse: {
                cache: {
                    get: sinon.stub().resolves(null), // cache miss
                    set: sinon.stub().resolves(true)
                }
            }
        };
        const apiUtils = {};

        const result = shared.pipeline(apiController, apiUtils);

        shared.pipeline.STAGES.validation.input.resolves();
        shared.pipeline.STAGES.serialisation.input.resolves();
        shared.pipeline.STAGES.permissions.resolves();
        shared.pipeline.STAGES.query.resolves('response');
        // The output stage is responsible for assigning frame.response,
        // which the pipeline both returns and writes to the cache.
        shared.pipeline.STAGES.serialisation.output.callsFake(function (response, _apiUtils, apiConfig, apiImpl, frame) {
            frame.response = response;
        });

        const response = await result.browse();
        response.should.eql('response');

        // cache miss: the request went through all stages
        shared.pipeline.STAGES.validation.input.calledOnce.should.be.true();
        shared.pipeline.STAGES.serialisation.input.calledOnce.should.be.true();
        shared.pipeline.STAGES.permissions.calledOnce.should.be.true();
        shared.pipeline.STAGES.query.calledOnce.should.be.true();
        shared.pipeline.STAGES.serialisation.output.calledOnce.should.be.true();

        // ... and the freshly-built response was stored in the cache
        apiController.browse.cache.set.calledOnce.should.be.true();
        apiController.browse.cache.set.args[0][1].should.equal('response');
    });

    it('should use cache if configured on endpoint level', async function () {
        const apiController = {
            browse: {
                cache: {
                    get: sinon.stub().resolves('CACHED RESPONSE'), // cache hit
                    set: sinon.stub().resolves(true)
                }
            }
        };
        const apiUtils = {};

        const result = shared.pipeline(apiController, apiUtils);

        shared.pipeline.STAGES.validation.input.resolves();
        shared.pipeline.STAGES.serialisation.input.resolves();
        shared.pipeline.STAGES.permissions.resolves();
        shared.pipeline.STAGES.query.resolves('response');
        shared.pipeline.STAGES.serialisation.output.callsFake(function (response, _apiUtils, apiConfig, apiImpl, frame) {
            frame.response = response;
        });

        const response = await result.browse();
        response.should.eql('CACHED RESPONSE');

        // cache hit: the pipeline short-circuited, so NO stage was executed
        shared.pipeline.STAGES.validation.input.calledOnce.should.be.false();
        shared.pipeline.STAGES.serialisation.input.calledOnce.should.be.false();
        shared.pipeline.STAGES.permissions.calledOnce.should.be.false();
        shared.pipeline.STAGES.query.calledOnce.should.be.false();
        shared.pipeline.STAGES.serialisation.output.calledOnce.should.be.false();

        // ... and the cached value was not written back to the cache
        apiController.browse.cache.set.calledOnce.should.be.false();
    });
});
});