Removed usage of unquoted ids in filter strings (#19070)
fixes GRO-34 fixes GRO-33 This is a revision of a previous commit, that broke the browser tests because changes in the data generator (requiring bookshelf had side effects). This adds a new way to run all tests with enforced numeric ObjectIDs. These numeric ids cause issues if they are used within NQL filters. So they surface tiny bugs in our codebase. You can run tests using this option via: NUMERIC_IDS=1 yarn test:e2e Removed some defensive logic that could be explained by this discovered issue.
This commit is contained in:
parent
dca11d0eeb
commit
b6519e0f1f
@ -174,7 +174,7 @@ const HistoryModal = NiceModal.create<RoutingModalProps>(({params}) => {
|
||||
filter: [
|
||||
excludedEvents.length && `event:-[${excludedEvents.join(',')}]`,
|
||||
excludedResources.length && `resource_type:-[${excludedResources.join(',')}]`,
|
||||
params?.user && `actor_id:${params.user}`
|
||||
params?.user && `actor_id:'${params.user}'`
|
||||
].filter(Boolean).join('+')
|
||||
},
|
||||
getNextPageParams: (lastPage, otherParams) => ({
|
||||
|
@ -144,7 +144,7 @@ function setupGhostApi({siteUrl = window.location.origin, apiUrl, apiKey}: {site
|
||||
});
|
||||
},
|
||||
async replies({commentId, afterReplyId, limit}: {commentId: string; afterReplyId: string; limit?: number | 'all'}) {
|
||||
const filter = encodeURIComponent(`id:>${afterReplyId}`);
|
||||
const filter = encodeURIComponent(`id:>'${afterReplyId}'`);
|
||||
const order = encodeURIComponent('created_at ASC, id ASC');
|
||||
|
||||
const url = endpointFor({type: 'members', resource: `comments/${commentId}/replies`, params: `?limit=${limit ?? 5}&order=${order}&filter=${filter}`});
|
||||
|
@ -723,7 +723,7 @@ export default class DashboardStatsService extends Service {
|
||||
}
|
||||
|
||||
const start30d = new Date(Date.now() - 30 * 86400 * 1000);
|
||||
const result = yield this.store.query('email', {limit: 100, filter: 'submitted_at:>' + start30d.toISOString()});
|
||||
const result = yield this.store.query('email', {limit: 100, filter: `submitted_at:>'${start30d.toISOString()}'`});
|
||||
this.emailsSent30d = result.reduce((c, email) => c + email.emailCount, 0);
|
||||
}
|
||||
|
||||
|
@ -609,7 +609,7 @@ export class CollectionsService {
|
||||
}
|
||||
async getCollectionsForPost(postId: string): Promise<CollectionDTO[]> {
|
||||
const collections = await this.collectionsRepository.getAll({
|
||||
filter: `posts:${postId},slug:latest`
|
||||
filter: `posts:'${postId}',slug:latest`
|
||||
});
|
||||
|
||||
return Promise.all(collections.sort((a, b) => {
|
||||
|
@ -7,6 +7,10 @@ const schema = require('../../../data/schema');
|
||||
// This wires up our model event system
|
||||
const events = require('../../../lib/common/events');
|
||||
|
||||
// Run tests or development with NUMERIC_IDS=1 to enable numeric object IDs
|
||||
let forceNumericObjectIds = process.env.NODE_ENV !== 'production' && !!process.env.NUMERIC_IDS;
|
||||
let numberGenerator = 0;
|
||||
|
||||
module.exports = function (Bookshelf) {
|
||||
Bookshelf.Model = Bookshelf.Model.extend({
|
||||
initializeEvents: function () {
|
||||
@ -39,7 +43,7 @@ module.exports = function (Bookshelf) {
|
||||
* no auto increment
|
||||
*/
|
||||
setId: function setId() {
|
||||
this.set('id', ObjectId().toHexString());
|
||||
this.set('id', Bookshelf.Model.generateId());
|
||||
},
|
||||
|
||||
/**
|
||||
@ -268,5 +272,20 @@ module.exports = function (Bookshelf) {
|
||||
|
||||
this.addAction(model, 'deleted', options);
|
||||
}
|
||||
}, {
|
||||
generateId: function generateId() {
|
||||
if (forceNumericObjectIds) {
|
||||
numberGenerator = numberGenerator + 1;
|
||||
const counter = numberGenerator.toString();
|
||||
|
||||
// The 77777777 prefix is here to make sure generated ids are larger than naturally generated ones
|
||||
const base = '777777770000000000000000';
|
||||
const id = base.substring(0, base.length - counter.length) + counter;
|
||||
|
||||
// This always generates a valid object ID that is fully numeric
|
||||
return id;
|
||||
}
|
||||
return ObjectId().toHexString();
|
||||
}
|
||||
});
|
||||
};
|
||||
|
@ -33,7 +33,7 @@ describe('Actions API', function () {
|
||||
postUpdatedAt = res.body.posts[0].updated_at;
|
||||
|
||||
const res2 = await request
|
||||
.get(localUtils.API.getApiQuery(`actions/?filter=resource_id:${postId}&include=actor`))
|
||||
.get(localUtils.API.getApiQuery(`actions/?filter=${encodeURIComponent(`resource_id:'${postId}'`)}&include=actor`))
|
||||
.set('Origin', config.get('url'))
|
||||
.expect('Content-Type', /json/)
|
||||
.expect('Cache-Control', testUtils.cacheRules.private)
|
||||
@ -73,7 +73,7 @@ describe('Actions API', function () {
|
||||
postUpdatedAt = res3.body.posts[0].updated_at;
|
||||
|
||||
const res4 = await request
|
||||
.get(localUtils.API.getApiQuery(`actions/?filter=resource_id:${postId}&include=actor`))
|
||||
.get(localUtils.API.getApiQuery(`actions/?filter=${encodeURIComponent(`resource_id:'${postId}'`)}&include=actor`))
|
||||
.set('Origin', config.get('url'))
|
||||
.expect('Content-Type', /json/)
|
||||
.expect('Cache-Control', testUtils.cacheRules.private)
|
||||
@ -113,7 +113,7 @@ describe('Actions API', function () {
|
||||
.expect(200);
|
||||
|
||||
const res5 = await request
|
||||
.get(localUtils.API.getApiQuery(`actions/?filter=resource_id:${postId}&include=actor`))
|
||||
.get(localUtils.API.getApiQuery(`actions/?filter=${encodeURIComponent(`resource_id:'${postId}'`)}&include=actor`))
|
||||
.set('Origin', config.get('url'))
|
||||
.expect('Content-Type', /json/)
|
||||
.expect('Cache-Control', testUtils.cacheRules.private)
|
||||
|
@ -9,7 +9,7 @@ const logging = require('@tryghost/logging');
|
||||
let agent;
|
||||
|
||||
async function testPagination(skippedTypes, postId, totalExpected, limit) {
|
||||
const postFilter = postId ? `+data.post_id:${postId}` : '';
|
||||
const postFilter = postId ? `+data.post_id:'${postId}'` : '';
|
||||
|
||||
// To make the test cover more edge cases, we test different limit configurations
|
||||
const {body: firstPage} = await agent
|
||||
@ -49,7 +49,7 @@ async function testPagination(skippedTypes, postId, totalExpected, limit) {
|
||||
const remaining = totalExpected - (page - 1) * limit;
|
||||
|
||||
const {body: secondPage} = await agent
|
||||
.get(`/members/events?filter=${encodeURIComponent(`type:-[${skippedTypes.join(',')}]${postFilter}+(data.created_at:<'${lastCreatedAt}',(data.created_at:'${lastCreatedAt}'+id:<${lastId}))`)}&limit=${limit}`)
|
||||
.get(`/members/events?filter=${encodeURIComponent(`type:-[${skippedTypes.join(',')}]${postFilter}+(data.created_at:<'${lastCreatedAt}',(data.created_at:'${lastCreatedAt}'+id:<'${lastId}'))`)}&limit=${limit}`)
|
||||
.expectStatus(200)
|
||||
.matchHeaderSnapshot({
|
||||
etag: anyEtag,
|
||||
@ -162,7 +162,7 @@ describe('Activity Feed API', function () {
|
||||
const memberId = fixtureManager.get('members', 0).id;
|
||||
|
||||
await agent
|
||||
.get(`/members/events?filter=${encodeURIComponent(`data.post_id:${postId},data.member_id:${memberId}`)}`)
|
||||
.get(`/members/events?filter=${encodeURIComponent(`data.post_id:'${postId}',data.member_id:'${memberId}'`)}`)
|
||||
.expectStatus(200)
|
||||
.matchBodySnapshot({
|
||||
events: new Array(10).fill({
|
||||
@ -180,7 +180,7 @@ describe('Activity Feed API', function () {
|
||||
const memberId = fixtureManager.get('members', 0).id;
|
||||
|
||||
await agent
|
||||
.get(`/members/events?filter=${encodeURIComponent(`(type:comment_event,type:click_event)+(data.post_id:${postId},data.member_id:${memberId})`)}`)
|
||||
.get(`/members/events?filter=${encodeURIComponent(`(type:comment_event,type:click_event)+(data.post_id:'${postId}',data.member_id:'${memberId}')`)}`)
|
||||
.expectStatus(200)
|
||||
.matchBodySnapshot({
|
||||
events: new Array(3).fill({
|
||||
@ -407,7 +407,7 @@ describe('Activity Feed API', function () {
|
||||
const postId = fixtureManager.get('posts', 0).id;
|
||||
|
||||
await agent
|
||||
.get(`/members/events?filter=data.post_id:${postId}&limit=20`)
|
||||
.get(`/members/events?filter=data.post_id:'${postId}'&limit=20`)
|
||||
.expectStatus(200)
|
||||
.matchHeaderSnapshot({
|
||||
etag: anyEtag,
|
||||
@ -441,7 +441,7 @@ describe('Activity Feed API', function () {
|
||||
it('Can limit events', async function () {
|
||||
const postId = fixtureManager.get('posts', 0).id;
|
||||
await agent
|
||||
.get(`/members/events?filter=data.post_id:${postId}&limit=2`)
|
||||
.get(`/members/events?filter=data.post_id:'${postId}'&limit=2`)
|
||||
.expectStatus(200)
|
||||
.matchHeaderSnapshot({
|
||||
etag: anyEtag,
|
||||
|
@ -262,7 +262,7 @@ describe('Integrations API', function () {
|
||||
should.equal(updatedIntegration.id, createdIntegration.id);
|
||||
updatedAdminApiKey.secret.should.not.eql(adminApiKey.secret);
|
||||
|
||||
const res3 = await request.get(localUtils.API.getApiQuery(`actions/?filter=resource_id:${adminApiKey.id}&include=actor`))
|
||||
const res3 = await request.get(localUtils.API.getApiQuery(`actions/?filter=resource_id:'${adminApiKey.id}'&include=actor`))
|
||||
.set('Origin', config.get('url'))
|
||||
.expect('Content-Type', /json/)
|
||||
.expect('Cache-Control', testUtils.cacheRules.private)
|
||||
|
@ -50,7 +50,7 @@ describe('Links API', function () {
|
||||
});
|
||||
const postId = siteLink.post_id;
|
||||
const originalTo = siteLink.link.to;
|
||||
const filter = `post_id:${postId}+to:'${originalTo}'`;
|
||||
const filter = `post_id:'${postId}'+to:'${originalTo}'`;
|
||||
|
||||
// Wait minimum 2 seconds
|
||||
clock.tick(2 * 1000);
|
||||
@ -122,7 +122,7 @@ describe('Links API', function () {
|
||||
});
|
||||
const postId = siteLink.post_id;
|
||||
const originalTo = siteLink.link.to;
|
||||
const filter = `post_id:${postId}+to:'${originalTo}'`;
|
||||
const filter = `post_id:'${postId}'+to:'${originalTo}'`;
|
||||
|
||||
// Wait minimum 2 seconds
|
||||
clock.tick(2 * 1000);
|
||||
@ -187,7 +187,7 @@ describe('Links API', function () {
|
||||
});
|
||||
const postId = siteLink.post_id;
|
||||
const originalTo = 'https://empty.example.com';
|
||||
const filter = `post_id:${postId}+to:'${originalTo}'`;
|
||||
const filter = `post_id:'${postId}'+to:'${originalTo}'`;
|
||||
await agent
|
||||
.put(`links/bulk/?filter=${encodeURIComponent(filter)}`)
|
||||
.body({
|
||||
|
@ -38,7 +38,7 @@ function basicAsserts(member, row) {
|
||||
async function testOutput(member, asserts, filters = []) {
|
||||
// Add default filters that always should match
|
||||
filters.push('limit=all');
|
||||
filters.push(`filter=id:${member.id}`);
|
||||
filters.push(`filter=id:'${member.id}'`);
|
||||
|
||||
for (const filter of filters) {
|
||||
// Test all
|
||||
@ -61,7 +61,7 @@ async function testOutput(member, asserts, filters = []) {
|
||||
|
||||
asserts(row);
|
||||
|
||||
if (filter === 'filter=id:${member.id}') {
|
||||
if (filter === `filter=id:'${member.id}'`) {
|
||||
csv.data.length.should.eql(1);
|
||||
}
|
||||
}
|
||||
|
@ -1958,7 +1958,7 @@ describe('Members API', function () {
|
||||
|
||||
// Check activity feed
|
||||
const {body: eventsBody} = await agent
|
||||
.get(`/members/events?filter=data.member_id:${newMember.id}`)
|
||||
.get(`/members/events?filter=data.member_id:'${newMember.id}'`)
|
||||
.body({members: [memberChanged]})
|
||||
.expectStatus(200)
|
||||
.matchHeaderSnapshot({
|
||||
@ -3037,7 +3037,7 @@ describe('Members API Bulk operations', function () {
|
||||
should(model.relations.newsletters.models.length).equal(newsletterCount, 'This test requires a member with 2 or more newsletters');
|
||||
|
||||
await agent
|
||||
.put(`/members/bulk/?filter=id:${member.id}`)
|
||||
.put(`/members/bulk/?filter=id:'${member.id}'`)
|
||||
.body({bulk: {
|
||||
action: 'unsubscribe'
|
||||
}})
|
||||
@ -3065,7 +3065,7 @@ describe('Members API Bulk operations', function () {
|
||||
|
||||
// When we do it again, we should still receive a count of 1, because we unsubscribed one member (who happens to be already unsubscribed)
|
||||
await agent
|
||||
.put(`/members/bulk/?filter=id:${member.id}`)
|
||||
.put(`/members/bulk/?filter=id:'${member.id}'`)
|
||||
.body({bulk: {
|
||||
action: 'unsubscribe'
|
||||
}})
|
||||
@ -3303,7 +3303,7 @@ describe('Members API Bulk operations', function () {
|
||||
should(model2.relations.labels.models.map(m => m.id)).match([firstId, secondId]);
|
||||
|
||||
await agent
|
||||
.put(`/members/bulk/?filter=id:${member1.id}`)
|
||||
.put(`/members/bulk/?filter=id:'${member1.id}'`)
|
||||
.body({bulk: {
|
||||
action: 'removeLabel',
|
||||
meta: {
|
||||
|
@ -213,7 +213,7 @@ describe('Comments API', function () {
|
||||
|
||||
it('Can browse all comments of a post', async function () {
|
||||
await membersAgent
|
||||
.get(`/api/comments/?filter=post_id:${postId}`)
|
||||
.get(`/api/comments/?filter=post_id:'${postId}'`)
|
||||
.expectStatus(200)
|
||||
.matchHeaderSnapshot({
|
||||
etag: anyEtag
|
||||
@ -308,7 +308,7 @@ describe('Comments API', function () {
|
||||
|
||||
it('Can browse all comments of a post', async function () {
|
||||
await membersAgent
|
||||
.get(`/api/comments/?filter=post_id:${postId}`)
|
||||
.get(`/api/comments/?filter=post_id:'${postId}'`)
|
||||
.expectStatus(200)
|
||||
.matchHeaderSnapshot({
|
||||
etag: anyEtag
|
||||
@ -679,7 +679,7 @@ describe('Comments API', function () {
|
||||
}]});
|
||||
|
||||
const {body} = await membersAgent
|
||||
.get(`/api/comments/?filter=post_id:${anotherPostId}`);
|
||||
.get(`/api/comments/?filter=post_id:'${anotherPostId}'`);
|
||||
|
||||
assert(!body.comments.find(comment => comment.id === commentId), 'The comment should not have moved post');
|
||||
});
|
||||
|
@ -6,13 +6,13 @@ const {Recommendation} = require('@tryghost/recommendations');
|
||||
|
||||
async function testClicked({recommendationId, memberId}, test) {
|
||||
const before = await recommendationsService.clickEventRepository.getAll({
|
||||
filter: 'recommendationId:' + recommendationId
|
||||
filter: `recommendationId:'${recommendationId}'`
|
||||
});
|
||||
|
||||
await test();
|
||||
|
||||
const after = await recommendationsService.clickEventRepository.getAll({
|
||||
filter: 'recommendationId:' + recommendationId
|
||||
filter: `recommendationId:'${recommendationId}'`
|
||||
});
|
||||
|
||||
assert.equal(after.length, before.length + 1);
|
||||
@ -40,11 +40,11 @@ async function testNotSubscribed(test) {
|
||||
|
||||
async function testSubscribed({recommendationId, memberId}, test) {
|
||||
const before = await recommendationsService.subscribeEventRepository.getAll({
|
||||
filter: 'recommendationId:' + recommendationId
|
||||
filter: `recommendationId:'${recommendationId}'`
|
||||
});
|
||||
await test();
|
||||
const after = await recommendationsService.subscribeEventRepository.getAll({
|
||||
filter: 'recommendationId:' + recommendationId
|
||||
filter: `recommendationId:'${recommendationId}'`
|
||||
});
|
||||
|
||||
assert.equal(after.length, before.length + 1);
|
||||
|
@ -49,7 +49,7 @@ describe('Click Tracking', function () {
|
||||
await jobService.allSettled();
|
||||
|
||||
const {body: {links}} = await agent.get(
|
||||
`/links/?filter=post_id:${post.id}`
|
||||
`/links/?filter=${encodeURIComponent(`post_id:'${post.id}'`)}`
|
||||
);
|
||||
|
||||
/** @type {(url: string) => Promise<import('node-fetch').Response>} */
|
||||
@ -101,13 +101,13 @@ describe('Click Tracking', function () {
|
||||
await fetchWithoutFollowingRedirect(urlOfLinkToClick.href);
|
||||
|
||||
const {body: {links: [clickedLink]}} = await agent.get(
|
||||
`/links/?filter=post_id:${post.id}`
|
||||
`/links/?filter=${encodeURIComponent(`post_id:'${post.id}'`)}`
|
||||
);
|
||||
|
||||
const clickCount = clickedLink.count.clicks;
|
||||
|
||||
const {body: {events: clickEvents}} = await agent.get(
|
||||
`/members/events/?filter=data.member_id:${memberToClickLink.id}${encodeURIComponent('+')}type:click_event`
|
||||
`/members/events/?filter=${encodeURIComponent(`data.member_id:'${memberToClickLink.id}'+type:click_event`)}`
|
||||
);
|
||||
|
||||
const clickEvent = clickEvents.find((/** @type any */ event) => {
|
||||
|
@ -54,7 +54,7 @@ async function testEmailBatches(settings, email_recipient_filter, expectedBatche
|
||||
assert.equal(emailModel.get('email_count'), expectedTotal, 'This email should have an email_count of ' + expectedTotal + ' recipients');
|
||||
|
||||
// Did we create batches?
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:${emailModel.id}`});
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
assert.equal(batches.models.length, expectedBatches.length);
|
||||
const remainingBatches = batches.models.slice();
|
||||
const emailRecipients = [];
|
||||
@ -74,7 +74,7 @@ async function testEmailBatches(settings, email_recipient_filter, expectedBatche
|
||||
assert.equal(firstBatch.get('error_data'), null);
|
||||
|
||||
// Did we create recipients?
|
||||
const emailRecipientsFirstBatch = await models.EmailRecipient.findAll({filter: `email_id:${emailModel.id}+batch_id:${firstBatch.id}`});
|
||||
const emailRecipientsFirstBatch = await models.EmailRecipient.findAll({filter: `email_id:'${emailModel.id}'+batch_id:'${firstBatch.id}'`});
|
||||
assert.equal(emailRecipientsFirstBatch.models.length, expectedBatch.recipients);
|
||||
|
||||
emailRecipients.push(...emailRecipientsFirstBatch.models);
|
||||
@ -142,7 +142,7 @@ describe('Batch sending tests', function () {
|
||||
assert.equal(emailModel.get('email_count'), 4);
|
||||
|
||||
// Did we create batches?
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:${emailModel.id}`});
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
assert.equal(batches.models.length, 1);
|
||||
|
||||
// Check all batches are in send state
|
||||
@ -157,7 +157,7 @@ describe('Batch sending tests', function () {
|
||||
}
|
||||
|
||||
// Did we create recipients?
|
||||
const emailRecipients = await models.EmailRecipient.findAll({filter: `email_id:${emailModel.id}`});
|
||||
const emailRecipients = await models.EmailRecipient.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
assert.equal(emailRecipients.models.length, 4);
|
||||
|
||||
for (const recipient of emailRecipients.models) {
|
||||
@ -189,7 +189,7 @@ describe('Batch sending tests', function () {
|
||||
assert.equal(emailModel.get('email_count'), 4);
|
||||
|
||||
// Did we create batches?
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:${emailModel.id}`});
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
assert.equal(batches.models.length, 1);
|
||||
});
|
||||
|
||||
@ -223,11 +223,11 @@ describe('Batch sending tests', function () {
|
||||
assert.equal(emailModel.get('email_count'), 4);
|
||||
|
||||
// Did we create batches?
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:${emailModel.id}`});
|
||||
const batches = await models.EmailBatch.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
assert.equal(batches.models.length, 1);
|
||||
|
||||
// Did we create recipients?
|
||||
const emailRecipients = await models.EmailRecipient.findAll({filter: `email_id:${emailModel.id}`});
|
||||
const emailRecipients = await models.EmailRecipient.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
assert.equal(emailRecipients.models.length, 4);
|
||||
|
||||
for (const recipient of emailRecipients.models) {
|
||||
@ -238,7 +238,7 @@ describe('Batch sending tests', function () {
|
||||
// Create a new email and see if it is included now
|
||||
const {emailModel: emailModel2} = await sendEmail(agent);
|
||||
assert.equal(emailModel2.get('email_count'), 5);
|
||||
const emailRecipients2 = await models.EmailRecipient.findAll({filter: `email_id:${emailModel2.id}`});
|
||||
const emailRecipients2 = await models.EmailRecipient.findAll({filter: `email_id:'${emailModel2.id}'`});
|
||||
assert.equal(emailRecipients2.models.length, emailRecipients.models.length + 1);
|
||||
});
|
||||
|
||||
@ -411,7 +411,7 @@ describe('Batch sending tests', function () {
|
||||
assert.equal(emailModel.get('email_count'), 5);
|
||||
|
||||
// Did we create batches?
|
||||
let batches = await models.EmailBatch.findAll({filter: `email_id:${emailModel.id}`});
|
||||
let batches = await models.EmailBatch.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
assert.equal(batches.models.length, 5);
|
||||
|
||||
// sort batches by id because findAll doesn't have order option
|
||||
@ -449,7 +449,7 @@ describe('Batch sending tests', function () {
|
||||
assert.equal(batch.get('member_segment'), null);
|
||||
|
||||
// Did we create recipients?
|
||||
const batchRecipients = await models.EmailRecipient.findAll({filter: `email_id:${emailModel.id}+batch_id:${batch.id}`});
|
||||
const batchRecipients = await models.EmailRecipient.findAll({filter: `email_id:'${emailModel.id}'+batch_id:'${batch.id}'`});
|
||||
assert.equal(batchRecipients.models.length, 1);
|
||||
|
||||
emailRecipients.push(...batchRecipients.models);
|
||||
@ -463,7 +463,7 @@ describe('Batch sending tests', function () {
|
||||
await jobManager.allSettled();
|
||||
|
||||
await emailModel.refresh();
|
||||
batches = await models.EmailBatch.findAll({filter: `email_id:${emailModel.id}`});
|
||||
batches = await models.EmailBatch.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
|
||||
// sort batches by provider_id (nullable) because findAll doesn't have order option
|
||||
batches.models.sort(sortBatches);
|
||||
@ -472,7 +472,7 @@ describe('Batch sending tests', function () {
|
||||
assert.equal(emailModel.get('email_count'), 5);
|
||||
|
||||
// Did we keep the batches?
|
||||
batches = await models.EmailBatch.findAll({filter: `email_id:${emailModel.id}`});
|
||||
batches = await models.EmailBatch.findAll({filter: `email_id:'${emailModel.id}'`});
|
||||
|
||||
// sort batches by provider_id (nullable) because findAll doesn't have order option
|
||||
batches.models.sort(sortBatches);
|
||||
@ -491,7 +491,7 @@ describe('Batch sending tests', function () {
|
||||
assert.equal(batch.get('error_data'), null);
|
||||
|
||||
// Did we create recipients?
|
||||
const batchRecipients = await models.EmailRecipient.findAll({filter: `email_id:${emailModel.id}+batch_id:${batch.id}`});
|
||||
const batchRecipients = await models.EmailRecipient.findAll({filter: `email_id:'${emailModel.id}'+batch_id:'${batch.id}'`});
|
||||
assert.equal(batchRecipients.models.length, 1);
|
||||
|
||||
emailRecipients.push(...batchRecipients.models);
|
||||
|
@ -268,7 +268,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored permanent failure
|
||||
const permanentFailures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(permanentFailures.length, 1);
|
||||
|
||||
@ -364,7 +364,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored permanent failure
|
||||
const permanentFailures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(permanentFailures.length, 1);
|
||||
|
||||
@ -455,7 +455,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored permanent failure
|
||||
const permanentFailures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(permanentFailures.length, 1);
|
||||
|
||||
@ -544,7 +544,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored permanent failure
|
||||
const permanentFailures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(permanentFailures.length, 1);
|
||||
|
||||
@ -661,7 +661,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored temporary failure
|
||||
const failures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(failures.length, 1);
|
||||
|
||||
@ -763,7 +763,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored temporary failure
|
||||
const failures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(failures.length, 1);
|
||||
|
||||
@ -865,7 +865,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored temporary failure
|
||||
const failures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(failures.length, 1);
|
||||
|
||||
@ -967,7 +967,7 @@ describe('EmailEventStorage', function () {
|
||||
|
||||
// Check we have a stored temporary failure
|
||||
const failures = await models.EmailRecipientFailure.findAll({
|
||||
filter: `email_recipient_id:${emailRecipient.id}`
|
||||
filter: `email_recipient_id:'${emailRecipient.id}'`
|
||||
});
|
||||
assert.equal(failures.length, 1);
|
||||
|
||||
@ -991,7 +991,7 @@ describe('EmailEventStorage', function () {
|
||||
const providerId = emailBatch.provider_id;
|
||||
const timestamp = new Date(2000, 0, 1);
|
||||
const eventsURI = '/members/events/?' + encodeURIComponent(
|
||||
`filter=type:-[comment_event,aggregated_click_event]+data.member_id:${memberId}`
|
||||
`filter=type:-[comment_event,aggregated_click_event]+data.member_id:'${memberId}'`
|
||||
);
|
||||
|
||||
// Check not unsubscribed
|
||||
|
@ -1501,7 +1501,7 @@ describe('Post Model', function () {
|
||||
|
||||
return models.MobiledocRevision
|
||||
.findAll({
|
||||
filter: `post_id:${updatedPost.id}`
|
||||
filter: `post_id:'${updatedPost.id}'`
|
||||
});
|
||||
})
|
||||
.then((mobiledocRevisions) => {
|
||||
@ -1536,7 +1536,7 @@ describe('Post Model', function () {
|
||||
})
|
||||
.then(() => models.MobiledocRevision
|
||||
.findAll({
|
||||
filter: `post_id:${revisionedPost.id}`
|
||||
filter: `post_id:'${revisionedPost.id}'`
|
||||
})
|
||||
)
|
||||
.then((mobiledocRevisions) => {
|
||||
@ -1566,7 +1566,7 @@ describe('Post Model', function () {
|
||||
|
||||
return models.MobiledocRevision
|
||||
.findAll({
|
||||
filter: `post_id:${createdPost.id}`
|
||||
filter: `post_id:'${createdPost.id}'`
|
||||
});
|
||||
})
|
||||
.then((mobiledocRevisions) => {
|
||||
@ -1582,7 +1582,7 @@ describe('Post Model', function () {
|
||||
|
||||
return models.MobiledocRevision
|
||||
.findAll({
|
||||
filter: `post_id:${editedPost.id}`
|
||||
filter: `post_id:'${editedPost.id}'`
|
||||
});
|
||||
})
|
||||
.then((mobiledocRevisions) => {
|
||||
|
@ -248,6 +248,8 @@ class BatchSendingService {
|
||||
logging.info(`Fetching members batch for email ${email.id} segment ${segment}, lastId: ${lastId}`);
|
||||
|
||||
const filter = segmentFilter + `+id:<'${lastId}'`;
|
||||
logging.info(`Fetching members batch for email ${email.id} segment ${segment}, lastId: ${lastId} ${filter}`);
|
||||
|
||||
members = await this.#models.Member.getFilteredCollectionQuery({filter})
|
||||
.orderByRaw('id DESC')
|
||||
.select('members.id', 'members.uuid', 'members.email', 'members.name').limit(BATCH_SIZE + 1);
|
||||
@ -505,18 +507,9 @@ class BatchSendingService {
|
||||
|
||||
const BATCH_SIZE = this.#sendingService.getMaximumRecipients();
|
||||
if (models.length > BATCH_SIZE) {
|
||||
// @NOTE: filtering by batch_id is our best effort to "correct" returned data
|
||||
logging.warn(`Email batch ${batchId} has ${models.length} members, which exceeds the maximum of ${BATCH_SIZE} members per batch. Filtering by batch_id: ${batchId}`);
|
||||
models = models.filter(m => m.get('batch_id') === batchId);
|
||||
|
||||
if (models.length > BATCH_SIZE) {
|
||||
// @NOTE this is a best effort logic to still try sending an email batch
|
||||
// even if it exceeds the maximum recipients limit of the sending service.
|
||||
// In theory this should never happen, but being extra safe to make sure
|
||||
// the email delivery still happens.
|
||||
logging.error(`Email batch ${batchId} has ${models.length} members, which exceeds the maximum of ${BATCH_SIZE}. Truncating to ${BATCH_SIZE}`);
|
||||
models = models.slice(0, BATCH_SIZE);
|
||||
}
|
||||
throw new errors.EmailError({
|
||||
message: `Email batch ${batchId} has ${models.length} members, which exceeds the maximum of ${BATCH_SIZE} members per batch.`
|
||||
});
|
||||
}
|
||||
|
||||
return models.map((model) => {
|
||||
|
@ -984,7 +984,7 @@ class EmailRenderer {
|
||||
if (newsletter.get('show_latest_posts')) {
|
||||
// Fetch last 3 published posts
|
||||
const {data} = await this.#models.Post.findPage({
|
||||
filter: 'status:published+id:-' + post.id,
|
||||
filter: `status:published+id:-'${post.id}'`,
|
||||
order: 'published_at DESC',
|
||||
limit: 3
|
||||
});
|
||||
|
@ -8,11 +8,9 @@ const errors = require('@tryghost/errors');
|
||||
|
||||
describe('Batch Sending Service', function () {
|
||||
let errorLog;
|
||||
let warnLog;
|
||||
|
||||
beforeEach(function () {
|
||||
errorLog = sinon.stub(logging, 'error');
|
||||
warnLog = sinon.stub(logging, 'warn');
|
||||
sinon.stub(logging, 'info');
|
||||
});
|
||||
|
||||
@ -332,7 +330,7 @@ describe('Batch Sending Service', function () {
|
||||
},
|
||||
emailSegmenter: {
|
||||
getMemberFilterForSegment(n) {
|
||||
return `newsletters.id:${n.id}`;
|
||||
return `newsletters.id:'${n.id}'`;
|
||||
}
|
||||
},
|
||||
db
|
||||
@ -432,7 +430,7 @@ describe('Batch Sending Service', function () {
|
||||
},
|
||||
emailSegmenter: {
|
||||
getMemberFilterForSegment(n, _, segment) {
|
||||
return `newsletters.id:${n.id}+(${segment})`;
|
||||
return `newsletters.id:'${n.id}'+(${segment})`;
|
||||
}
|
||||
},
|
||||
db
|
||||
@ -537,7 +535,7 @@ describe('Batch Sending Service', function () {
|
||||
},
|
||||
emailSegmenter: {
|
||||
getMemberFilterForSegment(n, _, segment) {
|
||||
return `newsletters.id:${n.id}+(${segment})`;
|
||||
return `newsletters.id:'${n.id}'+(${segment})`;
|
||||
}
|
||||
},
|
||||
db
|
||||
@ -997,7 +995,7 @@ describe('Batch Sending Service', function () {
|
||||
assert.equal(members.length, 2);
|
||||
});
|
||||
|
||||
it('Truncates recipients if more than the maximum are returned in a batch', async function () {
|
||||
it('Throws error if more than the maximum are returned in a batch', async function () {
|
||||
const EmailBatch = createModelClass({
|
||||
findOne: {
|
||||
id: '123_batch_id',
|
||||
@ -1081,7 +1079,7 @@ describe('Batch Sending Service', function () {
|
||||
const service = new BatchSendingService({
|
||||
models: {EmailBatch, EmailRecipient: DoubleTheEmailRecipients},
|
||||
sendingService,
|
||||
BEFORE_RETRY_CONFIG: {maxRetries: 10, maxTime: 2000, sleep: 1}
|
||||
BEFORE_RETRY_CONFIG: {maxRetries: 1, maxTime: 2000, sleep: 1}
|
||||
});
|
||||
|
||||
const result = await service.sendBatch({
|
||||
@ -1093,25 +1091,11 @@ describe('Batch Sending Service', function () {
|
||||
newsletter: createModel({})
|
||||
});
|
||||
|
||||
assert.equal(result, true);
|
||||
|
||||
sinon.assert.calledOnce(warnLog);
|
||||
const firstLoggedWarn = warnLog.firstCall.args[0];
|
||||
assert.match(firstLoggedWarn, /Email batch 123_batch_id has 4 members, which exceeds the maximum of 2 members per batch. Filtering by batch_id: 123_batch_id/);
|
||||
|
||||
sinon.assert.calledOnce(errorLog);
|
||||
const firstLoggedError = errorLog.firstCall.args[0];
|
||||
|
||||
assert.match(firstLoggedError, /Email batch 123_batch_id has 3 members, which exceeds the maximum of 2. Truncating to 2/);
|
||||
|
||||
sinon.assert.calledOnce(sendingService.send);
|
||||
const {members} = sendingService.send.firstCall.args[0];
|
||||
assert.equal(members.length, 2);
|
||||
assert.equal(result, false);
|
||||
|
||||
sinon.assert.calledOnce(findOne);
|
||||
const batch = await findOne.firstCall.returnValue;
|
||||
assert.equal(batch.get('status'), 'submitted');
|
||||
assert.equal(batch.get('provider_id'), 'providerid@example.com');
|
||||
assert.equal(batch.get('status'), 'failed');
|
||||
});
|
||||
|
||||
it('Stops retrying after the email retry cut off time', async function () {
|
||||
|
@ -47,7 +47,7 @@ describe('InMemoryMentionRepository', function () {
|
||||
}
|
||||
|
||||
const pageOne = await repository.getPage({
|
||||
filter: `resource_id:${resourceId.toHexString()}`,
|
||||
filter: `resource_id:'${resourceId.toHexString()}'`,
|
||||
limit: 2,
|
||||
page: 1
|
||||
});
|
||||
@ -57,7 +57,7 @@ describe('InMemoryMentionRepository', function () {
|
||||
assert(pageOne.meta.pagination.next === 2);
|
||||
|
||||
const pageTwo = await repository.getPage({
|
||||
filter: `resource_id:${resourceId.toHexString()}`,
|
||||
filter: `resource_id:'${resourceId.toHexString()}'`,
|
||||
limit: 2,
|
||||
page: 2
|
||||
});
|
||||
@ -67,7 +67,7 @@ describe('InMemoryMentionRepository', function () {
|
||||
assert(pageTwo.meta.pagination.next === 3);
|
||||
|
||||
const pageThree = await repository.getPage({
|
||||
filter: `resource_id:${resourceId.toHexString()}`,
|
||||
filter: `resource_id:'${resourceId.toHexString()}'`,
|
||||
limit: 2,
|
||||
page: 3
|
||||
});
|
||||
|
Loading…
Reference in New Issue
Block a user