const EventProcessingResult = require('./EventProcessingResult');
const logging = require('@tryghost/logging');
const errors = require('@tryghost/errors');

/**
 * @typedef {import('@tryghost/email-service').EmailEventProcessor} EmailEventProcessor
 */

/**
 * @typedef {object} FetchData
 * @property {boolean} running
 * @property {Date} [lastStarted] Date the last fetch started on
 * @property {Date} [lastBegin] The begin time used during the last fetch
 * @property {Date} [lastEventTimestamp]
 * @property {boolean} [canceled] Set to quit the job early
 */

/**
 * @typedef {FetchData & {schedule: {begin: Date, end: Date}}} FetchDataScheduled
 */

const TRUST_THRESHOLD_MS = 30 * 60 * 1000; // 30 minutes
const FETCH_LATEST_END_MARGIN_MS = 1 * 60 * 1000; // Do not fetch events newer than 1 minute (yet). Reduces the chance of having missed events in fetchLatest.
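
// Rough timing picture implied by the constants above (comment only, for orientation):
//   fetchLatest:  (lastEventTimestamp        .. now - FETCH_LATEST_END_MARGIN_MS]
//   fetchMissing: (up to ~1.5h ago           .. min(now - TRUST_THRESHOLD_MS, lastBegin of fetchLatest)]
// Events older than TRUST_THRESHOLD_MS are assumed to be stable in Mailgun's storage.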

module.exports = class EmailAnalyticsService {
    config;
    settings;
    queries;
    eventProcessor;
    providers;

    /**
     * @type {FetchData}
     */
    #fetchLatestData = null;

    /**
     * @type {FetchData}
     */
    #fetchMissingData = null;

    /**
     * @type {FetchDataScheduled}
     */
    #fetchScheduledData = null;

    /**
     * @param {object} dependencies
     * @param {EmailEventProcessor} dependencies.eventProcessor
     */
    constructor({config, settings, queries, eventProcessor, providers}) {
        this.config = config;
        this.settings = settings;
        this.queries = queries;
        this.eventProcessor = eventProcessor;
        this.providers = providers;
    }

    getStatus() {
        return {
            latest: this.#fetchLatestData,
            missing: this.#fetchMissingData,
            scheduled: this.#fetchScheduledData
        };
    }
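
    // Example of the returned status shape (illustrative values; each slot is a
    // FetchData object or null, per the typedefs above):
    //
    //     {
    //         latest: {running: true, lastStarted: new Date(), lastBegin: new Date(), lastEventTimestamp: new Date()},
    //         missing: {running: false},
    //         scheduled: null
    //     }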

    /**
     * Returns the timestamp of the last event we processed. Defaults to now minus 30 minutes if we have no data yet.
     */
    async getLastEventTimestamp() {
        return this.#fetchLatestData?.lastEventTimestamp ?? (await this.queries.getLastSeenEventTimestamp()) ?? new Date(Date.now() - TRUST_THRESHOLD_MS);
    }

    async fetchLatest({maxEvents = Infinity} = {}) {
        // Start where we left off, or at the last stored event in the database, or 30 minutes ago if we have nothing available
        const begin = await this.getLastEventTimestamp();
        const end = new Date(Date.now() - FETCH_LATEST_END_MARGIN_MS); // Always stop at x minutes ago to give Mailgun a bit more time to stabilize storage

        if (end <= begin) {
            // Skip for now
            logging.info('[EmailAnalytics] Skipping fetchLatest because end (' + end + ') is before begin (' + begin + ')');
            return 0;
        }

        // Create the fetch data object if it doesn't exist yet
        if (!this.#fetchLatestData) {
            this.#fetchLatestData = {
                running: false
            };
        }

        return await this.#fetchEvents(this.#fetchLatestData, {begin, end, maxEvents});
    }
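
    // Sketch of how a caller might drive fetchLatest periodically; `service` and
    // the cadence are assumptions for illustration, not part of this module:
    //
    //     setInterval(() => {
    //         service.fetchLatest({maxEvents: 1000}).catch(err => logging.error(err));
    //     }, 5 * 60 * 1000);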

    /**
     * Fetches events that are older than 30 minutes, because then the 'storage' of the Mailgun API is stable and we are sure we don't miss any events.
     * @param {object} options
     * @param {number} [options.maxEvents] Not a strict maximum. We stop fetching after we reached the maximum AND received at least one event after begin (not equal) to prevent deadlocks.
     */
    async fetchMissing({maxEvents = Infinity} = {}) {
        // We start where we left off, or 1.5h ago after a server restart
        const begin = this.#fetchMissingData?.lastEventTimestamp ?? this.#fetchMissingData?.lastBegin ?? new Date(Date.now() - TRUST_THRESHOLD_MS * 3);

        // Always stop at the time fetchLatest started fetching on, or at maximum 30 minutes ago
        // (fall back to now if fetchLatest hasn't run yet, so Math.min can't receive undefined and produce an invalid date)
        const end = new Date(
            Math.min(
                Date.now() - TRUST_THRESHOLD_MS,
                this.#fetchLatestData?.lastBegin?.getTime() ?? Date.now()
            )
        );

        if (end <= begin) {
            // Skip for now
            logging.info('[EmailAnalytics] Skipping fetchMissing because end (' + end + ') is before begin (' + begin + ')');
            return 0;
        }

        // Create the fetch data object if it doesn't exist yet
        if (!this.#fetchMissingData) {
            this.#fetchMissingData = {
                running: false
            };
        }

        return await this.#fetchEvents(this.#fetchMissingData, {begin, end, maxEvents});
    }

    /**
     * Schedule a new fetch for the given period.
     */
    schedule({begin, end}) {
        if (this.#fetchScheduledData && this.#fetchScheduledData.running) {
            throw new errors.ValidationError({
                message: 'Already fetching scheduled events. Wait for it to finish before scheduling a new one.'
            });
        }
        logging.info('[EmailAnalytics] Scheduling fetch from ' + begin.toISOString() + ' until ' + end.toISOString());
        this.#fetchScheduledData = {
            running: false,
            schedule: {
                begin,
                end
            }
        };
    }

    cancelScheduled() {
        if (this.#fetchScheduledData) {
            if (this.#fetchScheduledData.running) {
                // Cancel the running fetch
                this.#fetchScheduledData.canceled = true;
            } else {
                this.#fetchScheduledData = null;
            }
        }
    }

    /**
     * Continues fetching the scheduled events (does not start a new schedule). Resets the scheduled fetch when it receives 0 events.
     */
    async fetchScheduled({maxEvents = Infinity} = {}) {
        if (!this.#fetchScheduledData || !this.#fetchScheduledData.schedule) {
            // Nothing scheduled
            return 0;
        }

        if (this.#fetchScheduledData.canceled) {
            // Skip for now
            this.#fetchScheduledData = null;
            return 0;
        }

        let begin = this.#fetchScheduledData.schedule.begin;
        const end = this.#fetchScheduledData.schedule.end;

        if (this.#fetchScheduledData.lastEventTimestamp && this.#fetchScheduledData.lastEventTimestamp > begin) {
            // Continue where we left off
            begin = this.#fetchScheduledData.lastEventTimestamp;
        }

        if (end <= begin) {
            // Skip for now
            logging.info('[EmailAnalytics] Ending fetchScheduled because end is before begin');
            this.#fetchScheduledData = null;
            return 0;
        }

        const count = await this.#fetchEvents(this.#fetchScheduledData, {begin, end, maxEvents});
        if (count === 0 || this.#fetchScheduledData.canceled) {
            // Reset the scheduled fetch
            this.#fetchScheduledData = null;
        }
        return count;
    }
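
    // Sketch of a backfill flow using the scheduling API above (`service` is an
    // assumption for illustration):
    //
    //     service.schedule({begin: new Date('2023-01-01'), end: new Date('2023-02-01')});
    //     let count;
    //     do {
    //         count = await service.fetchScheduled({maxEvents: 10000});
    //     } while (count > 0); // fetchScheduled resets the schedule once it returns 0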

    /**
     * Start fetching analytics and store the progress data inside fetchData
     * @param {FetchData} fetchData
     * @param {object} options
     * @param {Date} options.begin
     * @param {Date} options.end
     * @param {number} [options.maxEvents] Not a strict maximum. We stop fetching after we reached the maximum AND received at least one event after begin (not equal) to prevent deadlocks.
     */
    async #fetchEvents(fetchData, {begin, end, maxEvents = Infinity}) {
        logging.info('[EmailAnalytics] Fetching from ' + begin.toISOString() + ' until ' + end.toISOString() + ' (maxEvents: ' + maxEvents + ')');

        // Store that we started fetching
        fetchData.running = true;
        fetchData.lastStarted = new Date();
        fetchData.lastBegin = begin;

        let lastAggregation = Date.now();
        let eventCount = 0;

        // We keep the processing result here, so we also have a result in case of failures
        let processingResult = new EventProcessingResult();
        let error = null;

        const processBatch = async (events) => {
            // Even if the fetching is interrupted because of an error, we still store the last event timestamp
            await this.processEventBatch(events, processingResult, fetchData);
            eventCount += events.length;

            // Every 5 minutes or 5000 members we do an aggregation and clear the processingResult
            // Otherwise we would need to loop over a lot of members afterwards, which takes too long without updating the stat counts in between
            if (Date.now() - lastAggregation > 5 * 60 * 1000 || processingResult.memberIds.length > 5000) {
                // Aggregate and clear the processingResult
                // We do this here because otherwise it could take a long time before the new events are visible in the stats
                try {
                    await this.aggregateStats(processingResult);
                    lastAggregation = Date.now();
                    processingResult = new EventProcessingResult();
                } catch (err) {
                    logging.error('[EmailAnalytics] Error while aggregating stats');
                    logging.error(err);
                }
            }

            if (fetchData.canceled) {
                throw new errors.InternalServerError({
                    message: 'Fetching canceled'
                });
            }
        };

        try {
            for (const provider of this.providers) {
                await provider.fetchLatest(processBatch, {begin, end, maxEvents});
            }

            logging.info('[EmailAnalytics] Fetching finished');
        } catch (err) {
            if (err.message !== 'Fetching canceled') {
                logging.error('[EmailAnalytics] Error while fetching');
                logging.error(err);
                error = err;
            } else {
                logging.error('[EmailAnalytics] Canceled fetching');
            }
        }

        // Aggregate
        try {
            await this.aggregateStats(processingResult);
        } catch (err) {
            logging.error('[EmailAnalytics] Error while aggregating stats');
            logging.error(err);

            if (!error) {
                error = err;
            }
        }

        // Small trick: if we reached the end of the new events, we would otherwise keep
        // fetching the same events because 'begin' won't change.
        // So if we didn't have errors while fetching, and total events < maxEvents, increase lastEventTimestamp by one second.
        if (!error && eventCount > 0 && eventCount < maxEvents && fetchData.lastEventTimestamp && fetchData.lastEventTimestamp.getTime() < Date.now() - 2000) {
            logging.info('[EmailAnalytics] Reached end of new events, increasing lastEventTimestamp by one second');
            fetchData.lastEventTimestamp = new Date(fetchData.lastEventTimestamp.getTime() + 1000);
        }

        fetchData.running = false;

        if (error) {
            throw error;
        }
        return eventCount;
    }
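
    // The provider contract assumed by #fetchEvents (a sketch; real providers may
    // accept more options):
    //
    //     class ExampleProvider {
    //         // Must invoke `batchHandler(events)` for every batch of events that
    //         // occurred between `begin` and `end`, stopping around `maxEvents`.
    //         async fetchLatest(batchHandler, {begin, end, maxEvents}) { /* ... */ }
    //     }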

    /**
     * @param {any[]} events
     * @param {EventProcessingResult} result
     * @param {FetchData} fetchData
     */
    async processEventBatch(events, result, fetchData) {
        const processStart = Date.now();
        for (const event of events) {
            const batchResult = await this.processEvent(event);

            // Save last event timestamp
            if (!fetchData.lastEventTimestamp || (event.timestamp && event.timestamp > fetchData.lastEventTimestamp)) {
                fetchData.lastEventTimestamp = event.timestamp;
            }

            result.merge(batchResult);
        }
        const processEnd = Date.now();
        const time = processEnd - processStart;
        if (time > 1000) {
            // This is a means to show in the logs that the analytics job is still alive.
            logging.warn(`[EmailAnalytics] Processing event batch took ${(time / 1000).toFixed(1)}s`);
        }
    }

    /**
     * @param {{id: string, type: any; severity: any; recipientEmail: any; emailId?: string; providerId: string; timestamp: Date; error: {code: number; message: string; enhancedCode: string|number} | null}} event
     * @returns {Promise<EventProcessingResult>}
     */
    async processEvent(event) {
        if (event.type === 'delivered') {
            const recipient = await this.eventProcessor.handleDelivered({emailId: event.emailId, providerId: event.providerId, email: event.recipientEmail}, event.timestamp);

            if (recipient) {
                return new EventProcessingResult({
                    delivered: 1,
                    emailIds: [recipient.emailId],
                    memberIds: [recipient.memberId]
                });
            }

            return new EventProcessingResult({unprocessable: 1});
        }

        if (event.type === 'opened') {
            const recipient = await this.eventProcessor.handleOpened({emailId: event.emailId, providerId: event.providerId, email: event.recipientEmail}, event.timestamp);

            if (recipient) {
                return new EventProcessingResult({
                    opened: 1,
                    emailIds: [recipient.emailId],
                    memberIds: [recipient.memberId]
                });
            }

            return new EventProcessingResult({unprocessable: 1});
        }

        if (event.type === 'failed') {
            if (event.severity === 'permanent') {
                const recipient = await this.eventProcessor.handlePermanentFailed({emailId: event.emailId, providerId: event.providerId, email: event.recipientEmail}, {id: event.id, timestamp: event.timestamp, error: event.error});

                if (recipient) {
                    return new EventProcessingResult({
                        permanentFailed: 1,
                        emailIds: [recipient.emailId],
                        memberIds: [recipient.memberId]
                    });
                }

                return new EventProcessingResult({unprocessable: 1});
            } else {
                const recipient = await this.eventProcessor.handleTemporaryFailed({emailId: event.emailId, providerId: event.providerId, email: event.recipientEmail}, {id: event.id, timestamp: event.timestamp, error: event.error});

                if (recipient) {
                    return new EventProcessingResult({
                        temporaryFailed: 1,
                        emailIds: [recipient.emailId],
                        memberIds: [recipient.memberId]
                    });
                }

                return new EventProcessingResult({unprocessable: 1});
            }
        }

        if (event.type === 'unsubscribed') {
            const recipient = await this.eventProcessor.handleUnsubscribed({emailId: event.emailId, providerId: event.providerId, email: event.recipientEmail}, event.timestamp);

            if (recipient) {
                return new EventProcessingResult({
                    unsubscribed: 1,
                    emailIds: [recipient.emailId],
                    memberIds: [recipient.memberId]
                });
            }

            return new EventProcessingResult({unprocessable: 1});
        }

        if (event.type === 'complained') {
            const recipient = await this.eventProcessor.handleComplained({emailId: event.emailId, providerId: event.providerId, email: event.recipientEmail}, event.timestamp);

            if (recipient) {
                return new EventProcessingResult({
                    complained: 1,
                    emailIds: [recipient.emailId],
                    memberIds: [recipient.memberId]
                });
            }

            return new EventProcessingResult({unprocessable: 1});
        }

        return new EventProcessingResult({unhandled: 1});
    }
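
    // Example of an event object as consumed by processEvent (values are hypothetical,
    // the shape follows the typedef above):
    //
    //     {
    //         id: 'event-id',
    //         type: 'failed',
    //         severity: 'permanent',
    //         recipientEmail: 'member@example.com',
    //         emailId: 'email-id',
    //         providerId: 'provider-message-id',
    //         timestamp: new Date(),
    //         error: {code: 550, message: 'Mailbox does not exist', enhancedCode: '5.1.1'}
    //     }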

    async aggregateStats({emailIds = [], memberIds = []}) {
        logging.info(`[EmailAnalytics] Aggregating for ${emailIds.length} emails`);
        for (const emailId of emailIds) {
            await this.aggregateEmailStats(emailId);
        }

        logging.info(`[EmailAnalytics] Aggregating for ${memberIds.length} members`);
        for (const memberId of memberIds) {
            await this.aggregateMemberStats(memberId);
        }
    }

    async aggregateEmailStats(emailId) {
        return this.queries.aggregateEmailStats(emailId);
    }

    async aggregateMemberStats(memberId) {
        return this.queries.aggregateMemberStats(memberId);
    }
};