Use endpoint changes

This commit is contained in:
DrMint 2024-07-28 13:21:01 +02:00
parent 4609c1b1d2
commit 326c3393ca
10 changed files with 101 additions and 277 deletions

149 src/cache/dataCache.ts vendored

@@ -1,149 +0,0 @@
import { getLogger } from "src/utils/logger";
import { writeFile, mkdir, readFile } from "fs/promises";
import { existsSync } from "fs";
import type { PayloadSDK } from "src/shared/payload/sdk";
const ON_DISK_ROOT = `.cache/dataCache`;
const ON_DISK_RESPONSE_CACHE_FILE = `${ON_DISK_ROOT}/responseCache.json`;
export class DataCache {
  private readonly logger = getLogger("[DataCache]");
  private initialized = false;

  private readonly responseCache = new Map<string, any>();
  private readonly invalidationMap = new Map<string, Set<string>>();

  private scheduleSaveTimeout: NodeJS.Timeout | undefined;

  constructor(
    private readonly payload: PayloadSDK,
    private readonly uncachedPayload: PayloadSDK,
    private readonly onInvalidate: (urls: string[]) => Promise<void>
  ) {}

  async init() {
    if (this.initialized) return;
    if (process.env.DATA_PRECACHING === "true") {
      await this.precache();
    }
    this.initialized = true;
  }

  private async precache() {
    // Get all keys from CMS
    const allSDKUrls = (await this.uncachedPayload.getAllSdkUrls()).data.urls;

    // Load cache from disk if available
    if (existsSync(ON_DISK_RESPONSE_CACHE_FILE)) {
      this.logger.log("Loading cache from disk...");
      const buffer = await readFile(ON_DISK_RESPONSE_CACHE_FILE);
      const data = JSON.parse(buffer.toString()) as [string, any][];
      for (const [key, value] of data) {
        // Do not include cache where the key is no longer in the CMS
        if (!allSDKUrls.includes(key)) continue;
        this.set(key, value);
      }
    }

    const cacheSizeBeforePrecaching = this.responseCache.size;

    for (const url of allSDKUrls) {
      // Do not precache response if already included in the loaded cache from disk
      if (this.responseCache.has(url)) continue;
      try {
        await this.payload.request(url);
      } catch {
        this.logger.warn("Precaching failed for url", url);
      }
    }

    if (cacheSizeBeforePrecaching !== this.responseCache.size) {
      this.scheduleSave();
    }

    this.logger.log("Precaching completed!", this.responseCache.size, "responses cached");
  }

  get(url: string) {
    if (process.env.DATA_CACHING !== "true") return;
    const cachedResponse = this.responseCache.get(url);
    if (cachedResponse) {
      this.logger.log("Retrieved cached response for", url);
      return structuredClone(cachedResponse);
    }
  }

  set(url: string, response: any) {
    if (process.env.DATA_CACHING !== "true") return;
    const stringData = JSON.stringify(response);
    // Collect every 24-character hex string (MongoDB ObjectId) in the response
    const regex = /[a-f0-9]{24}/g;
    const ids = [...stringData.matchAll(regex)].map((match) => match[0]);
    const uniqueIds = [...new Set(ids)];

    // Map each ID to the URLs whose responses embed it
    uniqueIds.forEach((id) => {
      const current = this.invalidationMap.get(id);
      if (current) {
        current.add(url);
      } else {
        this.invalidationMap.set(id, new Set([url]));
      }
    });

    this.responseCache.set(url, response);
    this.logger.log("Cached response for", url);

    if (this.initialized) {
      this.scheduleSave();
    }
  }

  async invalidate(ids: string[], urls: string[]) {
    if (process.env.DATA_CACHING !== "true") return;
    const urlsToInvalidate = new Set<string>(urls);

    ids.forEach((id) => {
      const urlsForThisId = this.invalidationMap.get(id);
      if (!urlsForThisId) return;
      this.invalidationMap.delete(id);
      [...urlsForThisId].forEach((url) => urlsToInvalidate.add(url));
    });

    for (const url of urlsToInvalidate) {
      this.responseCache.delete(url);
      this.logger.log("Invalidated cache for", url);
      try {
        await this.payload.request(url);
      } catch (e) {
        this.logger.log("Revalidation failed for", url);
      }
    }

    this.onInvalidate([...urlsToInvalidate]);
    this.logger.log("There are currently", this.responseCache.size, "responses in cache.");

    if (this.initialized) {
      this.scheduleSave();
    }
  }

  private scheduleSave() {
    if (this.scheduleSaveTimeout) {
      clearTimeout(this.scheduleSaveTimeout);
    }
    this.scheduleSaveTimeout = setTimeout(() => {
      this.save();
    }, 10_000);
  }

  private async save() {
    if (!existsSync(ON_DISK_ROOT)) {
      await mkdir(ON_DISK_ROOT, { recursive: true });
    }
    const serializedResponseCache = JSON.stringify([...this.responseCache]);
    await writeFile(ON_DISK_RESPONSE_CACHE_FILE, serializedResponseCache, {
      encoding: "utf-8",
    });
    this.logger.log("Saved", ON_DISK_RESPONSE_CACHE_FILE);
  }
}
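
For context on the invalidation scheme this commit retires: set() scanned each serialized response for 24-character hex strings (MongoDB ObjectIds) and recorded which URLs embed which IDs, so invalidate() could map a changed document back to every cached URL that mentions it. A minimal standalone sketch of that technique, with illustrative helper names that are not part of this repo:

// Sketch only: extractIds/buildInvalidationMap are hypothetical helpers.
const extractIds = (response: unknown): string[] => {
  const matches = JSON.stringify(response).matchAll(/[a-f0-9]{24}/g);
  return [...new Set([...matches].map((m) => m[0]))];
};

const buildInvalidationMap = (
  cache: Map<string, unknown>
): Map<string, Set<string>> => {
  const map = new Map<string, Set<string>>();
  for (const [url, response] of cache) {
    for (const id of extractIds(response)) {
      const urls = map.get(id);
      if (urls) urls.add(url);
      else map.set(id, new Set([url]));
    }
  }
  return map;
};

// A change to one document ID then invalidates every URL whose
// cached response embeds that ID.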

src/cache/documentCache.ts

@@ -1,49 +0,0 @@
import type { Meilisearch } from "meilisearch";
import { getMeiliDocumentsFromRequest } from "src/convert";
import { MeiliIndexes } from "src/shared/meilisearch/constants";
import type {
  MeiliDocument,
  MeiliDocumentRequest,
} from "src/shared/meilisearch/types";
import { getLogger } from "src/utils/logger";
export class DocumentInvalidator {
  private readonly logger = getLogger("[DocumentInvalidator]");

  constructor(private readonly meili: Meilisearch) {}

  async invalidate(urls: string[]) {
    const index = this.meili.index(MeiliIndexes.DOCUMENT);

    // Find and delete the existing document for each invalidated endpoint URL
    const documentRequests: MeiliDocumentRequest[] = [];
    for (const url of urls) {
      const result = await index.search(undefined, {
        filter: `endpointCalled = "${url}"`,
        limit: 1,
      });
      const doc = result.hits[0] as MeiliDocument | undefined;
      if (!doc) continue;
      await index.deleteDocument(doc.docId);
      documentRequests.push(doc);
    }

    const documents: MeiliDocument[] = [];
    for (const request of documentRequests) {
      try {
        documents.push(...(await getMeiliDocumentsFromRequest(request)));
      } catch (e) {
        this.logger.log("Failed to revalidate a document");
      }
    }

    this.logger.log("Adding", documents.length, "documents to Meilisearch");
    await index.addDocuments(documents);
  }
}

src/convert.ts

@@ -19,7 +19,11 @@ import {
  formatInlineTitle,
  formatRichTextContentToString,
} from "src/shared/payload/format";
import type { PayloadSDKResponse } from "src/shared/payload/sdk";
import {
  SDKEndpointNames,
  type PayloadSDKResponse,
} from "src/shared/payload/sdk";
import type { EndpointChange } from "src/shared/payload/webhooks";

const convertPageToDocument = ({
  data,
@@ -261,22 +265,29 @@ export const getMeiliDocumentsFromRequest = async (
  switch (request.type) {
    case Collections.Audios:
      return convertAudioToDocument(await payload.getAudioByID(request.id));
    case Collections.ChronologyEvents:
      return convertChronologyEventToDocument(
        await payload.getChronologyEventByID(request.id)
      );
    case Collections.Collectibles:
      return convertCollectibleToDocument(
        await payload.getCollectible(request.slug)
      );
    case Collections.Files:
      return convertFileToDocument(await payload.getFileByID(request.id));
    case Collections.Folders:
      return convertFolderToDocument(await payload.getFolder(request.slug));
    case Collections.Images:
      return convertImageToDocument(await payload.getImageByID(request.id));
    case Collections.Pages:
      return convertPageToDocument(await payload.getPage(request.slug));
    case Collections.Recorders:
      return convertRecorderToDocument(
        await payload.getRecorderByID(request.id)
@@ -285,3 +296,48 @@ export const getMeiliDocumentsFromRequest = async (
      return convertVideoToDocument(await payload.getVideoByID(request.id));
  }
};
export const convertChangeToMeiliDocumentRequest = (
  change: EndpointChange
): MeiliDocumentRequest | undefined => {
  switch (change.type) {
    case SDKEndpointNames.getFolder:
      return { type: Collections.Folders, slug: change.slug };
    case SDKEndpointNames.getPage:
      return { type: Collections.Pages, slug: change.slug };
    case SDKEndpointNames.getCollectible:
      return { type: Collections.Collectibles, slug: change.slug };
    case SDKEndpointNames.getChronologyEventByID:
      return { type: Collections.ChronologyEvents, id: change.id };
    case SDKEndpointNames.getImageByID:
      return { type: Collections.Images, id: change.id };
    case SDKEndpointNames.getAudioByID:
      return { type: Collections.Audios, id: change.id };
    case SDKEndpointNames.getVideoByID:
      return { type: Collections.Videos, id: change.id };
    case SDKEndpointNames.getFileByID:
      return { type: Collections.Files, id: change.id };
    case SDKEndpointNames.getRecorderByID:
      return { type: Collections.Recorders, id: change.id };
    case SDKEndpointNames.getWebsiteConfig:
    case SDKEndpointNames.getLanguages:
    case SDKEndpointNames.getCurrencies:
    case SDKEndpointNames.getWordings:
    case SDKEndpointNames.getCollectibleScans:
    case SDKEndpointNames.getCollectibleScanPage:
    case SDKEndpointNames.getCollectibleGallery:
    case SDKEndpointNames.getCollectibleGalleryImage:
    case SDKEndpointNames.getChronologyEvents:
    default:
      return undefined;
  }
};
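
As a usage sketch (the change literal below is illustrative; the exact EndpointChange shape is defined in src/shared/payload/webhooks):

// Hypothetical example input, not taken from the repo.
const change = {
  type: SDKEndpointNames.getPage,
  slug: "example-page",
  url: "/pages/example-page",
} as EndpointChange;

convertChangeToMeiliDocumentRequest(change);
// → { type: Collections.Pages, slug: "example-page" }
// Config-style endpoints (getWordings, getLanguages, ...) return undefined
// and are filtered out by callers such as synchronizeMeiliDocs.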

@@ -1,10 +1,8 @@
import http from "http";
import { synchronizeMeiliDocs } from "./synchro";
import type { AfterOperationWebHookMessage } from "src/shared/payload/webhooks";
import { webhookHandler } from "src/webhook";
import { dataCache } from "src/services";
import type { EndpointChange } from "src/shared/payload/webhooks";
await dataCache.init();
await synchronizeMeiliDocs();
export const requestListener: http.RequestListener = async (req, res) => {
@@ -25,7 +23,7 @@ export const requestListener: http.RequestListener = async (req, res) => {
  }
  const message = JSON.parse(
    Buffer.concat(buffers).toString()
  ) as AfterOperationWebHookMessage;
  ) as EndpointChange[];
  // Not awaiting on purpose to respond with a 202 and not block the CMS
  webhookHandler(message);

src/services.ts

@@ -1,6 +1,4 @@
import { MeiliSearch } from "meilisearch";
import { DataCache } from "src/cache/dataCache";
import { DocumentInvalidator } from "src/cache/documentCache";
import { TokenCache } from "src/cache/tokenCache";
import { PayloadSDK } from "src/shared/payload/sdk";
@@ -23,17 +21,3 @@ export const payload = new PayloadSDK(
  process.env.PAYLOAD_PASSWORD
);
payload.addTokenCache(tokenCache);

export const uncachedPayload = new PayloadSDK(
  process.env.PAYLOAD_API_URL,
  process.env.PAYLOAD_USER,
  process.env.PAYLOAD_PASSWORD
);
uncachedPayload.addTokenCache(tokenCache);

const documentInvalidator = new DocumentInvalidator(meili);
export const dataCache = new DataCache(payload, uncachedPayload, (urls) =>
  documentInvalidator.invalidate(urls)
);
payload.addDataCache(dataCache);

@@ -1 +1 @@
Subproject commit 806543a487319e56cb9f678c9c3a35666f90b82a
Subproject commit caa79dee9eca5b9b6959e6f5a721245202423612

src/synchro.ts

@@ -1,11 +1,14 @@
import { getMeiliDocumentsFromRequest } from "src/convert";
import { meili, uncachedPayload } from "src/services";
import {
  convertChangeToMeiliDocumentRequest,
  getMeiliDocumentsFromRequest,
} from "src/convert";
import { meili, payload } from "src/services";
import { MeiliIndexes } from "src/shared/meilisearch/constants";
import type {
  MeiliDocument,
  MeiliDocumentRequest,
} from "src/shared/meilisearch/types";
import { Collections } from "src/shared/payload/constants";
import { isDefined } from "src/utils";
export const synchronizeMeiliDocs = async () => {
  const version = await meili.getVersion();

@@ -34,47 +37,11 @@ export const synchronizeMeiliDocs = async () => {
  await index.updateDistinctAttribute("distinctId");
  // await index.updateDisplayedAttributes(["type", "page"]);

  const allIds = (await uncachedPayload.getAllIds()).data;
  const documentRequests: MeiliDocumentRequest[] = [
    ...allIds.pages.slugs.map((slug) => ({
      type: Collections.Pages as const,
      slug,
    })),
    ...allIds.collectibles.slugs.map((slug) => ({
      type: Collections.Collectibles as const,
      slug,
    })),
    ...allIds.folders.slugs.map((slug) => ({
      type: Collections.Folders as const,
      slug,
    })),
    ...allIds.audios.ids.map((id) => ({
      type: Collections.Audios as const,
      id,
    })),
    ...allIds.images.ids.map((id) => ({
      type: Collections.Images as const,
      id,
    })),
    ...allIds.videos.ids.map((id) => ({
      type: Collections.Videos as const,
      id,
    })),
    ...allIds.files.ids.map((id) => ({
      type: Collections.Files as const,
      id,
    })),
    ...allIds.recorders.ids.map((id) => ({
      type: Collections.Recorders as const,
      id,
    })),
    ...allIds.chronologyEvents.ids.map((id) => ({
      type: Collections.ChronologyEvents as const,
      id,
    })),
  ];
  const allChanges = (await payload.getAll()).data;
  const documentRequests: MeiliDocumentRequest[] = allChanges
    .map(convertChangeToMeiliDocumentRequest)
    .filter(isDefined);

  const documents: MeiliDocument[] = [];
  for (const request of documentRequests) {
    documents.push(...(await getMeiliDocumentsFromRequest(request)));

6 src/utils.ts Normal file

@@ -0,0 +1,6 @@
export const isDefined = <T>(value: T | null | undefined): value is T =>
  value !== null && value !== undefined;

export const isUndefined = <T>(
  value: T | null | undefined
): value is null | undefined => !isDefined(value);
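
The value of isDefined over an inline check is the `value is T` type predicate: passed to Array.prototype.filter, it narrows a (T | null | undefined)[] to T[], which a plain boolean callback does not. A quick illustration:

// Without the predicate, the filtered array would still be typed (number | undefined)[].
const values: (number | undefined)[] = [1, undefined, 2];
const defined: number[] = values.filter(isDefined); // [1, 2]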

src/utils/logger.ts

@@ -1,6 +0,0 @@
export const getLogger = (prefix: string): Pick<Console, "log" | "error" | "warn" | "debug"> => ({
  debug: (...message) => console.debug(prefix, ...message),
  log: (...message) => console.log(prefix, ...message),
  warn: (...message) => console.warn(prefix, ...message),
  error: (...message) => console.error(prefix, ...message),
});

src/webhook.ts

@@ -1,10 +1,27 @@
import { dataCache } from "src/services";
import type { AfterOperationWebHookMessage } from "src/shared/payload/webhooks";
import {
  convertChangeToMeiliDocumentRequest,
  getMeiliDocumentsFromRequest,
} from "src/convert";
import { meili } from "src/services";
import { MeiliIndexes } from "src/shared/meilisearch/constants";
import type { MeiliDocument } from "src/shared/meilisearch/types";
import type { EndpointChange } from "src/shared/payload/webhooks";
export const webhookHandler = async ({
  id,
  addedDependantIds,
  urls,
}: AfterOperationWebHookMessage) => {
  await dataCache.invalidate([...(id ? [id] : []), ...addedDependantIds], urls);
export const webhookHandler = async (changes: EndpointChange[]) => {
  const index = meili.index(MeiliIndexes.DOCUMENT);

  const documents: MeiliDocument[] = [];
  for (const change of changes) {
    // Remove any documents previously generated from this endpoint before re-converting
    await index.deleteDocuments({
      filter: `endpointCalled = "${change.url}"`,
    });
    const meiliDocRequest = convertChangeToMeiliDocumentRequest(change);
    if (!meiliDocRequest) continue;
    documents.push(...(await getMeiliDocumentsFromRequest(meiliDocRequest)));
  }

  console.log("[Webhook] Adding", documents.length, "documents to Meilisearch");
  await index.addDocuments(documents);
};
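
Wired to the request listener shown earlier in this diff, the flow is: the CMS POSTs an EndpointChange[] payload, the server responds without awaiting the handler, and re-indexing runs in the background. A condensed sketch of that caller (status-code handling is assumed, not copied verbatim from the listener):

// Condensed, illustrative caller; the real listener lives in the server file above.
const server = http.createServer(async (req, res) => {
  const buffers: Buffer[] = [];
  for await (const chunk of req) buffers.push(chunk as Buffer);
  const changes = JSON.parse(Buffer.concat(buffers).toString()) as EndpointChange[];
  webhookHandler(changes); // not awaited: reply 202, index in the background
  res.writeHead(202).end();
});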