From 326c3393ca9f9c269c83879d06b98b58a4110322 Mon Sep 17 00:00:00 2001 From: DrMint <29893320+DrMint@users.noreply.github.com> Date: Sun, 28 Jul 2024 13:21:01 +0200 Subject: [PATCH] Use endpoint changes --- src/cache/dataCache.ts | 149 ------------------------------------- src/cache/documentCache.ts | 49 ------------ src/convert.ts | 58 ++++++++++++++- src/index.ts | 6 +- src/services.ts | 16 ---- src/shared | 2 +- src/synchro.ts | 53 +++---------- src/utils.ts | 6 ++ src/utils/logger.ts | 6 -- src/webhook.ts | 33 ++++++-- 10 files changed, 101 insertions(+), 277 deletions(-) delete mode 100644 src/cache/dataCache.ts delete mode 100644 src/cache/documentCache.ts create mode 100644 src/utils.ts delete mode 100644 src/utils/logger.ts diff --git a/src/cache/dataCache.ts b/src/cache/dataCache.ts deleted file mode 100644 index b4a72a8..0000000 --- a/src/cache/dataCache.ts +++ /dev/null @@ -1,149 +0,0 @@ -import { getLogger } from "src/utils/logger"; -import { writeFile, mkdir, readFile } from "fs/promises"; -import { existsSync } from "fs"; -import type { PayloadSDK } from "src/shared/payload/sdk"; - -const ON_DISK_ROOT = `.cache/dataCache`; -const ON_DISK_RESPONSE_CACHE_FILE = `${ON_DISK_ROOT}/responseCache.json`; - -export class DataCache { - private readonly logger = getLogger("[DataCache]"); - private initialized = false; - - private readonly responseCache = new Map(); - private readonly invalidationMap = new Map>(); - - private scheduleSaveTimeout: NodeJS.Timeout | undefined; - - constructor( - private readonly payload: PayloadSDK, - private readonly uncachedPayload: PayloadSDK, - private readonly onInvalidate: (urls: string[]) => Promise - ) {} - - async init() { - if (this.initialized) return; - - if (process.env.DATA_PRECACHING === "true") { - await this.precache(); - } - - this.initialized = true; - } - - private async precache() { - // Get all keys from CMS - const allSDKUrls = (await this.uncachedPayload.getAllSdkUrls()).data.urls; - - // Load cache from disk 
if available - if (existsSync(ON_DISK_RESPONSE_CACHE_FILE)) { - this.logger.log("Loading cache from disk..."); - const buffer = await readFile(ON_DISK_RESPONSE_CACHE_FILE); - const data = JSON.parse(buffer.toString()) as [string, any][]; - for (const [key, value] of data) { - // Do not include cache where the key is no longer in the CMS - if (!allSDKUrls.includes(key)) continue; - this.set(key, value); - } - } - - const cacheSizeBeforePrecaching = this.responseCache.size; - - for (const url of allSDKUrls) { - // Do not precache response if already included in the loaded cache from disk - if (this.responseCache.has(url)) continue; - try { - await this.payload.request(url); - } catch { - this.logger.warn("Precaching failed for url", url); - } - } - - if (cacheSizeBeforePrecaching !== this.responseCache.size) { - this.scheduleSave(); - } - - this.logger.log("Precaching completed!", this.responseCache.size, "responses cached"); - } - - get(url: string) { - if (process.env.DATA_CACHING !== "true") return; - const cachedResponse = this.responseCache.get(url); - if (cachedResponse) { - this.logger.log("Retrieved cached response for", url); - return structuredClone(cachedResponse); - } - } - - set(url: string, response: any) { - if (process.env.DATA_CACHING !== "true") return; - const stringData = JSON.stringify(response); - const regex = /[a-f0-9]{24}/g; - const ids = [...stringData.matchAll(regex)].map((match) => match[0]); - const uniqueIds = [...new Set(ids)]; - - uniqueIds.forEach((id) => { - const current = this.invalidationMap.get(id); - if (current) { - current.add(url); - } else { - this.invalidationMap.set(id, new Set([url])); - } - }); - - this.responseCache.set(url, response); - this.logger.log("Cached response for", url); - if (this.initialized) { - this.scheduleSave(); - } - } - - async invalidate(ids: string[], urls: string[]) { - if (process.env.DATA_CACHING !== "true") return; - const urlsToInvalidate = new Set(urls); - - ids.forEach((id) => { - const 
urlsForThisId = this.invalidationMap.get(id); - if (!urlsForThisId) return; - this.invalidationMap.delete(id); - [...urlsForThisId].forEach((url) => urlsToInvalidate.add(url)); - }); - - for (const url of urlsToInvalidate) { - this.responseCache.delete(url); - this.logger.log("Invalidated cache for", url); - try { - await this.payload.request(url); - } catch (e) { - this.logger.log("Revalidation fails for", url); - } - } - - this.onInvalidate([...urlsToInvalidate]); - this.logger.log("There are currently", this.responseCache.size, "responses in cache."); - if (this.initialized) { - this.scheduleSave(); - } - } - - private scheduleSave() { - if (this.scheduleSaveTimeout) { - clearTimeout(this.scheduleSaveTimeout); - } - this.scheduleSaveTimeout = setTimeout(() => { - this.save(); - }, 10_000); - } - - private async save() { - if (!existsSync(ON_DISK_ROOT)) { - await mkdir(ON_DISK_ROOT, { recursive: true }); - } - - const serializedResponseCache = JSON.stringify([...this.responseCache]); - await writeFile(ON_DISK_RESPONSE_CACHE_FILE, serializedResponseCache, { - encoding: "utf-8", - }); - this.logger.log("Saved", ON_DISK_RESPONSE_CACHE_FILE); - } -} diff --git a/src/cache/documentCache.ts b/src/cache/documentCache.ts deleted file mode 100644 index 1869e08..0000000 --- a/src/cache/documentCache.ts +++ /dev/null @@ -1,49 +0,0 @@ -import type { Meilisearch } from "meilisearch"; -import { getMeiliDocumentsFromRequest } from "src/convert"; -import { MeiliIndexes } from "src/shared/meilisearch/constants"; -import type { - MeiliDocument, - MeiliDocumentRequest, -} from "src/shared/meilisearch/types"; -import { getLogger } from "src/utils/logger"; - -export class DocumentInvalidator { - private readonly logger = getLogger("[DocumentInvalidator]"); - constructor(private readonly meili: Meilisearch) {} - - async invalidate(urls: string[]) { - const index = this.meili.index(MeiliIndexes.DOCUMENT); - - const documentRequests: MeiliDocumentRequest[] = []; - - for (const url of 
urls) { - const result = await index.search(undefined, { - filter: `endpointCalled = "${url}"`, - limit: 1, - }); - - const doc = result.hits[0] as MeiliDocument | undefined; - if (!doc) continue; - - await index.deleteDocument(doc.docId); - documentRequests.push(doc); - } - - const documents: MeiliDocument[] = []; - for (const request of documentRequests) { - try { - documents.push(...(await getMeiliDocumentsFromRequest(request))); - } catch (e) { - this.logger.log("Failure to revalidate a document"); - } - } - - this.logger.log( - "Adding", - documents.length, - "documents to Meilisearch" - ); - - await index.addDocuments(documents); - } -} diff --git a/src/convert.ts b/src/convert.ts index e08148b..8c37752 100644 --- a/src/convert.ts +++ b/src/convert.ts @@ -19,7 +19,11 @@ import { formatInlineTitle, formatRichTextContentToString, } from "src/shared/payload/format"; -import type { PayloadSDKResponse } from "src/shared/payload/sdk"; +import { + SDKEndpointNames, + type PayloadSDKResponse, +} from "src/shared/payload/sdk"; +import type { EndpointChange } from "src/shared/payload/webhooks"; const convertPageToDocument = ({ data, @@ -261,22 +265,29 @@ export const getMeiliDocumentsFromRequest = async ( switch (request.type) { case Collections.Audios: return convertAudioToDocument(await payload.getAudioByID(request.id)); + case Collections.ChronologyEvents: return convertChronologyEventToDocument( await payload.getChronologyEventByID(request.id) ); + case Collections.Collectibles: return convertCollectibleToDocument( await payload.getCollectible(request.slug) ); + case Collections.Files: return convertFileToDocument(await payload.getFileByID(request.id)); + case Collections.Folders: return convertFolderToDocument(await payload.getFolder(request.slug)); + case Collections.Images: return convertImageToDocument(await payload.getImageByID(request.id)); + case Collections.Pages: return convertPageToDocument(await payload.getPage(request.slug)); + case 
Collections.Recorders:
       return convertRecorderToDocument(
         await payload.getRecorderByID(request.id)
@@ -285,3 +296,48 @@ export const getMeiliDocumentsFromRequest = async (
       return convertVideoToDocument(await payload.getVideoByID(request.id));
   }
 };
+
+export const convertChangeToMeiliDocumentRequest = (
+  change: EndpointChange
+): MeiliDocumentRequest | undefined => {
+  switch (change.type) {
+    case SDKEndpointNames.getFolder:
+      return { type: Collections.Folders, slug: change.slug };
+
+    case SDKEndpointNames.getPage:
+      return { type: Collections.Pages, slug: change.slug };
+
+    case SDKEndpointNames.getCollectible:
+      return { type: Collections.Collectibles, slug: change.slug };
+
+    case SDKEndpointNames.getChronologyEventByID:
+      return { type: Collections.ChronologyEvents, id: change.id };
+
+    case SDKEndpointNames.getImageByID:
+      return { type: Collections.Images, id: change.id };
+
+    case SDKEndpointNames.getAudioByID:
+      return { type: Collections.Audios, id: change.id };
+
+    case SDKEndpointNames.getVideoByID:
+      return { type: Collections.Videos, id: change.id };
+
+    case SDKEndpointNames.getFileByID:
+      return { type: Collections.Files, id: change.id };
+
+    case SDKEndpointNames.getRecorderByID:
+      return { type: Collections.Recorders, id: change.id };
+
+    case SDKEndpointNames.getWebsiteConfig:
+    case SDKEndpointNames.getLanguages:
+    case SDKEndpointNames.getCurrencies:
+    case SDKEndpointNames.getWordings:
+    case SDKEndpointNames.getCollectibleScans:
+    case SDKEndpointNames.getCollectibleScanPage:
+    case SDKEndpointNames.getCollectibleGallery:
+    case SDKEndpointNames.getCollectibleGalleryImage:
+    case SDKEndpointNames.getChronologyEvents:
+    default:
+      return undefined;
+  }
+};
diff --git a/src/index.ts b/src/index.ts
index ff20ddb..7616490 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,10 +1,8 @@
 import http from "http";
 import { synchronizeMeiliDocs } from "./synchro";
-import type { AfterOperationWebHookMessage } from "src/shared/payload/webhooks";
 import { 
webhookHandler } from "src/webhook"; -import { dataCache } from "src/services"; +import type { EndpointChange } from "src/shared/payload/webhooks"; -await dataCache.init(); await synchronizeMeiliDocs(); export const requestListener: http.RequestListener = async (req, res) => { @@ -25,7 +23,7 @@ export const requestListener: http.RequestListener = async (req, res) => { } const message = JSON.parse( Buffer.concat(buffers).toString() - ) as AfterOperationWebHookMessage; + ) as EndpointChange[]; // Not awaiting on purpose to respond with a 202 and not block the CMS webhookHandler(message); diff --git a/src/services.ts b/src/services.ts index 3b948e2..acc57ac 100644 --- a/src/services.ts +++ b/src/services.ts @@ -1,6 +1,4 @@ import { MeiliSearch } from "meilisearch"; -import { DataCache } from "src/cache/dataCache"; -import { DocumentInvalidator } from "src/cache/documentCache"; import { TokenCache } from "src/cache/tokenCache"; import { PayloadSDK } from "src/shared/payload/sdk"; @@ -23,17 +21,3 @@ export const payload = new PayloadSDK( process.env.PAYLOAD_PASSWORD ); payload.addTokenCache(tokenCache); - -export const uncachedPayload = new PayloadSDK( - process.env.PAYLOAD_API_URL, - process.env.PAYLOAD_USER, - process.env.PAYLOAD_PASSWORD -); -uncachedPayload.addTokenCache(tokenCache); - -const documentInvalidator = new DocumentInvalidator(meili); - -export const dataCache = new DataCache(payload, uncachedPayload, (urls) => - documentInvalidator.invalidate(urls) -); -payload.addDataCache(dataCache); diff --git a/src/shared b/src/shared index 806543a..caa79de 160000 --- a/src/shared +++ b/src/shared @@ -1 +1 @@ -Subproject commit 806543a487319e56cb9f678c9c3a35666f90b82a +Subproject commit caa79dee9eca5b9b6959e6f5a721245202423612 diff --git a/src/synchro.ts b/src/synchro.ts index 8ff081c..be9885c 100644 --- a/src/synchro.ts +++ b/src/synchro.ts @@ -1,11 +1,14 @@ -import { getMeiliDocumentsFromRequest } from "src/convert"; -import { meili, uncachedPayload } from 
"src/services"; +import { + convertChangeToMeiliDocumentRequest, + getMeiliDocumentsFromRequest, +} from "src/convert"; +import { meili, payload } from "src/services"; import { MeiliIndexes } from "src/shared/meilisearch/constants"; import type { MeiliDocument, MeiliDocumentRequest, } from "src/shared/meilisearch/types"; -import { Collections } from "src/shared/payload/constants"; +import { isDefined } from "src/utils"; export const synchronizeMeiliDocs = async () => { const version = await meili.getVersion(); @@ -34,47 +37,11 @@ export const synchronizeMeiliDocs = async () => { await index.updateDistinctAttribute("distinctId"); // await index.updateDisplayedAttributes(["type", "page"]); - const allIds = (await uncachedPayload.getAllIds()).data; - - const documentRequests: MeiliDocumentRequest[] = [ - ...allIds.pages.slugs.map((slug) => ({ - type: Collections.Pages as const, - slug, - })), - ...allIds.collectibles.slugs.map((slug) => ({ - type: Collections.Collectibles as const, - slug, - })), - ...allIds.folders.slugs.map((slug) => ({ - type: Collections.Folders as const, - slug, - })), - ...allIds.audios.ids.map((id) => ({ - type: Collections.Audios as const, - id, - })), - ...allIds.images.ids.map((id) => ({ - type: Collections.Images as const, - id, - })), - ...allIds.videos.ids.map((id) => ({ - type: Collections.Videos as const, - id, - })), - ...allIds.files.ids.map((id) => ({ - type: Collections.Files as const, - id, - })), - ...allIds.recorders.ids.map((id) => ({ - type: Collections.Recorders as const, - id, - })), - ...allIds.chronologyEvents.ids.map((id) => ({ - type: Collections.ChronologyEvents as const, - id, - })), - ]; + const allChanges = (await payload.getAll()).data; + const documentRequests: MeiliDocumentRequest[] = allChanges + .map(convertChangeToMeiliDocumentRequest) + .filter(isDefined); const documents: MeiliDocument[] = []; for (const request of documentRequests) { documents.push(...(await getMeiliDocumentsFromRequest(request))); diff --git 
a/src/utils.ts b/src/utils.ts new file mode 100644 index 0000000..0bab86f --- /dev/null +++ b/src/utils.ts @@ -0,0 +1,6 @@ +export const isDefined = (value: T | null | undefined): value is T => + value !== null && value !== undefined; + +export const isUndefined = ( + value: T | null | undefined +): value is null | undefined => !isDefined(value); diff --git a/src/utils/logger.ts b/src/utils/logger.ts deleted file mode 100644 index 509acf2..0000000 --- a/src/utils/logger.ts +++ /dev/null @@ -1,6 +0,0 @@ -export const getLogger = (prefix: string): Pick => ({ - debug: (...message) => console.debug(prefix, ...message), - log: (...message) => console.log(prefix, ...message), - warn: (...message) => console.warn(prefix, ...message), - error: (...message) => console.error(prefix, ...message), -}); diff --git a/src/webhook.ts b/src/webhook.ts index e0bccaf..d9fd930 100644 --- a/src/webhook.ts +++ b/src/webhook.ts @@ -1,10 +1,27 @@ -import { dataCache } from "src/services"; -import type { AfterOperationWebHookMessage } from "src/shared/payload/webhooks"; +import { + convertChangeToMeiliDocumentRequest, + getMeiliDocumentsFromRequest, +} from "src/convert"; +import { meili } from "src/services"; +import { MeiliIndexes } from "src/shared/meilisearch/constants"; +import type { MeiliDocument } from "src/shared/meilisearch/types"; +import type { EndpointChange } from "src/shared/payload/webhooks"; -export const webhookHandler = async ({ - id, - addedDependantIds, - urls, -}: AfterOperationWebHookMessage) => { - await dataCache.invalidate([...(id ? 
[id] : []), ...addedDependantIds], urls); +export const webhookHandler = async (changes: EndpointChange[]) => { + const index = meili.index(MeiliIndexes.DOCUMENT); + + const documents: MeiliDocument[] = []; + for (const change of changes) { + await index.deleteDocuments({ + filter: `endpointCalled = "${change.url}"`, + }); + + const meiliDocRequest = convertChangeToMeiliDocumentRequest(change); + if (!meiliDocRequest) continue; + documents.push(...(await getMeiliDocumentsFromRequest(meiliDocRequest))); + } + + console.log("[Webhook] Adding", documents.length, "documents to Meilisearch"); + + await index.addDocuments(documents); };