From d8a3f011f35e9909d455d2cffd58a3cb95df5742 Mon Sep 17 00:00:00 2001
From: Razzmatazz
Date: Wed, 14 Aug 2024 13:13:57 -0500
Subject: [PATCH] fix caching

---
 index.mjs                            |  6 ++++--
 plugins/plugin-nightbot.mjs          | 16 +++++++++-------
 plugins/plugin-use-cache-machine.mjs | 25 ++++++++++++++++++++++---
 utils/cache-machine.mjs              | 24 +++++++++++++++---------
 4 files changed, 50 insertions(+), 21 deletions(-)

diff --git a/index.mjs b/index.mjs
index 47d0a961..37827f02 100644
--- a/index.mjs
+++ b/index.mjs
@@ -89,10 +89,12 @@ async function graphqlHandler(request, env, ctx) {
         specialCache = 'application/json';
     }
 
+    let key;
     // Check the cache service for data first - If cached data exists, return it
     // we don't check the cache if we're the http server because the worker already did
     if (env.SKIP_CACHE !== 'true' && !env.CLOUDFLARE_TOKEN) {
-        const cachedResponse = await cacheMachine.get(env, query, variables, specialCache);
+        key = await cacheMachine.createKey(env, query, variables, specialCache);
+        const cachedResponse = await cacheMachine.get(env, {key});
         if (cachedResponse) {
             // Construct a new response with the cached data
             const newResponse = new Response(cachedResponse, responseOptions);
@@ -173,7 +175,7 @@ async function graphqlHandler(request, env, ctx) {
 
     if (env.SKIP_CACHE !== 'true' && ttl > 0) {
         // using waitUntil doesn't hold up returning a response but keeps the worker alive as long as needed
-        ctx.waitUntil(cacheMachine.put(env, body, {query, variables, ttl, specialCache}));
+        ctx.waitUntil(cacheMachine.put(env, body, {key, query, variables, ttl, specialCache}));
     }
 
     return response;
diff --git a/plugins/plugin-nightbot.mjs b/plugins/plugin-nightbot.mjs
index be468001..72f67b33 100644
--- a/plugins/plugin-nightbot.mjs
+++ b/plugins/plugin-nightbot.mjs
@@ -13,7 +13,6 @@ const usePaths = [
 ];
 
 export async function getNightbotResponse(request, url, env, serverContext) {
-    console.log('serverContext', Object.keys(serverContext));
     if (request.method.toUpperCase() !== 'GET') {
         return new Response(null, {
             status: 405,
@@ -32,9 +31,11 @@ export async function getNightbotResponse(request, url, env, serverContext) {
     const gameMode = url.searchParams.get('m') || 'regular';
     const query = url.searchParams.get('q');
 
-    if (env.SKIP_CACHE !== 'true') {
+    let key;
+    if (env.SKIP_CACHE !== 'true' && !request.headers.has('cache-check-complete')) {
         const requestStart = new Date();
-        const cachedResponse = await cacheMachine.get(env, 'nightbot', { q: query, l: lang, m: gameMode });
+        key = await cacheMachine.createKey(env, 'nightbot', { q: query, l: lang, m: gameMode });
+        const cachedResponse = await cacheMachine.get(env, {key});
         if (cachedResponse) {
             // Construct a new response with the cached data
             const newResponse = new Response(cachedResponse);
@@ -42,6 +43,7 @@ export async function getNightbotResponse(request, url, env, serverContext) {
             newResponse.headers.append('X-CACHE', 'HIT');
             console.log(`Request served from cache: ${new Date() - requestStart} ms`);
             // Return the new cached response
+            request.cached = true;
             return newResponse;
         } else {
             console.log('no cached response');
@@ -57,7 +59,7 @@ export async function getNightbotResponse(request, url, env, serverContext) {
     let responseBody = 'Found no item matching that name';
     try {
         items = await data.worker.item.getItemsByName(context, info, query);
-        ttl = data.getRequestTtl(context.requestId)
+        ttl = data.getRequestTtl(context.requestId);
 
         if (items.length > 0) {
             const bestPrice = items[0].sellFor.sort((a, b) => b.price - a.price);
@@ -72,13 +74,13 @@ export async function getNightbotResponse(request, url, env, serverContext) {
     // Update the cache with the results of the query
     if (env.SKIP_CACHE !== 'true' && ttl > 0) {
-        const putCachePromise = cacheMachine.put(env, 'nightbot', { q: query, l: lang, m: gameMode }, responseBody, String(ttl));
+        const putCachePromise = cacheMachine.put(env, responseBody, { key, query: 'nightbot', variables: { q: query, l: lang, m: gameMode }, ttl: String(ttl)});
         // using waitUntil doens't hold up returning a response but keeps the worker alive as long as needed
         if (request.ctx?.waitUntil) {
             request.ctx.waitUntil(putCachePromise);
-        } /*else if (serverContext.waitUntil) {
+        } else if (serverContext.waitUntil) {
             serverContext.waitUntil(putCachePromise);
-        }*/
+        }
     }
 
     return new Response(responseBody)
 }
diff --git a/plugins/plugin-use-cache-machine.mjs b/plugins/plugin-use-cache-machine.mjs
index f379fe01..40f966f7 100644
--- a/plugins/plugin-use-cache-machine.mjs
+++ b/plugins/plugin-use-cache-machine.mjs
@@ -21,13 +21,22 @@ export default function useCacheMachine(env) {
                 console.log(`Skipping cache check already performed by worker`);
                 return;
             }
-            const cachedResponse = await cacheMachine.get(env, params.query, params.variables, specialCache(request));
+            const cachedResponse = await cacheMachine.get(env, {query: params.query, variables: params.variables, specialCache: specialCache(request)});
             if (cachedResponse) {
                 console.log('Request served from cache');
                 request.cached = true;
                 setResult(JSON.parse(cachedResponse));
             }
         },
+        onValidate({ context, extendContext, params, validateFn, addValidationRule, setValidationFn, setResult }) {
+            return ({ valid, result, context, extendContext, setResult }) => {
+                // collect stats on if query was valid
+                if (valid) {
+                    return;
+                }
+                // result is an array of errors we can log
+            };
+        },
         onContextBuilding({context, extendContext, breakContextBuilding}) {
             context.request.ctx = context.ctx ?? context.request.ctx;
             if (typeof context.waitUntil === 'function') {
@@ -40,11 +49,21 @@ export default function useCacheMachine(env) {
             console.log(`KVs pre-loaded: ${context.data.kvLoaded.join(', ') || 'none'}`);
             extendContext({requestId: context.request.requestId});
         },
+        onExecute({ executeFn, setExecuteFn, setResultAndStopExecution, extendContext, args }) {
+            const executeStart = new Date();
+            //extendContext({executeStart: new Date()});
+            return {
+                onExecuteDone: ({ args, result, setResult }) => {
+                    console.log(args.contextValue.requestId, `Execution time: ${new Date() - executeStart} ms`);
+                    // can check for errors at result.errors
+                },
+            };
+        },
         onResultProcess({request, acceptableMediaTypes, result, setResult, resultProcessor, setResultProcessor}) {
             if (request.cached) {
                 return;
             }
-            if (!result.data) {
+            if (!result.data && !result.errors) {
                 return;
             }
             if (request.errors?.length > 0) {
@@ -60,7 +79,7 @@ export default function useCacheMachine(env) {
                 result.warnings.push(...request.warnings);
             }
 
-            let ttl = request.data.getRequestTtl(request.requestId);
+            let ttl = request.data.getRequestTtl(request.requestId) ?? 60 * 5;
 
             const sCache = specialCache(request);
             if (sCache === 'application/json') {
diff --git a/utils/cache-machine.mjs b/utils/cache-machine.mjs
index e467c512..fc5a934d 100644
--- a/utils/cache-machine.mjs
+++ b/utils/cache-machine.mjs
@@ -6,7 +6,7 @@ let cachePaused = false;
 
 function pauseCache() {
     cacheFailCount++;
-    if (cacheFailCount <= 2) {
+    if (cacheFailCount <= 4) {
         return;
     }
     cachePaused = true;
@@ -39,17 +39,20 @@ async function hash(string) {
 }
 
 const cacheMachine = {
-    createKey: (environment, query, variables, specialCache = '') => {
+    createKey: (env, query, variables = {}, specialCache = '') => {
         if (typeof variables !== 'string') {
             variables = JSON.stringify(variables);
         }
+        if (typeof query !== 'string') {
+            query = JSON.stringify(query);
+        }
         query = query.trim();
-        return hash(environment + query + variables + specialCache);
+        return hash(env.ENVIRONMENT + query + variables + specialCache);
     },
     // Checks the caching service to see if a request has been cached
     // :param json: the json payload of the incoming worker request
     // :return: json results of the item found in the cache or false if not found
-    get: async (env, query, variables, specialCache = '') => {
+    get: async (env, options = {}) => {
         try {
             if (!env.CACHE_BASIC_AUTH) {
                 console.warn('env.CACHE_BASIC_AUTH is not set; skipping cache check');
@@ -59,14 +62,17 @@ const cacheMachine = {
                 console.warn('Cache paused; skipping cache check');
                 return false;
             }
+            let query = options.query ?? '';
             query = query.trim();
-            const cacheKey = await cacheMachine.createKey(env.ENVIRONMENT, query, variables, specialCache);
-            if (!cacheKey) {
+            let { key, variables = {}, specialCache = '' } = options;
+            key = key ?? await cacheMachine.createKey(env, query, variables, specialCache);
+            //console.log('getting cache ', key, typeof query, query);
+            if (!key) {
                 console.warn('Skipping cache check; key is empty');
                 return false;
             }
 
-            const response = await fetchWithTimeout(`${cacheUrl}/api/cache?key=${cacheKey}`, {
+            const response = await fetchWithTimeout(`${cacheUrl}/api/cache?key=${key}`, {
                 headers: {
                     'content-type': 'application/json;charset=UTF-8',
                     'Authorization': `Basic ${env.CACHE_BASIC_AUTH}`
@@ -108,10 +114,10 @@
             console.warn('Key or query not provided, skipping cache put');
             return false;
         }
-        let { key, query, variables, ttl = 60, specialCache = '' } = options;
+        let { key, query, variables, ttl = 60 * 5, specialCache = '' } = options;
         if (!key) {
             query = query.trim();
-            key = await cacheMachine.createKey(env.ENVIRONMENT, query, variables, specialCache);
+            key = await cacheMachine.createKey(env, query, variables, specialCache);
         }
         ttl = String(ttl);
         console.log(`Caching ${body.length} byte response for ${env.ENVIRONMENT} environment${ttl ? ` for ${ttl} seconds` : ''}`);