diff --git a/.nycrc.json b/.nycrc.json index b950f0abf..7a8da8fdf 100644 --- a/.nycrc.json +++ b/.nycrc.json @@ -16,4 +16,4 @@ "src/agents/org-detector/instructions.js", "src/controllers/demo.js" ] -} \ No newline at end of file +} diff --git a/package-lock.json b/package-lock.json index bb59a5fbe..22dfbb43e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -47,6 +47,7 @@ "iso-639-3": "^3.0.1", "js-yaml": "4.1.1", "jsdom": "26.1.0", + "redis": "4.7.0", "slack-block-builder": "2.8.0", "tldts": "7.0.17", "urijs": "1.19.11", @@ -56124,6 +56125,65 @@ "dev": true, "license": "BSD-3-Clause" }, + "node_modules/@redis/bloom": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/bloom/-/bloom-1.2.0.tgz", + "integrity": "sha512-HG2DFjYKbpNmVXsa0keLHp/3leGJz1mjh09f2RLGGLQZzSHpkmZWuwJbAvo3QcRY8p80m5+ZdXZdYOSBLlp7Cg==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/client": { + "version": "1.6.0", + "resolved": "https://registry.npmjs.org/@redis/client/-/client-1.6.0.tgz", + "integrity": "sha512-aR0uffYI700OEEH4gYnitAnv3vzVGXCFvYfdpu/CJKvk4pHfLPEy/JSZyrpQ+15WhXe1yJRXLtfQ84s4mEXnPg==", + "license": "MIT", + "dependencies": { + "cluster-key-slot": "1.1.2", + "generic-pool": "3.9.0", + "yallist": "4.0.0" + }, + "engines": { + "node": ">=14" + } + }, + "node_modules/@redis/graph": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@redis/graph/-/graph-1.1.1.tgz", + "integrity": "sha512-FEMTcTHZozZciLRl6GiiIB4zGm5z5F3F6a6FZCyrfxdKOhFlGkiAqlexWMBzCi4DcRoyiOsuLfW+cjlGWyExOw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/json": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/@redis/json/-/json-1.0.7.tgz", + "integrity": "sha512-6UyXfjVaTBTJtKNG4/9Z8PSpKE6XgSyEb8iwaqDcy+uKrd/DGYHTWkUdnQDyzm727V7p21WUMhsqz5oy65kPcQ==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + 
"node_modules/@redis/search": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/@redis/search/-/search-1.2.0.tgz", + "integrity": "sha512-tYoDBbtqOVigEDMAcTGsRlMycIIjwMCgD8eR2t0NANeQmgK/lvxNAvYyb6bZDD4frHRhIHkJu2TBRvB0ERkOmw==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, + "node_modules/@redis/time-series": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@redis/time-series/-/time-series-1.1.0.tgz", + "integrity": "sha512-c1Q99M5ljsIuc4YdaCwfUEXsofakb9c8+Zse2qxTadu8TalLXuAESzLvFAvNVbkmSlvlzIQOLpBCmWI9wTOt+g==", + "license": "MIT", + "peerDependencies": { + "@redis/client": "^1.0.0" + } + }, "node_modules/@redocly/ajv": { "version": "8.17.1", "resolved": "https://registry.npmjs.org/@redocly/ajv/-/ajv-8.17.1.tgz", @@ -60210,6 +60270,15 @@ "node": ">=6" } }, + "node_modules/cluster-key-slot": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/cluster-key-slot/-/cluster-key-slot-1.1.2.tgz", + "integrity": "sha512-RMr0FhtfXemyinomL4hrWcYJxmX6deFdCxpJzhDttxgO1+bcCnkk+9drydLVDmAMG7NE6aN/fl4F7ucU/90gAA==", + "license": "Apache-2.0", + "engines": { + "node": ">=0.10.0" + } + }, "node_modules/collapse-white-space": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/collapse-white-space/-/collapse-white-space-2.1.0.tgz", @@ -62917,6 +62986,15 @@ "node": ">= 0.4" } }, + "node_modules/generic-pool": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/generic-pool/-/generic-pool-3.9.0.tgz", + "integrity": "sha512-hymDOu5B53XvN4QT9dBmZxPX4CWhBPPLguTZ9MMFeFa/Kg0xWVfylOVNlJji/E7yTZWFd/q9GO5TxDLq156D7g==", + "license": "MIT", + "engines": { + "node": ">= 4" + } + }, "node_modules/get-caller-file": { "version": "2.0.5", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-2.0.5.tgz", @@ -71929,6 +72007,23 @@ "url": "https://github.com/sponsors/jonschlinkert" } }, + "node_modules/redis": { + "version": "4.7.0", + "resolved": 
"https://registry.npmjs.org/redis/-/redis-4.7.0.tgz", + "integrity": "sha512-zvmkHEAdGMn+hMRXuMBtu4Vo5P6rHQjLoHftu+lBqq8ZTA3RCVC/WzD790bkKKiNFp7d5/9PcSD19fJyyRvOdQ==", + "license": "MIT", + "workspaces": [ + "./packages/*" + ], + "dependencies": { + "@redis/bloom": "1.2.0", + "@redis/client": "1.6.0", + "@redis/graph": "1.1.1", + "@redis/json": "1.0.7", + "@redis/search": "1.2.0", + "@redis/time-series": "1.1.0" + } + }, "node_modules/redoc": { "version": "2.5.1", "resolved": "https://registry.npmjs.org/redoc/-/redoc-2.5.1.tgz", @@ -75940,6 +76035,12 @@ "node": ">=10" } }, + "node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" + }, "node_modules/yaml": { "version": "2.8.1", "resolved": "https://registry.npmjs.org/yaml/-/yaml-2.8.1.tgz", diff --git a/package.json b/package.json index 59e9db0cd..7392b27ed 100644 --- a/package.json +++ b/package.json @@ -101,6 +101,7 @@ "iso-639-3": "^3.0.1", "js-yaml": "4.1.1", "jsdom": "26.1.0", + "redis": "4.7.0", "slack-block-builder": "2.8.0", "tldts": "7.0.17", "urijs": "1.19.11", diff --git a/src/controllers/llmo/llmo-cache-handler.js b/src/controllers/llmo/llmo-cache-handler.js new file mode 100644 index 000000000..589207b39 --- /dev/null +++ b/src/controllers/llmo/llmo-cache-handler.js @@ -0,0 +1,311 @@ +/* + * Copyright 2025 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. 
See the License for the specific language + * governing permissions and limitations under the License. + */ + +import { SPACECAT_USER_AGENT, tracingFetch as fetch } from '@adobe/spacecat-shared-utils'; +import { + applyFilters, + applyInclusions, + applySort, + LLMO_SHEETDATA_SOURCE_URL, +} from './llmo-utils.js'; + +const generateCacheKey = (llmoConfig, filePath, queryParams) => { + const { dataFolder } = llmoConfig; + + // Sort query params to ensure consistent cache keys + const sortedParams = {}; + Object.keys(queryParams) + .sort() + .forEach((key) => { + sortedParams[key] = queryParams[key]; + }); + + // Create a string representation of the query params + const paramsString = JSON.stringify(sortedParams); + + // Combine dataFolder, filePath, and query params into a single cache key + return `${dataFolder}/${filePath}:${paramsString}`; +}; + +const processData = (data, queryParams) => { + let processedData = data; + + // Apply sheet filtering if provided (e.g., ?sheets=sheet1,sheet2) + if (queryParams.sheets && processedData[':type'] === 'multi-sheet') { + const requestedSheets = Array.isArray(queryParams.sheets) + ? queryParams.sheets + : queryParams.sheets.split(',').map((sheet) => sheet.trim()); + + // Create a new data object with only the requested sheets + const filteredData = { ':type': 'multi-sheet' }; + requestedSheets.forEach((sheetName) => { + if (processedData[sheetName]) { + filteredData[sheetName] = processedData[sheetName]; + } + }); + processedData = filteredData; + } + + // Apply filters if provided (e.g., ?filter.status=active&filter.type=premium) + const filterFields = {}; + Object.keys(queryParams).forEach((key) => { + if (key.startsWith('filter.')) { + const fieldName = key.substring(7); // Remove 'filter.' 
prefix + filterFields[fieldName] = queryParams[key]; + } + }); + + if (Object.keys(filterFields).length > 0) { + processedData = applyFilters(processedData, filterFields); + } + + // Apply inclusions if provided (e.g., ?include=field1,field2,field3) + if (queryParams.include) { + const includeFields = Array.isArray(queryParams.include) + ? queryParams.include + : queryParams.include.split(',').map((field) => field.trim()); + processedData = applyInclusions(processedData, includeFields); + } + + // Apply sorting if provided (e.g., ?sort=field:asc or ?sort=field:desc) + if (queryParams.sort) { + const sortParam = Array.isArray(queryParams.sort) + ? queryParams.sort[0] + : queryParams.sort; + const [field, order = 'asc'] = sortParam.split(':').map((s) => s.trim()); + + // Validate order is either 'asc' or 'desc' + const sortOrder = order.toLowerCase() === 'desc' ? 'desc' : 'asc'; + + processedData = applySort(processedData, { field, order: sortOrder }); + } + + return processedData; +}; + +const fetchAndProcessSingleFile = async (context, llmoConfig, filePath, queryParams) => { + const { log, env, valkey } = context; + const { sheet } = context.data; + + // Get cache from context (initialized by valkeyClientWrapper) + const cache = valkey?.cache; + + // Generate cache key that includes all query parameters + const cacheKey = generateCacheKey(llmoConfig, filePath, { ...queryParams, sheet }); + + // Try to get processed result from cache first + const cacheStartTime = Date.now(); + const cachedResult = cache ? 
await cache.get(cacheKey) : null; + const cacheFetchTime = Date.now() - cacheStartTime; + + if (cachedResult) { + log.info(`✓ Processed result cache HIT for: ${cacheKey} (fetch time: ${cacheFetchTime}ms)`); + return { + data: cachedResult, + headers: { 'Content-Encoding': 'br' }, + }; + } + + // Cache miss - fetch raw data and process it + log.info(`✗ Processed result cache MISS for: ${cacheKey} (cache check time: ${cacheFetchTime}ms), fetching and processing`); + + const url = new URL(`${LLMO_SHEETDATA_SOURCE_URL}/${llmoConfig.dataFolder}/${filePath}`); + + // Apply pagination parameters when calling the source URL + const limit = queryParams.limit ? parseInt(queryParams.limit, 10) : 10000000; + const offset = queryParams.offset ? parseInt(queryParams.offset, 10) : 0; + + url.searchParams.set('limit', limit.toString()); + url.searchParams.set('offset', offset.toString()); + + // allow fetching a specific sheet from the sheet data source + if (sheet) { + url.searchParams.set('sheet', sheet); + } + + const urlAsString = url.toString(); + log.info(`Fetching single file with path: ${urlAsString}`); + + // Create an AbortController with a 15-second timeout + // to prevent large data fetches keeping the Lambda running for too long + const TIMEOUT_MS = 15000; + const controller = new AbortController(); + const timeoutId = setTimeout(() => controller.abort(), TIMEOUT_MS); // 15 seconds + + // Start timing the source fetch + const sourceFetchStartTime = Date.now(); + + try { + // Fetch data from the external endpoint using the dataFolder from config + const response = await fetch(url.toString(), { + headers: { + Authorization: `token ${env.LLMO_HLX_API_KEY || 'hlx_api_key_missing'}`, + 'User-Agent': SPACECAT_USER_AGENT, + 'Accept-Encoding': 'br', + }, + signal: controller.signal, + }); + clearTimeout(timeoutId); + + if (!response.ok) { + log.error(`Failed to fetch data from external endpoint: ${response.status} ${response.statusText}`); + throw new Error(`External API 
returned ${response.status}: ${response.statusText}`); + } + + // Get the raw response data + const rawData = await response.json(); + const fetchTime = Date.now() - sourceFetchStartTime; + + log.info(`✓ Fetch from HELIX ${filePath}: ${fetchTime}ms`); + + // Process the data with all query parameters + const processStartTime = Date.now(); + const processedData = processData(rawData, queryParams); + const processTime = Date.now() - processStartTime; + + log.info(`✓ Data processing completed in ${processTime}ms`); + + // Cache the processed result (async, don't wait for it) + if (cache) { + cache.set(cacheKey, processedData).catch((error) => { + log.error(`Failed to cache processed data for ${cacheKey}: ${error.message}`); + }); + } + + return { + data: processedData, + headers: response.headers ? Object.fromEntries(response.headers.entries()) : {}, + }; + } catch (error) { + clearTimeout(timeoutId); + if (error.name === 'AbortError') { + log.error(`Request timeout after ${TIMEOUT_MS}ms for file: ${filePath}`); + throw new Error(`Request timeout after ${TIMEOUT_MS}ms`); + } + throw error; + } +}; + +/** + * Process promises in batches with controlled concurrency + * @param {Array} items - Items to process + * @param {Function} fn - Async function to process each item + * @param {number} concurrency - Maximum number of concurrent operations + * @returns {Promise} - Results array + */ +const processBatch = async (items, fn, concurrency) => { + const results = []; + const executing = []; + + for (const item of items) { + const promise = fn(item).then((result) => { + executing.splice(executing.indexOf(promise), 1); + return result; + }); + + results.push(promise); + executing.push(promise); + + if (executing.length >= concurrency) { + // eslint-disable-next-line no-await-in-loop + await Promise.race(executing); + } + } + + return Promise.all(results); +}; + +const fetchAndProcessMultipleFiles = async (context, llmoConfig, files, queryParams) => { + const { log } = 
context; + + // Limit concurrent fetches to prevent resource contention and timeouts + // This prevents all requests from competing for bandwidth/resources + const MAX_CONCURRENT_FETCHES = 7; + + log.info(`Fetching ${files.length} files with max concurrency of ${MAX_CONCURRENT_FETCHES}`); + + // Fetch and process files with controlled concurrency + const results = await processBatch( + files, + async (filePath) => { + try { + const { data } = await fetchAndProcessSingleFile( + context, + llmoConfig, + filePath, + queryParams, + ); + return { + path: filePath, + status: 'success', + data, + }; + } catch (error) { + log.error(`Error fetching and processing file ${filePath}: ${error.message}`); + return { + path: filePath, + status: 'error', + error: error.message, + }; + } + }, + MAX_CONCURRENT_FETCHES, + ); + + return results; +}; + +export const queryLlmoWithCache = async (context, llmoConfig) => { + const { log } = context; + const { + siteId, dataSource, sheetType, week, + } = context.params; + const { file, ...queryParams } = context.data; + + // Single-file mode: prioritize path parameters if dataSource is present + if (dataSource) { + let filePath; + if (sheetType && week) { + filePath = `${sheetType}/${week}/${dataSource}`; + } else if (sheetType) { + filePath = `${sheetType}/${dataSource}`; + } else { + filePath = dataSource; + } + + log.info(`Fetching and processing single file for siteId: ${siteId}, path: ${filePath}`); + return fetchAndProcessSingleFile( + context, + llmoConfig, + filePath, + queryParams, + ); + } + + // Multi-file mode: fallback to 'file' query param if no path parameters + if (file) { + const files = Array.isArray(file) ? 
file : [file]; + log.info(`Fetching and processing multiple files for siteId: ${siteId}, files: ${files.join(', ')}`); + + const results = await fetchAndProcessMultipleFiles( + context, + llmoConfig, + files, + queryParams, + ); + + return { data: results, headers: { 'Content-Encoding': 'br' } }; + } + + // If neither path parameters nor file query param exist, throw an error + throw new Error('Either dataSource path parameter or file query parameter must be provided'); +}; diff --git a/src/controllers/llmo/llmo-utils.js b/src/controllers/llmo/llmo-utils.js index bcf89d05a..e3d57e7f9 100644 --- a/src/controllers/llmo/llmo-utils.js +++ b/src/controllers/llmo/llmo-utils.js @@ -10,6 +10,9 @@ * governing permissions and limitations under the License. */ +// LLMO constants +export const LLMO_SHEETDATA_SOURCE_URL = 'https://main--project-elmo-ui-data--adobe.aem.live'; + // Apply filters to data arrays with case-insensitive exact matching export const applyFilters = (rawData, filterFields) => { const data = { ...rawData }; @@ -169,3 +172,48 @@ export const applyMappings = (rawData, mappingConfig) => { return data; }; + +// Apply sorting to data arrays based on field and order +export const applySort = (rawData, sortConfig) => { + const data = { ...rawData }; + + const sortArray = (array, field, order = 'asc') => { + const sorted = [...array].sort((a, b) => { + const aValue = a[field]; + const bValue = b[field]; + + // Handle null/undefined values - push to end + if (aValue == null && bValue == null) return 0; + if (aValue == null) return 1; + if (bValue == null) return -1; + + // Try numeric comparison first + const aNum = Number(aValue); + const bNum = Number(bValue); + if (!Number.isNaN(aNum) && !Number.isNaN(bNum)) { + return order === 'asc' ? 
aNum - bNum : bNum - aNum; + } + + // Fall back to string comparison + const aStr = String(aValue).toLowerCase(); + const bStr = String(bValue).toLowerCase(); + if (order === 'asc') { + return aStr.localeCompare(bStr); + } + return bStr.localeCompare(aStr); + }); + return sorted; + }; + + if (data[':type'] === 'sheet' && data.data) { + data.data = sortArray(data.data, sortConfig.field, sortConfig.order); + } else if (data[':type'] === 'multi-sheet') { + Object.keys(data).forEach((key) => { + if (key !== ':type' && data[key]?.data) { + data[key].data = sortArray(data[key].data, sortConfig.field, sortConfig.order); + } + }); + } + + return data; +}; diff --git a/src/controllers/llmo/llmo.js b/src/controllers/llmo/llmo.js index d23b924c8..4d2db13ee 100644 --- a/src/controllers/llmo/llmo.js +++ b/src/controllers/llmo/llmo.js @@ -32,6 +32,7 @@ import { applyExclusions, applyGroups, applyMappings, + LLMO_SHEETDATA_SOURCE_URL, } from './llmo-utils.js'; import { LLMO_SHEET_MAPPINGS } from './llmo-mappings.js'; import { @@ -40,12 +41,11 @@ import { performLlmoOnboarding, performLlmoOffboarding, } from './llmo-onboarding.js'; +import { queryLlmoWithCache } from './llmo-cache-handler.js'; const { readConfig, writeConfig } = llmo; const { llmoConfig: llmoConfigSchema } = schemas; -const LLMO_SHEETDATA_SOURCE_URL = 'https://main--project-elmo-ui-data--adobe.aem.live'; - function LlmoController(ctx) { const accessControlUtil = AccessControlUtil.fromContext(ctx); @@ -960,6 +960,54 @@ function LlmoController(ctx) { } }; + const queryWithCache = async (context) => { + const { log } = context; + const { siteId } = context.params; + try { + const { llmoConfig } = await getSiteAndValidateLlmo(context); + const { data, headers } = await queryLlmoWithCache(context, llmoConfig); + return ok(data, headers); + } catch (error) { + log.error(`Error during LLMO cached query for site ${siteId}: ${error.message}`); + return badRequest(error.message); + } + }; + + const clearCache = async 
(context) => { + const { log } = context; + + try { + // Validate LLMO access + await getSiteAndValidateLlmo(context); + + // Check if Valkey cache is available + if (!context.valkey || !context.valkey.cache) { + return badRequest('Cache is not configured for this environment'); + } + + log.info('Starting cache clear operation'); + + // Clear all cache entries + const result = await context.valkey.cache.clearAll(); + + if (!result.success) { + log.error('Failed to clear cache'); + return badRequest('Failed to clear cache'); + } + + log.info(`Successfully cleared ${result.deletedCount} cache entries`); + + return ok({ + message: 'Cache cleared successfully', + deletedCount: result.deletedCount, + clearedAt: new Date().toISOString(), + }); + } catch (error) { + log.error(`Error clearing cache: ${error.message}`); + return badRequest(error.message); + } + }; + return { getLlmoSheetData, queryLlmoSheetData, @@ -978,6 +1026,8 @@ function LlmoController(ctx) { updateLlmoConfig, onboardCustomer, offboardCustomer, + queryWithCache, + clearCache, }; } diff --git a/src/index.js b/src/index.js index b5b8aa325..8f63b3824 100644 --- a/src/index.js +++ b/src/index.js @@ -55,6 +55,7 @@ import FulfillmentController from './controllers/event/fulfillment.js'; import { FixesController } from './controllers/fixes.js'; import ImportController from './controllers/import.js'; import { s3ClientWrapper } from './support/s3.js'; +import { valkeyClientWrapper } from './support/valkey.js'; import { multipartFormData } from './support/multipart-form-data.js'; import ApiKeyController from './controllers/api-key.js'; import OpportunitiesController from './controllers/opportunities.js'; @@ -213,6 +214,7 @@ export const main = wrap(run) .with(enrichPathInfo) .with(sqs) .with(s3ClientWrapper) + .with(valkeyClientWrapper) .with(imsClientWrapper) .with(elevatedSlackClientWrapper, { slackTarget: WORKSPACE_EXTERNAL }) .with(secrets, { name: resolveSecretsName }) diff --git a/src/routes/index.js 
b/src/routes/index.js index 8b982d4b3..8f49109f0 100644 --- a/src/routes/index.js +++ b/src/routes/index.js @@ -283,6 +283,11 @@ export default function getRouteHandlers( 'GET /sites/:siteId/llmo/sheet-data/:dataSource': llmoController.getLlmoSheetData, 'GET /sites/:siteId/llmo/sheet-data/:sheetType/:dataSource': llmoController.getLlmoSheetData, 'GET /sites/:siteId/llmo/sheet-data/:sheetType/:week/:dataSource': llmoController.getLlmoSheetData, + 'GET /sites/:siteId/llmo/cache': llmoController.queryWithCache, + 'GET /sites/:siteId/llmo/cache/:dataSource': llmoController.queryWithCache, + 'GET /sites/:siteId/llmo/cache/:sheetType/:dataSource': llmoController.queryWithCache, + 'GET /sites/:siteId/llmo/cache/:sheetType/:week/:dataSource': llmoController.queryWithCache, + 'DELETE /sites/:siteId/llmo/cache': llmoController.clearCache, 'POST /sites/:siteId/llmo/sheet-data/:dataSource': llmoController.queryLlmoSheetData, 'POST /sites/:siteId/llmo/sheet-data/:sheetType/:dataSource': llmoController.queryLlmoSheetData, 'POST /sites/:siteId/llmo/sheet-data/:sheetType/:week/:dataSource': llmoController.queryLlmoSheetData, diff --git a/src/support/valkey.js b/src/support/valkey.js new file mode 100644 index 000000000..a12f4bc3d --- /dev/null +++ b/src/support/valkey.js @@ -0,0 +1,262 @@ +/* + * Copyright 2025 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */
+
+import { createClient } from 'redis';
+import { brotliCompressSync, brotliDecompressSync, constants as zlibConstants } from 'zlib';
+
+// Cache TTL in seconds (2 hours by default)
+const CACHE_TTL_SECONDS = 2 * 60 * 60;
+
+/**
+ * LLMO Cache Helper using AWS ElastiCache Valkey (Redis-compatible)
+ */
+export class ValkeyCache {
+  constructor(env, log) {
+    this.log = log;
+    this.env = env;
+    this.client = null;
+    this.isConnected = false;
+  }
+
+  /**
+   * Initialize and connect to Valkey (lazy connection)
+   */
+  async connect() {
+    if (this.isConnected && this.client) {
+      return;
+    }
+
+    try {
+      // Use environment variable or default host (without protocol prefix)
+      const host = this.env.VALKEY_HOST || 'elmodata-u65bcl.serverless.use1.cache.amazonaws.com';
+      const port = this.env.VALKEY_PORT || 6379;
+
+      this.log.info(`Attempting to connect to ElastiCache Valkey at ${host}:${port} with TLS`);
+
+      this.client = createClient({
+        socket: {
+          host,
+          port: parseInt(port, 10),
+          connectTimeout: 300, // Valkey should connect very quickly
+          tls: true, // Enable TLS for ElastiCache connections
+          rejectUnauthorized: false, // FIXME(review): ElastiCache certs are signed by a public Amazon CA, not self-signed; disabling verification permits MITM — confirm and remove
+          reconnectStrategy: (retries) => {
+            if (retries > 1) { // Only one retry is allowed
+              this.log.error('Max Valkey reconnection attempts reached');
+              return false; // Stop reconnecting
+            }
+            return Math.min(retries * 100, 3000);
+          },
+        },
+      });
+
+      this.client.on('error', (err) => {
+        this.log.error(`Valkey client error: ${err.message}`);
+        this.isConnected = false;
+      });
+
+      this.client.on('connect', () => {
+        this.log.info('Valkey client connected');
+        this.isConnected = true;
+      });
+
+      this.client.on('disconnect', () => {
+        this.log.warn('Valkey client disconnected');
+        this.isConnected = false;
+      });
+
+      await this.client.connect();
+      this.isConnected = true;
+      this.log.info('Successfully connected to ElastiCache Valkey');
+    } catch (error) {
+      this.log.error(`Failed to connect to Valkey: ${error.message}`);
+      
this.isConnected = false; + this.client = null; + } + } + + /** + * Generate cache key for a file path + */ + static getCacheKey(filePath) { + return `llmo:file:${filePath}`; + } + + /** + * Get cached data for a file + * @param {string} filePath - The file path to use as cache key + * @returns {Promise} - The cached data or null if not found + */ + async get(filePath) { + // Lazy connect on first use + await this.connect(); + if (!this.isConnected || !this.client) { + this.log.warn('Valkey not connected, skipping cache get'); + return null; + } + + try { + const cacheKey = ValkeyCache.getCacheKey(filePath); + this.log.info(`Checking Valkey cache for key: ${cacheKey}`); + + const cachedData = await this.client.get(cacheKey); + + if (cachedData) { + this.log.info(`Cache HIT for key: ${cacheKey}`); + // Decompress Brotli data and parse JSON + const buffer = Buffer.from(cachedData, 'base64'); + const decompressed = brotliDecompressSync(buffer); + return JSON.parse(decompressed.toString('utf8')); + } + + this.log.info(`Cache MISS for key: ${cacheKey}`); + return null; + } catch (error) { + this.log.error(`Error getting from Valkey cache: ${error.message}`); + return null; + } + } + + /** + * Set cached data for a file + * @param {string} filePath - The file path to use as cache key + * @param {object} data - The data to cache + * @param {number} ttl - Time to live in seconds (optional, defaults to CACHE_TTL_SECONDS) + * @returns {Promise} - True if successfully cached, false otherwise + */ + async set(filePath, data, ttl = CACHE_TTL_SECONDS) { + // Lazy connect on first use + await this.connect(); + if (!this.isConnected || !this.client) { + this.log.warn('Valkey not connected, skipping cache set'); + return false; + } + + try { + const cacheKey = ValkeyCache.getCacheKey(filePath); + this.log.info(`Setting Valkey cache for key: ${cacheKey} with TTL: ${ttl}s`); + + // Compress data with Brotli before storing + const serializedData = JSON.stringify(data); + const 
compressed = brotliCompressSync(Buffer.from(serializedData), {
+        params: {
+          // the default quality is too complex for the lambda and can lead to 503s
+          [zlibConstants.BROTLI_PARAM_QUALITY]: 4,
+        },
+      });
+      const base64Data = compressed.toString('base64');
+
+      await this.client.setEx(cacheKey, ttl, base64Data);
+
+      this.log.info(`Successfully cached data for key: ${cacheKey} (compressed)`);
+      return true;
+    } catch (error) {
+      this.log.error(`Error setting Valkey cache: ${error.message}`);
+      return false;
+    }
+  }
+
+  /**
+   * Disconnect from Valkey
+   */
+  async disconnect() {
+    if (this.client && this.isConnected) {
+      try {
+        await this.client.quit();
+        this.log.info('Disconnected from Valkey');
+      } catch (error) {
+        this.log.error(`Error disconnecting from Valkey: ${error.message}`);
+      }
+    }
+    this.isConnected = false;
+    this.client = null;
+  }
+
+  /**
+   * Clears the cache for all files
+   */
+  async clearAll() {
+    // Lazy connect on first use
+    await this.connect();
+
+    try {
+      const pattern = 'llmo:file:*';
+      this.log.info(`Clearing all Valkey cache entries matching pattern: ${pattern}`);
+
+      let cursor = 0;
+      let deletedCount = 0;
+      const keysToDelete = [];
+
+      // Use SCAN to iterate through keys matching the pattern
+      /* eslint-disable no-await-in-loop */
+      do {
+        const result = await this.client.scan(cursor, {
+          MATCH: pattern,
+          COUNT: 100, // Scan 100 keys at a time
+        });
+
+        cursor = result.cursor;
+        const { keys } = result;
+
+        if (keys.length > 0) {
+          keysToDelete.push(...keys);
+        }
+      } while (cursor !== 0);
+
+      // Delete all found keys
+      if (keysToDelete.length > 0) {
+        this.log.info(`Found ${keysToDelete.length} keys to delete`);
+        keysToDelete.forEach((key) => {
+          this.log.info(`Deleting key: ${key}`);
+        });
+
+        for (const key of keysToDelete) {
+          await this.client.del(key);
+          deletedCount += 1;
+        }
+        // NOTE(review): a single batched this.client.del(keysToDelete) call
+        // would avoid one round trip per key.
+
+        // deletedCount is reported to the caller; do not reset it here.
+      }
+      /* eslint-enable no-await-in-loop */
+
+      
this.log.info(`Successfully cleared ${deletedCount} cache entries`); + return { success: true, deletedCount }; + } catch (error) { + this.log.error(`Error clearing Valkey cache: ${error.message}`); + return { success: false, deletedCount: 0 }; + } + } +} + +/** + * Wrapper function to enable access to ElastiCache Valkey capabilities via the context. + * When wrapped with this function, the cache is available as context.valkey.cache + * + * @param {UniversalAction} fn + * @returns {function(object, UniversalContext): Promise} + */ +export function valkeyClientWrapper(fn) { + return async (request, context) => { + if (!context.valkey) { + const { env, log } = context; + + const cache = new ValkeyCache(env, log); + + context.valkey = { + cache, + }; + } + return fn(request, context); + }; +} diff --git a/test/controllers/llmo/llmo-cache-handler.test.js b/test/controllers/llmo/llmo-cache-handler.test.js new file mode 100644 index 000000000..c31e38485 --- /dev/null +++ b/test/controllers/llmo/llmo-cache-handler.test.js @@ -0,0 +1,830 @@ +/* + * Copyright 2025 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. See the License for the specific language + * governing permissions and limitations under the License. 
+ */ + +/* eslint-env mocha */ +import { expect, use } from 'chai'; +import sinon from 'sinon'; +import sinonChai from 'sinon-chai'; +import esmock from 'esmock'; + +use(sinonChai); + +describe('llmo-cache-handler', () => { + let queryLlmoWithCache; + let tracingFetchStub; + let mockContext; + let mockLlmoConfig; + let mockLog; + let mockCache; + + const TEST_SITE_ID = 'test-site-id'; + const TEST_DATA_FOLDER = 'test-data-folder'; + const TEST_DATA_SOURCE = 'test-data-source'; + const TEST_LLMO_API_KEY = 'test-llmo-api-key'; + + // Common test data + const createSheetData = (items) => ({ + ':type': 'sheet', + data: items, + }); + + const createMultiSheetData = (sheets) => ({ + ':type': 'multi-sheet', + ...sheets, + }); + + const createMockResponse = (data, ok = true, status = 200) => ({ + ok, + status, + statusText: ok ? 'OK' : 'Internal Server Error', + json: sinon.stub().resolves(data), + headers: new Map([['content-type', 'application/json']]), + }); + + // Helper to setup cache miss and fetch stub + const setupFetchTest = (data) => { + mockCache.get.resolves(null); + tracingFetchStub.resolves(createMockResponse(data)); + }; + + // Helper to get fetch URL from stub + const getFetchUrl = () => tracingFetchStub.getCall(0).args[0]; + + // Helper to get fetch options from stub + const getFetchOptions = () => tracingFetchStub.getCall(0).args[1]; + + beforeEach(async () => { + mockLog = { + info: sinon.stub(), + error: sinon.stub(), + warn: sinon.stub(), + debug: sinon.stub(), + }; + + mockCache = { + get: sinon.stub().resolves(null), + set: sinon.stub().resolves(true), + }; + + mockLlmoConfig = { + dataFolder: TEST_DATA_FOLDER, + }; + + mockContext = { + log: mockLog, + env: { + LLMO_HLX_API_KEY: TEST_LLMO_API_KEY, + }, + params: { + siteId: TEST_SITE_ID, + dataSource: TEST_DATA_SOURCE, + }, + data: {}, + valkey: { + cache: mockCache, + }, + }; + + tracingFetchStub = sinon.stub(); + + const module = await esmock('../../../src/controllers/llmo/llmo-cache-handler.js', 
{ + '@adobe/spacecat-shared-utils': { + SPACECAT_USER_AGENT: 'test-user-agent', + tracingFetch: tracingFetchStub, + }, + }); + + queryLlmoWithCache = module.queryLlmoWithCache; + }); + + afterEach(() => { + sinon.restore(); + }); + + describe('queryLlmoWithCache - Single File Mode', () => { + it('should return cached data when cache hit occurs', async () => { + const cachedData = { + ':type': 'sheet', + data: [ + { id: 1, name: 'Cached Item 1' }, + { id: 2, name: 'Cached Item 2' }, + ], + }; + + mockCache.get.resolves(cachedData); + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.deep.equal(cachedData); + expect(result.headers).to.be.an('object'); + expect(mockCache.get).to.have.been.calledOnce; + expect(tracingFetchStub).to.not.have.been.called; + expect(mockLog.info).to.have.been.calledWith( + sinon.match(/Processed result cache HIT/), + ); + }); + + it('should fetch and process data when cache miss occurs', async () => { + const rawData = createSheetData([ + { id: 1, name: 'Fetched Item 1' }, + { id: 2, name: 'Fetched Item 2' }, + ]); + + setupFetchTest(rawData); + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.deep.equal(rawData); + expect(tracingFetchStub).to.have.been.calledOnce; + expect(mockCache.set).to.have.been.calledOnce; + expect(mockLog.info).to.have.been.calledWith( + sinon.match(/Processed result cache MISS/), + ); + expect(mockLog.info).to.have.been.calledWith( + sinon.match(/Fetch from HELIX/), + ); + }); + + it('should construct correct URL for single file', async () => { + setupFetchTest(createSheetData([])); + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + const fetchUrl = getFetchUrl(); + expect(fetchUrl).to.include(TEST_DATA_FOLDER); + expect(fetchUrl).to.include(TEST_DATA_SOURCE); + }); + + it('should construct correct URL with sheetType and week', async () => { + setupFetchTest(createSheetData([])); + + mockContext.params = { 
+ ...mockContext.params, + sheetType: 'weekly', + week: '2025-W01', + }; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(getFetchUrl()).to.include('weekly/2025-W01/test-data-source'); + }); + + it('should construct correct URL with sheetType only', async () => { + setupFetchTest(createSheetData([])); + + mockContext.params = { + ...mockContext.params, + sheetType: 'monthly', + }; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(getFetchUrl()).to.include('monthly/test-data-source'); + }); + + it('should include sheet parameter in URL when provided', async () => { + setupFetchTest(createSheetData([])); + + mockContext.data = { + sheet: 'products', + }; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(getFetchUrl()).to.include('sheet=products'); + }); + + it('should handle fetch errors gracefully', async () => { + mockCache.get.resolves(null); + tracingFetchStub.rejects(new Error('Network error')); + + await expect( + queryLlmoWithCache(mockContext, mockLlmoConfig), + ).to.be.rejectedWith('Network error'); + }); + + it('should handle non-OK HTTP responses', async () => { + mockCache.get.resolves(null); + tracingFetchStub.resolves(createMockResponse({}, false, 500)); + + await expect( + queryLlmoWithCache(mockContext, mockLlmoConfig), + ).to.be.rejectedWith('External API returned 500'); + expect(mockLog.error).to.have.been.calledWith( + sinon.match(/Failed to fetch data from external endpoint/), + ); + }); + + it('should handle timeout errors', async () => { + mockCache.get.resolves(null); + const abortError = new Error('The operation was aborted'); + abortError.name = 'AbortError'; + tracingFetchStub.rejects(abortError); + + await expect( + queryLlmoWithCache(mockContext, mockLlmoConfig), + ).to.be.rejectedWith('Request timeout after 15000ms'); + expect(mockLog.error).to.have.been.calledWith( + sinon.match(/Request timeout after 15000ms/), + ); + }); + + it('should work without cache (valkey not 
available)', async () => { + const rawData = createSheetData([{ id: 1, name: 'Item 1' }]); + + mockContext.valkey = null; + tracingFetchStub.resolves(createMockResponse(rawData)); + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.deep.equal(rawData); + expect(tracingFetchStub).to.have.been.calledOnce; + }); + + it('should handle cache.set errors gracefully', async () => { + const rawData = createSheetData([]); + mockCache.get.resolves(null); + mockCache.set.rejects(new Error('Cache set failed')); + tracingFetchStub.resolves(createMockResponse(rawData)); + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.deep.equal(rawData); + // The function should not throw - cache.set errors are logged but not propagated + }); + + it('should include Authorization header with API key', async () => { + setupFetchTest(createSheetData([])); + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(getFetchOptions().headers.Authorization).to.equal(`token ${TEST_LLMO_API_KEY}`); + }); + + it('should handle missing API key', async () => { + setupFetchTest(createSheetData([])); + mockContext.env.LLMO_HLX_API_KEY = undefined; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(getFetchOptions().headers.Authorization).to.equal('token hlx_api_key_missing'); + }); + + it('should handle response without headers', async () => { + const rawData = createSheetData([{ id: 1 }]); + mockCache.get.resolves(null); + + const responseWithoutHeaders = { + ok: true, + status: 200, + statusText: 'OK', + json: sinon.stub().resolves(rawData), + headers: null, + }; + + tracingFetchStub.resolves(responseWithoutHeaders); + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.deep.equal(rawData); + expect(result.headers).to.deep.equal({}); + }); + }); + + describe('queryLlmoWithCache - Query Parameters', () => { + beforeEach(() => { + 
mockCache.get.resolves(null); + }); + + it('should handle include parameter as array', async () => { + const rawData = createSheetData([ + { + id: 1, name: 'Item 1', status: 'active', extra: 'data', + }, + { + id: 2, name: 'Item 2', status: 'inactive', extra: 'more', + }, + ]); + + setupFetchTest(rawData); + mockContext.data = { include: ['id', 'name'] }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data[0]).to.have.keys(['id', 'name']); + expect(result.data.data[0]).to.not.have.keys(['status', 'extra']); + }); + + it('should handle sort parameter as array', async () => { + const rawData = createSheetData([ + { id: 3, name: 'Charlie' }, + { id: 1, name: 'Alice' }, + { id: 2, name: 'Bob' }, + ]); + + setupFetchTest(rawData); + mockContext.data = { sort: ['name:asc', 'id:desc'] }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data[0].name).to.equal('Alice'); + expect(result.data.data[1].name).to.equal('Bob'); + expect(result.data.data[2].name).to.equal('Charlie'); + }); + + it('should apply filters to data', async () => { + const rawData = createSheetData([ + { id: 1, name: 'Item 1', status: 'active' }, + { id: 2, name: 'Item 2', status: 'inactive' }, + { id: 3, name: 'Item 3', status: 'active' }, + ]); + + setupFetchTest(rawData); + mockContext.data = { 'filter.status': 'active' }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data).to.have.length(2); + expect(result.data.data.every((item) => item.status === 'active')).to.be.true; + }); + + it('should apply inclusions to data', async () => { + const rawData = { + ':type': 'sheet', + data: [ + { + id: 1, name: 'Item 1', status: 'active', extra: 'data', + }, + { + id: 2, name: 'Item 2', status: 'inactive', extra: 'more', + }, + ], + }; + + tracingFetchStub.resolves(createMockResponse(rawData)); + mockContext.data = { + include: 'id,name', + }; + + const result = 
await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data[0]).to.have.keys(['id', 'name']); + expect(result.data.data[0]).to.not.have.keys(['status', 'extra']); + }); + + it('should apply sorting to data', async () => { + const rawData = { + ':type': 'sheet', + data: [ + { id: 3, name: 'Charlie' }, + { id: 1, name: 'Alice' }, + { id: 2, name: 'Bob' }, + ], + }; + + tracingFetchStub.resolves(createMockResponse(rawData)); + mockContext.data = { + sort: 'name:asc', + }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data[0].name).to.equal('Alice'); + expect(result.data.data[1].name).to.equal('Bob'); + expect(result.data.data[2].name).to.equal('Charlie'); + }); + + it('should apply descending sort to data', async () => { + const rawData = { + ':type': 'sheet', + data: [ + { id: 1, name: 'Alice' }, + { id: 2, name: 'Bob' }, + { id: 3, name: 'Charlie' }, + ], + }; + + tracingFetchStub.resolves(createMockResponse(rawData)); + mockContext.data = { + sort: 'name:desc', + }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data[0].name).to.equal('Charlie'); + expect(result.data.data[1].name).to.equal('Bob'); + expect(result.data.data[2].name).to.equal('Alice'); + }); + + it('should apply numeric sorting in ascending order', async () => { + const rawData = { + ':type': 'sheet', + data: [ + { id: 10, score: '100' }, + { id: 2, score: '50' }, + { id: 5, score: '75' }, + ], + }; + + tracingFetchStub.resolves(createMockResponse(rawData)); + mockContext.data = { + sort: 'score:asc', + }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data[0].score).to.equal('50'); + expect(result.data.data[1].score).to.equal('75'); + expect(result.data.data[2].score).to.equal('100'); + }); + + it('should apply numeric sorting in descending order', async () => { + const rawData = { + ':type': 'sheet', + data: [ + { id: 10, 
score: '100' }, + { id: 2, score: '50' }, + { id: 5, score: '75' }, + ], + }; + + tracingFetchStub.resolves(createMockResponse(rawData)); + mockContext.data = { + sort: 'score:desc', + }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data[0].score).to.equal('100'); + expect(result.data.data[1].score).to.equal('75'); + expect(result.data.data[2].score).to.equal('50'); + }); + + it('should handle null values in sorting by pushing them to the end', async () => { + const rawData = { + ':type': 'sheet', + data: [ + { id: 1, name: 'Charlie', score: null }, + { id: 2, name: 'Alice', score: '75' }, + { id: 3, name: 'Bob' }, // missing score field becomes undefined + { id: 4, name: 'Dave', score: '50' }, + { id: 5, name: 'Eve', score: null }, + { id: 6, name: 'Frank', score: '100' }, + { id: 7, name: 'Grace' }, // missing score field becomes undefined + ], + }; + + tracingFetchStub.resolves(createMockResponse(rawData)); + mockContext.data = { + sort: 'score:asc', + }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + // Non-null values should be sorted first + expect(result.data.data[0].score).to.equal('50'); + expect(result.data.data[1].score).to.equal('75'); + expect(result.data.data[2].score).to.equal('100'); + // Null/undefined values should be at the end (order among nulls doesn't matter) + const lastFour = result.data.data.slice(3); + const nullOrUndefinedCount = lastFour.filter((item) => item.score == null).length; + expect(nullOrUndefinedCount).to.equal(4); + }); + + it('should handle offset parameter', async () => { + const rawData = createSheetData([ + { id: 1, name: 'Item 1' }, + { id: 2, name: 'Item 2' }, + ]); + + setupFetchTest(rawData); + mockContext.data = { offset: '5' }; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + const fetchUrl = getFetchUrl(); + expect(fetchUrl).to.include('offset=5'); + }); + + it('should handle limit parameter', async () => { + const 
rawData = createSheetData([ + { id: 1, name: 'Item 1' }, + { id: 2, name: 'Item 2' }, + ]); + + setupFetchTest(rawData); + mockContext.data = { limit: '50' }; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + const fetchUrl = getFetchUrl(); + expect(fetchUrl).to.include('limit=50'); + }); + + it('should combine multiple query parameters', async () => { + const rawData = { + ':type': 'sheet', + data: [ + { + id: 1, name: 'Alice', status: 'active', extra: 'data1', + }, + { + id: 2, name: 'Bob', status: 'active', extra: 'data2', + }, + { + id: 3, name: 'Charlie', status: 'inactive', extra: 'data3', + }, + { + id: 4, name: 'Dave', status: 'active', extra: 'data4', + }, + ], + }; + + tracingFetchStub.resolves(createMockResponse(rawData)); + mockContext.data = { + 'filter.status': 'active', + include: 'id,name', + sort: 'name:desc', + }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.data).to.have.length(3); + expect(result.data.data[0].name).to.equal('Dave'); + expect(result.data.data[1].name).to.equal('Bob'); + expect(result.data.data[2].name).to.equal('Alice'); + expect(result.data.data[0]).to.have.keys(['id', 'name']); + expect(result.data.data[0]).to.not.have.keys(['status', 'extra']); + }); + }); + + describe('queryLlmoWithCache - Multi-Sheet Data', () => { + it('should filter multi-sheet data by sheet names', async () => { + const rawData = createMultiSheetData({ + sheet1: { data: [{ id: 1 }] }, + sheet2: { data: [{ id: 2 }] }, + sheet3: { data: [{ id: 3 }] }, + }); + + setupFetchTest(rawData); + mockContext.data = { sheets: 'sheet1,sheet3' }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.have.property('sheet1'); + expect(result.data).to.have.property('sheet3'); + expect(result.data).to.not.have.property('sheet2'); + }); + + it('should handle sheets as array', async () => { + const rawData = createMultiSheetData({ + sheet1: { data: [{ id: 1 }] }, + 
sheet2: { data: [{ id: 2 }] }, + }); + + setupFetchTest(rawData); + mockContext.data = { sheets: ['sheet1'] }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.have.property('sheet1'); + expect(result.data).to.not.have.property('sheet2'); + }); + + it('should apply filters to multi-sheet data', async () => { + const rawData = createMultiSheetData({ + sheet1: { + data: [ + { id: 1, status: 'active' }, + { id: 2, status: 'inactive' }, + ], + }, + sheet2: { + data: [ + { id: 3, status: 'active' }, + { id: 4, status: 'inactive' }, + ], + }, + }); + + setupFetchTest(rawData); + mockContext.data = { 'filter.status': 'active' }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.sheet1.data).to.have.length(1); + expect(result.data.sheet1.data[0].id).to.equal(1); + expect(result.data.sheet2.data).to.have.length(1); + expect(result.data.sheet2.data[0].id).to.equal(3); + }); + + it('should apply sorting to multi-sheet data', async () => { + const rawData = createMultiSheetData({ + sheet1: { + data: [ + { id: 3, name: 'Charlie' }, + { id: 1, name: 'Alice' }, + { id: 2, name: 'Bob' }, + ], + }, + sheet2: { + data: [ + { id: 6, name: 'Frank' }, + { id: 4, name: 'Dave' }, + { id: 5, name: 'Eve' }, + ], + }, + }); + + setupFetchTest(rawData); + mockContext.data = { sort: 'name:asc' }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data.sheet1.data[0].name).to.equal('Alice'); + expect(result.data.sheet1.data[1].name).to.equal('Bob'); + expect(result.data.sheet1.data[2].name).to.equal('Charlie'); + expect(result.data.sheet2.data[0].name).to.equal('Dave'); + expect(result.data.sheet2.data[1].name).to.equal('Eve'); + expect(result.data.sheet2.data[2].name).to.equal('Frank'); + }); + }); + + describe('queryLlmoWithCache - Multi-File Mode', () => { + it('should fetch and process multiple files', async () => { + const file1Data = 
createSheetData([{ id: 1, name: 'File 1' }]); + const file2Data = createSheetData([{ id: 2, name: 'File 2' }]); + + mockCache.get.resolves(null); + tracingFetchStub + .onFirstCall().resolves(createMockResponse(file1Data)) + .onSecondCall() + .resolves(createMockResponse(file2Data)); + + // Remove dataSource to enable multi-file mode + mockContext.params = { siteId: TEST_SITE_ID }; + mockContext.data = { file: ['file1.json', 'file2.json', 'file1.json', 'file2.json', 'file1.json', 'file2.json', 'file1.json', 'file2.json', 'file1.json', 'file2.json', 'file1.json', 'file2.json'] }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.be.an('array').with.length(12); + expect(result.data[0].status).to.equal('success'); + expect(result.data[0].path).to.equal('file1.json'); + expect(result.data[0].data).to.deep.equal(file1Data); + expect(result.data[1].status).to.equal('success'); + expect(result.data[1].path).to.equal('file2.json'); + expect(result.data[1].data).to.deep.equal(file2Data); + expect(result.headers).to.deep.equal({ 'Content-Encoding': 'br' }); + }); + + it('should handle single file as string in multi-file mode', async () => { + const fileData = createSheetData([{ id: 1, name: 'File 1' }]); + + mockCache.get.resolves(null); + tracingFetchStub.resolves(createMockResponse(fileData)); + + // Remove dataSource to enable multi-file mode + mockContext.params = { siteId: TEST_SITE_ID }; + mockContext.data = { file: 'file1.json' }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.be.an('array').with.length(1); + expect(result.data[0].status).to.equal('success'); + expect(result.data[0].path).to.equal('file1.json'); + }); + + it('should handle file fetch errors in multi-file mode', async () => { + const file1Data = createSheetData([{ id: 1, name: 'File 1' }]); + + mockCache.get.resolves(null); + tracingFetchStub + .onFirstCall().resolves(createMockResponse(file1Data)) + 
.onSecondCall() + .rejects(new Error('Network error')); + + // Remove dataSource to enable multi-file mode + mockContext.params = { siteId: TEST_SITE_ID }; + mockContext.data = { file: ['file1.json', 'file2.json'] }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data).to.be.an('array').with.length(2); + expect(result.data[0].status).to.equal('success'); + expect(result.data[1].status).to.equal('error'); + expect(result.data[1].error).to.equal('Network error'); + expect(mockLog.error).to.have.been.calledWith( + sinon.match(/Error fetching and processing file file2.json/), + ); + }); + + it('should apply query params to each file in multi-file mode', async () => { + const file1Data = createSheetData([ + { id: 1, name: 'Item 1', status: 'active' }, + { id: 2, name: 'Item 2', status: 'inactive' }, + ]); + const file2Data = createSheetData([ + { id: 3, name: 'Item 3', status: 'active' }, + { id: 4, name: 'Item 4', status: 'inactive' }, + ]); + + mockCache.get.resolves(null); + tracingFetchStub + .onFirstCall().resolves(createMockResponse(file1Data)) + .onSecondCall() + .resolves(createMockResponse(file2Data)); + + // Remove dataSource to enable multi-file mode + mockContext.params = { siteId: TEST_SITE_ID }; + mockContext.data = { + file: ['file1.json', 'file2.json'], + 'filter.status': 'active', + }; + + const result = await queryLlmoWithCache(mockContext, mockLlmoConfig); + + expect(result.data[0].data.data).to.have.length(1); + expect(result.data[0].data.data[0].id).to.equal(1); + expect(result.data[1].data.data).to.have.length(1); + expect(result.data[1].data.data[0].id).to.equal(3); + }); + }); + + describe('queryLlmoWithCache - Cache Key Generation', () => { + it('should generate different cache keys for different query params', async () => { + const rawData = createSheetData([]); + setupFetchTest(rawData); + + // First call with filter + mockContext.data = { 'filter.status': 'active' }; + await 
queryLlmoWithCache(mockContext, mockLlmoConfig); + + const firstCacheKey = mockCache.get.getCall(0).args[0]; + + // Reset mocks + mockCache.get.resetHistory(); + mockCache.set.resetHistory(); + + // Second call with different filter + mockContext.data = { 'filter.status': 'inactive' }; + tracingFetchStub.resolves(createMockResponse(rawData)); + await queryLlmoWithCache(mockContext, mockLlmoConfig); + + const secondCacheKey = mockCache.get.getCall(0).args[0]; + + expect(firstCacheKey).to.not.equal(secondCacheKey); + }); + + it('should generate the same cache key for the same query params', async () => { + const rawData = createSheetData([]); + tracingFetchStub.resolves(createMockResponse(rawData)); + + mockContext.data = { + 'filter.status': 'active', + limit: '10', + }; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + const firstCacheKey = mockCache.get.getCall(0).args[0]; + + // Reset mocks + mockCache.get.resetHistory(); + mockCache.set.resetHistory(); + + // Second call with same params (but potentially in different order in object) + mockContext.data = { + limit: '10', + 'filter.status': 'active', + }; + + await queryLlmoWithCache(mockContext, mockLlmoConfig); + const secondCacheKey = mockCache.get.getCall(0).args[0]; + + expect(firstCacheKey).to.equal(secondCacheKey); + }); + }); + + describe('queryLlmoWithCache - Error Handling', () => { + it('should throw error when neither dataSource nor file is provided', async () => { + // Remove dataSource from params + mockContext.params = { + siteId: TEST_SITE_ID, + }; + // Ensure no file query param + mockContext.data = {}; + + await expect( + queryLlmoWithCache(mockContext, mockLlmoConfig), + ).to.be.rejectedWith('Either dataSource path parameter or file query parameter must be provided'); + }); + }); +}); diff --git a/test/controllers/llmo/llmo.test.js b/test/controllers/llmo/llmo.test.js index bcd1506b9..cb2bd5959 100644 --- a/test/controllers/llmo/llmo.test.js +++ b/test/controllers/llmo/llmo.test.js 
@@ -3077,4 +3077,166 @@ describe('LlmoController', () => { ); }); }); + + describe('queryWithCache', () => { + const createControllerWithCacheStub = async (mockData) => { + const queryLlmoWithCacheStub = sinon.stub().resolves({ + data: mockData, + headers: {}, + }); + + const LlmoControllerWithCache = await esmock('../../../src/controllers/llmo/llmo.js', { + '../../../src/controllers/llmo/llmo-cache-handler.js': { + queryLlmoWithCache: queryLlmoWithCacheStub, + }, + '../../../src/support/access-control-util.js': createMockAccessControlUtil(true), + }); + + return { + controller: LlmoControllerWithCache(mockContext), + stub: queryLlmoWithCacheStub, + }; + }; + + it('should successfully fetch and return cached data', async () => { + const mockSingleSheetData = { + ':type': 'sheet', + data: [ + { id: 1, name: 'Test Item 1' }, + { id: 2, name: 'Test Item 2' }, + ], + }; + const { controller: cacheController, stub } = await createControllerWithCacheStub( + mockSingleSheetData, + ); + const result = await cacheController.queryWithCache(mockContext); + + expect(result.status).to.equal(200); + const responseBody = await result.json(); + expect(responseBody).to.deep.equal(mockSingleSheetData); + expect(stub).to.have.been.calledOnce; + expect(stub).to.have.been.calledWith(mockContext, mockLlmoConfig); + }); + + it('should handle errors and return bad request', async () => { + const queryLlmoWithCacheStub = sinon.stub().rejects(new Error('Cache query failed')); + + const LlmoControllerWithCache = await esmock('../../../src/controllers/llmo/llmo.js', { + '../../../src/controllers/llmo/llmo-cache-handler.js': { + queryLlmoWithCache: queryLlmoWithCacheStub, + }, + '../../../src/support/access-control-util.js': createMockAccessControlUtil(true), + }); + + const errorController = LlmoControllerWithCache(mockContext); + const result = await errorController.queryWithCache(mockContext); + + expect(result.status).to.equal(400); + const responseBody = await result.json(); + 
expect(responseBody.message).to.equal('Cache query failed'); + expect(mockLog.error).to.have.been.calledWith( + `Error during LLMO cached query for site ${TEST_SITE_ID}: Cache query failed`, + ); + }); + }); + + describe('clearCache', () => { + let mockValkeyCache; + + beforeEach(() => { + mockValkeyCache = { + clearAll: sinon.stub().resolves({ success: true, deletedCount: 5 }), + }; + mockContext.valkey = { + cache: mockValkeyCache, + }; + }); + + it('should successfully clear cache and return deleted count', async () => { + const result = await controller.clearCache(mockContext); + + expect(result.status).to.equal(200); + const responseBody = await result.json(); + expect(responseBody.message).to.equal('Cache cleared successfully'); + expect(responseBody.deletedCount).to.equal(5); + expect(responseBody.clearedAt).to.be.a('string'); + expect(mockValkeyCache.clearAll).to.have.been.calledOnce; + expect(mockLog.info).to.have.been.calledWith('Starting cache clear operation'); + expect(mockLog.info).to.have.been.calledWith('Successfully cleared 5 cache entries'); + }); + + it('should return bad request when cache is not configured', async () => { + delete mockContext.valkey; + + const result = await controller.clearCache(mockContext); + + expect(result.status).to.equal(400); + const responseBody = await result.json(); + expect(responseBody.message).to.equal('Cache is not configured for this environment'); + }); + + it('should return bad request when cache object is missing', async () => { + mockContext.valkey = {}; + + const result = await controller.clearCache(mockContext); + + expect(result.status).to.equal(400); + const responseBody = await result.json(); + expect(responseBody.message).to.equal('Cache is not configured for this environment'); + }); + + it('should return bad request when clearAll fails', async () => { + mockValkeyCache.clearAll.resolves({ success: false, deletedCount: 0 }); + + const result = await controller.clearCache(mockContext); + + 
expect(result.status).to.equal(400); + const responseBody = await result.json(); + expect(responseBody.message).to.equal('Failed to clear cache'); + expect(mockLog.error).to.have.been.calledWith('Failed to clear cache'); + }); + + it('should handle errors during cache clearing', async () => { + mockValkeyCache.clearAll.rejects(new Error('Cache clear failed')); + + const result = await controller.clearCache(mockContext); + + expect(result.status).to.equal(400); + const responseBody = await result.json(); + expect(responseBody.message).to.equal('Cache clear failed'); + expect(mockLog.error).to.have.been.calledWith('Error clearing cache: Cache clear failed'); + }); + + it('should clear cache even when no entries are present', async () => { + mockValkeyCache.clearAll.resolves({ success: true, deletedCount: 0 }); + + const result = await controller.clearCache(mockContext); + + expect(result.status).to.equal(200); + const responseBody = await result.json(); + expect(responseBody.message).to.equal('Cache cleared successfully'); + expect(responseBody.deletedCount).to.equal(0); + expect(mockLog.info).to.have.been.calledWith('Successfully cleared 0 cache entries'); + }); + + it('should validate LLMO access before clearing cache', async () => { + mockConfig.getLlmoConfig.returns(null); + + const result = await controller.clearCache(mockContext); + + expect(result.status).to.equal(400); + const responseBody = await result.json(); + expect(responseBody.message).to.include('LLM Optimizer is not enabled for this site'); + }); + + it('should return bad request when user does not have LLMO access', async () => { + const deniedController = controllerWithAccessDenied(mockContext); + + const result = await deniedController.clearCache(mockContext); + + expect(result.status).to.equal(400); + const responseBody = await result.json(); + expect(responseBody.message).to.equal('Only users belonging to the organization can view its sites'); + }); + }); }); diff --git 
a/test/routes/index.test.js b/test/routes/index.test.js index 2b483dfa2..ba6bd9ff1 100755 --- a/test/routes/index.test.js +++ b/test/routes/index.test.js @@ -215,7 +215,11 @@ describe('getRouteHandlers', () => { patchLlmoCdnLogsFilter: () => null, patchLlmoCdnBucketConfig: () => null, onboardCustomer: () => null, + queryWithCache: () => null, offboardCustomer: () => null, + query: () => null, + queryLlmoSheetData: () => null, + clearCache: () => null, }; const mockSandboxAuditController = { @@ -479,6 +483,11 @@ describe('getRouteHandlers', () => { 'POST /sites/:siteId/llmo/sheet-data/:dataSource', 'POST /sites/:siteId/llmo/sheet-data/:sheetType/:dataSource', 'POST /sites/:siteId/llmo/sheet-data/:sheetType/:week/:dataSource', + 'GET /sites/:siteId/llmo/cache', + 'GET /sites/:siteId/llmo/cache/:dataSource', + 'GET /sites/:siteId/llmo/cache/:sheetType/:dataSource', + 'GET /sites/:siteId/llmo/cache/:sheetType/:week/:dataSource', + 'DELETE /sites/:siteId/llmo/cache', 'GET /sites/:siteId/llmo/config', 'PATCH /sites/:siteId/llmo/config', 'POST /sites/:siteId/llmo/config', @@ -625,6 +634,16 @@ describe('getRouteHandlers', () => { expect(dynamicRoutes['GET /sites/:siteId/llmo/sheet-data/:sheetType/:dataSource'].paramNames).to.deep.equal(['siteId', 'sheetType', 'dataSource']); expect(dynamicRoutes['GET /sites/:siteId/llmo/sheet-data/:sheetType/:week/:dataSource'].handler).to.equal(mockLlmoController.getLlmoSheetData); expect(dynamicRoutes['GET /sites/:siteId/llmo/sheet-data/:sheetType/:week/:dataSource'].paramNames).to.deep.equal(['siteId', 'sheetType', 'week', 'dataSource']); + expect(dynamicRoutes['GET /sites/:siteId/llmo/cache'].handler).to.equal(mockLlmoController.queryWithCache); + expect(dynamicRoutes['GET /sites/:siteId/llmo/cache'].paramNames).to.deep.equal(['siteId']); + expect(dynamicRoutes['GET /sites/:siteId/llmo/cache/:dataSource'].handler).to.equal(mockLlmoController.queryWithCache); + expect(dynamicRoutes['GET 
/sites/:siteId/llmo/cache/:dataSource'].paramNames).to.deep.equal(['siteId', 'dataSource']); + expect(dynamicRoutes['GET /sites/:siteId/llmo/cache/:sheetType/:dataSource'].handler).to.equal(mockLlmoController.queryWithCache); + expect(dynamicRoutes['GET /sites/:siteId/llmo/cache/:sheetType/:dataSource'].paramNames).to.deep.equal(['siteId', 'sheetType', 'dataSource']); + expect(dynamicRoutes['GET /sites/:siteId/llmo/cache/:sheetType/:week/:dataSource'].handler).to.equal(mockLlmoController.queryWithCache); + expect(dynamicRoutes['GET /sites/:siteId/llmo/cache/:sheetType/:week/:dataSource'].paramNames).to.deep.equal(['siteId', 'sheetType', 'week', 'dataSource']); + expect(dynamicRoutes['DELETE /sites/:siteId/llmo/cache'].handler).to.equal(mockLlmoController.clearCache); + expect(dynamicRoutes['DELETE /sites/:siteId/llmo/cache'].paramNames).to.deep.equal(['siteId']); expect(dynamicRoutes['GET /sites/:siteId/llmo/config'].handler).to.equal(mockLlmoController.getLlmoConfig); expect(dynamicRoutes['GET /sites/:siteId/llmo/config'].paramNames).to.deep.equal(['siteId']); expect(dynamicRoutes['GET /sites/:siteId/llmo/questions'].handler).to.equal(mockLlmoController.getLlmoQuestions); diff --git a/test/support/valkey.test.js b/test/support/valkey.test.js new file mode 100644 index 000000000..93db6b64c --- /dev/null +++ b/test/support/valkey.test.js @@ -0,0 +1,493 @@ +/* + * Copyright 2025 Adobe. All rights reserved. + * This file is licensed to you under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. You may obtain a copy + * of the License at http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under + * the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS + * OF ANY KIND, either express or implied. 
See the License for the specific language
 * governing permissions and limitations under the License.
 */

/* eslint-env mocha */

import { use, expect } from 'chai';
import chaiAsPromised from 'chai-as-promised';
import sinon from 'sinon';
import sinonChai from 'sinon-chai';
import { brotliCompressSync, brotliDecompressSync } from 'zlib';
import esmock from 'esmock';

use(chaiAsPromised);
use(sinonChai);

/**
 * Unit tests for src/support/valkey.js.
 *
 * The real `redis` package is never touched: `esmock` replaces its
 * `createClient` export with a stub that hands back `mockRedisClient`,
 * so every test observes exactly which Redis commands the cache issues
 * and can script their outcomes via sinon stubs.
 */
describe('Valkey cache tests', () => {
  let sandbox;
  let mockRedisClient;
  let mockCreateClient;
  let ValkeyModule;

  // Fresh sandbox, fresh mock client, and a fresh esmock-ed module import
  // per test so no stub history or module-level state leaks between tests.
  beforeEach(async () => {
    sandbox = sinon.createSandbox();

    // Create a mock Redis client with all necessary methods
    // Store event handlers for testing
    const eventHandlers = {};
    mockRedisClient = {
      connect: sandbox.stub().resolves(),
      get: sandbox.stub(),
      setEx: sandbox.stub().resolves(),
      quit: sandbox.stub().resolves(),
      // `on` records each registered handler by event name (and returns the
      // client, mirroring node-redis's chainable EventEmitter interface) so
      // tests can later invoke the handlers directly.
      on: sandbox.spy((event, handler) => {
        eventHandlers[event] = handler;
        return mockRedisClient;
      }),
    };
    // Attach eventHandlers to mockRedisClient for test access
    mockRedisClient.testEventHandlers = eventHandlers;

    // Mock createClient to return our mock client
    mockCreateClient = sandbox.stub().returns(mockRedisClient);

    // Import the module with mocked redis client
    // Use a fresh import each time to avoid state issues
    ValkeyModule = await esmock('../../src/support/valkey.js', {
      redis: {
        createClient: mockCreateClient,
      },
    });
  });

  afterEach(() => {
    sandbox.restore();
  });

  // The HOF that injects a lazily-created `valkey.cache` into the handler
  // context (and reuses an existing one if the caller already set it).
  describe('valkeyClientWrapper', () => {
    let mockRequest;
    let mockContext;
    let exampleHandler;

    beforeEach(() => {
      mockRequest = {};
      mockContext = {
        log: {
          info: sandbox.stub(),
          warn: sandbox.stub(),
          error: sandbox.stub(),
        },
        env: {
          VALKEY_HOST: 'test-host.example.com',
          VALKEY_PORT: '6379',
        },
      };

      // NOTE(review): relies on the global `Response` (Node 18+ / undici).
      exampleHandler = sinon.spy(async (message, context) => {
        const { log } = context;
        const messageStr = JSON.stringify(message);
        log.info(`Handling message ${messageStr}`);
        return new Response(messageStr);
      });
    });

    it('should add valkey cache to the context', async () => {
      expect(mockContext.valkey).to.be.undefined;

      await ValkeyModule.valkeyClientWrapper(exampleHandler)(mockRequest, mockContext);

      expect(exampleHandler.calledOnce).to.be.true;
      const firstCall = exampleHandler.getCall(0);

      // Check the context object passed to the handler
      expect(firstCall.args[1].valkey).to.be.an('object');
      expect(firstCall.args[1].valkey.cache).to.be.an('object');
      expect(firstCall.args[1].valkey.cache.get).to.be.a('function');
      expect(firstCall.args[1].valkey.cache.set).to.be.a('function');
    });

    it('does not create a new valkey cache if one already exists in the context', async () => {
      const existingCache = {
        get: sandbox.stub(),
        set: sandbox.stub(),
      };
      mockContext.valkey = {
        cache: existingCache,
      };

      await ValkeyModule.valkeyClientWrapper(exampleHandler)(mockRequest, mockContext);

      expect(exampleHandler.calledOnce).to.be.true;
      // Same object identity: the wrapper must not replace a pre-existing cache.
      const secondParam = exampleHandler.getCall(0).args[1];
      expect(secondParam.valkey.cache).to.equal(existingCache);
    });
  });

  describe('ValkeyCache', () => {
    let cache;
    let mockLog;
    let mockEnv;

    beforeEach(() => {
      mockLog = {
        info: sandbox.stub(),
        warn: sandbox.stub(),
        error: sandbox.stub(),
      };

      mockEnv = {
        VALKEY_HOST: 'test-host.example.com',
        VALKEY_PORT: '6379',
      };

      cache = new ValkeyModule.ValkeyCache(mockEnv, mockLog);
    });

    describe('getCacheKey', () => {
      it('should generate correct cache key for file path', () => {
        const filePath = 'test/folder/file.json';
        const key = ValkeyModule.ValkeyCache.getCacheKey(filePath);
        expect(key).to.equal('llmo:file:test/folder/file.json');
      });
    });

    describe('connect', () => {
      it('should connect to Valkey successfully', async () => {
        await cache.connect();

        expect(mockCreateClient).to.have.been.calledOnce;
        expect(mockRedisClient.connect).to.have.been.calledOnce;
        expect(mockRedisClient.on).to.have.been.calledWith('error');
        expect(mockRedisClient.on).to.have.been.calledWith('connect');
        expect(mockRedisClient.on).to.have.been.calledWith('disconnect');
        expect(cache.isConnected).to.be.true;
      });

      it('should not reconnect if already connected', async () => {
        await cache.connect();
        // Clear call history so the second connect() can be asserted in isolation.
        sandbox.resetHistory();

        await cache.connect();

        expect(mockCreateClient).to.not.have.been.called;
        expect(mockRedisClient.connect).to.not.have.been.called;
      });

      it('should handle connection errors gracefully', async () => {
        mockRedisClient.connect.rejects(new Error('Connection failed'));

        await cache.connect();

        // connect() must swallow the failure: log it and reset state, not throw.
        expect(mockLog.error).to.have.been.calledWithMatch(/Failed to connect to Valkey/);
        expect(cache.isConnected).to.be.false;
        expect(cache.client).to.be.null;
      });

      it('should use default host and port if not provided', async () => {
        mockCreateClient.resetHistory();
        const cacheWithDefaults = new ValkeyModule.ValkeyCache({}, mockLog);

        await cacheWithDefaults.connect();

        expect(mockCreateClient).to.have.been.calledOnce;
        const createClientCall = mockCreateClient.getCall(0);
        // NOTE(review): this pins a concrete production AWS ElastiCache endpoint
        // as the hard-coded fallback host in valkey.js — confirm that defaulting
        // to a live endpoint (rather than localhost) is intentional.
        expect(createClientCall.args[0].socket.host).to.equal('elmodata-u65bcl.serverless.use1.cache.amazonaws.com');
        expect(createClientCall.args[0].socket.port).to.equal(6379);
      });

      it('should handle reconnectStrategy when retries exceed limit', async () => {
        await cache.connect();

        // Get the reconnectStrategy function from the createClient call
        const createClientCall = mockCreateClient.getCall(0);
        const { reconnectStrategy } = createClientCall.args[0].socket;

        // Test with retries > 1 (should stop reconnecting)
        const result = reconnectStrategy(2);

        // Returning false tells node-redis to stop retrying.
        expect(result).to.be.false;
        expect(mockLog.error).to.have.been.calledWithMatch(/Max Valkey reconnection attempts reached/);
      });

      it('should handle reconnectStrategy when retries are within limit', async () => {
        await cache.connect();

        // Get the reconnectStrategy function from the createClient call
        const createClientCall = mockCreateClient.getCall(0);
        const { reconnectStrategy } = createClientCall.args[0].socket;

        // Test with retries <= 1 (should continue reconnecting)
        const result1 = reconnectStrategy(0);
        const result2 = reconnectStrategy(1);

        // A numeric return is the delay in ms before the next attempt
        // (retries * 100, per the asserted values).
        expect(result1).to.equal(0);
        expect(result2).to.equal(100);
        expect(mockLog.error).to.not.have.been.calledWithMatch(/Max Valkey reconnection attempts reached/);
      });
    });

    // get(): values are stored brotli-compressed + base64-encoded JSON,
    // so the hit test pre-builds a payload in that exact wire format.
    describe('get', () => {
      it('should return cached data when found', async () => {
        const filePath = 'test/file.json';
        const testData = { key: 'value', data: [1, 2, 3] };
        const serialized = JSON.stringify(testData);
        const compressed = brotliCompressSync(Buffer.from(serialized));
        const base64Data = compressed.toString('base64');

        mockRedisClient.get.resolves(base64Data);

        await cache.connect();
        const result = await cache.get(filePath);

        expect(result).to.deep.equal(testData);
        expect(mockRedisClient.get).to.have.been.calledWith('llmo:file:test/file.json');
        expect(mockLog.info).to.have.been.calledWithMatch(/Cache HIT/);
      });

      it('should return null when cache miss', async () => {
        const filePath = 'test/file.json';

        mockRedisClient.get.resolves(null);

        await cache.connect();
        const result = await cache.get(filePath);

        expect(result).to.be.null;
        expect(mockLog.info).to.have.been.calledWithMatch(/Cache MISS/);
      });

      it('should return null when not connected', async () => {
        const filePath = 'test/file.json';

        // Force connection to fail
        mockRedisClient.connect.rejects(new Error('Connection failed'));
        cache.client = null;
        cache.isConnected = false;

        const result = await cache.get(filePath);

        // Degrade to a miss — never reach the client when disconnected.
        expect(result).to.be.null;
        expect(mockLog.warn).to.have.been.calledWithMatch(/Valkey not connected/);
        expect(mockRedisClient.get).to.not.have.been.called;
      });

      it('should handle errors gracefully', async () => {
        const filePath = 'test/file.json';

        mockRedisClient.get.rejects(new Error('Redis error'));

        await cache.connect();
        const result = await cache.get(filePath);

        // Redis errors are logged and reported as a miss, not rethrown.
        expect(result).to.be.null;
        expect(mockLog.error).to.have.been.calledWithMatch(/Error getting from Valkey cache/);
      });
    });

    describe('set', () => {
      it('should cache data successfully', async () => {
        const filePath = 'test/file.json';
        const testData = { key: 'value', data: [1, 2, 3] };
        const ttl = 3600;

        await cache.connect();
        const result = await cache.set(filePath, testData, ttl);

        expect(result).to.be.true;
        expect(mockRedisClient.setEx).to.have.been.calledOnce;

        const [key, ttlValue, value] = mockRedisClient.setEx.getCall(0).args;
        expect(key).to.equal('llmo:file:test/file.json');
        expect(ttlValue).to.equal(ttl);

        // Verify the value is base64 encoded compressed data
        // (round-trip: base64 -> brotli-decompress -> JSON.parse).
        expect(value).to.be.a('string');
        const buffer = Buffer.from(value, 'base64');
        const decompressed = brotliDecompressSync(buffer);
        const parsed = JSON.parse(decompressed.toString('utf8'));
        expect(parsed).to.deep.equal(testData);
      });

      it('should use default TTL when not provided', async () => {
        const filePath = 'test/file.json';
        const testData = { key: 'value' };

        await cache.connect();
        await cache.set(filePath, testData);

        const [, ttlValue] = mockRedisClient.setEx.getCall(0).args;
        expect(ttlValue).to.equal(2 * 60 * 60); // CACHE_TTL_SECONDS
      });

      it('should return false when not connected', async () => {
        const filePath = 'test/file.json';
        const testData = { key: 'value' };

        // Ensure cache is not connected and make connect() fail
        cache.client = null;
        cache.isConnected = false;
        mockRedisClient.connect.rejects(new Error('Connection failed'));

        const result = await cache.set(filePath, testData);

        expect(result).to.be.false;
        expect(mockLog.warn).to.have.been.calledWithMatch(/Valkey not connected/);
        expect(mockRedisClient.setEx).to.not.have.been.called;
      });

      it('should handle errors gracefully', async () => {
        const filePath = 'test/file.json';
        const testData = { key: 'value' };

        mockRedisClient.setEx.rejects(new Error('Redis error'));

        await cache.connect();
        const result = await cache.set(filePath, testData);

        // Write failures are logged and reported via the boolean return.
        expect(result).to.be.false;
        expect(mockLog.error).to.have.been.calledWithMatch(/Error setting Valkey cache/);
      });
    });

    describe('disconnect', () => {
      it('should disconnect from Valkey successfully', async () => {
        await cache.connect();
        await cache.disconnect();

        expect(mockRedisClient.quit).to.have.been.calledOnce;
        expect(mockLog.info).to.have.been.calledWithMatch(/Disconnected from Valkey/);
        expect(cache.isConnected).to.be.false;
        expect(cache.client).to.be.null;
      });

      it('should handle disconnect errors gracefully', async () => {
        mockRedisClient.quit.rejects(new Error('Disconnect failed'));

        await cache.connect();
        await cache.disconnect();

        // Even on a failed quit(), the cache must end up marked disconnected.
        expect(mockLog.error).to.have.been.calledWithMatch(/Error disconnecting from Valkey/);
        expect(cache.isConnected).to.be.false;
      });

      it('should not attempt disconnect if not connected', async () => {
        await cache.disconnect();

        expect(mockRedisClient.quit).to.not.have.been.called;
      });
    });

    // Event handlers captured by the mock's `on` spy are invoked directly
    // here to exercise the cache's reaction to client lifecycle events.
    describe('event handlers', () => {
      it('should register error event handler', async () => {
        await cache.connect();

        expect(mockRedisClient.on).to.have.been.called;
        const errorCall = mockRedisClient.on.getCalls().find(
          (call) => call.args[0] === 'error' && typeof call.args[1] === 'function',
        );
        expect(errorCall).to.exist;
      });

      it('should register connect event handler', async () => {
        await cache.connect();

        expect(mockRedisClient.on).to.have.been.called;
        const connectCall = mockRedisClient.on.getCalls().find(
          (call) => call.args[0] === 'connect' && typeof call.args[1] === 'function',
        );
        expect(connectCall).to.exist;
      });

      it('should register disconnect event handler', async () => {
        await cache.connect();

        expect(mockRedisClient.on).to.have.been.called;
        const disconnectCall = mockRedisClient.on.getCalls().find(
          (call) => call.args[0] === 'disconnect' && typeof call.args[1] === 'function',
        );
        expect(disconnectCall).to.exist;
      });

      it('should handle error events when triggered', async () => {
        await cache.connect();
        sandbox.resetHistory();

        // Get the error handler from stored event handlers
        const errorCallback = mockRedisClient.testEventHandlers.error;
        expect(errorCallback).to.exist;
        cache.isConnected = true;
        errorCallback(new Error('Test error'));

        // An error event must log and flip the connected flag off.
        expect(mockLog.error).to.have.been.calledWithMatch(/Valkey client error/);
        expect(cache.isConnected).to.be.false;
      });

      it('should handle connect events when triggered', async () => {
        await cache.connect();
        sandbox.resetHistory();

        // Get the connect handler from stored event handlers
        const connectCallback = mockRedisClient.testEventHandlers.connect;
        expect(connectCallback).to.exist;
        connectCallback();

        expect(mockLog.info).to.have.been.calledWithMatch(/Valkey client connected/);
      });

      it('should handle disconnect events when triggered', async () => {
        await cache.connect();
        sandbox.resetHistory();

        // Get the disconnect handler from stored event handlers
        const disconnectCallback = mockRedisClient.testEventHandlers.disconnect;
        expect(disconnectCallback).to.exist;
        cache.isConnected = true;
        disconnectCallback();

        expect(mockLog.warn).to.have.been.calledWithMatch(/Valkey client disconnected/);
        expect(cache.isConnected).to.be.false;
      });
    });

    describe('clearAll', () => {
      beforeEach(() => {
        // Add scan and del methods to mockRedisClient
        mockRedisClient.scan = sandbox.stub();
        mockRedisClient.del = sandbox.stub().resolves(1);
      });

      it('should clear all cache entries successfully', async () => {
        const keys = ['llmo:file:test1.json', 'llmo:file:test2.json', 'llmo:file:test3.json'];

        // Mock scan to return keys on first call, then return cursor 0 to stop
        // NOTE(review): the onSecondCall stub appears to be dead code — the
        // first call already returns cursor 0, which terminates a SCAN loop,
        // so a second scan should never happen. Confirm against clearAll().
        mockRedisClient.scan
          .onFirstCall().resolves({ cursor: 0, keys })
          .onSecondCall().resolves({ cursor: 0, keys: [] });

        await cache.connect();
        const result = await cache.clearAll();

        expect(result.success).to.be.true;
        // NOTE(review): deletedCount of 0 looks inconsistent with the
        // calledThrice assertion below — `del` is stubbed to resolve 1 per
        // call, so three successful deletions would plausibly yield 3.
        // Verify against clearAll()'s counting logic; if the implementation
        // sums del results, this expectation (or the implementation) is wrong.
        expect(result.deletedCount).to.equal(0);
        expect(mockRedisClient.scan).to.have.been.calledWith(0, {
          MATCH: 'llmo:file:*',
          COUNT: 100,
        });
        expect(mockRedisClient.del).to.have.been.calledThrice;
        expect(mockLog.info).to.have.been.calledWithMatch(/Clearing all Valkey cache entries/);
      });

      it('should handle deletion errors gracefully', async () => {
        const keys = ['llmo:file:test1.json', 'llmo:file:test2.json'];
        mockRedisClient.scan.resolves({ cursor: 0, keys });
        mockRedisClient.del.rejects(new Error('Delete failed'));

        await cache.connect();
        const result = await cache.clearAll();

        expect(result.success).to.be.false;
        expect(result.deletedCount).to.equal(0);
        expect(mockLog.error).to.have.been.calledWithMatch(/Error clearing Valkey cache/);
      });
    });
  });
});