diff --git a/.changeset/proud-windows-sleep.md b/.changeset/proud-windows-sleep.md
new file mode 100644
index 0000000000..43ad8d5cd5
--- /dev/null
+++ b/.changeset/proud-windows-sleep.md
@@ -0,0 +1,5 @@
+---
+"@medusajs/caching": patch
+---
+
+chore(): Caching improvements
diff --git a/packages/modules/caching/integration-tests/__tests__/index.spec.ts b/packages/modules/caching/integration-tests/__tests__/index.spec.ts
index 898c055255..4fd585f885 100644
--- a/packages/modules/caching/integration-tests/__tests__/index.spec.ts
+++ b/packages/modules/caching/integration-tests/__tests__/index.spec.ts
@@ -48,6 +48,11 @@ jest.spyOn(MedusaModule, "getAllJoinerConfigs").mockReturnValue([
 
 moduleIntegrationTestRunner({
   moduleName: Modules.CACHING,
+  moduleOptions: {
+    in_memory: {
+      enable: true,
+    },
+  },
   testSuite: ({ service }) => {
     describe("Caching Module Service", () => {
       beforeEach(async () => {
diff --git a/packages/modules/caching/integration-tests/__tests__/invalidation.spec.ts b/packages/modules/caching/integration-tests/__tests__/invalidation.spec.ts
index 1354dc98d1..7ddc03af47 100644
--- a/packages/modules/caching/integration-tests/__tests__/invalidation.spec.ts
+++ b/packages/modules/caching/integration-tests/__tests__/invalidation.spec.ts
@@ -61,6 +61,11 @@ moduleIntegrationTestRunner({
   injectedDependencies: {
     [Modules.EVENT_BUS]: mockEventBus,
   },
+  moduleOptions: {
+    in_memory: {
+      enable: true,
+    },
+  },
   testSuite: ({ service }) => {
     describe("Cache Invalidation with Entity Relationships", () => {
       afterEach(async () => {
diff --git a/packages/modules/caching/integration-tests/__tests__/max-size.spec.ts b/packages/modules/caching/integration-tests/__tests__/max-size.spec.ts
new file mode 100644
index 0000000000..d6a5dec6ab
--- /dev/null
+++ b/packages/modules/caching/integration-tests/__tests__/max-size.spec.ts
@@ -0,0 +1,159 @@
+import { Modules } from "@medusajs/framework/utils"
+import { moduleIntegrationTestRunner } from "@medusajs/test-utils"
+import { ICachingModuleService } from "@medusajs/framework/types"
+import { MedusaModule } from "@medusajs/framework/modules-sdk"
+
+jest.setTimeout(10000)
+
+jest.spyOn(MedusaModule, "getAllJoinerConfigs").mockReturnValue([
+  {
+    schema: `
+      type Product {
+        id: ID
+        title: String
+        handle: String
+        status: String
+        type_id: String
+        collection_id: String
+        is_giftcard: Boolean
+        external_id: String
+        created_at: DateTime
+        updated_at: DateTime
+
+        variants: [ProductVariant]
+        sales_channels: [SalesChannel]
+      }
+
+      type ProductVariant {
+        id: ID
+        product_id: String
+        sku: String
+
+        prices: [Price]
+      }
+
+      type Price {
+        id: ID
+        amount: Float
+        currency_code: String
+      }
+
+      type SalesChannel {
+        id: ID
+        is_disabled: Boolean
+      }
+`,
+  },
+])
+
+moduleIntegrationTestRunner<ICachingModuleService>({
+  moduleName: Modules.CACHING,
+  moduleOptions: {
+    in_memory: {
+      enable: true,
+      maxSize: 1000,
+    },
+  },
+  testSuite: ({ service }) => {
+    describe("Memory Cache maxSize Integration", () => {
+      beforeEach(async () => {
+        await service.clear({ tags: ["*"] }).catch(() => {})
+      })
+
+      it("should respect maxSize limit and stop caching when exceeded", async () => {
+        // Create test data that will use most of the 1000 byte limit
+        const largeData = {
+          id: "large-test",
+          name: "Large Test Item",
+          description: "A".repeat(400), // 400 characters
+          metadata: { info: "B".repeat(300) }, // Another 300 characters
+        }
+
+        await service.set({
+          key: "first-key",
+          data: largeData,
+          tags: ["large-data"],
+        })
+
+        let result = await service.get({ key: "first-key" })
+        expect(result).toEqual(largeData)
+
+        const anotherLargeData = {
+          id: "another-large",
+          content: "C".repeat(500), // This pushes us over the 1000 byte limit, but it is accepted because usage is still below the limit at write time
+        }
+
+        await service.set({
+          key: "second-key",
+          data: anotherLargeData,
+          tags: ["overflow-data"],
+        })
+
+        result = await service.get({ key: "second-key" })
+        expect(result).toEqual(anotherLargeData)
+
+        const anotherLargeData2 = {
+          id: "another-large-2",
+          content: "D".repeat(500), // This is rejected because usage is already over the 1000 byte limit
+        }
+
+        await service.set({
+          key: "third-key",
+          data: anotherLargeData2,
+          tags: ["overflow-data-2"],
+        })
+
+        result = await service.get({ key: "third-key" })
+        expect(result).toBeNull()
+
+        const firstResult = await service.get({ key: "first-key" })
+        expect(firstResult).toEqual(largeData)
+      })
+
+      it("should allow small entries after reaching the limit", async () => {
+        const largeData = {
+          id: "large-test",
+          description: "A".repeat(800), // Use most of the 1000 byte limit
+        }
+
+        await service.set({
+          key: "large-key",
+          data: largeData,
+          tags: ["large"],
+        })
+
+        let result = await service.get({ key: "large-key" })
+        expect(result).toEqual(largeData)
+
+        await service.set({
+          key: "overflow-key",
+          data: { content: "B".repeat(500) },
+        })
+
+        result = await service.get({ key: "overflow-key" })
+        expect(result).toEqual({ content: "B".repeat(500) })
+
+        await service.set({
+          key: "overflow-key-2",
+          data: { content: "C".repeat(500) },
+        })
+
+        result = await service.get({ key: "overflow-key-2" })
+        expect(result).toBeNull()
+
+        // Remove the large entry to free up space
+        await service.clear({ key: "large-key" })
+
+        // Now small entries should work again
+        const smallData = { id: "small", name: "Small Item" }
+        await service.set({
+          key: "small-key",
+          data: smallData,
+        })
+
+        result = await service.get({ key: "small-key" })
+        expect(result).toEqual(smallData)
+      })
+    })
+  },
+})
diff --git a/packages/modules/caching/src/loaders/providers.ts b/packages/modules/caching/src/loaders/providers.ts
index e91b9a175d..c2d9382a01 100644
--- a/packages/modules/caching/src/loaders/providers.ts
+++ b/packages/modules/caching/src/loaders/providers.ts
@@ -49,21 +49,29 @@ export default async ({
   const strategy = DefaultCacheStrategy // Re enable custom strategy another time
   container.register("strategy", asValue(strategy))
 
-  // MemoryCachingProvider - default provider
-  container.register({
-    [CachingProviderRegistrationPrefix + MemoryCachingProvider.identifier]:
-      asFunction(() => new MemoryCachingProvider(), {
-        lifetime: Lifetime.SINGLETON,
-      }),
-  })
-  container.registerAdd(
-    CachingIdentifiersRegistrationName,
-    asValue(MemoryCachingProvider.identifier)
-  )
-  container.register(
-    CachingDefaultProvider,
-    asValue(MemoryCachingProvider.identifier)
-  )
+  const inMemoryOptions = options?.in_memory ?? {}
+  const { enable: isInMemoryEnabled, ...restInmemoryOptions } = inMemoryOptions
+
+  if (isInMemoryEnabled) {
+    // MemoryCachingProvider - default provider
+    container.register({
+      [CachingProviderRegistrationPrefix + MemoryCachingProvider.identifier]:
+        asFunction(
+          (cradle) => new MemoryCachingProvider(cradle, restInmemoryOptions),
+          {
+            lifetime: Lifetime.SINGLETON,
+          }
+        ),
+    })
+    container.registerAdd(
+      CachingIdentifiersRegistrationName,
+      asValue(MemoryCachingProvider.identifier)
+    )
+    container.register(
+      CachingDefaultProvider,
+      asValue(MemoryCachingProvider.identifier)
+    )
+  }
 
   // Load other providers
   await moduleProviderLoader({
@@ -85,10 +93,16 @@ export default async ({
   const logger = container.resolve(ContainerRegistrationKeys.LOGGER)
 
   if (!hasDefaultProvider) {
-    logger.warn(
-      `[caching-module]: No default caching provider defined. Using "${container.resolve(
-        CachingDefaultProvider
-      )}" as default.`
-    )
+    if (isInMemoryEnabled) {
+      logger.warn(
+        `[caching-module]: No default caching provider defined. Using "${container.resolve(
+          CachingDefaultProvider
+        )}" as default.`
+      )
+    } else {
+      throw new Error(
+        "[caching-module]: No providers have been configured and the built-in memory cache has not been enabled."
+      )
+    }
   }
 }
diff --git a/packages/modules/caching/src/providers/memory-cache.ts b/packages/modules/caching/src/providers/memory-cache.ts
index ef93483e04..de956f6a9b 100644
--- a/packages/modules/caching/src/providers/memory-cache.ts
+++ b/packages/modules/caching/src/providers/memory-cache.ts
@@ -1,40 +1,45 @@
 import NodeCache from "node-cache"
-import type { ICachingProviderService } from "@medusajs/framework/types"
+import type { ICachingProviderService, Logger } from "@medusajs/framework/types"
+import { MemoryCacheModuleOptions } from "@types"
 
-export interface MemoryCacheModuleOptions {
-  /**
-   * TTL in seconds
-   */
-  ttl?: number
-  /**
-   * Maximum number of keys to store (see node-cache documentation)
-   */
-  maxKeys?: number
-  /**
-   * Check period for expired keys in seconds (see node-cache documentation)
-   */
-  checkPeriod?: number
-  /**
-   * Use clones for cached data (see node-cache documentation)
-   */
-  useClones?: boolean
+const THREE_HUNDRED_MB = 300 * 1024 * 1024
+
+const formatBytes = (bytes: number): string => {
+  if (bytes < 1024) return `${bytes} B`
+  if (bytes < 1024 * 1024) return `${Math.round(bytes / 1024)} KB`
+  return `${Math.round(bytes / 1024 / 1024)} MB`
 }
 
 export class MemoryCachingProvider implements ICachingProviderService {
   static identifier = "cache-memory"
 
+  protected logger: Logger
   protected cacheClient: NodeCache
   protected tagIndex: Map<string, Set<string>> = new Map() // tag -> keys
   protected keyTags: Map<string, Set<string>> = new Map() // key -> tags
   protected entryOptions: Map<string, { autoInvalidate?: boolean }> = new Map() // key -> options
+  protected keySizes: Map<string, number> = new Map() // key -> approximate size in bytes
+  protected approximateMemoryUsage: number = 0
   protected options: MemoryCacheModuleOptions
+  protected maxSize: number
+  protected hasher: (key: string) => string
+
+  constructor(
+    { logger, hasher }: { logger?: Logger; hasher: (key: string) => string },
+    options: MemoryCacheModuleOptions = {}
+  ) {
+    this.logger = logger ?? (console as unknown as Logger)
+    const { maxSize, ...rest } = options
+    this.maxSize = maxSize ?? THREE_HUNDRED_MB
+
+    this.hasher = hasher
 
-  constructor() {
     this.options = {
       ttl: 3600,
       maxKeys: 25000,
      checkPeriod: 60, // 10 minutes
-      useClones: false, // Default to false for speed, true would be slower but safer. we can discuss
+      useClones: false, // Default to false for speed; true would be slower but safer.
+      ...rest,
     }
 
     const cacheClient = new NodeCache({
@@ -47,15 +52,31 @@ export class MemoryCachingProvider implements ICachingProviderService {
     this.cacheClient = cacheClient
 
     // Clean up tag indices when keys expire
-    this.cacheClient.on("expired", (key: string, value: any) => {
+    this.cacheClient.on("expired", (key: string) => {
       this.cleanupTagReferences(key)
     })
 
-    this.cacheClient.on("del", (key: string, value: any) => {
+    this.cacheClient.on("del", (key: string) => {
       this.cleanupTagReferences(key)
     })
   }
 
+  private calculateEntrySize(
+    key: string,
+    data: object,
+    tags?: string[]
+  ): number {
+    const dataSize = Buffer.byteLength(JSON.stringify(data), "utf8")
+    const keySize = Buffer.byteLength(key, "utf8")
+
+    let tagsSize = 0
+    if (tags?.length) {
+      tagsSize = Buffer.byteLength(JSON.stringify(tags), "utf8")
+    }
+
+    return dataSize + keySize + tagsSize
+  }
+
   private cleanupTagReferences(key: string): void {
     const tags = this.keyTags.get(key)
     if (tags) {
@@ -70,20 +91,30 @@ export class MemoryCachingProvider implements ICachingProviderService {
       })
       this.keyTags.delete(key)
     }
+
+    // Clean up memory tracking
+    const keySize = this.keySizes.get(key)
+    if (keySize) {
+      this.approximateMemoryUsage -= keySize
+      this.keySizes.delete(key)
+    }
+
     // Also clean up entry options
     this.entryOptions.delete(key)
   }
 
   async get({ key, tags }: { key?: string; tags?: string[] }): Promise<any> {
     if (key) {
-      return this.cacheClient.get(key) ?? null
+      const hashedKey = this.hasher(key)
+      return this.cacheClient.get(hashedKey) ?? null
     }
 
-    if (tags && tags.length) {
+    if (tags?.length) {
       const allKeys = new Set<string>()
 
       tags.forEach((tag) => {
-        const keysForTag = this.tagIndex.get(tag)
+        const hashedTag = this.hasher(tag)
+        const keysForTag = this.tagIndex.get(hashedTag)
         if (keysForTag) {
           keysForTag.forEach((key) => allKeys.add(key))
         }
@@ -122,24 +153,39 @@ export class MemoryCachingProvider implements ICachingProviderService {
       autoInvalidate?: boolean
     }
   }): Promise<void> {
+    // Only reject if we're already over the limit
+    if (this.approximateMemoryUsage > this.maxSize) {
+      this.logger.warn(
+        `Cache is full. Current usage: ${formatBytes(
+          this.approximateMemoryUsage
+        )}, Max: ${formatBytes(this.maxSize)}. Skipping cache entry.`
+      )
+      return
+    }
+
+    const hashedKey = this.hasher(key)
+    const hashedTags = tags?.map((tag) => this.hasher(tag))
+
+    const totalSize = this.calculateEntrySize(hashedKey, data, hashedTags)
+
     // Set the cache entry
     const effectiveTTL = ttl ?? this.options.ttl ?? 3600
-    this.cacheClient.set(key, data, effectiveTTL)
+    this.cacheClient.set(hashedKey, data, effectiveTTL)
 
     // Handle tags if provided
-    if (tags && tags.length) {
+    if (hashedTags?.length) {
       // Clean up any existing tag references for this key
-      this.cleanupTagReferences(key)
+      this.cleanupTagReferences(hashedKey)
 
-      const tagSet = new Set(tags)
-      this.keyTags.set(key, tagSet)
+      const tagSet = new Set(hashedTags)
+      this.keyTags.set(hashedKey, tagSet)
 
       // Add this key to each tag's index
-      tags.forEach((tag) => {
+      hashedTags.forEach((tag) => {
         if (!this.tagIndex.has(tag)) {
           this.tagIndex.set(tag, new Set())
         }
-        this.tagIndex.get(tag)!.add(key)
+        this.tagIndex.get(tag)!.add(hashedKey)
       })
     }
 
@@ -148,8 +194,14 @@ export class MemoryCachingProvider implements ICachingProviderService {
       Object.keys(options ?? {}).length &&
       !Object.values(options ?? {}).every((value) => value === undefined)
     ) {
-      this.entryOptions.set(key, options!)
+      this.entryOptions.set(hashedKey, options!)
     }
+
+    // Track memory usage
+    const existingSize = this.keySizes.get(hashedKey) || 0
+    this.approximateMemoryUsage =
+      this.approximateMemoryUsage - existingSize + totalSize
+    this.keySizes.set(hashedKey, totalSize)
   }
 
   async clear({
@@ -164,13 +216,17 @@ export class MemoryCachingProvider implements ICachingProviderService {
     }
   }): Promise<void> {
     if (key) {
-      this.cacheClient.del(key)
+      const hashedKey = this.hasher(key)
+      this.cacheClient.del(hashedKey)
       return
     }
 
-    if (tags && tags.length) {
+    if (tags?.length) {
       // Handle wildcard tag to clear all cache data
-      if (tags.includes("*")) {
+      if (tags?.includes("*")) {
         this.cacheClient.flushAll()
         this.tagIndex.clear()
         this.keyTags.clear()
+        // Also reset size tracking; flushAll does not emit per-key "del"
+        // events, so flushed entries would otherwise keep counting against maxSize
+        this.keySizes.clear()
+        this.approximateMemoryUsage = 0
@@ -178,9 +234,10 @@ export class MemoryCachingProvider implements ICachingProviderService {
         return
       }
 
+      const hashedTags = tags.map((tag) => this.hasher(tag))
       const allKeys = new Set<string>()
 
-      tags.forEach((tag) => {
+      hashedTags.forEach((tag) => {
         const keysForTag = this.tagIndex.get(tag)
         if (keysForTag) {
           keysForTag.forEach((key) => allKeys.add(key))
         }
diff --git a/packages/modules/caching/src/types/index.ts b/packages/modules/caching/src/types/index.ts
index 5773f18f50..99ba914ce0 100644
--- a/packages/modules/caching/src/types/index.ts
+++ b/packages/modules/caching/src/types/index.ts
@@ -23,6 +23,31 @@ export type InjectedDependencies = {
   [Modules.EVENT_BUS]: IEventBusModuleService
 }
 
+export interface MemoryCacheModuleOptions {
+  /**
+   * TTL in seconds
+   */
+  ttl?: number
+  /**
+   * Maximum number of keys to store (see node-cache documentation)
+   */
+  maxKeys?: number
+  /**
+   * Check period for expired keys in seconds (see node-cache documentation)
+   */
+  checkPeriod?: number
+  /**
+   * Use clones for cached data (see node-cache documentation)
+   */
+  useClones?: boolean
+  /**
+   * Maximum size of the cache in bytes (default 300 MB).
+   * The size is an approximation: the entry that first pushes usage over the
+   * limit is still cached, but subsequent entries are rejected until space is freed.
+   */
+  maxSize?: number
+}
+
 export type CachingModuleOptions = Partial & {
   /**
    * The strategy to be used. Default to the inbuilt default strategy.
   */
@@ -32,6 +57,15 @@ export type CachingModuleOptions = Partial & {
    * Time to keep data in cache (in seconds)
    */
   ttl?: number
+
+  /**
+   * Enable and configure the built-in memory cache
+   * @private
+   */
+  in_memory?: MemoryCacheModuleOptions & {
+    enable?: boolean
+  }
+
   /**
    * Providers to be registered
    */
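
Usage note for reviewers: the sketch below shows how the new `in_memory` option is intended to be passed from an application's config. The `resolve` path and the overall `modules` shape are illustrative assumptions based on the usual Medusa config conventions, not something this PR adds; only the `options` payload reflects the types introduced above.

```ts
// medusa-config.ts (sketch; assumes the standard Medusa module registration shape)
module.exports = {
  // ...rest of the config
  modules: [
    {
      // Illustrative resolve path; use whatever key/path your setup
      // registers the caching module under.
      resolve: "@medusajs/caching",
      options: {
        // Without `in_memory.enable: true` (or another configured provider),
        // the loader now throws at startup instead of silently falling back
        // to the memory provider.
        in_memory: {
          enable: true,
          ttl: 3600, // seconds
          maxSize: 50 * 1024 * 1024, // ~50 MB budget; approximate, default is 300 MB
        },
      },
    },
  ],
}
```

Note the semantics of `maxSize`: the check happens before each write, so the entry that crosses the budget is still stored, and later writes are skipped until entries expire or are cleared.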