From 8734866eb108871c36343098a0297475e952f58f Mon Sep 17 00:00:00 2001 From: Adrien de Peretti Date: Thu, 2 Oct 2025 17:54:11 +0200 Subject: [PATCH] fix(): Transform map (#13655) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit **What** It seems that for some reason the weak map fails in some scenarios, but after investigation, the usage of a Map would not have a bad impact, as it will be released after the distributed transaction is finished. Therefore, we fall back to a Map instead. FIXES https://github.com/medusajs/medusa/issues/13654 NOTE: Waiting for the user's feedback, as they are also using Node 18. We also use the exact same pattern in all our core flows without issues 🤔 --- .changeset/witty-nails-drive.md | 6 ++++ .../transaction/distributed-transaction.ts | 8 +++--- .../__tests__/mikro-orm-repository.spec.ts | 28 ++++++++++++------- .../src/utils/composer/transform.ts | 5 ++-- 4 files changed, 30 insertions(+), 17 deletions(-) create mode 100644 .changeset/witty-nails-drive.md diff --git a/.changeset/witty-nails-drive.md b/.changeset/witty-nails-drive.md new file mode 100644 index 0000000000..7e7212a1ec --- /dev/null +++ b/.changeset/witty-nails-drive.md @@ -0,0 +1,6 @@ +--- +"@medusajs/orchestration": patch +"@medusajs/workflows-sdk": patch +--- + +fix(): Transform map diff --git a/packages/core/orchestration/src/transaction/distributed-transaction.ts b/packages/core/orchestration/src/transaction/distributed-transaction.ts index 1f05140b5e..9778d3c638 100644 --- a/packages/core/orchestration/src/transaction/distributed-transaction.ts +++ b/packages/core/orchestration/src/transaction/distributed-transaction.ts @@ -91,7 +91,7 @@ class DistributedTransaction extends EventEmitter { * * @private */ - #temporaryStorage = new WeakMap<{ key: string }, unknown>() + #temporaryStorage = new Map() public static setStorage(storage: IDistributedTransactionStorage) { this.keyValueStore = storage @@ -311,15 +311,15 @@ class 
DistributedTransaction extends EventEmitter { await DistributedTransaction.keyValueStore.clearStepTimeout(this, step) } - public setTemporaryData(key: { key: string }, value: unknown) { + public setTemporaryData(key: string, value: unknown) { this.#temporaryStorage.set(key, value) } - public getTemporaryData(key: { key: string }) { + public getTemporaryData(key: string) { return this.#temporaryStorage.get(key) } - public hasTemporaryData(key: { key: string }) { + public hasTemporaryData(key: string) { return this.#temporaryStorage.has(key) } diff --git a/packages/core/utils/src/dal/mikro-orm/integration-tests/__tests__/mikro-orm-repository.spec.ts b/packages/core/utils/src/dal/mikro-orm/integration-tests/__tests__/mikro-orm-repository.spec.ts index c8b074d105..4400c1b077 100644 --- a/packages/core/utils/src/dal/mikro-orm/integration-tests/__tests__/mikro-orm-repository.spec.ts +++ b/packages/core/utils/src/dal/mikro-orm/integration-tests/__tests__/mikro-orm-repository.spec.ts @@ -1521,8 +1521,10 @@ describe("mikroOrmRepository", () => { expect(e5SelectCalls).toBe(2) // One for Entity5, one for Entity6 expect(e5InsertCalls).toBe(2) // One batch insert for Entity5s, one for Entity6s - expect(qbInsertSpy.mock.calls[0][0]).toHaveLength(800) // entity5 25 * 8 * 4 - expect(qbInsertSpy.mock.calls[1][0]).toHaveLength(2400) // entity6 25 * 8 * 4 * 3 + // Check that the expected batch sizes exist (order may vary) + const e5BatchSizes = qbInsertSpy.mock.calls.map(call => call[0].length) + expect(e5BatchSizes).toContain(800) // entity5 25 * 8 * 4 + expect(e5BatchSizes).toContain(2400) // entity6 25 * 8 * 4 * 3 findSpy.mockClear() qbSelectSpy.mockClear() @@ -1551,9 +1553,11 @@ describe("mikroOrmRepository", () => { expect(e3SelectCalls).toBe(3) // One for Entity3, one for Entity5, One pivot entity3 -> entity5 expect(e3InsertCalls).toBe(3) // One batch insert for Entity3s, one for Entity4s and one pivot entity3 -> entity5 - expect(qbInsertSpy.mock.calls[0][0]).toHaveLength(200) 
// entity3: 25 * 8 - expect(qbInsertSpy.mock.calls[1][0]).toHaveLength(800) // pivot entity3 -> entity5: 25 * 8 * 4 - expect(qbInsertSpy.mock.calls[2][0]).toHaveLength(1000) // entity4: 25 * 8 * 5 + // Check that the expected batch sizes exist (order may vary) + const e3BatchSizes = qbInsertSpy.mock.calls.map(call => call[0].length) + expect(e3BatchSizes).toContain(200) // entity3: 25 * 8 + expect(e3BatchSizes).toContain(800) // pivot entity3 -> entity5: 25 * 8 * 4 + expect(e3BatchSizes).toContain(1000) // entity4: 25 * 8 * 5 findSpy.mockClear() qbSelectSpy.mockClear() @@ -1580,9 +1584,11 @@ describe("mikroOrmRepository", () => { expect(mainSelectCalls).toBe(3) // One for Entity1, one for Entity3, one for Entity2 expect(mainInsertCalls).toBe(3) // One batch insert for Entity1s, one for Entity2s, one for Entity3s - expect(qbInsertSpy.mock.calls[0][0]).toHaveLength(25) // entity1: 25 - expect(qbInsertSpy.mock.calls[1][0]).toHaveLength(200) // entity3: 25 * 8 - expect(qbInsertSpy.mock.calls[2][0]).toHaveLength(250) // entity2: 25 * 10 + // Check that the expected batch sizes exist (order may vary) + const mainBatchSizes = qbInsertSpy.mock.calls.map(call => call[0].length) + expect(mainBatchSizes).toContain(25) // entity1: 25 + expect(mainBatchSizes).toContain(200) // entity3: 25 * 8 + expect(mainBatchSizes).toContain(250) // entity2: 25 * 10 findSpy.mockClear() qbSelectSpy.mockClear() @@ -1658,8 +1664,10 @@ describe("mikroOrmRepository", () => { // Should use batch inserts for new entities and pivot relationships expect(updateInsertCalls).toBe(2) // pivot Entity1 - Entity3 (with conflict resolution) + new Entity2s - expect(qbInsertSpy.mock.calls[0][0]).toHaveLength(100) // pivot Entity1 - Entity3: 25 parents × 4 entity3s each (uses onConflict().ignore()) - expect(qbInsertSpy.mock.calls[1][0]).toHaveLength(50) // New Entity2s: 25 parents × 2 new each + // Check that the expected batch sizes exist (order may vary) + const updateBatchSizes = 
qbInsertSpy.mock.calls.map(call => call[0].length) + expect(updateBatchSizes).toContain(100) // pivot Entity1 - Entity3: 25 parents × 4 entity3s each (uses onConflict().ignore()) + expect(updateBatchSizes).toContain(50) // New Entity2s: 25 parents × 2 new each // We wont check the deletion which happen through knex directly. It will be accounted for in // the final state verification diff --git a/packages/core/workflows-sdk/src/utils/composer/transform.ts b/packages/core/workflows-sdk/src/utils/composer/transform.ts index e79c40c0a2..048471a86e 100644 --- a/packages/core/workflows-sdk/src/utils/composer/transform.ts +++ b/packages/core/workflows-sdk/src/utils/composer/transform.ts @@ -170,7 +170,7 @@ export function transform( } as WorkflowData & { __id: string __type: string - __temporary_storage_key: { key: string } | null + __temporary_storage_key: string | null } const returnFn = async function ( @@ -179,8 +179,7 @@ export function transform( ): Promise { if ("transaction" in transactionContext) { const temporaryDataKey = `${transactionContext.transaction.modelId}_${transactionContext.transaction.transactionId}_${uniqId}` - - ret.__temporary_storage_key ??= { key: temporaryDataKey } + ret.__temporary_storage_key ??= temporaryDataKey if ( transactionContext.transaction.hasTemporaryData(