chore(): improve cart operations + Mikro orm 6.4.16 (#13712)

* chore(): Mikro orm 6.4.16

* Create small-ghosts-draw.md

* update config

* update config

* fix delete

* update config

* update workflows

* order improvements

* test pricing query

* test pricing query

* configurable connection options

* configurable connection options

* configurable connection options

* Update packages/modules/pricing/src/models/price.ts

Co-authored-by: Oli Juhl <59018053+olivermrbl@users.noreply.github.com>

---------

Co-authored-by: Oli Juhl <59018053+olivermrbl@users.noreply.github.com>
Adrien de Peretti
2025-10-10 08:58:19 +02:00
committed by GitHub
parent 76bf364440
commit c54c5ed6de
23 changed files with 481 additions and 288 deletions

View File

@@ -0,0 +1,9 @@
---
"@medusajs/deps": patch
"@medusajs/utils": patch
"@medusajs/cart": patch
"@medusajs/pricing": patch
"@medusajs/core-flows": patch
---
chore(): improve cart operations + Mikro orm 6.4.16

View File

@@ -4515,7 +4515,7 @@ medusaIntegrationTestRunner({
expect(
// @ts-ignore
transaction.context.invoke["use-remote-query"].output.output
transaction.context.invoke["fetch-cart"].output.output.data
.shipping_address.metadata
).toEqual({
testing_tax: true,

View File

@@ -18,7 +18,7 @@ export interface GetVariantsStepInput {
export const getVariantsStepId = "get-variants"
/**
* This step retrieves variants matching the specified filters.
*
*
* @example
* const data = getVariantsStep({
* filter: {

View File

@@ -2,12 +2,14 @@ import {
AddItemAdjustmentAction,
AddShippingMethodAdjustment,
ComputeActions,
IPromotionModuleService,
PromotionDTO,
RemoveItemAdjustmentAction,
RemoveShippingMethodAdjustment,
} from "@medusajs/framework/types"
import { ComputedActions, Modules } from "@medusajs/framework/utils"
import {
ComputedActions,
ContainerRegistrationKeys,
} from "@medusajs/framework/utils"
import { StepResponse, createStep } from "@medusajs/framework/workflows-sdk"
/**
@@ -102,9 +104,7 @@ export const prepareAdjustmentsFromPromotionActionsStep = createStep(
data: PrepareAdjustmentsFromPromotionActionsStepInput,
{ container }
) => {
const promotionModuleService: IPromotionModuleService = container.resolve(
Modules.PROMOTION
)
const query = container.resolve(ContainerRegistrationKeys.QUERY)
const { actions = [] } = data
@@ -118,46 +118,59 @@ export const prepareAdjustmentsFromPromotionActionsStep = createStep(
} as PrepareAdjustmentsFromPromotionActionsStepOutput)
}
const promotions = await promotionModuleService.listPromotions(
{ code: actions.map((a) => a.code) },
{ select: ["id", "code"] }
const { data: promotions } = await query.graph(
{
entity: "promotion",
fields: ["id", "code"],
filters: { code: actions.map((a) => a.code) },
},
{ cache: { enable: true } }
)
const promotionsMap = new Map<string, PromotionDTO>(
promotions.map((promotion) => [promotion.code!, promotion])
)
const lineItemAdjustmentsToCreate = actions
.filter((a) => a.action === ComputedActions.ADD_ITEM_ADJUSTMENT)
.map((action) => ({
code: action.code,
amount: (action as AddItemAdjustmentAction).amount,
is_tax_inclusive: (action as AddItemAdjustmentAction).is_tax_inclusive,
item_id: (action as AddItemAdjustmentAction).item_id,
promotion_id: promotionsMap.get(action.code)?.id,
}))
const lineItemAdjustmentsToCreate: PrepareAdjustmentsFromPromotionActionsStepOutput["lineItemAdjustmentsToCreate"] =
[]
const lineItemAdjustmentIdsToRemove: string[] = []
const shippingMethodAdjustmentsToCreate: PrepareAdjustmentsFromPromotionActionsStepOutput["shippingMethodAdjustmentsToCreate"] =
[]
const shippingMethodAdjustmentIdsToRemove: string[] = []
const lineItemAdjustmentIdsToRemove = actions
.filter((a) => a.action === ComputedActions.REMOVE_ITEM_ADJUSTMENT)
.map((a) => (a as RemoveItemAdjustmentAction).adjustment_id)
const shippingMethodAdjustmentsToCreate = actions
.filter(
(a) => a.action === ComputedActions.ADD_SHIPPING_METHOD_ADJUSTMENT
)
.map((action) => ({
code: action.code,
amount: (action as AddShippingMethodAdjustment).amount,
shipping_method_id: (action as AddShippingMethodAdjustment)
.shipping_method_id,
promotion_id: promotionsMap.get(action.code)?.id,
}))
const shippingMethodAdjustmentIdsToRemove = actions
.filter(
(a) => a.action === ComputedActions.REMOVE_SHIPPING_METHOD_ADJUSTMENT
)
.map((a) => (a as RemoveShippingMethodAdjustment).adjustment_id)
for (const action of actions) {
switch (action.action) {
case ComputedActions.ADD_ITEM_ADJUSTMENT:
const itemAction = action as AddItemAdjustmentAction
lineItemAdjustmentsToCreate.push({
code: action.code,
amount: itemAction.amount as number,
is_tax_inclusive: itemAction.is_tax_inclusive, // TODO: there is a discrepancy between the type and the actual data
item_id: itemAction.item_id,
promotion_id: promotionsMap.get(action.code)?.id,
} as PrepareAdjustmentsFromPromotionActionsStepOutput["lineItemAdjustmentsToCreate"][number])
break
case ComputedActions.REMOVE_ITEM_ADJUSTMENT:
lineItemAdjustmentIdsToRemove.push(
(action as RemoveItemAdjustmentAction).adjustment_id
)
break
case ComputedActions.ADD_SHIPPING_METHOD_ADJUSTMENT:
const shippingAction = action as AddShippingMethodAdjustment
shippingMethodAdjustmentsToCreate.push({
code: action.code,
amount: shippingAction.amount as number,
shipping_method_id: shippingAction.shipping_method_id,
promotion_id: promotionsMap.get(action.code)?.id,
})
break
case ComputedActions.REMOVE_SHIPPING_METHOD_ADJUSTMENT:
shippingMethodAdjustmentIdsToRemove.push(
(action as RemoveShippingMethodAdjustment).adjustment_id
)
break
}
}
const computedPromotionCodes = [
...lineItemAdjustmentsToCreate,
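The four filter/map passes over actions collapse into a single switch loop here, and the promotion lookup now goes through query.graph with cache: { enable: true } so repeated evaluations can reuse the result. A minimal standalone sketch of the one-pass dispatch, with the action and adjustment shapes reduced to placeholders rather than the framework types:

// One-pass dispatch over computed actions (shapes are simplified placeholders,
// not the framework types).
type Action =
  | { action: "addItemAdjustment"; code: string; amount: number; item_id: string }
  | { action: "removeItemAdjustment"; adjustment_id: string }

function splitAdjustmentActions(actions: Action[]) {
  const toCreate: { code: string; amount: number; item_id: string }[] = []
  const toRemoveIds: string[] = []

  for (const action of actions) {
    switch (action.action) {
      case "addItemAdjustment":
        toCreate.push({
          code: action.code,
          amount: action.amount,
          item_id: action.item_id,
        })
        break
      case "removeItemAdjustment":
        toRemoveIds.push(action.adjustment_id)
        break
    }
  }

  return { toCreate, toRemoveIds }
}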

View File

@@ -104,30 +104,31 @@ export const completeCartWorkflow = createWorkflow(
ttl: TWO_MINUTES,
})
const orderCart = useQueryGraphStep({
entity: "order_cart",
fields: ["cart_id", "order_id"],
filters: { cart_id: input.id },
options: {
isList: false,
},
})
const [orderCart, cartData] = parallelize(
useQueryGraphStep({
entity: "order_cart",
fields: ["cart_id", "order_id"],
filters: { cart_id: input.id },
options: {
isList: false,
},
}),
useQueryGraphStep({
entity: "cart",
fields: completeCartFields,
filters: { id: input.id },
options: {
isList: false,
},
}).config({
name: "cart-query",
})
)
const orderId = transform({ orderCart }, ({ orderCart }) => {
return orderCart?.data?.order_id
})
const cartData = useQueryGraphStep({
entity: "cart",
fields: completeCartFields,
filters: { id: input.id },
options: {
isList: false,
},
}).config({
name: "cart-query",
})
// this needs to be before the validation step
const paymentSessions = validateCartPaymentsStep({ cart: cartData.data })
// purpose of this step is to run compensation if cart completion fails
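The order-cart link and the cart itself are independent lookups, so wrapping them in parallelize lets them load concurrently. A minimal sketch of the same pattern in a self-contained workflow; the workflow name, field lists, and import paths are illustrative:

import {
  createWorkflow,
  parallelize,
  WorkflowResponse,
} from "@medusajs/framework/workflows-sdk"
import { useQueryGraphStep } from "@medusajs/medusa/core-flows"

// Two independent lookups executed concurrently.
const exampleParallelQueries = createWorkflow(
  "example-parallel-queries",
  (input: { cart_id: string }) => {
    const [orderCart, cartData] = parallelize(
      useQueryGraphStep({
        entity: "order_cart",
        fields: ["cart_id", "order_id"],
        filters: { cart_id: input.cart_id },
        options: { isList: false },
      }),
      useQueryGraphStep({
        entity: "cart",
        fields: ["id", "currency_code"],
        filters: { id: input.cart_id },
        options: { isList: false },
      }).config({ name: "cart-query" })
    )

    return new WorkflowResponse({ orderCart, cartData })
  }
)

export default exampleParallelQueries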

View File

@@ -9,7 +9,6 @@ import {
WorkflowResponse,
} from "@medusajs/framework/workflows-sdk"
import { useQueryGraphStep } from "../../common"
import { useRemoteQueryStep } from "../../common/steps/use-remote-query"
import { acquireLockStep, releaseLockStep } from "../../locking"
import { updateLineItemsStep } from "../steps"
import { cartFieldsForRefreshSteps } from "../utils/fields"
@@ -169,15 +168,18 @@ export const refreshCartItemsWorkflow = createWorkflow(
})
})
const refetchedCart = useRemoteQueryStep({
entry_point: "cart",
const { data: refetchedCart } = useQueryGraphStep({
entity: "cart",
fields: cartFieldsForRefreshSteps,
variables: { id: input.cart_id },
list: false,
}).config({ name: "refetchcart" })
filters: { id: input.cart_id },
options: { isList: false },
}).config({ name: "refetch-cart" })
refreshCartShippingMethodsWorkflow.runAsStep({
input: { cart: refetchedCart, additional_data: input.additional_data },
input: {
cart: refetchedCart, // Pass cart to avoid refetch
additional_data: input.additional_data,
},
})
when("force-refresh-update-tax-lines", { input }, ({ input }) => {
@@ -223,6 +225,7 @@ export const refreshCartItemsWorkflow = createWorkflow(
updateCartPromotionsWorkflow.runAsStep({
input: {
cart_id: input.cart_id,
cart: refetchedCart, // Pass cart to avoid refetch in updateCartPromotionsWorkflow
promo_codes: cartPromoCodes,
action: PromotionActions.REPLACE,
},
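This file and the workflows below apply the same useRemoteQueryStep to useQueryGraphStep conversion. As a reference, the option mapping looks roughly like this (fragments as they would appear inside a workflow composer; field list and step name are illustrative): entry_point becomes entity, variables becomes filters, list and throw_if_key_not_found move under options, and the result is destructured from data.

// Before: useRemoteQueryStep
const cartOld = useRemoteQueryStep({
  entry_point: "cart",
  fields: ["id", "currency_code"],
  variables: { id: input.cart_id },
  list: false,
  throw_if_key_not_found: true,
})

// After: useQueryGraphStep — the result is wrapped, so destructure `data`
const { data: cart } = useQueryGraphStep({
  entity: "cart",
  fields: ["id", "currency_code"],
  filters: { id: input.cart_id },
  options: { isList: false, throwIfKeyNotFound: true },
}).config({ name: "fetch-cart" })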

View File

@@ -9,7 +9,7 @@ import {
WorkflowResponse,
} from "@medusajs/framework/workflows-sdk"
import { AdditionalData } from "@medusajs/types"
import { useRemoteQueryStep } from "../../common"
import { useQueryGraphStep } from "../../common"
import { acquireLockStep, releaseLockStep } from "../../locking"
import { removeShippingMethodFromCartStep } from "../steps"
import { updateShippingMethodsStep } from "../steps/update-shipping-methods"
@@ -62,10 +62,11 @@ export const refreshCartShippingMethodsWorkflow = createWorkflow(
>
) => {
const shouldExecute = transform({ input }, ({ input }) => {
return (
!!input.cart_id ||
(!!input.cart && !!input.cart.shipping_methods?.length)
)
if (input.cart) {
return !!input.cart.shipping_methods?.length
}
return !!input.cart_id
})
const cartId = transform({ input }, ({ input }) => {
@@ -79,8 +80,8 @@ export const refreshCartShippingMethodsWorkflow = createWorkflow(
return shouldExecute
}
).then(() => {
return useRemoteQueryStep({
entry_point: "cart",
const { data: cart } = useQueryGraphStep({
entity: "cart",
fields: [
"id",
"sales_channel_id",
@@ -94,10 +95,14 @@ export const refreshCartShippingMethodsWorkflow = createWorkflow(
"shipping_methods.data",
"total",
],
variables: { id: cartId },
throw_if_key_not_found: true,
list: false,
filters: { id: cartId },
options: {
throwIfKeyNotFound: true,
isList: false,
},
}).config({ name: "get-cart" })
return cart
})
const cart = transform({ fetchCart, input }, ({ fetchCart, input }) => {

View File

@@ -8,7 +8,7 @@ import {
transform,
when,
} from "@medusajs/framework/workflows-sdk"
import { useRemoteQueryStep } from "../../common/steps/use-remote-query"
import { useQueryGraphStep } from "../../common"
import { acquireLockStep, releaseLockStep } from "../../locking"
import { updatePaymentCollectionStep } from "../../payment-collection"
import { deletePaymentSessionsWorkflow } from "../../payment-collection/workflows/delete-payment-sessions"
@@ -61,20 +61,26 @@ export const refreshPaymentCollectionForCartWorkflow = createWorkflow(
},
(input: WorkflowData<RefreshPaymentCollectionForCartWorklowInput>) => {
const shouldExecute = transform({ input }, ({ input }) => {
return (
!!input.cart_id || (!!input.cart && !!input.cart.payment_collection)
)
if (input.cart) {
return !!input.cart.payment_collection
}
return !!input.cart_id
})
const cartId = transform({ input }, ({ input }) => {
return input.cart_id ?? input.cart?.id
})
const fetchCart = when("should-fetch-cart", { input }, ({ input }) => {
return shouldExecute
}).then(() => {
return useRemoteQueryStep({
entry_point: "cart",
const fetchCart = when(
"should-fetch-cart",
{ shouldExecute },
({ shouldExecute }) => {
return shouldExecute
}
).then(() => {
const { data: cart } = useQueryGraphStep({
entity: "cart",
fields: [
"id",
"region_id",
@@ -87,10 +93,14 @@ export const refreshPaymentCollectionForCartWorkflow = createWorkflow(
"payment_collection.currency_code",
"payment_collection.payment_sessions.id",
],
variables: { id: cartId },
throw_if_key_not_found: true,
list: false,
})
filters: { id: cartId },
options: {
throwIfKeyNotFound: true,
isList: false,
},
}).config({ name: "fetch-cart" })
return cart
})
const cart = transform({ fetchCart, input }, ({ fetchCart, input }) => {

View File

@@ -8,7 +8,7 @@ import {
WorkflowData,
WorkflowResponse,
} from "@medusajs/framework/workflows-sdk"
import { useRemoteQueryStep } from "../../common"
import { useQueryGraphStep } from "../../common"
import { acquireLockStep, releaseLockStep } from "../../locking"
import {
createLineItemAdjustmentsStep,
@@ -82,12 +82,14 @@ export const updateCartPromotionsWorkflow = createWorkflow(
const fetchCart = when("should-fetch-cart", { input }, ({ input }) => {
return !input.cart
}).then(() => {
return useRemoteQueryStep({
entry_point: "cart",
const { data: cart } = useQueryGraphStep({
entity: "cart",
fields: cartFieldsForRefreshSteps,
variables: { id: input.cart_id },
list: false,
})
filters: { id: input.cart_id },
options: { isList: false },
}).config({ name: "fetch-cart" })
return cart
})
const cart = transform({ fetchCart, input }, ({ fetchCart, input }) => {

View File

@@ -8,7 +8,7 @@ import {
transform,
when,
} from "@medusajs/framework/workflows-sdk"
import { useRemoteQueryStep } from "../../common"
import { useQueryGraphStep } from "../../common"
import { acquireLockStep, releaseLockStep } from "../../locking"
import { getItemTaxLinesStep } from "../../tax/steps/get-item-tax-lines"
import { setTaxLinesForItemsStep } from "../steps"
@@ -127,13 +127,17 @@ export const updateTaxLinesWorkflow = createWorkflow(
const fetchCart = when("should-fetch-cart", { input }, ({ input }) => {
return !input.cart
}).then(() => {
return useRemoteQueryStep({
entry_point: "cart",
const { data: cart } = useQueryGraphStep({
entity: "cart",
fields: cartFields,
variables: { id: input.cart_id },
throw_if_key_not_found: true,
list: false,
})
filters: { id: input.cart_id },
options: {
throwIfKeyNotFound: true,
isList: false,
},
}).config({ name: "fetch-cart" })
return cart
})
const cart = transform({ fetchCart, input }, ({ fetchCart, input }) => {

View File

@@ -8,7 +8,7 @@ import {
transform,
when,
} from "@medusajs/framework/workflows-sdk"
import { useRemoteQueryStep } from "../../common"
import { useQueryGraphStep } from "../../common"
import { getItemTaxLinesStep } from "../../tax/steps/get-item-tax-lines"
import { upsertTaxLinesForItemsStep } from "../steps/upsert-tax-lines-for-items"
@@ -124,13 +124,17 @@ export const upsertTaxLinesWorkflow = createWorkflow(
const fetchCart = when("should-fetch-cart", { input }, ({ input }) => {
return !input.cart
}).then(() => {
return useRemoteQueryStep({
entry_point: "cart",
const { data: cart } = useQueryGraphStep({
entity: "cart",
fields: cartFields,
variables: { id: input.cart_id },
throw_if_key_not_found: true,
list: false,
})
filters: { id: input.cart_id },
options: {
throwIfKeyNotFound: true,
isList: false,
},
}).config({ name: "fetch-cart" })
return cart
})
const cart = transform({ fetchCart, input }, ({ fetchCart, input }) => {

View File

@@ -98,7 +98,12 @@ export async function mikroOrmCreateConnection(
filters: database.filters ?? {},
useBatchInserts: true,
useBatchUpdates: true,
implicitTransactions: false,
ignoreUndefinedInQuery: true,
batchSize: 100,
metadataCache: {
enabled: true,
},
assign: {
convertCustomTypes: true,
},
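For context, the new flags are plain MikroORM options rather than Medusa-specific ones. A standalone config with the same settings might look roughly like this; the connection string and empty entity list are placeholders:

import { defineConfig } from "@mikro-orm/postgresql"

// Same flags, expressed as plain MikroORM options.
export default defineConfig({
  clientUrl: "postgres://localhost:5432/medusa", // placeholder
  entities: [],
  useBatchInserts: true,
  useBatchUpdates: true,
  implicitTransactions: false, // don't wrap every flush in an implicit transaction
  ignoreUndefinedInQuery: true, // drop `undefined` values from query filters
  batchSize: 100, // cap the size of batched inserts/updates
  metadataCache: {
    enabled: true,
  },
})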

View File

@@ -436,20 +436,23 @@ export function mikroOrmBaseRepositoryFactory<const T extends object>(
.getKnexQuery()
.toSQL()
const where = [
whereSqlInfo.sql.split("where ")[1],
whereSqlInfo.bindings,
] as [string, any[]]
return await (manager.getTransactionContext() ?? manager.getKnex())
const builder = (manager.getTransactionContext() ?? manager.getKnex())
.queryBuilder()
.from(this.tableName)
.delete()
.where(manager.getKnex().raw(...where))
.returning("id")
.then((rows: { id: string }[]) => {
return rows.map((row: { id: string }) => row.id)
})
const hasWhere = whereSqlInfo.sql.includes("where ")
if (hasWhere) {
const where = [
whereSqlInfo.sql.split("where ")[1],
whereSqlInfo.bindings,
] as [string, any[]]
builder.where(manager.getKnex().raw(...where))
}
return await builder.returning("id").then((rows: { id: string }[]) => {
return rows.map((row: { id: string }) => row.id)
})
}
async find(
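The guard matters because some filters produce a query with no WHERE clause at all, in which case splitting on "where " yielded undefined and broke the raw fragment. A sketch of the same guarded delete outside the repository; the table name and whereSqlInfo shape mirror the code above, everything else is illustrative:

import { Knex } from "knex"

// Only attach the raw WHERE fragment when the generated SQL actually contains one.
async function deleteReturningIds(
  db: Knex,
  tableName: string,
  whereSqlInfo: { sql: string; bindings: readonly any[] }
): Promise<string[]> {
  const builder = db.queryBuilder().from(tableName).delete()

  const hasWhere = whereSqlInfo.sql.includes("where ")
  if (hasWhere) {
    const where = [
      whereSqlInfo.sql.split("where ")[1],
      whereSqlInfo.bindings,
    ] as [string, any[]]
    builder.where(db.raw(...where))
  }

  const rows: { id: string }[] = await builder.returning("id")
  return rows.map((row) => row.id)
}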

View File

@@ -13,6 +13,16 @@ export function createPgConnection(options: Options) {
options.driverOptions?.ssl ??
options.driverOptions?.connection?.ssl ??
false
const connectionTimeoutMillis =
driverOptions?.connectionTimeoutMillis ??
driverOptions?.connection?.connectionTimeoutMillis ??
5000
const keepAliveInitialDelayMillis =
driverOptions?.keepAliveInitialDelayMillis ??
driverOptions?.connection?.keepAliveInitialDelayMillis ??
10000
const keepAlive =
driverOptions?.keepAlive ?? driverOptions?.connection?.keepAlive ?? true
return knex<any, any>({
client: "pg",
@@ -23,11 +33,16 @@ export function createPgConnection(options: Options) {
idle_in_transaction_session_timeout:
(driverOptions?.idle_in_transaction_session_timeout as number) ??
undefined, // prevent null to be passed
connectionTimeoutMillis: connectionTimeoutMillis as number, // Fail fast on slow connects
keepAlive: keepAlive as boolean, // Prevent connections from being dropped
keepAliveInitialDelayMillis: keepAliveInitialDelayMillis as number, // Start keepalive probes after 10s
},
pool: {
propagateCreateError: false, // Don't fail entire pool on one bad connection
min: (pool?.min as number) ?? 1,
// https://knexjs.org/guide/#pool
...(pool ?? {}),
min: (pool?.min as number) ?? 1,
},
})
}
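Because the new timeouts and keepalive settings are read from driverOptions first, a project can still override the defaults. A sketch of what that might look like from a Medusa project config, assuming the standard databaseDriverOptions key; the values are illustrative:

import { defineConfig } from "@medusajs/framework/utils"

// Overriding the new connection defaults from a project's medusa-config.
export default defineConfig({
  projectConfig: {
    databaseUrl: process.env.DATABASE_URL,
    databaseDriverOptions: {
      connection: {
        connectionTimeoutMillis: 10_000, // default added here is 5000
        keepAlive: true, // default added here is true
        keepAliveInitialDelayMillis: 30_000, // default added here is 10000
      },
    },
  },
})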

View File

@@ -37,11 +37,11 @@
"build": "rimraf dist && tsc --build"
},
"dependencies": {
"@mikro-orm/cli": "6.4.3",
"@mikro-orm/core": "6.4.3",
"@mikro-orm/knex": "6.4.3",
"@mikro-orm/migrations": "6.4.3",
"@mikro-orm/postgresql": "6.4.3",
"@mikro-orm/cli": "6.4.16",
"@mikro-orm/core": "6.4.16",
"@mikro-orm/knex": "6.4.16",
"@mikro-orm/migrations": "6.4.16",
"@mikro-orm/postgresql": "6.4.16",
"@opentelemetry/instrumentation-pg": "^0.44.0",
"@opentelemetry/resources": "^1.26.0",
"@opentelemetry/sdk-node": "^0.53.0",

View File

@@ -280,12 +280,8 @@ export default class CartModuleService
const carts = await this.createCarts_(input, sharedContext)
const result = await this.listCarts(
{ id: carts.map((p) => p!.id) },
{
relations: ["shipping_address", "billing_address"],
},
sharedContext
const result = await this.baseRepository_.serialize<CartTypes.CartDTO[]>(
carts
)
return (Array.isArray(data) ? result : result[0]) as
@@ -297,23 +293,30 @@ export default class CartModuleService
protected async createCarts_(
data: CartTypes.CreateCartDTO[],
@MedusaContext() sharedContext: Context = {}
) {
): Promise<InferEntityType<typeof Cart>[]> {
const cartsWithItems = data.map(({ items, ...cart }) => {
const cartId = generateEntityId((cart as any).id, "cart")
return {
cart: { ...cart, id: cartId },
items: items || [],
}
})
// Batch create all carts at once instead of sequentially
const createdCarts = await this.cartService_.create(
cartsWithItems.map((c) => c.cart),
sharedContext
)
const lineItemsToCreate: CreateLineItemDTO[] = []
const createdCarts: InferEntityType<typeof Cart>[] = []
for (const { items, ...cart } of data) {
const [created] = await this.cartService_.create([cart], sharedContext)
createdCarts.push(created)
if (items?.length) {
const cartItems = items.map((item) => {
return {
for (const { cart, items } of cartsWithItems) {
if (items.length) {
lineItemsToCreate.push(
...items.map((item) => ({
...item,
cart_id: created.id,
}
})
lineItemsToCreate.push(...cartItems)
cart_id: cart.id,
}))
)
}
}
@@ -321,7 +324,19 @@ export default class CartModuleService
await this.addLineItemsBulk_(lineItemsToCreate, sharedContext)
}
return createdCarts
const fullCarts = await this.cartService_.list(
{ id: createdCarts.map((c) => c.id) },
{
relations: ["shipping_address", "billing_address"],
},
sharedContext
)
// Return in the same input order
const orderedInputId = cartsWithItems.map((c) => c.cart.id)
return orderedInputId.map((id) =>
fullCarts.find((c) => c.id === id)
) as InferEntityType<typeof Cart>[]
}
// @ts-expect-error
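Since carts are now created in one batch, the result order is no longer guaranteed to match the input, hence the explicit re-sort at the end. The same idea as a small standalone helper, using a Map instead of the find call above; the types are simplified placeholders:

// Re-sort batched results back into input order.
function sortByInputOrder<T extends { id: string }>(
  inputIds: string[],
  created: T[]
): T[] {
  const byId = new Map(created.map((row) => [row.id, row]))
  return inputIds.map((id) => byId.get(id)!) // assumes every input id produced a row
}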

View File

@@ -136,6 +136,15 @@
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_list_deleted_at\" ON \"price_list\" (deleted_at) WHERE deleted_at IS NULL"
},
{
"keyName": "IDX_price_list_id_status_starts_at_ends_at",
"columnNames": [],
"composite": false,
"constraint": false,
"primary": false,
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_list_id_status_starts_at_ends_at\" ON \"price_list\" (id, status, starts_at, ends_at) WHERE deleted_at IS NULL AND status = 'active'"
},
{
"keyName": "price_list_pkey",
"columnNames": [
@@ -252,6 +261,15 @@
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_list_rule_attribute\" ON \"price_list_rule\" (attribute) WHERE deleted_at IS NULL"
},
{
"keyName": "IDX_price_list_rule_value",
"columnNames": [],
"composite": false,
"constraint": false,
"primary": false,
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_list_rule_value\" ON \"price_list_rule\" USING gin (value) WHERE deleted_at IS NULL"
},
{
"keyName": "price_list_rule_pkey",
"columnNames": [
@@ -625,6 +643,15 @@
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_currency_code\" ON \"price\" (currency_code) WHERE deleted_at IS NULL"
},
{
"keyName": "IDX_price_currency_code_price_set_id_min_quantity",
"columnNames": [],
"composite": false,
"constraint": false,
"primary": false,
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_currency_code_price_set_id_min_quantity\" ON \"price\" (currency_code, price_set_id, min_quantity) WHERE deleted_at IS NULL"
},
{
"keyName": "price_pkey",
"columnNames": [
@@ -822,6 +849,15 @@
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_rule_operator_value\" ON \"price_rule\" (operator, value) WHERE deleted_at IS NULL"
},
{
"keyName": "IDX_price_rule_attribute_value_price_id",
"columnNames": [],
"composite": false,
"constraint": false,
"primary": false,
"unique": false,
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_price_rule_attribute_value_price_id\" ON \"price_rule\" (attribute, value, price_id) WHERE deleted_at IS NULL"
},
{
"keyName": "price_rule_pkey",
"columnNames": [

View File

@@ -0,0 +1,28 @@
import { Migration } from "@mikro-orm/migrations"
export class Migration20251009110625 extends Migration {
override async up(): Promise<void> {
this.addSql(
`CREATE INDEX IF NOT EXISTS "IDX_price_list_id_status_starts_at_ends_at" ON "price_list" (id, status, starts_at, ends_at) WHERE deleted_at IS NULL AND status = 'active';`
)
this.addSql(
`CREATE INDEX IF NOT EXISTS "IDX_price_list_rule_value" ON "price_list_rule" USING gin (value) WHERE deleted_at IS NULL;`
)
this.addSql(
`CREATE INDEX IF NOT EXISTS "IDX_price_rule_attribute_value_price_id" ON "price_rule" (attribute, value, price_id) WHERE deleted_at IS NULL;`
)
}
override async down(): Promise<void> {
this.addSql(
`drop index if exists "IDX_price_list_id_status_starts_at_ends_at";`
)
this.addSql(`drop index if exists "IDX_price_list_rule_value";`)
this.addSql(
`drop index if exists "IDX_price_rule_attribute_value_price_id";`
)
}
}

View File

@@ -19,6 +19,11 @@ const PriceListRule = model
on: ["attribute"],
where: "deleted_at IS NULL",
},
{
on: ["value"],
where: "deleted_at IS NULL",
type: "gin",
},
])
export default PriceListRule

View File

@@ -26,5 +26,11 @@ const PriceList = model
.cascades({
delete: ["price_list_rules", "prices"],
})
.indexes([
{
on: ["id", "status", "starts_at", "ends_at"],
where: "deleted_at IS NULL AND status = 'active'",
},
])
export default PriceList

View File

@@ -30,6 +30,10 @@ const PriceRule = model
on: ["operator", "value"],
where: "deleted_at IS NULL",
},
{
on: ["attribute", "value", "price_id"],
where: "deleted_at IS NULL",
},
])
export default PriceRule

View File

@@ -6,6 +6,10 @@ import {
PriceListStatus,
} from "@medusajs/framework/utils"
import {
Knex,
SqlEntityManager,
} from "@medusajs/framework/mikro-orm/postgresql"
import {
CalculatedPriceSetDTO,
Context,
@@ -13,10 +17,6 @@ import {
PricingFilters,
PricingRepositoryService,
} from "@medusajs/framework/types"
import {
Knex,
SqlEntityManager,
} from "@medusajs/framework/mikro-orm/postgresql"
export class PricingRepository
extends MikroOrmBase
@@ -172,101 +172,108 @@ export class PricingRepository
})
if (hasComplexContext) {
const priceRuleConditions = knex.raw(
`
(
price.rules_count = 0 OR
(
/* Count all matching rules and compare to total rule count */
SELECT COUNT(*)
FROM price_rule pr
WHERE pr.price_id = price.id
AND pr.deleted_at IS NULL
AND (
${flattenedContext
.map(([key, value]) => {
if (typeof value === "number") {
return `
(pr.attribute = ? AND (
(pr.operator = 'eq' AND pr.value = ?) OR
(pr.operator = 'gt' AND ? > pr.value::numeric) OR
(pr.operator = 'gte' AND ? >= pr.value::numeric) OR
(pr.operator = 'lt' AND ? < pr.value::numeric) OR
(pr.operator = 'lte' AND ? <= pr.value::numeric)
))
`
} else {
const normalizeValue = Array.isArray(value) ? value : [value]
const placeholders = normalizeValue.map(() => "?").join(",")
return `(pr.attribute = ? AND pr.value IN (${placeholders}))`
}
})
.join(" OR ")})
) = (
/* Get total rule count */
SELECT COUNT(*)
FROM price_rule pr
WHERE pr.price_id = price.id
AND pr.deleted_at IS NULL
)
)
`,
flattenedContext.flatMap(([key, value]) => {
// Build match conditions for LATERAL join
const priceRuleMatchConditions = flattenedContext
.map(([_key, value]) => {
if (typeof value === "number") {
return [key, value.toString(), value, value, value, value]
return `
(pr.attribute = ? AND (
(pr.operator = 'eq' AND pr.value = ?) OR
(pr.operator = 'gt' AND ? > pr.value::numeric) OR
(pr.operator = 'gte' AND ? >= pr.value::numeric) OR
(pr.operator = 'lt' AND ? < pr.value::numeric) OR
(pr.operator = 'lte' AND ? <= pr.value::numeric)
))
`
} else {
const normalizeValue = Array.isArray(value) ? value : [value]
return [key, ...normalizeValue]
const placeholders = normalizeValue.map(() => "?").join(",")
return `(pr.attribute = ? AND pr.value IN (${placeholders}))`
}
})
)
.join(" OR ")
const priceListRuleConditions = knex.raw(
`
(
pl.rules_count = 0 OR
(
/* Count all matching rules and compare to total rule count */
SELECT COUNT(*)
FROM price_list_rule plr
WHERE plr.price_list_id = pl.id
AND plr.deleted_at IS NULL
AND (
${flattenedContext
.map(([key, value]) => {
if (Array.isArray(value)) {
return value
.map((v) => `(plr.attribute = ? AND plr.value @> ?)`)
.join(" OR ")
}
return `(plr.attribute = ? AND plr.value @> ?)`
})
.join(" OR ")}
)
) = (
/* Get total rule count */
SELECT COUNT(*)
FROM price_list_rule plr
WHERE plr.price_list_id = pl.id
AND plr.deleted_at IS NULL
)
)
`,
flattenedContext.flatMap(([key, value]) => {
const priceRuleMatchParams = flattenedContext.flatMap(([key, value]) => {
if (typeof value === "number") {
return [key, value.toString(), value, value, value, value]
} else {
const normalizeValue = Array.isArray(value) ? value : [value]
return [key, ...normalizeValue]
}
})
const priceListRuleMatchConditions = flattenedContext
.map(([_key, value]) => {
if (Array.isArray(value)) {
return value
.map((_v) => `(plr.attribute = ? AND plr.value @> ?)`)
.join(" OR ")
}
return `(plr.attribute = ? AND plr.value @> ?)`
})
.join(" OR ")
const priceListRuleMatchParams = flattenedContext.flatMap(
([key, value]) => {
const valueAsArray = Array.isArray(value) ? value : [value]
return valueAsArray.flatMap((v) => [key, JSON.stringify(v)])
})
}
)
// Use LATERAL joins to compute matched and total counts in one go
query
.leftJoin(
knex.raw(
`LATERAL (
SELECT
COUNT(*) FILTER (WHERE ${priceRuleMatchConditions}) as matched_count,
COUNT(*) as total_count
FROM price_rule pr
WHERE pr.price_id = price.id
AND pr.deleted_at IS NULL
) pr_stats`,
priceRuleMatchParams
),
knex.raw("true")
)
.leftJoin(
knex.raw(
`LATERAL (
SELECT
COUNT(*) FILTER (WHERE ${priceListRuleMatchConditions}) as matched_count,
COUNT(*) as total_count
FROM price_list_rule plr
WHERE plr.price_list_id = pl.id
AND plr.deleted_at IS NULL
) plr_stats`,
priceListRuleMatchParams
),
knex.raw("true")
)
query.where((qb) => {
qb.whereNull("price.price_list_id")
.andWhereRaw(priceRuleConditions)
.orWhere((qb2) => {
qb2
.whereNotNull("price.price_list_id")
.whereRaw(priceListRuleConditions)
.andWhereRaw(priceRuleConditions)
qb.where((qb2) => {
// No price list: price rules must match or be zero
qb2.whereNull("price.price_list_id").andWhere((qb3) => {
qb3
.where("price.rules_count", 0)
.orWhereRaw("pr_stats.matched_count = price.rules_count")
})
}).orWhere((qb2) => {
// Has price list: both price rules and price list rules must match
qb2
.whereNotNull("price.price_list_id")
.andWhere((qb3) => {
qb3
.where("price.rules_count", 0)
.orWhereRaw("pr_stats.matched_count = price.rules_count")
})
.andWhere((qb3) => {
qb3
.where("pl.rules_count", 0)
.orWhereRaw("plr_stats.matched_count = pl.rules_count")
})
})
})
} else {
query.where(function (this: Knex.QueryBuilder) {
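Instead of re-running correlated COUNT subqueries inside the WHERE clause, the matched and total rule counts are now computed once per price row via LATERAL joins and compared against rules_count. A standalone knex sketch of the same shape for the price-rule side; the connection string and the attribute/value bindings are illustrative:

import { knex } from "knex"

const db = knex({ client: "pg", connection: "postgres://localhost:5432/medusa" }) // placeholder

// Matched vs. total rule counts computed once per price row, then compared
// against rules_count in the outer predicate.
const prices = db("price")
  .leftJoin(
    db.raw(
      `LATERAL (
         SELECT
           COUNT(*) FILTER (WHERE pr.attribute = ? AND pr.value IN (?)) AS matched_count,
           COUNT(*) AS total_count
         FROM price_rule pr
         WHERE pr.price_id = price.id
           AND pr.deleted_at IS NULL
       ) pr_stats`,
      ["region_id", "reg_123"]
    ),
    db.raw("true")
  )
  .where((qb) => {
    qb.where("price.rules_count", 0).orWhereRaw(
      "pr_stats.matched_count = price.rules_count"
    )
  })
  .select("price.id")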

yarn.lock (124 lines changed)
View File

@@ -6439,11 +6439,11 @@ __metadata:
version: 0.0.0-use.local
resolution: "@medusajs/deps@workspace:packages/deps"
dependencies:
"@mikro-orm/cli": 6.4.3
"@mikro-orm/core": 6.4.3
"@mikro-orm/knex": 6.4.3
"@mikro-orm/migrations": 6.4.3
"@mikro-orm/postgresql": 6.4.3
"@mikro-orm/cli": 6.4.16
"@mikro-orm/core": 6.4.16
"@mikro-orm/knex": 6.4.16
"@mikro-orm/migrations": 6.4.16
"@mikro-orm/postgresql": 6.4.16
"@opentelemetry/instrumentation-pg": ^0.44.0
"@opentelemetry/resources": ^1.26.0
"@opentelemetry/sdk-node": ^0.53.0
@@ -7564,43 +7564,43 @@ __metadata:
languageName: unknown
linkType: soft
"@mikro-orm/cli@npm:6.4.3":
version: 6.4.3
resolution: "@mikro-orm/cli@npm:6.4.3"
"@mikro-orm/cli@npm:6.4.16":
version: 6.4.16
resolution: "@mikro-orm/cli@npm:6.4.16"
dependencies:
"@jercle/yargonaut": 1.1.5
"@mikro-orm/core": 6.4.3
"@mikro-orm/knex": 6.4.3
fs-extra: 11.2.0
"@mikro-orm/core": 6.4.16
"@mikro-orm/knex": 6.4.16
fs-extra: 11.3.0
tsconfig-paths: 4.2.0
yargs: 17.7.2
bin:
mikro-orm: ./cli
mikro-orm-esm: ./esm
checksum: de49d269d942fad5b3bde9cc4d21d0252a38543e00e70335024c98c1dfbc12fb30ff6271215b998d5a3c56f0c5cc5bab30c0109cda76b47f8b64d81f312c8698
checksum: c58cfc6a89a4770671c58e3c40a60c8a38229a74e1722d1fb6c14a8d19a43303aa26557ec6a0000ac1d494270f3cfd98e32e38c69163f6b9cafb7c3ef6804b00
languageName: node
linkType: hard
"@mikro-orm/core@npm:6.4.3":
version: 6.4.3
resolution: "@mikro-orm/core@npm:6.4.3"
"@mikro-orm/core@npm:6.4.16":
version: 6.4.16
resolution: "@mikro-orm/core@npm:6.4.16"
dependencies:
dataloader: 2.2.3
dotenv: 16.4.7
dotenv: 16.5.0
esprima: 4.0.1
fs-extra: 11.2.0
fs-extra: 11.3.0
globby: 11.1.0
mikro-orm: 6.4.3
mikro-orm: 6.4.16
reflect-metadata: 0.2.2
checksum: d056eb9323a10f940e101a8a95454dcb2c4fa3429517e1a14a1b706869f53e92c2d2d2c9dfe330c02afbef101612787b1184db255ac5d74df5b0df0059928dd2
checksum: beeb614134d908674916105326c4846fe80fb9a7adc1251a8b9bd70f4db1115256a1bdaa08107fab1577986bbca46dd24b0ff24d87753925f382d1ef216bea18
languageName: node
linkType: hard
"@mikro-orm/knex@npm:6.4.3":
version: 6.4.3
resolution: "@mikro-orm/knex@npm:6.4.3"
"@mikro-orm/knex@npm:6.4.16":
version: 6.4.16
resolution: "@mikro-orm/knex@npm:6.4.16"
dependencies:
fs-extra: 11.2.0
fs-extra: 11.3.0
knex: 3.1.0
sqlstring: 2.3.3
peerDependencies:
@@ -7615,35 +7615,35 @@ __metadata:
optional: true
mariadb:
optional: true
checksum: 08dabcf1ea99d6a976892b4973e5de3f4b53e7ed0d3a1e3dcec20dd9349ba5a3ed153bbb96173990cd12367b3744df17f27bb3de751624207592e5d2f6e8637d
checksum: b0584fe3bd79b131512712ec9e31a1b76e272dcff519d5607daebfd5b3dd856116d9aae836c86baa006abc58e0331926aa3bd4ef4bba1a7ced4781d8c6fd3d21
languageName: node
linkType: hard
"@mikro-orm/migrations@npm:6.4.3":
version: 6.4.3
resolution: "@mikro-orm/migrations@npm:6.4.3"
"@mikro-orm/migrations@npm:6.4.16":
version: 6.4.16
resolution: "@mikro-orm/migrations@npm:6.4.16"
dependencies:
"@mikro-orm/knex": 6.4.3
fs-extra: 11.2.0
"@mikro-orm/knex": 6.4.16
fs-extra: 11.3.0
umzug: 3.8.2
peerDependencies:
"@mikro-orm/core": ^6.0.0
checksum: 04e7e2405a3d0d94213f2bf40c34988e190f8f0df5ff4565ef23c750680b198f293247418ee60c0605edfcbca119177f2e87dc01d3e6abfc15ec7c4dc976324a
checksum: 1d5beb2423c20879cffc9c51f035b2f24b963997bea4d69445f1686616686a3bd8d7b34c2081fcb421df24c0f14ee2c51b4ed01299b5c9dbf2af4fc4cbc22de5
languageName: node
linkType: hard
"@mikro-orm/postgresql@npm:6.4.3":
version: 6.4.3
resolution: "@mikro-orm/postgresql@npm:6.4.3"
"@mikro-orm/postgresql@npm:6.4.16":
version: 6.4.16
resolution: "@mikro-orm/postgresql@npm:6.4.16"
dependencies:
"@mikro-orm/knex": 6.4.3
pg: 8.13.1
postgres-array: 3.0.2
"@mikro-orm/knex": 6.4.16
pg: 8.16.0
postgres-array: 3.0.4
postgres-date: 2.1.0
postgres-interval: 4.0.2
peerDependencies:
"@mikro-orm/core": ^6.0.0
checksum: c0068dbd8f81d646de1171e9933e1385cd24faedb8a4347b9ab2c24e9bfa19a56cc5a6245aaf84fc8a19b355d02a7f1c160400483aa12411e3487e82b854e12c
checksum: 0d7b6dd04f9c476d445fd31b5544be88d38da9b3c5c3666c2c84c1fb35db55de27a98e23b62ba26528f9056bb28966cc8dc5673418bf4b2da6bf6bfeb6af0264
languageName: node
linkType: hard
@@ -20897,10 +20897,10 @@ __metadata:
languageName: node
linkType: hard
"dotenv@npm:16.4.7":
version: 16.4.7
resolution: "dotenv@npm:16.4.7"
checksum: be9f597e36a8daf834452daa1f4cc30e5375a5968f98f46d89b16b983c567398a330580c88395069a77473943c06b877d1ca25b4afafcdd6d4adb549e8293462
"dotenv@npm:16.5.0":
version: 16.5.0
resolution: "dotenv@npm:16.5.0"
checksum: 5bc94c919fbd955bf0ba44d33922a1e93d1078e64a1db5c30faeded1d996e7a83c55332cb8ea4fae5a9ca4d0be44cbceb95c5811e70f9f095298df09d1997dd9
languageName: node
linkType: hard
@@ -23278,14 +23278,14 @@ __metadata:
languageName: node
linkType: hard
"fs-extra@npm:11.2.0, fs-extra@npm:^11.1.0, fs-extra@npm:^11.2.0":
version: 11.2.0
resolution: "fs-extra@npm:11.2.0"
"fs-extra@npm:11.3.0":
version: 11.3.0
resolution: "fs-extra@npm:11.3.0"
dependencies:
graceful-fs: ^4.2.0
jsonfile: ^6.0.1
universalify: ^2.0.0
checksum: d77a9a9efe60532d2e790e938c81a02c1b24904ef7a3efb3990b835514465ba720e99a6ea56fd5e2db53b4695319b644d76d5a0e9988a2beef80aa7b1da63398
checksum: 5f95e996186ff45463059feb115a22fb048bdaf7e487ecee8a8646c78ed8fdca63630e3077d4c16ce677051f5e60d3355a06f3cd61f3ca43f48cc58822a44d0a
languageName: node
linkType: hard
@@ -23311,6 +23311,17 @@ __metadata:
languageName: node
linkType: hard
"fs-extra@npm:^11.1.0, fs-extra@npm:^11.2.0":
version: 11.2.0
resolution: "fs-extra@npm:11.2.0"
dependencies:
graceful-fs: ^4.2.0
jsonfile: ^6.0.1
universalify: ^2.0.0
checksum: d77a9a9efe60532d2e790e938c81a02c1b24904ef7a3efb3990b835514465ba720e99a6ea56fd5e2db53b4695319b644d76d5a0e9988a2beef80aa7b1da63398
languageName: node
linkType: hard
"fs-extra@npm:^7.0.1, fs-extra@npm:~7.0.1":
version: 7.0.1
resolution: "fs-extra@npm:7.0.1"
@@ -27550,10 +27561,10 @@ __metadata:
languageName: node
linkType: hard
"mikro-orm@npm:6.4.3":
version: 6.4.3
resolution: "mikro-orm@npm:6.4.3"
checksum: 6d489b7ba8ee9e97545bf035c0d1571e5e00baba4653c24713f4f4703311461f10d467d42f230674049282add6340fc9f33f3e3034827f86d89b5d62b3cc0180
"mikro-orm@npm:6.4.16":
version: 6.4.16
resolution: "mikro-orm@npm:6.4.16"
checksum: 6a7d6ad717503433eba0372f890fc66c8f0f80927b40ae8666bc0795006b0a5a089e662a7a5fc121d3e0fbdb1350c1878dc3bee2c1c6bc4028c729b2c4a45f7a
languageName: node
linkType: hard
@@ -30223,10 +30234,10 @@ __metadata:
languageName: node
linkType: hard
"postgres-array@npm:3.0.2, postgres-array@npm:~3.0.1":
version: 3.0.2
resolution: "postgres-array@npm:3.0.2"
checksum: 644aa071f67a66a59f641f8e623887d2b915bc102a32643e2aa8b54c11acd343c5ad97831ea444dd37bd4b921ba35add4aa2cb0c6b76700a8252c2324aeba5b4
"postgres-array@npm:3.0.4":
version: 3.0.4
resolution: "postgres-array@npm:3.0.4"
checksum: 47f3e648da512bacdd6a5ed55cf770605ec271330789faeece0fd13805a49f376d6e5c9e0e353377be11a9545e727dceaa2473566c505432bf06366ccd04c6b2
languageName: node
linkType: hard
@@ -30237,6 +30248,13 @@ __metadata:
languageName: node
linkType: hard
"postgres-array@npm:~3.0.1":
version: 3.0.2
resolution: "postgres-array@npm:3.0.2"
checksum: 644aa071f67a66a59f641f8e623887d2b915bc102a32643e2aa8b54c11acd343c5ad97831ea444dd37bd4b921ba35add4aa2cb0c6b76700a8252c2324aeba5b4
languageName: node
linkType: hard
"postgres-bytea@npm:~1.0.0":
version: 1.0.0
resolution: "postgres-bytea@npm:1.0.0"