diff --git a/.changeset/silver-baboons-drop.md b/.changeset/silver-baboons-drop.md new file mode 100644 index 0000000000..d71e19a842 --- /dev/null +++ b/.changeset/silver-baboons-drop.md @@ -0,0 +1,8 @@ +--- +"@medusajs/index": patch +"@medusajs/medusa": patch +"@medusajs/utils": patch +"@medusajs/modules-sdk": patch +--- + +chore(medusa): index engine feature flag diff --git a/integration-tests/modules/__tests__/index/query.index.ts b/integration-tests/modules/__tests__/index/query.index.ts index 1addde7a17..8bab143f87 100644 --- a/integration-tests/modules/__tests__/index/query.index.ts +++ b/integration-tests/modules/__tests__/index/query.index.ts @@ -9,6 +9,8 @@ import { jest.setTimeout(120000) +// NOTE: In these tests, both APIs are used to query; we use the object pattern and the string pattern + process.env.ENABLE_INDEX_MODULE = "true" medusaIntegrationTestRunner({ @@ -23,12 +25,9 @@ medusaIntegrationTestRunner({ process.env.ENABLE_INDEX_MODULE = "false" }) - beforeEach(async () => { - await createAdminUser(dbConnection, adminHeaders, appContainer) - }) - describe("Index engine - Query.index", () => { - it("should use query.index to query the index module and hydrate the data", async () => { + beforeEach(async () => { + await createAdminUser(dbConnection, adminHeaders, appContainer) const shippingProfile = ( await api.post( `/admin/shipping-profiles`, @@ -40,6 +39,7 @@ medusaIntegrationTestRunner({ const payload = [ { title: "Test Product", + status: "published", description: "test-product-description", shipping_profile_id: shippingProfile.id, options: [{ title: "Denominations", values: ["100"] }], @@ -66,6 +66,7 @@ medusaIntegrationTestRunner({ { title: "Extra product", description: "extra description", + status: "published", shipping_profile_id: shippingProfile.id, options: [{ title: "Colors", values: ["Red"] }], variants: new Array(2).fill(0).map((_, i) => ({ @@ -88,13 +89,16 @@ medusaIntegrationTestRunner({ }, ] - for (const data of payload) { - await 
api.post("/admin/products", data, adminHeaders).catch((err) => { + await api + .post("/admin/products/batch", { create: payload }, adminHeaders) + .catch((err) => { console.log(err) }) - } - await setTimeout(5000) + await setTimeout(2000) + }) + + it("should use query.index to query the index module and hydrate the data", async () => { const query = appContainer.resolve( ContainerRegistrationKeys.QUERY ) as RemoteQueryFunction @@ -105,7 +109,7 @@ medusaIntegrationTestRunner({ "id", "description", "status", - + "title", "variants.sku", "variants.barcode", "variants.material", @@ -120,17 +124,25 @@ medusaIntegrationTestRunner({ "variants.prices.amount": { $gt: 30 }, }, pagination: { + take: 10, + skip: 0, order: { "variants.prices.amount": "DESC", }, }, }) + expect(resultset.metadata).toEqual({ + count: 2, + skip: 0, + take: 10, + }) expect(resultset.data).toEqual([ { id: expect.any(String), description: "extra description", - status: "draft", + title: "Extra product", + status: "published", variants: [ { sku: "extra-variant-0", @@ -194,7 +206,8 @@ medusaIntegrationTestRunner({ { id: expect.any(String), description: "test-product-description", - status: "draft", + title: "Test Product", + status: "published", variants: [ { sku: "test-variant-1", @@ -234,6 +247,104 @@ medusaIntegrationTestRunner({ }, ]) }) + + it("should use query.index to query the index module sorting by price desc", async () => { + const query = appContainer.resolve( + ContainerRegistrationKeys.QUERY + ) as RemoteQueryFunction + + const resultset = await query.index({ + entity: "product", + fields: [ + "id", + "variants.prices.amount", + "variants.prices.currency_code", + ], + filters: { + "variants.prices.currency_code": "USD", + }, + pagination: { + take: 1, + skip: 0, + order: { + "variants.prices.amount": "DESC", + }, + }, + }) + + // Limiting to 1 on purpose to keep it simple and check the correct order is maintained + expect(resultset.data).toEqual([ + { + id: expect.any(String), + variants: 
expect.arrayContaining([ + expect.objectContaining({ + prices: expect.arrayContaining([ + { + amount: 20, + currency_code: "CAD", + id: expect.any(String), + }, + { + amount: 80, + currency_code: "USD", + id: expect.any(String), + }, + ]), + }), + ]), + }, + ]) + + const resultset2 = await query.index({ + entity: "product", + fields: [ + "id", + "variants.prices.amount", + "variants.prices.currency_code", + ], + filters: { + variants: { + prices: { + currency_code: "USD", + }, + }, + }, + pagination: { + take: 1, + skip: 0, + order: { + variants: { + prices: { + amount: "ASC", + }, + }, + }, + }, + }) + + // Limiting to 1 on purpose to keep it simple and check the correct order is maintained + expect(resultset2.data).toEqual([ + { + id: expect.any(String), + variants: [ + expect.objectContaining({ + prices: expect.arrayContaining([ + { + amount: 30, + currency_code: "USD", + id: expect.any(String), + }, + { + amount: 50, + currency_code: "EUR", + id: expect.any(String), + }, + ]), + }), + ], + }, + ]) + }) }) }, }) diff --git a/packages/core/modules-sdk/src/remote-query/query.ts b/packages/core/modules-sdk/src/remote-query/query.ts index 1d246dc4ec..e132065cc8 100644 --- a/packages/core/modules-sdk/src/remote-query/query.ts +++ b/packages/core/modules-sdk/src/remote-query/query.ts @@ -14,6 +14,7 @@ import { MedusaError, isObject, remoteQueryObjectFromString, + unflattenObjectKeys, } from "@medusajs/utils" import { RemoteQuery } from "./remote-query" import { toRemoteQuery } from "./to-remote-query" @@ -211,7 +212,9 @@ export class Query { : ({} as any) const pagination = queryOptions.pagination as any if (pagination?.order) { - pagination.order = { [mainEntity]: pagination.order } + pagination.order = { + [mainEntity]: unflattenObjectKeys(pagination?.order), + } } const indexResponse = (await this.#indexModule.query({ diff --git a/packages/core/modules-sdk/src/remote-query/remote-query.ts b/packages/core/modules-sdk/src/remote-query/remote-query.ts index 
b05499d2c0..fed1806fe4 100644 --- a/packages/core/modules-sdk/src/remote-query/remote-query.ts +++ b/packages/core/modules-sdk/src/remote-query/remote-query.ts @@ -18,6 +18,8 @@ import { isPresent, isString, toPascalCase } from "@medusajs/utils" import { MedusaModule } from "../medusa-module" const BASE_PREFIX = "" +const MAX_BATCH_SIZE = 4000 +const MAX_CONCURRENT_REQUESTS = 10 export class RemoteQuery { private remoteJoiner: RemoteJoiner private modulesMap: Map = new Map() @@ -182,6 +184,102 @@ export class RemoteQuery { } } + private async fetchRemoteDataBatched(args: { + serviceName: string + keyField: string + service: any + methodName: string + filters: any + options: any + ids: (unknown | unknown[])[] + }): Promise { + const { + serviceName, + keyField, + service, + methodName, + filters, + options, + ids, + } = args + + const getBatch = function* ( + idArray: (unknown | unknown[])[], + batchSize: number + ) { + for (let i = 0; i < idArray.length; i += batchSize) { + yield idArray.slice(i, i + batchSize) + } + } + + const idsToFetch = getBatch(ids, MAX_BATCH_SIZE) + const results: any[] = [] + let running = 0 + const fetchPromises: Promise[] = [] + + const processBatch = async (batch: (unknown | unknown[])[]) => { + running++ + const batchFilters = { ...filters, [keyField]: batch } + let result + + try { + if (RemoteQuery.traceFetchRemoteData) { + result = await RemoteQuery.traceFetchRemoteData( + async () => service[methodName](batchFilters, options), + serviceName, + methodName, + options + ) + } else { + result = await service[methodName](batchFilters, options) + } + results.push(result) + } finally { + running-- + processAllBatches() + } + } + + let batchesDone: (value: void) => void = () => {} + const awaitBatches = new Promise((ok) => { + batchesDone = ok + }) + const processAllBatches = async () => { + let isDone = false + while (running < MAX_CONCURRENT_REQUESTS) { + const nextBatch = idsToFetch.next() + if (nextBatch.done) { + isDone = true + break 
+ } + + const batch = nextBatch.value + fetchPromises.push(processBatch(batch)) + } + + if (isDone) { + await Promise.all(fetchPromises) + batchesDone() + } + } + + processAllBatches() + await awaitBatches + + const flattenedResults = results.reduce((acc, result) => { + if ( + Array.isArray(result) && + result.length === 2 && + Array.isArray(result[0]) + ) { + return acc.concat(result[0]) + } + return acc.concat(result) + }, []) + + return flattenedResults + } + public async remoteFetchData( expand: RemoteExpandProperty, keyField: string, @@ -267,6 +365,19 @@ export class RemoteQuery { options.take = null } + if (ids && ids.length >= MAX_BATCH_SIZE && !hasPagination) { + const data = await this.fetchRemoteDataBatched({ + serviceName: serviceConfig.serviceName, + keyField, + service, + methodName, + filters, + options, + ids, + }) + return { data } + } + let result: any if (RemoteQuery.traceFetchRemoteData) { result = await RemoteQuery.traceFetchRemoteData( diff --git a/packages/core/utils/src/common/__tests__/unflatten-object-keys.spec.ts b/packages/core/utils/src/common/__tests__/unflatten-object-keys.spec.ts new file mode 100644 index 0000000000..a5008ffdfe --- /dev/null +++ b/packages/core/utils/src/common/__tests__/unflatten-object-keys.spec.ts @@ -0,0 +1,45 @@ +import { unflattenObjectKeys } from "../unflatten-object-keys" + +describe("unflattenWhereClauses", () => { + it("should unflatten where clauses", () => { + const where = { + "variants.sku": { $like: "%-1" }, + "variants.prices.amount": { $gt: 30 }, + "variants.prices.currency_code": "USD", + variants: { + prices: { + something: "else", + }, + }, + } + + const result = unflattenObjectKeys(where) + + expect(result).toEqual({ + variants: { + prices: { + something: "else", + amount: { + $gt: 30, + }, + currency_code: "USD", + }, + sku: { + $like: "%-1", + }, + }, + }) + }) + + it("should unflatten obj", () => { + const where = { + created_at: "ASC", + } + + const result = unflattenObjectKeys(where) + + 
expect(result).toEqual({ + created_at: "ASC", + }) + }) +}) diff --git a/packages/core/utils/src/common/index.ts b/packages/core/utils/src/common/index.ts index af5d0171b6..423c1e1b81 100644 --- a/packages/core/utils/src/common/index.ts +++ b/packages/core/utils/src/common/index.ts @@ -46,6 +46,8 @@ export * from "./load-env" export * from "./lower-case-first" export * from "./map-object-to" export * from "./medusa-container" +export * from "./merge-metadata" +export * from "./merge-plugin-modules" export * from "./normalize-import-path-with-source" export * from "./object-from-string-path" export * from "./object-to-string-path" @@ -76,10 +78,9 @@ export * from "./to-camel-case" export * from "./to-handle" export * from "./to-kebab-case" export * from "./to-pascal-case" +export * from "./to-unix-slash" export * from "./trim-zeros" +export * from "./unflatten-object-keys" export * from "./upper-case-first" export * from "./validate-handle" export * from "./wrap-handler" -export * from "./merge-plugin-modules" -export * from "./to-unix-slash" -export * from "./merge-metadata" \ No newline at end of file diff --git a/packages/core/utils/src/common/unflatten-object-keys.ts b/packages/core/utils/src/common/unflatten-object-keys.ts new file mode 100644 index 0000000000..b8158ce419 --- /dev/null +++ b/packages/core/utils/src/common/unflatten-object-keys.ts @@ -0,0 +1,68 @@ +import { isObject } from "./is-object" + +/** + * unFlatten object keys + * @example + * input: { + * "variants.sku": { $like: "%-1" }, + * "variants.prices.amount": { $gt: 30 }, + * "variants.prices.currency": "USD" + * } + * + * output: { + * { + * "variants": { + * "sku": { + * "$like": "%-1" + * }, + * "prices": { + * "amount": { + * "$gt": 30 + * }, + * "currency": "USD" + * } + * } + * } + * } + * + * @param input + */ +export function unflattenObjectKeys( + flattened: Record +): Record { + const result: Record = {} + + for (const key in flattened) { + if (!key.includes(".")) { + if 
(isObject(result[key])) { + result[key] = { ...result[key], ...flattened[key] } + } else { + result[key] = flattened[key] + } + } + } + + for (const key in flattened) { + if (key.includes(".")) { + const value = flattened[key] + const keys = key.split(".") + let current = result + + for (let i = 0; i < keys.length; i++) { + const part = keys[i] + + if (i === keys.length - 1) { + if (isObject(value) && current[part]) { + current[part] = { ...current[part], ...value } + } else { + current[part] = value + } + } else { + current = current[part] = current[part] ?? {} + } + } + } + } + + return result +} diff --git a/packages/medusa/src/api/admin/products/middlewares.ts b/packages/medusa/src/api/admin/products/middlewares.ts index bbbd111366..f71b14d62a 100644 --- a/packages/medusa/src/api/admin/products/middlewares.ts +++ b/packages/medusa/src/api/admin/products/middlewares.ts @@ -1,4 +1,5 @@ import { + featureFlagRouter, validateAndTransformBody, validateAndTransformQuery, } from "@medusajs/framework" @@ -35,6 +36,7 @@ import { CreateProduct, CreateProductVariant, } from "./validators" +import IndexEngineFeatureFlag from "../../../loaders/feature-flags/index-engine" // TODO: For now we keep the files in memory, as that's how they get passed to the workflows // This will need revisiting once we are closer to prod-ready v2, since with workflows and potentially @@ -50,11 +52,17 @@ export const adminProductRoutesMiddlewares: MiddlewareRoute[] = [ AdminGetProductsParams, QueryConfig.listProductQueryConfig ), - maybeApplyLinkFilter({ - entryPoint: "product_sales_channel", - resourceId: "product_id", - filterableField: "sales_channel_id", - }), + (req, res, next) => { + if (featureFlagRouter.isFeatureEnabled(IndexEngineFeatureFlag.key)) { + return next() + } + + return maybeApplyLinkFilter({ + entryPoint: "product_sales_channel", + resourceId: "product_id", + filterableField: "sales_channel_id", + })(req, res, next) + }, maybeApplyPriceListsFilter(), ], }, diff --git 
a/packages/medusa/src/api/admin/products/route.ts b/packages/medusa/src/api/admin/products/route.ts index cd384b954a..383850f60d 100644 --- a/packages/medusa/src/api/admin/products/route.ts +++ b/packages/medusa/src/api/admin/products/route.ts @@ -7,11 +7,33 @@ import { refetchEntity, } from "@medusajs/framework/http" import { remapKeysForProduct, remapProductResponse } from "./helpers" +import IndexEngineFeatureFlag from "../../../loaders/feature-flags/index-engine" +import { featureFlagRouter } from "@medusajs/framework" +import { ContainerRegistrationKeys, isPresent } from "@medusajs/framework/utils" export const GET = async ( req: AuthenticatedMedusaRequest, res: MedusaResponse ) => { + if (featureFlagRouter.isFeatureEnabled(IndexEngineFeatureFlag.key)) { + // TODO: These filters are not supported by the index engine yet + if ( + isPresent(req.filterableFields.tags) || + isPresent(req.filterableFields.categories) + ) { + return await getProducts(req, res) + } + + return await getProductsWithIndexEngine(req, res) + } + + return await getProducts(req, res) +} + +async function getProducts( + req: AuthenticatedMedusaRequest, + res: MedusaResponse +) { const selectFields = remapKeysForProduct(req.queryConfig.fields ?? []) const { rows: products, metadata } = await refetchEntities( @@ -30,6 +52,27 @@ export const GET = async ( }) } +async function getProductsWithIndexEngine( + req: AuthenticatedMedusaRequest, + res: MedusaResponse +) { + const query = req.scope.resolve(ContainerRegistrationKeys.QUERY) + + const { data: products, metadata } = await query.index({ + entity: "product", + fields: req.queryConfig.fields ?? 
[], + filters: req.filterableFields, + pagination: req.queryConfig.pagination, + }) + + res.json({ + products: products.map(remapProductResponse), + count: metadata!.count, + offset: metadata!.skip, + limit: metadata!.take, + }) +} + export const POST = async ( req: AuthenticatedMedusaRequest< HttpTypes.AdminCreateProduct & AdditionalData diff --git a/packages/medusa/src/api/store/products/middlewares.ts b/packages/medusa/src/api/store/products/middlewares.ts index 3b5282f9be..83e7a4942d 100644 --- a/packages/medusa/src/api/store/products/middlewares.ts +++ b/packages/medusa/src/api/store/products/middlewares.ts @@ -1,4 +1,7 @@ -import { validateAndTransformQuery } from "@medusajs/framework" +import { + featureFlagRouter, + validateAndTransformQuery, +} from "@medusajs/framework" import { applyDefaultFilters, applyParamsAsFilters, @@ -16,6 +19,7 @@ import { } from "../../utils/middlewares" import * as QueryConfig from "./query-config" import { StoreGetProductsParams } from "./validators" +import IndexEngineFeatureFlag from "../../../loaders/feature-flags/index-engine" export const storeProductRoutesMiddlewares: MiddlewareRoute[] = [ { @@ -30,11 +34,17 @@ export const storeProductRoutesMiddlewares: MiddlewareRoute[] = [ QueryConfig.listProductQueryConfig ), filterByValidSalesChannels(), - maybeApplyLinkFilter({ - entryPoint: "product_sales_channel", - resourceId: "product_id", - filterableField: "sales_channel_id", - }), + (req, res, next) => { + if (featureFlagRouter.isFeatureEnabled(IndexEngineFeatureFlag.key)) { + return next() + } + + return maybeApplyLinkFilter({ + entryPoint: "product_sales_channel", + resourceId: "product_id", + filterableField: "sales_channel_id", + })(req, res, next) + }, applyDefaultFilters({ status: ProductStatus.PUBLISHED, // TODO: the type here seems off and the implementation does not take into account $and and $or possible filters. 
Might be worth re working (original type used here was StoreGetProductsParamsType) diff --git a/packages/medusa/src/api/store/products/route.ts b/packages/medusa/src/api/store/products/route.ts index 6db5f57ec2..96d75eb70b 100644 --- a/packages/medusa/src/api/store/products/route.ts +++ b/packages/medusa/src/api/store/products/route.ts @@ -1,17 +1,95 @@ +import { featureFlagRouter } from "@medusajs/framework" +import { MedusaResponse } from "@medusajs/framework/http" +import { HttpTypes } from "@medusajs/framework/types" import { ContainerRegistrationKeys, isPresent, + QueryContext, remoteQueryObjectFromString, } from "@medusajs/framework/utils" -import { MedusaResponse } from "@medusajs/framework/http" +import IndexEngineFeatureFlag from "../../../loaders/feature-flags/index-engine" import { wrapVariantsWithInventoryQuantityForSalesChannel } from "../../utils/middlewares" import { RequestWithContext, wrapProductsWithTaxPrices } from "./helpers" -import { HttpTypes } from "@medusajs/framework/types" export const GET = async ( req: RequestWithContext, res: MedusaResponse ) => { + if (featureFlagRouter.isFeatureEnabled(IndexEngineFeatureFlag.key)) { + // TODO: These filters are not supported by the index engine yet + if ( + isPresent(req.filterableFields.tags) || + isPresent(req.filterableFields.categories) + ) { + return await getProducts(req, res) + } + + return await getProductsWithIndexEngine(req, res) + } + + return await getProducts(req, res) +} + +async function getProductsWithIndexEngine( + req: RequestWithContext, + res: MedusaResponse +) { + const query = req.scope.resolve(ContainerRegistrationKeys.QUERY) + + const context: object = {} + const withInventoryQuantity = req.queryConfig.fields.some((field) => + field.includes("variants.inventory_quantity") + ) + + if (withInventoryQuantity) { + req.queryConfig.fields = req.queryConfig.fields.filter( + (field) => !field.includes("variants.inventory_quantity") + ) + } + + if (isPresent(req.pricingContext)) { + 
context["variants"] ??= {} + context["variants.calculated_price"] = QueryContext(req.pricingContext!) + } + + const filters: Record = req.filterableFields + if (isPresent(filters.sales_channel_id)) { + const salesChannelIds = filters.sales_channel_id + + filters["sales_channels"] ??= {} + filters["sales_channels"]["id"] = salesChannelIds + + delete filters.sales_channel_id + } + + const { data: products = [], metadata } = await query.index({ + entity: "product", + fields: req.queryConfig.fields, + filters, + pagination: req.queryConfig.pagination, + context, + }) + + if (withInventoryQuantity) { + await wrapVariantsWithInventoryQuantityForSalesChannel( + req, + products.map((product) => product.variants).flat(1) + ) + } + + await wrapProductsWithTaxPrices(req, products) + res.json({ + products, + count: metadata!.count, + offset: metadata!.skip, + limit: metadata!.take, + }) +} + +async function getProducts( + req: RequestWithContext, + res: MedusaResponse +) { const remoteQuery = req.scope.resolve(ContainerRegistrationKeys.REMOTE_QUERY) const context: object = {} const withInventoryQuantity = req.queryConfig.fields.some((field) => diff --git a/packages/medusa/src/api/utils/common-validators/products/index.ts b/packages/medusa/src/api/utils/common-validators/products/index.ts index f377fee22f..9706e62303 100644 --- a/packages/medusa/src/api/utils/common-validators/products/index.ts +++ b/packages/medusa/src/api/utils/common-validators/products/index.ts @@ -1,5 +1,5 @@ -import { FilterableProductProps } from "@medusajs/framework/types" -import { ProductStatus } from "@medusajs/framework/utils" +import { FilterableProductProps, OperatorMap } from "@medusajs/framework/types" +import { isPresent, ProductStatus } from "@medusajs/framework/utils" import { z } from "zod" import { createOperatorMap } from "../../validators" import { booleanString } from "../common" @@ -36,14 +36,19 @@ type HttpProductFilters = FilterableProductProps & { export const transformProductParams 
= ( data: HttpProductFilters ): FilterableProductProps => { - const res = { + const res: HttpProductFilters = { ...data, - tags: { id: data.tag_id }, - categories: { id: data.category_id }, } - delete res.tag_id - delete res.category_id + if (isPresent(data.tag_id)) { + res.tags = { id: data.tag_id as string[] } + delete res.tag_id + } + + if (isPresent(data.category_id)) { + res.categories = { id: data.category_id as OperatorMap } + delete res.category_id + } return res as FilterableProductProps } diff --git a/packages/medusa/src/loaders/feature-flags/index-engine.ts b/packages/medusa/src/loaders/feature-flags/index-engine.ts new file mode 100644 index 0000000000..510a180b3e --- /dev/null +++ b/packages/medusa/src/loaders/feature-flags/index-engine.ts @@ -0,0 +1,10 @@ +import { FlagSettings } from "@medusajs/framework/feature-flags" + +const IndexEngineFeatureFlag: FlagSettings = { + key: "index_engine", + default_val: false, + env_key: "MEDUSA_FF_INDEX_ENGINE", + description: "Enable Medusa to use the index engine in some part of the core", +} + +export default IndexEngineFeatureFlag diff --git a/packages/modules/index/integration-tests/__fixtures__/schema.ts b/packages/modules/index/integration-tests/__fixtures__/schema.ts index 17dfeb3804..e5e1f96a36 100644 --- a/packages/modules/index/integration-tests/__fixtures__/schema.ts +++ b/packages/modules/index/integration-tests/__fixtures__/schema.ts @@ -2,6 +2,8 @@ export const schema = ` type Product @Listeners(values: ["product.created", "product.updated", "product.deleted"]) { id: String title: String + created_at: DateTime + deep: InternalNested variants: [ProductVariant] } diff --git a/packages/modules/index/integration-tests/__tests__/config-sync.spec.ts b/packages/modules/index/integration-tests/__tests__/config-sync.spec.ts index 259858357c..6a6dd82747 100644 --- a/packages/modules/index/integration-tests/__tests__/config-sync.spec.ts +++ 
b/packages/modules/index/integration-tests/__tests__/config-sync.spec.ts @@ -133,7 +133,7 @@ describe("IndexModuleService syncIndexConfig", function () { afterEach(afterEach_) - it("should full sync all entities when the config has changed", async () => { + it("should full sync all entities when the config has changed", async () => { await setTimeout(1000) const currentMetadata = await indexMetadataService.list() @@ -148,7 +148,7 @@ describe("IndexModuleService syncIndexConfig", function () { }), expect.objectContaining({ entity: "Product", - fields: "id,title", + fields: "created_at,id,title", status: "done", }), expect.objectContaining({ diff --git a/packages/modules/index/integration-tests/__tests__/data-synchronizer.spec.ts b/packages/modules/index/integration-tests/__tests__/data-synchronizer.spec.ts index 684fd33af2..17f48c5b54 100644 --- a/packages/modules/index/integration-tests/__tests__/data-synchronizer.spec.ts +++ b/packages/modules/index/integration-tests/__tests__/data-synchronizer.spec.ts @@ -199,7 +199,7 @@ describe("DataSynchronizer", () => { filters: { id: [testProductId], }, - fields: ["id", "title"], + fields: ["id", "created_at", "title"], }) // Second loop fetching products @@ -225,7 +225,7 @@ describe("DataSynchronizer", () => { filters: { id: [testProductId2], }, - fields: ["id", "title"], + fields: ["id", "created_at", "title"], }) expect(ackMock).toHaveBeenNthCalledWith(1, { diff --git a/packages/modules/index/integration-tests/__tests__/index-engine-module.spec.ts b/packages/modules/index/integration-tests/__tests__/index-engine-module.spec.ts index 10b5a20c05..7e244f5624 100644 --- a/packages/modules/index/integration-tests/__tests__/index-engine-module.spec.ts +++ b/packages/modules/index/integration-tests/__tests__/index-engine-module.spec.ts @@ -30,29 +30,34 @@ const dbUtils = TestDatabaseUtils.dbTestUtilFactory() jest.setTimeout(300000) const productId = "prod_1" +const productId2 = "prod_2" const variantId = "var_1" +const 
variantId2 = "var_2" const priceSetId = "price_set_1" const priceId = "money_amount_1" const linkId = "link_id_1" const sendEvents = async (eventDataToEmit) => { - let a = 0 + let productCounter = 0 + let variantCounter = 0 queryMock.graph = jest.fn().mockImplementation((query) => { const entity = query.entity if (entity === "product") { return { data: { - id: a++ > 0 ? "aaaa" : productId, + id: productCounter++ > 0 ? productId2 : productId, + title: "Test Product " + productCounter, }, } } else if (entity === "product_variant") { + const counter = variantCounter++ return { data: { - id: variantId, + id: counter > 0 ? variantId2 : variantId, sku: "aaa test aaa", product: { - id: productId, + id: counter > 0 ? productId2 : productId, }, }, } @@ -374,7 +379,16 @@ describe("IndexModuleService", function () { { name: "product.created", data: { - id: "PRODUCTASDASDAS", + id: productId2, + }, + }, + { + name: "variant.created", + data: { + id: variantId2, + product: { + id: productId2, + }, }, }, { @@ -426,14 +440,46 @@ describe("IndexModuleService", function () { }) expect(productIndexEntries).toHaveLength(2) - expect(productIndexEntries[0].id).toEqual(productId) + expect(productIndexEntries).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + id: productId, + data: expect.objectContaining({ + id: productId, + title: expect.stringContaining("Test Product"), + }), + }), + expect.objectContaining({ + id: productId2, + data: expect.objectContaining({ + id: productId2, + title: expect.stringContaining("Test Product"), + }), + }), + ]) + ) const variantIndexEntries = indexEntries.filter((entry) => { return entry.name === "ProductVariant" }) - expect(variantIndexEntries).toHaveLength(1) - expect(variantIndexEntries[0].id).toEqual(variantId) + expect(variantIndexEntries).toHaveLength(2) + expect(variantIndexEntries).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + id: variantId, + data: expect.objectContaining({ + id: variantId, + }), + }), + 
expect.objectContaining({ + id: variantId2, + data: expect.objectContaining({ + id: variantId2, + }), + }), + ]) + ) const priceSetIndexEntries = indexEntries.filter((entry) => { return entry.name === "PriceSet" @@ -461,7 +507,7 @@ describe("IndexModuleService", function () { {} ) - expect(indexRelationEntries).toHaveLength(4) + expect(indexRelationEntries).toHaveLength(5) const productVariantIndexRelationEntries = indexRelationEntries.filter( (entry) => { diff --git a/packages/modules/index/integration-tests/__tests__/query-builder.spec.ts b/packages/modules/index/integration-tests/__tests__/query-builder.spec.ts index 43848846b3..35562b40de 100644 --- a/packages/modules/index/integration-tests/__tests__/query-builder.spec.ts +++ b/packages/modules/index/integration-tests/__tests__/query-builder.spec.ts @@ -414,7 +414,19 @@ describe("IndexModuleService query", function () { }, }) + // NULLS LAST (DESC = first) expect(data).toEqual([ + { + id: "prod_2", + title: "Product 2 title", + deep: { + a: 1, + obj: { + b: 15, + }, + }, + variants: [], + }, { id: "prod_1", variants: [ @@ -440,17 +452,6 @@ describe("IndexModuleService query", function () { }, ], }, - { - id: "prod_2", - title: "Product 2 title", - deep: { - a: 1, - obj: { - b: 15, - }, - }, - variants: [], - }, ]) const { data: dataAsc } = await module.query({ @@ -469,17 +470,6 @@ describe("IndexModuleService query", function () { }) expect(dataAsc).toEqual([ - { - id: "prod_2", - title: "Product 2 title", - deep: { - a: 1, - obj: { - b: 15, - }, - }, - variants: [], - }, { id: "prod_1", variants: [ @@ -505,6 +495,17 @@ describe("IndexModuleService query", function () { }, ], }, + { + id: "prod_2", + title: "Product 2 title", + deep: { + a: 1, + obj: { + b: 15, + }, + }, + variants: [], + }, ]) }) @@ -565,6 +566,11 @@ describe("IndexModuleService query", function () { pagination: { take: 100, skip: 0, + order: { + product: { + created_at: "ASC", + }, + }, }, }) @@ -596,7 +602,7 @@ describe("IndexModuleService 
query", function () { product: { variants: { prices: { - amount: "DESC", + amount: "ASC", }, }, }, @@ -608,14 +614,14 @@ describe("IndexModuleService query", function () { { id: "prod_1", variants: [ - { - id: "var_1", - sku: "aaa test aaa", - }, { id: "var_2", sku: "sku 123", }, + { + id: "var_1", + sku: "aaa test aaa", + }, ], }, { diff --git a/packages/modules/index/src/migrations/Migration20250218132404.ts b/packages/modules/index/src/migrations/Migration20250218132404.ts new file mode 100644 index 0000000000..cf9bb7aad7 --- /dev/null +++ b/packages/modules/index/src/migrations/Migration20250218132404.ts @@ -0,0 +1,53 @@ +import { Migration } from "@mikro-orm/migrations" + +export class Migration20250218132404 extends Migration { + override async up(): Promise { + this.addSql( + ` + ALTER TABLE index_data + ADD COLUMN document_tsv tsvector; + ` + ) + this.addSql( + ` + UPDATE index_data + SET document_tsv = to_tsvector('simple', ( + SELECT string_agg(value, ' ') + FROM jsonb_each_text(data) + )); + ` + ) + this.addSql( + ` + CREATE INDEX idx_documents_document_tsv + ON index_data + USING gin(document_tsv); + ` + ) + this.addSql( + ` + CREATE OR REPLACE FUNCTION update_document_tsv() RETURNS trigger AS $$ + BEGIN + NEW.document_tsv := to_tsvector('simple', ( + SELECT string_agg(value, ' ') + FROM jsonb_each_text(NEW.data) + )); + RETURN NEW; + END; + $$ LANGUAGE plpgsql; + + CREATE TRIGGER trg_update_document_tsv + BEFORE INSERT OR UPDATE ON index_data + FOR EACH ROW + EXECUTE FUNCTION update_document_tsv(); + ` + ) + } + + override async down(): Promise { + this.addSql(`DROP TRIGGER IF EXISTS trg_update_document_tsv ON index_data;`) + this.addSql(`DROP FUNCTION IF EXISTS update_document_tsv;`) + this.addSql(`DROP INDEX IF EXISTS idx_documents_document_tsv;`) + this.addSql(`ALTER TABLE index_data DROP COLUMN IF EXISTS document_tsv;`) + } +} diff --git a/packages/modules/index/src/models/index-data.ts b/packages/modules/index/src/models/index-data.ts index 
580de3d93b..df34af5d3f 100644 --- a/packages/modules/index/src/models/index-data.ts +++ b/packages/modules/index/src/models/index-data.ts @@ -5,6 +5,7 @@ const IndexData = model.define("IndexData", { name: model.text().primaryKey(), data: model.json().default({}), staled_at: model.dateTime().nullable(), + // document_tsv: model.tsvector(), NOTE: This is not supported and it is here for reference of its counter part in the migration }) export default IndexData diff --git a/packages/modules/index/src/services/index-module-service.ts b/packages/modules/index/src/services/index-module-service.ts index bf399176c4..c35d4ec04e 100644 --- a/packages/modules/index/src/services/index-module-service.ts +++ b/packages/modules/index/src/services/index-module-service.ts @@ -178,8 +178,14 @@ export default class IndexModuleService return this.schemaObjectRepresentation_ } + const baseSchema = ` + scalar DateTime + scalar Date + scalar Time + scalar JSON + ` const [objectRepresentation, entityMap] = buildSchemaObjectRepresentation( - this.moduleOptions_.schema ?? defaultSchema + baseSchema + (this.moduleOptions_.schema ?? defaultSchema) ) this.schemaObjectRepresentation_ = objectRepresentation diff --git a/packages/modules/index/src/services/postgres-provider.ts b/packages/modules/index/src/services/postgres-provider.ts index 2f722acfc0..6a4c3f5d99 100644 --- a/packages/modules/index/src/services/postgres-provider.ts +++ b/packages/modules/index/src/services/postgres-provider.ts @@ -13,8 +13,8 @@ import { InjectTransactionManager, isDefined, MedusaContext, - promiseAll, toMikroORMEntity, + unflattenObjectKeys, } from "@medusajs/framework/utils" import { EntityManager, @@ -250,10 +250,11 @@ export class PostgresProvider implements IndexTypes.StorageProvider { const { take, skip, order: inputOrderBy = {} } = config.pagination ?? 
{} const select = normalizeFieldsSelection(fields) - const where = flattenObjectKeys(filters) + const where = flattenObjectKeys(unflattenObjectKeys(filters)) - const joinWhere = flattenObjectKeys(joinFilters) - const orderBy = flattenObjectKeys(inputOrderBy) + const inputOrderByObj = unflattenObjectKeys(inputOrderBy) + const joinWhere = flattenObjectKeys(unflattenObjectKeys(joinFilters)) + const orderBy = flattenObjectKeys(inputOrderByObj) const { manager } = sharedContext as { manager: SqlEntityManager } let hasPagination = false @@ -266,7 +267,10 @@ export class PostgresProvider implements IndexTypes.StorageProvider { } } - const requestedFields = deepMerge(deepMerge(select, filters), inputOrderBy) + const requestedFields = deepMerge( + deepMerge(select, filters), + inputOrderByObj + ) const connection = manager.getConnection() const qb = new QueryBuilder({ @@ -288,26 +292,20 @@ export class PostgresProvider implements IndexTypes.StorageProvider { requestedFields, }) - const [sql, sqlCount] = qb.buildQuery({ + const sql = qb.buildQuery({ hasPagination, returnIdOnly: !!keepFilteredEntities, hasCount, }) - const promises: Promise[] = [] - - promises.push(manager.execute(sql)) - - if (hasCount && sqlCount) { - promises.push(manager.execute(sqlCount)) - } - - let [resultSet, count] = await promiseAll(promises) + const resultSet = await manager.execute(sql) const resultMetadata: IndexTypes.QueryFunctionReturnPagination | undefined = hasPagination ? { - count: hasCount ? parseInt(count[0].count) : undefined, + count: hasCount + ? parseInt(resultSet[0]?.count_total ?? 
0) + : undefined, skip, take, } @@ -436,7 +434,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider { { onConflictAction: "merge", onConflictFields: ["id", "name"], - onConflictMergeFields: ["data", "staled_at"], + onConflictMergeFields: ["staled_at"], } ) diff --git a/packages/modules/index/src/types/index.ts b/packages/modules/index/src/types/index.ts index cd1e1c3f2a..8a4cde818c 100644 --- a/packages/modules/index/src/types/index.ts +++ b/packages/modules/index/src/types/index.ts @@ -1,6 +1,10 @@ export const schemaObjectRepresentationPropertiesToOmit = [ "_schemaPropertiesMap", "_serviceNameModuleConfigMap", + "JSON", + "DateTime", + "Date", + "Time", ] export type Select = { diff --git a/packages/modules/index/src/utils/build-config.ts b/packages/modules/index/src/utils/build-config.ts index 7e5d59fe01..6e72e51b07 100644 --- a/packages/modules/index/src/utils/build-config.ts +++ b/packages/modules/index/src/utils/build-config.ts @@ -21,7 +21,13 @@ export const CustomDirectives = { export function makeSchemaExecutable(inputSchema: string) { const { schema: cleanedSchema } = GraphQLUtils.cleanGraphQLSchema(inputSchema) - return GraphQLUtils.makeExecutableSchema({ typeDefs: cleanedSchema }) + if (!cleanedSchema) { + return + } + + return GraphQLUtils.makeExecutableSchema({ + typeDefs: cleanedSchema, + }) } function extractNameFromAlias( @@ -68,9 +74,9 @@ function retrieveModuleAndAlias(entityName, moduleJoinerConfigs) { if (moduleSchema) { const executableSchema = makeSchemaExecutable(moduleSchema) - const entitiesMap = executableSchema.getTypeMap() + const entitiesMap = executableSchema?.getTypeMap() - if (entitiesMap[entityName]) { + if (entitiesMap?.[entityName]) { relatedModule = moduleJoinerConfig } } @@ -191,6 +197,10 @@ function retrieveLinkModuleAndAlias({ const executableSchema = makeSchemaExecutable( foreignModuleConfig.schema ) + if (!executableSchema) { + continue + } + const entitiesMap = executableSchema.getTypeMap() let 
intermediateEntities: string[] = [] @@ -704,7 +714,7 @@ export function buildSchemaObjectRepresentation( ): [IndexTypes.SchemaObjectRepresentation, Record] { const moduleJoinerConfigs = MedusaModule.getAllJoinerConfigs() const augmentedSchema = CustomDirectives.Listeners.definition + schema - const executableSchema = makeSchemaExecutable(augmentedSchema) + const executableSchema = makeSchemaExecutable(augmentedSchema)! const entitiesMap = executableSchema.getTypeMap() const objectRepresentation = { diff --git a/packages/modules/index/src/utils/default-schema.ts b/packages/modules/index/src/utils/default-schema.ts index 313e996c78..2966d95b2f 100644 --- a/packages/modules/index/src/utils/default-schema.ts +++ b/packages/modules/index/src/utils/default-schema.ts @@ -4,14 +4,24 @@ export const defaultSchema = ` type Product @Listeners(values: ["${Modules.PRODUCT}.product.created", "${Modules.PRODUCT}.product.updated", "${Modules.PRODUCT}.product.deleted"]) { id: String title: String + handle: String + status: String + type_id: String + collection_id: String + is_giftcard: String + external_id: String + created_at: DateTime + updated_at: DateTime + variants: [ProductVariant] sales_channels: [SalesChannel] } - + type ProductVariant @Listeners(values: ["${Modules.PRODUCT}.product-variant.created", "${Modules.PRODUCT}.product-variant.updated", "${Modules.PRODUCT}.product-variant.deleted"]) { id: String product_id: String sku: String + prices: [Price] } diff --git a/packages/modules/index/src/utils/gql-to-types.ts b/packages/modules/index/src/utils/gql-to-types.ts index 6e6e91ddaf..83248cf476 100644 --- a/packages/modules/index/src/utils/gql-to-types.ts +++ b/packages/modules/index/src/utils/gql-to-types.ts @@ -1,15 +1,15 @@ -import { join } from "path" -import { CustomDirectives, makeSchemaExecutable } from "./build-config" import { MedusaModule } from "@medusajs/framework/modules-sdk" import { FileSystem, gqlSchemaToTypes as ModulesSdkGqlSchemaToTypes, } from 
"@medusajs/framework/utils" +import { join } from "path" import * as process from "process" +import { CustomDirectives, makeSchemaExecutable } from "./build-config" export async function gqlSchemaToTypes(schema: string) { const augmentedSchema = CustomDirectives.Listeners.definition + schema - const executableSchema = makeSchemaExecutable(augmentedSchema) + const executableSchema = makeSchemaExecutable(augmentedSchema)! const filename = "index-service-entry-points" const filenameWithExt = filename + ".d.ts" const dir = join(process.cwd(), ".medusa") diff --git a/packages/modules/index/src/utils/query-builder.ts b/packages/modules/index/src/utils/query-builder.ts index 63723640ba..e158c5036c 100644 --- a/packages/modules/index/src/utils/query-builder.ts +++ b/packages/modules/index/src/utils/query-builder.ts @@ -4,6 +4,7 @@ import { isObject, isPresent, isString, + unflattenObjectKeys, } from "@medusajs/framework/utils" import { Knex } from "@mikro-orm/knex" import { OrderBy, QueryFormat, QueryOptions, Select } from "@types" @@ -22,6 +23,8 @@ export const OPERATOR_MAP = { } export class QueryBuilder { + #searchVectorColumnName = "document_tsv" + private readonly structure: Select private readonly entityMap: Record private readonly knex: Knex @@ -82,6 +85,7 @@ export class QueryBuilder { private getGraphQLType(path, field) { const entity = this.getEntity(path)?.ref?.entity! 
const fieldRef = this.entityMap[entity]._fields[field] + if (!fieldRef) { throw new Error(`Field ${field} is not indexed.`) } @@ -111,6 +115,7 @@ export class QueryBuilder { Boolean: (val) => Boolean(val), ID: (val) => String(val), Date: (val) => new Date(val).toISOString(), + DateTime: (val) => new Date(val).toISOString(), Time: (val) => new Date(`1970-01-01T${val}Z`).toISOString(), } @@ -132,6 +137,7 @@ export class QueryBuilder { Float: "::double precision", Boolean: "::boolean", Date: "::timestamp", + DateTime: "::timestamp", Time: "::time", "": "", } @@ -141,6 +147,7 @@ export class QueryBuilder { Float: "0", Boolean: "false", Date: "1970-01-01 00:00:00", + DateTime: "1970-01-01 00:00:00", Time: "00:00:00", "": "", } @@ -560,9 +567,10 @@ export class QueryBuilder { hasPagination?: boolean hasCount?: boolean returnIdOnly?: boolean - }): [string, string | null] { + }): string { const queryBuilder = this.knex.queryBuilder() + const selectOnlyStructure = this.selector.select const structure = this.requestedFields const filter = this.selector.where ?? {} @@ -579,6 +587,16 @@ export class QueryBuilder { const rootEntity = entity.toLowerCase() const aliasMapping: { [path: string]: string } = {} + let hasTextSearch: boolean = false + let textSearchQuery: string | null = null + const searchQueryFilterProp = `${rootEntity}.q` + + if (filter[searchQueryFilterProp]) { + hasTextSearch = true + textSearchQuery = filter[searchQueryFilterProp] + delete filter[searchQueryFilterProp] + } + const joinParts = this.buildQueryParts( rootStructure, "", @@ -591,7 +609,11 @@ export class QueryBuilder { const rootAlias = aliasMapping[rootKey] const selectParts = !returnIdOnly - ? this.buildSelectParts(rootStructure, rootKey, aliasMapping) + ? 
this.buildSelectParts( + selectOnlyStructure[rootKey] as Select, + rootKey, + aliasMapping + ) : { [rootKey + ".id"]: `${rootAlias}.id` } queryBuilder.select(selectParts) @@ -604,6 +626,36 @@ queryBuilder.joinRaw(joinPart) }) + let searchWhereParts: string[] = [] + if (hasTextSearch) { + /** + * Build the search where parts for the query. + * Apply the search query to the search vector column for every joined table except + * the pivot joined table. NOTE(review): textSearchQuery is interpolated directly into the SQL string; consider parameter binding to prevent SQL injection. + */ + searchWhereParts = [ + `${this.getShortAlias(aliasMapping, rootEntity)}.${ + this.#searchVectorColumnName + } @@ plainto_tsquery('simple', '${textSearchQuery}')`, + ...joinParts.flatMap((part) => { + const aliases = part + .split(" as ") + .flatMap((chunk) => chunk.split(" on ")) + .filter( + (alias) => alias.startsWith('"t_') && !alias.includes("_ref") + ) + return aliases.map( + (alias) => + `${alias}.${ + this.#searchVectorColumnName + } @@ plainto_tsquery('simple', '${textSearchQuery}')` + ) + }), + ] + + queryBuilder.whereRaw(`(${searchWhereParts.join(" OR ")})`) + } + // WHERE clause this.parseWhere(aliasMapping, filter, queryBuilder) @@ -618,49 +670,60 @@ const direction = orderBy[aliasPath] queryBuilder.orderByRaw( - pgType.coalesce(`${alias}.data->>'${field}'`) + " " + direction + `(${alias}.data->>'${field}')${pgType.cast}` + " " + direction ) } - let distinctQueryBuilder = queryBuilder.clone() - let take_ = !isNaN(+take!) ? +take! : 15 let skip_ = !isNaN(+skip!) ? +skip! 
: 0 - let sql = "" + let cte = "" if (hasPagination) { - const idColumn = `${this.getShortAlias(aliasMapping, rootEntity)}.id` - distinctQueryBuilder.clearSelect() - distinctQueryBuilder.select( - this.knex.raw(`DISTINCT ON (${idColumn}) ${idColumn} as "id"`) - ) - distinctQueryBuilder.limit(take_) - distinctQueryBuilder.offset(skip_) + cte = this.buildCTEData({ + hasCount, + searchWhereParts, + take: take_, + skip: skip_, + orderBy, + }) - sql += `WITH paginated_data AS (${distinctQueryBuilder.toQuery()}),` + if (hasCount) { + queryBuilder.select(this.knex.raw("pd.count_total")) + } - queryBuilder.andWhere( - this.knex.raw(`${idColumn} IN (SELECT id FROM "paginated_data")`) + queryBuilder.joinRaw( + `JOIN paginated_data AS pd ON ${rootAlias}.id = pd.id` ) } - sql += `${hasPagination ? " " : "WITH"} data AS (${queryBuilder.toQuery()}) - SELECT * - FROM data` - - let sqlCount = "" - if (hasCount) { - sqlCount = this.buildQueryCount() - } - - return [sql, hasCount ? sqlCount : null] + return cte + queryBuilder.toQuery() } - public buildQueryCount(): string { + public buildCTEData({ + hasCount, + searchWhereParts = [], + skip, + take, + orderBy, + }: { + hasCount: boolean + searchWhereParts: string[] + skip?: number + take: number + orderBy: OrderBy + }): string { const queryBuilder = this.knex.queryBuilder() - const hasWhere = isPresent(this.rawConfig?.filters) - const structure = hasWhere ? this.rawConfig?.filters! : this.requestedFields + const hasWhere = isPresent(this.rawConfig?.filters) || isPresent(orderBy) + const structure = + hasWhere && !searchWhereParts.length + ? unflattenObjectKeys({ + ...(this.rawConfig?.filters + ? 
unflattenObjectKeys(this.rawConfig?.filters) + : {}), + ...orderBy, + }) + : this.requestedFields const rootKey = this.getStructureKeys(structure)[0] @@ -682,9 +745,7 @@ export class QueryBuilder { const rootAlias = aliasMapping[rootKey] - queryBuilder.select( - this.knex.raw(`COUNT(DISTINCT ${rootAlias}.id) as count`) - ) + queryBuilder.select(this.knex.raw(`${rootAlias}.id as id`)) queryBuilder.from( `cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootEntity)}` @@ -695,10 +756,58 @@ export class QueryBuilder { queryBuilder.joinRaw(joinPart) }) + if (searchWhereParts.length) { + queryBuilder.whereRaw(`(${searchWhereParts.join(" OR ")})`) + } + this.parseWhere(aliasMapping, this.selector.where!, queryBuilder) } - return queryBuilder.toQuery() + // ORDER BY clause + const orderAliases: string[] = [] + for (const aliasPath in orderBy) { + const path = aliasPath.split(".") + const field = path.pop() + const attr = path.join(".") + + const pgType = this.getPostgresCastType(attr, [field]) + + const alias = aliasMapping[attr] + const direction = orderBy[aliasPath] + + const orderAlias = `"${alias}.data->>'${field}'"` + orderAliases.push(orderAlias + " " + direction) + + // transform the order by clause to a select MIN/MAX + queryBuilder.select( + direction === "ASC" + ? this.knex.raw( + `MIN((${alias}.data->>'${field}')${pgType.cast}) as ${orderAlias}` + ) + : this.knex.raw( + `MAX((${alias}.data->>'${field}')${pgType.cast}) as ${orderAlias}` + ) + ) + } + + queryBuilder.groupByRaw(`${rootAlias}.id`) + + const countSubQuery = hasCount + ? `, (SELECT count(id) FROM data_select) as count_total` + : "" + + return ` + WITH data_select AS ( + ${queryBuilder.toQuery()} + ), + paginated_data AS ( + SELECT id ${countSubQuery} + FROM data_select + ${orderAliases.length ? "ORDER BY " + orderAliases.join(", ") : ""} + LIMIT ${take} + ${skip ? `OFFSET ${skip}` : ""} + ) + ` } // NOTE: We are keeping the bellow code for now as reference to alternative implementation for us. 
DO NOT REMOVE