feat(index): add filterable fields to link definition (#11898)
* feat(index): add filterable fields to link definition
* rm comment
* break recursion
* validate read-only links
* validate filterable
* gql schema array
* link parents
* isInverse
* push id when not present
* Fix circular relationships and add tests to ensure proper behaviour (part 1)
* log and fall back to entity.alias
* cleanup and fixes
* cleanup and fixes
* cleanup and fixes
* fix get attributes
* gql type
* unit test
* array inference
* rm only
* package.json
* package.json
* fix link retrieval on duplicated entity type and aliases + tests
* link parents as array
* Match only parent entity
* rm comment
* remove hard-coded schema
* extend types
* unit test
* test
* types
* pagination type
* type
* fix integration tests
* Improve performance of IN selection
* use @@ to filter property
* escape jsonPath
* add Event Bus by default
* changeset
* rm postgres analyze
* estimate count
* new query
* parent aliases
* inner query w/ filter and sort relations
* address comments

Co-authored-by: adrien2p <adrien.deperetti@gmail.com>
Co-authored-by: Oli Juhl <59018053+olivermrbl@users.noreply.github.com>
Committed by: GitHub
Parent: 8a3f639f01
Commit: b868a4ef4d
.changeset/small-jokes-shake.md (new file, 11 lines)
@@ -0,0 +1,11 @@
---
"@medusajs/index": patch
"@medusajs/types": patch
"@medusajs/utils": patch
"@medusajs/framework": patch
"@medusajs/link-modules": patch
"@medusajs/pricing": patch
"@medusajs/modules-sdk": patch
---

feat(index): add filterable fields to link definition
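Illustrative example (not part of this diff): with this change, a link definition can declare which fields the Index module may filter on via the new `filterable` option. The brand module and its field names below are hypothetical; the import paths follow the usual Medusa v2 layout.

import { defineLink } from "@medusajs/framework/utils"
import ProductModule from "@medusajs/medusa/product"
import BrandModule from "../modules/brand"

export default defineLink(
  {
    linkable: ProductModule.linkable.product,
    // fields of the product side that query.index may filter on
    filterable: ["id", "title", "handle"],
  },
  {
    linkable: BrandModule.linkable.brand,
    filterable: ["id", "name"],
  }
)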
@@ -143,7 +143,7 @@ medusaIntegrationTestRunner({
|
||||
)
|
||||
|
||||
expect(resultset.metadata).toEqual({
|
||||
count: 2,
|
||||
estimate_count: expect.any(Number),
|
||||
skip: 0,
|
||||
take: 10,
|
||||
})
|
||||
|
||||
@@ -59,7 +59,7 @@ async function populateData(
|
||||
}
|
||||
|
||||
medusaIntegrationTestRunner({
|
||||
testSuite: ({ getContainer, dbConnection, api, dbConfig }) => {
|
||||
testSuite: ({ getContainer, dbConnection, api }) => {
|
||||
let indexEngine: IndexTypes.IIndexService
|
||||
let appContainer
|
||||
|
||||
|
||||
@@ -93,6 +93,7 @@ medusaIntegrationTestRunner({
|
||||
;(indexEngine as any).storageProvider_.onApplicationStart = jest.fn()
|
||||
|
||||
// Trigger a sync
|
||||
;(indexEngine as any).schemaObjectRepresentation_ = null
|
||||
await (indexEngine as any).onApplicationStart_()
|
||||
|
||||
// 28 ms - 6511 records
|
||||
@@ -138,6 +139,7 @@ medusaIntegrationTestRunner({
|
||||
;(indexEngine as any).storageProvider_.onApplicationStart = jest.fn()
|
||||
|
||||
// Trigger a sync
|
||||
;(indexEngine as any).schemaObjectRepresentation_ = null
|
||||
await (indexEngine as any).onApplicationStart_()
|
||||
|
||||
const { data: results } = await indexEngine.query<"product">({
|
||||
@@ -172,8 +174,8 @@ medusaIntegrationTestRunner({
|
||||
}
|
||||
`,
|
||||
}
|
||||
|
||||
// Trigger a sync
|
||||
;(indexEngine as any).schemaObjectRepresentation_ = null
|
||||
await (indexEngine as any).onApplicationStart_()
|
||||
await setTimeout(3000)
|
||||
|
||||
|
||||
@@ -26,6 +26,7 @@ import {
|
||||
} from "@medusajs/utils"
|
||||
import { pgConnectionLoader } from "./database"
|
||||
|
||||
import type { Knex } from "@mikro-orm/knex"
|
||||
import { aliasTo, asValue } from "awilix"
|
||||
import { configManager } from "./config"
|
||||
import {
|
||||
@@ -33,7 +34,6 @@ import {
|
||||
container as mainContainer,
|
||||
MedusaContainer,
|
||||
} from "./container"
|
||||
import type { Knex } from "@mikro-orm/knex"
|
||||
|
||||
export class MedusaAppLoader {
|
||||
/**
|
||||
@@ -88,6 +88,7 @@ export class MedusaAppLoader {
|
||||
const def = {} as ModuleDefinition
|
||||
def.key ??= key
|
||||
def.label ??= ModulesDefinition[key]?.label ?? upperCaseFirst(key)
|
||||
def.dependencies ??= ModulesDefinition[key]?.dependencies
|
||||
def.isQueryable = ModulesDefinition[key]?.isQueryable ?? true
|
||||
|
||||
const orignalDef = value?.definition ?? ModulesDefinition[key]
|
||||
|
||||
@@ -22,6 +22,7 @@ import {
|
||||
isString,
|
||||
MedusaModuleProviderType,
|
||||
MedusaModuleType,
|
||||
Modules,
|
||||
ModulesSdkUtils,
|
||||
toMikroOrmEntities,
|
||||
} from "@medusajs/utils"
|
||||
@@ -223,7 +224,8 @@ export async function loadInternalModule(args: {
|
||||
ContainerRegistrationKeys.MANAGER,
|
||||
ContainerRegistrationKeys.CONFIG_MODULE,
|
||||
ContainerRegistrationKeys.LOGGER,
|
||||
ContainerRegistrationKeys.PG_CONNECTION
|
||||
ContainerRegistrationKeys.PG_CONNECTION,
|
||||
Modules.EVENT_BUS
|
||||
)
|
||||
|
||||
for (const dependency of dependencies) {
|
||||
|
||||
@@ -456,3 +456,7 @@ export type TransformObjectMethodToAsync<T extends object> = {
|
||||
? TransformObjectMethodToAsync<T[K]>
|
||||
: T[K]
|
||||
}
|
||||
|
||||
export type QueryContextType = Record<string, any> & {
|
||||
__type?: "QueryContext"
|
||||
}
|
||||
|
||||
@@ -38,6 +38,10 @@ export type PaginatedResponse<T> = {
|
||||
* The total number of items.
|
||||
*/
|
||||
count: number
|
||||
/**
|
||||
* The estimated number of items.
|
||||
*/
|
||||
estimate_count?: number
|
||||
} & T
|
||||
|
||||
export type BatchResponse<T> = {
|
||||
@@ -59,7 +63,7 @@ export type BatchResponse<T> = {
|
||||
ids: string[]
|
||||
/**
|
||||
* The type of the items that were deleted.
|
||||
*
|
||||
*
|
||||
* @example
|
||||
* "product"
|
||||
*/
|
||||
|
||||
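A minimal sketch (not from the diff) of a response object carrying the new optional `estimate_count` alongside the existing pagination members; `limit` and `offset` are assumed from the surrounding type, which is not fully shown in this hunk.

const response: PaginatedResponse<{ products: { id: string }[] }> = {
  products: [{ id: "prod_1" }],
  limit: 10,  // assumed existing field
  offset: 0,  // assumed existing field
  count: 1,
  estimate_count: 1, // new: planner-based estimate, may differ from the exact count
}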
@@ -9,6 +9,7 @@ describe("IndexQueryConfig", () => {
|
||||
|
||||
expectTypeOf<IndexConfig["fields"]>().toEqualTypeOf<
|
||||
(
|
||||
| "*"
|
||||
| "id"
|
||||
| "title"
|
||||
| "variants.*"
|
||||
|
||||
@@ -31,10 +31,21 @@ export type SchemaObjectEntityRepresentation = {
     */
    targetProp: string

    /**
     * The property the parent is assigned to on this side.
     */
    inverseSideProp: string

    /**
     * Whether the data is expected to be a list.
     */
    isList?: boolean

    /**
     * Whether the entity is the inverse side of the link (not the owner),
     * e.g. order -> cart: order is the owner, cart is the inverse.
     */
    isInverse?: boolean
  }[]
|
||||
|
||||
/**
|
||||
@@ -69,6 +80,8 @@ export type SchemaPropertiesMap = {
|
||||
[key: string]: {
|
||||
shortCutOf?: string
|
||||
ref: SchemaObjectEntityRepresentation
|
||||
isInverse?: boolean
|
||||
isList?: boolean
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
import { ExcludedProps, TypeOnly } from "./common"
|
||||
|
||||
type Marker = [never, 0, 1, 2, 3, 4]
|
||||
|
||||
type RawBigNumberPrefix = "raw_"
|
||||
@@ -17,7 +16,7 @@ export type ObjectToIndexFields<
|
||||
MaybeT,
|
||||
Depth extends number = 2,
|
||||
Exclusion extends string[] = [],
|
||||
T = TypeOnly<MaybeT>
|
||||
T = TypeOnly<MaybeT> & { "*": "*" }
|
||||
> = Depth extends never
|
||||
? never
|
||||
: T extends object
|
||||
|
||||
@@ -1,7 +1,56 @@
|
||||
import { RemoteQueryInput } from "../../modules-sdk/remote-query-object-from-string"
|
||||
import { QueryContextType } from "../../common"
|
||||
import { IndexServiceEntryPoints } from "../index-service-entry-points"
|
||||
import { ObjectToIndexFields } from "./query-input-config-fields"
|
||||
import { IndexFilters } from "./query-input-config-filters"
|
||||
import { IndexOrderBy } from "./query-input-config-order-by"
|
||||
|
||||
export type IndexQueryInput<TEntry extends string> = {
|
||||
/**
|
||||
* The name of the entity to retrieve. For example, `product`.
|
||||
*/
|
||||
entity: TEntry | keyof IndexServiceEntryPoints
|
||||
/**
|
||||
* The fields and relations to retrieve in the entity.
|
||||
*/
|
||||
fields: ObjectToIndexFields<
|
||||
IndexServiceEntryPoints[TEntry & keyof IndexServiceEntryPoints]
|
||||
> extends never
|
||||
? string[]
|
||||
:
|
||||
| ObjectToIndexFields<
|
||||
IndexServiceEntryPoints[TEntry & keyof IndexServiceEntryPoints]
|
||||
>[]
|
||||
| string[]
|
||||
/**
|
||||
* Pagination configurations for the returned list of items.
|
||||
*/
|
||||
pagination?: {
|
||||
/**
|
||||
* The number of items to skip before retrieving the returned items.
|
||||
*/
|
||||
skip?: number
|
||||
/**
|
||||
* The maximum number of items to return.
|
||||
*/
|
||||
take?: number
|
||||
/**
|
||||
* Sort by field names in ascending or descending order.
|
||||
*/
|
||||
order?: IndexOrderBy<TEntry>
|
||||
}
|
||||
/**
|
||||
* Filters to apply on the retrieved items.
|
||||
*/
|
||||
filters?: IndexFilters<TEntry>
|
||||
/**
|
||||
* Apply a query context on the retrieved data. For example, to retrieve product prices for a certain context.
|
||||
*/
|
||||
context?: QueryContextType
|
||||
/**
|
||||
* Apply a `withDeleted` flag on the retrieved data to retrieve soft deleted items.
|
||||
*/
|
||||
withDeleted?: boolean
|
||||
}
|
||||
|
||||
export type IndexQueryConfig<TEntry extends string> = {
|
||||
fields: ObjectToIndexFields<
|
||||
@@ -13,14 +62,14 @@ export type IndexQueryConfig<TEntry extends string> = {
|
||||
>[]
|
||||
filters?: IndexFilters<TEntry>
|
||||
joinFilters?: IndexFilters<TEntry>
|
||||
pagination?: Partial<RemoteQueryInput<TEntry>["pagination"]>
|
||||
pagination?: Partial<IndexQueryInput<TEntry>["pagination"]>
|
||||
keepFilteredEntities?: boolean
|
||||
}
|
||||
|
||||
export type QueryFunctionReturnPagination = {
|
||||
skip?: number
|
||||
take?: number
|
||||
count?: number
|
||||
skip: number
|
||||
take: number
|
||||
estimate_count: number
|
||||
}
|
||||
|
||||
/**
|
||||
|
||||
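Hedged usage sketch (not part of the diff) of the `IndexQueryInput` shape above through `query.index`; the entity, field names, and exact filter shape are illustrative assumptions.

const query = req.scope.resolve(ContainerRegistrationKeys.QUERY)

const { data, metadata } = await query.index({
  entity: "product",
  fields: ["id", "title", "variants.sku", "variants.prices.amount"],
  filters: {
    variants: { sku: { $in: ["sku 123", "aaa test aaa"] } },
  },
  pagination: {
    skip: 0,
    take: 10,
    order: { variants: { prices: { amount: "DESC" } } },
  },
})
// Per the tests in this PR, metadata carries { count, estimate_count, skip, take }.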
@@ -21,6 +21,7 @@ export type JoinerRelationship = {
|
||||
export interface JoinerServiceConfigAlias {
|
||||
name: string | string[]
|
||||
entity?: string
|
||||
filterable?: string[]
|
||||
/**
|
||||
* Extra arguments to pass to the remoteFetchData callback
|
||||
*/
|
||||
|
||||
@@ -248,6 +248,11 @@ export declare type ModuleJoinerRelationship = JoinerRelationship & {
|
||||
* If true, the link joiner will cascade deleting the relationship
|
||||
*/
|
||||
deleteCascade?: boolean
|
||||
|
||||
/**
|
||||
* The fields to be filterable by the Index module using query.index
|
||||
*/
|
||||
filterable?: string[]
|
||||
/**
|
||||
* Allow multiple relationships to exist for this
|
||||
* entity
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
import { QueryContextType } from "../common"
|
||||
import { IndexOrderBy } from "../index-data/query-config/query-input-config-order-by"
|
||||
import { ObjectToRemoteQueryFields } from "./object-to-remote-query-fields"
|
||||
import { RemoteQueryEntryPoints } from "./remote-query-entry-points"
|
||||
import { RemoteQueryFilters } from "./to-remote-query"
|
||||
|
||||
export type RemoteQueryObjectConfig<TEntry extends string> = {
|
||||
// service: string — still supported under the hood, but omitted from this type due to a type mismatch with fields
|
||||
entryPoint: TEntry | keyof RemoteQueryEntryPoints
|
||||
variables?: any
|
||||
fields: ObjectToRemoteQueryFields<
|
||||
@@ -26,7 +26,6 @@ export type RemoteQueryObjectFromStringResult<
|
||||
}
|
||||
|
||||
export type RemoteQueryInput<TEntry extends string> = {
|
||||
// service: string — still supported under the hood, but omitted from this type due to a type mismatch with fields
|
||||
/**
|
||||
* The name of the entity to retrieve. For example, `product`.
|
||||
*/
|
||||
@@ -67,7 +66,7 @@ export type RemoteQueryInput<TEntry extends string> = {
|
||||
/**
|
||||
* Apply a query context on the retrieved data. For example, to retrieve product prices for a certain context.
|
||||
*/
|
||||
context?: any
|
||||
context?: QueryContextType
|
||||
/**
|
||||
* Apply a `withDeleted` flag on the retrieved data to retrieve soft deleted items.
|
||||
*/
|
||||
|
||||
@@ -1,4 +1,8 @@
|
||||
import { Prettify } from "../common"
|
||||
import {
|
||||
IndexQueryInput,
|
||||
QueryResultSet,
|
||||
} from "../index-data/query-config/query-input-config"
|
||||
import { RemoteJoinerOptions, RemoteJoinerQuery } from "../joiner"
|
||||
import { RemoteQueryEntryPoints } from "./remote-query-entry-points"
|
||||
import {
|
||||
@@ -6,7 +10,6 @@ import {
|
||||
RemoteQueryObjectConfig,
|
||||
RemoteQueryObjectFromStringResult,
|
||||
} from "./remote-query-object-from-string"
|
||||
import { RemoteQueryFilters } from "./to-remote-query"
|
||||
|
||||
/*type ExcludedProps = "__typename"*/
|
||||
|
||||
@@ -40,17 +43,14 @@ export type QueryGraphFunction = {
|
||||
}
|
||||
|
||||
/**
|
||||
* QueryIndexFunction is a wrapper on top of remoteQuery
|
||||
* QueryIndexFunction is a wrapper on top of indexModule
|
||||
* that simplifies the input it accepts and returns
|
||||
* a normalized/consistent output.
|
||||
*/
|
||||
export type QueryIndexFunction = {
|
||||
<const TEntry extends string>(
|
||||
queryOptions: RemoteQueryInput<TEntry> & {
|
||||
joinFilters?: RemoteQueryFilters<TEntry>
|
||||
},
|
||||
options?: RemoteJoinerOptions
|
||||
): Promise<Prettify<GraphResultSet<TEntry>>>
|
||||
<const TEntry extends string>(queryOptions: IndexQueryInput<TEntry>): Promise<
|
||||
Prettify<QueryResultSet<TEntry>>
|
||||
>
|
||||
}
|
||||
|
||||
/*export type RemoteQueryReturnedData<TEntry extends string> =
|
||||
|
||||
@@ -69,7 +69,7 @@ describe("GraphQL builder", () => {
|
||||
id: ID!
|
||||
username: String!
|
||||
email: Email!
|
||||
spend_limit: String!
|
||||
spend_limit: Float!
|
||||
phones: [String]!
|
||||
group_id:String!
|
||||
group: Group!
|
||||
|
||||
@@ -9,7 +9,7 @@ const GRAPHQL_TYPES = {
|
||||
boolean: "Boolean",
|
||||
dateTime: "DateTime",
|
||||
number: "Int",
|
||||
bigNumber: "String",
|
||||
bigNumber: "Float",
|
||||
text: "String",
|
||||
json: "JSON",
|
||||
array: "[String]",
|
||||
|
||||
@@ -18,6 +18,7 @@ type InputSource = {
|
||||
alias?: string
|
||||
linkable: string
|
||||
primaryKey: string
|
||||
filterable?: string[]
|
||||
}
|
||||
|
||||
type ReadOnlyInputSource = {
|
||||
@@ -42,6 +43,7 @@ type InputOptions = {
|
||||
field?: string
|
||||
isList?: boolean
|
||||
deleteCascade?: boolean
|
||||
filterable?: string[]
|
||||
}
|
||||
|
||||
type Shortcut = {
|
||||
@@ -87,6 +89,7 @@ type ModuleLinkableKeyConfig = {
|
||||
alias: string
|
||||
hasMany?: boolean
|
||||
shortcut?: Shortcut | Shortcut[]
|
||||
filterable?: string[]
|
||||
}
|
||||
|
||||
function isInputOptions(input: any): input is InputOptions {
|
||||
@@ -141,6 +144,7 @@ function prepareServiceConfig(
|
||||
isList: false,
|
||||
hasMany: false,
|
||||
deleteCascade: false,
|
||||
filterable: source.filterable,
|
||||
module: source.serviceName,
|
||||
entity: source.entity,
|
||||
}
|
||||
@@ -159,6 +163,7 @@ function prepareServiceConfig(
|
||||
isList: input.isList ?? false,
|
||||
hasMany,
|
||||
deleteCascade: input.deleteCascade ?? false,
|
||||
filterable: input.filterable,
|
||||
module: source.serviceName,
|
||||
entity: source.entity,
|
||||
}
|
||||
@@ -192,6 +197,17 @@ export function defineLink(
|
||||
const serviceBObj = prepareServiceConfig(rightService)
|
||||
|
||||
if (linkServiceOptions?.readOnly) {
|
||||
if (!leftService.linkable || !leftService.field) {
|
||||
throw new Error(
|
||||
`ReadOnly link requires "linkable" and "field" to be defined for the left service.`
|
||||
)
|
||||
} else if (
|
||||
(leftService as DefineLinkInputSource).filterable ||
|
||||
(rightService as DefineLinkInputSource).filterable
|
||||
) {
|
||||
throw new Error(`ReadOnly link does not support filterable fields.`)
|
||||
}
|
||||
|
||||
return defineReadOnlyLink(
|
||||
serviceAObj,
|
||||
serviceBObj,
|
||||
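Hedged counter-example (not part of the diff): per the validation above, a read-only link cannot declare filterable fields on either side. The module objects are hypothetical.

// Throws: `ReadOnly link does not support filterable fields.`
defineLink(
  {
    linkable: ProductModule.linkable.product,
    field: "id",
    filterable: ["id"],
  },
  BrandModule.linkable.brand,
  { readOnly: true }
)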
@@ -378,6 +394,7 @@ ${serviceBObj.module}: {
|
||||
methodSuffix: serviceAMethodSuffix,
|
||||
},
|
||||
deleteCascade: serviceAObj.deleteCascade,
|
||||
filterable: serviceAObj.filterable,
|
||||
hasMany: serviceAObj.hasMany,
|
||||
},
|
||||
{
|
||||
@@ -390,6 +407,7 @@ ${serviceBObj.module}: {
|
||||
methodSuffix: serviceBMethodSuffix,
|
||||
},
|
||||
deleteCascade: serviceBObj.deleteCascade,
|
||||
filterable: serviceBObj.filterable,
|
||||
hasMany: serviceBObj.hasMany,
|
||||
},
|
||||
],
|
||||
|
||||
@@ -1,11 +1,13 @@
|
||||
type QueryContextType = {
|
||||
import { QueryContextType } from "@medusajs/types"
|
||||
|
||||
type QueryContexFnType = {
|
||||
(query: Record<string, unknown>): Record<string, unknown>
|
||||
isQueryContext: (obj: any) => boolean
|
||||
}
|
||||
|
||||
const __type = "QueryContext"
|
||||
|
||||
function QueryContextFn(query: Record<string, unknown>) {
|
||||
function QueryContextFn(query: Record<string, unknown>): QueryContextType {
|
||||
return {
|
||||
...query,
|
||||
__type,
|
||||
@@ -16,4 +18,4 @@ QueryContextFn.isQueryContext = (obj: any) => {
|
||||
return obj.__type === __type
|
||||
}
|
||||
|
||||
export const QueryContext: QueryContextType = QueryContextFn
|
||||
export const QueryContext: QueryContexFnType = QueryContextFn
|
||||
|
||||
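Hedged sketch (not part of the diff): `QueryContext` stays a callable factory that tags the object with `__type`, plus the `isQueryContext` guard; a typical nested call looks like the following, with field names as assumptions.

const priceContext = QueryContext({
  variants: {
    calculated_price: QueryContext({
      currency_code: "usd",
      region_id: "reg_123",
    }),
  },
})
// QueryContext.isQueryContext(priceContext) === true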
@@ -79,7 +79,7 @@ async function getProductsWithIndexEngine(
|
||||
|
||||
res.json({
|
||||
products: products.map(remapProductResponse),
|
||||
count: metadata!.count,
|
||||
count: metadata!.estimate_count,
|
||||
offset: metadata!.skip,
|
||||
limit: metadata!.take,
|
||||
})
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { featureFlagRouter } from "@medusajs/framework"
|
||||
import { MedusaResponse } from "@medusajs/framework/http"
|
||||
import { HttpTypes } from "@medusajs/framework/types"
|
||||
import { HttpTypes, QueryContextType } from "@medusajs/framework/types"
|
||||
import {
|
||||
ContainerRegistrationKeys,
|
||||
isPresent,
|
||||
@@ -36,7 +36,7 @@ async function getProductsWithIndexEngine(
|
||||
) {
|
||||
const query = req.scope.resolve(ContainerRegistrationKeys.QUERY)
|
||||
|
||||
const context: object = {}
|
||||
const context: QueryContextType = {}
|
||||
const withInventoryQuantity = req.queryConfig.fields.some((field) =>
|
||||
field.includes("variants.inventory_quantity")
|
||||
)
|
||||
@@ -80,7 +80,8 @@ async function getProductsWithIndexEngine(
|
||||
await wrapProductsWithTaxPrices(req, products)
|
||||
res.json({
|
||||
products,
|
||||
count: metadata!.count,
|
||||
count: metadata!.estimate_count,
|
||||
estimate_count: metadata!.estimate_count,
|
||||
offset: metadata!.skip,
|
||||
limit: metadata!.take,
|
||||
})
|
||||
|
||||
@@ -414,7 +414,6 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
})
|
||||
|
||||
// NULLS LAST (DESC = first)
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_2",
|
||||
@@ -526,7 +525,7 @@ describe("IndexModuleService query", function () {
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
estimate_count: expect.any(Number),
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
@@ -575,7 +574,7 @@ describe("IndexModuleService query", function () {
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
estimate_count: expect.any(Number),
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
@@ -631,6 +630,86 @@ describe("IndexModuleService query", function () {
|
||||
])
|
||||
})
|
||||
|
||||
it("should filter using IN operator with array of strings", async () => {
|
||||
const { data } = await module.query({
|
||||
fields: ["product.id", "product.variants.*"],
|
||||
filters: {
|
||||
product: {
|
||||
variants: {
|
||||
sku: { $in: ["sku 123", "aaa test aaa", "does-not-exist"] },
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
order: {
|
||||
product: {
|
||||
variants: {
|
||||
prices: {
|
||||
amount: "DESC",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_1",
|
||||
variants: [
|
||||
{
|
||||
id: "var_1",
|
||||
sku: "aaa test aaa",
|
||||
},
|
||||
{
|
||||
id: "var_2",
|
||||
sku: "sku 123",
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should filter using IN operator with array of strings", async () => {
|
||||
const { data } = await module.query({
|
||||
fields: ["product.id", "product.variants.*"],
|
||||
filters: {
|
||||
product: {
|
||||
variants: {
|
||||
sku: { $in: ["sku 123", "aaa test aaa", "does-not-exist"] },
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
order: {
|
||||
product: {
|
||||
variants: {
|
||||
prices: {
|
||||
amount: "DESC",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_1",
|
||||
variants: [
|
||||
{
|
||||
id: "var_1",
|
||||
sku: "aaa test aaa",
|
||||
},
|
||||
{
|
||||
id: "var_2",
|
||||
sku: "sku 123",
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should query products filtering by price and returning the complete entity", async () => {
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
@@ -651,7 +730,7 @@ describe("IndexModuleService query", function () {
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
estimate_count: expect.any(Number),
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
@@ -736,11 +815,16 @@ describe("IndexModuleService query", function () {
|
||||
pagination: {
|
||||
take: 1,
|
||||
skip: 1,
|
||||
order: {
|
||||
product: {
|
||||
id: "ASC",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 2,
|
||||
estimate_count: expect.any(Number),
|
||||
skip: 1,
|
||||
take: 1,
|
||||
})
|
||||
@@ -759,77 +843,6 @@ describe("IndexModuleService query", function () {
|
||||
])
|
||||
})
|
||||
|
||||
it("should handle null values on where clause", async () => {
|
||||
const { data: data_, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
filters: {
|
||||
product: {
|
||||
variants: {
|
||||
sku: null,
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data_).toEqual([
|
||||
{
|
||||
id: "prod_2",
|
||||
deep: { a: 1, obj: { b: 15 } },
|
||||
title: "Product 2 title",
|
||||
variants: [],
|
||||
},
|
||||
])
|
||||
|
||||
const { data, metadata: metadata2 } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
filters: {
|
||||
product: {
|
||||
variants: {
|
||||
sku: { $ne: null },
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata2).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_1",
|
||||
variants: [
|
||||
{
|
||||
id: "var_1",
|
||||
sku: "aaa test aaa",
|
||||
prices: [{ id: "money_amount_1", amount: 100 }],
|
||||
},
|
||||
{
|
||||
id: "var_2",
|
||||
sku: "sku 123",
|
||||
prices: [{ id: "money_amount_2", amount: 10 }],
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should query products filtering by deep nested levels", async () => {
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*"],
|
||||
@@ -849,7 +862,7 @@ describe("IndexModuleService query", function () {
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
estimate_count: expect.any(Number),
|
||||
skip: 0,
|
||||
take: 1,
|
||||
})
|
||||
@@ -866,4 +879,52 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should query products filtering by prices bigger than 20", async () => {
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
filters: {
|
||||
product: {
|
||||
variants: {
|
||||
prices: {
|
||||
amount: { $gt: 20 },
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
order: {
|
||||
product: {
|
||||
created_at: "ASC",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
estimate_count: expect.any(Number),
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_1",
|
||||
variants: [
|
||||
{
|
||||
id: "var_1",
|
||||
sku: "aaa test aaa",
|
||||
prices: [
|
||||
{
|
||||
id: "money_amount_1",
|
||||
amount: 100,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
|
||||
@@ -3,11 +3,11 @@ import { Migration } from "@mikro-orm/migrations"
|
||||
export class Migration20231019174230 extends Migration {
|
||||
async up(): Promise<void> {
|
||||
this.addSql(
|
||||
`create table "index_data" ("id" text not null, "name" text not null, "data" jsonb not null default '{}', constraint "index_data_pkey" primary key ("id", "name")) PARTITION BY LIST("name");`
|
||||
`create table IF NOT EXISTS "index_data" ("id" text not null, "name" text not null, "data" jsonb not null default '{}', constraint "index_data_pkey" primary key ("id", "name")) PARTITION BY LIST("name");`
|
||||
)
|
||||
|
||||
this.addSql(
|
||||
`create table "index_relation" ("id" bigserial, "pivot" text not null, "parent_id" text not null, "parent_name" text not null, "child_id" text not null, "child_name" text not null, constraint "index_relation_pkey" primary key ("id", "pivot")) PARTITION BY LIST("pivot");`
|
||||
`create table IF NOT EXISTS "index_relation" ("id" bigserial, "pivot" text not null, "parent_id" text not null, "parent_name" text not null, "child_id" text not null, "child_name" text not null, constraint "index_relation_pkey" primary key ("id", "pivot")) PARTITION BY LIST("pivot");`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
import {
|
||||
MikroOrmBaseRepository as BaseRepository,
|
||||
ContainerRegistrationKeys,
|
||||
GraphQLUtils,
|
||||
Modules,
|
||||
ModulesSdkUtils,
|
||||
} from "@medusajs/framework/utils"
|
||||
@@ -20,6 +21,7 @@ import {
|
||||
defaultSchema,
|
||||
gqlSchemaToTypes,
|
||||
} from "@utils"
|
||||
import { baseGraphqlSchema } from "../utils/base-graphql-schema"
|
||||
import { DataSynchronizer } from "./data-synchronizer"
|
||||
|
||||
type InjectedDependencies = {
|
||||
@@ -105,7 +107,7 @@ export default class IndexModuleService
|
||||
|
||||
protected async onApplicationStart_() {
|
||||
try {
|
||||
this.buildSchemaObjectRepresentation_()
|
||||
const executableSchema = this.buildSchemaObjectRepresentation_()
|
||||
|
||||
this.storageProvider_ = new this.storageProviderCtr_(
|
||||
this.container_,
|
||||
@@ -122,7 +124,7 @@ export default class IndexModuleService
|
||||
await this.storageProvider_.onApplicationStart()
|
||||
}
|
||||
|
||||
await gqlSchemaToTypes(this.moduleOptions_.schema ?? defaultSchema)
|
||||
await gqlSchemaToTypes(executableSchema!)
|
||||
|
||||
this.dataSynchronizer_.onApplicationStart({
|
||||
schemaObjectRepresentation: this.schemaObjectRepresentation_,
|
||||
@@ -174,24 +176,21 @@ export default class IndexModuleService
|
||||
}
|
||||
}
|
||||
|
||||
private buildSchemaObjectRepresentation_() {
|
||||
private buildSchemaObjectRepresentation_():
|
||||
| GraphQLUtils.GraphQLSchema
|
||||
| undefined {
|
||||
if (this.schemaObjectRepresentation_) {
|
||||
return this.schemaObjectRepresentation_
|
||||
return
|
||||
}
|
||||
|
||||
const baseSchema = `
|
||||
scalar DateTime
|
||||
scalar Date
|
||||
scalar Time
|
||||
scalar JSON
|
||||
`
|
||||
const [objectRepresentation, entityMap] = buildSchemaObjectRepresentation(
|
||||
baseSchema + (this.moduleOptions_.schema ?? defaultSchema)
|
||||
)
|
||||
const { objectRepresentation, entitiesMap, executableSchema } =
|
||||
buildSchemaObjectRepresentation(
|
||||
baseGraphqlSchema + (this.moduleOptions_.schema ?? defaultSchema)
|
||||
)
|
||||
|
||||
this.schemaObjectRepresentation_ = objectRepresentation
|
||||
this.schemaEntitiesMap_ = entityMap
|
||||
this.schemaEntitiesMap_ = entitiesMap
|
||||
|
||||
return this.schemaObjectRepresentation_
|
||||
return executableSchema
|
||||
}
|
||||
}
|
||||
|
||||
@@ -68,37 +68,6 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
|
||||
|
||||
this.schemaObjectRepresentation_ = options.schemaObjectRepresentation
|
||||
this.schemaEntitiesMap_ = options.entityMap
|
||||
|
||||
// Add a new column for each key that can be found in the jsonb data column to perform indexes and query on it.
|
||||
// So far, the execution time is about the same
|
||||
/*;(async () => {
|
||||
const query = [
|
||||
...new Set(
|
||||
Object.keys(this.schemaObjectRepresentation_)
|
||||
.filter(
|
||||
(key) =>
|
||||
![
|
||||
"_serviceNameModuleConfigMap",
|
||||
"_schemaPropertiesMap",
|
||||
].includes(key)
|
||||
)
|
||||
.map((key) => {
|
||||
return this.schemaObjectRepresentation_[key].fields.filter(
|
||||
(field) => !field.includes(".")
|
||||
)
|
||||
})
|
||||
.flat()
|
||||
),
|
||||
].map(
|
||||
(field) =>
|
||||
"ALTER TABLE index_data ADD IF NOT EXISTS " +
|
||||
field +
|
||||
" text GENERATED ALWAYS AS (NEW.data->>'" +
|
||||
field +
|
||||
"') STORED"
|
||||
)
|
||||
await this.manager_.execute(query.join(";"))
|
||||
})()*/
|
||||
}
|
||||
|
||||
async onApplicationStart() {
|
||||
@@ -138,7 +107,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
|
||||
const parentAlias = field.split(".")[0]
|
||||
const parentSchemaObjectRepresentation =
|
||||
schemaEntityObjectRepresentation.parents.find(
|
||||
(parent) => parent.ref.alias === parentAlias
|
||||
(parent) => parent.inverseSideProp === parentAlias
|
||||
)
|
||||
|
||||
if (!parentSchemaObjectRepresentation) {
|
||||
@@ -304,7 +273,6 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
|
||||
schema: this.schemaObjectRepresentation_,
|
||||
entityMap: this.schemaEntitiesMap_,
|
||||
knex: connection.getKnex(),
|
||||
rawConfig: config,
|
||||
selector: {
|
||||
select,
|
||||
where,
|
||||
@@ -316,26 +284,30 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
|
||||
keepFilteredEntities,
|
||||
orderBy,
|
||||
},
|
||||
rawConfig: config,
|
||||
requestedFields,
|
||||
})
|
||||
|
||||
const sql = qb.buildQuery({
|
||||
const { sql, sqlCount } = qb.buildQuery({
|
||||
hasPagination,
|
||||
returnIdOnly: !!keepFilteredEntities,
|
||||
hasCount,
|
||||
})
|
||||
|
||||
const resultSet = await manager.execute(sql)
|
||||
const [resultSet, countResult] = await Promise.all([
|
||||
manager.execute(sql),
|
||||
hasCount ? manager.execute(sqlCount!) : null,
|
||||
])
|
||||
|
||||
const resultMetadata: IndexTypes.QueryFunctionReturnPagination | undefined =
|
||||
hasPagination
|
||||
? {
|
||||
count: hasCount
|
||||
? parseInt(resultSet[0]?.count_total ?? 0)
|
||||
? ({
|
||||
estimate_count: hasCount
|
||||
? parseInt(countResult![0]?.estimate_count ?? 0)
|
||||
: undefined,
|
||||
skip,
|
||||
take,
|
||||
}
|
||||
} as IndexTypes.QueryFunctionReturnPagination)
|
||||
: undefined
|
||||
|
||||
if (keepFilteredEntities) {
|
||||
|
||||
packages/modules/index/src/utils/__tests__/build-config.spec.ts (new file, 1857 lines)
File diff suppressed because it is too large
packages/modules/index/src/utils/base-graphql-schema.ts (new file, 6 lines)
@@ -0,0 +1,6 @@
export const baseGraphqlSchema = `
  scalar DateTime
  scalar Date
  scalar Time
  scalar JSON
`
File diff suppressed because it is too large
@@ -9,6 +9,8 @@ export async function createPartitions(
|
||||
const activeSchema = manager.config.get("schema")
|
||||
? `"${manager.config.get("schema")}".`
|
||||
: ""
|
||||
|
||||
const createdPartitions: Set<string> = new Set()
|
||||
const partitions = Object.keys(schemaObjectRepresentation)
|
||||
.filter(
|
||||
(key) =>
|
||||
@@ -17,16 +19,30 @@ export async function createPartitions(
|
||||
)
|
||||
.map((key) => {
|
||||
const cName = key.toLowerCase()
|
||||
|
||||
if (createdPartitions.has(cName)) {
|
||||
return []
|
||||
}
|
||||
createdPartitions.add(cName)
|
||||
|
||||
const part: string[] = []
|
||||
part.push(
|
||||
`CREATE TABLE IF NOT EXISTS ${activeSchema}cat_${cName} PARTITION OF ${activeSchema}index_data FOR VALUES IN ('${key}')`
|
||||
)
|
||||
|
||||
for (const parent of schemaObjectRepresentation[key].parents) {
|
||||
const pKey = `${parent.ref.entity}-${key}`
|
||||
const pName = `${parent.ref.entity}${key}`.toLowerCase()
|
||||
if (parent.isInverse) {
|
||||
continue
|
||||
}
|
||||
|
||||
const pName = `cat_pivot_${parent.ref.entity}${key}`.toLowerCase()
|
||||
if (createdPartitions.has(pName)) {
|
||||
continue
|
||||
}
|
||||
createdPartitions.add(pName)
|
||||
|
||||
part.push(
|
||||
`CREATE TABLE IF NOT EXISTS ${activeSchema}cat_pivot_${pName} PARTITION OF ${activeSchema}index_relation FOR VALUES IN ('${pKey}')`
|
||||
`CREATE TABLE IF NOT EXISTS ${activeSchema}${pName} PARTITION OF ${activeSchema}index_relation FOR VALUES IN ('${parent.ref.entity}-${key}')`
|
||||
)
|
||||
}
|
||||
return part
|
||||
@@ -58,11 +74,14 @@ export async function createPartitions(
|
||||
`CREATE INDEX CONCURRENTLY IF NOT EXISTS "IDX_cat_${cName}_id" ON ${activeSchema}cat_${cName} ("id")`
|
||||
)
|
||||
|
||||
// create child id index on pivot partitions
|
||||
for (const parent of schemaObjectRepresentation[key].parents) {
|
||||
const pName = `${parent.ref.entity}${key}`.toLowerCase()
|
||||
if (parent.isInverse) {
|
||||
continue
|
||||
}
|
||||
|
||||
const pName = `cat_pivot_${parent.ref.entity}${key}`.toLowerCase()
|
||||
part.push(
|
||||
`CREATE INDEX CONCURRENTLY IF NOT EXISTS "IDX_cat_pivot_${pName}_child_id" ON ${activeSchema}cat_pivot_${pName} ("child_id")`
|
||||
`CREATE INDEX CONCURRENTLY IF NOT EXISTS "IDX_${pName}_child_id" ON ${activeSchema}${pName} ("child_id")`
|
||||
)
|
||||
}
|
||||
|
||||
@@ -70,18 +89,25 @@ export async function createPartitions(
|
||||
})
|
||||
.flat()
|
||||
|
||||
// Execute index creation commands separately to avoid blocking
|
||||
for (const cmd of indexCreationCommands) {
|
||||
try {
|
||||
await manager.execute(cmd)
|
||||
} catch (error) {
|
||||
// Log error but continue with other indexes
|
||||
console.error(`Failed to create index: ${error.message}`)
|
||||
}
|
||||
}
|
||||
|
||||
partitions.push(`analyse ${activeSchema}index_data`)
|
||||
partitions.push(`analyse ${activeSchema}index_relation`)
|
||||
// Create count estimate function
|
||||
partitions.push(`
|
||||
CREATE OR REPLACE FUNCTION count_estimate(query text) RETURNS bigint AS $$
|
||||
DECLARE
|
||||
plan jsonb;
|
||||
BEGIN
|
||||
EXECUTE 'EXPLAIN (FORMAT JSON) ' || query INTO plan;
|
||||
RETURN (plan->0->'Plan'->>'Plan Rows')::bigint;
|
||||
END;
|
||||
$$ LANGUAGE plpgsql;
|
||||
`)
|
||||
|
||||
await manager.execute(partitions.join("; "))
|
||||
}
|
||||
|
||||
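Illustrative usage (not part of the diff) of the `count_estimate` helper created above, mirroring how the Postgres provider consumes it later in this PR; the table and query text are assumptions.

const countResult = await manager.execute(
  `SELECT count_estimate('SELECT 1 FROM cat_product') AS estimate_count`
)
const estimate = parseInt(countResult[0]?.estimate_count ?? 0)
// The planner's row estimate stands in for an exact COUNT(*) so large
// paginated queries stay cheap.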
@@ -2,7 +2,7 @@ import { Modules } from "@medusajs/utils"
|
||||
|
||||
export const defaultSchema = `
|
||||
type Product @Listeners(values: ["${Modules.PRODUCT}.product.created", "${Modules.PRODUCT}.product.updated", "${Modules.PRODUCT}.product.deleted"]) {
|
||||
id: String
|
||||
id: ID
|
||||
title: String
|
||||
handle: String
|
||||
status: String
|
||||
@@ -18,7 +18,7 @@ export const defaultSchema = `
|
||||
}
|
||||
|
||||
type ProductVariant @Listeners(values: ["${Modules.PRODUCT}.product-variant.created", "${Modules.PRODUCT}.product-variant.updated", "${Modules.PRODUCT}.product-variant.deleted"]) {
|
||||
id: String
|
||||
id: ID
|
||||
product_id: String
|
||||
sku: String
|
||||
|
||||
@@ -26,13 +26,13 @@ export const defaultSchema = `
|
||||
}
|
||||
|
||||
type Price @Listeners(values: ["${Modules.PRICING}.price.created", "${Modules.PRICING}.price.updated", "${Modules.PRICING}.price.deleted"]) {
|
||||
id: String
|
||||
id: ID
|
||||
amount: Float
|
||||
currency_code: String
|
||||
}
|
||||
|
||||
type SalesChannel @Listeners(values: ["${Modules.SALES_CHANNEL}.sales_channel.created", "${Modules.SALES_CHANNEL}.sales_channel.updated", "${Modules.SALES_CHANNEL}.sales_channel.deleted"]) {
|
||||
id: String
|
||||
id: ID
|
||||
is_disabled: Boolean
|
||||
}
|
||||
`
|
||||
|
||||
@@ -1,18 +1,18 @@
|
||||
import { MedusaModule } from "@medusajs/framework/modules-sdk"
|
||||
import {
|
||||
FileSystem,
|
||||
GraphQLUtils,
|
||||
gqlSchemaToTypes as ModulesSdkGqlSchemaToTypes,
|
||||
} from "@medusajs/framework/utils"
|
||||
import { join } from "path"
|
||||
import * as process from "process"
|
||||
import { CustomDirectives, makeSchemaExecutable } from "./build-config"
|
||||
|
||||
export async function gqlSchemaToTypes(schema: string) {
|
||||
const augmentedSchema = CustomDirectives.Listeners.definition + schema
|
||||
const executableSchema = makeSchemaExecutable(augmentedSchema)!
|
||||
export async function gqlSchemaToTypes(
|
||||
executableSchema: GraphQLUtils.GraphQLSchema
|
||||
) {
|
||||
const filename = "index-service-entry-points"
|
||||
const filenameWithExt = filename + ".d.ts"
|
||||
const dir = join(process.cwd(), ".medusa")
|
||||
const dir = join(process.cwd(), ".medusa/types")
|
||||
|
||||
await ModulesSdkGqlSchemaToTypes({
|
||||
schema: executableSchema,
|
||||
|
||||
@@ -1,15 +1,42 @@
|
||||
import { IndexTypes } from "@medusajs/framework/types"
|
||||
import {
|
||||
GraphQLUtils,
|
||||
isDefined,
|
||||
isObject,
|
||||
isPresent,
|
||||
isString,
|
||||
unflattenObjectKeys,
|
||||
} from "@medusajs/framework/utils"
|
||||
import { Knex } from "@mikro-orm/knex"
|
||||
import { OrderBy, QueryFormat, QueryOptions, Select } from "@types"
|
||||
|
||||
function escapeJsonPathString(val: string): string {
|
||||
// Escape for JSONPath string
|
||||
return val.replace(/\\/g, "\\\\").replace(/"/g, '\\"').replace(/'/g, "\\'")
|
||||
}
|
||||
|
||||
function buildSafeJsonPathQuery(
|
||||
field: string,
|
||||
operator: string,
|
||||
value: any
|
||||
): string {
|
||||
let jsonPathOperator = operator
|
||||
if (operator === "=") {
|
||||
jsonPathOperator = "=="
|
||||
} else if (operator.toUpperCase().includes("LIKE")) {
|
||||
jsonPathOperator = "like_regex"
|
||||
}
|
||||
|
||||
if (typeof value === "string") {
|
||||
let val = value
|
||||
if (jsonPathOperator === "like_regex") {
|
||||
// Convert SQL LIKE wildcards to regex
|
||||
val = val.replace(/%/g, ".*").replace(/_/g, ".")
|
||||
}
|
||||
value = `"${escapeJsonPathString(val)}"`
|
||||
}
|
||||
|
||||
return `$.${field} ${jsonPathOperator} ${value}`
|
||||
}
|
||||
|
||||
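// Worked example (not part of the diff) of the helper above:
//   buildSafeJsonPathQuery("sku", "=", `shirt "xl"`)
//   // => $.sku == "shirt \"xl\""
//   buildSafeJsonPathQuery("title", "LIKE", "shirt%")
//   // => $.title like_regex "shirt.*"
// The resulting JSONPath string is bound as a parameter to `data @@ ?`
// in parseWhere below, so values are never interpolated into raw SQL.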
export const OPERATOR_MAP = {
|
||||
$eq: "=",
|
||||
$lt: "<",
|
||||
@@ -91,17 +118,10 @@ export class QueryBuilder {
|
||||
throw new Error(`Field ${field} is not indexed.`)
|
||||
}
|
||||
|
||||
let currentType = fieldRef.type
|
||||
let isArray = false
|
||||
while (currentType.ofType) {
|
||||
if (currentType instanceof GraphQLUtils.GraphQLList) {
|
||||
isArray = true
|
||||
}
|
||||
|
||||
currentType = currentType.ofType
|
||||
}
|
||||
|
||||
return currentType.name + (isArray ? "[]" : "")
|
||||
const fieldType = fieldRef.type.toString()
|
||||
const isArray = fieldType.startsWith("[")
|
||||
const currentType = fieldType.replace(/\[|\]|\!/g, "")
|
||||
return currentType + (isArray ? "[]" : "")
|
||||
}
|
||||
|
||||
private transformValueToType(path, field, value) {
|
||||
@@ -244,9 +264,8 @@ export class QueryBuilder {
|
||||
field,
|
||||
value[subKey]
|
||||
)
|
||||
const castType = this.getPostgresCastType(attr, [field]).cast
|
||||
|
||||
const val = operator === "IN" ? subValue : [subValue]
|
||||
let val = operator === "IN" ? subValue : [subValue]
|
||||
if (operator === "=" && subValue === null) {
|
||||
operator = "IS"
|
||||
} else if (operator === "!=" && subValue === null) {
|
||||
@@ -254,18 +273,65 @@ export class QueryBuilder {
|
||||
}
|
||||
|
||||
if (operator === "=") {
|
||||
builder.whereRaw(
|
||||
`${aliasMapping[attr]}.data @> '${getPathOperation(
|
||||
attr,
|
||||
field as string[],
|
||||
subValue
|
||||
)}'::jsonb`
|
||||
)
|
||||
const hasId = field[field.length - 1] === "id"
|
||||
if (hasId) {
|
||||
builder.whereRaw(`${aliasMapping[attr]}.id = ?`, subValue)
|
||||
} else {
|
||||
builder.whereRaw(
|
||||
`${aliasMapping[attr]}.data @> '${getPathOperation(
|
||||
attr,
|
||||
field as string[],
|
||||
subValue
|
||||
)}'::jsonb`
|
||||
)
|
||||
}
|
||||
} else if (operator === "IN") {
|
||||
if (val && !Array.isArray(val)) {
|
||||
val = [val]
|
||||
}
|
||||
if (!val || val.length === 0) {
|
||||
return
|
||||
}
|
||||
|
||||
const inPlaceholders = val.map(() => "?").join(",")
|
||||
const hasId = field[field.length - 1] === "id"
|
||||
if (hasId) {
|
||||
builder.whereRaw(
|
||||
`${aliasMapping[attr]}.id IN (${inPlaceholders})`,
|
||||
val
|
||||
)
|
||||
} else {
|
||||
const targetField = field[field.length - 1] as string
|
||||
|
||||
const jsonbValues = val.map((item) =>
|
||||
JSON.stringify({
|
||||
[targetField]: item === null ? null : item,
|
||||
})
|
||||
)
|
||||
builder.whereRaw(
|
||||
`${aliasMapping[attr]}.data${nested} @> ANY(ARRAY[${inPlaceholders}]::JSONB[])`,
|
||||
jsonbValues
|
||||
)
|
||||
}
|
||||
} else {
|
||||
builder.whereRaw(
|
||||
`(${aliasMapping[attr]}.data${nested}->>?)${castType} ${operator} ?`,
|
||||
[...field, ...val]
|
||||
)
|
||||
const potentialIdFields = field[field.length - 1]
|
||||
const hasId = potentialIdFields === "id"
|
||||
if (hasId) {
|
||||
builder.whereRaw(`(${aliasMapping[attr]}.id) ${operator} ?`, [
|
||||
...val,
|
||||
])
|
||||
} else {
|
||||
const targetField = field[field.length - 1] as string
|
||||
|
||||
const jsonPath = buildSafeJsonPathQuery(
|
||||
targetField,
|
||||
operator,
|
||||
val[0]
|
||||
)
|
||||
builder.whereRaw(`${aliasMapping[attr]}.data${nested} @@ ?`, [
|
||||
jsonPath,
|
||||
])
|
||||
}
|
||||
}
|
||||
} else {
|
||||
throw new Error(`Unsupported operator: ${subKey}`)
|
||||
@@ -281,29 +347,60 @@ export class QueryBuilder {
|
||||
return
|
||||
}
|
||||
|
||||
const castType = this.getPostgresCastType(attr, field).cast
|
||||
const inPlaceholders = value.map(() => "?").join(",")
|
||||
builder.whereRaw(
|
||||
`(${aliasMapping[attr]}.data${nested}->>?)${castType} IN (${inPlaceholders})`,
|
||||
[...field, ...value]
|
||||
)
|
||||
} else if (isDefined(value)) {
|
||||
const operator = value === null ? "IS" : "="
|
||||
|
||||
if (operator === "=") {
|
||||
const hasId = field[field.length - 1] === "id"
|
||||
if (hasId) {
|
||||
builder.whereRaw(
|
||||
`${aliasMapping[attr]}.data @> '${getPathOperation(
|
||||
attr,
|
||||
field as string[],
|
||||
value
|
||||
)}'::jsonb`
|
||||
`${aliasMapping[attr]}.id IN (${inPlaceholders})`,
|
||||
[...value]
|
||||
)
|
||||
} else {
|
||||
const castType = this.getPostgresCastType(attr, field).cast
|
||||
builder.whereRaw(
|
||||
`(${aliasMapping[attr]}.data${nested}->>?)${castType} ${operator} ?`,
|
||||
[...field, value]
|
||||
const jsonbValues = value.map((item) =>
|
||||
JSON.stringify({ [nested]: item === null ? null : item })
|
||||
)
|
||||
builder.whereRaw(
|
||||
`${aliasMapping[attr]}.data IN ANY(ARRAY[${inPlaceholders}]::JSONB[])`,
|
||||
jsonbValues
|
||||
)
|
||||
}
|
||||
} else if (isDefined(value)) {
|
||||
let operator = "="
|
||||
|
||||
if (operator === "=") {
|
||||
const hasId = field[field.length - 1] === "id"
|
||||
if (hasId) {
|
||||
builder.whereRaw(`${aliasMapping[attr]}.id = ?`, value)
|
||||
} else {
|
||||
builder.whereRaw(
|
||||
`${aliasMapping[attr]}.data @> '${getPathOperation(
|
||||
attr,
|
||||
field as string[],
|
||||
value
|
||||
)}'::jsonb`
|
||||
)
|
||||
}
|
||||
} else {
|
||||
if (value === null) {
|
||||
operator = "IS"
|
||||
}
|
||||
|
||||
const hasId = field[field.length - 1] === "id"
|
||||
if (hasId) {
|
||||
builder.whereRaw(`(${aliasMapping[attr]}.id) ${operator} ?`, [
|
||||
value,
|
||||
])
|
||||
} else {
|
||||
const targetField = field[field.length - 1] as string
|
||||
|
||||
const jsonPath = buildSafeJsonPathQuery(
|
||||
targetField,
|
||||
operator,
|
||||
value
|
||||
)
|
||||
builder.whereRaw(`${aliasMapping[attr]}.data${nested} @@ ?`, [
|
||||
jsonPath,
|
||||
])
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -312,14 +409,15 @@ export class QueryBuilder {
|
||||
return builder
|
||||
}
|
||||
|
||||
private getShortAlias(aliasMapping, alias: string) {
|
||||
private getShortAlias(aliasMapping, alias, level = 0) {
|
||||
aliasMapping.__aliasIndex ??= 0
|
||||
|
||||
if (aliasMapping[alias]) {
|
||||
return aliasMapping[alias]
|
||||
}
|
||||
|
||||
aliasMapping[alias] = "t_" + aliasMapping.__aliasIndex++ + "_"
|
||||
aliasMapping[alias] =
|
||||
"t_" + aliasMapping.__aliasIndex++ + (level > 0 ? `_${level}` : "")
|
||||
|
||||
return aliasMapping[alias]
|
||||
}
|
||||
@@ -327,7 +425,7 @@ export class QueryBuilder {
|
||||
private buildQueryParts(
|
||||
structure: Select,
|
||||
parentAlias: string,
|
||||
parentEntity: string,
|
||||
parentEntity: IndexTypes.SchemaObjectEntityRepresentation["parents"][0],
|
||||
parentProperty: string,
|
||||
aliasPath: string[] = [],
|
||||
level = 0,
|
||||
@@ -337,23 +435,30 @@ export class QueryBuilder {
|
||||
|
||||
const isSelectableField = this.allSchemaFields.has(parentProperty)
|
||||
const entities = this.getEntity(currentAliasPath, false)
|
||||
const entityRef = entities?.ref!
|
||||
|
||||
// !entityRef.alias means the object has no table; it's a nested object
|
||||
if (isSelectableField || !entities || !entityRef?.alias) {
|
||||
if (isSelectableField || !entities || !entities?.ref?.alias) {
|
||||
// We are currently selecting a specific field of the parent entity or the entity is not found on the index schema
|
||||
// We don't need to build the query parts for this as there is no join
|
||||
return []
|
||||
}
|
||||
|
||||
const mainEntity = entityRef.entity
|
||||
const mainAlias =
|
||||
this.getShortAlias(aliasMapping, mainEntity.toLowerCase()) + level
|
||||
const mainEntity = entities
|
||||
const mainAlias = this.getShortAlias(
|
||||
aliasMapping,
|
||||
mainEntity.ref.entity.toLowerCase(),
|
||||
level
|
||||
)
|
||||
|
||||
const allEntities: any[] = []
|
||||
const allEntities: {
|
||||
entity: IndexTypes.SchemaPropertiesMap[0]
|
||||
parEntity: IndexTypes.SchemaObjectEntityRepresentation["parents"][0]
|
||||
parAlias: string
|
||||
alias: string
|
||||
}[] = []
|
||||
if (!entities.shortCutOf) {
|
||||
allEntities.push({
|
||||
entity: mainEntity,
|
||||
entity: entities,
|
||||
parEntity: parentEntity,
|
||||
parAlias: parentAlias,
|
||||
alias: mainAlias,
|
||||
@@ -372,7 +477,7 @@ export class QueryBuilder {
|
||||
|
||||
intermediateAlias.pop()
|
||||
|
||||
if (intermediateEntity.ref.entity === parentEntity) {
|
||||
if (intermediateEntity.ref.entity === parentEntity?.ref.entity) {
|
||||
break
|
||||
}
|
||||
|
||||
@@ -383,20 +488,20 @@ export class QueryBuilder {
|
||||
const alias =
|
||||
this.getShortAlias(
|
||||
aliasMapping,
|
||||
intermediateEntity.ref.entity.toLowerCase()
|
||||
intermediateEntity.ref.entity.toLowerCase(),
|
||||
level
|
||||
) +
|
||||
level +
|
||||
"_" +
|
||||
x
|
||||
|
||||
const parAlias =
|
||||
parentIntermediateEntity.ref.entity === parentEntity
|
||||
parentIntermediateEntity.ref.entity === parentEntity?.ref.entity
|
||||
? parentAlias
|
||||
: this.getShortAlias(
|
||||
aliasMapping,
|
||||
parentIntermediateEntity.ref.entity.toLowerCase()
|
||||
parentIntermediateEntity.ref.entity.toLowerCase(),
|
||||
level
|
||||
) +
|
||||
level +
|
||||
"_" +
|
||||
(x + 1)
|
||||
|
||||
@@ -405,8 +510,9 @@ export class QueryBuilder {
|
||||
}
|
||||
|
||||
allEntities.unshift({
|
||||
entity: intermediateEntity.ref.entity,
|
||||
parEntity: parentIntermediateEntity.ref.entity,
|
||||
entity: intermediateEntity as any,
|
||||
parEntity:
|
||||
parentIntermediateEntity as IndexTypes.SchemaObjectEntityRepresentation["parents"][0],
|
||||
parAlias,
|
||||
alias,
|
||||
})
|
||||
@@ -421,18 +527,41 @@ export class QueryBuilder {
|
||||
aliasMapping[currentAliasPath] = alias
|
||||
|
||||
if (level > 0) {
|
||||
const cName = entity.toLowerCase()
|
||||
const pName = `${parEntity}${entity}`.toLowerCase()
|
||||
const cName = entity.ref.entity.toLowerCase()
|
||||
|
||||
let joinTable = `cat_${cName} AS ${alias}`
|
||||
|
||||
const pivotTable = `cat_pivot_${pName}`
|
||||
joinBuilder.leftJoin(
|
||||
`${pivotTable} AS ${alias}_ref`,
|
||||
`${alias}_ref.parent_id`,
|
||||
`${parAlias}.id`
|
||||
)
|
||||
joinBuilder.leftJoin(joinTable, `${alias}.id`, `${alias}_ref.child_id`)
|
||||
if (entity.isInverse || parEntity.isInverse) {
|
||||
const pName =
|
||||
`${entity.ref.entity}${parEntity.ref.entity}`.toLowerCase()
|
||||
const pivotTable = `cat_pivot_${pName}`
|
||||
|
||||
joinBuilder.leftJoin(
|
||||
`${pivotTable} AS ${alias}_ref`,
|
||||
`${alias}_ref.child_id`,
|
||||
`${parAlias}.id`
|
||||
)
|
||||
joinBuilder.leftJoin(
|
||||
joinTable,
|
||||
`${alias}.id`,
|
||||
`${alias}_ref.parent_id`
|
||||
)
|
||||
} else {
|
||||
const pName =
|
||||
`${parEntity.ref.entity}${entity.ref.entity}`.toLowerCase()
|
||||
const pivotTable = `cat_pivot_${pName}`
|
||||
|
||||
joinBuilder.leftJoin(
|
||||
`${pivotTable} AS ${alias}_ref`,
|
||||
`${alias}_ref.parent_id`,
|
||||
`${parAlias}.id`
|
||||
)
|
||||
joinBuilder.leftJoin(
|
||||
joinTable,
|
||||
`${alias}.id`,
|
||||
`${alias}_ref.child_id`
|
||||
)
|
||||
}
|
||||
|
||||
const joinWhere = this.selector.joinWhere ?? {}
|
||||
const joinKey = Object.keys(joinWhere).find((key) => {
|
||||
@@ -441,7 +570,7 @@ export class QueryBuilder {
|
||||
const curPath = k.join(".")
|
||||
if (curPath === currentAliasPath) {
|
||||
const relEntity = this.getEntity(curPath, false)
|
||||
return relEntity?.ref?.entity === entity
|
||||
return relEntity?.ref?.entity === entity.ref.entity
|
||||
}
|
||||
|
||||
return false
|
||||
@@ -469,7 +598,7 @@ export class QueryBuilder {
|
||||
this.buildQueryParts(
|
||||
childStructure,
|
||||
mainAlias,
|
||||
mainEntity,
|
||||
mainEntity as any,
|
||||
child,
|
||||
aliasPath.concat(parentProperty),
|
||||
level + 1,
|
||||
@@ -499,9 +628,14 @@ export class QueryBuilder {
|
||||
const parentAliasPath = aliasPath.join(".")
|
||||
const alias = aliasMapping[parentAliasPath]
|
||||
delete selectParts[parentAliasPath]
|
||||
selectParts[currentAliasPath] = this.knex.raw(
|
||||
`${alias}.data->'${parentProperty}'`
|
||||
)
|
||||
|
||||
if (parentProperty === "id") {
|
||||
selectParts[currentAliasPath] = `${alias}.id`
|
||||
} else {
|
||||
selectParts[currentAliasPath] = this.knex.raw(
|
||||
`${alias}.data->'${parentProperty}'`
|
||||
)
|
||||
}
|
||||
return selectParts
|
||||
}
|
||||
|
||||
@@ -572,9 +706,7 @@ export class QueryBuilder {
|
||||
hasPagination?: boolean
|
||||
hasCount?: boolean
|
||||
returnIdOnly?: boolean
|
||||
}): string {
|
||||
const queryBuilder = this.knex.queryBuilder()
|
||||
|
||||
}): { sql: string; sqlCount?: string } {
|
||||
const selectOnlyStructure = this.selector.select
|
||||
const structure = this.requestedFields
|
||||
const filter = this.selector.where ?? {}
|
||||
@@ -584,17 +716,19 @@ export class QueryBuilder {
|
||||
const orderBy = this.transformOrderBy(
|
||||
(order && !Array.isArray(order) ? [order] : order) ?? []
|
||||
)
|
||||
const take_ = !isNaN(+take!) ? +take! : 15
|
||||
const skip_ = !isNaN(+skip!) ? +skip! : 0
|
||||
|
||||
const rootKey = this.getStructureKeys(structure)[0]
|
||||
const rootStructure = structure[rootKey] as Select
|
||||
|
||||
const entity = this.getEntity(rootKey)!.ref.entity
|
||||
const rootEntity = entity.toLowerCase()
|
||||
const entity = this.getEntity(rootKey)!
|
||||
const rootEntity = entity.ref.entity.toLowerCase()
|
||||
const aliasMapping: { [path: string]: string } = {}
|
||||
|
||||
let hasTextSearch: boolean = false
|
||||
let textSearchQuery: string | null = null
|
||||
const searchQueryFilterProp = `${rootEntity}.q`
|
||||
const searchQueryFilterProp = `${rootKey}.q`
|
||||
|
||||
if (searchQueryFilterProp in filter) {
|
||||
if (!filter[searchQueryFilterProp]) {
|
||||
@@ -606,10 +740,18 @@ export class QueryBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
const filterSortStructure =
|
||||
unflattenObjectKeys({
|
||||
...(this.rawConfig?.filters
|
||||
? unflattenObjectKeys(this.rawConfig?.filters)
|
||||
: {}),
|
||||
...orderBy,
|
||||
})[rootKey] ?? {}
|
||||
|
||||
const joinParts = this.buildQueryParts(
|
||||
rootStructure,
|
||||
filterSortStructure,
|
||||
"",
|
||||
entity,
|
||||
entity as IndexTypes.SchemaObjectEntityRepresentation["parents"][0],
|
||||
rootKey,
|
||||
[],
|
||||
0,
|
||||
@@ -617,35 +759,72 @@ export class QueryBuilder {
|
||||
)
|
||||
|
||||
const rootAlias = aliasMapping[rootKey]
|
||||
const selectParts = !returnIdOnly
|
||||
? this.buildSelectParts(
|
||||
selectOnlyStructure[rootKey] as Select,
|
||||
rootKey,
|
||||
aliasMapping
|
||||
)
|
||||
: { [rootKey + ".id"]: `${rootAlias}.id` }
|
||||
|
||||
queryBuilder.select(selectParts)
|
||||
const innerQueryBuilder = this.knex.queryBuilder()
|
||||
// Outer query to select the full data based on the paginated IDs
|
||||
const outerQueryBuilder = this.knex.queryBuilder()
|
||||
|
||||
queryBuilder.from(
|
||||
`cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootEntity)}`
|
||||
innerQueryBuilder.distinct(`${rootAlias}.id`)
|
||||
|
||||
const orderBySelects: Array<string | Knex.Raw> = []
|
||||
const orderByClauses: string[] = []
|
||||
|
||||
for (const aliasPath in orderBy) {
|
||||
const path = aliasPath.split(".")
|
||||
const field = path.pop()!
|
||||
const attr = path.join(".")
|
||||
const alias = aliasMapping[attr]
|
||||
const direction = orderBy[aliasPath]
|
||||
const pgType = this.getPostgresCastType(attr, [field])
|
||||
const hasId = field === "id"
|
||||
|
||||
let orderExpression:
|
||||
| string
|
||||
| Knex.Raw<any> = `${rootAlias}.id ${direction}`
|
||||
|
||||
if (alias) {
|
||||
const aggregateAlias = `"${aliasPath}_agg"`
|
||||
let aggregateExpression = `(${alias}.data->>'${field}')${pgType.cast}`
|
||||
|
||||
if (hasId) {
|
||||
aggregateExpression = `${alias}.id`
|
||||
} else {
|
||||
orderBySelects.push(
|
||||
direction === "ASC"
|
||||
? this.knex.raw(
|
||||
`MIN(${aggregateExpression}) AS ${aggregateAlias}`
|
||||
)
|
||||
: this.knex.raw(
|
||||
`MAX(${aggregateExpression}) AS ${aggregateAlias}`
|
||||
)
|
||||
)
|
||||
orderExpression = `${aggregateAlias} ${direction}`
|
||||
}
|
||||
|
||||
outerQueryBuilder.orderByRaw(`${aggregateExpression} ${direction}`)
|
||||
}
|
||||
|
||||
orderByClauses.push(orderExpression as string)
|
||||
}
|
||||
|
||||
// Add ordering columns to the select list of the inner query
|
||||
if (orderBySelects.length > 0) {
|
||||
innerQueryBuilder.select(orderBySelects)
|
||||
}
|
||||
|
||||
innerQueryBuilder.from(
|
||||
`cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootKey)}`
|
||||
)
|
||||
|
||||
joinParts.forEach((joinPart) => {
|
||||
queryBuilder.joinRaw(joinPart)
|
||||
innerQueryBuilder.joinRaw(joinPart)
|
||||
})
|
||||
|
||||
let searchWhereParts: string[] = []
|
||||
if (hasTextSearch) {
|
||||
/**
|
||||
* Build the search where parts for the query.
|
||||
* Apply the search query to the search vector column for every joined table except
|
||||
* the pivot joined table.
|
||||
*/
|
||||
searchWhereParts = [
|
||||
`${this.getShortAlias(aliasMapping, rootEntity)}.${
|
||||
const searchWhereParts = [
|
||||
`${rootAlias}.${
|
||||
this.#searchVectorColumnName
|
||||
} @@ plainto_tsquery('simple', '${textSearchQuery}')`,
|
||||
} @@ plainto_tsquery('simple', ?)`,
|
||||
...joinParts.flatMap((part) => {
|
||||
const aliases = part
|
||||
.split(" as ")
|
||||
@@ -657,233 +836,94 @@ export class QueryBuilder {
|
||||
(alias) =>
|
||||
`${alias}.${
|
||||
this.#searchVectorColumnName
|
||||
} @@ plainto_tsquery('simple', '${textSearchQuery}')`
|
||||
} @@ plainto_tsquery('simple', ?)`
|
||||
)
|
||||
}),
|
||||
]
|
||||
|
||||
queryBuilder.whereRaw(`(${searchWhereParts.join(" OR ")})`)
|
||||
}
|
||||
|
||||
// WHERE clause
|
||||
this.parseWhere(aliasMapping, filter, queryBuilder)
|
||||
|
||||
// ORDER BY clause
|
||||
for (const aliasPath in orderBy) {
|
||||
const path = aliasPath.split(".")
|
||||
const field = path.pop()
|
||||
const attr = path.join(".")
|
||||
|
||||
const pgType = this.getPostgresCastType(attr, [field])
|
||||
const alias = aliasMapping[attr]
|
||||
const direction = orderBy[aliasPath]
|
||||
|
||||
queryBuilder.orderByRaw(
|
||||
`(${alias}.data->>'${field}')${pgType.cast}` + " " + direction
|
||||
innerQueryBuilder.whereRaw(
|
||||
`(${searchWhereParts.join(" OR ")})`,
|
||||
Array(searchWhereParts.length).fill(textSearchQuery)
|
||||
)
|
||||
}
|
||||
|
||||
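The search predicate is now bound as a parameter (the `?` placeholder, filled from `textSearchQuery`) instead of being interpolated into the SQL string, with one binding per OR-ed alias. A small sketch of the same idea, assuming a hypothetical `document` tsvector column (the real column name comes from `this.#searchVectorColumnName`):

import { knex } from "knex"

const db = knex({ client: "pg", connection: process.env.DATABASE_URL })
const term = "running shoes"

// One predicate per joined alias, each with its own binding.
const parts = [
  `p.document @@ plainto_tsquery('simple', ?)`,
  `v.document @@ plainto_tsquery('simple', ?)`,
]

const matches = db("cat_product as p")
  .leftJoin("cat_product_variant as v", "v.product_id", "p.id")
  .select("p.id")
  .whereRaw(`(${parts.join(" OR ")})`, Array(parts.length).fill(term))

Binding the term also avoids having to escape quotes in user-supplied search input.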
    let take_ = !isNaN(+take!) ? +take! : 15
    let skip_ = !isNaN(+skip!) ? +skip! : 0
    this.parseWhere(aliasMapping, filter, innerQueryBuilder)

    let cte = ""
    // Group by root ID in the inner query
    if (orderBySelects.length > 0) {
      innerQueryBuilder.groupBy(`${rootAlias}.id`)
    }

    if (orderByClauses.length > 0) {
      innerQueryBuilder.orderByRaw(orderByClauses.join(", "))
    } else {
      innerQueryBuilder.orderBy(`${rootAlias}.id`, "ASC")
    }

    // Count query to estimate the number of results in parallel
    let countQuery: Knex.Raw | undefined
    if (hasCount) {
      const estimateQuery = innerQueryBuilder.clone()
      estimateQuery.clearSelect().select(1)
      estimateQuery.clearOrder()
      estimateQuery.clearCounters()

      countQuery = this.knex.raw(
        `SELECT count_estimate(?) AS estimate_count`,
        estimateQuery.toQuery()
      )
    }
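The `count_estimate(?)` call assumes a SQL function of that name exists in the database; its definition is not part of this hunk. For reference, a common EXPLAIN-based implementation of such a function looks like the following migration sketch, which is illustrative only and not necessarily what this commit ships:

import type { Knex } from "knex"

export async function up(knex: Knex): Promise<void> {
  // Read the planner's row estimate from EXPLAIN instead of running a full COUNT(*).
  await knex.raw(`
    CREATE OR REPLACE FUNCTION count_estimate(query text) RETURNS integer AS $$
    DECLARE
      rec record;
      rows integer;
    BEGIN
      FOR rec IN EXECUTE 'EXPLAIN ' || query LOOP
        rows := substring(rec."QUERY PLAN" FROM ' rows=([[:digit:]]+)');
        EXIT WHEN rows IS NOT NULL;
      END LOOP;
      RETURN rows;
    END;
    $$ LANGUAGE plpgsql VOLATILE STRICT;
  `)
}

export async function down(knex: Knex): Promise<void> {
  await knex.raw(`DROP FUNCTION IF EXISTS count_estimate(text)`)
}

Because it reads the planner's estimate rather than scanning rows, the returned value is approximate, which matches the `estimate_count` naming above.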
    // Apply pagination to the inner query
    if (hasPagination) {
      cte = this.buildCTEData({
        hasCount,
        searchWhereParts,
        take: take_,
        skip: skip_,
        orderBy,
      })

      if (hasCount) {
        queryBuilder.select(this.knex.raw("pd.count_total"))
      innerQueryBuilder.limit(take_)
      if (skip_ > 0) {
        innerQueryBuilder.offset(skip_)
      }

      queryBuilder.joinRaw(
        `JOIN paginated_data AS pd ON ${rootAlias}.id = pd.id`
      )
    }

    return cte + queryBuilder.toQuery()
  }
    const innerQueryAlias = "paginated_ids"

  public buildCTEData({
    hasCount,
    searchWhereParts = [],
    skip,
    take,
    orderBy,
  }: {
    hasCount: boolean
    searchWhereParts: string[]
    skip?: number
    take: number
    orderBy: OrderBy
  }): string {
    const queryBuilder = this.knex.queryBuilder()
    outerQueryBuilder.from(
      `cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootKey)}`
    )

    const hasWhere = isPresent(this.rawConfig?.filters) || isPresent(orderBy)
    const structure =
      hasWhere && !searchWhereParts.length
        ? unflattenObjectKeys({
            ...(this.rawConfig?.filters
              ? unflattenObjectKeys(this.rawConfig?.filters)
              : {}),
            ...orderBy,
          })
        : this.requestedFields
    outerQueryBuilder.joinRaw(
      `INNER JOIN (${innerQueryBuilder.toQuery()}) AS ${innerQueryAlias} ON ${rootAlias}.id = ${innerQueryAlias}.id`
    )

    const rootKey = this.getStructureKeys(structure)[0]
    this.parseWhere(aliasMapping, filter, outerQueryBuilder)

    const rootStructure = structure[rootKey] as Select

    const entity = this.getEntity(rootKey)!.ref.entity
    const rootEntity = entity.toLowerCase()
    const aliasMapping: { [path: string]: string } = {}

    const joinParts = this.buildQueryParts(
    const joinPartsOuterQuery = this.buildQueryParts(
      rootStructure,
      "",
      entity,
      entity as IndexTypes.SchemaObjectEntityRepresentation["parents"][0],
      rootKey,
      [],
      0,
      aliasMapping
    )
    joinPartsOuterQuery.forEach((joinPart) => {
      outerQueryBuilder.joinRaw(joinPart)
    })

    const rootAlias = aliasMapping[rootKey]
    const finalSelectParts = !returnIdOnly
      ? this.buildSelectParts(
          selectOnlyStructure[rootKey] as Select,
          rootKey,
          aliasMapping
        )
      : { [`${rootKey}.id`]: `${rootAlias}.id` }

    queryBuilder.select(this.knex.raw(`${rootAlias}.id as id`))
    outerQueryBuilder.select(finalSelectParts)

    queryBuilder.from(
      `cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootEntity)}`
    )
    const finalSql = outerQueryBuilder.toQuery()

    if (hasWhere) {
      joinParts.forEach((joinPart) => {
        queryBuilder.joinRaw(joinPart)
      })

      if (searchWhereParts.length) {
        queryBuilder.whereRaw(`(${searchWhereParts.join(" OR ")})`)
      }

      this.parseWhere(aliasMapping, this.selector.where!, queryBuilder)
    return {
      sql: finalSql,
      sqlCount: countQuery?.toQuery?.(),
    }
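With this refactor, the builder returns the data SQL and the optional estimate SQL as separate strings, so a caller can execute them concurrently. A hypothetical consumer of that `{ sql, sqlCount }` pair (the actual call site is not part of this hunk):

import { knex } from "knex"

const db = knex({ client: "pg", connection: process.env.DATABASE_URL })

// `sql` / `sqlCount` mirror the { sql, sqlCount } object returned above.
async function execute(sql: string, sqlCount?: string) {
  const [dataResult, countResult] = await Promise.all([
    db.raw(sql),
    sqlCount ? db.raw(sqlCount) : Promise.resolve(undefined),
  ])

  return {
    rows: dataResult.rows,
    estimateCount: countResult?.rows?.[0]?.estimate_count as number | undefined,
  }
}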
    // ORDER BY clause
    const orderAliases: string[] = []
    for (const aliasPath in orderBy) {
      const path = aliasPath.split(".")
      const field = path.pop()
      const attr = path.join(".")

      const pgType = this.getPostgresCastType(attr, [field])

      const alias = aliasMapping[attr]
      const direction = orderBy[aliasPath]

      const orderAlias = `"${alias}.data->>'${field}'"`
      orderAliases.push(orderAlias + " " + direction)

      // transform the order by clause to a select MIN/MAX
      queryBuilder.select(
        direction === "ASC"
          ? this.knex.raw(
              `MIN((${alias}.data->>'${field}')${pgType.cast}) as ${orderAlias}`
            )
          : this.knex.raw(
              `MAX((${alias}.data->>'${field}')${pgType.cast}) as ${orderAlias}`
            )
      )
    }

    queryBuilder.groupByRaw(`${rootAlias}.id`)

    const countSubQuery = hasCount
      ? `, (SELECT count(id) FROM data_select) as count_total`
      : ""

    return `
      WITH data_select AS (
        ${queryBuilder.toQuery()}
      ),
      paginated_data AS (
        SELECT id ${countSubQuery}
        FROM data_select
        ${orderAliases.length ? "ORDER BY " + orderAliases.join(", ") : ""}
        LIMIT ${take}
        ${skip ? `OFFSET ${skip}` : ""}
      )
    `
  }
  // NOTE: We are keeping the below code for now as a reference to an alternative implementation. DO NOT REMOVE
  // public buildQueryCount(): string {
  //   const queryBuilder = this.knex.queryBuilder()

  //   const hasWhere = isPresent(this.rawConfig?.filters)
  //   const structure = hasWhere ? this.rawConfig?.filters! : this.structure

  //   const rootKey = this.getStructureKeys(structure)[0]

  //   const rootStructure = structure[rootKey] as Select

  //   const entity = this.getEntity(rootKey)!.ref.entity
  //   const rootEntity = entity.toLowerCase()
  //   const aliasMapping: { [path: string]: string } = {}

  //   const joinParts = this.buildQueryParts(
  //     rootStructure,
  //     "",
  //     entity,
  //     rootKey,
  //     [],
  //     0,
  //     aliasMapping
  //   )

  //   const rootAlias = aliasMapping[rootKey]

  //   queryBuilder.select(this.knex.raw(`COUNT(${rootAlias}.id) as count`))

  //   queryBuilder.from(
  //     `cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootEntity)}`
  //   )

  //   const self = this
  //   if (hasWhere && joinParts.length) {
  //     const fromExistsRaw = joinParts.shift()!
  //     const [joinPartsExists, fromExistsPart] =
  //       fromExistsRaw.split(" left join ")
  //     const [fromExists, whereExists] = fromExistsPart.split(" on ")
  //     joinParts.unshift(joinPartsExists)

  //     queryBuilder.whereExists(function () {
  //       this.select(self.knex.raw(`1`))
  //       this.from(self.knex.raw(`${fromExists}`))
  //       this.joinRaw(joinParts.join("\n"))
  //       if (hasWhere) {
  //         self.parseWhere(aliasMapping, self.selector.where!, this)
  //         this.whereRaw(self.knex.raw(whereExists))
  //         return
  //       }

  //       this.whereRaw(self.knex.raw(whereExists))
  //     })
  //   } else {
  //     queryBuilder.whereExists(function () {
  //       this.select(self.knex.raw(`1`))
  //       if (hasWhere) {
  //         self.parseWhere(aliasMapping, self.selector.where!, this)
  //       }
  //     })
  //   }

  //   return queryBuilder.toQuery()
  // }
  public buildObjectFromResultset(
    resultSet: Record<string, any>[]
  ): Record<string, any>[] {
@@ -894,7 +934,11 @@ export class QueryBuilder {
    const isListMap: { [path: string]: boolean } = {}
    const referenceMap: { [key: string]: any } = {}
    const pathDetails: {
      [key: string]: { property: string; parents: string[]; parentPath: string }
      [key: string]: {
        property: string
        parents: string[]
        parentPath: string
      }
    } = {}

    const initializeMaps = (structure: Select, path: string[]) => {
@@ -127,17 +127,15 @@ export class Configuration {
    }

    if (idxSyncData.length > 0) {
    if (updatedConfig.length > 0) {
      const ids = await this.#indexSyncService.list({
        entity: updatedConfig.map((c) => c.entity),
      })
      idxSyncData.forEach((sync) => {
        const id = ids.find((i) => i.entity === sync.entity)?.id
        if (id) {
          sync.id = id
        }
      })
    }
      const ids = await this.#indexSyncService.list({
        entity: idxSyncData.map((c) => c.entity),
      })
      idxSyncData.forEach((sync) => {
        const id = ids.find((i) => i.entity === sync.entity)?.id
        if (id) {
          sync.id = id
        }
      })

      await this.#indexSyncService.upsert(idxSyncData)
    }
@@ -178,13 +178,13 @@ export function generateGraphQLSchema(
    // Link table relationships
    const primaryField = doesPrimaryExportSchema
      ? `${camelToSnakeCase(primary.alias)}: ${toPascalCase(
          composeTableName(primary.serviceName)
          primary.entity ?? composeTableName(primary.serviceName)
        )}`
      : ""

    const foreignField = doesForeignExportSchema
      ? `${camelToSnakeCase(foreign.alias)}: ${toPascalCase(
          composeTableName(foreign.serviceName)
          foreign.entity ?? composeTableName(foreign.serviceName)
        )}`
      : ""
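The change in this hunk is that the GraphQL field generated for each side of a link now prefers that side's `entity` name when the joiner config provides one, falling back to the pascal-cased table name otherwise. A small illustration with hypothetical values, assuming the helpers used in this file are exported from `@medusajs/utils`:

import {
  camelToSnakeCase,
  composeTableName,
  toPascalCase,
} from "@medusajs/utils"

// Hypothetical link side; `entity` is the new, optional piece of information.
const primary: { alias: string; serviceName: string; entity?: string } = {
  alias: "product",
  serviceName: "product",
  entity: "Product",
}

const primaryField = `${camelToSnakeCase(primary.alias)}: ${toPascalCase(
  primary.entity ?? composeTableName(primary.serviceName)
)}`
// With `entity` set this yields "product: Product"; without it the field type is
// derived from the service's table name, as before.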
@@ -1,8 +1,6 @@
import { defineJoinerConfig, Modules } from "@medusajs/framework/utils"
import { Price, PriceList, PricePreference, PriceSet } from "@models"
import { default as schema } from "./schema"

export const joinerConfig = defineJoinerConfig(Modules.PRICING, {
  schema,
  models: [PriceSet, PriceList, Price, PricePreference],
})
@@ -1,59 +0,0 @@
export const schema = `
type PriceSet {
  id: ID!
  prices: [Price]
  calculated_price: CalculatedPriceSet
}

type Price {
  id: ID!
  currency_code: String
  amount: Float
  min_quantity: Float
  max_quantity: Float
  rules_count: Int
  price_rules: [PriceRule]
  created_at: DateTime
  updated_at: DateTime
  deleted_at: DateTime
}

type PriceRule {
  id: ID!
  price_set_id: String!
  price_set: PriceSet
  attribute: String!
  value: String!
  priority: Int!
  price_id: String!
  price_list_id: String!
  created_at: DateTime
  updated_at: DateTime
  deleted_at: DateTime
}

type CalculatedPriceSet {
  id: ID!
  is_calculated_price_price_list: Boolean
  is_calculated_price_tax_inclusive: Boolean
  calculated_amount: Float
  raw_calculated_amount: JSON
  is_original_price_price_list: Boolean
  is_original_price_tax_inclusive: Boolean
  original_amount: Float
  raw_original_amount: JSON
  currency_code: String
  calculated_price: PriceDetails
  original_price: PriceDetails
}

type PriceDetails {
  id: ID
  price_list_id: String
  price_list_type: String
  min_quantity: Float
  max_quantity: Float
}
`

export default schema