feat: query.index (#11348)
What:
- `query.index` helper. It queries the Index Module and, like `query.graph`, hydrates the rest of the requested fields/relations when needed.
Not covered in this PR:
- Hydrating only the sub-entities returned by the query. Example: if 1 out of 5 variants matches, only that variant's data should be hydrated; currently all of the product's variants are merged in.
- Generating types for the indexed data
Example:
```ts
const query = container.resolve(ContainerRegistrationKeys.QUERY)

await query.index({
  entity: "product",
  fields: [
    "id",
    "description",
    "status",
    "variants.sku",
    "variants.barcode",
    "variants.material",
    "variants.options.value",
    "variants.prices.amount",
    "variants.prices.currency_code",
    "variants.inventory_items.inventory.sku",
    "variants.inventory_items.inventory.description",
  ],
  filters: {
    "variants.sku": { $like: "%-1" },
    "variants.prices.amount": { $gt: 30 },
  },
  pagination: {
    order: {
      "variants.prices.amount": "DESC",
    },
  },
})
```
This query returns all products where at least one variant has a SKU ending in `-1` and at least one price greater than `30`.
The Index Module only holds the data used to filter and paginate, so the object it returns looks like this:
```json
{
  "id": "prod_01JKEAM2GJZ14K64R0DHK0JE72",
  "title": null,
  "variants": [
    {
      "id": "variant_01JKEAM2HC89GWS95F6GF9C6YA",
      "sku": "extra-variant-1",
      "prices": [
        {
          "id": "price_01JKEAM2JADEWWX72F8QDP6QXT",
          "amount": 80,
          "currency_code": "USD"
        }
      ]
    }
  ]
}
```
All of the remaining fields are hydrated from their respective modules, and the final result is:
```json
{
  "id": "prod_01JKEAY2RJTF8TW9A23KTGY1GD",
  "description": "extra description",
  "status": "draft",
  "variants": [
    {
      "sku": "extra-variant-1",
      "barcode": null,
      "material": null,
      "id": "variant_01JKEAY2S945CRZ6X4QZJ7GVBJ",
      "options": [
        {
          "value": "Red"
        }
      ],
      "prices": [
        {
          "amount": 20,
          "currency_code": "CAD",
          "id": "price_01JKEAY2T2EEYSWZHPGG11B7W7"
        },
        {
          "amount": 80,
          "currency_code": "USD",
          "id": "price_01JKEAY2T2NJK2E5468RK84CAR"
        }
      ],
      "inventory_items": [
        {
          "variant_id": "variant_01JKEAY2S945CRZ6X4QZJ7GVBJ",
          "inventory_item_id": "iitem_01JKEAY2SNY2AWEHPZN0DDXVW6",
          "inventory": {
            "sku": "extra-variant-1",
            "description": "extra variant 1",
            "id": "iitem_01JKEAY2SNY2AWEHPZN0DDXVW6"
          }
        }
      ]
    }
  ]
}
```
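`query.index` resolves to the same result-set shape as `query.graph`: an object with `data` and `metadata`. Below is a minimal consumption sketch; the field list and filter values are illustrative, and the `metadata` shape (`count`, `skip`, `take`) follows what the pagination options produce in this PR's tests, with `count` possibly omitted when no pagination is requested.
```ts
const query = container.resolve(ContainerRegistrationKeys.QUERY)

const { data, metadata } = await query.index({
  entity: "product",
  fields: ["id", "variants.sku", "variants.prices.amount"],
  filters: { "variants.sku": { $like: "%-1" } },
  pagination: {
    skip: 0,
    take: 50,
    order: { "variants.prices.amount": "DESC" },
  },
})

// `data` holds the hydrated products, in the order computed by the Index Module.
console.log(data.length)
// `metadata` carries the pagination info, e.g. { count: 2, skip: 0, take: 50 }.
console.log(metadata?.count)
```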
Co-authored-by: Adrien de Peretti <25098370+adrien2p@users.noreply.github.com>
Changed files (summary of the diff):
- `.changeset/fresh-beers-visit.md` (new, +8): patch bumps for `@medusajs/orchestration`, `@medusajs/modules-sdk`, `@medusajs/types` and `@medusajs/index`, with the message "feat: query.index".
- `integration-tests/modules/__tests__/index/query.index.ts` (new, +239): integration test that enables the Index Module (`ENABLE_INDEX_MODULE=true`), creates two products with variants, options and prices through the admin API, waits for indexing, runs the `query.index` call shown above, and asserts the hydrated, ordered result, including options, prices and inventory items for each matching variant.
- An existing index integration test: minor cleanup (two lines removed) around the commented-out `index_data`/`index_relation` staled-rows assertions.
- The modules-sdk `MedusaApp` bootstrap: imports `IIndexService`, resolves the Index Module from the shared container with `allowUnregistered: true`, and passes it to `createQuery({ remoteQuery, indexModule })`; the `Link` import is moved up with the other imports.
- A modules-sdk joiner-config fixture (`custom_user`/`pricing`): `Price.amount` and `DeepNestedPrice.amount` change from `Int` to `Float` in the GraphQL schema; import order tidied.
- The modules-sdk `Query` wrapper: the class is now constructed with `{ remoteQuery, indexModule }`, and a new `index()` method throws if the Index Module is not loaded, prefixes `fields`, `filters`, `joinFilters` and `pagination.order` with the entity name, queries the Index Module, then hydrates the matching records through `graph()` using the index result as `initialData`, returning `{ data, metadata }`. `createQuery()` takes the same object and exposes `backwardCompatibleQuery.index`.
- The modules-sdk `RemoteQuery`: introduces a `BASE_PREFIX` constant, `getAllFieldsAndRelations()` now extracts a per-path `filters` argument (via `isPresent`) into `args`, and the fetch path merges any non-base args into the filters sent to the module before calling `list`/`listAndCount`.
- `RemoteJoiner` test fixtures and specs: the `product_id: [101, 103]` cases become a single `product_id: 103` with a `variant_id`, one service config's `primaryKeys` change from `["id"]` to `["id,product_id"]`, and the expectations are updated accordingly (single nested `product`/`product_user_alias` objects instead of arrays, deeper `variant → product → user` expansions).
- The orchestration `RemoteJoiner`: `parseProperties()` now also returns an `aliasRealPathMap` and `parseExpands()` accepts `options`/`initialData`; a new `createFilterFromInitialData()` walks the parsed expands and pushes primary-key filters derived from the initial data onto each expand's args; a new `mergeInitialData()` merges initial data into the fetched items by primary key (recursively through expands) while preserving the initial-data order; `query()` builds the parsed expands before resolving primary keys and merges initial data into the root result set; `gerPrimaryKeysAndOtherFilters()` only inspects the `filters` arg when no primary-key arg is passed.
- `@medusajs/types` index types: `QueryFunctionReturnPagination.count` becomes optional (`count?: number`).
- `@medusajs/types` remote-query types: adds a `QueryIndexFunction` type (same input shape as `graph`, plus `joinFilters`) and an `index` entry on `RemoteQueryFunction`.
@@ -509,7 +509,7 @@ describe("IndexModuleService query", function () {
|
||||
})
|
||||
|
||||
it("should query products filtering by variant sku", async () => {
|
||||
const { data } = await module.query({
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
filters: {
|
||||
product: {
|
||||
@@ -518,6 +518,16 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
@@ -539,8 +549,8 @@ describe("IndexModuleService query", function () {
|
||||
])
|
||||
})
|
||||
|
||||
it("should query products filtering by variant sku", async () => {
|
||||
const { data } = await module.query({
|
||||
it("should query products filtering by variant sku and join filters on prices amount", async () => {
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
joinFilters: {
|
||||
"product.variants.prices.amount": { $gt: 110 },
|
||||
@@ -552,6 +562,16 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
@@ -568,8 +588,45 @@ describe("IndexModuleService query", function () {
|
||||
])
|
||||
})
|
||||
|
||||
it("should query products filtering by price and returning the complete entity", async () => {
|
||||
it("should filter using fields not selected", async () => {
|
||||
const { data } = await module.query({
|
||||
fields: ["product.id", "product.variants.*"],
|
||||
pagination: {
|
||||
order: {
|
||||
product: {
|
||||
variants: {
|
||||
prices: {
|
||||
amount: "DESC",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_1",
|
||||
variants: [
|
||||
{
|
||||
id: "var_1",
|
||||
sku: "aaa test aaa",
|
||||
},
|
||||
{
|
||||
id: "var_2",
|
||||
sku: "sku 123",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "prod_2",
|
||||
variants: [],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should query products filtering by price and returning the complete entity", async () => {
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
filters: {
|
||||
product: {
|
||||
@@ -581,6 +638,16 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
},
|
||||
keepFilteredEntities: true,
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
@@ -658,7 +725,7 @@ describe("IndexModuleService query", function () {
|
||||
})
|
||||
|
||||
it("should paginate products", async () => {
|
||||
const { data } = await module.query({
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
pagination: {
|
||||
take: 1,
|
||||
@@ -666,6 +733,11 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 2,
|
||||
skip: 1,
|
||||
take: 1,
|
||||
})
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_2",
|
||||
@@ -682,7 +754,7 @@ describe("IndexModuleService query", function () {
|
||||
})
|
||||
|
||||
it("should handle null values on where clause", async () => {
|
||||
const { data } = await module.query({
|
||||
const { data: data_, metadata } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
filters: {
|
||||
product: {
|
||||
@@ -691,25 +763,69 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data_).toEqual([
|
||||
{
|
||||
id: "prod_2",
|
||||
deep: { a: 1, obj: { b: 15 } },
|
||||
title: "Product 2 title",
|
||||
variants: [],
|
||||
},
|
||||
])
|
||||
|
||||
const { data, metadata: metadata2 } = await module.query({
|
||||
fields: ["product.*", "product.variants.*", "product.variants.prices.*"],
|
||||
filters: {
|
||||
product: {
|
||||
variants: {
|
||||
sku: { $ne: null },
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 100,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata2).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 100,
|
||||
})
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_2",
|
||||
title: "Product 2 title",
|
||||
deep: {
|
||||
a: 1,
|
||||
obj: {
|
||||
b: 15,
|
||||
id: "prod_1",
|
||||
variants: [
|
||||
{
|
||||
id: "var_1",
|
||||
sku: "aaa test aaa",
|
||||
prices: [{ id: "money_amount_1", amount: 100 }],
|
||||
},
|
||||
},
|
||||
variants: [],
|
||||
{
|
||||
id: "var_2",
|
||||
sku: "sku 123",
|
||||
prices: [{ id: "money_amount_2", amount: 10 }],
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("should query products filtering by deep nested levels", async () => {
|
||||
const { data } = await module.query({
|
||||
const { data, metadata } = await module.query({
|
||||
fields: ["product.*"],
|
||||
filters: {
|
||||
product: {
|
||||
@@ -720,8 +836,17 @@ describe("IndexModuleService query", function () {
|
||||
},
|
||||
},
|
||||
},
|
||||
pagination: {
|
||||
take: 1,
|
||||
skip: 0,
|
||||
},
|
||||
})
|
||||
|
||||
expect(metadata).toEqual({
|
||||
count: 1,
|
||||
skip: 0,
|
||||
take: 1,
|
||||
})
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: "prod_2",
|
||||
|
||||
- `@medusajs/index` `package.json`: the `test:integration` glob changes from `integration-tests/**/__tests__/**/*.ts` to `integration-tests/__tests__/**/*.ts`.
- Index module `DataSynchronizer`: removed entities are now cleaned up with raw `DELETE` statements on `index_data`/`index_relation` instead of repository deletes, staling uses a raw `UPDATE "index_data" SET "staled_at" = NOW()`, entities whose module does not expose an orderable `id` primary key are acknowledged with an error, and the batch loop is simplified to stop when a page comes back empty.
- Index module `PostgresProvider`: `query()` distinguishes `hasPagination` (skip or take) from `hasCount` (skip), builds the data and count statements through the `QueryBuilder` (which now receives the raw config and the merged requested fields) and runs them in parallel via `promiseAll`, returns `metadata` (`{ count?, skip, take }`) and propagates it when `keepFilteredEntities` re-queries by id; all `index_data` upserts now pass `onConflict` options (merge on `["id", "name"]`, merging `["data", "staled_at"]`), matching the existing `index_relation` upserts.
@@ -1,6 +1,6 @@
|
||||
import { IndexTypes } from "@medusajs/framework/types"
|
||||
import { SqlEntityManager } from "@mikro-orm/postgresql"
|
||||
import { schemaObjectRepresentationPropertiesToOmit } from "@types"
|
||||
import { IndexTypes } from "@medusajs/framework/types"
|
||||
|
||||
export async function createPartitions(
|
||||
schemaObjectRepresentation: IndexTypes.SchemaObjectRepresentation,
|
||||
@@ -54,6 +54,10 @@ export async function createPartitions(
|
||||
`CREATE INDEX CONCURRENTLY IF NOT EXISTS "IDX_cat_${cName}_data_gin" ON ${activeSchema}cat_${cName} USING GIN ("data" jsonb_path_ops)`
|
||||
)
|
||||
|
||||
part.push(
|
||||
`CREATE INDEX CONCURRENTLY IF NOT EXISTS "IDX_cat_${cName}_id" ON ${activeSchema}cat_${cName} ("id")`
|
||||
)
|
||||
|
||||
// create child id index on pivot partitions
|
||||
for (const parent of schemaObjectRepresentation[key].parents) {
|
||||
const pName = `${parent.ref.entity}${key}`.toLowerCase()
|
||||
|
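This hunk adds a plain index on `id` for every entity partition, next to the existing GIN index on the `data` JSONB column; the id index supports the `DISTINCT ON (… .id)` pagination and `COUNT(DISTINCT … .id)` queries introduced further down. A small sketch of the statements the loop presumably emits for one entity — the `cat_` prefix and index names come from the diff, while the schema prefix value is an assumption:

```ts
// Illustrative only: the two index statements generated for one partition.
function partitionIndexStatements(
  entityName: string,
  activeSchema = "" // e.g. `"public".` — assumed; passed in by the real caller
): string[] {
  const cName = entityName.toLowerCase()
  return [
    `CREATE INDEX CONCURRENTLY IF NOT EXISTS "IDX_cat_${cName}_data_gin" ` +
      `ON ${activeSchema}cat_${cName} USING GIN ("data" jsonb_path_ops)`,
    `CREATE INDEX CONCURRENTLY IF NOT EXISTS "IDX_cat_${cName}_id" ` +
      `ON ${activeSchema}cat_${cName} ("id")`,
  ]
}

// partitionIndexStatements("Product") -> index statements for the cat_product partition
```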
@@ -1,5 +1,10 @@
import { IndexTypes } from "@medusajs/framework/types"
import { GraphQLUtils, isObject, isString } from "@medusajs/framework/utils"
import {
  GraphQLUtils,
  isObject,
  isPresent,
  isString,
} from "@medusajs/framework/utils"
import { Knex } from "@mikro-orm/knex"
import { OrderBy, QueryFormat, QueryOptions, Select } from "@types"

@@ -24,6 +29,10 @@ export class QueryBuilder {
  private readonly options?: QueryOptions
  private readonly schema: IndexTypes.SchemaObjectRepresentation
  private readonly allSchemaFields: Set<string>
  private readonly rawConfig?: IndexTypes.IndexQueryConfig<any>
  private readonly requestedFields: {
    [key: string]: any
  }

  constructor(args: {
    schema: IndexTypes.SchemaObjectRepresentation
@@ -31,6 +40,10 @@ export class QueryBuilder {
    knex: Knex
    selector: QueryFormat
    options?: QueryOptions
    rawConfig?: IndexTypes.IndexQueryConfig<any>
    requestedFields: {
      [key: string]: any
    }
  }) {
    this.schema = args.schema
    this.entityMap = args.entityMap
@@ -41,6 +54,8 @@ export class QueryBuilder {
    this.allSchemaFields = new Set(
      Object.values(this.schema).flatMap((entity) => entity.fields ?? [])
    )
    this.rawConfig = args.rawConfig
    this.requestedFields = args.requestedFields
  }

  private getStructureKeys(structure) {
@@ -56,7 +71,9 @@ export class QueryBuilder {
        return
      }

      throw new Error(`Could not find entity for path: ${path}`)
      throw new Error(
        `Could not find entity for path: ${path}. It might not be indexed.`
      )
    }

    return this.schema._schemaPropertiesMap[path]
@@ -66,7 +83,7 @@ export class QueryBuilder {
    const entity = this.getEntity(path)?.ref?.entity!
    const fieldRef = this.entityMap[entity]._fields[field]
    if (!fieldRef) {
      throw new Error(`Field ${field} not found in the entityMap.`)
      throw new Error(`Field ${field} is not indexed.`)
    }

    let currentType = fieldRef.type
@@ -224,6 +241,8 @@ export class QueryBuilder {
      const val = operator === "IN" ? subValue : [subValue]
      if (operator === "=" && subValue === null) {
        operator = "IS"
      } else if (operator === "!=" && subValue === null) {
        operator = "IS NOT"
      }

      if (operator === "=") {
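The added branch rewrites equality comparisons against `null` into `IS` / `IS NOT`, since `= NULL` never matches in Postgres. A minimal sketch of that normalization in isolation — only the operator mapping is taken from the hunk; the surrounding `parseWhere` logic is not reproduced here:

```ts
// Sketch: "=" and "!=" against null become IS / IS NOT comparisons.
function normalizeNullOperator(operator: string, value: unknown): string {
  if (operator === "=" && value === null) {
    return "IS"
  } else if (operator === "!=" && value === null) {
    return "IS NOT"
  }
  return operator
}

// e.g. a filter like { "variants.deleted_at": null } can now render as
// "... deleted_at IS NULL" instead of the always-false "... deleted_at = NULL".
```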
@@ -306,13 +325,16 @@ export class QueryBuilder {

    const isSelectableField = this.allSchemaFields.has(parentProperty)
    const entities = this.getEntity(currentAliasPath, false)
    if (isSelectableField || !entities) {
    const entityRef = entities?.ref!

    // !entityRef.alias means the object has no table; it's a nested object
    if (isSelectableField || !entities || !entityRef?.alias) {
      // We are currently selecting a specific field of the parent entity, or the entity is not found in the index schema.
      // We don't need to build the query parts for this as there is no join.
      return []
    }

    const mainEntity = entities.ref.entity
    const mainEntity = entityRef.entity
    const mainAlias =
      this.getShortAlias(aliasMapping, mainEntity.toLowerCase()) + level

@@ -530,10 +552,18 @@ export class QueryBuilder {
    return result
  }

  public buildQuery(countAllResults = true, returnIdOnly = false): string {
  public buildQuery({
    hasPagination = true,
    hasCount = false,
    returnIdOnly = false,
  }: {
    hasPagination?: boolean
    hasCount?: boolean
    returnIdOnly?: boolean
  }): [string, string | null] {
    const queryBuilder = this.knex.queryBuilder()

    const structure = this.structure
    const structure = this.requestedFields
    const filter = this.selector.where ?? {}

    const { orderBy: order, skip, take } = this.options ?? {}
@@ -564,15 +594,6 @@ export class QueryBuilder {
      ? this.buildSelectParts(rootStructure, rootKey, aliasMapping)
      : { [rootKey + ".id"]: `${rootAlias}.id` }

    if (countAllResults) {
      selectParts["offset_"] = this.knex.raw(
        `DENSE_RANK() OVER (ORDER BY ${this.getShortAlias(
          aliasMapping,
          rootEntity
        )}.id)`
      )
    }

    queryBuilder.select(selectParts)

    queryBuilder.from(
@@ -601,24 +622,150 @@ export class QueryBuilder {
      )
    }

    let sql = `WITH data AS (${queryBuilder.toQuery()})
      SELECT * ${
        countAllResults ? ", (SELECT max(offset_) FROM data) AS count" : ""
      }
      FROM data`
    let distinctQueryBuilder = queryBuilder.clone()

    let take_ = !isNaN(+take!) ? +take! : 15
    let skip_ = !isNaN(+skip!) ? +skip! : 0
    if (typeof take === "number" || typeof skip === "number") {
      sql += `
        WHERE offset_ > ${skip_}
        AND offset_ <= ${skip_ + take_}
      `
    let sql = ""

    if (hasPagination) {
      const idColumn = `${this.getShortAlias(aliasMapping, rootEntity)}.id`
      distinctQueryBuilder.clearSelect()
      distinctQueryBuilder.select(
        this.knex.raw(`DISTINCT ON (${idColumn}) ${idColumn} as "id"`)
      )
      distinctQueryBuilder.limit(take_)
      distinctQueryBuilder.offset(skip_)

      sql += `WITH paginated_data AS (${distinctQueryBuilder.toQuery()}),`

      queryBuilder.andWhere(
        this.knex.raw(`${idColumn} IN (SELECT id FROM "paginated_data")`)
      )
    }

    return sql
    sql += `${hasPagination ? " " : "WITH"} data AS (${queryBuilder.toQuery()})
      SELECT *
      FROM data`

    let sqlCount = ""
    if (hasCount) {
      sqlCount = this.buildQueryCount()
    }

    return [sql, hasCount ? sqlCount : null]
  }
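`buildQuery` now takes an options object and returns a tuple of the data SQL plus an optional count SQL, instead of a single string that embedded a `DENSE_RANK()` window for both pagination and counting. A hedged sketch of how a caller might consume the new contract — the builder type and the `execute` callback are assumptions for illustration; only the argument and return shapes come from the diff:

```ts
// Hypothetical call site for the new buildQuery signature.
type IndexQueryBuilderLike = {
  buildQuery(args: {
    hasPagination?: boolean
    hasCount?: boolean
    returnIdOnly?: boolean
  }): [string, string | null]
}

async function runIndexQuery(
  builder: IndexQueryBuilderLike,
  execute: (sql: string) => Promise<Record<string, any>[]>
): Promise<{ rows: Record<string, any>[]; count?: number }> {
  // With hasPagination, the data SQL already wraps a paginated_data CTE that
  // selects DISTINCT root ids with LIMIT/OFFSET before joining the rest.
  const [dataSql, countSql] = builder.buildQuery({
    hasPagination: true,
    hasCount: true,
  })

  const rows = await execute(dataSql)
  // countSql is null when hasCount is false; when present it runs as a separate query.
  const count = countSql ? Number((await execute(countSql))[0]?.count) : undefined

  return { rows, count }
}
```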
  public buildQueryCount(): string {
    const queryBuilder = this.knex.queryBuilder()

    const hasWhere = isPresent(this.rawConfig?.filters)
    const structure = hasWhere ? this.rawConfig?.filters! : this.requestedFields

    const rootKey = this.getStructureKeys(structure)[0]

    const rootStructure = structure[rootKey] as Select

    const entity = this.getEntity(rootKey)!.ref.entity
    const rootEntity = entity.toLowerCase()
    const aliasMapping: { [path: string]: string } = {}

    const joinParts = this.buildQueryParts(
      rootStructure,
      "",
      entity,
      rootKey,
      [],
      0,
      aliasMapping
    )

    const rootAlias = aliasMapping[rootKey]

    queryBuilder.select(
      this.knex.raw(`COUNT(DISTINCT ${rootAlias}.id) as count`)
    )

    queryBuilder.from(
      `cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootEntity)}`
    )

    if (hasWhere) {
      joinParts.forEach((joinPart) => {
        queryBuilder.joinRaw(joinPart)
      })

      this.parseWhere(aliasMapping, this.selector.where!, queryBuilder)
    }

    return queryBuilder.toQuery()
  }

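The count query counts distinct root ids and only adds the relation joins and `WHERE` clause when filters are actually present, so an unfiltered count stays on the root partition. A rough sketch of that shape — the alias and partition names below are placeholders, not the generated output:

```ts
// Sketch of the count statement's shape under the hasWhere branch above.
function sketchCountSql(
  rootAlias: string,
  rootPartition: string,
  joinParts: string[],
  whereSql: string | null
): string {
  let sql = `SELECT COUNT(DISTINCT ${rootAlias}.id) AS count FROM ${rootPartition} AS ${rootAlias}`
  if (whereSql) {
    // Joins are only needed when the filters reference related entities.
    sql += ` ${joinParts.join(" ")} WHERE ${whereSql}`
  }
  return sql
}

// sketchCountSql("product0", "cat_product", [], null)
//   -> "SELECT COUNT(DISTINCT product0.id) AS count FROM cat_product AS product0"
```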
  // NOTE: We are keeping the code below for now as a reference to an alternative implementation. DO NOT REMOVE
  // public buildQueryCount(): string {
  //   const queryBuilder = this.knex.queryBuilder()

  //   const hasWhere = isPresent(this.rawConfig?.filters)
  //   const structure = hasWhere ? this.rawConfig?.filters! : this.structure

  //   const rootKey = this.getStructureKeys(structure)[0]

  //   const rootStructure = structure[rootKey] as Select

  //   const entity = this.getEntity(rootKey)!.ref.entity
  //   const rootEntity = entity.toLowerCase()
  //   const aliasMapping: { [path: string]: string } = {}

  //   const joinParts = this.buildQueryParts(
  //     rootStructure,
  //     "",
  //     entity,
  //     rootKey,
  //     [],
  //     0,
  //     aliasMapping
  //   )

  //   const rootAlias = aliasMapping[rootKey]

  //   queryBuilder.select(this.knex.raw(`COUNT(${rootAlias}.id) as count`))

  //   queryBuilder.from(
  //     `cat_${rootEntity} AS ${this.getShortAlias(aliasMapping, rootEntity)}`
  //   )

  //   const self = this
  //   if (hasWhere && joinParts.length) {
  //     const fromExistsRaw = joinParts.shift()!
  //     const [joinPartsExists, fromExistsPart] =
  //       fromExistsRaw.split(" left join ")
  //     const [fromExists, whereExists] = fromExistsPart.split(" on ")
  //     joinParts.unshift(joinPartsExists)

  //     queryBuilder.whereExists(function () {
  //       this.select(self.knex.raw(`1`))
  //       this.from(self.knex.raw(`${fromExists}`))
  //       this.joinRaw(joinParts.join("\n"))
  //       if (hasWhere) {
  //         self.parseWhere(aliasMapping, self.selector.where!, this)
  //         this.whereRaw(self.knex.raw(whereExists))
  //         return
  //       }

  //       this.whereRaw(self.knex.raw(whereExists))
  //     })
  //   } else {
  //     queryBuilder.whereExists(function () {
  //       this.select(self.knex.raw(`1`))
  //       if (hasWhere) {
  //         self.parseWhere(aliasMapping, self.selector.where!, this)
  //       }
  //     })
  //   }

  //   return queryBuilder.toQuery()
  // }

  public buildObjectFromResultset(
    resultSet: Record<string, any>[]
  ): Record<string, any>[] {