Feat/index sync data (#11169)

**what**
Implementation of the synchronisation process for each configured entity to be indexed
This commit is contained in:
Adrien de Peretti
2025-01-27 14:56:12 +01:00
committed by GitHub
parent 5093224914
commit ea402875a5
7 changed files with 685 additions and 33 deletions

View File

@@ -1,7 +1,5 @@
{
"namespaces": [
"public"
],
"namespaces": ["public"],
"name": "public",
"tables": [
{
@@ -108,10 +106,7 @@
},
{
"keyName": "index_data_pkey",
"columnNames": [
"id",
"name"
],
"columnNames": ["id", "name"],
"composite": true,
"constraint": true,
"primary": true,
@@ -168,12 +163,7 @@
"primary": false,
"nullable": false,
"default": "'pending'",
"enumItems": [
"pending",
"processing",
"done",
"error"
],
"enumItems": ["pending", "processing", "done", "error"],
"mappedType": "enum"
},
"created_at": {
@@ -232,9 +222,7 @@
},
{
"keyName": "index_metadata_pkey",
"columnNames": [
"id"
],
"columnNames": ["id"],
"composite": false,
"constraint": true,
"primary": true,
@@ -366,9 +354,7 @@
},
{
"keyName": "index_relation_pkey",
"columnNames": [
"id"
],
"columnNames": ["id"],
"composite": false,
"constraint": true,
"primary": true,

View File

@@ -0,0 +1,21 @@
import { Migration } from "@mikro-orm/migrations"
export class Migration20250127105159 extends Migration {
  /**
   * Tightens the `index_relation` table: `id` becomes non-nullable and a
   * composite unique constraint guarantees a given parent/child/pivot
   * relation can only be stored once.
   */
  override async up(): Promise<void> {
    const statements = [
      `alter table if exists "index_relation" alter column "id" set not null;`,
      `alter table if exists "index_relation" add constraint "IDX_index_relation_id_pivot_parent_name_child_name_parent_id_child_id_unique" unique ("parent_id", "child_id", "child_name", "parent_name", "pivot");`,
    ]
    for (const sql of statements) {
      this.addSql(sql)
    }
  }

  /**
   * Reverts `up`: drops the composite unique constraint first, then allows
   * `id` to be NULL again.
   */
  override async down(): Promise<void> {
    const statements = [
      `alter table if exists "index_relation" drop constraint "IDX_index_relation_id_pivot_parent_name_child_name_parent_id_child_id_unique";`,
      `alter table if exists "index_relation" alter column "id" drop not null;`,
    ]
    for (const sql of statements) {
      this.addSql(sql)
    }
  }
}

View File

@@ -8,4 +8,5 @@ const IndexRelation = model.define("IndexRelation", {
child_name: model.text(),
child_id: model.text().index("IDX_index_relation_child_id"),
})
export default IndexRelation

View File

@@ -14,7 +14,11 @@ import {
MedusaContext,
toMikroORMEntity,
} from "@medusajs/framework/utils"
import { EntityManager, SqlEntityManager } from "@mikro-orm/postgresql"
import {
EntityManager,
EntityRepository,
SqlEntityManager,
} from "@mikro-orm/postgresql"
import { IndexData, IndexRelation } from "@models"
import { createPartitions, QueryBuilder } from "../utils"
import { flattenObjectKeys } from "../utils/flatten-object-keys"
@@ -204,13 +208,14 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
}
const { fields, alias } = schemaEntityObjectRepresentation
const { data: entityData } = await this.query_.graph({
const graphResult = await this.query_.graph({
entity: alias,
filters: {
id: ids,
},
fields: [...new Set(["id", ...fields])],
})
const { data: entityData } = graphResult
const argument = {
entity: schemaEntityObjectRepresentation.entity,
@@ -340,7 +345,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
transactionManager: SqlEntityManager
}
const indexRepository = em.getRepository(toMikroORMEntity(IndexData))
const indexRelationRepository = em.getRepository(
const indexRelationRepository: EntityRepository<any> = em.getRepository(
toMikroORMEntity(IndexRelation)
)
@@ -369,6 +374,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
id: cleanedEntityData.id,
name: entity,
data: cleanedEntityData,
// stale: false,
})
/**
@@ -394,18 +400,29 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
id: (parentData_ as any).id,
name: parentEntity,
data: parentData_,
// stale: false,
})
const parentIndexRelationEntry = indexRelationRepository.create({
parent_id: (parentData_ as any).id,
parent_name: parentEntity,
child_id: cleanedEntityData.id,
child_name: entity,
pivot: `${parentEntity}-${entity}`,
})
indexRelationRepository
.getEntityManager()
.persist(parentIndexRelationEntry)
await indexRelationRepository.upsert(
{
parent_id: (parentData_ as any).id,
parent_name: parentEntity,
child_id: cleanedEntityData.id,
child_name: entity,
pivot: `${parentEntity}-${entity}`,
// stale: false,
},
{
onConflictAction: "merge",
onConflictFields: [
"pivot",
"parent_id",
"child_id",
"parent_name",
"child_name",
],
}
)
}
}
}
@@ -453,6 +470,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
acc[property] = entityData[property]
return acc
}, {}),
// stale: false,
}
})
)
@@ -608,6 +626,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
id: cleanedEntityData.id,
name: entity,
data: cleanedEntityData,
// stale: false,
})
/**
@@ -620,6 +639,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
child_id: cleanedEntityData.id,
child_name: entity,
pivot: `${parentEntityName}-${entity}`,
// stale: false,
})
const childIndexRelationEntry = indexRelationRepository.create({
@@ -628,6 +648,7 @@ export class PostgresProvider implements IndexTypes.StorageProvider {
child_id: entityData[childPropertyId] as string,
child_name: childEntityName,
pivot: `${entity}-${childEntityName}`,
// stale: false,
})
indexRelationRepository

View File

@@ -0,0 +1,137 @@
import {
IndexTypes,
RemoteQueryFunction,
SchemaObjectEntityRepresentation,
Event,
} from "@medusajs/framework/types"
import { CommonEvents } from "@medusajs/framework/utils"
/**
 * Synchronizes the data of a configured entity into the index storage.
 *
 * Pages through the source module via the remote query (ordered by the
 * entity's linkable primary key) and replays each page through the storage
 * provider as `*.created` events, acknowledging progress after every batch.
 */
export class DataSynchronizer {
  #storageProvider: IndexTypes.StorageProvider
  #schemaObjectRepresentation: IndexTypes.SchemaObjectRepresentation
  #query: RemoteQueryFunction

  constructor({
    storageProvider,
    schemaObjectRepresentation,
    query,
  }: {
    storageProvider: IndexTypes.StorageProvider
    schemaObjectRepresentation: IndexTypes.SchemaObjectRepresentation
    query: RemoteQueryFunction
  }) {
    this.#storageProvider = storageProvider
    this.#schemaObjectRepresentation = schemaObjectRepresentation
    this.#query = query
  }

  /**
   * Syncs the given entity into the index.
   *
   * @param entityName - key of the entity in the schema object representation
   * @param pagination - optional resume cursor, `updated_at` lower bound,
   *   overall `limit` and per-page `batchSize` (defaults: no limit, 1000)
   * @param ack - progress callback invoked after each batch and once at the
   *   end; invoked fire-and-forget, its rejections are intentionally ignored
   * @returns the final acknowledgement: last processed cursor plus either
   *   `done: true` or the `err` that interrupted the sync
   */
  async sync({
    entityName,
    pagination = {},
    ack,
  }: {
    entityName: string
    pagination?: {
      cursor?: string
      updated_at?: string | Date
      limit?: number
      batchSize?: number
    }
    ack: (ack: {
      lastCursor: string | null
      done?: boolean
      err?: Error
    }) => Promise<void>
  }) {
    const schemaEntityObjectRepresentation = this.#schemaObjectRepresentation[
      entityName
    ] as SchemaObjectEntityRepresentation
    const { fields, alias, moduleConfig } = schemaEntityObjectRepresentation

    // The cursor field must be a linkable key so pagination is stable.
    const entityPrimaryKey = fields.find(
      (field) => !!moduleConfig.linkableKeys?.[field]
    )

    if (!entityPrimaryKey) {
      void ack({
        lastCursor: pagination.cursor ?? null,
        err: new Error(
          `Entity ${entityName} does not have a linkable primary key`
        ),
      })
      return
    }

    let processed = 0
    // FIX: the cursor is legitimately absent on a first run — no non-null
    // assertion; `null` means "start from the beginning".
    let currentCursor: string | null = pagination.cursor ?? null
    const batchSize = pagination.batchSize ?? 1000
    const limit = pagination.limit ?? Infinity
    let done = false
    let error: Error | null = null

    // FIX: was `processed < limit || !done`, which ignored the requested
    // limit and kept paging until the source ran out of data. Both
    // conditions must hold for the loop to continue.
    while (processed < limit && !done) {
      const filters: Record<string, any> = {}

      if (currentCursor) {
        filters[entityPrimaryKey] = { $gt: currentCursor }
      }

      if (pagination.updated_at) {
        filters["updated_at"] = { $gt: pagination.updated_at }
      }

      const { data } = await this.#query.graph({
        entity: alias,
        fields: [entityPrimaryKey],
        filters,
        pagination: {
          order: {
            [entityPrimaryKey]: "asc",
          },
          take: batchSize,
        },
      })

      done = !data.length
      if (done) {
        break
      }

      // Replay the page as synthetic "created" events so the storage
      // provider indexes them through its normal event path.
      const envelope: Event = {
        data,
        name: `*.${CommonEvents.CREATED}`,
      }

      try {
        await this.#storageProvider.consumeEvent(
          schemaEntityObjectRepresentation
        )(envelope)

        currentCursor = data[data.length - 1][entityPrimaryKey]
        processed += data.length
        // Fire-and-forget progress notification; errors here must not
        // interrupt the sync.
        void ack({ lastCursor: currentCursor })
      } catch (err) {
        error = err instanceof Error ? err : new Error(String(err))
        break
      }
    }

    // FIX: typo `acknoledgement` corrected; single ack/return tail instead of
    // two duplicated exit paths, and the type matches the nullable cursor.
    const acknowledgement: {
      lastCursor: string | null
      done?: boolean
      err?: Error
    } = error
      ? { lastCursor: currentCursor, err: error }
      : { lastCursor: currentCursor, done: true }

    void ack(acknowledgement)
    return acknowledgement
  }
}