chore(): start moving some packages to the core directory (#7215)
This commit is contained in:
committed by
GitHub
parent
fdee748eed
commit
bbccd6481d
@@ -0,0 +1,201 @@
|
||||
import { abstractModuleServiceFactory } from "../abstract-module-service-factory"
|
||||
|
||||
const baseRepoMock = {
|
||||
serialize: jest.fn().mockImplementation((item) => item),
|
||||
transaction: (task) => task("transactionManager"),
|
||||
getFreshManager: jest.fn().mockReturnThis(),
|
||||
}
|
||||
|
||||
const defaultContext = { __type: "MedusaContext", manager: baseRepoMock }
|
||||
const defaultTransactionContext = {
|
||||
__type: "MedusaContext",
|
||||
transactionManager: "transactionManager",
|
||||
}
|
||||
|
||||
describe("Abstract Module Service Factory", () => {
|
||||
const containerMock = {
|
||||
baseRepository: baseRepoMock,
|
||||
mainModelMockRepository: baseRepoMock,
|
||||
otherModelMock1Repository: baseRepoMock,
|
||||
otherModelMock2Repository: baseRepoMock,
|
||||
mainModelMockService: {
|
||||
retrieve: jest.fn().mockResolvedValue({ id: "1", name: "Item" }),
|
||||
list: jest.fn().mockResolvedValue([{ id: "1", name: "Item" }]),
|
||||
delete: jest.fn().mockResolvedValue(undefined),
|
||||
softDelete: jest.fn().mockResolvedValue([[], {}]),
|
||||
restore: jest.fn().mockResolvedValue([[], {}]),
|
||||
},
|
||||
otherModelMock1Service: {
|
||||
retrieve: jest.fn().mockResolvedValue({ id: "1", name: "Item" }),
|
||||
list: jest.fn().mockResolvedValue([{ id: "1", name: "Item" }]),
|
||||
delete: jest.fn().mockResolvedValue(undefined),
|
||||
softDelete: jest.fn().mockResolvedValue([[], {}]),
|
||||
restore: jest.fn().mockResolvedValue([[], {}]),
|
||||
},
|
||||
otherModelMock2Service: {
|
||||
retrieve: jest.fn().mockResolvedValue({ id: "1", name: "Item" }),
|
||||
list: jest.fn().mockResolvedValue([{ id: "1", name: "Item" }]),
|
||||
delete: jest.fn().mockResolvedValue(undefined),
|
||||
softDelete: jest.fn().mockResolvedValue([[], {}]),
|
||||
restore: jest.fn().mockResolvedValue([[], {}]),
|
||||
},
|
||||
}
|
||||
|
||||
const mainModelMock = class MainModelMock {}
|
||||
const otherModelMock1 = class OtherModelMock1 {}
|
||||
const otherModelMock2 = class OtherModelMock2 {}
|
||||
|
||||
const abstractModuleService = abstractModuleServiceFactory<
|
||||
any,
|
||||
any,
|
||||
{
|
||||
OtherModelMock1: {
|
||||
dto: any
|
||||
singular: "OtherModelMock1"
|
||||
plural: "OtherModelMock1s"
|
||||
}
|
||||
OtherModelMock2: {
|
||||
dto: any
|
||||
singular: "OtherModelMock2"
|
||||
plural: "OtherModelMock2s"
|
||||
}
|
||||
}
|
||||
>(
|
||||
mainModelMock,
|
||||
[
|
||||
{
|
||||
model: otherModelMock1,
|
||||
plural: "otherModelMock1s",
|
||||
singular: "otherModelMock1",
|
||||
},
|
||||
{
|
||||
model: otherModelMock2,
|
||||
plural: "otherModelMock2s",
|
||||
singular: "otherModelMock2",
|
||||
},
|
||||
]
|
||||
// Add more parameters as needed
|
||||
)
|
||||
|
||||
describe("Main Model Methods", () => {
|
||||
let instance
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
instance = new abstractModuleService(containerMock)
|
||||
})
|
||||
|
||||
it("should have retrieve method", async () => {
|
||||
const result = await instance.retrieve("1")
|
||||
expect(result).toEqual({ id: "1", name: "Item" })
|
||||
expect(containerMock.mainModelMockService.retrieve).toHaveBeenCalledWith(
|
||||
"1",
|
||||
undefined,
|
||||
defaultContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should have list method", async () => {
|
||||
const result = await instance.list()
|
||||
expect(result).toEqual([{ id: "1", name: "Item" }])
|
||||
expect(containerMock.mainModelMockService.list).toHaveBeenCalledWith(
|
||||
{},
|
||||
{},
|
||||
defaultContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should have delete method", async () => {
|
||||
await instance.delete("1")
|
||||
expect(containerMock.mainModelMockService.delete).toHaveBeenCalledWith(
|
||||
["1"],
|
||||
defaultTransactionContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should have softDelete method", async () => {
|
||||
const result = await instance.softDelete("1")
|
||||
expect(result).toEqual({})
|
||||
expect(
|
||||
containerMock.mainModelMockService.softDelete
|
||||
).toHaveBeenCalledWith(["1"], defaultTransactionContext)
|
||||
})
|
||||
|
||||
it("should have restore method", async () => {
|
||||
const result = await instance.restore("1")
|
||||
expect(result).toEqual({})
|
||||
expect(containerMock.mainModelMockService.restore).toHaveBeenCalledWith(
|
||||
["1"],
|
||||
defaultTransactionContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should have delete method with selector", async () => {
|
||||
await instance.delete({ selector: { id: "1" } })
|
||||
expect(containerMock.mainModelMockService.delete).toHaveBeenCalledWith(
|
||||
[{ selector: { id: "1" } }],
|
||||
defaultTransactionContext
|
||||
)
|
||||
})
|
||||
})
|
||||
|
||||
describe("Other Models Methods", () => {
|
||||
let instance
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
instance = new abstractModuleService(containerMock)
|
||||
})
|
||||
|
||||
it("should have retrieve method for other models", async () => {
|
||||
const result = await instance.retrieveOtherModelMock1("1")
|
||||
expect(result).toEqual({ id: "1", name: "Item" })
|
||||
expect(
|
||||
containerMock.otherModelMock1Service.retrieve
|
||||
).toHaveBeenCalledWith("1", undefined, defaultContext)
|
||||
})
|
||||
|
||||
it("should have list method for other models", async () => {
|
||||
const result = await instance.listOtherModelMock1s()
|
||||
expect(result).toEqual([{ id: "1", name: "Item" }])
|
||||
expect(containerMock.otherModelMock1Service.list).toHaveBeenCalledWith(
|
||||
{},
|
||||
{},
|
||||
defaultContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should have delete method for other models", async () => {
|
||||
await instance.deleteOtherModelMock1s("1")
|
||||
expect(containerMock.otherModelMock1Service.delete).toHaveBeenCalledWith(
|
||||
["1"],
|
||||
defaultTransactionContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should have softDelete method for other models", async () => {
|
||||
const result = await instance.softDeleteOtherModelMock1s("1")
|
||||
expect(result).toEqual({})
|
||||
expect(
|
||||
containerMock.otherModelMock1Service.softDelete
|
||||
).toHaveBeenCalledWith(["1"], defaultTransactionContext)
|
||||
})
|
||||
|
||||
it("should have restore method for other models", async () => {
|
||||
const result = await instance.restoreOtherModelMock1s("1")
|
||||
expect(result).toEqual({})
|
||||
expect(containerMock.otherModelMock1Service.restore).toHaveBeenCalledWith(
|
||||
["1"],
|
||||
defaultTransactionContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should have delete method for other models with selector", async () => {
|
||||
await instance.deleteOtherModelMock1s({ selector: { id: "1" } })
|
||||
expect(containerMock.otherModelMock1Service.delete).toHaveBeenCalledWith(
|
||||
[{ selector: { id: "1" } }],
|
||||
defaultTransactionContext
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,290 @@
|
||||
import { internalModuleServiceFactory } from "../internal-module-service-factory"
|
||||
import { lowerCaseFirst } from "../../common"
|
||||
|
||||
const defaultContext = { __type: "MedusaContext" }
|
||||
|
||||
class Model {
|
||||
static __meta = {
|
||||
primaryKeys: ["id"],
|
||||
}
|
||||
}
|
||||
|
||||
describe("Internal Module Service Factory", () => {
|
||||
const modelRepositoryName = `${lowerCaseFirst(Model.name)}Repository`
|
||||
|
||||
const containerMock = {
|
||||
[modelRepositoryName]: {
|
||||
transaction: (task) => task(),
|
||||
getFreshManager: jest.fn().mockReturnThis(),
|
||||
find: jest.fn(),
|
||||
findAndCount: jest.fn(),
|
||||
create: jest.fn(),
|
||||
update: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
softDelete: jest.fn(),
|
||||
restore: jest.fn(),
|
||||
upsert: jest.fn(),
|
||||
},
|
||||
[`composite${Model.name}Repository`]: {
|
||||
transaction: (task) => task(),
|
||||
getFreshManager: jest.fn().mockReturnThis(),
|
||||
find: jest.fn(),
|
||||
findAndCount: jest.fn(),
|
||||
create: jest.fn(),
|
||||
update: jest.fn(),
|
||||
delete: jest.fn(),
|
||||
softDelete: jest.fn(),
|
||||
restore: jest.fn(),
|
||||
upsert: jest.fn(),
|
||||
},
|
||||
}
|
||||
|
||||
const internalModuleService = internalModuleServiceFactory<any>(Model)
|
||||
|
||||
describe("Internal Module Service Methods", () => {
|
||||
let instance
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
instance = new internalModuleService(containerMock)
|
||||
})
|
||||
|
||||
it("should throw model id undefined error on retrieve if id is not defined", async () => {
|
||||
const err = await instance.retrieve().catch((e) => e)
|
||||
expect(err.message).toBe("model - id must be defined")
|
||||
})
|
||||
|
||||
it("should throw an error on retrieve if composite key values are not defined", async () => {
|
||||
class CompositeModel {
|
||||
id: string
|
||||
name: string
|
||||
|
||||
static meta = { primaryKeys: ["id", "name"] }
|
||||
}
|
||||
|
||||
const compositeInternalModuleService =
|
||||
internalModuleServiceFactory<any>(CompositeModel)
|
||||
|
||||
const instance = new compositeInternalModuleService(containerMock)
|
||||
|
||||
const err = await instance.retrieve().catch((e) => e)
|
||||
expect(err.message).toBe("compositeModel - id, name must be defined")
|
||||
})
|
||||
|
||||
it("should throw NOT_FOUND error on retrieve if entity not found", async () => {
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce([])
|
||||
|
||||
const err = await instance.retrieve("1").catch((e) => e)
|
||||
expect(err.message).toBe("Model with id: 1 was not found")
|
||||
})
|
||||
|
||||
it("should retrieve entity successfully", async () => {
|
||||
const entity = { id: "1", name: "Item" }
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce([entity])
|
||||
|
||||
const result = await instance.retrieve("1")
|
||||
expect(result).toEqual(entity)
|
||||
})
|
||||
|
||||
it("should retrieve entity successfully with composite key", async () => {
|
||||
class CompositeModel {
|
||||
id: string
|
||||
name: string
|
||||
|
||||
static meta = { primaryKeys: ["id", "name"] }
|
||||
}
|
||||
|
||||
const compositeInternalModuleService =
|
||||
internalModuleServiceFactory<any>(CompositeModel)
|
||||
|
||||
const instance = new compositeInternalModuleService(containerMock)
|
||||
|
||||
const entity = { id: "1", name: "Item" }
|
||||
containerMock[
|
||||
`${lowerCaseFirst(CompositeModel.name)}Repository`
|
||||
].find.mockResolvedValueOnce([entity])
|
||||
|
||||
const result = await instance.retrieve({ id: "1", name: "Item" })
|
||||
expect(result).toEqual(entity)
|
||||
})
|
||||
|
||||
it("should list entities successfully", async () => {
|
||||
const entities = [
|
||||
{ id: "1", name: "Item" },
|
||||
{ id: "2", name: "Item2" },
|
||||
]
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce(entities)
|
||||
|
||||
const result = await instance.list()
|
||||
expect(result).toEqual(entities)
|
||||
})
|
||||
|
||||
it("should list entities and relation with default ordering successfully", async () => {
|
||||
const entities = [
|
||||
{ id: "1", name: "Item" },
|
||||
{ id: "2", name: "Item2" },
|
||||
]
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce(entities)
|
||||
|
||||
const result = await instance.list(
|
||||
{},
|
||||
{
|
||||
relations: ["relation"],
|
||||
}
|
||||
)
|
||||
|
||||
expect(result).toEqual(entities)
|
||||
expect(containerMock[modelRepositoryName].find).toHaveBeenCalledWith(
|
||||
expect.objectContaining({
|
||||
options: expect.objectContaining({
|
||||
populate: ["relation"],
|
||||
orderBy: {
|
||||
id: "ASC",
|
||||
},
|
||||
}),
|
||||
}),
|
||||
expect.any(Object)
|
||||
)
|
||||
})
|
||||
|
||||
it("should list and count entities successfully", async () => {
|
||||
const entities = [
|
||||
{ id: "1", name: "Item" },
|
||||
{ id: "2", name: "Item2" },
|
||||
]
|
||||
const count = entities.length
|
||||
containerMock[modelRepositoryName].findAndCount.mockResolvedValueOnce([
|
||||
entities,
|
||||
count,
|
||||
])
|
||||
|
||||
const result = await instance.listAndCount()
|
||||
expect(result).toEqual([entities, count])
|
||||
})
|
||||
|
||||
it("should create entity successfully", async () => {
|
||||
const entity = { id: "1", name: "Item" }
|
||||
|
||||
containerMock[modelRepositoryName].find.mockReturnValue([entity])
|
||||
|
||||
containerMock[modelRepositoryName].create.mockImplementation(
|
||||
async (entity) => entity
|
||||
)
|
||||
|
||||
const result = await instance.create(entity)
|
||||
expect(result).toEqual(entity)
|
||||
})
|
||||
|
||||
it("should create entities successfully", async () => {
|
||||
const entities = [
|
||||
{ id: "1", name: "Item" },
|
||||
{ id: "2", name: "Item2" },
|
||||
]
|
||||
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce([entities])
|
||||
|
||||
containerMock[modelRepositoryName].create.mockResolvedValueOnce(entities)
|
||||
|
||||
const result = await instance.create(entities)
|
||||
expect(result).toEqual(entities)
|
||||
})
|
||||
|
||||
it("should update entity successfully", async () => {
|
||||
const updateData = { id: "1", name: "UpdatedItem" }
|
||||
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce([
|
||||
updateData,
|
||||
])
|
||||
|
||||
containerMock[modelRepositoryName].update.mockResolvedValueOnce([
|
||||
updateData,
|
||||
])
|
||||
|
||||
const result = await instance.update(updateData)
|
||||
expect(result).toEqual([updateData])
|
||||
})
|
||||
|
||||
it("should update entities successfully", async () => {
|
||||
const updateData = { id: "1", name: "UpdatedItem" }
|
||||
const entitiesToUpdate = [{ id: "1", name: "Item" }]
|
||||
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce(
|
||||
entitiesToUpdate
|
||||
)
|
||||
|
||||
containerMock[modelRepositoryName].update.mockResolvedValueOnce([
|
||||
{ entity: entitiesToUpdate[0], update: updateData },
|
||||
])
|
||||
|
||||
const result = await instance.update({ selector: {}, data: updateData })
|
||||
expect(result).toEqual([
|
||||
{ entity: entitiesToUpdate[0], update: updateData },
|
||||
])
|
||||
})
|
||||
|
||||
it("should delete entity successfully", async () => {
|
||||
await instance.delete("1")
|
||||
expect(containerMock[modelRepositoryName].delete).toHaveBeenCalledWith(
|
||||
{
|
||||
$or: [
|
||||
{
|
||||
id: "1",
|
||||
},
|
||||
],
|
||||
},
|
||||
defaultContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should delete entities successfully", async () => {
|
||||
const entitiesToDelete = [{ id: "1", name: "Item" }]
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce(
|
||||
entitiesToDelete
|
||||
)
|
||||
|
||||
await instance.delete({ selector: {} })
|
||||
expect(containerMock[modelRepositoryName].delete).toHaveBeenCalledWith(
|
||||
{
|
||||
$or: [
|
||||
{
|
||||
id: "1",
|
||||
},
|
||||
],
|
||||
},
|
||||
defaultContext
|
||||
)
|
||||
})
|
||||
|
||||
it("should prevent from deleting all entities", async () => {
|
||||
const entitiesToDelete = [{ id: "1", name: "Item" }]
|
||||
containerMock[modelRepositoryName].find.mockResolvedValueOnce(
|
||||
entitiesToDelete
|
||||
)
|
||||
|
||||
await instance.delete([])
|
||||
expect(containerMock[modelRepositoryName].delete).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it("should soft delete entity successfully", async () => {
|
||||
await instance.softDelete("1")
|
||||
expect(
|
||||
containerMock[modelRepositoryName].softDelete
|
||||
).toHaveBeenCalledWith("1", defaultContext)
|
||||
})
|
||||
|
||||
it("should prevent from soft deleting all data", async () => {
|
||||
await instance.softDelete([])
|
||||
expect(
|
||||
containerMock[modelRepositoryName].softDelete
|
||||
).not.toHaveBeenCalled()
|
||||
})
|
||||
|
||||
it("should restore entity successfully", async () => {
|
||||
await instance.restore("1")
|
||||
expect(containerMock[modelRepositoryName].restore).toHaveBeenCalledWith(
|
||||
"1",
|
||||
defaultContext
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,150 @@
|
||||
import { loadDatabaseConfig } from "../load-module-database-config"
|
||||
|
||||
describe("loadDatabaseConfig", function () {
|
||||
afterEach(() => {
|
||||
delete process.env.POSTGRES_URL
|
||||
delete process.env.MEDUSA_POSTGRES_URL
|
||||
delete process.env.PRODUCT_POSTGRES_URL
|
||||
})
|
||||
|
||||
it("should return the local configuration using the environment variable respecting their precedence", function () {
|
||||
process.env.MEDUSA_POSTGRES_URL = "postgres://localhost:5432/medusa"
|
||||
process.env.PRODUCT_POSTGRES_URL = "postgres://localhost:5432/product"
|
||||
process.env.POSTGRES_URL = "postgres://localhost:5432/share_db"
|
||||
|
||||
let config = loadDatabaseConfig("product")
|
||||
|
||||
expect(config).toEqual({
|
||||
clientUrl: process.env.PRODUCT_POSTGRES_URL,
|
||||
driverOptions: {
|
||||
connection: {
|
||||
ssl: false,
|
||||
},
|
||||
},
|
||||
debug: false,
|
||||
schema: "",
|
||||
})
|
||||
|
||||
delete process.env.PRODUCT_POSTGRES_URL
|
||||
config = loadDatabaseConfig("product")
|
||||
|
||||
expect(config).toEqual({
|
||||
clientUrl: process.env.MEDUSA_POSTGRES_URL,
|
||||
driverOptions: {
|
||||
connection: {
|
||||
ssl: false,
|
||||
},
|
||||
},
|
||||
debug: false,
|
||||
schema: "",
|
||||
})
|
||||
|
||||
delete process.env.MEDUSA_POSTGRES_URL
|
||||
config = loadDatabaseConfig("product")
|
||||
|
||||
expect(config).toEqual({
|
||||
clientUrl: process.env.POSTGRES_URL,
|
||||
driverOptions: {
|
||||
connection: {
|
||||
ssl: false,
|
||||
},
|
||||
},
|
||||
debug: false,
|
||||
schema: "",
|
||||
})
|
||||
})
|
||||
|
||||
it("should return the remote configuration using the environment variable", function () {
|
||||
process.env.POSTGRES_URL = "postgres://https://test.com:5432/medusa"
|
||||
let config = loadDatabaseConfig("product")
|
||||
|
||||
expect(config).toEqual({
|
||||
clientUrl: process.env.POSTGRES_URL,
|
||||
driverOptions: {
|
||||
connection: {
|
||||
ssl: {
|
||||
rejectUnauthorized: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
debug: false,
|
||||
schema: "",
|
||||
})
|
||||
|
||||
delete process.env.POSTGRES_URL
|
||||
process.env.PRODUCT_POSTGRES_URL = "postgres://https://test.com:5432/medusa"
|
||||
config = loadDatabaseConfig("product")
|
||||
|
||||
expect(config).toEqual({
|
||||
clientUrl: process.env.PRODUCT_POSTGRES_URL,
|
||||
driverOptions: {
|
||||
connection: {
|
||||
ssl: {
|
||||
rejectUnauthorized: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
debug: false,
|
||||
schema: "",
|
||||
})
|
||||
})
|
||||
|
||||
it("should return the local configuration using the options", function () {
|
||||
process.env.POSTGRES_URL = "postgres://localhost:5432/medusa"
|
||||
const options = {
|
||||
database: {
|
||||
clientUrl: "postgres://localhost:5432/medusa-test",
|
||||
},
|
||||
}
|
||||
|
||||
let config = loadDatabaseConfig("product", options)
|
||||
|
||||
expect(config).toEqual({
|
||||
clientUrl: options.database.clientUrl,
|
||||
driverOptions: {
|
||||
connection: {
|
||||
ssl: false,
|
||||
},
|
||||
},
|
||||
debug: false,
|
||||
schema: "",
|
||||
})
|
||||
})
|
||||
|
||||
it("should return the remote configuration using the options", function () {
|
||||
process.env.POSTGRES_URL = "postgres://localhost:5432/medusa"
|
||||
const options = {
|
||||
database: {
|
||||
clientUrl: "postgres://https://test.com:5432/medusa-test",
|
||||
},
|
||||
}
|
||||
|
||||
let config = loadDatabaseConfig("product", options)
|
||||
|
||||
expect(config).toEqual({
|
||||
clientUrl: options.database.clientUrl,
|
||||
driverOptions: {
|
||||
connection: {
|
||||
ssl: {
|
||||
rejectUnauthorized: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
debug: false,
|
||||
schema: "",
|
||||
})
|
||||
})
|
||||
|
||||
it("should throw if no clientUrl is provided", function () {
|
||||
let error
|
||||
try {
|
||||
loadDatabaseConfig("product")
|
||||
} catch (e) {
|
||||
error = e
|
||||
}
|
||||
|
||||
expect(error.message).toEqual(
|
||||
"No database clientUrl provided. Please provide the clientUrl through the [MODULE]_POSTGRES_URL, MEDUSA_POSTGRES_URL or POSTGRES_URL environment variable or the options object in the initialize function."
|
||||
)
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,636 @@
|
||||
/**
|
||||
* Utility factory and interfaces for module service public facing API
|
||||
*/
|
||||
import {
|
||||
Constructor,
|
||||
Context,
|
||||
FindConfig,
|
||||
IEventBusModuleService,
|
||||
Pluralize,
|
||||
RepositoryService,
|
||||
RestoreReturn,
|
||||
SoftDeleteReturn,
|
||||
} from "@medusajs/types"
|
||||
import {
|
||||
isString,
|
||||
kebabCase,
|
||||
lowerCaseFirst,
|
||||
mapObjectTo,
|
||||
MapToConfig,
|
||||
pluralize,
|
||||
upperCaseFirst,
|
||||
} from "../common"
|
||||
import {
|
||||
InjectManager,
|
||||
InjectTransactionManager,
|
||||
MedusaContext,
|
||||
} from "./decorators"
|
||||
|
||||
type BaseMethods =
|
||||
| "retrieve"
|
||||
| "list"
|
||||
| "listAndCount"
|
||||
| "delete"
|
||||
| "softDelete"
|
||||
| "restore"
|
||||
| "create"
|
||||
| "update"
|
||||
|
||||
const readMethods = ["retrieve", "list", "listAndCount"] as BaseMethods[]
|
||||
const writeMethods = [
|
||||
"delete",
|
||||
"softDelete",
|
||||
"restore",
|
||||
"create",
|
||||
"update",
|
||||
] as BaseMethods[]
|
||||
|
||||
const methods: BaseMethods[] = [...readMethods, ...writeMethods]
|
||||
|
||||
type ModelDTOConfig = {
|
||||
dto: object
|
||||
create?: object
|
||||
update?: object
|
||||
}
|
||||
type ModelDTOConfigRecord = Record<any, ModelDTOConfig>
|
||||
type ModelNamingConfig = {
|
||||
singular?: string
|
||||
plural?: string
|
||||
}
|
||||
|
||||
type ModelsConfigTemplate = {
|
||||
[ModelName: string]: ModelDTOConfig & ModelNamingConfig
|
||||
}
|
||||
|
||||
type ExtractSingularName<
|
||||
T extends Record<any, any>,
|
||||
K = keyof T
|
||||
> = T[K] extends { singular?: string } ? T[K]["singular"] : K
|
||||
|
||||
type CreateMethodName<
|
||||
TModelDTOConfig extends ModelDTOConfigRecord,
|
||||
TModelName = keyof TModelDTOConfig
|
||||
> = TModelDTOConfig[TModelName] extends { create?: object }
|
||||
? `create${ExtractPluralName<TModelDTOConfig, TModelName>}`
|
||||
: never
|
||||
|
||||
type UpdateMethodName<
|
||||
TModelDTOConfig extends ModelDTOConfigRecord,
|
||||
TModelName = keyof TModelDTOConfig
|
||||
> = TModelDTOConfig[TModelName] extends { update?: object }
|
||||
? `update${ExtractPluralName<TModelDTOConfig, TModelName>}`
|
||||
: never
|
||||
|
||||
type ExtractPluralName<T extends Record<any, any>, K = keyof T> = T[K] extends {
|
||||
plural?: string
|
||||
}
|
||||
? T[K]["plural"]
|
||||
: Pluralize<K & string>
|
||||
|
||||
type ModelConfiguration =
|
||||
| Constructor<any>
|
||||
| { singular?: string; plural?: string; model: Constructor<any> }
|
||||
|
||||
export interface AbstractModuleServiceBase<TContainer, TMainModelDTO> {
|
||||
get __container__(): TContainer
|
||||
|
||||
retrieve(
|
||||
id: string,
|
||||
config?: FindConfig<any>,
|
||||
sharedContext?: Context
|
||||
): Promise<TMainModelDTO>
|
||||
|
||||
list(
|
||||
filters?: any,
|
||||
config?: FindConfig<any>,
|
||||
sharedContext?: Context
|
||||
): Promise<TMainModelDTO[]>
|
||||
|
||||
listAndCount(
|
||||
filters?: any,
|
||||
config?: FindConfig<any>,
|
||||
sharedContext?: Context
|
||||
): Promise<[TMainModelDTO[], number]>
|
||||
|
||||
delete(
|
||||
primaryKeyValues: string | object | string[] | object[],
|
||||
sharedContext?: Context
|
||||
): Promise<void>
|
||||
|
||||
softDelete<TReturnableLinkableKeys extends string>(
|
||||
primaryKeyValues: string | object | string[] | object[],
|
||||
config?: SoftDeleteReturn<TReturnableLinkableKeys>,
|
||||
sharedContext?: Context
|
||||
): Promise<Record<string, string[]> | void>
|
||||
|
||||
restore<TReturnableLinkableKeys extends string>(
|
||||
primaryKeyValues: string | object | string[] | object[],
|
||||
config?: RestoreReturn<TReturnableLinkableKeys>,
|
||||
sharedContext?: Context
|
||||
): Promise<Record<string, string[]> | void>
|
||||
}
|
||||
|
||||
export type AbstractModuleService<
|
||||
TContainer,
|
||||
TMainModelDTO,
|
||||
TModelDTOConfig extends ModelsConfigTemplate
|
||||
> = AbstractModuleServiceBase<TContainer, TMainModelDTO> & {
|
||||
[TModelName in keyof TModelDTOConfig as `retrieve${ExtractSingularName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
> &
|
||||
string}`]: (
|
||||
id: string,
|
||||
config?: FindConfig<any>,
|
||||
sharedContext?: Context
|
||||
) => Promise<TModelDTOConfig[TModelName & string]["dto"]>
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as `list${ExtractPluralName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
> &
|
||||
string}`]: (
|
||||
filters?: any,
|
||||
config?: FindConfig<any>,
|
||||
sharedContext?: Context
|
||||
) => Promise<TModelDTOConfig[TModelName & string]["dto"][]>
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as `listAndCount${ExtractPluralName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
> &
|
||||
string}`]: {
|
||||
(filters?: any, config?: FindConfig<any>, sharedContext?: Context): Promise<
|
||||
[TModelDTOConfig[TModelName & string]["dto"][], number]
|
||||
>
|
||||
}
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as `delete${ExtractPluralName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
> &
|
||||
string}`]: {
|
||||
(
|
||||
primaryKeyValues: string | object | string[] | object[],
|
||||
sharedContext?: Context
|
||||
): Promise<void>
|
||||
}
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as `softDelete${ExtractPluralName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
> &
|
||||
string}`]: {
|
||||
<TReturnableLinkableKeys extends string>(
|
||||
primaryKeyValues: string | object | string[] | object[],
|
||||
config?: SoftDeleteReturn<TReturnableLinkableKeys>,
|
||||
sharedContext?: Context
|
||||
): Promise<Record<string, string[]> | void>
|
||||
}
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as `restore${ExtractPluralName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
> &
|
||||
string}`]: {
|
||||
<TReturnableLinkableKeys extends string>(
|
||||
primaryKeyValues: string | object | string[] | object[],
|
||||
config?: RestoreReturn<TReturnableLinkableKeys>,
|
||||
sharedContext?: Context
|
||||
): Promise<Record<string, string[]> | void>
|
||||
}
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as CreateMethodName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
>]: {
|
||||
(
|
||||
data: TModelDTOConfig[TModelName]["create"][],
|
||||
sharedContext?: Context
|
||||
): Promise<TModelDTOConfig[TModelName]["dto"][]>
|
||||
}
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as CreateMethodName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
>]: {
|
||||
(
|
||||
data: TModelDTOConfig[TModelName]["create"],
|
||||
sharedContext?: Context
|
||||
): Promise<TModelDTOConfig[TModelName]["dto"]>
|
||||
}
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as UpdateMethodName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
>]: {
|
||||
(
|
||||
data: TModelDTOConfig[TModelName]["update"][],
|
||||
sharedContext?: Context
|
||||
): Promise<TModelDTOConfig[TModelName]["dto"][]>
|
||||
}
|
||||
} & {
|
||||
[TModelName in keyof TModelDTOConfig as UpdateMethodName<
|
||||
TModelDTOConfig,
|
||||
TModelName
|
||||
>]: {
|
||||
(
|
||||
data: TModelDTOConfig[TModelName]["update"],
|
||||
sharedContext?: Context
|
||||
): Promise<TModelDTOConfig[TModelName]["dto"]>
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Factory function for creating an abstract module service
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* const otherModels = new Set([
|
||||
* Currency,
|
||||
* Price,
|
||||
* PriceList,
|
||||
* PriceListRule,
|
||||
* PriceListRuleValue,
|
||||
* PriceRule,
|
||||
* PriceSetRuleType,
|
||||
* RuleType,
|
||||
* ])
|
||||
*
|
||||
* const AbstractModuleService = ModulesSdkUtils.abstractModuleServiceFactory<
|
||||
* InjectedDependencies,
|
||||
* PricingTypes.PriceSetDTO,
|
||||
* // The configuration of each entity also accept singular/plural properties, if not provided then it is using english pluralization
|
||||
* {
|
||||
* Currency: { dto: PricingTypes.CurrencyDTO }
|
||||
* Price: { dto: PricingTypes.PriceDTO }
|
||||
* PriceRule: { dto: PricingTypes.PriceRuleDTO }
|
||||
* RuleType: { dto: PricingTypes.RuleTypeDTO }
|
||||
* PriceList: { dto: PricingTypes.PriceListDTO }
|
||||
* PriceListRule: { dto: PricingTypes.PriceListRuleDTO }
|
||||
* }
|
||||
* >(PriceSet, [...otherModels], entityNameToLinkableKeysMap)
|
||||
*
|
||||
* @param mainModel
|
||||
* @param otherModels
|
||||
* @param entityNameToLinkableKeysMap
|
||||
*/
|
||||
export function abstractModuleServiceFactory<
|
||||
TContainer,
|
||||
TMainModelDTO,
|
||||
ModelsConfig extends ModelsConfigTemplate
|
||||
>(
|
||||
mainModel: Constructor<any>,
|
||||
otherModels: ModelConfiguration[],
|
||||
entityNameToLinkableKeysMap: MapToConfig = {}
|
||||
): {
|
||||
new (container: TContainer): AbstractModuleService<
|
||||
TContainer,
|
||||
TMainModelDTO,
|
||||
ModelsConfig
|
||||
>
|
||||
} {
|
||||
const buildMethodNamesFromModel = (
|
||||
model: ModelConfiguration,
|
||||
suffixed: boolean = true
|
||||
): Record<string, string> => {
|
||||
return methods.reduce((acc, method) => {
|
||||
let modelName: string = ""
|
||||
|
||||
if (method === "retrieve") {
|
||||
modelName =
|
||||
"singular" in model && model.singular
|
||||
? model.singular
|
||||
: (model as Constructor<any>).name
|
||||
} else {
|
||||
modelName =
|
||||
"plural" in model && model.plural
|
||||
? model.plural
|
||||
: pluralize((model as Constructor<any>).name)
|
||||
}
|
||||
|
||||
const methodName = suffixed
|
||||
? `${method}${upperCaseFirst(modelName)}`
|
||||
: method
|
||||
|
||||
return { ...acc, [method]: methodName }
|
||||
}, {})
|
||||
}
|
||||
|
||||
/**
 * Build the implementation of `method` (retrieve/create/update/list/
 * listAndCount/delete/softDelete/restore) for the given model and attach it
 * to the module-service prototype under `methodName`.
 *
 * Each generated method delegates to the model's internal service (resolved
 * from the container as `${lowerCaseFirst(model.name)}Service`) and
 * serializes its result through `baseRepository_`.
 */
const buildAndAssignMethodImpl = function (
  klassPrototype: any,
  method: string,
  methodName: string,
  model: Constructor<any>
): void {
  const serviceRegistrationName = `${lowerCaseFirst(model.name)}Service`

  // Assign `impl` to the prototype and apply the decorators programmatically:
  // @MedusaContext flags the argument at `contextIndex`, then @InjectManager
  // (read methods) or @InjectTransactionManager (write methods) wraps the
  // method, both bound to `baseRepository_`.
  const applyMethod = function (impl: Function, contextIndex) {
    klassPrototype[methodName] = impl

    // Fake property descriptor so the method decorators can rewrite `value`.
    const descriptorMockRef = {
      value: klassPrototype[methodName],
    }

    MedusaContext()(klassPrototype, methodName, contextIndex)

    const ManagerDecorator = readMethods.includes(method as BaseMethods)
      ? InjectManager
      : InjectTransactionManager

    ManagerDecorator("baseRepository_")(
      klassPrototype,
      methodName,
      descriptorMockRef
    )

    // The decorator replaced the descriptor's value; persist the wrapped method.
    klassPrototype[methodName] = descriptorMockRef.value
  }

  // No-op fallback used when `method` is not one of the supported base methods.
  let methodImplementation: any = function () {
    void 0
  }

  switch (method) {
    case "retrieve":
      methodImplementation = async function <T extends object>(
        this: AbstractModuleService_,
        id: string,
        config?: FindConfig<any>,
        sharedContext: Context = {}
      ): Promise<T> {
        const entities = await this.__container__[
          serviceRegistrationName
        ].retrieve(id, config, sharedContext)

        return await this.baseRepository_.serialize<T>(entities, {
          populate: true,
        })
      }

      // Context is the third argument (index 2).
      applyMethod(methodImplementation, 2)

      break
    case "create":
      methodImplementation = async function <T extends object>(
        this: AbstractModuleService_,
        data = [],
        sharedContext: Context = {}
      ): Promise<T | T[]> {
        // Normalize to an array for the internal service, but mirror the
        // caller's input shape in the response (single object in -> single out).
        const serviceData = Array.isArray(data) ? data : [data]
        const service = this.__container__[serviceRegistrationName]
        const entities = await service.create(serviceData, sharedContext)
        const response = Array.isArray(data) ? entities : entities[0]

        return await this.baseRepository_.serialize<T | T[]>(response, {
          populate: true,
        })
      }

      applyMethod(methodImplementation, 1)

      break
    case "update":
      methodImplementation = async function <T extends object>(
        this: AbstractModuleService_,
        data = [],
        sharedContext: Context = {}
      ): Promise<T | T[]> {
        // Same input/output shape mirroring as "create".
        const serviceData = Array.isArray(data) ? data : [data]
        const service = this.__container__[serviceRegistrationName]
        const entities = await service.update(serviceData, sharedContext)
        const response = Array.isArray(data) ? entities : entities[0]

        return await this.baseRepository_.serialize<T | T[]>(response, {
          populate: true,
        })
      }

      applyMethod(methodImplementation, 1)

      break
    case "list":
      methodImplementation = async function <T extends object>(
        this: AbstractModuleService_,
        filters = {},
        config: FindConfig<any> = {},
        sharedContext: Context = {}
      ): Promise<T[]> {
        const service = this.__container__[serviceRegistrationName]
        const entities = await service.list(filters, config, sharedContext)

        return await this.baseRepository_.serialize<T[]>(entities, {
          populate: true,
        })
      }

      applyMethod(methodImplementation, 2)

      break
    case "listAndCount":
      // NOTE(review): this returns an [entities, count] tuple although the
      // declared return type is Promise<T[]> — confirm the declared type.
      methodImplementation = async function <T extends object>(
        this: AbstractModuleService_,
        filters = {},
        config: FindConfig<any> = {},
        sharedContext: Context = {}
      ): Promise<T[]> {
        const [entities, count] = await this.__container__[
          serviceRegistrationName
        ].listAndCount(filters, config, sharedContext)

        return [
          await this.baseRepository_.serialize<T[]>(entities, {
            populate: true,
          }),
          count,
        ]
      }

      applyMethod(methodImplementation, 2)

      break
    case "delete":
      methodImplementation = async function (
        this: AbstractModuleService_,
        primaryKeyValues: string | object | string[] | object[],
        sharedContext: Context = {}
      ): Promise<void> {
        const primaryKeyValues_ = Array.isArray(primaryKeyValues)
          ? primaryKeyValues
          : [primaryKeyValues]
        await this.__container__[serviceRegistrationName].delete(
          primaryKeyValues_,
          sharedContext
        )

        // Emit one `<model>.deleted` event per deleted key; plain string keys
        // are wrapped as `{ id }`, composite-key objects are passed as-is.
        await this.eventBusModuleService_?.emit(
          primaryKeyValues_.map((primaryKeyValue) => ({
            eventName: `${kebabCase(model.name)}.deleted`,
            data: isString(primaryKeyValue)
              ? { id: primaryKeyValue }
              : primaryKeyValue,
          }))
        )
      }

      applyMethod(methodImplementation, 1)

      break
    case "softDelete":
      methodImplementation = async function <T extends { id: string }>(
        this: AbstractModuleService_,
        primaryKeyValues: string | object | string[] | object[],
        config: SoftDeleteReturn<string> = {},
        sharedContext: Context = {}
      ): Promise<Record<string, string[]> | void> {
        const primaryKeyValues_ = Array.isArray(primaryKeyValues)
          ? primaryKeyValues
          : [primaryKeyValues]

        const [entities, cascadedEntitiesMap] = await this.__container__[
          serviceRegistrationName
        ].softDelete(primaryKeyValues_, sharedContext)

        const softDeletedEntities = await this.baseRepository_.serialize<T[]>(
          entities,
          {
            populate: true,
          }
        )

        // Soft deletion also emits `<model>.deleted` events.
        await this.eventBusModuleService_?.emit(
          softDeletedEntities.map(({ id }) => ({
            eventName: `${kebabCase(model.name)}.deleted`,
            data: { id },
          }))
        )

        // Map internal table/column names to their respective external linkable keys
        // eg: product.id = product_id, variant.id = variant_id
        const mappedCascadedEntitiesMap = mapObjectTo(
          cascadedEntitiesMap,
          entityNameToLinkableKeysMap,
          {
            pick: config.returnLinkableKeys,
          }
        )

        return mappedCascadedEntitiesMap ? mappedCascadedEntitiesMap : void 0
      }

      applyMethod(methodImplementation, 2)

      break
    case "restore":
      methodImplementation = async function <T extends object>(
        this: AbstractModuleService_,
        primaryKeyValues: string | object | string[] | object[],
        config: RestoreReturn<string> = {},
        sharedContext: Context = {}
      ): Promise<Record<string, string[]> | void> {
        const primaryKeyValues_ = Array.isArray(primaryKeyValues)
          ? primaryKeyValues
          : [primaryKeyValues]

        const [_, cascadedEntitiesMap] = await this.__container__[
          serviceRegistrationName
        ].restore(primaryKeyValues_, sharedContext)

        let mappedCascadedEntitiesMap
        // Map internal table/column names to their respective external linkable keys
        // eg: product.id = product_id, variant.id = variant_id
        mappedCascadedEntitiesMap = mapObjectTo(
          cascadedEntitiesMap,
          entityNameToLinkableKeysMap,
          {
            pick: config.returnLinkableKeys,
          }
        )

        return mappedCascadedEntitiesMap ? mappedCascadedEntitiesMap : void 0
      }

      applyMethod(methodImplementation, 2)

      break
  }
}
|
||||
|
||||
class AbstractModuleService_ {
|
||||
readonly __container__: Record<string, any>
|
||||
readonly baseRepository_: RepositoryService
|
||||
readonly eventBusModuleService_: IEventBusModuleService;
|
||||
|
||||
[key: string]: any
|
||||
|
||||
constructor(container: Record<string, any>) {
|
||||
this.__container__ = container
|
||||
this.baseRepository_ = container.baseRepository
|
||||
|
||||
const hasEventBusModuleService = Object.keys(this.__container__).find(
|
||||
// TODO: Should use ModuleRegistrationName.EVENT_BUS but it would require to move it to the utils package to prevent circular dependencies
|
||||
(key) => key === "eventBusModuleService"
|
||||
)
|
||||
const hasEventBusService = Object.keys(this.__container__).find(
|
||||
(key) => key === "eventBusService"
|
||||
)
|
||||
|
||||
this.eventBusModuleService_ = hasEventBusService
|
||||
? this.__container__.eventBusService
|
||||
: hasEventBusModuleService
|
||||
? this.__container__.eventBusModuleService
|
||||
: undefined
|
||||
}
|
||||
|
||||
protected async emitEvents_(groupedEvents) {
|
||||
if (!this.eventBusModuleService_ || !groupedEvents) {
|
||||
return
|
||||
}
|
||||
|
||||
const promises: Promise<void>[] = []
|
||||
for (const group of Object.keys(groupedEvents)) {
|
||||
promises.push(this.eventBusModuleService_.emit(groupedEvents[group]))
|
||||
}
|
||||
|
||||
await Promise.all(promises)
|
||||
}
|
||||
}
|
||||
|
||||
// Map of base method -> public method name for the main model; presumably
// the second argument disables the model-name suffix so the main model keeps
// the plain names (retrieve/list/...) — confirm against buildMethodNamesFromModel.
const mainModelMethods = buildMethodNamesFromModel(mainModel, false)

/**
 * Build the main retrieve/list/listAndCount/delete/softDelete/restore methods for the main model
 */

for (let [method, methodName] of Object.entries(mainModelMethods)) {
  buildAndAssignMethodImpl(
    AbstractModuleService_.prototype,
    method,
    methodName,
    mainModel
  )
}

/**
 * Build the retrieve/list/listAndCount/delete/softDelete/restore methods for all the other models
 */

// Pair each secondary model with its own (model-name-suffixed) method map.
const otherModelsMethods: [ModelConfiguration, Record<string, string>][] =
  otherModels.map((model) => [model, buildMethodNamesFromModel(model)])

for (let [model, modelsMethods] of otherModelsMethods) {
  Object.entries(modelsMethods).forEach(([method, methodName]) => {
    // A configuration entry is either the model itself or wraps it under a
    // `model` property — unwrap before building the implementation.
    model = "model" in model ? model.model : model
    buildAndAssignMethodImpl(
      AbstractModuleService_.prototype,
      method,
      methodName,
      model
    )
  })
}

// Hand the dynamically-built class back, cast to the strongly-typed
// module-service constructor expected by callers.
return AbstractModuleService_ as unknown as new (
  container: TContainer
) => AbstractModuleService<TContainer, TMainModelDTO, ModelsConfig>
|
||||
}
|
||||
105
packages/core/utils/src/modules-sdk/build-query.ts
Normal file
105
packages/core/utils/src/modules-sdk/build-query.ts
Normal file
@@ -0,0 +1,105 @@
|
||||
import { DAL, FindConfig } from "@medusajs/types"
|
||||
import { deduplicate, isDefined, isObject } from "../common"
|
||||
|
||||
import { SoftDeletableFilterKey } from "../dal"
|
||||
|
||||
// Following convention here is fine, we can make it configurable if needed.
|
||||
const DELETED_AT_FIELD_NAME = "deleted_at"
|
||||
|
||||
type FilterFlags = {
|
||||
withDeleted?: boolean
|
||||
}
|
||||
|
||||
export function buildQuery<T = any, TDto = any>(
|
||||
filters: Record<string, any> = {},
|
||||
config: FindConfig<TDto> & { primaryKeyFields?: string | string[] } = {}
|
||||
): DAL.FindOptions<T> {
|
||||
const where: DAL.FilterQuery<T> = {}
|
||||
const filterFlags: FilterFlags = {}
|
||||
buildWhere(filters, where, filterFlags)
|
||||
|
||||
const primaryKeyFieldArray = isDefined(config.primaryKeyFields)
|
||||
? !Array.isArray(config.primaryKeyFields)
|
||||
? [config.primaryKeyFields]
|
||||
: config.primaryKeyFields
|
||||
: ["id"]
|
||||
|
||||
const whereHasPrimaryKeyFields = primaryKeyFieldArray.some(
|
||||
(pkField) => !!where[pkField]
|
||||
)
|
||||
|
||||
const defaultLimit = whereHasPrimaryKeyFields ? undefined : 15
|
||||
|
||||
delete config.primaryKeyFields
|
||||
|
||||
const findOptions: DAL.OptionsQuery<T, any> = {
|
||||
populate: deduplicate(config.relations ?? []),
|
||||
fields: config.select as string[],
|
||||
limit:
|
||||
(Number.isSafeInteger(config.take) && config.take! >= 0) ||
|
||||
null === config.take
|
||||
? config.take ?? undefined
|
||||
: defaultLimit,
|
||||
offset:
|
||||
(Number.isSafeInteger(config.skip) && config.skip! >= 0) ||
|
||||
null === config.skip
|
||||
? config.skip ?? undefined
|
||||
: 0,
|
||||
}
|
||||
|
||||
if (config.order) {
|
||||
findOptions.orderBy = config.order as DAL.OptionsQuery<T>["orderBy"]
|
||||
}
|
||||
|
||||
if (config.withDeleted || filterFlags.withDeleted) {
|
||||
findOptions.filters ??= {}
|
||||
findOptions.filters[SoftDeletableFilterKey] = {
|
||||
withDeleted: true,
|
||||
}
|
||||
}
|
||||
|
||||
if (config.filters) {
|
||||
findOptions.filters ??= {}
|
||||
|
||||
for (const [key, value] of Object.entries(config.filters)) {
|
||||
findOptions.filters[key] = value
|
||||
}
|
||||
}
|
||||
|
||||
return { where, options: findOptions }
|
||||
}
|
||||
|
||||
function buildWhere(
|
||||
filters: Record<string, any> = {},
|
||||
where = {},
|
||||
flags: FilterFlags = {}
|
||||
) {
|
||||
for (let [prop, value] of Object.entries(filters)) {
|
||||
if (prop === DELETED_AT_FIELD_NAME) {
|
||||
flags.withDeleted = true
|
||||
}
|
||||
|
||||
if (["$or", "$and"].includes(prop)) {
|
||||
where[prop] = value.map((val) => {
|
||||
const deepWhere = {}
|
||||
buildWhere(val, deepWhere, flags)
|
||||
return deepWhere
|
||||
})
|
||||
continue
|
||||
}
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
value = deduplicate(value)
|
||||
where[prop] = ["$in", "$nin"].includes(prop) ? value : { $in: value }
|
||||
continue
|
||||
}
|
||||
|
||||
if (isObject(value)) {
|
||||
where[prop] = {}
|
||||
buildWhere(value, where[prop], flags)
|
||||
continue
|
||||
}
|
||||
|
||||
where[prop] = value
|
||||
}
|
||||
}
|
||||
34
packages/core/utils/src/modules-sdk/create-pg-connection.ts
Normal file
34
packages/core/utils/src/modules-sdk/create-pg-connection.ts
Normal file
@@ -0,0 +1,34 @@
|
||||
import { knex } from "@mikro-orm/knex"
|
||||
import { ModuleServiceInitializeOptions } from "@medusajs/types"
|
||||
import { isDefined } from "../common"
|
||||
|
||||
type Options = ModuleServiceInitializeOptions["database"]
|
||||
|
||||
/**
|
||||
* Create a new knex (pg in the future) connection which can be reused and shared
|
||||
* @param options
|
||||
*/
|
||||
export function createPgConnection(options: Options) {
|
||||
const { pool, schema = "public", clientUrl, driverOptions } = options
|
||||
const ssl =
|
||||
options.driverOptions?.ssl ??
|
||||
options.driverOptions?.connection?.ssl ??
|
||||
false
|
||||
|
||||
return knex<any, any>({
|
||||
client: "pg",
|
||||
searchPath: schema,
|
||||
connection: {
|
||||
connectionString: clientUrl,
|
||||
ssl,
|
||||
idle_in_transaction_session_timeout:
|
||||
(driverOptions?.idle_in_transaction_session_timeout as number) ??
|
||||
undefined, // prevent null to be passed
|
||||
},
|
||||
pool: {
|
||||
// https://knexjs.org/guide/#pool
|
||||
...(pool ?? {}),
|
||||
min: (pool?.min as number) ?? 0,
|
||||
},
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
export function MedusaContext() {
|
||||
return function (
|
||||
target: any,
|
||||
propertyKey: string | symbol,
|
||||
parameterIndex: number
|
||||
) {
|
||||
target.MedusaContextIndex_ ??= {}
|
||||
target.MedusaContextIndex_[propertyKey] = parameterIndex
|
||||
}
|
||||
}
|
||||
|
||||
MedusaContext.getIndex = function (
|
||||
target: any,
|
||||
propertyKey: string
|
||||
): number | undefined {
|
||||
return target.MedusaContextIndex_?.[propertyKey]
|
||||
}
|
||||
|
||||
export const MedusaContextType = "MedusaContext"
|
||||
@@ -0,0 +1,27 @@
|
||||
import { MessageAggregator } from "../../event-bus"
|
||||
import { InjectIntoContext } from "./inject-into-context"
|
||||
import {MessageAggregatorFormat} from "@medusajs/types";
|
||||
|
||||
/**
 * Method decorator that aggregates messages produced during a call and
 * flushes them through the service's event bus (`emitEvents_`) after the
 * decorated method resolves. The aggregator is exposed to the method via
 * the context's `messageAggregator` property (injected by
 * InjectIntoContext).
 */
export function EmitEvents(options: MessageAggregatorFormat = {} as MessageAggregatorFormat) {
  return function (
    target: any,
    propertyKey: string | symbol,
    descriptor: any
  ): void {
    // NOTE(review): a single aggregator is created at decoration time and is
    // shared by every instance and every concurrent invocation of this
    // method — confirm that interleaved calls cannot mix or drop messages
    // (clearMessages() below wipes the shared state after each emit).
    const aggregator = new MessageAggregator()
    InjectIntoContext({
      messageAggregator: () => aggregator,
    })(target, propertyKey, descriptor)

    const original = descriptor.value

    descriptor.value = async function (...args: any[]) {
      const result = await original.apply(this, args)

      // Flush everything gathered during this call, grouped per `options`.
      await target.emitEvents_.apply(this, [aggregator.getMessages(options)])

      aggregator.clearMessages()
      return result
    }
  }
}
|
||||
6
packages/core/utils/src/modules-sdk/decorators/index.ts
Normal file
6
packages/core/utils/src/modules-sdk/decorators/index.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
export * from "./context-parameter"
|
||||
export * from "./inject-manager"
|
||||
export * from "./inject-shared-context"
|
||||
export * from "./inject-transaction-manager"
|
||||
export * from "./inject-into-context"
|
||||
export * from "./emit-events"
|
||||
@@ -0,0 +1,32 @@
|
||||
import { MessageAggregator } from "../../event-bus"
|
||||
|
||||
export function InjectIntoContext(
|
||||
properties: Record<string, unknown | Function>
|
||||
): MethodDecorator {
|
||||
return function (
|
||||
target: any,
|
||||
propertyKey: string | symbol,
|
||||
descriptor: any
|
||||
): void {
|
||||
if (!target.MedusaContextIndex_) {
|
||||
throw new Error(
|
||||
`To apply @InjectIntoContext you have to flag a parameter using @MedusaContext`
|
||||
)
|
||||
}
|
||||
|
||||
const argIndex = target.MedusaContextIndex_[propertyKey]
|
||||
const original = descriptor.value
|
||||
descriptor.value = async function (...args: any[]) {
|
||||
for (const key of Object.keys(properties)) {
|
||||
args[argIndex] = args[argIndex] ?? {}
|
||||
args[argIndex][key] =
|
||||
args[argIndex][key] ??
|
||||
(typeof properties[key] === "function"
|
||||
? (properties[key] as Function).apply(this, args)
|
||||
: properties[key])
|
||||
}
|
||||
|
||||
return await original.apply(this, args)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,55 @@
|
||||
import { Context } from "@medusajs/types"
|
||||
import { MedusaContextType } from "./context-parameter"
|
||||
|
||||
/**
 * Method decorator ensuring the Medusa context argument (flagged with
 * @MedusaContext) carries an entity manager before the method runs.
 * The manager is obtained from the decorated instance itself, or from one of
 * its properties (e.g. a repository) when `managerProperty` is given.
 *
 * @throws when no parameter of the method was flagged with @MedusaContext
 */
export function InjectManager(managerProperty?: string): MethodDecorator {
  return function (
    target: any,
    propertyKey: string | symbol,
    descriptor: any
  ): void {
    if (!target.MedusaContextIndex_) {
      throw new Error(
        `To apply @InjectManager you have to flag a parameter using @MedusaContext`
      )
    }

    const originalMethod = descriptor.value
    const argIndex = target.MedusaContextIndex_[propertyKey]

    descriptor.value = function (...args: any[]) {
      const originalContext = args[argIndex] ?? {}
      // Shallow proxy of the caller's context: every property except the
      // managers is exposed through accessors so that reads and writes flow
      // through to the original context object.
      const copiedContext = {} as Context
      for (const key in originalContext) {
        if (key === "manager" || key === "transactionManager") {
          continue
        }

        Object.defineProperty(copiedContext, key, {
          get: function () {
            return originalContext[key]
          },
          set: function (value) {
            originalContext[key] = value
          },
        })
      }

      // Resolve which object carries getFreshManager().
      const resourceWithManager = !managerProperty
        ? this
        : this[managerProperty]

      // Reuse the caller's manager when present; otherwise request a fresh one.
      copiedContext.manager =
        originalContext.manager ?? resourceWithManager.getFreshManager()

      if (originalContext?.transactionManager) {
        copiedContext.transactionManager = originalContext?.transactionManager
      }

      copiedContext.__type = MedusaContextType

      args[argIndex] = copiedContext

      return originalMethod.apply(this, args)
    }
  }
}
|
||||
@@ -0,0 +1,28 @@
|
||||
import { Context, SharedContext } from "@medusajs/types"
|
||||
import { MedusaContextType } from "./context-parameter"
|
||||
|
||||
export function InjectSharedContext(): MethodDecorator {
|
||||
return function (
|
||||
target: any,
|
||||
propertyKey: string | symbol,
|
||||
descriptor: any
|
||||
): void {
|
||||
if (!target.MedusaContextIndex_) {
|
||||
throw new Error(
|
||||
`To apply @InjectSharedContext you have to flag a parameter using @MedusaContext`
|
||||
)
|
||||
}
|
||||
|
||||
const originalMethod = descriptor.value
|
||||
const argIndex = target.MedusaContextIndex_[propertyKey]
|
||||
|
||||
descriptor.value = function (...args: any[]) {
|
||||
const context: SharedContext | Context = {
|
||||
...(args[argIndex] ?? { __type: MedusaContextType }),
|
||||
}
|
||||
args[argIndex] = context
|
||||
|
||||
return originalMethod.apply(this, args)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,86 @@
|
||||
import { Context } from "@medusajs/types"
|
||||
import { isString } from "../../common"
|
||||
import { MedusaContextType } from "./context-parameter"
|
||||
|
||||
/**
 * Method decorator wrapping the decorated method in a transaction on the
 * Medusa context argument (flagged with @MedusaContext).
 *
 * The first argument is overloaded: a string names the property of `this`
 * that owns `.transaction()` (e.g. a repository), while a predicate decides
 * whether a new transaction must be forced even when the incoming context
 * already carries one. When a transaction manager is already present and the
 * predicate returns false, the method runs as-is.
 *
 * @throws when no parameter of the method was flagged with @MedusaContext
 */
export function InjectTransactionManager(
  shouldForceTransactionOrManagerProperty:
    | string
    | ((target: any) => boolean) = () => false,
  managerProperty?: string
): MethodDecorator {
  return function (
    target: any,
    propertyKey: string | symbol,
    descriptor: any
  ): void {
    if (!target.MedusaContextIndex_) {
      throw new Error(
        `An error occured applying decorator '@InjectTransactionManager' to method ${String(
          propertyKey
        )}: Missing parameter with flag @MedusaContext`
      )
    }

    const originalMethod = descriptor.value
    // Disambiguate the overloaded first argument (see function docs).
    const shouldForceTransaction = !isString(
      shouldForceTransactionOrManagerProperty
    )
      ? shouldForceTransactionOrManagerProperty
      : () => false
    managerProperty = isString(shouldForceTransactionOrManagerProperty)
      ? shouldForceTransactionOrManagerProperty
      : managerProperty

    const argIndex = target.MedusaContextIndex_[propertyKey]
    descriptor.value = async function (...args: any[]) {
      const shouldForceTransactionRes = shouldForceTransaction(target)
      const context: Context = args[argIndex] ?? {}
      const originalContext = args[argIndex] ?? {}

      // Already inside a transaction and not forcing a new one: run directly.
      if (!shouldForceTransactionRes && context?.transactionManager) {
        return await originalMethod.apply(this, args)
      }

      return await (!managerProperty
        ? this
        : this[managerProperty]
      ).transaction(
        async (transactionManager) => {
          // Shallow proxy of the caller's context (managers excluded):
          // reads/writes on the copy flow through to the original object.
          const copiedContext = {} as Context
          for (const key in originalContext) {
            if (key === "manager" || key === "transactionManager") {
              continue
            }

            Object.defineProperty(copiedContext, key, {
              get: function () {
                return originalContext[key]
              },
              set: function (value) {
                originalContext[key] = value
              },
            })
          }

          copiedContext.transactionManager = transactionManager

          if (originalContext?.manager) {
            copiedContext.manager = originalContext?.manager
          }

          copiedContext.__type = MedusaContextType

          args[argIndex] = copiedContext

          return await originalMethod.apply(this, args)
        },
        {
          // Reuse/nest based on the incoming context's transaction state.
          transaction: context?.transactionManager,
          isolationLevel: (context as Context)?.isolationLevel,
          enableNestedTransactions:
            (context as Context).enableNestedTransactions ?? false,
        }
      )
    }
  }
}
|
||||
25
packages/core/utils/src/modules-sdk/definition.ts
Normal file
25
packages/core/utils/src/modules-sdk/definition.ts
Normal file
@@ -0,0 +1,25 @@
|
||||
/**
 * Canonical registration keys for the Medusa modules: the value of each
 * member is the name under which that module (or its service) is registered
 * in the dependency container.
 */
export enum Modules {
  AUTH = "auth",
  CACHE = "cacheService",
  CART = "cart",
  CUSTOMER = "customer",
  EVENT_BUS = "eventBus",
  INVENTORY = "inventoryService",
  LINK = "linkModules",
  PAYMENT = "payment",
  PRICING = "pricingService",
  PRODUCT = "productService",
  PROMOTION = "promotion",
  SALES_CHANNEL = "salesChannel",
  TAX = "tax",
  FULFILLMENT = "fulfillment",
  STOCK_LOCATION = "stockLocationService",
  USER = "user",
  WORKFLOW_ENGINE = "workflows",
  REGION = "region",
  ORDER = "order",
  API_KEY = "apiKey",
  STORE = "store",
  CURRENCY = "currency",
  FILE = "file",
}
|
||||
11
packages/core/utils/src/modules-sdk/index.ts
Normal file
11
packages/core/utils/src/modules-sdk/index.ts
Normal file
@@ -0,0 +1,11 @@
|
||||
export * from "./load-module-database-config"
|
||||
export * from "./decorators"
|
||||
export * from "./build-query"
|
||||
export * from "./loaders/mikro-orm-connection-loader"
|
||||
export * from "./loaders/mikro-orm-connection-loader-factory"
|
||||
export * from "./loaders/container-loader-factory"
|
||||
export * from "./create-pg-connection"
|
||||
export * from "./migration-scripts"
|
||||
export * from "./internal-module-service-factory"
|
||||
export * from "./abstract-module-service-factory"
|
||||
export * from "./definition"
|
||||
@@ -0,0 +1,515 @@
|
||||
import {
|
||||
BaseFilterable,
|
||||
Context,
|
||||
FilterQuery,
|
||||
FilterQuery as InternalFilterQuery,
|
||||
FindConfig,
|
||||
ModulesSdkTypes,
|
||||
UpsertWithReplaceConfig,
|
||||
} from "@medusajs/types"
|
||||
import { EntitySchema } from "@mikro-orm/core"
|
||||
import { EntityClass } from "@mikro-orm/core/typings"
|
||||
import {
|
||||
doNotForceTransaction,
|
||||
isDefined,
|
||||
isObject,
|
||||
isPresent,
|
||||
isString,
|
||||
lowerCaseFirst,
|
||||
MedusaError,
|
||||
shouldForceTransaction,
|
||||
} from "../common"
|
||||
import { buildQuery } from "./build-query"
|
||||
import {
|
||||
InjectManager,
|
||||
InjectTransactionManager,
|
||||
MedusaContext,
|
||||
} from "./decorators"
|
||||
import { FreeTextSearchFilterKey } from "../dal"
|
||||
|
||||
// Pairs a filter selector with the payload to apply to the entities it matches
// (used by the update overloads below).
type SelectorAndData = {
  selector: FilterQuery<any> | BaseFilterable<FilterQuery<any>>
  data: any
}
|
||||
|
||||
export function internalModuleServiceFactory<
|
||||
TContainer extends object = object
|
||||
>(
|
||||
model: any
|
||||
): {
|
||||
new <TEntity extends object = any>(
|
||||
container: TContainer
|
||||
): ModulesSdkTypes.InternalModuleService<TEntity, TContainer>
|
||||
} {
|
||||
const injectedRepositoryName = `${lowerCaseFirst(model.name)}Repository`
|
||||
const propertyRepositoryName = `__${injectedRepositoryName}__`
|
||||
|
||||
class AbstractService_<TEntity extends object>
|
||||
implements ModulesSdkTypes.InternalModuleService<TEntity, TContainer>
|
||||
{
|
||||
readonly __container__: TContainer;
|
||||
[key: string]: any
|
||||
|
||||
constructor(container: TContainer) {
|
||||
this.__container__ = container
|
||||
this[propertyRepositoryName] = container[injectedRepositoryName]
|
||||
}
|
||||
|
||||
static applyFreeTextSearchFilter(
|
||||
filters: FilterQuery,
|
||||
config: FindConfig<any>
|
||||
): void {
|
||||
if (isDefined(filters?.q)) {
|
||||
config.filters ??= {}
|
||||
config.filters[FreeTextSearchFilterKey] = {
|
||||
value: filters.q,
|
||||
fromEntity: model.name,
|
||||
}
|
||||
delete filters.q
|
||||
}
|
||||
}
|
||||
|
||||
static retrievePrimaryKeys(entity: EntityClass<any> | EntitySchema<any>) {
|
||||
return (
|
||||
(entity as EntitySchema<any>).meta?.primaryKeys ??
|
||||
(entity as EntityClass<any>).prototype.__meta?.primaryKeys ?? ["id"]
|
||||
)
|
||||
}
|
||||
|
||||
static buildUniqueCompositeKeyValue(keys: string[], data: object) {
|
||||
return keys.map((k) => data[k]).join(":")
|
||||
}
|
||||
|
||||
/**
|
||||
* Only apply top level default ordering as the relation
|
||||
* default ordering is already applied through the foreign key
|
||||
* @param config
|
||||
*/
|
||||
static applyDefaultOrdering(config: FindConfig<any>) {
|
||||
if (isPresent(config.order)) {
|
||||
return
|
||||
}
|
||||
|
||||
config.order = {}
|
||||
|
||||
const primaryKeys = AbstractService_.retrievePrimaryKeys(model)
|
||||
primaryKeys.forEach((primaryKey) => {
|
||||
config.order![primaryKey] = "ASC"
|
||||
})
|
||||
}
|
||||
|
||||
@InjectManager(propertyRepositoryName)
|
||||
async retrieve(
|
||||
idOrObject: string | object,
|
||||
config: FindConfig<TEntity> = {},
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<TEntity> {
|
||||
const primaryKeys = AbstractService_.retrievePrimaryKeys(model)
|
||||
|
||||
if (
|
||||
!isDefined(idOrObject) ||
|
||||
(isString(idOrObject) && primaryKeys.length > 1) ||
|
||||
((!isString(idOrObject) ||
|
||||
(isObject(idOrObject) && !idOrObject[primaryKeys[0]])) &&
|
||||
primaryKeys.length === 1)
|
||||
) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.NOT_FOUND,
|
||||
`${
|
||||
primaryKeys.length === 1
|
||||
? `${lowerCaseFirst(model.name) + " - " + primaryKeys[0]}`
|
||||
: `${lowerCaseFirst(model.name)} - ${primaryKeys.join(", ")}`
|
||||
} must be defined`
|
||||
)
|
||||
}
|
||||
|
||||
let primaryKeysCriteria = {}
|
||||
if (primaryKeys.length === 1) {
|
||||
primaryKeysCriteria[primaryKeys[0]] = idOrObject
|
||||
} else {
|
||||
const idOrObject_ = Array.isArray(idOrObject)
|
||||
? idOrObject
|
||||
: [idOrObject]
|
||||
primaryKeysCriteria = idOrObject_.map((primaryKeyValue) => ({
|
||||
$and: primaryKeys.map((key) => ({ [key]: primaryKeyValue[key] })),
|
||||
}))
|
||||
}
|
||||
|
||||
const queryOptions = buildQuery(primaryKeysCriteria, config)
|
||||
|
||||
const entities = await this[propertyRepositoryName].find(
|
||||
queryOptions,
|
||||
sharedContext
|
||||
)
|
||||
|
||||
if (!entities?.length) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.NOT_FOUND,
|
||||
`${model.name} with ${primaryKeys.join(", ")}: ${
|
||||
Array.isArray(idOrObject)
|
||||
? idOrObject.map((v) =>
|
||||
[isString(v) ? v : Object.values(v)].join(", ")
|
||||
)
|
||||
: idOrObject
|
||||
} was not found`
|
||||
)
|
||||
}
|
||||
|
||||
return entities[0]
|
||||
}
|
||||
|
||||
@InjectManager(propertyRepositoryName)
|
||||
async list(
|
||||
filters: FilterQuery<any> | BaseFilterable<FilterQuery<any>> = {},
|
||||
config: FindConfig<any> = {},
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<TEntity[]> {
|
||||
AbstractService_.applyDefaultOrdering(config)
|
||||
AbstractService_.applyFreeTextSearchFilter(filters, config)
|
||||
|
||||
const queryOptions = buildQuery(filters, config)
|
||||
|
||||
return await this[propertyRepositoryName].find(
|
||||
queryOptions,
|
||||
sharedContext
|
||||
)
|
||||
}
|
||||
|
||||
@InjectManager(propertyRepositoryName)
|
||||
async listAndCount(
|
||||
filters: FilterQuery<any> | BaseFilterable<FilterQuery<any>> = {},
|
||||
config: FindConfig<any> = {},
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<[TEntity[], number]> {
|
||||
AbstractService_.applyDefaultOrdering(config)
|
||||
AbstractService_.applyFreeTextSearchFilter(filters, config)
|
||||
|
||||
const queryOptions = buildQuery(filters, config)
|
||||
|
||||
return await this[propertyRepositoryName].findAndCount(
|
||||
queryOptions,
|
||||
sharedContext
|
||||
)
|
||||
}
|
||||
|
||||
create(data: any, sharedContext?: Context): Promise<TEntity>
|
||||
create(data: any[], sharedContext?: Context): Promise<TEntity[]>
|
||||
|
||||
@InjectTransactionManager(shouldForceTransaction, propertyRepositoryName)
|
||||
async create(
|
||||
data: any | any[],
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<TEntity | TEntity[]> {
|
||||
if (!isDefined(data) || (Array.isArray(data) && data.length === 0)) {
|
||||
return (Array.isArray(data) ? [] : void 0) as TEntity | TEntity[]
|
||||
}
|
||||
|
||||
const data_ = Array.isArray(data) ? data : [data]
|
||||
const entities = await this[propertyRepositoryName].create(
|
||||
data_,
|
||||
sharedContext
|
||||
)
|
||||
|
||||
return Array.isArray(data) ? entities : entities[0]
|
||||
}
|
||||
|
||||
update(data: any[], sharedContext?: Context): Promise<TEntity[]>
|
||||
update(data: any, sharedContext?: Context): Promise<TEntity>
|
||||
update(
|
||||
selectorAndData: SelectorAndData,
|
||||
sharedContext?: Context
|
||||
): Promise<TEntity[]>
|
||||
update(
|
||||
selectorAndData: SelectorAndData[],
|
||||
sharedContext?: Context
|
||||
): Promise<TEntity[]>
|
||||
|
||||
@InjectTransactionManager(shouldForceTransaction, propertyRepositoryName)
|
||||
async update(
|
||||
input: any | any[] | SelectorAndData | SelectorAndData[],
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<TEntity | TEntity[]> {
|
||||
if (!isDefined(input) || (Array.isArray(input) && input.length === 0)) {
|
||||
return (Array.isArray(input) ? [] : void 0) as TEntity | TEntity[]
|
||||
}
|
||||
|
||||
const primaryKeys = AbstractService_.retrievePrimaryKeys(model)
|
||||
const inputArray = Array.isArray(input) ? input : [input]
|
||||
|
||||
const toUpdateData: { entity; update }[] = []
|
||||
|
||||
// Only used when we receive data and no selector
|
||||
const keySelectorForDataOnly: any = {
|
||||
$or: [],
|
||||
}
|
||||
const keySelectorDataMap = new Map<string, any>()
|
||||
|
||||
for (const input_ of inputArray) {
|
||||
if (input_.selector) {
|
||||
const entitiesToUpdate = await this.list(
|
||||
input_.selector,
|
||||
{},
|
||||
sharedContext
|
||||
)
|
||||
// Create a pair of entity and data to update
|
||||
entitiesToUpdate.forEach((entity) => {
|
||||
toUpdateData.push({
|
||||
entity,
|
||||
update: input_.data,
|
||||
})
|
||||
})
|
||||
} else {
|
||||
// in case we are manipulating the data, then extract the primary keys as a selector and the rest as the data to update
|
||||
const selector = {}
|
||||
|
||||
primaryKeys.forEach((key) => {
|
||||
selector[key] = input_[key]
|
||||
})
|
||||
|
||||
const uniqueCompositeKey =
|
||||
AbstractService_.buildUniqueCompositeKeyValue(primaryKeys, input_)
|
||||
keySelectorDataMap.set(uniqueCompositeKey, input_)
|
||||
|
||||
keySelectorForDataOnly.$or.push(selector)
|
||||
}
|
||||
}
|
||||
|
||||
if (keySelectorForDataOnly.$or.length) {
|
||||
const entitiesToUpdate = await this.list(
|
||||
keySelectorForDataOnly,
|
||||
{},
|
||||
sharedContext
|
||||
)
|
||||
|
||||
// Create a pair of entity and data to update
|
||||
entitiesToUpdate.forEach((entity) => {
|
||||
const uniqueCompositeKey =
|
||||
AbstractService_.buildUniqueCompositeKeyValue(primaryKeys, entity)
|
||||
toUpdateData.push({
|
||||
entity,
|
||||
update: keySelectorDataMap.get(uniqueCompositeKey)!,
|
||||
})
|
||||
})
|
||||
|
||||
// Only throw for missing entities when we dont have selectors involved as selector by design can return 0 entities
|
||||
if (entitiesToUpdate.length !== keySelectorDataMap.size) {
|
||||
const entityName = (model as EntityClass<TEntity>).name ?? model
|
||||
|
||||
const compositeKeysValuesForFoundEntities = new Set(
|
||||
entitiesToUpdate.map((entity) => {
|
||||
return AbstractService_.buildUniqueCompositeKeyValue(
|
||||
primaryKeys,
|
||||
entity
|
||||
)
|
||||
})
|
||||
)
|
||||
|
||||
const missingEntityValues: any[] = []
|
||||
|
||||
;[...keySelectorDataMap.keys()].filter((key) => {
|
||||
if (!compositeKeysValuesForFoundEntities.has(key)) {
|
||||
const value = key.replace(/:/gi, " - ")
|
||||
missingEntityValues.push(value)
|
||||
}
|
||||
})
|
||||
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.NOT_FOUND,
|
||||
`${entityName} with ${primaryKeys.join(
|
||||
", "
|
||||
)} "${missingEntityValues.join(", ")}" not found`
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
return await this[propertyRepositoryName].update(
|
||||
toUpdateData,
|
||||
sharedContext
|
||||
)
|
||||
}
|
||||
|
||||
delete(idOrSelector: string, sharedContext?: Context): Promise<void>
|
||||
delete(idOrSelector: string[], sharedContext?: Context): Promise<void>
|
||||
delete(idOrSelector: object, sharedContext?: Context): Promise<void>
|
||||
delete(idOrSelector: object[], sharedContext?: Context): Promise<void>
|
||||
delete(
|
||||
idOrSelector: {
|
||||
selector: FilterQuery<any> | BaseFilterable<FilterQuery<any>>
|
||||
},
|
||||
sharedContext?: Context
|
||||
): Promise<void>
|
||||
|
||||
@InjectTransactionManager(doNotForceTransaction, propertyRepositoryName)
|
||||
async delete(
|
||||
idOrSelector:
|
||||
| string
|
||||
| string[]
|
||||
| object
|
||||
| object[]
|
||||
| {
|
||||
selector: FilterQuery<any> | BaseFilterable<FilterQuery<any>>
|
||||
},
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<void> {
|
||||
if (
|
||||
!isDefined(idOrSelector) ||
|
||||
(Array.isArray(idOrSelector) && !idOrSelector.length)
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
const primaryKeys = AbstractService_.retrievePrimaryKeys(model)
|
||||
|
||||
if (
|
||||
(Array.isArray(idOrSelector) && idOrSelector.length === 0) ||
|
||||
((isString(idOrSelector) ||
|
||||
(Array.isArray(idOrSelector) && isString(idOrSelector[0]))) &&
|
||||
primaryKeys.length > 1)
|
||||
) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.NOT_FOUND,
|
||||
`${
|
||||
primaryKeys.length === 1
|
||||
? `"${lowerCaseFirst(model.name) + " - " + primaryKeys[0]}"`
|
||||
: `${lowerCaseFirst(model.name)} - ${primaryKeys.join(", ")}`
|
||||
} must be defined`
|
||||
)
|
||||
}
|
||||
|
||||
const deleteCriteria: any = {
|
||||
$or: [],
|
||||
}
|
||||
|
||||
if (isObject(idOrSelector) && "selector" in idOrSelector) {
|
||||
const entitiesToDelete = await this.list(
|
||||
idOrSelector.selector as FilterQuery<any>,
|
||||
{
|
||||
select: primaryKeys,
|
||||
},
|
||||
sharedContext
|
||||
)
|
||||
|
||||
for (const entity of entitiesToDelete) {
|
||||
const criteria = {}
|
||||
primaryKeys.forEach((key) => {
|
||||
criteria[key] = entity[key]
|
||||
})
|
||||
deleteCriteria.$or.push(criteria)
|
||||
}
|
||||
} else {
|
||||
const primaryKeysValues = Array.isArray(idOrSelector)
|
||||
? idOrSelector
|
||||
: [idOrSelector]
|
||||
|
||||
deleteCriteria.$or = primaryKeysValues.map((primaryKeyValue) => {
|
||||
const criteria = {}
|
||||
|
||||
if (isObject(primaryKeyValue)) {
|
||||
Object.entries(primaryKeyValue).forEach(([key, value]) => {
|
||||
criteria[key] = value
|
||||
})
|
||||
} else {
|
||||
criteria[primaryKeys[0]] = primaryKeyValue
|
||||
}
|
||||
|
||||
// TODO: Revisit
|
||||
/*primaryKeys.forEach((key) => {
|
||||
/!*if (
|
||||
isObject(primaryKeyValue) &&
|
||||
!isDefined(primaryKeyValue[key]) &&
|
||||
// primaryKeys.length > 1
|
||||
) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.INVALID_DATA,
|
||||
`Composite key must contain all primary key fields: ${primaryKeys.join(
|
||||
", "
|
||||
)}. Found: ${Object.keys(primaryKeyValue)}`
|
||||
)
|
||||
}*!/
|
||||
|
||||
criteria[key] = isObject(primaryKeyValue)
|
||||
? primaryKeyValue[key]
|
||||
: primaryKeyValue
|
||||
})*/
|
||||
return criteria
|
||||
})
|
||||
}
|
||||
|
||||
await this[propertyRepositoryName].delete(deleteCriteria, sharedContext)
|
||||
}
|
||||
|
||||
@InjectTransactionManager(propertyRepositoryName)
|
||||
async softDelete(
|
||||
idsOrFilter: string[] | InternalFilterQuery,
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<[TEntity[], Record<string, unknown[]>]> {
|
||||
if (Array.isArray(idsOrFilter) && !idsOrFilter.length) {
|
||||
return [[], {}]
|
||||
}
|
||||
|
||||
return await this[propertyRepositoryName].softDelete(
|
||||
idsOrFilter,
|
||||
sharedContext
|
||||
)
|
||||
}
|
||||
|
||||
@InjectTransactionManager(propertyRepositoryName)
|
||||
async restore(
|
||||
idsOrFilter: string[] | InternalFilterQuery,
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<[TEntity[], Record<string, unknown[]>]> {
|
||||
return await this[propertyRepositoryName].restore(
|
||||
idsOrFilter,
|
||||
sharedContext
|
||||
)
|
||||
}
|
||||
|
||||
upsert(data: any[], sharedContext?: Context): Promise<TEntity[]>
|
||||
upsert(data: any, sharedContext?: Context): Promise<TEntity>
|
||||
|
||||
@InjectTransactionManager(propertyRepositoryName)
|
||||
async upsert(
|
||||
data: any | any[],
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<TEntity | TEntity[]> {
|
||||
const data_ = Array.isArray(data) ? data : [data]
|
||||
const entities = await this[propertyRepositoryName].upsert(
|
||||
data_,
|
||||
sharedContext
|
||||
)
|
||||
return Array.isArray(data) ? entities : entities[0]
|
||||
}
|
||||
|
||||
upsertWithReplace(
|
||||
data: any[],
|
||||
config?: UpsertWithReplaceConfig<TEntity>,
|
||||
sharedContext?: Context
|
||||
): Promise<TEntity[]>
|
||||
upsertWithReplace(
|
||||
data: any,
|
||||
config?: UpsertWithReplaceConfig<TEntity>,
|
||||
sharedContext?: Context
|
||||
): Promise<TEntity>
|
||||
|
||||
@InjectTransactionManager(propertyRepositoryName)
|
||||
async upsertWithReplace(
|
||||
data: any | any[],
|
||||
config: UpsertWithReplaceConfig<TEntity> = {
|
||||
relations: [],
|
||||
},
|
||||
@MedusaContext() sharedContext: Context = {}
|
||||
): Promise<TEntity | TEntity[]> {
|
||||
const data_ = Array.isArray(data) ? data : [data]
|
||||
const entities = await this[propertyRepositoryName].upsertWithReplace(
|
||||
data_,
|
||||
config,
|
||||
sharedContext
|
||||
)
|
||||
return Array.isArray(data) ? entities : entities[0]
|
||||
}
|
||||
}
|
||||
|
||||
return AbstractService_ as unknown as new <TEntity extends {}>(
|
||||
container: TContainer
|
||||
) => ModulesSdkTypes.InternalModuleService<TEntity, TContainer>
|
||||
}
|
||||
@@ -0,0 +1,104 @@
|
||||
import { ModulesSdkTypes } from "@medusajs/types"
|
||||
import { MedusaError } from "../common"
|
||||
|
||||
function getEnv(key: string, moduleName: string): string {
|
||||
const value =
|
||||
process.env[`${moduleName.toUpperCase()}_${key}`] ??
|
||||
process.env[`MEDUSA_${key}`] ??
|
||||
process.env[`${key}`]
|
||||
return value ?? ""
|
||||
}
|
||||
|
||||
function isModuleServiceInitializeOptions(
|
||||
obj: unknown
|
||||
): obj is ModulesSdkTypes.ModuleServiceInitializeOptions {
|
||||
return !!(obj as any)?.database
|
||||
}
|
||||
|
||||
function getDefaultDriverOptions(clientUrl: string) {
|
||||
const localOptions = {
|
||||
connection: {
|
||||
ssl: false,
|
||||
},
|
||||
}
|
||||
|
||||
const remoteOptions = {
|
||||
connection: {
|
||||
ssl: {
|
||||
rejectUnauthorized: false,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
if (clientUrl) {
|
||||
return clientUrl.match(/localhost/i) ? localOptions : remoteOptions
|
||||
}
|
||||
|
||||
return process.env.NODE_ENV?.match(/prod/i)
|
||||
? remoteOptions
|
||||
: process.env.NODE_ENV?.match(/dev/i)
|
||||
? localOptions
|
||||
: {}
|
||||
}
|
||||
|
||||
function getDatabaseUrl(
|
||||
config: ModulesSdkTypes.ModuleServiceInitializeOptions
|
||||
): string {
|
||||
const { clientUrl, host, port, user, password, database } = config.database!
|
||||
|
||||
if (host) {
|
||||
return `postgres://${user}:${password}@${host}:${port}/${database}`
|
||||
}
|
||||
|
||||
return clientUrl!
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the config for the database connection. The options can be retrieved
|
||||
* e.g through PRODUCT_* (e.g PRODUCT_POSTGRES_URL) or * (e.g POSTGRES_URL) environment variables or the options object.
|
||||
* @param options
|
||||
* @param moduleName
|
||||
*/
|
||||
export function loadDatabaseConfig(
|
||||
moduleName: string,
|
||||
options?: ModulesSdkTypes.ModuleServiceInitializeOptions,
|
||||
silent: boolean = false
|
||||
): Pick<
|
||||
ModulesSdkTypes.ModuleServiceInitializeOptions["database"],
|
||||
"clientUrl" | "schema" | "driverOptions" | "debug"
|
||||
> {
|
||||
const clientUrl =
|
||||
options?.database?.clientUrl ?? getEnv("POSTGRES_URL", moduleName)
|
||||
|
||||
const database = {
|
||||
clientUrl,
|
||||
schema: getEnv("POSTGRES_SCHEMA", moduleName) ?? "public",
|
||||
driverOptions: JSON.parse(
|
||||
getEnv("POSTGRES_DRIVER_OPTIONS", moduleName) ||
|
||||
JSON.stringify(getDefaultDriverOptions(clientUrl))
|
||||
),
|
||||
debug: false,
|
||||
connection: undefined,
|
||||
}
|
||||
|
||||
if (isModuleServiceInitializeOptions(options)) {
|
||||
database.clientUrl = getDatabaseUrl({
|
||||
database: { ...options.database, clientUrl },
|
||||
})
|
||||
database.schema = options.database!.schema ?? database.schema
|
||||
database.driverOptions =
|
||||
options.database!.driverOptions ??
|
||||
getDefaultDriverOptions(database.clientUrl)
|
||||
database.debug = options.database!.debug ?? database.debug
|
||||
database.connection = options.database!.connection
|
||||
}
|
||||
|
||||
if (!database.clientUrl && !silent && !database.connection) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.INVALID_ARGUMENT,
|
||||
"No database clientUrl provided. Please provide the clientUrl through the [MODULE]_POSTGRES_URL, MEDUSA_POSTGRES_URL or POSTGRES_URL environment variable or the options object in the initialize function."
|
||||
)
|
||||
}
|
||||
|
||||
return database
|
||||
}
|
||||
@@ -0,0 +1,174 @@
|
||||
import {
|
||||
Constructor,
|
||||
LoaderOptions,
|
||||
MedusaContainer,
|
||||
ModuleServiceInitializeCustomDataLayerOptions,
|
||||
ModuleServiceInitializeOptions,
|
||||
RepositoryService,
|
||||
} from "@medusajs/types"
|
||||
|
||||
import { asClass } from "awilix"
|
||||
import { internalModuleServiceFactory } from "../internal-module-service-factory"
|
||||
import { lowerCaseFirst } from "../../common"
|
||||
import { mikroOrmBaseRepositoryFactory } from "../../dal"
|
||||
|
||||
/**
 * Options consumed by a repository loader: the module models to build
 * default repositories for, optional module-provided repositories,
 * user-supplied custom repositories, and the container to register into.
 */
type RepositoryLoaderOptions = {
  moduleModels: Record<string, any>
  moduleRepositories?: Record<string, any>
  customRepositories: Record<string, any>
  container: MedusaContainer
}

/**
 * Options consumed by the service loader: the module models to build
 * default services for, the module-provided services, and the container
 * to register into.
 */
type ServiceLoaderOptions = {
  moduleModels: Record<string, any>
  moduleServices: Record<string, any>
  container: MedusaContainer
}
|
||||
|
||||
/**
|
||||
* Factory for creating a container loader for a module.
|
||||
*
|
||||
* @param moduleModels
|
||||
* @param moduleServices
|
||||
* @param moduleRepositories
|
||||
* @param customRepositoryLoader The default repository loader is based on mikro orm. If you want to use a custom repository loader, you can pass it here.
|
||||
*/
|
||||
export function moduleContainerLoaderFactory({
|
||||
moduleModels,
|
||||
moduleServices,
|
||||
moduleRepositories = {},
|
||||
customRepositoryLoader = loadModuleRepositories,
|
||||
}: {
|
||||
moduleModels: Record<string, any>
|
||||
moduleServices: Record<string, any>
|
||||
moduleRepositories?: Record<string, any>
|
||||
customRepositoryLoader?: (options: RepositoryLoaderOptions) => void
|
||||
}): ({ container, options }: LoaderOptions) => Promise<void> {
|
||||
return async ({
|
||||
container,
|
||||
options,
|
||||
}: LoaderOptions<
|
||||
| ModuleServiceInitializeOptions
|
||||
| ModuleServiceInitializeCustomDataLayerOptions
|
||||
>) => {
|
||||
const customRepositories = (
|
||||
options as ModuleServiceInitializeCustomDataLayerOptions
|
||||
)?.repositories
|
||||
|
||||
loadModuleServices({
|
||||
moduleModels,
|
||||
moduleServices,
|
||||
container,
|
||||
})
|
||||
|
||||
const repositoryLoader = customRepositoryLoader ?? loadModuleRepositories
|
||||
repositoryLoader({
|
||||
moduleModels,
|
||||
moduleRepositories,
|
||||
customRepositories: customRepositories ?? {},
|
||||
container,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the services from the module services object. If a service is not
|
||||
* present a default service will be created for the model.
|
||||
*
|
||||
* @param moduleModels
|
||||
* @param moduleServices
|
||||
* @param container
|
||||
*/
|
||||
export function loadModuleServices({
|
||||
moduleModels,
|
||||
moduleServices,
|
||||
container,
|
||||
}: ServiceLoaderOptions) {
|
||||
const moduleServicesMap = new Map(
|
||||
Object.entries(moduleServices).map(([key, repository]) => [
|
||||
lowerCaseFirst(key),
|
||||
repository,
|
||||
])
|
||||
)
|
||||
|
||||
// Build default services for all models that are not present in the module services
|
||||
Object.values(moduleModels).forEach((Model) => {
|
||||
const mappedServiceName = lowerCaseFirst(Model.name) + "Service"
|
||||
const finalService = moduleServicesMap.get(mappedServiceName)
|
||||
|
||||
if (!finalService) {
|
||||
moduleServicesMap.set(
|
||||
mappedServiceName,
|
||||
internalModuleServiceFactory(Model)
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
const allServices = [...moduleServicesMap]
|
||||
|
||||
allServices.forEach(([key, service]) => {
|
||||
container.register({
|
||||
[lowerCaseFirst(key)]: asClass(service as Constructor<any>).singleton(),
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Load the repositories from the custom repositories object. If a repository is not
|
||||
* present in the custom repositories object, the default repository will be used from the module repository.
|
||||
* If none are present, a default repository will be created for the model.
|
||||
*
|
||||
* @param moduleModels
|
||||
* @param moduleRepositories
|
||||
* @param customRepositories
|
||||
* @param container
|
||||
*/
|
||||
export function loadModuleRepositories({
|
||||
moduleModels,
|
||||
moduleRepositories = {},
|
||||
customRepositories,
|
||||
container,
|
||||
}: RepositoryLoaderOptions) {
|
||||
const customRepositoriesMap = new Map(
|
||||
Object.entries(customRepositories).map(([key, repository]) => [
|
||||
lowerCaseFirst(key),
|
||||
repository,
|
||||
])
|
||||
)
|
||||
const moduleRepositoriesMap = new Map(
|
||||
Object.entries(moduleRepositories).map(([key, repository]) => [
|
||||
lowerCaseFirst(key),
|
||||
repository,
|
||||
])
|
||||
)
|
||||
|
||||
// Build default repositories for all models that are not present in the custom repositories or module repositories
|
||||
Object.values(moduleModels).forEach((Model) => {
|
||||
const mappedRepositoryName = lowerCaseFirst(Model.name) + "Repository"
|
||||
let finalRepository = customRepositoriesMap.get(mappedRepositoryName)
|
||||
finalRepository ??= moduleRepositoriesMap.get(mappedRepositoryName)
|
||||
|
||||
if (!finalRepository) {
|
||||
moduleRepositoriesMap.set(
|
||||
mappedRepositoryName,
|
||||
mikroOrmBaseRepositoryFactory(Model)
|
||||
)
|
||||
}
|
||||
})
|
||||
|
||||
const allRepositories = [...customRepositoriesMap, ...moduleRepositoriesMap]
|
||||
|
||||
allRepositories.forEach(([key, repository]) => {
|
||||
let finalRepository = customRepositoriesMap.get(key)
|
||||
|
||||
if (!finalRepository) {
|
||||
finalRepository = repository
|
||||
}
|
||||
|
||||
container.register({
|
||||
[lowerCaseFirst(key)]: asClass(
|
||||
finalRepository as Constructor<RepositoryService>
|
||||
).singleton(),
|
||||
})
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,34 @@
|
||||
import { InternalModuleDeclaration, LoaderOptions } from "@medusajs/types"
|
||||
import { mikroOrmConnectionLoader } from "./mikro-orm-connection-loader"
|
||||
|
||||
/**
|
||||
* Factory for creating a MikroORM connection loader for the modules
|
||||
*
|
||||
* @param moduleName
|
||||
* @param moduleModels
|
||||
* @param migrationsPath
|
||||
*/
|
||||
export function mikroOrmConnectionLoaderFactory({
|
||||
moduleName,
|
||||
moduleModels,
|
||||
migrationsPath,
|
||||
}: {
|
||||
moduleName: string
|
||||
moduleModels: any[]
|
||||
migrationsPath?: string
|
||||
}): any {
|
||||
return async (
|
||||
{ options, container, logger }: LoaderOptions,
|
||||
moduleDeclaration?: InternalModuleDeclaration
|
||||
): Promise<void> => {
|
||||
await mikroOrmConnectionLoader({
|
||||
moduleName,
|
||||
entities: moduleModels,
|
||||
container,
|
||||
options,
|
||||
moduleDeclaration,
|
||||
logger,
|
||||
pathToMigrations: migrationsPath ?? "",
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,163 @@
|
||||
import {
|
||||
Logger,
|
||||
MedusaContainer,
|
||||
MODULE_RESOURCE_TYPE,
|
||||
MODULE_SCOPE,
|
||||
ModulesSdkTypes,
|
||||
} from "@medusajs/types"
|
||||
import { PostgreSqlDriver, SqlEntityManager } from "@mikro-orm/postgresql"
|
||||
import { asValue } from "awilix"
|
||||
import { ContainerRegistrationKeys, MedusaError } from "../../common"
|
||||
import {
|
||||
FreeTextSearchFilterKey,
|
||||
mikroOrmCreateConnection,
|
||||
mikroOrmFreeTextSearchFilterOptionsFactory,
|
||||
} from "../../dal"
|
||||
import { loadDatabaseConfig } from "../load-module-database-config"
|
||||
|
||||
/**
 * Load a MikroORM connection into the container.
 *
 * Resolution order (first match wins):
 *   1. a custom `manager` passed through the options is registered as-is
 *   2. an internal module with shared resources reuses the application's
 *      shared PG connection
 *   3. otherwise a dedicated connection is created, optionally reusing an
 *      existing `database.connection` from the options
 *
 * @param moduleName
 * @param container
 * @param options
 * @param moduleDeclaration
 * @param entities
 * @param pathToMigrations
 */
export async function mikroOrmConnectionLoader({
  moduleName,
  container,
  options,
  moduleDeclaration,
  entities,
  pathToMigrations,
}: {
  moduleName: string
  entities: any[]
  container: MedusaContainer
  options?:
    | ModulesSdkTypes.ModuleServiceInitializeOptions
    | ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions
  moduleDeclaration?: ModulesSdkTypes.InternalModuleDeclaration
  logger?: Logger
  pathToMigrations: string
}) {
  // Global filter enabling free-text search across the module's entities.
  const freeTextSearchGlobalFilter =
    mikroOrmFreeTextSearchFilterOptionsFactory(entities)

  let manager = (
    options as ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions
  )?.manager

  // Custom manager provided
  if (manager) {
    container.register({
      manager: asValue(manager),
    })
    return
  }

  if (
    moduleDeclaration?.scope === MODULE_SCOPE.INTERNAL &&
    moduleDeclaration.resources === MODULE_RESOURCE_TYPE.SHARED
  ) {
    // Missing database config is tolerated here — the shared connection is
    // the source of truth for shared-resource modules.
    const shouldSwallowError = true
    const dbConfig = loadDatabaseConfig(
      moduleName,
      (options ?? {}) as ModulesSdkTypes.ModuleServiceInitializeOptions,
      shouldSwallowError
    )
    return await loadShared({
      database: {
        ...dbConfig,
        filters: {
          [FreeTextSearchFilterKey as string]: freeTextSearchGlobalFilter,
        },
      },
      container,
      entities,
      pathToMigrations,
    })
  }

  /**
   * Reuse an existing connection if it is passed in the options
   */
  let dbConfig
  // Config errors are only swallowed when an explicit connection is
  // provided, since the clientUrl is then not required.
  const shouldSwallowError = !!(
    options as ModulesSdkTypes.ModuleServiceInitializeOptions
  )?.database?.connection
  dbConfig = {
    ...loadDatabaseConfig(
      moduleName,
      (options ?? {}) as ModulesSdkTypes.ModuleServiceInitializeOptions,
      shouldSwallowError
    ),
    connection: (options as ModulesSdkTypes.ModuleServiceInitializeOptions)
      ?.database?.connection,
  }

  // `manager` is always undefined here (the custom-manager branch returned
  // above), so a dedicated connection is created.
  manager ??= await loadDefault({
    database: {
      ...dbConfig,
      filters: {
        [FreeTextSearchFilterKey as string]: freeTextSearchGlobalFilter,
      },
    },
    entities,
    pathToMigrations,
  })

  container.register({
    manager: asValue(manager),
  })
}
|
||||
|
||||
async function loadDefault({
|
||||
database,
|
||||
entities,
|
||||
pathToMigrations,
|
||||
}): Promise<SqlEntityManager<PostgreSqlDriver>> {
|
||||
if (!database) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.INVALID_ARGUMENT,
|
||||
`Database config is not present at module config "options.database"`
|
||||
)
|
||||
}
|
||||
|
||||
const orm = await mikroOrmCreateConnection(
|
||||
database,
|
||||
entities,
|
||||
pathToMigrations
|
||||
)
|
||||
|
||||
return orm.em.fork()
|
||||
}
|
||||
|
||||
async function loadShared({ database, container, entities, pathToMigrations }) {
|
||||
const sharedConnection = container.resolve(
|
||||
ContainerRegistrationKeys.PG_CONNECTION,
|
||||
{
|
||||
allowUnregistered: true,
|
||||
}
|
||||
)
|
||||
if (!sharedConnection) {
|
||||
throw new Error(
|
||||
"The module is setup to use a shared resources but no shared connection is present."
|
||||
)
|
||||
}
|
||||
|
||||
const manager = await loadDefault({
|
||||
entities,
|
||||
database: {
|
||||
...database,
|
||||
connection: sharedConnection,
|
||||
},
|
||||
pathToMigrations,
|
||||
})
|
||||
container.register({
|
||||
manager: asValue(manager),
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
export * from "./migration-down"
|
||||
export * from "./migration-up"
|
||||
export * from "./seed"
|
||||
@@ -0,0 +1,60 @@
|
||||
import { LoaderOptions, Logger, ModulesSdkTypes } from "@medusajs/types"
|
||||
|
||||
import { EntitySchema } from "@mikro-orm/core"
|
||||
import { upperCaseFirst } from "../../common"
|
||||
import { loadDatabaseConfig } from "../load-module-database-config"
|
||||
import { mikroOrmCreateConnection } from "../../dal"
|
||||
|
||||
/**
|
||||
* Utility function to build a migration script that will revert the migrations.
|
||||
* Only used in mikro orm based modules.
|
||||
* @param moduleName
|
||||
* @param models
|
||||
* @param pathToMigrations
|
||||
*/
|
||||
export function buildRevertMigrationScript({
|
||||
moduleName,
|
||||
models,
|
||||
pathToMigrations,
|
||||
}) {
|
||||
/**
|
||||
* This script is only valid for mikro orm managers. If a user provide a custom manager
|
||||
* he is in charge of reverting the migrations.
|
||||
* @param options
|
||||
* @param logger
|
||||
* @param moduleDeclaration
|
||||
*/
|
||||
return async function ({
|
||||
options,
|
||||
logger,
|
||||
}: Pick<
|
||||
LoaderOptions<ModulesSdkTypes.ModuleServiceInitializeOptions>,
|
||||
"options" | "logger"
|
||||
> = {}) {
|
||||
logger ??= console as unknown as Logger
|
||||
|
||||
const dbData = loadDatabaseConfig(moduleName, options)!
|
||||
const entities = Object.values(models) as unknown as EntitySchema[]
|
||||
|
||||
const orm = await mikroOrmCreateConnection(
|
||||
dbData,
|
||||
entities,
|
||||
pathToMigrations
|
||||
)
|
||||
|
||||
try {
|
||||
const migrator = orm.getMigrator()
|
||||
await migrator.down()
|
||||
|
||||
logger?.info(`${upperCaseFirst(moduleName)} module migration executed`)
|
||||
} catch (error) {
|
||||
logger?.error(
|
||||
`${upperCaseFirst(
|
||||
moduleName
|
||||
)} module migration failed to run - Error: ${error}`
|
||||
)
|
||||
}
|
||||
|
||||
await orm.close()
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,67 @@
|
||||
import { LoaderOptions, Logger, ModulesSdkTypes } from "@medusajs/types"
|
||||
import { EntitySchema } from "@mikro-orm/core"
|
||||
import { upperCaseFirst } from "../../common"
|
||||
import { loadDatabaseConfig } from "../load-module-database-config"
|
||||
import { mikroOrmCreateConnection } from "../../dal"
|
||||
|
||||
/**
|
||||
* Utility function to build a migration script that will run the migrations.
|
||||
* Only used in mikro orm based modules.
|
||||
* @param moduleName
|
||||
* @param models
|
||||
* @param pathToMigrations
|
||||
*/
|
||||
export function buildMigrationScript({ moduleName, models, pathToMigrations }) {
|
||||
/**
|
||||
* This script is only valid for mikro orm managers. If a user provide a custom manager
|
||||
* he is in charge of running the migrations.
|
||||
* @param options
|
||||
* @param logger
|
||||
* @param moduleDeclaration
|
||||
*/
|
||||
return async function ({
|
||||
options,
|
||||
logger,
|
||||
}: Pick<
|
||||
LoaderOptions<ModulesSdkTypes.ModuleServiceInitializeOptions>,
|
||||
"options" | "logger"
|
||||
> = {}) {
|
||||
logger ??= console as unknown as Logger
|
||||
|
||||
const dbData = loadDatabaseConfig(moduleName, options)!
|
||||
const entities = Object.values(models) as unknown as EntitySchema[]
|
||||
|
||||
const orm = await mikroOrmCreateConnection(
|
||||
dbData,
|
||||
entities,
|
||||
pathToMigrations
|
||||
)
|
||||
|
||||
try {
|
||||
const migrator = orm.getMigrator()
|
||||
|
||||
const pendingMigrations = await migrator.getPendingMigrations()
|
||||
logger.info(
|
||||
`Running pending migrations: ${JSON.stringify(
|
||||
pendingMigrations,
|
||||
null,
|
||||
2
|
||||
)}`
|
||||
)
|
||||
|
||||
await migrator.up({
|
||||
migrations: pendingMigrations.map((m) => m.name),
|
||||
})
|
||||
|
||||
logger.info(`${upperCaseFirst(moduleName)} module migration executed`)
|
||||
} catch (error) {
|
||||
logger.error(
|
||||
`${upperCaseFirst(
|
||||
moduleName
|
||||
)} module migration failed to run - Error: ${error}`
|
||||
)
|
||||
}
|
||||
|
||||
await orm.close()
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,73 @@
|
||||
import { LoaderOptions, Logger, ModulesSdkTypes } from "@medusajs/types"
|
||||
import { EntitySchema } from "@mikro-orm/core"
|
||||
import { EOL } from "os"
|
||||
import { resolve } from "path"
|
||||
import { mikroOrmCreateConnection } from "../../dal"
|
||||
import { loadDatabaseConfig } from "../load-module-database-config"
|
||||
|
||||
/**
|
||||
* Utility function to build a seed script that will insert the seed data.
|
||||
* @param moduleName
|
||||
* @param models
|
||||
* @param pathToMigrations
|
||||
* @param seedHandler
|
||||
*/
|
||||
export function buildSeedScript({
|
||||
moduleName,
|
||||
models,
|
||||
pathToMigrations,
|
||||
seedHandler,
|
||||
}: {
|
||||
moduleName: string
|
||||
models: Record<string, unknown>
|
||||
pathToMigrations: string
|
||||
seedHandler: (args: {
|
||||
manager: any
|
||||
logger: Logger
|
||||
data: any
|
||||
}) => Promise<void>
|
||||
}) {
|
||||
return async function ({
|
||||
options,
|
||||
logger,
|
||||
path,
|
||||
}: Partial<
|
||||
Pick<
|
||||
LoaderOptions<ModulesSdkTypes.ModuleServiceInitializeOptions>,
|
||||
"options" | "logger"
|
||||
>
|
||||
> & {
|
||||
path: string
|
||||
}) {
|
||||
const logger_ = (logger ?? console) as unknown as Logger
|
||||
|
||||
logger_.info(`Loading seed data from ${path}...`)
|
||||
const dataSeed = await import(resolve(process.cwd(), path)).catch((e) => {
|
||||
logger_.error(
|
||||
`Failed to load seed data from ${path}. Please, provide a relative path and check that you export the following productCategoriesData, productsData, variantsData.${EOL}${e}`
|
||||
)
|
||||
throw e
|
||||
})
|
||||
|
||||
const dbData = loadDatabaseConfig(moduleName, options)!
|
||||
const entities = Object.values(models) as unknown as EntitySchema[]
|
||||
|
||||
const orm = await mikroOrmCreateConnection(
|
||||
dbData,
|
||||
entities,
|
||||
pathToMigrations
|
||||
)
|
||||
const manager = orm.em.fork()
|
||||
|
||||
try {
|
||||
logger_.info(`Inserting ${moduleName} data...`)
|
||||
seedHandler({ manager, logger: logger_, data: dataSeed })
|
||||
} catch (e) {
|
||||
logger_.error(
|
||||
`Failed to insert the seed data in the PostgreSQL database ${dbData.clientUrl}.${EOL}${e}`
|
||||
)
|
||||
}
|
||||
|
||||
await orm.close(true)
|
||||
}
|
||||
}
|
||||
Reference in New Issue
Block a user