chore(): start moving some packages to the core directory (#7215)
This commit is contained in:
committed by
GitHub
parent
fdee748eed
commit
bbccd6481d
189
packages/core/orchestration/src/__fixtures__/joiner/data.ts
Normal file
189
packages/core/orchestration/src/__fixtures__/joiner/data.ts
Normal file
@@ -0,0 +1,189 @@
|
||||
export const remoteJoinerData = {
|
||||
user: [
|
||||
{
|
||||
id: 1,
|
||||
email: "johndoe@example.com",
|
||||
name: "John Doe",
|
||||
fullname: "John Doe full name",
|
||||
products: [
|
||||
{
|
||||
id: 1,
|
||||
product_id: 102,
|
||||
},
|
||||
],
|
||||
nested: {
|
||||
lala: "lala",
|
||||
multiple: [
|
||||
{
|
||||
abc: 1,
|
||||
},
|
||||
{
|
||||
abc: 2,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
email: "janedoe@example.com",
|
||||
name: "Jane Doe",
|
||||
products: [
|
||||
{
|
||||
id: 2,
|
||||
product_id: [101, 102],
|
||||
},
|
||||
],
|
||||
nested: {
|
||||
lala: "lele",
|
||||
multiple: [
|
||||
{
|
||||
a: 33,
|
||||
},
|
||||
{
|
||||
a: 44,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
email: "aaa@example.com",
|
||||
name: "aaa bbb",
|
||||
fullname: "3333 Doe full name",
|
||||
nested: {
|
||||
lala: "lolo",
|
||||
multiple: [
|
||||
{
|
||||
a: 555,
|
||||
},
|
||||
{
|
||||
a: 555,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
email: "444444@example.com",
|
||||
name: "a4444 44 44",
|
||||
fullname: "444 Doe full name",
|
||||
products: [
|
||||
{
|
||||
id: 4,
|
||||
product_id: 103,
|
||||
},
|
||||
],
|
||||
nested: {
|
||||
lala: "lulu",
|
||||
multiple: [
|
||||
{
|
||||
a: 6666,
|
||||
},
|
||||
{
|
||||
a: 7777,
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
product: {
|
||||
rows: [
|
||||
{
|
||||
id: 101,
|
||||
name: "Product 1",
|
||||
handler: "product-1-handler",
|
||||
user_id: 2,
|
||||
},
|
||||
{
|
||||
id: 102,
|
||||
name: "Product 2",
|
||||
handler: "product-2-handler",
|
||||
user_id: 1,
|
||||
},
|
||||
{
|
||||
id: 103,
|
||||
name: "Product 3",
|
||||
handler: "product-3-handler",
|
||||
user_id: 3,
|
||||
},
|
||||
],
|
||||
limit: 3,
|
||||
skip: 0,
|
||||
},
|
||||
variant: [
|
||||
{
|
||||
id: 991,
|
||||
name: "Product variant 1",
|
||||
product_id: 101,
|
||||
},
|
||||
{
|
||||
id: 992,
|
||||
name: "Product variant 2",
|
||||
product_id: 101,
|
||||
},
|
||||
{
|
||||
id: 993,
|
||||
name: "Product variant 33",
|
||||
product_id: 103,
|
||||
},
|
||||
],
|
||||
order_variant: [
|
||||
{
|
||||
order_id: 201,
|
||||
product_id: 101,
|
||||
variant_id: 991,
|
||||
quantity: 1,
|
||||
},
|
||||
{
|
||||
order_id: 201,
|
||||
product_id: 101,
|
||||
variant_id: 992,
|
||||
quantity: 5,
|
||||
},
|
||||
{
|
||||
order_id: 205,
|
||||
product_id: 101,
|
||||
variant_id: 992,
|
||||
quantity: 4,
|
||||
},
|
||||
{
|
||||
order_id: 205,
|
||||
product_id: 103,
|
||||
variant_id: 993,
|
||||
quantity: 1,
|
||||
},
|
||||
],
|
||||
order: [
|
||||
{
|
||||
id: 201,
|
||||
number: "ORD-001",
|
||||
date: "2023-04-01T12:00:00Z",
|
||||
products: [
|
||||
{
|
||||
product_id: 101,
|
||||
variant_id: 991,
|
||||
quantity: 1,
|
||||
},
|
||||
{
|
||||
product_id: 101,
|
||||
variant_id: 992,
|
||||
quantity: 5,
|
||||
},
|
||||
],
|
||||
user_id: 4,
|
||||
},
|
||||
{
|
||||
id: 205,
|
||||
number: "ORD-202",
|
||||
date: "2023-04-01T12:00:00Z",
|
||||
products: [
|
||||
{
|
||||
product_id: [101, 103],
|
||||
variant_id: 993,
|
||||
quantity: 4,
|
||||
},
|
||||
],
|
||||
user_id: 1,
|
||||
},
|
||||
],
|
||||
}
|
||||
149
packages/core/orchestration/src/__mocks__/joiner/mock_data.ts
Normal file
149
packages/core/orchestration/src/__mocks__/joiner/mock_data.ts
Normal file
@@ -0,0 +1,149 @@
|
||||
import { JoinerServiceConfig } from "@medusajs/types"
|
||||
import { remoteJoinerData } from "./../../__fixtures__/joiner/data"
|
||||
|
||||
export const serviceConfigs: JoinerServiceConfig[] = [
|
||||
{
|
||||
serviceName: "user",
|
||||
primaryKeys: ["id"],
|
||||
args: {
|
||||
methodSuffix: "User",
|
||||
},
|
||||
alias: [
|
||||
{
|
||||
name: "me",
|
||||
args: {
|
||||
extraArgument: 123,
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "customer",
|
||||
},
|
||||
],
|
||||
relationships: [
|
||||
{
|
||||
foreignKey: "products.product_id",
|
||||
serviceName: "product",
|
||||
primaryKey: "id",
|
||||
alias: "product",
|
||||
},
|
||||
],
|
||||
extends: [
|
||||
{
|
||||
serviceName: "variantService",
|
||||
relationship: {
|
||||
foreignKey: "user_id",
|
||||
serviceName: "user",
|
||||
primaryKey: "id",
|
||||
alias: "user",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
serviceName: "product",
|
||||
primaryKeys: ["id", "sku"],
|
||||
relationships: [
|
||||
{
|
||||
foreignKey: "user_id",
|
||||
serviceName: "user",
|
||||
primaryKey: "id",
|
||||
alias: "user",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
serviceName: "variantService",
|
||||
alias: {
|
||||
name: "variant",
|
||||
},
|
||||
fieldAlias: {
|
||||
user_shortcut: "product.user",
|
||||
},
|
||||
primaryKeys: ["id"],
|
||||
relationships: [
|
||||
{
|
||||
foreignKey: "product_id",
|
||||
serviceName: "product",
|
||||
primaryKey: "id",
|
||||
alias: "product",
|
||||
},
|
||||
{
|
||||
foreignKey: "variant_id",
|
||||
primaryKey: "id",
|
||||
serviceName: "order",
|
||||
alias: "orders",
|
||||
inverse: true, // In an inverted relationship the foreign key is on Order and the primary key is on variant
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
serviceName: "order",
|
||||
fieldAlias: {
|
||||
product_user_alias: {
|
||||
path: "products.product.user",
|
||||
forwardArgumentsOnPath: ["products.product"],
|
||||
},
|
||||
},
|
||||
primaryKeys: ["id"],
|
||||
relationships: [
|
||||
{
|
||||
foreignKey: "product_id",
|
||||
serviceName: "product",
|
||||
primaryKey: "id",
|
||||
alias: "product",
|
||||
},
|
||||
{
|
||||
foreignKey: "products.variant_id,product_id",
|
||||
serviceName: "variantService",
|
||||
primaryKey: "id,product_id",
|
||||
alias: "variant",
|
||||
},
|
||||
{
|
||||
foreignKey: "user_id",
|
||||
serviceName: "user",
|
||||
primaryKey: "id",
|
||||
alias: "user",
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
export const mockServiceList = (serviceName) => {
|
||||
return jest.fn().mockImplementation((data) => {
|
||||
const src = {
|
||||
userService: remoteJoinerData.user,
|
||||
productService: remoteJoinerData.product,
|
||||
variantService: remoteJoinerData.variant,
|
||||
orderService: remoteJoinerData.order,
|
||||
}
|
||||
|
||||
let resultset = JSON.parse(JSON.stringify(src[serviceName]))
|
||||
|
||||
if (
|
||||
serviceName === "userService" &&
|
||||
!data.fields?.some((field) => field.includes("multiple"))
|
||||
) {
|
||||
resultset = resultset.map((item) => {
|
||||
delete item.nested.multiple
|
||||
return item
|
||||
})
|
||||
}
|
||||
|
||||
// mock filtering on service order
|
||||
if (serviceName === "orderService" && data.options?.id) {
|
||||
resultset = resultset.filter((item) => data.options.id.includes(item.id))
|
||||
}
|
||||
|
||||
return {
|
||||
data: resultset,
|
||||
path: serviceName === "productService" ? "rows" : undefined,
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
export const serviceMock = {
|
||||
orderService: mockServiceList("orderService"),
|
||||
userService: mockServiceList("userService"),
|
||||
productService: mockServiceList("productService"),
|
||||
variantService: mockServiceList("variantService"),
|
||||
}
|
||||
358
packages/core/orchestration/src/__tests__/joiner/graphql-ast.ts
Normal file
358
packages/core/orchestration/src/__tests__/joiner/graphql-ast.ts
Normal file
@@ -0,0 +1,358 @@
|
||||
import GraphQLParser from "../../joiner/graphql-ast"
|
||||
|
||||
describe("RemoteJoiner.parseQuery", () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it("Simple query with fields", async () => {
|
||||
const graphqlQuery = `
|
||||
query {
|
||||
order {
|
||||
id
|
||||
number
|
||||
date
|
||||
}
|
||||
}
|
||||
`
|
||||
const parser = new GraphQLParser(graphqlQuery)
|
||||
const rjQuery = parser.parseQuery()
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "order",
|
||||
fields: ["id", "number", "date"],
|
||||
expands: [],
|
||||
})
|
||||
})
|
||||
|
||||
it("Simple query with fields and arguments", async () => {
|
||||
const graphqlQuery = `
|
||||
query {
|
||||
order(
|
||||
id: "ord_123",
|
||||
another_arg: 987,
|
||||
complexArg: {
|
||||
id: "123",
|
||||
name: "test",
|
||||
nestedArg: {
|
||||
nest_id: "abc",
|
||||
num: 123
|
||||
}
|
||||
}
|
||||
) {
|
||||
id
|
||||
number
|
||||
date
|
||||
}
|
||||
}
|
||||
`
|
||||
const parser = new GraphQLParser(graphqlQuery)
|
||||
const rjQuery = parser.parseQuery()
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "order",
|
||||
fields: ["id", "number", "date"],
|
||||
expands: [],
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "ord_123",
|
||||
},
|
||||
{
|
||||
name: "another_arg",
|
||||
value: 987,
|
||||
},
|
||||
{
|
||||
name: "complexArg",
|
||||
value: {
|
||||
id: "123",
|
||||
name: "test",
|
||||
nestedArg: {
|
||||
nest_id: "abc",
|
||||
num: 123,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("Simple query with mapping fields to services", async () => {
|
||||
const graphqlQuery = `
|
||||
query {
|
||||
order {
|
||||
id
|
||||
number
|
||||
date
|
||||
products {
|
||||
product_id
|
||||
variant_id
|
||||
order
|
||||
variant {
|
||||
name
|
||||
sku
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
const parser = new GraphQLParser(graphqlQuery, {})
|
||||
const rjQuery = parser.parseQuery()
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "order",
|
||||
fields: ["id", "number", "date", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product_id", "variant_id", "order", "variant"],
|
||||
},
|
||||
{
|
||||
property: "products.variant",
|
||||
fields: ["name", "sku"],
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("Nested query with fields", async () => {
|
||||
const graphqlQuery = `
|
||||
query {
|
||||
order {
|
||||
id
|
||||
number
|
||||
date
|
||||
products {
|
||||
product_id
|
||||
variant_id
|
||||
order
|
||||
variant {
|
||||
name
|
||||
sku
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
const parser = new GraphQLParser(graphqlQuery)
|
||||
const rjQuery = parser.parseQuery()
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "order",
|
||||
fields: ["id", "number", "date", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product_id", "variant_id", "order", "variant"],
|
||||
},
|
||||
{
|
||||
property: "products.variant",
|
||||
fields: ["name", "sku"],
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("Nested query with fields and arguments", async () => {
|
||||
const graphqlQuery = `
|
||||
query {
|
||||
order (order_id: "ord_123") {
|
||||
id
|
||||
number
|
||||
date
|
||||
products (limit: 10) {
|
||||
product_id
|
||||
variant_id
|
||||
order
|
||||
variant (complexArg: { id: "123", name: "test", nestedArg: { nest_id: "abc", num: 123 } }, region_id: "reg_123") {
|
||||
name
|
||||
sku
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
const parser = new GraphQLParser(graphqlQuery)
|
||||
const rjQuery = parser.parseQuery()
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "order",
|
||||
fields: ["id", "number", "date", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product_id", "variant_id", "order", "variant"],
|
||||
args: [
|
||||
{
|
||||
name: "limit",
|
||||
value: 10,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
property: "products.variant",
|
||||
fields: ["name", "sku"],
|
||||
args: [
|
||||
{
|
||||
name: "complexArg",
|
||||
value: {
|
||||
id: "123",
|
||||
name: "test",
|
||||
nestedArg: {
|
||||
nest_id: "abc",
|
||||
num: 123,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "region_id",
|
||||
value: "reg_123",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
args: [
|
||||
{
|
||||
name: "order_id",
|
||||
value: "ord_123",
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("Nested query with fields and arguments using variables", async () => {
|
||||
const graphqlQuery = `
|
||||
query($orderId: ID, $anotherArg: String, $randomVariable: nonValidatedType) {
|
||||
order (order_id: $orderId, anotherArg: $anotherArg) {
|
||||
id
|
||||
number
|
||||
date
|
||||
products (randomValue: $randomVariable) {
|
||||
product_id
|
||||
variant_id
|
||||
order
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
const parser = new GraphQLParser(graphqlQuery, {
|
||||
orderId: 123,
|
||||
randomVariable: { complex: { num: 12343, str: "str_123" } },
|
||||
anotherArg: "any string",
|
||||
})
|
||||
const rjQuery = parser.parseQuery()
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "order",
|
||||
fields: ["id", "number", "date", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product_id", "variant_id", "order"],
|
||||
args: [
|
||||
{
|
||||
name: "randomValue",
|
||||
value: {
|
||||
complex: {
|
||||
num: 12343,
|
||||
str: "str_123",
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
args: [
|
||||
{
|
||||
name: "order_id",
|
||||
value: 123,
|
||||
},
|
||||
{
|
||||
name: "anotherArg",
|
||||
value: "any string",
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("Nested query with fields and directives", async () => {
|
||||
const graphqlQuery = `
|
||||
query {
|
||||
order(regularArgs: 123) {
|
||||
id
|
||||
number @include(if: "date > '2020-01-01'")
|
||||
date
|
||||
products {
|
||||
product_id
|
||||
variant_id
|
||||
variant @count {
|
||||
name @lowerCase
|
||||
sku @include(if: "name == 'test'")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`
|
||||
const parser = new GraphQLParser(graphqlQuery)
|
||||
const rjQuery = parser.parseQuery()
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "order",
|
||||
fields: ["id", "number", "date", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product_id", "variant_id", "variant"],
|
||||
directives: {
|
||||
variant: [
|
||||
{
|
||||
name: "count",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
{
|
||||
property: "products.variant",
|
||||
fields: ["name", "sku"],
|
||||
directives: {
|
||||
name: [
|
||||
{
|
||||
name: "lowerCase",
|
||||
},
|
||||
],
|
||||
sku: [
|
||||
{
|
||||
name: "include",
|
||||
args: [
|
||||
{
|
||||
name: "if",
|
||||
value: "name == 'test'",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
],
|
||||
args: [
|
||||
{
|
||||
name: "regularArgs",
|
||||
value: 123,
|
||||
},
|
||||
],
|
||||
directives: {
|
||||
number: [
|
||||
{
|
||||
name: "include",
|
||||
args: [
|
||||
{
|
||||
name: "if",
|
||||
value: "date > '2020-01-01'",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
222
packages/core/orchestration/src/__tests__/joiner/helpers.ts
Normal file
222
packages/core/orchestration/src/__tests__/joiner/helpers.ts
Normal file
@@ -0,0 +1,222 @@
|
||||
import { toRemoteJoinerQuery } from "../../joiner/helpers"
|
||||
|
||||
describe("toRemoteJoinerQuery", () => {
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it("should transform a simple object to a Remote Joiner Query format", async () => {
|
||||
const obj = {
|
||||
product: {
|
||||
fields: ["id", "title", "handle"],
|
||||
},
|
||||
}
|
||||
|
||||
const rjQuery = toRemoteJoinerQuery(obj)
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "product",
|
||||
fields: ["id", "title", "handle"],
|
||||
expands: [],
|
||||
})
|
||||
})
|
||||
|
||||
it("should transform a nested object to a Remote Joiner Query format", async () => {
|
||||
const obj = {
|
||||
product: {
|
||||
fields: ["id", "title", "handle"],
|
||||
variants: {
|
||||
fields: ["sku"],
|
||||
shipping_profiles: {
|
||||
profile: {
|
||||
fields: ["id", "name"],
|
||||
},
|
||||
},
|
||||
options: {
|
||||
fields: ["value"],
|
||||
},
|
||||
},
|
||||
options: {
|
||||
fields: ["value", "name"],
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const rjQuery = toRemoteJoinerQuery(obj)
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "product",
|
||||
fields: ["id", "title", "handle"],
|
||||
expands: [
|
||||
{
|
||||
property: "variants",
|
||||
fields: ["sku"],
|
||||
},
|
||||
{
|
||||
property: "variants.shipping_profiles",
|
||||
},
|
||||
{
|
||||
property: "variants.shipping_profiles.profile",
|
||||
fields: ["id", "name"],
|
||||
},
|
||||
{
|
||||
property: "variants.options",
|
||||
fields: ["value"],
|
||||
},
|
||||
{
|
||||
property: "options",
|
||||
fields: ["value", "name"],
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("should transform a nested object with arguments and directives to a Remote Joiner Query format", async () => {
|
||||
const obj = {
|
||||
product: {
|
||||
fields: ["id", "title", "handle"],
|
||||
__args: {
|
||||
limit: 10,
|
||||
offset: 0,
|
||||
},
|
||||
variants: {
|
||||
fields: ["sku"],
|
||||
__directives: {
|
||||
directiveName: "value",
|
||||
},
|
||||
shipping_profiles: {
|
||||
profile: {
|
||||
fields: ["id", "name"],
|
||||
__args: {
|
||||
context: {
|
||||
customer_group: "cg_123",
|
||||
region_id: "US",
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const rjQuery = toRemoteJoinerQuery(obj)
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "product",
|
||||
fields: ["id", "title", "handle"],
|
||||
expands: [
|
||||
{
|
||||
property: "variants",
|
||||
directives: [
|
||||
{
|
||||
name: "directiveName",
|
||||
value: "value",
|
||||
},
|
||||
],
|
||||
fields: ["sku"],
|
||||
},
|
||||
{
|
||||
property: "variants.shipping_profiles",
|
||||
},
|
||||
{
|
||||
property: "variants.shipping_profiles.profile",
|
||||
args: [
|
||||
{
|
||||
name: "context",
|
||||
value: {
|
||||
customer_group: "cg_123",
|
||||
region_id: "US",
|
||||
},
|
||||
},
|
||||
],
|
||||
fields: ["id", "name"],
|
||||
},
|
||||
],
|
||||
args: [
|
||||
{
|
||||
name: "limit",
|
||||
value: 10,
|
||||
},
|
||||
{
|
||||
name: "offset",
|
||||
value: 0,
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("should transform a nested object with arguments and directives to a Remote Joiner Query format only using variables", async () => {
|
||||
const obj = {
|
||||
product: {
|
||||
fields: ["id", "title", "handle"],
|
||||
variants: {
|
||||
fields: ["sku"],
|
||||
__directives: {
|
||||
directiveName: "value",
|
||||
},
|
||||
shipping_profiles: {
|
||||
profile: {
|
||||
fields: ["id", "name"],
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const rjQuery = toRemoteJoinerQuery(obj, {
|
||||
product: {
|
||||
limit: 10,
|
||||
offset: 0,
|
||||
},
|
||||
"product.variants.shipping_profiles.profile": {
|
||||
context: {
|
||||
customer_group: "cg_123",
|
||||
region_id: "US",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(rjQuery).toEqual({
|
||||
alias: "product",
|
||||
fields: ["id", "title", "handle"],
|
||||
expands: [
|
||||
{
|
||||
property: "variants",
|
||||
directives: [
|
||||
{
|
||||
name: "directiveName",
|
||||
value: "value",
|
||||
},
|
||||
],
|
||||
fields: ["sku"],
|
||||
},
|
||||
{
|
||||
property: "variants.shipping_profiles",
|
||||
},
|
||||
{
|
||||
property: "variants.shipping_profiles.profile",
|
||||
args: [
|
||||
{
|
||||
name: "context",
|
||||
value: {
|
||||
customer_group: "cg_123",
|
||||
region_id: "US",
|
||||
},
|
||||
},
|
||||
],
|
||||
fields: ["id", "name"],
|
||||
},
|
||||
],
|
||||
args: [
|
||||
{
|
||||
name: "limit",
|
||||
value: 10,
|
||||
},
|
||||
{
|
||||
name: "offset",
|
||||
value: 0,
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,830 @@
|
||||
import { MedusaContainer, RemoteExpandProperty } from "@medusajs/types"
|
||||
import { lowerCaseFirst, toPascalCase } from "@medusajs/utils"
|
||||
import { remoteJoinerData } from "../../__fixtures__/joiner/data"
|
||||
import { serviceConfigs, serviceMock } from "../../__mocks__/joiner/mock_data"
|
||||
import { RemoteJoiner } from "../../joiner"
|
||||
|
||||
const container = {
|
||||
resolve: (serviceName) => {
|
||||
return {
|
||||
list: (...args) => {
|
||||
return serviceMock[serviceName].apply(this, args)
|
||||
},
|
||||
getByVariantId: (options) => {
|
||||
if (serviceName !== "orderService") {
|
||||
return
|
||||
}
|
||||
|
||||
let orderVar = JSON.parse(
|
||||
JSON.stringify(remoteJoinerData.order_variant)
|
||||
)
|
||||
|
||||
if (options.expands?.order) {
|
||||
orderVar = orderVar.map((item) => {
|
||||
item.order = JSON.parse(
|
||||
JSON.stringify(
|
||||
remoteJoinerData.order.find((o) => o.id === item.order_id)
|
||||
)
|
||||
)
|
||||
return item
|
||||
})
|
||||
}
|
||||
|
||||
return {
|
||||
data: orderVar,
|
||||
}
|
||||
},
|
||||
}
|
||||
},
|
||||
} as MedusaContainer
|
||||
|
||||
const callbacks = jest.fn()
|
||||
const fetchServiceDataCallback = jest.fn(
|
||||
async (
|
||||
expand: RemoteExpandProperty,
|
||||
pkField: string,
|
||||
ids?: (unknown | unknown[])[],
|
||||
relationship?: any
|
||||
) => {
|
||||
const serviceConfig = expand.serviceConfig
|
||||
const moduleRegistryName = !serviceConfig.serviceName.endsWith("Service")
|
||||
? lowerCaseFirst(serviceConfig.serviceName) + "Service"
|
||||
: serviceConfig.serviceName
|
||||
|
||||
const service = container.resolve(moduleRegistryName)
|
||||
const methodName = relationship?.inverse
|
||||
? `getBy${toPascalCase(pkField)}`
|
||||
: "list"
|
||||
|
||||
callbacks({
|
||||
service: serviceConfig.serviceName,
|
||||
fieds: expand.fields,
|
||||
args: expand.args,
|
||||
})
|
||||
|
||||
return await service[methodName]({
|
||||
fields: expand.fields,
|
||||
args: expand.args,
|
||||
expands: expand.expands,
|
||||
options: {
|
||||
[pkField]: ids,
|
||||
},
|
||||
})
|
||||
}
|
||||
)
|
||||
|
||||
describe("RemoteJoiner", () => {
|
||||
let joiner: RemoteJoiner
|
||||
beforeAll(() => {
|
||||
joiner = new RemoteJoiner(serviceConfigs, fetchServiceDataCallback)
|
||||
})
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it("Simple query of a service, its id and no fields specified", async () => {
|
||||
const query = {
|
||||
service: "user",
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "1",
|
||||
},
|
||||
],
|
||||
fields: ["id", "name", "email"],
|
||||
}
|
||||
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: 1,
|
||||
name: "John Doe",
|
||||
email: "johndoe@example.com",
|
||||
},
|
||||
{
|
||||
id: 2,
|
||||
name: "Jane Doe",
|
||||
email: "janedoe@example.com",
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "aaa bbb",
|
||||
email: "aaa@example.com",
|
||||
},
|
||||
{
|
||||
id: 4,
|
||||
name: "a4444 44 44",
|
||||
email: "444444@example.com",
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("Simple query of a service where the returned data contains multiple properties", async () => {
|
||||
const query = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
product {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
`)
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(data).toEqual({
|
||||
rows: [
|
||||
{
|
||||
id: 101,
|
||||
name: "Product 1",
|
||||
},
|
||||
{
|
||||
id: 102,
|
||||
name: "Product 2",
|
||||
},
|
||||
{
|
||||
id: 103,
|
||||
name: "Product 3",
|
||||
},
|
||||
],
|
||||
limit: 3,
|
||||
skip: 0,
|
||||
})
|
||||
})
|
||||
|
||||
it("Query of a service, expanding a property and restricting the fields expanded", async () => {
|
||||
const query = {
|
||||
service: "user",
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "1",
|
||||
},
|
||||
],
|
||||
fields: ["username", "email", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products.product",
|
||||
fields: ["name"],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
const data = await joiner.query(query)
|
||||
expect(data).toEqual([
|
||||
{
|
||||
email: "johndoe@example.com",
|
||||
products: [
|
||||
{
|
||||
product_id: 102,
|
||||
product: {
|
||||
name: "Product 2",
|
||||
id: 102,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
email: "janedoe@example.com",
|
||||
products: [
|
||||
{
|
||||
product_id: [101, 102],
|
||||
product: [
|
||||
{
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
{
|
||||
name: "Product 2",
|
||||
id: 102,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
email: "aaa@example.com",
|
||||
},
|
||||
{
|
||||
email: "444444@example.com",
|
||||
products: [
|
||||
{
|
||||
product_id: 103,
|
||||
product: {
|
||||
name: "Product 3",
|
||||
id: 103,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("Query a service expanding multiple nested properties", async () => {
|
||||
const query = {
|
||||
service: "order",
|
||||
fields: ["number", "date", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product"],
|
||||
},
|
||||
{
|
||||
property: "products.product",
|
||||
fields: ["name"],
|
||||
},
|
||||
{
|
||||
property: "user",
|
||||
fields: ["fullname", "email", "products"],
|
||||
},
|
||||
{
|
||||
property: "user.products.product",
|
||||
fields: ["name"],
|
||||
},
|
||||
],
|
||||
args: [],
|
||||
}
|
||||
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
number: "ORD-001",
|
||||
date: "2023-04-01T12:00:00Z",
|
||||
products: [
|
||||
{
|
||||
product_id: 101,
|
||||
product: {
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
},
|
||||
{
|
||||
product_id: 101,
|
||||
product: {
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
},
|
||||
],
|
||||
user_id: 4,
|
||||
user: {
|
||||
fullname: "444 Doe full name",
|
||||
email: "444444@example.com",
|
||||
products: [
|
||||
{
|
||||
product_id: 103,
|
||||
product: {
|
||||
name: "Product 3",
|
||||
id: 103,
|
||||
},
|
||||
},
|
||||
],
|
||||
id: 4,
|
||||
},
|
||||
},
|
||||
{
|
||||
number: "ORD-202",
|
||||
date: "2023-04-01T12:00:00Z",
|
||||
products: [
|
||||
{
|
||||
product_id: [101, 103],
|
||||
product: [
|
||||
{
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
{
|
||||
name: "Product 3",
|
||||
id: 103,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
user_id: 1,
|
||||
user: {
|
||||
fullname: "John Doe full name",
|
||||
email: "johndoe@example.com",
|
||||
products: [
|
||||
{
|
||||
product_id: 102,
|
||||
product: {
|
||||
name: "Product 2",
|
||||
id: 102,
|
||||
},
|
||||
},
|
||||
],
|
||||
id: 1,
|
||||
},
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("Query a service expanding an inverse relation", async () => {
|
||||
const query = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
variant {
|
||||
id
|
||||
name
|
||||
orders {
|
||||
order {
|
||||
number
|
||||
products {
|
||||
quantity
|
||||
product {
|
||||
name
|
||||
}
|
||||
variant {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`)
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(data).toEqual([
|
||||
{
|
||||
id: 991,
|
||||
name: "Product variant 1",
|
||||
orders: {
|
||||
order: {
|
||||
number: "ORD-001",
|
||||
products: [
|
||||
{
|
||||
product_id: 101,
|
||||
variant_id: 991,
|
||||
quantity: 1,
|
||||
product: {
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
variant: {
|
||||
name: "Product variant 1",
|
||||
id: 991,
|
||||
product_id: 101,
|
||||
},
|
||||
},
|
||||
{
|
||||
product_id: 101,
|
||||
variant_id: 992,
|
||||
quantity: 5,
|
||||
product: {
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
variant: {
|
||||
name: "Product variant 2",
|
||||
id: 992,
|
||||
product_id: 101,
|
||||
},
|
||||
},
|
||||
],
|
||||
id: 201,
|
||||
},
|
||||
variant_id: 991,
|
||||
order_id: 201,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: 992,
|
||||
name: "Product variant 2",
|
||||
orders: [
|
||||
{
|
||||
order: {
|
||||
number: "ORD-001",
|
||||
products: [
|
||||
{
|
||||
product_id: 101,
|
||||
variant_id: 991,
|
||||
quantity: 1,
|
||||
product: {
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
variant: {
|
||||
name: "Product variant 1",
|
||||
id: 991,
|
||||
product_id: 101,
|
||||
},
|
||||
},
|
||||
{
|
||||
product_id: 101,
|
||||
variant_id: 992,
|
||||
quantity: 5,
|
||||
product: {
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
variant: {
|
||||
name: "Product variant 2",
|
||||
id: 992,
|
||||
product_id: 101,
|
||||
},
|
||||
},
|
||||
],
|
||||
id: 201,
|
||||
},
|
||||
variant_id: 992,
|
||||
order_id: 201,
|
||||
},
|
||||
{
|
||||
order: {
|
||||
number: "ORD-202",
|
||||
products: [
|
||||
{
|
||||
product_id: [101, 103],
|
||||
variant_id: 993,
|
||||
quantity: 4,
|
||||
product: [
|
||||
{
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
{
|
||||
name: "Product 3",
|
||||
id: 103,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
id: 205,
|
||||
},
|
||||
variant_id: 992,
|
||||
order_id: 205,
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: 993,
|
||||
name: "Product variant 33",
|
||||
orders: {
|
||||
order: {
|
||||
number: "ORD-202",
|
||||
products: [
|
||||
{
|
||||
product_id: [101, 103],
|
||||
variant_id: 993,
|
||||
quantity: 4,
|
||||
product: [
|
||||
{
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
},
|
||||
{
|
||||
name: "Product 3",
|
||||
id: 103,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
id: 205,
|
||||
},
|
||||
variant_id: 993,
|
||||
order_id: 205,
|
||||
},
|
||||
},
|
||||
])
|
||||
})
|
||||
|
||||
it("Should query an field alias and cleanup unused nested levels", async () => {
|
||||
const query = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
order {
|
||||
product_user_alias {
|
||||
email
|
||||
}
|
||||
}
|
||||
}
|
||||
`)
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(data).toEqual([
|
||||
expect.objectContaining({
|
||||
product_user_alias: [
|
||||
{
|
||||
email: "janedoe@example.com",
|
||||
id: 2,
|
||||
},
|
||||
{
|
||||
email: "janedoe@example.com",
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
}),
|
||||
expect.objectContaining({
|
||||
product_user_alias: [
|
||||
{
|
||||
email: "janedoe@example.com",
|
||||
id: 2,
|
||||
},
|
||||
{
|
||||
email: "aaa@example.com",
|
||||
id: 3,
|
||||
},
|
||||
],
|
||||
}),
|
||||
])
|
||||
expect(data[0].products[0].product).toEqual(undefined)
|
||||
})
|
||||
|
||||
it("Should query an field alias and keep queried nested levels", async () => {
|
||||
const query = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
order {
|
||||
product_user_alias {
|
||||
email
|
||||
}
|
||||
products {
|
||||
product {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`)
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(data).toEqual([
|
||||
expect.objectContaining({
|
||||
product_user_alias: [
|
||||
{
|
||||
email: "janedoe@example.com",
|
||||
id: 2,
|
||||
},
|
||||
{
|
||||
email: "janedoe@example.com",
|
||||
id: 2,
|
||||
},
|
||||
],
|
||||
}),
|
||||
expect.objectContaining({
|
||||
product_user_alias: [
|
||||
{
|
||||
email: "janedoe@example.com",
|
||||
id: 2,
|
||||
},
|
||||
{
|
||||
email: "aaa@example.com",
|
||||
id: 3,
|
||||
},
|
||||
],
|
||||
}),
|
||||
])
|
||||
expect(data[0].products[0].product).toEqual({
|
||||
name: "Product 1",
|
||||
id: 101,
|
||||
user_id: 2,
|
||||
})
|
||||
expect(data[0].products[0].product.user).toEqual(undefined)
|
||||
})
|
||||
|
||||
it("Should query an field alias and merge requested fields on alias and on the relationship", async () => {
|
||||
const query = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
order {
|
||||
product_user_alias {
|
||||
email
|
||||
}
|
||||
products {
|
||||
product {
|
||||
user {
|
||||
name
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`)
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(data).toEqual([
|
||||
expect.objectContaining({
|
||||
product_user_alias: [
|
||||
{
|
||||
name: "Jane Doe",
|
||||
id: 2,
|
||||
email: "janedoe@example.com",
|
||||
},
|
||||
{
|
||||
name: "Jane Doe",
|
||||
id: 2,
|
||||
email: "janedoe@example.com",
|
||||
},
|
||||
],
|
||||
}),
|
||||
expect.objectContaining({
|
||||
product_user_alias: [
|
||||
{
|
||||
name: "Jane Doe",
|
||||
id: 2,
|
||||
email: "janedoe@example.com",
|
||||
},
|
||||
{
|
||||
name: "aaa bbb",
|
||||
id: 3,
|
||||
email: "aaa@example.com",
|
||||
},
|
||||
],
|
||||
}),
|
||||
])
|
||||
expect(data[0].products[0].product).toEqual({
|
||||
id: 101,
|
||||
user_id: 2,
|
||||
user: {
|
||||
name: "Jane Doe",
|
||||
id: 2,
|
||||
email: "janedoe@example.com",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("Should query multiple aliases and pass the arguments where defined on 'forwardArgumentsOnPath'", async () => {
|
||||
const query = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
order {
|
||||
id
|
||||
product_user_alias (arg: { random: 123 }) {
|
||||
name
|
||||
}
|
||||
products {
|
||||
variant {
|
||||
user_shortcut(arg: 123) {
|
||||
name
|
||||
email
|
||||
products {
|
||||
product {
|
||||
handler
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
`)
|
||||
const data = await joiner.query(query)
|
||||
|
||||
expect(callbacks.mock.calls).toEqual([
|
||||
[
|
||||
{
|
||||
service: "order",
|
||||
fieds: ["id", "products"],
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
service: "product",
|
||||
fieds: ["id", "user_id"],
|
||||
args: [
|
||||
{
|
||||
name: "arg",
|
||||
value: {
|
||||
random: 123,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
service: "user",
|
||||
fieds: ["name", "id"],
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
service: "variantService",
|
||||
fieds: ["id", "product_id"],
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
service: "product",
|
||||
fieds: ["id", "user_id"],
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
service: "user",
|
||||
fieds: ["name", "email", "products", "id"],
|
||||
},
|
||||
],
|
||||
[
|
||||
{
|
||||
service: "product",
|
||||
fieds: ["handler", "id"],
|
||||
},
|
||||
],
|
||||
])
|
||||
|
||||
expect(data[1]).toEqual(
|
||||
expect.objectContaining({
|
||||
product_user_alias: [
|
||||
{
|
||||
id: 2,
|
||||
name: "Jane Doe",
|
||||
},
|
||||
{
|
||||
id: 3,
|
||||
name: "aaa bbb",
|
||||
},
|
||||
],
|
||||
})
|
||||
)
|
||||
|
||||
expect(data[0].products[0]).toEqual({
|
||||
variant_id: 991,
|
||||
product_id: 101,
|
||||
variant: {
|
||||
id: 991,
|
||||
product_id: 101,
|
||||
user_shortcut: {
|
||||
email: "janedoe@example.com",
|
||||
id: 2,
|
||||
name: "Jane Doe",
|
||||
products: [
|
||||
{
|
||||
product_id: [101, 102],
|
||||
product: [
|
||||
{
|
||||
handler: "product-1-handler",
|
||||
id: 101,
|
||||
},
|
||||
{
|
||||
handler: "product-2-handler",
|
||||
id: 102,
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("It shouldn't register the service name as an alias if option autoCreateServiceNameAlias is false", async () => {
|
||||
const newJoiner = new RemoteJoiner(
|
||||
serviceConfigs,
|
||||
fetchServiceDataCallback,
|
||||
{ autoCreateServiceNameAlias: false }
|
||||
)
|
||||
|
||||
const query = {
|
||||
service: "user",
|
||||
fields: ["id", "name", "email"],
|
||||
}
|
||||
|
||||
const data = await newJoiner.query(query)
|
||||
|
||||
expect(data).toEqual(
|
||||
expect.arrayContaining([
|
||||
{
|
||||
id: 1,
|
||||
name: "John Doe",
|
||||
email: "johndoe@example.com",
|
||||
},
|
||||
])
|
||||
)
|
||||
|
||||
const queryWithAlias = {
|
||||
alias: "user",
|
||||
fields: ["id", "name", "email"],
|
||||
}
|
||||
|
||||
expect(newJoiner.query(queryWithAlias)).rejects.toThrowError(
|
||||
`Service with alias "user" was not found.`
|
||||
)
|
||||
})
|
||||
|
||||
it("Should throw when any key of the entrypoint isn't found", async () => {
|
||||
const query = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
order (id: 201) {
|
||||
id
|
||||
number
|
||||
}
|
||||
}
|
||||
`)
|
||||
const data = await joiner.query(query, {
|
||||
throwIfKeyNotFound: true,
|
||||
})
|
||||
|
||||
expect(data.length).toEqual(1)
|
||||
|
||||
const queryNotFound = RemoteJoiner.parseQuery(`
|
||||
query {
|
||||
order (id: "ord_1234556") {
|
||||
id
|
||||
number
|
||||
}
|
||||
}
|
||||
`)
|
||||
const dataNotFound = joiner.query(queryNotFound, {
|
||||
throwIfKeyNotFound: true,
|
||||
})
|
||||
|
||||
expect(dataNotFound).rejects.toThrowError("order id not found: ord_1234556")
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,613 @@
|
||||
import { MedusaContainer, RemoteExpandProperty } from "@medusajs/types"
|
||||
import { lowerCaseFirst, toPascalCase } from "@medusajs/utils"
|
||||
import { serviceConfigs, serviceMock } from "../../__mocks__/joiner/mock_data"
|
||||
import { RemoteJoiner } from "./../../joiner"
|
||||
|
||||
const container = {
|
||||
resolve: (serviceName) => {
|
||||
return {
|
||||
list: (...args) => {
|
||||
return serviceMock[serviceName].apply(this, args)
|
||||
},
|
||||
}
|
||||
},
|
||||
} as MedusaContainer
|
||||
|
||||
const fetchServiceDataCallback = async (
|
||||
expand: RemoteExpandProperty,
|
||||
pkField: string,
|
||||
ids?: (unknown | unknown[])[],
|
||||
relationship?: any
|
||||
) => {
|
||||
const serviceConfig = expand.serviceConfig
|
||||
const moduleRegistryName =
|
||||
lowerCaseFirst(serviceConfig.serviceName) + "Service"
|
||||
|
||||
const service = container.resolve(moduleRegistryName)
|
||||
const methodName = relationship?.inverse
|
||||
? `getBy${toPascalCase(pkField)}`
|
||||
: "list"
|
||||
|
||||
return await service[methodName]({
|
||||
fields: expand.fields,
|
||||
args: expand.args,
|
||||
expands: expand.expands,
|
||||
options: {
|
||||
[pkField]: ids,
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
describe("RemoteJoiner", () => {
|
||||
let joiner: RemoteJoiner
|
||||
|
||||
beforeAll(() => {
|
||||
joiner = new RemoteJoiner(serviceConfigs, fetchServiceDataCallback)
|
||||
})
|
||||
|
||||
beforeEach(() => {
|
||||
jest.clearAllMocks()
|
||||
})
|
||||
|
||||
it("should filter the fields and attach the values correctly", () => {
|
||||
const data = {
|
||||
id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
title: "7468915",
|
||||
handle: "7468915",
|
||||
subtitle: null,
|
||||
description: null,
|
||||
collection_id: null,
|
||||
collection: null,
|
||||
type_id: "ptyp_01GX66TMARS55DBNYE31DDT8ZV",
|
||||
type: {
|
||||
id: "ptyp_01GX66TMARS55DBNYE31DDT8ZV",
|
||||
value: "test-type-1",
|
||||
},
|
||||
options: [
|
||||
{
|
||||
id: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
title: "4108194",
|
||||
product_id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
product: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
values: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
option_id: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
option: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
variant_id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
variant: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
variants: [
|
||||
{
|
||||
id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
product_id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
product: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
options: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
option_id: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
option: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
variant_id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
variant: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
tags: [],
|
||||
images: [],
|
||||
}
|
||||
|
||||
const fields = [
|
||||
"id",
|
||||
"title",
|
||||
"subtitle",
|
||||
"description",
|
||||
"handle",
|
||||
"images",
|
||||
"tags",
|
||||
"type",
|
||||
"collection",
|
||||
"options",
|
||||
"variants_id",
|
||||
]
|
||||
|
||||
const expands = {
|
||||
collection: {
|
||||
fields: ["id", "title", "handle"],
|
||||
},
|
||||
images: {
|
||||
fields: ["url"],
|
||||
},
|
||||
options: {
|
||||
fields: ["title", "values"],
|
||||
expands: {
|
||||
values: {
|
||||
fields: ["id", "value"],
|
||||
},
|
||||
},
|
||||
},
|
||||
tags: {
|
||||
fields: ["value"],
|
||||
},
|
||||
type: {
|
||||
fields: ["value"],
|
||||
},
|
||||
variants: {
|
||||
fields: ["id", "options"],
|
||||
expands: {
|
||||
options: {
|
||||
fields: ["id", "value"],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const filteredFields = (RemoteJoiner as any).filterFields(
|
||||
data,
|
||||
fields,
|
||||
expands
|
||||
)
|
||||
|
||||
expect(filteredFields).toEqual(
|
||||
expect.objectContaining({
|
||||
id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
title: "7468915",
|
||||
subtitle: null,
|
||||
description: null,
|
||||
handle: "7468915",
|
||||
images: [],
|
||||
tags: [],
|
||||
type: {
|
||||
value: "test-type-1",
|
||||
},
|
||||
collection: null,
|
||||
options: [
|
||||
{
|
||||
title: "4108194",
|
||||
values: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
variants: [
|
||||
{
|
||||
id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
options: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it("should filter the fields and attach the values correctly taking into account the * fields selection", () => {
|
||||
const data = {
|
||||
id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
title: "7468915",
|
||||
handle: "7468915",
|
||||
subtitle: null,
|
||||
description: null,
|
||||
collection_id: null,
|
||||
collection: null,
|
||||
type_id: "ptyp_01GX66TMARS55DBNYE31DDT8ZV",
|
||||
type: {
|
||||
id: "ptyp_01GX66TMARS55DBNYE31DDT8ZV",
|
||||
value: "test-type-1",
|
||||
},
|
||||
options: [
|
||||
{
|
||||
id: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
title: "4108194",
|
||||
product_id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
product: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
values: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
option_id: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
option: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
variant_id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
variant: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
variants: [
|
||||
{
|
||||
id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
product_id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
product: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
options: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
option_id: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
option: "opt_01H1PN57AQE8G3FK365EYNH917",
|
||||
variant_id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
variant: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
tags: [],
|
||||
images: [],
|
||||
}
|
||||
|
||||
const fields = [
|
||||
"id",
|
||||
"title",
|
||||
"subtitle",
|
||||
"description",
|
||||
"handle",
|
||||
"images",
|
||||
"tags",
|
||||
"type",
|
||||
"collection",
|
||||
"options",
|
||||
"variants_id",
|
||||
]
|
||||
|
||||
const expands = {
|
||||
collection: {
|
||||
fields: ["id", "title", "handle"],
|
||||
},
|
||||
images: {
|
||||
fields: ["url"],
|
||||
},
|
||||
options: {
|
||||
fields: ["title", "values"],
|
||||
expands: {
|
||||
values: {
|
||||
fields: ["id", "value"],
|
||||
},
|
||||
},
|
||||
},
|
||||
tags: {
|
||||
fields: ["value"],
|
||||
},
|
||||
type: {
|
||||
fields: ["value"],
|
||||
},
|
||||
variants: {
|
||||
fields: ["*"],
|
||||
expands: {
|
||||
options: {
|
||||
fields: ["id", "value"],
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
const filteredFields = (RemoteJoiner as any).filterFields(
|
||||
data,
|
||||
fields,
|
||||
expands
|
||||
)
|
||||
|
||||
expect(filteredFields).toEqual(
|
||||
expect.objectContaining({
|
||||
id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
title: "7468915",
|
||||
subtitle: null,
|
||||
description: null,
|
||||
handle: "7468915",
|
||||
images: [],
|
||||
tags: [],
|
||||
type: {
|
||||
value: "test-type-1",
|
||||
},
|
||||
collection: null,
|
||||
options: [
|
||||
{
|
||||
title: "4108194",
|
||||
values: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
variants: [
|
||||
{
|
||||
id: "variant_01H1PN57E99TMZAGNEZBSS3FM3",
|
||||
product_id: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
product: "prod_01H1PN579TJ707BRK938E2ME2N",
|
||||
options: [
|
||||
{
|
||||
id: "optval_01H1PN57EAMXYFRGSJJJE9P0TJ",
|
||||
value: "4108194",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
})
|
||||
)
|
||||
})
|
||||
|
||||
it("Simple query of a service, its id and no fields specified", async () => {
|
||||
const query = {
|
||||
service: "user",
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "1",
|
||||
},
|
||||
],
|
||||
fields: ["id", "name", "email"],
|
||||
}
|
||||
|
||||
await joiner.query(query)
|
||||
|
||||
expect(serviceMock.userService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.userService).toHaveBeenCalledWith({
|
||||
args: [],
|
||||
fields: ["id", "name", "email"],
|
||||
options: { id: ["1"] },
|
||||
})
|
||||
})
|
||||
|
||||
it("Simple query of a service by its alias", async () => {
|
||||
const query = {
|
||||
alias: "customer",
|
||||
fields: ["id"],
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "1",
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
await joiner.query(query)
|
||||
|
||||
expect(serviceMock.userService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.userService).toHaveBeenCalledWith({
|
||||
args: [],
|
||||
fields: ["id"],
|
||||
options: { id: ["1"] },
|
||||
})
|
||||
})
|
||||
|
||||
it("Simple query of a service by its alias with extra arguments", async () => {
|
||||
const query = {
|
||||
alias: "me",
|
||||
fields: ["id"],
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: 1,
|
||||
},
|
||||
{
|
||||
name: "arg1",
|
||||
value: "abc",
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
await joiner.query(query)
|
||||
|
||||
expect(serviceMock.userService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.userService).toHaveBeenCalledWith({
|
||||
args: [
|
||||
{
|
||||
name: "arg1",
|
||||
value: "abc",
|
||||
},
|
||||
],
|
||||
fields: ["id"],
|
||||
options: { id: [1] },
|
||||
})
|
||||
})
|
||||
|
||||
it("Simple query of a service, its id and a few fields specified", async () => {
|
||||
const query = {
|
||||
service: "user",
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "1",
|
||||
},
|
||||
],
|
||||
fields: ["username", "email"],
|
||||
}
|
||||
|
||||
await joiner.query(query)
|
||||
|
||||
expect(serviceMock.userService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.userService).toHaveBeenCalledWith({
|
||||
args: [],
|
||||
fields: ["username", "email"],
|
||||
options: { id: ["1"] },
|
||||
})
|
||||
})
|
||||
|
||||
it("Query of a service, expanding a property and restricting the fields expanded", async () => {
|
||||
const query = {
|
||||
service: "user",
|
||||
fields: ["username", "email", "products"],
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "1",
|
||||
},
|
||||
],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product"],
|
||||
},
|
||||
{
|
||||
property: "products.product",
|
||||
fields: ["name"],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
await joiner.query(query)
|
||||
|
||||
expect(serviceMock.userService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.userService).toHaveBeenCalledWith({
|
||||
args: [],
|
||||
fields: ["username", "email", "products"],
|
||||
expands: {
|
||||
products: {
|
||||
args: undefined,
|
||||
fields: ["product_id"],
|
||||
},
|
||||
},
|
||||
options: { id: ["1"] },
|
||||
})
|
||||
|
||||
expect(serviceMock.productService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.productService).toHaveBeenCalledWith({
|
||||
fields: ["name", "id"],
|
||||
options: { id: expect.arrayContaining([101, 102, 103]) },
|
||||
})
|
||||
})
|
||||
|
||||
it("Query a service using more than 1 argument, expanding a property with another argument", async () => {
|
||||
const query = {
|
||||
service: "user",
|
||||
args: [
|
||||
{
|
||||
name: "id",
|
||||
value: "1",
|
||||
},
|
||||
{
|
||||
name: "role",
|
||||
value: "admin",
|
||||
},
|
||||
],
|
||||
fields: ["username", "email", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product"],
|
||||
},
|
||||
{
|
||||
property: "products.product",
|
||||
fields: ["name"],
|
||||
args: [
|
||||
{
|
||||
name: "limit",
|
||||
value: "5",
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
await joiner.query(query)
|
||||
|
||||
expect(serviceMock.userService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.userService).toHaveBeenCalledWith({
|
||||
args: [
|
||||
{
|
||||
name: "role",
|
||||
value: "admin",
|
||||
},
|
||||
],
|
||||
fields: ["username", "email", "products"],
|
||||
expands: {
|
||||
products: {
|
||||
args: undefined,
|
||||
fields: ["product_id"],
|
||||
},
|
||||
},
|
||||
options: { id: ["1"] },
|
||||
})
|
||||
|
||||
expect(serviceMock.productService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.productService).toHaveBeenCalledWith({
|
||||
fields: ["name", "id"],
|
||||
options: { id: expect.arrayContaining([101, 102, 103]) },
|
||||
args: [
|
||||
{
|
||||
name: "limit",
|
||||
value: "5",
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("Query a service expanding multiple nested properties", async () => {
|
||||
const query = {
|
||||
service: "order",
|
||||
fields: ["number", "date", "products"],
|
||||
expands: [
|
||||
{
|
||||
property: "products",
|
||||
fields: ["product"],
|
||||
},
|
||||
{
|
||||
property: "products.product",
|
||||
fields: ["handler"],
|
||||
},
|
||||
{
|
||||
property: "user",
|
||||
fields: ["fullname", "email", "products"],
|
||||
},
|
||||
{
|
||||
property: "user.products",
|
||||
fields: ["product"],
|
||||
},
|
||||
{
|
||||
property: "user.products.product",
|
||||
fields: ["name"],
|
||||
},
|
||||
],
|
||||
}
|
||||
|
||||
await joiner.query(query)
|
||||
|
||||
expect(serviceMock.orderService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.orderService).toHaveBeenCalledWith({
|
||||
args: undefined,
|
||||
fields: ["number", "date", "products", "user_id"],
|
||||
expands: {
|
||||
products: {
|
||||
args: undefined,
|
||||
fields: ["product_id"],
|
||||
},
|
||||
},
|
||||
options: { id: undefined },
|
||||
})
|
||||
|
||||
expect(serviceMock.userService).toHaveBeenCalledTimes(1)
|
||||
expect(serviceMock.userService).toHaveBeenCalledWith({
|
||||
fields: ["fullname", "email", "products", "id"],
|
||||
args: undefined,
|
||||
expands: {
|
||||
products: {
|
||||
args: undefined,
|
||||
fields: ["product_id"],
|
||||
},
|
||||
},
|
||||
options: { id: [4, 1] },
|
||||
})
|
||||
|
||||
expect(serviceMock.productService).toHaveBeenCalledTimes(2)
|
||||
expect(serviceMock.productService).toHaveBeenNthCalledWith(1, {
|
||||
fields: ["handler", "id"],
|
||||
options: { id: expect.arrayContaining([101, 103]) },
|
||||
})
|
||||
|
||||
expect(serviceMock.productService).toHaveBeenNthCalledWith(2, {
|
||||
fields: ["name", "id"],
|
||||
options: { id: expect.arrayContaining([103, 102]) },
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,435 @@
|
||||
import { OrchestratorBuilder } from "../../transaction/orchestrator-builder"
|
||||
|
||||
describe("OrchestratorBuilder", () => {
|
||||
let builder: OrchestratorBuilder
|
||||
|
||||
beforeEach(() => {
|
||||
builder = new OrchestratorBuilder()
|
||||
})
|
||||
|
||||
it("should load a TransactionStepsDefinition", () => {
|
||||
builder.load({ action: "foo" })
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
})
|
||||
})
|
||||
|
||||
it("should add a new action after the last action set", () => {
|
||||
builder.addAction("foo")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
})
|
||||
|
||||
builder.addAction("bar")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should replace an action by another keeping its next steps", () => {
|
||||
builder.addAction("foo").addAction("axe").replaceAction("foo", "bar")
|
||||
expect(builder.build()).toEqual({
|
||||
action: "bar",
|
||||
next: {
|
||||
action: "axe",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should insert a new action before an existing action", () => {
|
||||
builder.addAction("foo").addAction("bar").insertActionBefore("bar", "axe")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "axe",
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should insert a new action after an existing action", () => {
|
||||
builder.addAction("foo").addAction("axe").insertActionAfter("foo", "bar")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
next: {
|
||||
action: "axe",
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should move an existing action and its next steps to another place. the destination will become next steps of the final branch", () => {
|
||||
builder
|
||||
.addAction("foo")
|
||||
.addAction("bar")
|
||||
.addAction("axe")
|
||||
.addAction("zzz")
|
||||
.moveAction("axe", "foo")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "axe",
|
||||
next: {
|
||||
action: "zzz",
|
||||
next: {
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should merge two action to run in parallel", () => {
|
||||
builder
|
||||
.addAction("foo")
|
||||
.addAction("bar")
|
||||
.addAction("axe")
|
||||
.mergeActions("foo", "axe")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
next: [
|
||||
{
|
||||
action: "foo",
|
||||
next: { action: "bar" },
|
||||
},
|
||||
{ action: "axe" },
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("should merge multiple actions to run in parallel", () => {
|
||||
builder
|
||||
.addAction("foo")
|
||||
.addAction("bar")
|
||||
.addAction("axe")
|
||||
.addAction("step")
|
||||
.mergeActions("bar", "axe", "step")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
next: [
|
||||
{
|
||||
action: "bar",
|
||||
},
|
||||
{
|
||||
action: "axe",
|
||||
},
|
||||
{
|
||||
action: "step",
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("should delete an action", () => {
|
||||
builder.addAction("foo").deleteAction("foo")
|
||||
|
||||
expect(builder.build()).toEqual({})
|
||||
})
|
||||
|
||||
it("should delete an action and keep all the next steps of that branch", () => {
|
||||
builder
|
||||
.addAction("foo")
|
||||
.addAction("bar")
|
||||
.addAction("axe")
|
||||
.deleteAction("bar")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "axe",
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should delete an action and remove all the next steps of that branch", () => {
|
||||
builder
|
||||
.addAction("foo")
|
||||
.addAction("bar")
|
||||
.addAction("axe")
|
||||
.addAction("step")
|
||||
.pruneAction("bar")
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
})
|
||||
})
|
||||
|
||||
it("should append a new action to the end of a given action's branch", () => {
|
||||
builder
|
||||
.load({
|
||||
action: "foo",
|
||||
next: [
|
||||
{
|
||||
action: "bar",
|
||||
next: {
|
||||
action: "zzz",
|
||||
},
|
||||
},
|
||||
{
|
||||
action: "axe",
|
||||
},
|
||||
],
|
||||
})
|
||||
.appendAction("step", "bar", { saveResponse: true })
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "foo",
|
||||
next: [
|
||||
{
|
||||
action: "bar",
|
||||
next: {
|
||||
action: "zzz",
|
||||
next: {
|
||||
action: "step",
|
||||
saveResponse: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
action: "axe",
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
describe("Composing Complex Transactions", () => {
|
||||
const loadedFlow = {
|
||||
next: {
|
||||
action: "createProduct",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachToSalesChannel",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createPrices",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createInventoryItems",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachInventoryItems",
|
||||
noCompensation: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
it("should load a transaction and add two steps", () => {
|
||||
const builder = new OrchestratorBuilder(loadedFlow)
|
||||
builder
|
||||
.addAction("step_1", { saveResponse: true })
|
||||
.addAction("step_2", { saveResponse: true })
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "createProduct",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachToSalesChannel",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createPrices",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createInventoryItems",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachInventoryItems",
|
||||
noCompensation: true,
|
||||
next: {
|
||||
action: "step_1",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "step_2",
|
||||
saveResponse: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should load a transaction, add 2 steps and merge step_1 to run in parallel with createProduct", () => {
|
||||
const builder = new OrchestratorBuilder(loadedFlow)
|
||||
builder
|
||||
.addAction("step_1", { saveResponse: true })
|
||||
.addAction("step_2", { saveResponse: true })
|
||||
.mergeActions("createProduct", "step_1")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
next: [
|
||||
{
|
||||
action: "createProduct",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachToSalesChannel",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createPrices",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createInventoryItems",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachInventoryItems",
|
||||
noCompensation: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
action: "step_1",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "step_2",
|
||||
saveResponse: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
})
|
||||
})
|
||||
|
||||
it("should load a transaction, add 2 steps and move 'step_1' and all its next steps to run before 'createPrices'", () => {
|
||||
const builder = new OrchestratorBuilder(loadedFlow)
|
||||
builder
|
||||
.addAction("step_1", { saveResponse: true })
|
||||
.addAction("step_2", { saveResponse: true })
|
||||
.moveAction("step_1", "createPrices")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "createProduct",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachToSalesChannel",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "step_1",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "step_2",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createPrices",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createInventoryItems",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachInventoryItems",
|
||||
noCompensation: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should load a transaction, add 2 steps and move 'step_1' to run before 'createPrices' and merge next steps", () => {
|
||||
const builder = new OrchestratorBuilder(loadedFlow)
|
||||
builder
|
||||
.addAction("step_1", { saveResponse: true })
|
||||
.addAction("step_2", { saveResponse: true })
|
||||
.moveAndMergeNextAction("step_1", "createPrices")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "createProduct",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachToSalesChannel",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "step_1",
|
||||
saveResponse: true,
|
||||
next: [
|
||||
{
|
||||
action: "step_2",
|
||||
saveResponse: true,
|
||||
},
|
||||
{
|
||||
action: "createPrices",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "createInventoryItems",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "attachInventoryItems",
|
||||
noCompensation: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("Fully compose a complex transaction", () => {
|
||||
const builder = new OrchestratorBuilder()
|
||||
builder
|
||||
.addAction("step_1", { saveResponse: true })
|
||||
.addAction("step_2", { saveResponse: true })
|
||||
.addAction("step_3", { saveResponse: true })
|
||||
|
||||
builder.insertActionBefore("step_3", "step_2.5", {
|
||||
saveResponse: false,
|
||||
noCompensation: true,
|
||||
})
|
||||
|
||||
builder.insertActionAfter("step_1", "step_1.1", { saveResponse: true })
|
||||
|
||||
builder.insertActionAfter("step_3", "step_4", { async: false })
|
||||
|
||||
builder
|
||||
.mergeActions("step_2", "step_2.5", "step_3")
|
||||
.addAction("step_5", { noCompensation: true })
|
||||
|
||||
builder.deleteAction("step_3")
|
||||
|
||||
expect(builder.build()).toEqual({
|
||||
action: "step_1",
|
||||
saveResponse: true,
|
||||
next: {
|
||||
action: "step_1.1",
|
||||
saveResponse: true,
|
||||
next: [
|
||||
{
|
||||
action: "step_2",
|
||||
saveResponse: true,
|
||||
},
|
||||
{
|
||||
action: "step_2.5",
|
||||
saveResponse: false,
|
||||
noCompensation: true,
|
||||
},
|
||||
{
|
||||
action: "step_4",
|
||||
async: false,
|
||||
next: {
|
||||
action: "step_5",
|
||||
noCompensation: true,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
})
|
||||
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,175 @@
|
||||
import { GlobalWorkflow } from "../../workflow/global-workflow"
|
||||
import { TransactionState } from "../../transaction/types"
|
||||
import { WorkflowManager } from "../../workflow/workflow-manager"
|
||||
|
||||
describe("WorkflowManager", () => {
|
||||
const container: any = {}
|
||||
|
||||
let handlers
|
||||
let flow: GlobalWorkflow
|
||||
let asyncStepIdempotencyKey: string
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks()
|
||||
WorkflowManager.unregisterAll()
|
||||
|
||||
handlers = new Map()
|
||||
handlers.set("foo", {
|
||||
invoke: jest.fn().mockResolvedValue({ done: true }),
|
||||
compensate: jest.fn(() => {}),
|
||||
})
|
||||
|
||||
handlers.set("bar", {
|
||||
invoke: jest.fn().mockResolvedValue({ done: true }),
|
||||
compensate: jest.fn().mockResolvedValue({}),
|
||||
})
|
||||
|
||||
handlers.set("broken", {
|
||||
invoke: jest.fn(() => {
|
||||
throw new Error("Step Failed")
|
||||
}),
|
||||
compensate: jest.fn().mockResolvedValue({ bar: 123, reverted: true }),
|
||||
})
|
||||
|
||||
handlers.set("callExternal", {
|
||||
invoke: jest.fn(({ metadata }) => {
|
||||
asyncStepIdempotencyKey = metadata.idempotency_key
|
||||
}),
|
||||
})
|
||||
|
||||
WorkflowManager.register(
|
||||
"create-product",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
|
||||
WorkflowManager.register(
|
||||
"broken-delivery",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "broken",
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
|
||||
WorkflowManager.register(
|
||||
"deliver-product",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "callExternal",
|
||||
async: true,
|
||||
noCompensation: true,
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
|
||||
flow = new GlobalWorkflow(container)
|
||||
})
|
||||
|
||||
it("should return all registered workflows", () => {
|
||||
const wf = Object.keys(Object.fromEntries(WorkflowManager.getWorkflows()))
|
||||
expect(wf).toEqual(["create-product", "broken-delivery", "deliver-product"])
|
||||
})
|
||||
|
||||
it("should begin a transaction and returns its final state", async () => {
|
||||
const transaction = await flow.run("create-product", "t-id", {
|
||||
input: 123,
|
||||
})
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(0)
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.DONE)
|
||||
})
|
||||
|
||||
it("should begin a transaction and revert it when fail", async () => {
|
||||
const transaction = await flow.run("broken-delivery", "t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("broken").invoke).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("broken").compensate).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.REVERTED)
|
||||
})
|
||||
|
||||
it("should continue an asyncronous transaction after reporting a successful step", async () => {
|
||||
const transaction = await flow.run("deliver-product", "t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("callExternal").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.INVOKING)
|
||||
|
||||
const continuation = await flow.registerStepSuccess(
|
||||
"deliver-product",
|
||||
asyncStepIdempotencyKey,
|
||||
{ ok: true }
|
||||
)
|
||||
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(continuation.getState()).toBe(TransactionState.DONE)
|
||||
})
|
||||
|
||||
it("should revert an asyncronous transaction after reporting a failure step", async () => {
|
||||
const transaction = await flow.run("deliver-product", "t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("callExternal").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.INVOKING)
|
||||
|
||||
const continuation = await flow.registerStepFailure(
|
||||
"deliver-product",
|
||||
asyncStepIdempotencyKey,
|
||||
{ ok: true }
|
||||
)
|
||||
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(0)
|
||||
expect(handlers.get("bar").compensate).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(continuation.getState()).toBe(TransactionState.REVERTED)
|
||||
})
|
||||
|
||||
it("should update an existing global flow with a new step and a new handler", async () => {
|
||||
const definition =
|
||||
WorkflowManager.getTransactionDefinition("create-product")
|
||||
|
||||
definition.insertActionBefore("bar", "xor", { maxRetries: 3 })
|
||||
|
||||
const additionalHandlers = new Map()
|
||||
additionalHandlers.set("xor", {
|
||||
invoke: jest.fn().mockResolvedValue({ done: true }),
|
||||
compensate: jest.fn().mockResolvedValue({}),
|
||||
})
|
||||
|
||||
WorkflowManager.update("create-product", definition, additionalHandlers)
|
||||
|
||||
const transaction = await flow.run("create-product", "t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(additionalHandlers.get("xor").invoke).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.DONE)
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,264 @@
|
||||
import { TransactionState } from "../../transaction/types"
|
||||
import { LocalWorkflow } from "../../workflow/local-workflow"
|
||||
import { WorkflowManager } from "../../workflow/workflow-manager"
|
||||
|
||||
describe("WorkflowManager", () => {
|
||||
const container: any = {}
|
||||
|
||||
let handlers
|
||||
let asyncStepIdempotencyKey: string
|
||||
|
||||
beforeEach(() => {
|
||||
jest.resetAllMocks()
|
||||
WorkflowManager.unregisterAll()
|
||||
|
||||
handlers = new Map()
|
||||
handlers.set("foo", {
|
||||
invoke: jest.fn().mockResolvedValue({ done: true }),
|
||||
compensate: jest.fn(() => {}),
|
||||
})
|
||||
|
||||
handlers.set("bar", {
|
||||
invoke: jest.fn().mockResolvedValue({ done: true }),
|
||||
compensate: jest.fn().mockResolvedValue({}),
|
||||
})
|
||||
|
||||
handlers.set("broken", {
|
||||
invoke: jest.fn(() => {
|
||||
throw new Error("Step Failed")
|
||||
}),
|
||||
compensate: jest.fn().mockResolvedValue({ bar: 123, reverted: true }),
|
||||
})
|
||||
|
||||
handlers.set("callExternal", {
|
||||
invoke: jest.fn(({ metadata }) => {
|
||||
asyncStepIdempotencyKey = metadata.idempotency_key
|
||||
}),
|
||||
})
|
||||
|
||||
WorkflowManager.register(
|
||||
"create-product",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
|
||||
WorkflowManager.register(
|
||||
"broken-delivery",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "broken",
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
|
||||
WorkflowManager.register(
|
||||
"deliver-product",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "callExternal",
|
||||
async: true,
|
||||
noCompensation: true,
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
})
|
||||
|
||||
it("should return all registered workflows", () => {
|
||||
const wf = Object.keys(Object.fromEntries(WorkflowManager.getWorkflows()))
|
||||
expect(wf).toEqual(["create-product", "broken-delivery", "deliver-product"])
|
||||
})
|
||||
|
||||
it("should NOT throw when registering a workflow with an existing id in Medusa V1", () => {
|
||||
let err
|
||||
try {
|
||||
WorkflowManager.register(
|
||||
"create-product",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
next: {
|
||||
action: "xor",
|
||||
},
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
} catch (e) {
|
||||
err = e
|
||||
}
|
||||
|
||||
expect(err).toBeUndefined()
|
||||
})
|
||||
|
||||
it("should throw when registering a workflow with an existing id in Medusa V2", () => {
|
||||
let err
|
||||
const env = process.env.MEDUSA_FF_MEDUSA_V2
|
||||
process.env.MEDUSA_FF_MEDUSA_V2 = "true"
|
||||
try {
|
||||
WorkflowManager.register(
|
||||
"create-product",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
next: {
|
||||
action: "xor",
|
||||
},
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
} catch (e) {
|
||||
err = e
|
||||
}
|
||||
process.env.MEDUSA_FF_MEDUSA_V2 = env
|
||||
|
||||
expect(err).toBeDefined()
|
||||
expect(err.message).toBe(
|
||||
`Workflow with id "create-product" and step definition already exists.`
|
||||
)
|
||||
})
|
||||
|
||||
it("should not throw when registering a workflow with an existing id but identical definition", () => {
|
||||
let err
|
||||
|
||||
const env = process.env.MEDUSA_FF_MEDUSA_V2
|
||||
process.env.MEDUSA_FF_MEDUSA_V2 = "true"
|
||||
try {
|
||||
WorkflowManager.register(
|
||||
"create-product",
|
||||
{
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "bar",
|
||||
},
|
||||
},
|
||||
handlers
|
||||
)
|
||||
} catch (e) {
|
||||
err = e
|
||||
}
|
||||
process.env.MEDUSA_FF_MEDUSA_V2 = env
|
||||
|
||||
expect(err).not.toBeDefined()
|
||||
})
|
||||
|
||||
it("should begin a transaction and returns its final state", async () => {
|
||||
const flow = new LocalWorkflow("create-product", container)
|
||||
const transaction = await flow.run("t-id", {
|
||||
input: 123,
|
||||
})
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(0)
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.DONE)
|
||||
})
|
||||
|
||||
it("should begin a transaction and revert it when fail", async () => {
|
||||
const flow = new LocalWorkflow("broken-delivery", container)
|
||||
const transaction = await flow.run("t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("broken").invoke).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("broken").compensate).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.REVERTED)
|
||||
})
|
||||
|
||||
it("should continue an asyncronous transaction after reporting a successful step", async () => {
|
||||
const flow = new LocalWorkflow("deliver-product", container)
|
||||
const transaction = await flow.run("t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("callExternal").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.INVOKING)
|
||||
|
||||
const continuation = await flow.registerStepSuccess(
|
||||
asyncStepIdempotencyKey,
|
||||
{ ok: true }
|
||||
)
|
||||
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(continuation.getState()).toBe(TransactionState.DONE)
|
||||
})
|
||||
|
||||
it("should revert an asyncronous transaction after reporting a failure step", async () => {
|
||||
const flow = new LocalWorkflow("deliver-product", container)
|
||||
const transaction = await flow.run("t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("callExternal").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.INVOKING)
|
||||
|
||||
const continuation = await flow.registerStepFailure(
|
||||
asyncStepIdempotencyKey,
|
||||
{ ok: true }
|
||||
)
|
||||
|
||||
expect(handlers.get("foo").compensate).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(0)
|
||||
expect(handlers.get("bar").compensate).toHaveBeenCalledTimes(0)
|
||||
|
||||
expect(continuation.getState()).toBe(TransactionState.REVERTED)
|
||||
})
|
||||
|
||||
it("should update a flow with a new step and a new handler", async () => {
|
||||
const flow = new LocalWorkflow("create-product", container)
|
||||
|
||||
const additionalHandler = {
|
||||
invoke: jest.fn().mockResolvedValue({ done: true }),
|
||||
compensate: jest.fn().mockResolvedValue({}),
|
||||
}
|
||||
|
||||
flow.insertActionBefore("bar", "xor", additionalHandler, { maxRetries: 3 })
|
||||
|
||||
const transaction = await flow.run("t-id")
|
||||
|
||||
expect(handlers.get("foo").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(handlers.get("bar").invoke).toHaveBeenCalledTimes(1)
|
||||
expect(additionalHandler.invoke).toHaveBeenCalledTimes(1)
|
||||
|
||||
expect(transaction.getState()).toBe(TransactionState.DONE)
|
||||
|
||||
expect(
|
||||
WorkflowManager.getWorkflow("create-product")?.handlers_.has("xor")
|
||||
).toEqual(false)
|
||||
})
|
||||
|
||||
it("should return the final flow definition when calling getFlow()", async () => {
|
||||
const flow = new LocalWorkflow("deliver-product", container)
|
||||
|
||||
expect(flow.getFlow()).toEqual({
|
||||
action: "foo",
|
||||
next: {
|
||||
action: "callExternal",
|
||||
async: true,
|
||||
noCompensation: true,
|
||||
next: { action: "bar" },
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
3
packages/core/orchestration/src/index.ts
Normal file
3
packages/core/orchestration/src/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export * from "./joiner"
|
||||
export * from "./transaction"
|
||||
export * from "./workflow"
|
||||
194
packages/core/orchestration/src/joiner/graphql-ast.ts
Normal file
194
packages/core/orchestration/src/joiner/graphql-ast.ts
Normal file
@@ -0,0 +1,194 @@
|
||||
import { RemoteJoinerQuery } from "@medusajs/types"
|
||||
import {
|
||||
ArgumentNode,
|
||||
DirectiveNode,
|
||||
DocumentNode,
|
||||
FieldNode,
|
||||
Kind,
|
||||
OperationDefinitionNode,
|
||||
SelectionSetNode,
|
||||
ValueNode,
|
||||
parse,
|
||||
} from "graphql"
|
||||
|
||||
// One parsed GraphQL argument, e.g. `(limit: 10)` -> { name: "limit", value: 10 }.
interface Argument {
  name: string
  value?: unknown
}

// A parsed GraphQL directive together with its (optional) arguments.
interface Directive {
  name: string
  args?: Argument[]
}

// One expandable relation extracted from the query: `property` is the
// dot-path relative to the main service, `fields` the selected leaf names.
interface Entity {
  property: string
  fields: string[]
  args?: Argument[]
  directives?: { [field: string]: Directive[] }
}
|
||||
|
||||
/**
 * Parses a GraphQL query string into a RemoteJoinerQuery: the first root
 * field becomes the alias, its leaf selections the fields, and every nested
 * selection set an entry in `expands`.
 */
class GraphQLParser {
  private ast: DocumentNode

  /**
   * @param input - GraphQL query source; parsed eagerly (throws on syntax errors).
   * @param variables - Values substituted for `$variable` references.
   */
  constructor(input: string, private variables: Record<string, unknown> = {}) {
    this.ast = parse(input)
    this.variables = variables
  }

  // Convert a GraphQL AST value node into a plain JS value, resolving
  // variables and recursing into lists and objects. Unknown kinds yield
  // undefined.
  private parseValueNode(valueNode: ValueNode): unknown {
    // Declared up front because a `const` inside the OBJECT case would need
    // its own block scope.
    const obj = {}

    switch (valueNode.kind) {
      case Kind.VARIABLE:
        return this.variables ? this.variables[valueNode.name.value] : undefined
      case Kind.INT:
        return parseInt(valueNode.value, 10)
      case Kind.FLOAT:
        return parseFloat(valueNode.value)
      case Kind.BOOLEAN:
        return Boolean(valueNode.value)
      case Kind.STRING:
      case Kind.ENUM:
        return valueNode.value
      case Kind.NULL:
        return null
      case Kind.LIST:
        return valueNode.values.map((v) => this.parseValueNode(v))
      case Kind.OBJECT:
        for (const field of valueNode.fields) {
          obj[field.name.value] = this.parseValueNode(field.value)
        }
        return obj
      default:
        return undefined
    }
  }

  // Map AST argument nodes to {name, value} pairs. Returns undefined (not
  // an empty array) when there are none, so callers can omit the property.
  private parseArguments(
    args: readonly ArgumentNode[]
  ): Argument[] | undefined {
    if (!args.length) {
      return
    }

    return args.map((arg) => {
      const value = this.parseValueNode(arg.value)

      return {
        name: arg.name.value,
        value: value,
      }
    })
  }

  // Parse each directive on a field into name + arguments.
  private parseDirectives(directives: readonly DirectiveNode[]): Directive[] {
    return directives.map((directive) => ({
      name: directive.name.value,
      args: this.parseArguments(directive.arguments || []),
    }))
  }

  // Build a field-name -> directives map for one selection set; undefined
  // when no field in the set carries a directive.
  private createDirectivesMap(selectionSet: SelectionSetNode):
    | {
        [field: string]: Directive[]
      }
    | undefined {
    const directivesMap: { [field: string]: Directive[] } = {}
    let hasDirectives = false
    selectionSet.selections.forEach((field) => {
      const fieldName = (field as FieldNode).name.value
      const fieldDirectives = this.parseDirectives(
        (field as FieldNode).directives || []
      )
      if (fieldDirectives.length > 0) {
        hasDirectives = true
        directivesMap[fieldName] = fieldDirectives
      }
    })
    return hasDirectives ? directivesMap : undefined
  }

  // Recursively collect every nested selection set as an Entity. The
  // entity's `property` is its dot-path with the leading main-service
  // segment stripped.
  private extractEntities(
    node: SelectionSetNode,
    parentName = "",
    mainService = ""
  ): Entity[] {
    const entities: Entity[] = []

    node.selections.forEach((selection) => {
      if (selection.kind === "Field") {
        const fieldNode = selection as FieldNode

        // Leaf fields are already captured by the parent's `fields` list.
        if (!fieldNode.selectionSet) {
          return
        }

        const propName = fieldNode.name.value
        const entityName = parentName ? `${parentName}.${propName}` : propName

        const nestedEntity: Entity = {
          property: entityName.replace(`${mainService}.`, ""),
          fields: fieldNode.selectionSet.selections.map(
            (field) => (field as FieldNode).name.value
          ),
          args: this.parseArguments(fieldNode.arguments || []),
          directives: this.createDirectivesMap(fieldNode.selectionSet),
        }

        entities.push(nestedEntity)
        entities.push(
          ...this.extractEntities(
            fieldNode.selectionSet,
            entityName,
            mainService
          )
        )
      }
    })

    return entities
  }

  /**
   * Parse the document and return the RemoteJoinerQuery configuration
   * derived from its first operation's first root field.
   * @throws Error when the document contains no operation definition.
   */
  public parseQuery(): RemoteJoinerQuery {
    const queryDefinition = this.ast.definitions.find(
      (definition) => definition.kind === "OperationDefinition"
    ) as OperationDefinitionNode

    if (!queryDefinition) {
      throw new Error("No query found")
    }

    // Only the first root selection is considered the entry point.
    const rootFieldNode = queryDefinition.selectionSet
      .selections[0] as FieldNode
    const propName = rootFieldNode.name.value

    const remoteJoinConfig: RemoteJoinerQuery = {
      alias: propName,
      fields: [],
      expands: [],
    }

    if (rootFieldNode.arguments) {
      remoteJoinConfig.args = this.parseArguments(rootFieldNode.arguments)
    }

    if (rootFieldNode.selectionSet) {
      remoteJoinConfig.fields = rootFieldNode.selectionSet.selections.map(
        (field) => (field as FieldNode).name.value
      )
      remoteJoinConfig.directives = this.createDirectivesMap(
        rootFieldNode.selectionSet
      )
      remoteJoinConfig.expands = this.extractEntities(
        rootFieldNode.selectionSet,
        propName,
        propName
      )
    }

    return remoteJoinConfig
  }
}
|
||||
|
||||
export default GraphQLParser
|
||||
89
packages/core/orchestration/src/joiner/helpers.ts
Normal file
89
packages/core/orchestration/src/joiner/helpers.ts
Normal file
@@ -0,0 +1,89 @@
|
||||
import { RemoteJoinerQuery } from "@medusajs/types"
|
||||
|
||||
export function toRemoteJoinerQuery(
|
||||
obj: any,
|
||||
variables: Record<string, any> = {}
|
||||
): RemoteJoinerQuery {
|
||||
const remoteJoinerQuery: RemoteJoinerQuery = {
|
||||
alias: "",
|
||||
fields: [],
|
||||
expands: [],
|
||||
}
|
||||
|
||||
let entryPoint = ""
|
||||
function extractRecursive(obj: any, parentName = "", isEntryPoint = true) {
|
||||
for (const key of Object.keys(obj ?? {})) {
|
||||
const value = obj[key]
|
||||
|
||||
const canExpand =
|
||||
typeof value === "object" &&
|
||||
!["fields", "__args", "__directives"].includes(key)
|
||||
|
||||
if (!canExpand) {
|
||||
continue
|
||||
}
|
||||
|
||||
const entityName = parentName ? `${parentName}.${key}` : key
|
||||
const variablesPath = !isEntryPoint
|
||||
? `${entryPoint}${parentName ? "." + parentName : parentName}.${key}`
|
||||
: key
|
||||
|
||||
if (isEntryPoint) {
|
||||
entryPoint = key
|
||||
}
|
||||
|
||||
const currentVariables = variables[variablesPath]
|
||||
|
||||
const expandObj: any = {
|
||||
property: entityName,
|
||||
}
|
||||
|
||||
const reference = isEntryPoint ? remoteJoinerQuery : expandObj
|
||||
|
||||
if (currentVariables) {
|
||||
reference.args = Object.entries(currentVariables).map(
|
||||
([name, value]) => ({
|
||||
name,
|
||||
value,
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
if (value.__args) {
|
||||
reference.args = [
|
||||
...(reference.__args || []),
|
||||
...Object.entries(value.__args).map(([name, value]) => ({
|
||||
name,
|
||||
value,
|
||||
})),
|
||||
]
|
||||
}
|
||||
|
||||
if (value.__directives) {
|
||||
reference.directives = Object.entries(value.__directives).map(
|
||||
([name, value]) => ({ name, value })
|
||||
)
|
||||
}
|
||||
|
||||
if (value.fields) {
|
||||
reference.fields = value.fields
|
||||
}
|
||||
|
||||
if (isEntryPoint) {
|
||||
if (value.isServiceAccess) {
|
||||
remoteJoinerQuery.service = key
|
||||
} else {
|
||||
remoteJoinerQuery.alias = key
|
||||
}
|
||||
} else {
|
||||
remoteJoinerQuery.expands!.push(expandObj)
|
||||
}
|
||||
|
||||
extractRecursive(value, isEntryPoint ? "" : entityName, false)
|
||||
}
|
||||
|
||||
return remoteJoinerQuery
|
||||
}
|
||||
|
||||
return extractRecursive(obj)
|
||||
}
|
||||
2
packages/core/orchestration/src/joiner/index.ts
Normal file
2
packages/core/orchestration/src/joiner/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export * from "./helpers"
|
||||
export * from "./remote-joiner"
|
||||
1053
packages/core/orchestration/src/joiner/remote-joiner.ts
Normal file
1053
packages/core/orchestration/src/joiner/remote-joiner.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,107 @@
|
||||
import {
|
||||
DistributedTransaction,
|
||||
TransactionCheckpoint,
|
||||
} from "../distributed-transaction"
|
||||
import { TransactionStep } from "../transaction-step"
|
||||
|
||||
/**
 * Contract for persisting distributed-transaction checkpoints and for
 * scheduling/clearing retries and timeouts of transactions and steps.
 */
export interface IDistributedTransactionStorage {
  // Load the checkpoint stored under `key`, if any.
  get(key: string): Promise<TransactionCheckpoint | undefined>
  // Return every stored checkpoint.
  list(): Promise<TransactionCheckpoint[]>
  // Persist a checkpoint; `ttl` is an optional expiry (unit depends on the
  // backend — TODO confirm against implementations).
  save(key: string, data: TransactionCheckpoint, ttl?: number): Promise<void>
  // Schedule a retry of `step` relative to `timestamp` after `interval`.
  scheduleRetry(
    transaction: DistributedTransaction,
    step: TransactionStep,
    timestamp: number,
    interval: number
  ): Promise<void>
  // Cancel a previously scheduled retry for `step`.
  clearRetry(
    transaction: DistributedTransaction,
    step: TransactionStep
  ): Promise<void>
  // Schedule a timeout for the whole transaction.
  scheduleTransactionTimeout(
    transaction: DistributedTransaction,
    timestamp: number,
    interval: number
  ): Promise<void>
  // Schedule a timeout for a single step.
  scheduleStepTimeout(
    transaction: DistributedTransaction,
    step: TransactionStep,
    timestamp: number,
    interval: number
  ): Promise<void>
  // Cancel the transaction-level timeout.
  clearTransactionTimeout(transaction: DistributedTransaction): Promise<void>
  // Cancel the step-level timeout.
  clearStepTimeout(
    transaction: DistributedTransaction,
    step: TransactionStep
  ): Promise<void>
}
|
||||
|
||||
/**
 * Base class for checkpoint storage backends. Every operation throws
 * "not implemented" by default; concrete subclasses override the subset of
 * operations they support.
 */
export abstract class DistributedTransactionStorage
  implements IDistributedTransactionStorage
{
  constructor() {
    /* noop */
  }

  // See IDistributedTransactionStorage.get — must be overridden to be used.
  async get(key: string): Promise<TransactionCheckpoint | undefined> {
    throw new Error("Method 'get' not implemented.")
  }

  // See IDistributedTransactionStorage.list.
  async list(): Promise<TransactionCheckpoint[]> {
    throw new Error("Method 'list' not implemented.")
  }

  // See IDistributedTransactionStorage.save.
  async save(
    key: string,
    data: TransactionCheckpoint,
    ttl?: number
  ): Promise<void> {
    throw new Error("Method 'save' not implemented.")
  }

  // See IDistributedTransactionStorage.scheduleRetry.
  async scheduleRetry(
    transaction: DistributedTransaction,
    step: TransactionStep,
    timestamp: number,
    interval: number
  ): Promise<void> {
    throw new Error("Method 'scheduleRetry' not implemented.")
  }

  // See IDistributedTransactionStorage.clearRetry.
  async clearRetry(
    transaction: DistributedTransaction,
    step: TransactionStep
  ): Promise<void> {
    throw new Error("Method 'clearRetry' not implemented.")
  }

  // See IDistributedTransactionStorage.scheduleTransactionTimeout.
  async scheduleTransactionTimeout(
    transaction: DistributedTransaction,
    timestamp: number,
    interval: number
  ): Promise<void> {
    throw new Error("Method 'scheduleTransactionTimeout' not implemented.")
  }

  // See IDistributedTransactionStorage.clearTransactionTimeout.
  async clearTransactionTimeout(
    transaction: DistributedTransaction
  ): Promise<void> {
    throw new Error("Method 'clearTransactionTimeout' not implemented.")
  }

  // See IDistributedTransactionStorage.scheduleStepTimeout.
  async scheduleStepTimeout(
    transaction: DistributedTransaction,
    step: TransactionStep,
    timestamp: number,
    interval: number
  ): Promise<void> {
    throw new Error("Method 'scheduleStepTimeout' not implemented.")
  }

  // See IDistributedTransactionStorage.clearStepTimeout.
  async clearStepTimeout(
    transaction: DistributedTransaction,
    step: TransactionStep
  ): Promise<void> {
    throw new Error("Method 'clearStepTimeout' not implemented.")
  }
}
|
||||
@@ -0,0 +1,39 @@
|
||||
import { TransactionState } from "@medusajs/utils"
|
||||
import { TransactionCheckpoint } from "../distributed-transaction"
|
||||
import { DistributedTransactionStorage } from "./abstract-storage"
|
||||
|
||||
// eslint-disable-next-line max-len
|
||||
export class BaseInMemoryDistributedTransactionStorage extends DistributedTransactionStorage {
|
||||
private storage: Map<string, TransactionCheckpoint>
|
||||
|
||||
constructor() {
|
||||
super()
|
||||
this.storage = new Map()
|
||||
}
|
||||
|
||||
async get(key: string): Promise<TransactionCheckpoint | undefined> {
|
||||
return this.storage.get(key)
|
||||
}
|
||||
|
||||
async list(): Promise<TransactionCheckpoint[]> {
|
||||
return Array.from(this.storage.values())
|
||||
}
|
||||
|
||||
async save(
|
||||
key: string,
|
||||
data: TransactionCheckpoint,
|
||||
ttl?: number
|
||||
): Promise<void> {
|
||||
const hasFinished = [
|
||||
TransactionState.DONE,
|
||||
TransactionState.REVERTED,
|
||||
TransactionState.FAILED,
|
||||
].includes(data.flow.state)
|
||||
|
||||
if (hasFinished) {
|
||||
this.storage.delete(key)
|
||||
} else {
|
||||
this.storage.set(key, data)
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,297 @@
|
||||
import { isDefined } from "@medusajs/utils"
|
||||
import { EventEmitter } from "events"
|
||||
import { IDistributedTransactionStorage } from "./datastore/abstract-storage"
|
||||
import { BaseInMemoryDistributedTransactionStorage } from "./datastore/base-in-memory-storage"
|
||||
import {
|
||||
TransactionFlow,
|
||||
TransactionOrchestrator,
|
||||
} from "./transaction-orchestrator"
|
||||
import { TransactionStep, TransactionStepHandler } from "./transaction-step"
|
||||
import { TransactionHandlerType, TransactionState } from "./types"
|
||||
|
||||
/**
 * @typedef TransactionMetadata
 * @property model_id - The id of the model_id that created the transaction (modelId).
 * @property idempotency_key - The idempotency key of the transaction.
 * @property action - The action of the transaction.
 * @property action_type - The type of the transaction.
 * @property attempt - The number of attempts for the transaction.
 * @property timestamp - The timestamp of the transaction.
 */
export type TransactionMetadata = {
  model_id: string
  idempotency_key: string
  action: string
  action_type: TransactionHandlerType
  attempt: number
  timestamp: number
}

/**
 * @typedef TransactionContext
 * @property payload - Object containing the initial payload.
 * @property invoke - Object containing responses of Invoke handlers on steps flagged with saveResponse.
 * @property compensate - Object containing responses of Compensate handlers on steps flagged with saveResponse.
 */
export class TransactionContext {
  constructor(
    public payload: unknown = undefined,
    public invoke: Record<string, unknown> = {},
    public compensate: Record<string, unknown> = {}
  ) {}
}

// Records one handler failure: which action failed, in which phase
// (invoke/compensate), and the underlying error.
export class TransactionStepError {
  constructor(
    public action: string,
    public handlerType: TransactionHandlerType,
    public error: Error | any
  ) {}
}

// Snapshot of a transaction persisted by a storage backend: the flow state,
// the accumulated context, and any step errors so far.
export class TransactionCheckpoint {
  constructor(
    public flow: TransactionFlow,
    public context: TransactionContext,
    public errors: TransactionStepError[] = []
  ) {}
}

export class TransactionPayload {
  /**
   * @param metadata - The metadata of the transaction.
   * @param data - The initial payload data to begin a transaction.
   * @param context - Object gathering responses of all steps flagged with saveResponse.
   */
  constructor(
    public metadata: TransactionMetadata,
    public data: Record<string, unknown>,
    public context: TransactionContext
  ) {}
}
|
||||
|
||||
/**
|
||||
* DistributedTransaction represents a distributed transaction, which is a transaction that is composed of multiple steps that are executed in a specific order.
|
||||
*/
|
||||
|
||||
export class DistributedTransaction extends EventEmitter {
|
||||
public modelId: string
|
||||
public transactionId: string
|
||||
|
||||
private readonly errors: TransactionStepError[] = []
|
||||
private readonly context: TransactionContext = new TransactionContext()
|
||||
private static keyValueStore: IDistributedTransactionStorage
|
||||
|
||||
public static setStorage(storage: IDistributedTransactionStorage) {
|
||||
this.keyValueStore = storage
|
||||
}
|
||||
|
||||
public static keyPrefix = "dtrans"
|
||||
|
||||
constructor(
|
||||
private flow: TransactionFlow,
|
||||
public handler: TransactionStepHandler,
|
||||
public payload?: any,
|
||||
errors?: TransactionStepError[],
|
||||
context?: TransactionContext
|
||||
) {
|
||||
super()
|
||||
|
||||
this.transactionId = flow.transactionId
|
||||
this.modelId = flow.modelId
|
||||
|
||||
if (errors) {
|
||||
this.errors = errors
|
||||
}
|
||||
|
||||
this.context.payload = payload
|
||||
if (context) {
|
||||
this.context = { ...context }
|
||||
}
|
||||
}
|
||||
|
||||
public getFlow() {
|
||||
return this.flow
|
||||
}
|
||||
|
||||
public getContext() {
|
||||
return this.context
|
||||
}
|
||||
|
||||
public getErrors(handlerType?: TransactionHandlerType) {
|
||||
if (!isDefined(handlerType)) {
|
||||
return this.errors
|
||||
}
|
||||
|
||||
return this.errors.filter((error) => error.handlerType === handlerType)
|
||||
}
|
||||
|
||||
public addError(
|
||||
action: string,
|
||||
handlerType: TransactionHandlerType,
|
||||
error: Error | any
|
||||
) {
|
||||
this.errors.push({
|
||||
action,
|
||||
handlerType,
|
||||
error,
|
||||
})
|
||||
}
|
||||
|
||||
public addResponse(
|
||||
action: string,
|
||||
handlerType: TransactionHandlerType,
|
||||
response: unknown
|
||||
) {
|
||||
this.context[handlerType][action] = response
|
||||
}
|
||||
|
||||
public hasFinished(): boolean {
|
||||
return [
|
||||
TransactionState.DONE,
|
||||
TransactionState.REVERTED,
|
||||
TransactionState.FAILED,
|
||||
].includes(this.getState())
|
||||
}
|
||||
|
||||
public getState(): TransactionState {
|
||||
return this.getFlow().state
|
||||
}
|
||||
|
||||
public get isPartiallyCompleted(): boolean {
|
||||
return !!this.getFlow().hasFailedSteps || !!this.getFlow().hasSkippedSteps
|
||||
}
|
||||
|
||||
public canInvoke(): boolean {
|
||||
return (
|
||||
this.getFlow().state === TransactionState.NOT_STARTED ||
|
||||
this.getFlow().state === TransactionState.INVOKING
|
||||
)
|
||||
}
|
||||
|
||||
public canRevert(): boolean {
|
||||
return (
|
||||
this.getFlow().state === TransactionState.DONE ||
|
||||
this.getFlow().state === TransactionState.COMPENSATING
|
||||
)
|
||||
}
|
||||
|
||||
public hasTimeout(): boolean {
|
||||
return !!this.getTimeout()
|
||||
}
|
||||
|
||||
public getTimeout(): number | undefined {
|
||||
return this.getFlow().options?.timeout
|
||||
}
|
||||
|
||||
public async saveCheckpoint(
|
||||
ttl = 0
|
||||
): Promise<TransactionCheckpoint | undefined> {
|
||||
const options = this.getFlow().options
|
||||
if (!options?.store) {
|
||||
return
|
||||
}
|
||||
|
||||
const data = new TransactionCheckpoint(
|
||||
this.getFlow(),
|
||||
this.getContext(),
|
||||
this.getErrors()
|
||||
)
|
||||
|
||||
const key = TransactionOrchestrator.getKeyName(
|
||||
DistributedTransaction.keyPrefix,
|
||||
this.modelId,
|
||||
this.transactionId
|
||||
)
|
||||
await DistributedTransaction.keyValueStore.save(key, data, ttl)
|
||||
|
||||
return data
|
||||
}
|
||||
|
||||
public static async loadTransaction(
|
||||
modelId: string,
|
||||
transactionId: string
|
||||
): Promise<TransactionCheckpoint | null> {
|
||||
const key = TransactionOrchestrator.getKeyName(
|
||||
DistributedTransaction.keyPrefix,
|
||||
modelId,
|
||||
transactionId
|
||||
)
|
||||
|
||||
const loadedData = await DistributedTransaction.keyValueStore.get(key)
|
||||
if (loadedData) {
|
||||
return loadedData
|
||||
}
|
||||
|
||||
return null
|
||||
}
|
||||
|
||||
/**
 * Checkpoints the transaction and asks the storage backend to schedule a
 * retry of the given step.
 *
 * @param step - The step to retry.
 * @param interval - Delay before the retry (seconds, presumably, to match
 *                   step retryInterval semantics — TODO confirm).
 */
public async scheduleRetry(
  step: TransactionStep,
  interval: number
): Promise<void> {
  // Save state first so a resumed process sees the retry-scheduled state.
  await this.saveCheckpoint()
  await DistributedTransaction.keyValueStore.scheduleRetry(
    this,
    step,
    Date.now(),
    interval
  )
}
|
||||
|
||||
/**
 * Cancels any retry previously scheduled for the given step.
 */
public async clearRetry(step: TransactionStep): Promise<void> {
  await DistributedTransaction.keyValueStore.clearRetry(this, step)
}
|
||||
|
||||
/**
 * Checkpoints the transaction and schedules a transaction-level timeout
 * in the storage backend.
 *
 * @param interval - Timeout delay passed to the store.
 */
public async scheduleTransactionTimeout(interval: number): Promise<void> {
  // schedule transaction timeout only if there are async steps
  // (synchronous flows finish within the running process and need none)
  if (!this.getFlow().hasAsyncSteps) {
    return
  }

  await this.saveCheckpoint()
  await DistributedTransaction.keyValueStore.scheduleTransactionTimeout(
    this,
    Date.now(),
    interval
  )
}
|
||||
|
||||
/**
 * Cancels a scheduled transaction-level timeout. No-op for flows without
 * async steps, mirroring scheduleTransactionTimeout().
 */
public async clearTransactionTimeout(): Promise<void> {
  if (!this.getFlow().hasAsyncSteps) {
    return
  }

  await DistributedTransaction.keyValueStore.clearTransactionTimeout(this)
}
|
||||
|
||||
/**
 * Checkpoints the transaction and schedules a timeout for a single step.
 *
 * @param step - The step to guard with a timeout.
 * @param interval - Timeout delay passed to the store.
 */
public async scheduleStepTimeout(
  step: TransactionStep,
  interval: number
): Promise<void> {
  // schedule step timeout only if the step is async — synchronous steps
  // complete (or fail) within the current execution
  if (!step.definition.async) {
    return
  }

  await this.saveCheckpoint()
  await DistributedTransaction.keyValueStore.scheduleStepTimeout(
    this,
    step,
    Date.now(),
    interval
  )
}
|
||||
|
||||
/**
 * Cancels a scheduled step timeout. Skipped for non-async steps (none was
 * scheduled) and for steps already compensating.
 */
public async clearStepTimeout(step: TransactionStep): Promise<void> {
  if (!step.definition.async || step.isCompensating()) {
    return
  }

  await DistributedTransaction.keyValueStore.clearStepTimeout(this, step)
}
|
||||
}
|
||||
|
||||
// Default storage backend: in-memory (non-durable). Applications can
// override this at bootstrap via DistributedTransaction.setStorage().
DistributedTransaction.setStorage(
  new BaseInMemoryDistributedTransactionStorage()
)
|
||||
74
packages/core/orchestration/src/transaction/errors.ts
Normal file
74
packages/core/orchestration/src/transaction/errors.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
export class PermanentStepFailureError extends Error {
|
||||
static isPermanentStepFailureError(
|
||||
error: Error
|
||||
): error is PermanentStepFailureError {
|
||||
return (
|
||||
error instanceof PermanentStepFailureError ||
|
||||
error?.name === "PermanentStepFailure"
|
||||
)
|
||||
}
|
||||
|
||||
constructor(message?: string) {
|
||||
super(message)
|
||||
this.name = "PermanentStepFailure"
|
||||
}
|
||||
}
|
||||
|
||||
export class TransactionStepTimeoutError extends Error {
|
||||
static isTransactionStepTimeoutError(
|
||||
error: Error
|
||||
): error is TransactionStepTimeoutError {
|
||||
return (
|
||||
error instanceof TransactionStepTimeoutError ||
|
||||
error?.name === "TransactionStepTimeoutError"
|
||||
)
|
||||
}
|
||||
|
||||
constructor(message?: string) {
|
||||
super(message)
|
||||
this.name = "TransactionStepTimeoutError"
|
||||
}
|
||||
}
|
||||
|
||||
export class TransactionTimeoutError extends Error {
|
||||
static isTransactionTimeoutError(
|
||||
error: Error
|
||||
): error is TransactionTimeoutError {
|
||||
return (
|
||||
error instanceof TransactionTimeoutError ||
|
||||
error?.name === "TransactionTimeoutError"
|
||||
)
|
||||
}
|
||||
|
||||
constructor(message?: string) {
|
||||
super(message)
|
||||
this.name = "TransactionTimeoutError"
|
||||
}
|
||||
}
|
||||
|
||||
export function serializeError(error) {
|
||||
const serialized = {
|
||||
message: error.message,
|
||||
name: error.name,
|
||||
stack: error.stack,
|
||||
}
|
||||
|
||||
Object.getOwnPropertyNames(error).forEach((key) => {
|
||||
// eslint-disable-next-line no-prototype-builtins
|
||||
if (!serialized.hasOwnProperty(key)) {
|
||||
serialized[key] = error[key]
|
||||
}
|
||||
})
|
||||
|
||||
return serialized
|
||||
}
|
||||
|
||||
export function isErrorLike(value) {
|
||||
return (
|
||||
!!value &&
|
||||
typeof value === "object" &&
|
||||
"name" in value &&
|
||||
"message" in value &&
|
||||
"stack" in value
|
||||
)
|
||||
}
|
||||
7
packages/core/orchestration/src/transaction/index.ts
Normal file
7
packages/core/orchestration/src/transaction/index.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
export * from "./datastore/abstract-storage"
|
||||
export * from "./distributed-transaction"
|
||||
export * from "./errors"
|
||||
export * from "./orchestrator-builder"
|
||||
export * from "./transaction-orchestrator"
|
||||
export * from "./transaction-step"
|
||||
export * from "./types"
|
||||
@@ -0,0 +1,431 @@
|
||||
import { TransactionStepsDefinition } from "./types"
|
||||
|
||||
/**
 * Internal tree node used by OrchestratorBuilder: a step definition
 * augmented with bookkeeping the builder maintains while rewriting the
 * flow (depth within the tree and a back-reference to the parent action).
 * These extra fields are stripped again by build().
 */
interface InternalStep extends TransactionStepsDefinition {
  // Child step(s); an array means the children run as parallel branches.
  next?: InternalStep | InternalStep[]
  // Distance from the synthetic root (root itself uses depth -1).
  depth: number
  // Parent node, or its action name once updateDepths() has run —
  // NOTE(review): the code assigns both InternalStep (load) and
  // step.action strings (updateDepths) here; confirm intended type.
  parent?: InternalStep | null
}
|
||||
|
||||
/**
 * Mutable builder over a TransactionStepsDefinition tree. Supports
 * adding, replacing, inserting, moving, merging and deleting actions,
 * tracking whether any change was made (`hasChanges`), and emitting a
 * clean definition again via build().
 */
export class OrchestratorBuilder {
  // Synthetic root node (depth -1); real steps hang off `steps.next`.
  protected steps: InternalStep
  // Dirty flag; set by every mutating operation, cleared by build().
  protected hasChanges_ = false

  // Whether any mutation happened since the last build()/load().
  get hasChanges() {
    return this.hasChanges_
  }

  constructor(steps?: TransactionStepsDefinition) {
    this.load(steps)
  }

  /**
   * (Re)initializes the builder from a definition. The input is deep
   * cloned via JSON so later mutations never touch the caller's object.
   */
  load(steps?: TransactionStepsDefinition) {
    this.steps = {
      depth: -1,
      parent: null,
      // A definition whose root has no `action` is treated as a bare
      // wrapper around its `next` steps.
      next: steps
        ? JSON.parse(
            JSON.stringify((steps.action ? steps : steps.next) as InternalStep)
          )
        : undefined,
    }

    this.updateDepths(this.steps, {}, 1, -1)
    return this
  }

  /**
   * Appends a new action after the last step of the flow.
   */
  addAction(action: string, options: Partial<TransactionStepsDefinition> = {}) {
    const step = this.findLastStep()
    const newAction = {
      action,
      depth: step.depth + 1,
      parent: step.action,
      ...options,
    } as InternalStep

    step.next = newAction
    this.hasChanges_ = true

    return this
  }

  /**
   * Renames an existing action in place and merges extra options onto it.
   * Throws when `existingAction` is not found.
   */
  replaceAction(
    existingAction: string,
    action: string,
    options: Partial<TransactionStepsDefinition> = {}
  ) {
    const step = this.findOrThrowStepByAction(existingAction)
    step.action = action

    Object.assign(step, options)

    this.hasChanges_ = true
    return this
  }

  /**
   * Inserts a new action between `existingAction` and its parent.
   * Silently does nothing (besides setting the dirty flag) when
   * `existingAction` has no parent in the tree.
   */
  insertActionBefore(
    existingAction: string,
    action: string,
    options: Partial<TransactionStepsDefinition> = {}
  ) {
    const parentStep = this.findParentStepByAction(existingAction)
    if (parentStep) {
      const oldNext = parentStep.next!
      const newDepth = parentStep.depth + 1
      if (Array.isArray(parentStep.next)) {
        const index = parentStep.next.findIndex(
          (step) => step.action === existingAction
        )
        if (index > -1) {
          parentStep.next[index] = {
            action,
            ...options,
            // NOTE(review): oldNext is the whole array here, so
            // oldNext[index] re-links the found sibling as the new
            // step's child — confirm this is the intended branch shape.
            next: oldNext[index],
            depth: newDepth,
          } as InternalStep
        }
      } else {
        parentStep.next = {
          action,
          ...options,
          next: oldNext,
          depth: newDepth,
        } as InternalStep
      }

      this.updateDepths(oldNext as InternalStep, parentStep)
    }

    this.hasChanges_ = true
    return this
  }

  /**
   * Inserts a new action directly after `existingAction`, re-parenting
   * the previous children under the new step. Throws when not found.
   */
  insertActionAfter(
    existingAction: string,
    action: string,
    options: Partial<TransactionStepsDefinition> = {}
  ) {
    const step = this.findOrThrowStepByAction(existingAction)
    const oldNext = step.next
    const newDepth = step.depth + 1
    step.next = {
      action,
      ...options,
      next: oldNext,
      depth: newDepth,
      parent: step.action,
    } as InternalStep

    this.updateDepths(oldNext as InternalStep, step.next)
    this.hasChanges_ = true

    return this
  }

  /**
   * Attaches `newStep` as the (sole) child of `step`, replacing any
   * existing children. Accepts a step node or an action name.
   */
  protected appendTo(step: InternalStep | string, newStep: InternalStep) {
    if (typeof step === "string") {
      step = this.findOrThrowStepByAction(step)
    }

    step.next = {
      ...newStep,
      depth: step.depth + 1,
      parent: step.action,
    } as InternalStep

    this.hasChanges_ = true
    return this
  }

  /**
   * Appends a new action at the end of the branch starting at `to`.
   */
  appendAction(
    action: string,
    to: string,
    options: Partial<TransactionStepsDefinition> = {}
  ) {
    const newAction = {
      action,
      ...options,
    } as InternalStep

    const branch = this.findLastStep(this.findStepByAction(to))
    this.appendTo(branch, newAction)

    return this
  }

  /**
   * Detaches `actionToMove` from its current parent and re-attaches it
   * relative to `targetAction`'s parent.
   *
   * @param runInParallel - add the moved step as a parallel sibling of
   *                        the target instead of replacing the chain.
   * @param mergeNext - keep both the moved step's children and the
   *                    target chain as parallel branches.
   */
  protected move(
    actionToMove: string,
    targetAction: string,
    {
      runInParallel,
      mergeNext,
    }: {
      runInParallel?: boolean
      mergeNext?: boolean
    } = {
      runInParallel: false,
      mergeNext: false,
    }
  ): OrchestratorBuilder {
    const parentActionToMoveStep = this.findParentStepByAction(actionToMove)!
    const parentTargetActionStep = this.findParentStepByAction(targetAction)!
    // NOTE(review): the step to move is searched within the target's
    // parent subtree, not the whole tree — confirm that restriction is
    // intended, and note the non-null `!` makes the error below
    // unreachable in strict terms.
    const actionToMoveStep = this.findStepByAction(
      actionToMove,
      parentTargetActionStep
    )!

    if (!actionToMoveStep) {
      throw new Error(
        `Action "${actionToMove}" could not be found in the following steps of "${targetAction}"`
      )
    }

    // Detach the moved step from its previous parent.
    if (Array.isArray(parentActionToMoveStep.next)) {
      const index = parentActionToMoveStep.next.findIndex(
        (step) => step.action === actionToMove
      )
      if (index > -1) {
        parentActionToMoveStep.next.splice(index, 1)
      }
    } else {
      delete parentActionToMoveStep.next
    }

    if (runInParallel) {
      // Add as a parallel sibling of the target chain.
      if (Array.isArray(parentTargetActionStep.next)) {
        parentTargetActionStep.next.push(actionToMoveStep)
      } else if (parentTargetActionStep.next) {
        parentTargetActionStep.next = [
          parentTargetActionStep.next,
          actionToMoveStep,
        ]
      }
    } else {
      // Splice the moved step in front of the target chain.
      if (actionToMoveStep.next) {
        if (mergeNext) {
          // Keep existing children AND the target chain as branches.
          if (Array.isArray(actionToMoveStep.next)) {
            actionToMoveStep.next.push(
              parentTargetActionStep.next as InternalStep
            )
          } else {
            actionToMoveStep.next = [
              actionToMoveStep.next,
              parentTargetActionStep.next as InternalStep,
            ]
          }
        } else {
          // Append the target chain after the moved step's last child.
          this.appendTo(
            this.findLastStep(actionToMoveStep),
            parentTargetActionStep.next as InternalStep
          )
        }
      } else {
        actionToMoveStep.next = parentTargetActionStep.next
      }

      parentTargetActionStep.next = actionToMoveStep
    }

    this.updateDepths(
      actionToMoveStep as InternalStep,
      parentTargetActionStep,
      1,
      parentTargetActionStep.depth
    )

    this.hasChanges_ = true

    return this
  }

  /**
   * Moves an action in front of `targetAction`'s chain (see move()).
   */
  moveAction(actionToMove: string, targetAction: string): OrchestratorBuilder {
    return this.move(actionToMove, targetAction)
  }

  /**
   * Like moveAction(), but keeps the moved step's existing children as a
   * parallel branch next to the target chain.
   */
  moveAndMergeNextAction(
    actionToMove: string,
    targetAction: string
  ): OrchestratorBuilder {
    return this.move(actionToMove, targetAction, { mergeNext: true })
  }

  /**
   * Moves every listed action to run in parallel with `where`.
   */
  mergeActions(where: string, ...actions: string[]) {
    actions.unshift(where)

    if (actions.length < 2) {
      throw new Error("Cannot merge less than two actions")
    }

    for (const action of actions) {
      if (action !== where) {
        this.move(action, where, { runInParallel: true })
      }
    }

    return this
  }

  /**
   * Removes an action, re-linking its children to its parent.
   */
  deleteAction(action: string, steps: InternalStep = this.steps) {
    const actionStep = this.findOrThrowStepByAction(action)
    const parentStep = this.findParentStepByAction(action, steps)!

    if (Array.isArray(parentStep.next)) {
      const index = parentStep.next.findIndex((step) => step.action === action)
      // NOTE(review): the outer condition already requires
      // actionStep.next, so the inner else (splice) is unreachable and a
      // childless action in a parallel branch is never removed — looks
      // like the outer check should be `index > -1` only; confirm.
      if (index > -1 && actionStep.next) {
        if (actionStep.next) {
          parentStep.next[index] = actionStep.next as InternalStep
        } else {
          parentStep.next.splice(index, 1)
        }
      }
    } else {
      parentStep.next = actionStep.next
    }

    this.updateDepths(
      actionStep.next as InternalStep,
      parentStep,
      1,
      parentStep.depth
    )

    this.hasChanges_ = true

    return this
  }

  /**
   * Removes an action together with its entire subtree (children are
   * NOT re-linked, unlike deleteAction()).
   */
  pruneAction(action: string) {
    const actionStep = this.findOrThrowStepByAction(action)
    const parentStep = this.findParentStepByAction(action, this.steps)!

    if (Array.isArray(parentStep.next)) {
      const index = parentStep.next.findIndex((step) => step.action === action)
      if (index > -1) {
        parentStep.next.splice(index, 1)
      }
    } else {
      delete parentStep.next
    }

    this.hasChanges_ = true
    return this
  }

  /**
   * Depth-first search for a step whose uuid or action matches.
   */
  protected findStepByAction(
    action: string,
    step: InternalStep = this.steps
  ): InternalStep | undefined {
    if (step.uuid === action || step.action === action) {
      return step
    }

    if (Array.isArray(step.next)) {
      for (const subStep of step.next) {
        const found = this.findStepByAction(action, subStep as InternalStep)
        if (found) {
          return found
        }
      }
    } else if (step.next && typeof step.next === "object") {
      return this.findStepByAction(action, step.next as InternalStep)
    }

    return
  }

  /**
   * Like findStepByAction(), but throws when the action is missing.
   */
  protected findOrThrowStepByAction(
    action: string,
    steps: InternalStep = this.steps
  ): InternalStep {
    const step = this.findStepByAction(action, steps)
    if (!step) {
      throw new Error(`Action "${action}" could not be found`)
    }

    return step
  }

  /**
   * Returns the parent node of the step matching `action` (by uuid or
   * action name), or undefined when not found / when `action` is the root.
   */
  protected findParentStepByAction(
    action: string,
    step: InternalStep = this.steps
  ): InternalStep | undefined {
    if (!step.next) {
      return
    }

    const nextSteps = Array.isArray(step.next) ? step.next : [step.next]
    for (const nextStep of nextSteps) {
      if (!nextStep) {
        continue
      }
      if (nextStep.uuid === action || nextStep.action === action) {
        return step
      }
      const foundStep = this.findParentStepByAction(
        action,
        nextStep as InternalStep
      )
      if (foundStep) {
        return foundStep
      }
    }

    return
  }

  /**
   * Walks to the last step of a branch; at forks it follows the LAST
   * parallel branch.
   */
  protected findLastStep(steps: InternalStep = this.steps): InternalStep {
    let step = steps as InternalStep
    while (step.next) {
      step = Array.isArray(step.next)
        ? (step.next[step.next.length - 1] as InternalStep)
        : (step.next as InternalStep)
    }

    return step
  }

  /**
   * Recomputes depth/parent bookkeeping for a subtree after a structural
   * change.
   *
   * @param startingStep - Root of the subtree to fix up (no-op if absent).
   * @param parent - New parent node of startingStep.
   * @param incr - Depth increment applied per level.
   * @param beginFrom - Depth to count from; defaults to the subtree's
   *                    current depth.
   */
  protected updateDepths(
    startingStep: InternalStep,
    parent,
    incr = 1,
    beginFrom?: number
  ): void {
    if (!startingStep) {
      return
    }

    const update = (step: InternalStep, parent, beginFrom) => {
      step.depth = beginFrom + incr
      // Parent is recorded by action name from here on.
      step.parent = parent.action
      if (Array.isArray(step.next)) {
        step.next.forEach((nextAction) => update(nextAction, step, step.depth))
      } else if (step.next) {
        update(step.next, step, step.depth)
      }
    }
    update(startingStep, parent, beginFrom ?? startingStep.depth)
  }

  /**
   * Emits a clean TransactionStepsDefinition: deep-clones the tree and
   * strips the internal depth/parent bookkeeping via a JSON reviver.
   * Also resets the dirty flag.
   */
  build(): TransactionStepsDefinition {
    if (!this.steps.next) {
      return {}
    }

    const ignore = ["depth", "parent"]
    const result = JSON.parse(
      JSON.stringify(
        // A parallel root keeps the synthetic wrapper so the array
        // survives; a single chain is emitted from its first real step.
        Array.isArray(this.steps.next) ? this.steps : this.steps.next,
        null
      ),
      (key, value) => {
        // Returning undefined from the reviver drops the property.
        if (ignore.includes(key)) {
          return
        }

        return value
      }
    )

    this.hasChanges_ = false
    return result
  }
}
|
||||
File diff suppressed because it is too large
Load Diff
221
packages/core/orchestration/src/transaction/transaction-step.ts
Normal file
221
packages/core/orchestration/src/transaction/transaction-step.ts
Normal file
@@ -0,0 +1,221 @@
|
||||
import { MedusaError, TransactionStepState } from "@medusajs/utils"
|
||||
import {
|
||||
DistributedTransaction,
|
||||
TransactionPayload,
|
||||
} from "./distributed-transaction"
|
||||
import { TransactionOrchestrator } from "./transaction-orchestrator"
|
||||
import {
|
||||
TransactionHandlerType,
|
||||
TransactionState,
|
||||
TransactionStepStatus,
|
||||
TransactionStepsDefinition,
|
||||
} from "./types"
|
||||
|
||||
/**
 * Signature of the function the orchestrator calls to execute a step's
 * invoke or compensate action. Receives the step's action id, which phase
 * is running, the assembled payload, and the surrounding transaction /
 * step / orchestrator objects; resolves with the step's result.
 */
export type TransactionStepHandler = (
  actionId: string,
  handlerType: TransactionHandlerType,
  payload: TransactionPayload,
  transaction: DistributedTransaction,
  step: TransactionStep,
  orchestrator: TransactionOrchestrator
) => Promise<unknown>
|
||||
|
||||
/**
 * @class TransactionStep
 * @classdesc A class representing a single step in a transaction flow
 */
export class TransactionStep {
  /**
   * @member id - The id of the step
   * @member depth - The depth of the step in the flow
   * @member definition - The definition of the step
   * @member invoke - The current state and status of the invoke action of the step
   * @member compensate - The current state and status of the compensate action of the step
   * @member attempts - The number of attempts made to execute the step
   * @member failures - The number of failures encountered while executing the step
   * @member lastAttempt - The timestamp of the last attempt made to execute the step
   * @member hasScheduledRetry - A flag indicating if a retry has been scheduled
   * @member retryRescheduledAt - The timestamp of the last retry scheduled
   * @member next - The ids of the next steps in the flow
   * @member saveResponse - A flag indicating if the response of a step should be shared in the transaction context and available to subsequent steps - default is true
   */
  // True once compensation has begun; flips getStates() to `compensate`.
  private stepFailed = false
  id: string
  uuid?: string
  depth: number
  definition: TransactionStepsDefinition
  invoke: {
    state: TransactionStepState
    status: TransactionStepStatus
  }
  compensate: {
    state: TransactionStepState
    status: TransactionStepStatus
  }
  attempts: number
  failures: number
  lastAttempt: number | null
  retryRescheduledAt: number | null
  hasScheduledRetry: boolean
  timedOutAt: number | null
  startedAt?: number
  next: string[]
  saveResponse: boolean

  /**
   * State/status record of the phase currently in effect: `compensate`
   * once compensation began, `invoke` otherwise.
   */
  public getStates() {
    return this.isCompensating() ? this.compensate : this.invoke
  }

  /**
   * Switches the step into compensation mode and resets the attempt
   * counters so compensation gets a fresh retry budget. Idempotent.
   */
  public beginCompensation() {
    if (this.isCompensating()) {
      return
    }

    this.stepFailed = true
    this.attempts = 0
    this.failures = 0
    this.lastAttempt = null
  }

  // Whether the step is in its compensation phase.
  public isCompensating() {
    return this.stepFailed
  }

  // Whether the step is still in its forward (invoke) phase.
  public isInvoking() {
    return !this.stepFailed
  }

  /**
   * Transitions the current phase's state, enforcing the allowed
   * state-machine edges below. Same-state transitions are always allowed.
   *
   * @throws MedusaError (NOT_ALLOWED) on an illegal transition.
   */
  public changeState(toState: TransactionStepState) {
    // Allowed transitions per current state; anything else is rejected.
    const allowed = {
      [TransactionStepState.DORMANT]: [TransactionStepState.NOT_STARTED],
      [TransactionStepState.NOT_STARTED]: [
        TransactionStepState.INVOKING,
        TransactionStepState.COMPENSATING,
        TransactionStepState.FAILED,
        TransactionStepState.SKIPPED,
      ],
      [TransactionStepState.INVOKING]: [
        TransactionStepState.FAILED,
        TransactionStepState.DONE,
        TransactionStepState.TIMEOUT,
      ],
      [TransactionStepState.COMPENSATING]: [
        TransactionStepState.REVERTED,
        TransactionStepState.FAILED,
      ],
      [TransactionStepState.DONE]: [TransactionStepState.COMPENSATING],
    }

    const curState = this.getStates()
    if (
      curState.state === toState ||
      allowed?.[curState.state]?.includes(toState)
    ) {
      curState.state = toState
      return
    }

    throw new MedusaError(
      MedusaError.Types.NOT_ALLOWED,
      `Updating State from "${curState.state}" to "${toState}" is not allowed.`
    )
  }

  /**
   * Transitions the current phase's status, enforcing the allowed edges
   * below. Same-status transitions and moves back to WAITING are always
   * allowed.
   *
   * @throws MedusaError (NOT_ALLOWED) on an illegal transition.
   */
  public changeStatus(toStatus: TransactionStepStatus) {
    const allowed = {
      [TransactionStepStatus.WAITING]: [
        TransactionStepStatus.OK,
        TransactionStepStatus.TEMPORARY_FAILURE,
        TransactionStepStatus.PERMANENT_FAILURE,
      ],
      [TransactionStepStatus.TEMPORARY_FAILURE]: [
        TransactionStepStatus.IDLE,
        TransactionStepStatus.PERMANENT_FAILURE,
      ],
      [TransactionStepStatus.PERMANENT_FAILURE]: [TransactionStepStatus.IDLE],
    }

    const curState = this.getStates()
    if (
      curState.status === toStatus ||
      toStatus === TransactionStepStatus.WAITING ||
      allowed?.[curState.status]?.includes(toStatus)
    ) {
      curState.status = toStatus
      return
    }

    throw new MedusaError(
      MedusaError.Types.NOT_ALLOWED,
      `Updating Status from "${curState.status}" to "${toStatus}" is not allowed.`
    )
  }

  // Whether a retry has already been scheduled for this step.
  hasRetryScheduled(): boolean {
    return !!this.hasScheduledRetry
  }

  // Whether the definition configures a retry interval.
  hasRetryInterval(): boolean {
    return !!this.definition.retryInterval
  }

  // Whether the definition configures a step timeout.
  hasTimeout(): boolean {
    return !!this.getTimeout()
  }

  // The configured step timeout (seconds, per definition docs), if any.
  getTimeout(): number | undefined {
    return this.definition.timeout
  }

  /**
   * Whether a retry may run now: always true without a configured
   * interval (retry immediately), otherwise only once the interval
   * (seconds) has elapsed since the last attempt.
   */
  canRetry(): boolean {
    return (
      !this.definition.retryInterval ||
      !!(
        this.lastAttempt &&
        this.definition.retryInterval &&
        Date.now() - this.lastAttempt > this.definition.retryInterval * 1e3
      )
    )
  }

  // Whether the definition allows retrying while the step is WAITING.
  hasAwaitingRetry(): boolean {
    return !!this.definition.retryIntervalAwaiting
  }

  /**
   * Whether an awaiting-retry may run now: requires the awaiting interval
   * to be configured and elapsed (seconds) since the last attempt.
   */
  canRetryAwaiting(): boolean {
    return !!(
      this.hasAwaitingRetry() &&
      this.lastAttempt &&
      Date.now() - this.lastAttempt >
        this.definition.retryIntervalAwaiting! * 1e3
    )
  }

  /**
   * Whether the invoke handler may run for this step: either a fresh,
   * not-yet-started step while the flow is invoking, or any step in
   * TEMPORARY_FAILURE (eligible for retry).
   */
  canInvoke(flowState: TransactionState): boolean {
    const { status, state } = this.getStates()
    return (
      (!this.isCompensating() &&
        state === TransactionStepState.NOT_STARTED &&
        flowState === TransactionState.INVOKING) ||
      status === TransactionStepStatus.TEMPORARY_FAILURE
    )
  }

  /**
   * Whether the compensate handler may run: compensation begun, not yet
   * started, and the flow itself is compensating.
   */
  canCompensate(flowState: TransactionState): boolean {
    return (
      this.isCompensating() &&
      this.getStates().state === TransactionStepState.NOT_STARTED &&
      flowState === TransactionState.COMPENSATING
    )
  }

  /**
   * Whether an in-flight invoke may still be cancelled: only while the
   * forward phase is waiting or in a temporary failure.
   */
  canCancel(): boolean {
    return (
      !this.isCompensating() &&
      [
        TransactionStepStatus.WAITING,
        TransactionStepStatus.TEMPORARY_FAILURE,
      ].includes(this.getStates().status)
    )
  }
}
|
||||
186
packages/core/orchestration/src/transaction/types.ts
Normal file
186
packages/core/orchestration/src/transaction/types.ts
Normal file
@@ -0,0 +1,186 @@
|
||||
import { DistributedTransaction } from "./distributed-transaction"
|
||||
import { TransactionStep } from "./transaction-step"
|
||||
export {
|
||||
TransactionHandlerType,
|
||||
TransactionState,
|
||||
TransactionStepStatus,
|
||||
} from "@medusajs/utils"
|
||||
|
||||
/**
 * Defines the structure and behavior of a single step within a transaction workflow.
 */
export type TransactionStepsDefinition = {
  /**
   * A unique identifier for the transaction step.
   * This is set automatically when declaring a workflow with "createWorkflow".
   */
  uuid?: string

  /**
   * Specifies the action to be performed in this step.
   * "name" is an alias for action when creating a workflow with "createWorkflow".
   */
  action?: string

  /**
   * Indicates whether the workflow should continue even if there is a permanent failure in this step.
   * When set to true, the children steps of this step will not be executed and their status will be marked as TransactionStepState.SKIPPED.
   */
  continueOnPermanentFailure?: boolean

  /**
   * If true, no compensation action will be triggered for this step in case of a failure.
   */
  noCompensation?: boolean

  /**
   * The maximum number of times this step should be retried in case of temporary failures.
   * The default is 0 (no retries).
   */
  maxRetries?: number

  /**
   * The interval (in seconds) between retry attempts after a temporary failure.
   * The default is to retry immediately.
   */
  retryInterval?: number

  /**
   * The interval (in seconds) to retry a step even if its status is "TransactionStepStatus.WAITING".
   */
  retryIntervalAwaiting?: number

  /**
   * The maximum amount of time (in seconds) to wait for this step to complete.
   * This is NOT an execution timeout: the step will always be executed and its response awaited.
   * If the response is not received within the timeout set, the step will be marked as "TransactionStepStatus.TIMEOUT" and the workflow will be reverted as soon as it receives the response.
   */
  timeout?: number

  /**
   * If true, the step is executed asynchronously, meaning the workflow will not wait for this step's response.
   * Async steps require their responses to be set using "setStepSuccess" or "setStepFailure", unless combined with "backgroundExecution: true".
   * If combined with a timeout and no response is set within that interval, the step will be marked as "TransactionStepStatus.TIMEOUT" and the workflow will be reverted immediately.
   */
  async?: boolean

  /**
   * Applies to "async" steps only, allowing them to run in the background and automatically complete without external intervention.
   * Ideal for time-consuming tasks that are complete once execution finishes, in contrast with standard "async" operations that require a response to be set at a later stage.
   */
  backgroundExecution?: boolean

  /**
   * If true, the compensation function for this step is executed asynchronously, meaning the response has to be set using "setStepSuccess" or "setStepFailure".
   */
  compensateAsync?: boolean

  /**
   * If true, the workflow will not wait for sibling steps to complete before moving to the next step.
   */
  noWait?: boolean

  /**
   * If true, the response of this step will be stored.
   * Default is true.
   */
  saveResponse?: boolean

  /**
   * Defines the next step(s) to execute after this step. Can be a single step or an array of steps.
   */
  next?: TransactionStepsDefinition | TransactionStepsDefinition[]

  // TODO: add metadata field for customizations
}
|
||||
|
||||
/**
 * Defines the options for a transaction model, which are applicable to the entire workflow.
 */
export type TransactionModelOptions = {
  /**
   * The global timeout for the entire transaction workflow (in seconds).
   */
  timeout?: number

  /**
   * If true, the state of the transaction will be persisted.
   */
  store?: boolean

  /**
   * How long to retain a finished transaction's data — presumably in
   * seconds, matching the other time options here; TODO confirm against
   * the storage implementation.
   */
  retentionTime?: number

  /**
   * If true, the execution details of each step will be stored.
   */
  storeExecution?: boolean

  // TODO: add metadata field for customizations
}
|
||||
|
||||
/**
 * A registered transaction definition: its identifier, the step tree,
 * a hash of the flow (used to detect definition changes), and optional
 * workflow-wide options.
 */
export type TransactionModel = {
  id: string
  flow: TransactionStepsDefinition
  hash: string
  options?: TransactionModelOptions
}
|
||||
|
||||
/**
 * Event names emitted over the lifecycle of a distributed transaction:
 * transaction-level events (begin/resume/finish/timeout/compensateBegin)
 * and per-step events (begin/success/failure/awaiting and their
 * compensation counterparts).
 */
export enum DistributedTransactionEvent {
  BEGIN = "begin",
  RESUME = "resume",
  COMPENSATE_BEGIN = "compensateBegin",
  FINISH = "finish",
  TIMEOUT = "timeout",
  STEP_BEGIN = "stepBegin",
  STEP_SUCCESS = "stepSuccess",
  STEP_FAILURE = "stepFailure",
  STEP_AWAITING = "stepAwaiting",
  COMPENSATE_STEP_SUCCESS = "compensateStepSuccess",
  COMPENSATE_STEP_FAILURE = "compensateStepFailure",
}
|
||||
|
||||
/**
 * Optional callbacks a consumer can supply to observe a transaction's
 * lifecycle; each maps to one DistributedTransactionEvent.
 */
export type DistributedTransactionEvents = {
  onBegin?: (args: { transaction: DistributedTransaction }) => void
  onResume?: (args: { transaction: DistributedTransaction }) => void
  // Fired when the transaction ends, with its result and/or errors.
  onFinish?: (args: {
    transaction: DistributedTransaction
    result?: unknown
    errors?: unknown[]
  }) => void
  onTimeout?: (args: { transaction: DistributedTransaction }) => void

  onStepBegin?: (args: {
    step: TransactionStep
    transaction: DistributedTransaction
  }) => void

  onStepSuccess?: (args: {
    step: TransactionStep
    transaction: DistributedTransaction
  }) => void

  onStepFailure?: (args: {
    step: TransactionStep
    transaction: DistributedTransaction
  }) => void

  // Fired when an async step starts waiting for an external response.
  onStepAwaiting?: (args: {
    step: TransactionStep
    transaction: DistributedTransaction
  }) => void

  onCompensateBegin?: (args: { transaction: DistributedTransaction }) => void

  onCompensateStepSuccess?: (args: {
    step: TransactionStep
    transaction: DistributedTransaction
  }) => void

  onCompensateStepFailure?: (args: {
    step: TransactionStep
    transaction: DistributedTransaction
  }) => void
}
|
||||
146
packages/core/orchestration/src/workflow/global-workflow.ts
Normal file
146
packages/core/orchestration/src/workflow/global-workflow.ts
Normal file
@@ -0,0 +1,146 @@
|
||||
import { Context, LoadedModule, MedusaContainer } from "@medusajs/types"
|
||||
import { createMedusaContainer } from "@medusajs/utils"
|
||||
import { asValue } from "awilix"
|
||||
|
||||
import {
|
||||
DistributedTransaction,
|
||||
DistributedTransactionEvents,
|
||||
} from "../transaction"
|
||||
import { WorkflowDefinition, WorkflowManager } from "./workflow-manager"
|
||||
|
||||
export class GlobalWorkflow extends WorkflowManager {
|
||||
protected static workflows: Map<string, WorkflowDefinition> = new Map()
|
||||
protected container: MedusaContainer
|
||||
protected context: Context
|
||||
protected subscribe: DistributedTransactionEvents
|
||||
|
||||
/**
 * Builds a GlobalWorkflow bound to a DI container.
 *
 * @param modulesLoaded - Either an existing container (detected via its
 *        `cradle`), a plain object to wrap in a new container, or an
 *        array of loaded modules to register one by one.
 * @param context - Shared request context passed to workflow handlers.
 * @param subscribe - Lifecycle callbacks attached to each run.
 */
constructor(
  modulesLoaded?: LoadedModule[] | MedusaContainer,
  context?: Context,
  subscribe?: DistributedTransactionEvents
) {
  super()

  let container

  if (!Array.isArray(modulesLoaded) && modulesLoaded) {
    // "cradle" distinguishes an awilix container from a plain object.
    if (!("cradle" in modulesLoaded)) {
      container = createMedusaContainer(modulesLoaded)
    } else {
      container = modulesLoaded
    }
  } else if (Array.isArray(modulesLoaded) && modulesLoaded.length) {
    container = createMedusaContainer()

    // Register each loaded module under its declared registration name.
    for (const mod of modulesLoaded) {
      const registrationName = mod.__definition.registrationName
      container.register(registrationName, asValue(mod))
    }
  }

  // NOTE(review): with no/empty modulesLoaded, `container` stays
  // undefined here — confirm downstream code tolerates that.
  this.container = container
  this.context = context ?? {}
  this.subscribe = subscribe ?? {}
}
|
||||
|
||||
async run(workflowId: string, uniqueTransactionId: string, input?: unknown) {
|
||||
if (!WorkflowManager.workflows.has(workflowId)) {
|
||||
throw new Error(`Workflow with id "${workflowId}" not found.`)
|
||||
}
|
||||
|
||||
const workflow = WorkflowManager.workflows.get(workflowId)!
|
||||
|
||||
const orchestrator = workflow.orchestrator
|
||||
|
||||
const transaction = await orchestrator.beginTransaction(
|
||||
uniqueTransactionId,
|
||||
workflow.handler(this.container, this.context),
|
||||
input
|
||||
)
|
||||
|
||||
if (this.subscribe.onStepBegin) {
|
||||
transaction.once("stepBegin", this.subscribe.onStepBegin)
|
||||
}
|
||||
|
||||
if (this.subscribe.onStepSuccess) {
|
||||
transaction.once("stepSuccess", this.subscribe.onStepSuccess)
|
||||
}
|
||||
|
||||
if (this.subscribe.onStepFailure) {
|
||||
transaction.once("stepFailure", this.subscribe.onStepFailure)
|
||||
}
|
||||
|
||||
if (this.subscribe.onStepAwaiting) {
|
||||
transaction.once("stepAwaiting", this.subscribe.onStepAwaiting)
|
||||
}
|
||||
|
||||
await orchestrator.resume(transaction)
|
||||
|
||||
return transaction
|
||||
}
|
||||
|
||||
async registerStepSuccess(
|
||||
workflowId: string,
|
||||
idempotencyKey: string,
|
||||
response?: unknown
|
||||
): Promise<DistributedTransaction> {
|
||||
if (!WorkflowManager.workflows.has(workflowId)) {
|
||||
throw new Error(`Workflow with id "${workflowId}" not found.`)
|
||||
}
|
||||
|
||||
const workflow = WorkflowManager.workflows.get(workflowId)!
|
||||
const orchestrator = workflow.orchestrator
|
||||
orchestrator.once("resume", (transaction) => {
|
||||
if (this.subscribe.onStepBegin) {
|
||||
transaction.once("stepBegin", this.subscribe.onStepBegin)
|
||||
}
|
||||
|
||||
if (this.subscribe.onStepSuccess) {
|
||||
transaction.once("stepSuccess", this.subscribe.onStepSuccess)
|
||||
}
|
||||
|
||||
if (this.subscribe.onStepFailure) {
|
||||
transaction.once("stepFailure", this.subscribe.onStepFailure)
|
||||
}
|
||||
})
|
||||
|
||||
return await workflow.orchestrator.registerStepSuccess(
|
||||
idempotencyKey,
|
||||
workflow.handler(this.container, this.context),
|
||||
undefined,
|
||||
response
|
||||
)
|
||||
}
|
||||
|
||||
async registerStepFailure(
|
||||
workflowId: string,
|
||||
idempotencyKey: string,
|
||||
error?: Error | any
|
||||
): Promise<DistributedTransaction> {
|
||||
if (!WorkflowManager.workflows.has(workflowId)) {
|
||||
throw new Error(`Workflow with id "${workflowId}" not found.`)
|
||||
}
|
||||
|
||||
const workflow = WorkflowManager.workflows.get(workflowId)!
|
||||
const orchestrator = workflow.orchestrator
|
||||
orchestrator.once("resume", (transaction) => {
|
||||
if (this.subscribe.onStepBegin) {
|
||||
transaction.once("stepBegin", this.subscribe.onStepBegin)
|
||||
}
|
||||
|
||||
if (this.subscribe.onStepSuccess) {
|
||||
transaction.once("stepSuccess", this.subscribe.onStepSuccess)
|
||||
}
|
||||
|
||||
if (this.subscribe.onStepFailure) {
|
||||
transaction.once("stepFailure", this.subscribe.onStepFailure)
|
||||
}
|
||||
})
|
||||
|
||||
return await workflow.orchestrator.registerStepFailure(
|
||||
idempotencyKey,
|
||||
error,
|
||||
workflow.handler(this.container, this.context)
|
||||
)
|
||||
}
|
||||
}
|
||||
3
packages/core/orchestration/src/workflow/index.ts
Normal file
3
packages/core/orchestration/src/workflow/index.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
// Barrel file for the workflow package: re-exports the registry
// (workflow-manager), the per-instance runner (local-workflow), and the
// registry-backed runner (global-workflow).
export * from "./workflow-manager"
export * from "./local-workflow"
export * from "./global-workflow"
|
||||
542
packages/core/orchestration/src/workflow/local-workflow.ts
Normal file
542
packages/core/orchestration/src/workflow/local-workflow.ts
Normal file
@@ -0,0 +1,542 @@
|
||||
import { Context, LoadedModule, MedusaContainer } from "@medusajs/types"
|
||||
import {
|
||||
createMedusaContainer,
|
||||
isDefined,
|
||||
isString,
|
||||
MedusaContext,
|
||||
MedusaContextType,
|
||||
MedusaModuleType,
|
||||
} from "@medusajs/utils"
|
||||
import { asValue } from "awilix"
|
||||
import {
|
||||
DistributedTransaction,
|
||||
DistributedTransactionEvent,
|
||||
DistributedTransactionEvents,
|
||||
TransactionModelOptions,
|
||||
TransactionOrchestrator,
|
||||
TransactionStepsDefinition,
|
||||
} from "../transaction"
|
||||
import { OrchestratorBuilder } from "../transaction/orchestrator-builder"
|
||||
import {
|
||||
WorkflowDefinition,
|
||||
WorkflowManager,
|
||||
WorkflowStepHandler,
|
||||
} from "./workflow-manager"
|
||||
|
||||
/**
 * Pair of handlers for a single workflow step: `invoke` runs the step,
 * `compensate` (optional) undoes it when the transaction rolls back.
 */
type StepHandler = {
  invoke: WorkflowStepHandler
  compensate?: WorkflowStepHandler
}
|
||||
|
||||
export class LocalWorkflow {
|
||||
protected container_: MedusaContainer
|
||||
protected workflowId: string
|
||||
protected flow: OrchestratorBuilder
|
||||
protected customOptions: Partial<TransactionModelOptions> = {}
|
||||
protected workflow: WorkflowDefinition
|
||||
protected handlers: Map<string, StepHandler>
|
||||
protected medusaContext?: Context
|
||||
|
||||
get container() {
|
||||
return this.container_
|
||||
}
|
||||
|
||||
set container(modulesLoaded: LoadedModule[] | MedusaContainer) {
|
||||
this.resolveContainer(modulesLoaded)
|
||||
}
|
||||
|
||||
constructor(
|
||||
workflowId: string,
|
||||
modulesLoaded?: LoadedModule[] | MedusaContainer
|
||||
) {
|
||||
const globalWorkflow = WorkflowManager.getWorkflow(workflowId)
|
||||
if (!globalWorkflow) {
|
||||
throw new Error(`Workflow with id "${workflowId}" not found.`)
|
||||
}
|
||||
|
||||
this.flow = new OrchestratorBuilder(globalWorkflow.flow_)
|
||||
this.workflowId = workflowId
|
||||
this.workflow = globalWorkflow
|
||||
this.handlers = new Map(globalWorkflow.handlers_)
|
||||
|
||||
this.resolveContainer(modulesLoaded)
|
||||
}
|
||||
|
||||
private resolveContainer(modulesLoaded?: LoadedModule[] | MedusaContainer) {
|
||||
let container
|
||||
|
||||
if (!Array.isArray(modulesLoaded) && modulesLoaded) {
|
||||
if (!("cradle" in modulesLoaded)) {
|
||||
container = createMedusaContainer(modulesLoaded)
|
||||
} else {
|
||||
container = createMedusaContainer({}, modulesLoaded) // copy container
|
||||
}
|
||||
} else if (Array.isArray(modulesLoaded) && modulesLoaded.length) {
|
||||
container = createMedusaContainer()
|
||||
|
||||
for (const mod of modulesLoaded) {
|
||||
const registrationName = mod.__definition.registrationName
|
||||
container.register(registrationName, asValue(mod))
|
||||
}
|
||||
}
|
||||
|
||||
this.container_ = this.contextualizedMedusaModules(container)
|
||||
}
|
||||
|
||||
private contextualizedMedusaModules(container) {
|
||||
if (!container) {
|
||||
return createMedusaContainer()
|
||||
}
|
||||
|
||||
// eslint-disable-next-line
|
||||
const this_ = this
|
||||
const originalResolver = container.resolve
|
||||
container.resolve = function (registrationName, opts) {
|
||||
const resolved = originalResolver(registrationName, opts)
|
||||
if (resolved?.constructor?.__type !== MedusaModuleType) {
|
||||
return resolved
|
||||
}
|
||||
|
||||
return new Proxy(resolved, {
|
||||
get: function (target, prop) {
|
||||
if (typeof target[prop] !== "function") {
|
||||
return target[prop]
|
||||
}
|
||||
|
||||
return async (...args) => {
|
||||
const ctxIndex = MedusaContext.getIndex(target, prop as string)
|
||||
|
||||
const hasContext = args[ctxIndex!]?.__type === MedusaContextType
|
||||
if (!hasContext && isDefined(ctxIndex)) {
|
||||
const context = this_.medusaContext
|
||||
if (context?.__type === MedusaContextType) {
|
||||
delete context?.manager
|
||||
delete context?.transactionManager
|
||||
|
||||
args[ctxIndex] = context
|
||||
}
|
||||
}
|
||||
return await target[prop].apply(target, [...args])
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
return container
|
||||
}
|
||||
|
||||
protected commit() {
|
||||
const finalFlow = this.flow.build()
|
||||
|
||||
const globalWorkflow = WorkflowManager.getWorkflow(this.workflowId)
|
||||
const customOptions = {
|
||||
...globalWorkflow?.options,
|
||||
...this.customOptions,
|
||||
}
|
||||
|
||||
this.workflow = {
|
||||
id: this.workflowId,
|
||||
flow_: finalFlow,
|
||||
orchestrator: new TransactionOrchestrator(
|
||||
this.workflowId,
|
||||
finalFlow,
|
||||
customOptions
|
||||
),
|
||||
options: customOptions,
|
||||
handler: WorkflowManager.buildHandlers(this.handlers),
|
||||
handlers_: this.handlers,
|
||||
}
|
||||
}
|
||||
|
||||
public getFlow() {
|
||||
if (this.flow.hasChanges) {
|
||||
this.commit()
|
||||
}
|
||||
|
||||
return this.workflow.flow_
|
||||
}
|
||||
|
||||
private registerEventCallbacks({
|
||||
orchestrator,
|
||||
transaction,
|
||||
subscribe,
|
||||
idempotencyKey,
|
||||
}: {
|
||||
orchestrator: TransactionOrchestrator
|
||||
transaction?: DistributedTransaction
|
||||
subscribe?: DistributedTransactionEvents
|
||||
idempotencyKey?: string
|
||||
}) {
|
||||
const modelId = orchestrator.id
|
||||
let transactionId
|
||||
|
||||
if (transaction) {
|
||||
transactionId = transaction!.transactionId
|
||||
} else if (idempotencyKey) {
|
||||
const [, trxId] = idempotencyKey!.split(":")
|
||||
transactionId = trxId
|
||||
}
|
||||
|
||||
const eventWrapperMap = new Map()
|
||||
for (const [key, handler] of Object.entries(subscribe ?? {})) {
|
||||
eventWrapperMap.set(key, (args) => {
|
||||
const { transaction } = args
|
||||
|
||||
if (
|
||||
transaction.transactionId !== transactionId ||
|
||||
transaction.modelId !== modelId
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
handler(args)
|
||||
})
|
||||
}
|
||||
|
||||
if (subscribe?.onBegin) {
|
||||
orchestrator.on(
|
||||
DistributedTransactionEvent.BEGIN,
|
||||
eventWrapperMap.get("onBegin")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onResume) {
|
||||
orchestrator.on(
|
||||
DistributedTransactionEvent.RESUME,
|
||||
eventWrapperMap.get("onResume")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onCompensateBegin) {
|
||||
orchestrator.on(
|
||||
DistributedTransactionEvent.COMPENSATE_BEGIN,
|
||||
eventWrapperMap.get("onCompensateBegin")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onTimeout) {
|
||||
orchestrator.on(
|
||||
DistributedTransactionEvent.TIMEOUT,
|
||||
eventWrapperMap.get("onTimeout")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onFinish) {
|
||||
orchestrator.on(
|
||||
DistributedTransactionEvent.FINISH,
|
||||
eventWrapperMap.get("onFinish")
|
||||
)
|
||||
}
|
||||
|
||||
const resumeWrapper = ({ transaction }) => {
|
||||
if (
|
||||
transaction.modelId !== modelId ||
|
||||
transaction.transactionId !== transactionId
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
if (subscribe?.onStepBegin) {
|
||||
transaction.on(
|
||||
DistributedTransactionEvent.STEP_BEGIN,
|
||||
eventWrapperMap.get("onStepBegin")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onStepSuccess) {
|
||||
transaction.on(
|
||||
DistributedTransactionEvent.STEP_SUCCESS,
|
||||
eventWrapperMap.get("onStepSuccess")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onStepFailure) {
|
||||
transaction.on(
|
||||
DistributedTransactionEvent.STEP_FAILURE,
|
||||
eventWrapperMap.get("onStepFailure")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onStepAwaiting) {
|
||||
transaction.on(
|
||||
DistributedTransactionEvent.STEP_AWAITING,
|
||||
eventWrapperMap.get("onStepAwaiting")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onCompensateStepSuccess) {
|
||||
transaction.on(
|
||||
DistributedTransactionEvent.COMPENSATE_STEP_SUCCESS,
|
||||
eventWrapperMap.get("onCompensateStepSuccess")
|
||||
)
|
||||
}
|
||||
|
||||
if (subscribe?.onCompensateStepFailure) {
|
||||
transaction.on(
|
||||
DistributedTransactionEvent.COMPENSATE_STEP_FAILURE,
|
||||
eventWrapperMap.get("onCompensateStepFailure")
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
if (transaction) {
|
||||
resumeWrapper({ transaction })
|
||||
} else {
|
||||
orchestrator.once("resume", resumeWrapper)
|
||||
}
|
||||
|
||||
const cleanUp = () => {
|
||||
subscribe?.onFinish &&
|
||||
orchestrator.removeListener(
|
||||
DistributedTransactionEvent.FINISH,
|
||||
eventWrapperMap.get("onFinish")
|
||||
)
|
||||
subscribe?.onResume &&
|
||||
orchestrator.removeListener(
|
||||
DistributedTransactionEvent.RESUME,
|
||||
eventWrapperMap.get("onResume")
|
||||
)
|
||||
subscribe?.onBegin &&
|
||||
orchestrator.removeListener(
|
||||
DistributedTransactionEvent.BEGIN,
|
||||
eventWrapperMap.get("onBegin")
|
||||
)
|
||||
subscribe?.onCompensateBegin &&
|
||||
orchestrator.removeListener(
|
||||
DistributedTransactionEvent.COMPENSATE_BEGIN,
|
||||
eventWrapperMap.get("onCompensateBegin")
|
||||
)
|
||||
subscribe?.onTimeout &&
|
||||
orchestrator.removeListener(
|
||||
DistributedTransactionEvent.TIMEOUT,
|
||||
eventWrapperMap.get("onTimeout")
|
||||
)
|
||||
|
||||
orchestrator.removeListener(
|
||||
DistributedTransactionEvent.RESUME,
|
||||
resumeWrapper
|
||||
)
|
||||
|
||||
eventWrapperMap.clear()
|
||||
}
|
||||
|
||||
return {
|
||||
cleanUpEventListeners: cleanUp,
|
||||
}
|
||||
}
|
||||
|
||||
async run(
|
||||
uniqueTransactionId: string,
|
||||
input?: unknown,
|
||||
context?: Context,
|
||||
subscribe?: DistributedTransactionEvents
|
||||
) {
|
||||
if (this.flow.hasChanges) {
|
||||
this.commit()
|
||||
}
|
||||
this.medusaContext = context
|
||||
const { handler, orchestrator } = this.workflow
|
||||
|
||||
const transaction = await orchestrator.beginTransaction(
|
||||
uniqueTransactionId,
|
||||
handler(this.container_, context),
|
||||
input
|
||||
)
|
||||
|
||||
const { cleanUpEventListeners } = this.registerEventCallbacks({
|
||||
orchestrator,
|
||||
transaction,
|
||||
subscribe,
|
||||
})
|
||||
|
||||
await orchestrator.resume(transaction)
|
||||
|
||||
cleanUpEventListeners()
|
||||
|
||||
return transaction
|
||||
}
|
||||
|
||||
async getRunningTransaction(uniqueTransactionId: string, context?: Context) {
|
||||
this.medusaContext = context
|
||||
const { handler, orchestrator } = this.workflow
|
||||
|
||||
const transaction = await orchestrator.retrieveExistingTransaction(
|
||||
uniqueTransactionId,
|
||||
handler(this.container_, context)
|
||||
)
|
||||
|
||||
return transaction
|
||||
}
|
||||
|
||||
async cancel(
|
||||
transactionOrTransactionId: string | DistributedTransaction,
|
||||
context?: Context,
|
||||
subscribe?: DistributedTransactionEvents
|
||||
) {
|
||||
this.medusaContext = context
|
||||
const { orchestrator } = this.workflow
|
||||
|
||||
const transaction = isString(transactionOrTransactionId)
|
||||
? await this.getRunningTransaction(transactionOrTransactionId, context)
|
||||
: transactionOrTransactionId
|
||||
|
||||
const { cleanUpEventListeners } = this.registerEventCallbacks({
|
||||
orchestrator,
|
||||
transaction,
|
||||
subscribe,
|
||||
})
|
||||
|
||||
await orchestrator.cancelTransaction(transaction)
|
||||
|
||||
cleanUpEventListeners()
|
||||
|
||||
return transaction
|
||||
}
|
||||
|
||||
async registerStepSuccess(
|
||||
idempotencyKey: string,
|
||||
response?: unknown,
|
||||
context?: Context,
|
||||
subscribe?: DistributedTransactionEvents
|
||||
): Promise<DistributedTransaction> {
|
||||
this.medusaContext = context
|
||||
const { handler, orchestrator } = this.workflow
|
||||
|
||||
const { cleanUpEventListeners } = this.registerEventCallbacks({
|
||||
orchestrator,
|
||||
idempotencyKey,
|
||||
subscribe,
|
||||
})
|
||||
|
||||
const transaction = await orchestrator.registerStepSuccess(
|
||||
idempotencyKey,
|
||||
handler(this.container_, context),
|
||||
undefined,
|
||||
response
|
||||
)
|
||||
|
||||
cleanUpEventListeners()
|
||||
|
||||
return transaction
|
||||
}
|
||||
|
||||
async registerStepFailure(
|
||||
idempotencyKey: string,
|
||||
error?: Error | any,
|
||||
context?: Context,
|
||||
subscribe?: DistributedTransactionEvents
|
||||
): Promise<DistributedTransaction> {
|
||||
this.medusaContext = context
|
||||
const { handler, orchestrator } = this.workflow
|
||||
|
||||
const { cleanUpEventListeners } = this.registerEventCallbacks({
|
||||
orchestrator,
|
||||
idempotencyKey,
|
||||
subscribe,
|
||||
})
|
||||
|
||||
const transaction = await orchestrator.registerStepFailure(
|
||||
idempotencyKey,
|
||||
error,
|
||||
handler(this.container_, context)
|
||||
)
|
||||
|
||||
cleanUpEventListeners()
|
||||
|
||||
return transaction
|
||||
}
|
||||
|
||||
setOptions(options: Partial<TransactionModelOptions>) {
|
||||
this.customOptions = options
|
||||
return this
|
||||
}
|
||||
|
||||
addAction(
|
||||
action: string,
|
||||
handler: StepHandler,
|
||||
options: Partial<TransactionStepsDefinition> = {}
|
||||
) {
|
||||
this.assertHandler(handler, action)
|
||||
this.handlers.set(action, handler)
|
||||
|
||||
return this.flow.addAction(action, options)
|
||||
}
|
||||
|
||||
replaceAction(
|
||||
existingAction: string,
|
||||
action: string,
|
||||
handler: StepHandler,
|
||||
options: Partial<TransactionStepsDefinition> = {}
|
||||
) {
|
||||
this.assertHandler(handler, action)
|
||||
this.handlers.set(action, handler)
|
||||
|
||||
return this.flow.replaceAction(existingAction, action, options)
|
||||
}
|
||||
|
||||
insertActionBefore(
|
||||
existingAction: string,
|
||||
action: string,
|
||||
handler: StepHandler,
|
||||
options: Partial<TransactionStepsDefinition> = {}
|
||||
) {
|
||||
this.assertHandler(handler, action)
|
||||
this.handlers.set(action, handler)
|
||||
|
||||
return this.flow.insertActionBefore(existingAction, action, options)
|
||||
}
|
||||
|
||||
insertActionAfter(
|
||||
existingAction: string,
|
||||
action: string,
|
||||
handler: StepHandler,
|
||||
options: Partial<TransactionStepsDefinition> = {}
|
||||
) {
|
||||
this.assertHandler(handler, action)
|
||||
this.handlers.set(action, handler)
|
||||
|
||||
return this.flow.insertActionAfter(existingAction, action, options)
|
||||
}
|
||||
|
||||
appendAction(
|
||||
action: string,
|
||||
to: string,
|
||||
handler: StepHandler,
|
||||
options: Partial<TransactionStepsDefinition> = {}
|
||||
) {
|
||||
this.assertHandler(handler, action)
|
||||
this.handlers.set(action, handler)
|
||||
|
||||
return this.flow.appendAction(action, to, options)
|
||||
}
|
||||
|
||||
moveAction(actionToMove: string, targetAction: string): OrchestratorBuilder {
|
||||
return this.flow.moveAction(actionToMove, targetAction)
|
||||
}
|
||||
|
||||
moveAndMergeNextAction(
|
||||
actionToMove: string,
|
||||
targetAction: string
|
||||
): OrchestratorBuilder {
|
||||
return this.flow.moveAndMergeNextAction(actionToMove, targetAction)
|
||||
}
|
||||
|
||||
mergeActions(where: string, ...actions: string[]) {
|
||||
return this.flow.mergeActions(where, ...actions)
|
||||
}
|
||||
|
||||
deleteAction(action: string, parentSteps?) {
|
||||
return this.flow.deleteAction(action, parentSteps)
|
||||
}
|
||||
|
||||
pruneAction(action: string) {
|
||||
return this.flow.pruneAction(action)
|
||||
}
|
||||
|
||||
protected assertHandler(handler: StepHandler, action: string): void | never {
|
||||
if (!handler?.invoke) {
|
||||
throw new Error(
|
||||
`Handler for action "${action}" is missing invoke function.`
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
216
packages/core/orchestration/src/workflow/workflow-manager.ts
Normal file
216
packages/core/orchestration/src/workflow/workflow-manager.ts
Normal file
@@ -0,0 +1,216 @@
|
||||
import { Context, MedusaContainer } from "@medusajs/types"
|
||||
import {
|
||||
DistributedTransaction,
|
||||
OrchestratorBuilder,
|
||||
TransactionHandlerType,
|
||||
TransactionMetadata,
|
||||
TransactionModelOptions,
|
||||
TransactionOrchestrator,
|
||||
TransactionStep,
|
||||
TransactionStepHandler,
|
||||
TransactionStepsDefinition,
|
||||
} from "../transaction"
|
||||
|
||||
/**
 * A registered workflow as stored in the WorkflowManager registry: the built
 * step definition, its orchestrator, the per-action handlers, and the factory
 * producing the TransactionStepHandler for a given container/context.
 */
export interface WorkflowDefinition {
  id: string
  // Factory: binds a container (and optional context) into the step handler
  // the orchestrator invokes for every step.
  handler: (
    container: MedusaContainer,
    context?: Context
  ) => TransactionStepHandler
  orchestrator: TransactionOrchestrator
  // Built step definition (trailing underscore marks internal storage).
  flow_: TransactionStepsDefinition
  // Per-action invoke/compensate handler pairs.
  handlers_: Map<
    string,
    { invoke: WorkflowStepHandler; compensate?: WorkflowStepHandler }
  >
  options: TransactionModelOptions
  requiredModules?: Set<string>
  optionalModules?: Set<string>
}

/** Map from action id to its invoke/compensate handler pair. */
export type WorkflowHandler = Map<
  string,
  { invoke: WorkflowStepHandler; compensate?: WorkflowStepHandler }
>

/**
 * Arguments passed to every step handler: the DI container, the original
 * workflow payload, accumulated results of prior invoke/compensate steps
 * (keyed by action id), plus the transaction/step/orchestrator for advanced
 * use and the optional request context.
 */
export type WorkflowStepHandlerArguments = {
  container: MedusaContainer
  payload: unknown
  invoke: { [actions: string]: unknown }
  compensate: { [actions: string]: unknown }
  metadata: TransactionMetadata
  transaction: DistributedTransaction
  step: TransactionStep
  orchestrator: TransactionOrchestrator
  context?: Context
}

/** A single step handler (invoke or compensate side of an action). */
export type WorkflowStepHandler = (
  args: WorkflowStepHandlerArguments
) => Promise<unknown>
|
||||
|
||||
/**
 * Process-wide registry of workflow definitions, keyed by workflow id.
 * All state lives in the static `workflows` map; instances carry no state.
 */
export class WorkflowManager {
  protected static workflows: Map<string, WorkflowDefinition> = new Map()

  /** Removes a single workflow from the registry (no-op if absent). */
  static unregister(workflowId: string) {
    WorkflowManager.workflows.delete(workflowId)
  }

  /** Clears the entire registry. */
  static unregisterAll() {
    WorkflowManager.workflows.clear()
  }

  /** Returns the live registry map (mutations affect the registry). */
  static getWorkflows() {
    return WorkflowManager.workflows
  }

  /** Looks up a workflow definition, or undefined if not registered. */
  static getWorkflow(workflowId: string) {
    return WorkflowManager.workflows.get(workflowId)
  }

  /**
   * Returns a fresh OrchestratorBuilder seeded with the workflow's flow,
   * for inspecting or deriving a modified step definition.
   * @throws Error when the workflow id is not registered.
   */
  static getTransactionDefinition(workflowId): OrchestratorBuilder {
    if (!WorkflowManager.workflows.has(workflowId)) {
      throw new Error(`Workflow with id "${workflowId}" not found.`)
    }

    const workflow = WorkflowManager.workflows.get(workflowId)!
    return new OrchestratorBuilder(workflow.flow_)
  }

  /**
   * Registers (or re-registers) a workflow. Re-registering with a different
   * step definition throws under the MEDUSA_FF_MEDUSA_V2 feature flag;
   * otherwise the new registration silently replaces the old one.
   */
  static register(
    workflowId: string,
    flow: TransactionStepsDefinition | OrchestratorBuilder | undefined,
    handlers: WorkflowHandler,
    options: TransactionModelOptions = {},
    requiredModules?: Set<string>,
    optionalModules?: Set<string>
  ) {
    const finalFlow = flow instanceof OrchestratorBuilder ? flow.build() : flow

    if (WorkflowManager.workflows.has(workflowId)) {
      // Compare step definitions ignoring the per-step "uuid" field, which
      // differs between otherwise identical builds.
      const excludeStepUuid = (key, value) => {
        return key === "uuid" ? undefined : value
      }

      const areStepsEqual = finalFlow
        ? JSON.stringify(finalFlow, excludeStepUuid) ===
          JSON.stringify(
            WorkflowManager.workflows.get(workflowId)!.flow_,
            excludeStepUuid
          )
        : true

      if (!areStepsEqual) {
        // Loose == against the env string is intentional-looking but note
        // only the exact string "true" enables the strict behavior.
        if (process.env.MEDUSA_FF_MEDUSA_V2 == "true") {
          throw new Error(
            `Workflow with id "${workflowId}" and step definition already exists.`
          )
        }
      }
    }

    // NOTE(review): when `flow` is undefined for an already-registered
    // workflow, flow_ is overwritten with undefined (finalFlow!) while the
    // orchestrator falls back to {} — confirm this handlers-only
    // re-registration is intended.
    WorkflowManager.workflows.set(workflowId, {
      id: workflowId,
      flow_: finalFlow!,
      orchestrator: new TransactionOrchestrator(
        workflowId,
        finalFlow ?? {},
        options
      ),
      handler: WorkflowManager.buildHandlers(handlers),
      handlers_: handlers,
      options,
      requiredModules,
      optionalModules,
    })
  }

  /**
   * Updates an existing workflow: merges the given handlers into the existing
   * handler map (mutating it), rebuilds the orchestrator from the new flow,
   * and merges options over the existing ones.
   * @throws Error when the workflow id is not registered.
   */
  static update(
    workflowId: string,
    flow: TransactionStepsDefinition | OrchestratorBuilder,
    handlers: Map<
      string,
      { invoke: WorkflowStepHandler; compensate?: WorkflowStepHandler }
    >,
    options: TransactionModelOptions = {},
    requiredModules?: Set<string>,
    optionalModules?: Set<string>
  ) {
    if (!WorkflowManager.workflows.has(workflowId)) {
      throw new Error(`Workflow with id "${workflowId}" not found.`)
    }

    const workflow = WorkflowManager.workflows.get(workflowId)!

    // Merge instead of replace: existing handlers not named here survive.
    for (const [key, value] of handlers.entries()) {
      workflow.handlers_.set(key, value)
    }

    const finalFlow = flow instanceof OrchestratorBuilder ? flow.build() : flow
    const updatedOptions = { ...workflow.options, ...options }

    WorkflowManager.workflows.set(workflowId, {
      id: workflowId,
      flow_: finalFlow,
      orchestrator: new TransactionOrchestrator(
        workflowId,
        finalFlow,
        updatedOptions
      ),
      handler: WorkflowManager.buildHandlers(workflow.handlers_),
      handlers_: workflow.handlers_,
      options: updatedOptions,
      requiredModules,
      optionalModules,
    })
  }

  /**
   * Builds the orchestrator-facing step handler: given a container/context,
   * returns a TransactionStepHandler that dispatches each (actionId,
   * handlerType) to the registered invoke/compensate function with the
   * unpacked payload.
   * @throws Error when no handler (or no handler of the requested type) is
   *   registered for the action.
   */
  public static buildHandlers(
    handlers: Map<
      string,
      { invoke: WorkflowStepHandler; compensate?: WorkflowStepHandler }
    >
  ): (container: MedusaContainer, context?: Context) => TransactionStepHandler {
    return (
      container: MedusaContainer,
      context?: Context
    ): TransactionStepHandler => {
      return async (
        actionId: string,
        handlerType: TransactionHandlerType,
        payload: any,
        transaction: DistributedTransaction,
        step: TransactionStep,
        orchestrator: TransactionOrchestrator
      ) => {
        const command = handlers.get(actionId)

        if (!command) {
          throw new Error(`Handler for action "${actionId}" not found.`)
        } else if (!command[handlerType]) {
          throw new Error(
            `"${handlerType}" handler for action "${actionId}" not found.`
          )
        }

        // payload.context carries prior step results and the original input;
        // payload.metadata carries transaction metadata.
        const { invoke, compensate, payload: input } = payload.context
        const { metadata } = payload

        return await command[handlerType]!({
          container,
          payload: input,
          invoke,
          compensate,
          metadata,
          transaction: transaction as DistributedTransaction,
          step,
          orchestrator,
          context,
        })
      }
    }
  }
}
|
||||
|
||||
// Pin a single WorkflowManager on the Node `global` object and re-export it,
// so the static registry stays shared even if this module is loaded more than
// once (e.g. duplicated copies in node_modules). `??=` keeps the first one.
global.WorkflowManager ??= WorkflowManager
exports.WorkflowManager = global.WorkflowManager
|
||||
Reference in New Issue
Block a user