chore(): start moving some packages to the core directory (#7215)
This commit is contained in:
committed by
GitHub
parent
fdee748eed
commit
bbccd6481d
@@ -0,0 +1,18 @@
|
||||
import { arrayIntersection } from "../array-intersection"
|
||||
|
||||
describe("arrayIntersection", function () {
|
||||
it("should return the intersection between two arrays", function () {
|
||||
const output = arrayIntersection(["foo", "bar"], ["bar", "baz"])
|
||||
expect(output).toEqual(["bar"])
|
||||
})
|
||||
|
||||
it("should return an empty array if there is no intersection", function () {
|
||||
const output = arrayIntersection(["bar", "baz"], ["foo", "boo"])
|
||||
expect(output).toEqual([])
|
||||
})
|
||||
|
||||
it("should return an all items when the arrays are equivalent", function () {
|
||||
const output = arrayIntersection(["bar", "baz"], ["baz", "bar"])
|
||||
expect(output).toEqual(["baz", "bar"])
|
||||
})
|
||||
})
|
||||
91
packages/core/utils/src/common/__tests__/build-query.spec.ts
Normal file
91
packages/core/utils/src/common/__tests__/build-query.spec.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import { buildSelects } from "../build-query"
|
||||
|
||||
describe("buildSelects", () => {
|
||||
it("successfully build back select object shape to list", () => {
|
||||
const q = buildSelects([
|
||||
"order",
|
||||
"order.items",
|
||||
"order.swaps",
|
||||
"order.swaps.additional_items",
|
||||
"order.discounts",
|
||||
"order.discounts.rule",
|
||||
"order.claims",
|
||||
"order.claims.additional_items",
|
||||
"additional_items",
|
||||
"additional_items.variant",
|
||||
"return_order",
|
||||
"return_order.items",
|
||||
"return_order.shipping_method",
|
||||
"return_order.shipping_method.tax_lines",
|
||||
])
|
||||
|
||||
expect(q).toEqual({
|
||||
order: {
|
||||
items: true,
|
||||
swaps: {
|
||||
additional_items: true,
|
||||
},
|
||||
discounts: {
|
||||
rule: true,
|
||||
},
|
||||
claims: {
|
||||
additional_items: true,
|
||||
},
|
||||
},
|
||||
additional_items: {
|
||||
variant: true,
|
||||
},
|
||||
return_order: {
|
||||
items: true,
|
||||
shipping_method: {
|
||||
tax_lines: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
describe("buildSelects", () => {
|
||||
it("successfully build back select object shape to list", () => {
|
||||
const q = buildSelects([
|
||||
"order",
|
||||
"order.items",
|
||||
"order.swaps",
|
||||
"order.swaps.additional_items",
|
||||
"order.discounts",
|
||||
"order.discounts.rule",
|
||||
"order.claims",
|
||||
"order.claims.additional_items",
|
||||
"additional_items",
|
||||
"additional_items.variant",
|
||||
"return_order",
|
||||
"return_order.items",
|
||||
"return_order.shipping_method",
|
||||
"return_order.shipping_method.tax_lines",
|
||||
])
|
||||
|
||||
expect(q).toEqual({
|
||||
order: {
|
||||
items: true,
|
||||
swaps: {
|
||||
additional_items: true,
|
||||
},
|
||||
discounts: {
|
||||
rule: true,
|
||||
},
|
||||
claims: {
|
||||
additional_items: true,
|
||||
},
|
||||
},
|
||||
additional_items: {
|
||||
variant: true,
|
||||
},
|
||||
return_order: {
|
||||
items: true,
|
||||
shipping_method: {
|
||||
tax_lines: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,59 @@
|
||||
import { convertItemResponseToUpdateRequest } from "../convert-item-response-to-update-request"
|
||||
|
||||
describe("convertItemResponseToUpdateRequest", function () {
|
||||
it("should return true or false for different types of data", function () {
|
||||
const expectations = [
|
||||
{
|
||||
item: {
|
||||
id: "test-id",
|
||||
test_attr: "test-name",
|
||||
relation_object_with_params: {
|
||||
id: "test-relation-object-id",
|
||||
test_attr: "test-object-name",
|
||||
},
|
||||
relation_object_without_params: {
|
||||
id: "test-relation-object-without-params-id",
|
||||
},
|
||||
relation_array: [
|
||||
{
|
||||
id: "test-relation-array-id",
|
||||
test_attr: "test-array-name",
|
||||
},
|
||||
],
|
||||
},
|
||||
selects: [
|
||||
"id",
|
||||
"test_attr",
|
||||
"relation_object_with_params.id",
|
||||
"relation_object_with_params.test_attr",
|
||||
"relation_object_without_params.id",
|
||||
"relation_array.id",
|
||||
"relation_array.test_attr",
|
||||
],
|
||||
relations: [
|
||||
"relation_object_with_params",
|
||||
"relation_object_without_params",
|
||||
"relation_array",
|
||||
],
|
||||
output: {
|
||||
id: "test-id",
|
||||
test_attr: "test-name",
|
||||
relation_object_with_params: { test_attr: "test-object-name" },
|
||||
relation_array: [{ id: "test-relation-array-id" }],
|
||||
relation_object_without_params_id:
|
||||
"test-relation-object-without-params-id",
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
expectations.forEach((expectation) => {
|
||||
const response = convertItemResponseToUpdateRequest(
|
||||
expectation.item,
|
||||
expectation.selects,
|
||||
expectation.relations
|
||||
)
|
||||
|
||||
expect(response).toEqual(expectation.output)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,92 @@
|
||||
import { createPsqlIndexStatementHelper } from "../create-psql-index-helper"
|
||||
|
||||
describe("createPsqlIndexStatementHelper", function () {
|
||||
it("should generate a simple index", function () {
|
||||
const options = {
|
||||
name: "index_name",
|
||||
tableName: "table_name",
|
||||
columns: "column_name",
|
||||
}
|
||||
|
||||
const indexStatement = createPsqlIndexStatementHelper(options)
|
||||
expect(indexStatement + "").toEqual(
|
||||
`CREATE INDEX IF NOT EXISTS "${options.name}" ON "${options.tableName}" (${options.columns})`
|
||||
)
|
||||
})
|
||||
|
||||
it("should generate a simple index and auto compose its name", function () {
|
||||
const options = {
|
||||
tableName: "table_name",
|
||||
columns: "column_name",
|
||||
}
|
||||
|
||||
const indexStatement = createPsqlIndexStatementHelper(options)
|
||||
expect(indexStatement + "").toEqual(
|
||||
`CREATE INDEX IF NOT EXISTS "IDX_table_name_column_name" ON "${options.tableName}" (${options.columns})`
|
||||
)
|
||||
})
|
||||
|
||||
it("should generate a composite index", function () {
|
||||
const options = {
|
||||
name: "index_name",
|
||||
tableName: "table_name",
|
||||
columns: ["column_name_1", "column_name_2"],
|
||||
}
|
||||
|
||||
const indexStatement = createPsqlIndexStatementHelper(options)
|
||||
expect(indexStatement.expression).toEqual(
|
||||
`CREATE INDEX IF NOT EXISTS "${options.name}" ON "${
|
||||
options.tableName
|
||||
}" (${options.columns.join(", ")})`
|
||||
)
|
||||
})
|
||||
|
||||
it("should generate an index with where clauses", function () {
|
||||
const options = {
|
||||
name: "index_name",
|
||||
tableName: "table_name",
|
||||
columns: ["column_name_1", "column_name_2"],
|
||||
where: "column_name_1 IS NOT NULL",
|
||||
}
|
||||
|
||||
const indexStatement = createPsqlIndexStatementHelper(options)
|
||||
expect(indexStatement.expression).toEqual(
|
||||
`CREATE INDEX IF NOT EXISTS "${options.name}" ON "${
|
||||
options.tableName
|
||||
}" (${options.columns.join(", ")}) WHERE ${options.where}`
|
||||
)
|
||||
})
|
||||
|
||||
it("should generate an index with where clauses and index type", function () {
|
||||
const options = {
|
||||
name: "index_name",
|
||||
tableName: "table_name",
|
||||
columns: ["column_name_1", "column_name_2"],
|
||||
type: "GIN",
|
||||
where: "column_name_1 IS NOT NULL",
|
||||
}
|
||||
|
||||
const indexStatement = createPsqlIndexStatementHelper(options)
|
||||
expect(indexStatement.toString()).toEqual(
|
||||
`CREATE INDEX IF NOT EXISTS "${options.name}" ON "${
|
||||
options.tableName
|
||||
}" USING GIN (${options.columns.join(", ")}) WHERE ${options.where}`
|
||||
)
|
||||
})
|
||||
|
||||
it("should generate unique constraint", function () {
|
||||
const options = {
|
||||
tableName: "table_name",
|
||||
columns: ["column_name_1", "column_name_2"],
|
||||
unique: true,
|
||||
where: "column_name_1 IS NOT NULL",
|
||||
}
|
||||
|
||||
const indexStatement = createPsqlIndexStatementHelper(options)
|
||||
expect(indexStatement.expression).toEqual(
|
||||
`CREATE UNIQUE INDEX IF NOT EXISTS "IDX_table_name_column_name_1_column_name_2_unique" ON "${
|
||||
options.tableName
|
||||
}" (${options.columns.join(", ")}) WHERE ${options.where}`
|
||||
)
|
||||
})
|
||||
})
|
||||
74
packages/core/utils/src/common/__tests__/deep-copy.spec.ts
Normal file
74
packages/core/utils/src/common/__tests__/deep-copy.spec.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { deepCopy } from "../deep-copy"
|
||||
|
||||
class TestA {
|
||||
prop1: any
|
||||
prop2: any
|
||||
|
||||
constructor(prop1: any, prop2: any) {
|
||||
this.prop1 = prop1
|
||||
this.prop2 = prop2
|
||||
}
|
||||
}
|
||||
|
||||
class TestWrapper {
|
||||
prop1: any
|
||||
prop2: any
|
||||
|
||||
constructor(prop1: any, prop2: any) {
|
||||
this.prop1 = prop1
|
||||
this.prop2 = prop2
|
||||
}
|
||||
|
||||
factory() {
|
||||
return new TestA(deepCopy(this.prop1), deepCopy(this.prop2))
|
||||
}
|
||||
}
|
||||
|
||||
class TestWrapperWithoutDeepCopy {
|
||||
prop1: any
|
||||
prop2: any
|
||||
|
||||
constructor(prop1: any, prop2: any) {
|
||||
this.prop1 = prop1
|
||||
this.prop2 = prop2
|
||||
}
|
||||
|
||||
factory() {
|
||||
return new TestA(this.prop1, this.prop2)
|
||||
}
|
||||
}
|
||||
|
||||
describe("deepCopy", () => {
|
||||
it("should deep copy an object", () => {
|
||||
const prop1 = {
|
||||
prop1: 1,
|
||||
}
|
||||
|
||||
const prop2 = {
|
||||
prop1: 3,
|
||||
}
|
||||
|
||||
const wrapperWithoutDeepCopy = new TestWrapperWithoutDeepCopy(prop1, prop2)
|
||||
let factory1 = wrapperWithoutDeepCopy.factory()
|
||||
let factory2 = wrapperWithoutDeepCopy.factory()
|
||||
|
||||
factory1.prop1.prop1 = 2
|
||||
|
||||
expect(wrapperWithoutDeepCopy.prop1).toEqual({ prop1: 2 })
|
||||
expect(factory1.prop1).toEqual({ prop1: 2 })
|
||||
expect(factory2.prop1).toEqual({ prop1: 2 })
|
||||
|
||||
prop1.prop1 = 4
|
||||
prop2.prop1 = 4
|
||||
|
||||
const wrapper = new TestWrapper(prop1, prop2)
|
||||
factory1 = wrapper.factory()
|
||||
factory2 = wrapper.factory()
|
||||
|
||||
factory1.prop1.prop1 = 2
|
||||
|
||||
expect(wrapper.prop1).toEqual({ prop1: 4 })
|
||||
expect(factory1.prop1).toEqual({ prop1: 2 })
|
||||
expect(factory2.prop1).toEqual({ prop1: 4 })
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,33 @@
|
||||
import { deepEqualObj } from "../deep-equal-obj"
|
||||
|
||||
describe("deepEqualObj", function () {
|
||||
it("should return true if objects are equal", function () {
|
||||
const object1 = {
|
||||
foo: "bar",
|
||||
bar: "foo",
|
||||
xar: { foo: "bar", wor: { bar: "foo", ror: ["test", "test1"] } },
|
||||
}
|
||||
const object2 = {
|
||||
foo: "bar",
|
||||
bar: "foo",
|
||||
xar: { foo: "bar", wor: { bar: "foo", ror: ["test", "test1"] } },
|
||||
}
|
||||
|
||||
expect(deepEqualObj(object1, object2)).toBe(true)
|
||||
})
|
||||
|
||||
it("should return false if objects are not equal", function () {
|
||||
const object1 = {
|
||||
foo: "bar",
|
||||
bar: "foo",
|
||||
xar: { foo: "bar", wor: { bar: "foo", ror: ["test", "test1"] } },
|
||||
}
|
||||
const object2 = {
|
||||
foo: "bar",
|
||||
bar: "foo",
|
||||
xar: { foo: "bar", wor: { bar: "foo", ror: ["test", "test1_"] } },
|
||||
}
|
||||
|
||||
expect(deepEqualObj(object1, object2)).toBe(false)
|
||||
})
|
||||
})
|
||||
203
packages/core/utils/src/common/__tests__/deep-flat-map.ts
Normal file
203
packages/core/utils/src/common/__tests__/deep-flat-map.ts
Normal file
@@ -0,0 +1,203 @@
|
||||
import { deepFlatMap } from "../deep-flat-map"
|
||||
|
||||
describe("deepFlatMap", function () {
|
||||
it("should return flat map of nested objects", function () {
|
||||
const data = [
|
||||
{
|
||||
id: "sales_channel_1",
|
||||
stock_locations: [
|
||||
{
|
||||
id: "location_1",
|
||||
fulfillment_sets: [
|
||||
{
|
||||
id: "fset_1",
|
||||
name: "Test 123",
|
||||
service_zones: [
|
||||
{
|
||||
id: "zone_123",
|
||||
shipping_options: [
|
||||
{
|
||||
id: "so_zone_123 1111",
|
||||
calculated_price: {
|
||||
calculated_amount: 3000,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "so_zone_123 22222",
|
||||
calculated_price: {
|
||||
calculated_amount: 6000,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "zone_567",
|
||||
shipping_options: [
|
||||
{
|
||||
id: "zone 567 11111",
|
||||
calculated_price: {
|
||||
calculated_amount: 1230,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "zone 567 22222",
|
||||
calculated_price: {
|
||||
calculated_amount: 1230,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
id: "location_2",
|
||||
fulfillment_sets: [
|
||||
{
|
||||
id: "fset_2",
|
||||
name: "fset name 2",
|
||||
service_zones: [
|
||||
{
|
||||
id: "zone_ABC",
|
||||
shipping_options: [
|
||||
{
|
||||
id: "zone_abc_unique",
|
||||
calculated_price: {
|
||||
calculated_amount: 70,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
|
||||
{
|
||||
id: "sales_channel_2",
|
||||
stock_locations: [
|
||||
{
|
||||
id: "location_5",
|
||||
fulfillment_sets: [
|
||||
{
|
||||
id: "fset_aaa",
|
||||
name: "Test aaa",
|
||||
service_zones: [
|
||||
{
|
||||
id: "zone_aaa",
|
||||
shipping_options: [
|
||||
{
|
||||
id: "so_zone_aaa aaaa",
|
||||
calculated_price: {
|
||||
calculated_amount: 500,
|
||||
},
|
||||
},
|
||||
{
|
||||
id: "so_zone_aaa bbbb",
|
||||
calculated_price: {
|
||||
calculated_amount: 12,
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
]
|
||||
|
||||
const result = deepFlatMap(
|
||||
data,
|
||||
"stock_locations.fulfillment_sets.service_zones.shipping_options.calculated_price",
|
||||
({
|
||||
root_,
|
||||
stock_locations,
|
||||
fulfillment_sets,
|
||||
service_zones,
|
||||
shipping_options,
|
||||
calculated_price,
|
||||
}) => {
|
||||
return {
|
||||
sales_channel_id: root_.id,
|
||||
stock_location_id: stock_locations.id,
|
||||
fulfillment_set_id: fulfillment_sets.id,
|
||||
fulfillment_set_name: fulfillment_sets.name,
|
||||
service_zone_id: service_zones.id,
|
||||
shipping_option_id: shipping_options.id,
|
||||
price: calculated_price.calculated_amount,
|
||||
}
|
||||
}
|
||||
)
|
||||
|
||||
expect(result).toEqual([
|
||||
{
|
||||
sales_channel_id: "sales_channel_1",
|
||||
stock_location_id: "location_1",
|
||||
fulfillment_set_id: "fset_1",
|
||||
fulfillment_set_name: "Test 123",
|
||||
service_zone_id: "zone_123",
|
||||
shipping_option_id: "so_zone_123 1111",
|
||||
price: 3000,
|
||||
},
|
||||
{
|
||||
sales_channel_id: "sales_channel_1",
|
||||
stock_location_id: "location_1",
|
||||
fulfillment_set_id: "fset_1",
|
||||
fulfillment_set_name: "Test 123",
|
||||
service_zone_id: "zone_123",
|
||||
shipping_option_id: "so_zone_123 22222",
|
||||
price: 6000,
|
||||
},
|
||||
{
|
||||
sales_channel_id: "sales_channel_1",
|
||||
stock_location_id: "location_1",
|
||||
fulfillment_set_id: "fset_1",
|
||||
fulfillment_set_name: "Test 123",
|
||||
service_zone_id: "zone_567",
|
||||
shipping_option_id: "zone 567 11111",
|
||||
price: 1230,
|
||||
},
|
||||
{
|
||||
sales_channel_id: "sales_channel_1",
|
||||
stock_location_id: "location_1",
|
||||
fulfillment_set_id: "fset_1",
|
||||
fulfillment_set_name: "Test 123",
|
||||
service_zone_id: "zone_567",
|
||||
shipping_option_id: "zone 567 22222",
|
||||
price: 1230,
|
||||
},
|
||||
{
|
||||
sales_channel_id: "sales_channel_1",
|
||||
stock_location_id: "location_2",
|
||||
fulfillment_set_id: "fset_2",
|
||||
fulfillment_set_name: "fset name 2",
|
||||
service_zone_id: "zone_ABC",
|
||||
shipping_option_id: "zone_abc_unique",
|
||||
price: 70,
|
||||
},
|
||||
{
|
||||
sales_channel_id: "sales_channel_2",
|
||||
stock_location_id: "location_5",
|
||||
fulfillment_set_id: "fset_aaa",
|
||||
fulfillment_set_name: "Test aaa",
|
||||
service_zone_id: "zone_aaa",
|
||||
shipping_option_id: "so_zone_aaa aaaa",
|
||||
price: 500,
|
||||
},
|
||||
{
|
||||
sales_channel_id: "sales_channel_2",
|
||||
stock_location_id: "location_5",
|
||||
fulfillment_set_id: "fset_aaa",
|
||||
fulfillment_set_name: "Test aaa",
|
||||
service_zone_id: "zone_aaa",
|
||||
shipping_option_id: "so_zone_aaa bbbb",
|
||||
price: 12,
|
||||
},
|
||||
])
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,20 @@
|
||||
import { getDuplicates } from "../get-duplicates"
|
||||
|
||||
describe("getDuplicates", function () {
|
||||
it("should return an empty array if there are no duplicates", function () {
|
||||
const output = getDuplicates(["foo", "bar", "baz"])
|
||||
expect(output).toHaveLength(0)
|
||||
})
|
||||
|
||||
it("should return a singular duplicate", function () {
|
||||
const output = getDuplicates(["foo", "bar", "baz", "baz", "baz"])
|
||||
expect(output).toHaveLength(1)
|
||||
expect(output[0]).toEqual("baz")
|
||||
})
|
||||
|
||||
it("should return all duplicates in the array", function () {
|
||||
const output = getDuplicates(["foo", "bar", "bar", "baz", "baz", "baz"])
|
||||
expect(output).toHaveLength(2)
|
||||
expect(output).toEqual(expect.arrayContaining(["baz", "bar"]))
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,62 @@
|
||||
import { getSelectsAndRelationsFromObjectArray } from "../get-selects-and-relations-from-object-array"
|
||||
|
||||
describe("getSelectsAndRelationsFromObjectArray", function () {
|
||||
it("should return true or false for different types of data", function () {
|
||||
const expectations = [
|
||||
{
|
||||
input: [
|
||||
{
|
||||
attr_string: "string",
|
||||
attr_boolean: true,
|
||||
attr_null: null,
|
||||
attr_undefined: undefined,
|
||||
attr_object: {
|
||||
attr_string: "string",
|
||||
attr_boolean: true,
|
||||
attr_null: null,
|
||||
attr_undefined: undefined,
|
||||
},
|
||||
attr_array: [
|
||||
{
|
||||
attr_object: {
|
||||
attr_string: "string",
|
||||
attr_boolean: true,
|
||||
attr_null: null,
|
||||
attr_undefined: undefined,
|
||||
},
|
||||
},
|
||||
{
|
||||
attr_object: {
|
||||
attr_string: "string",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
],
|
||||
output: {
|
||||
selects: [
|
||||
"attr_string",
|
||||
"attr_boolean",
|
||||
"attr_null",
|
||||
"attr_undefined",
|
||||
"attr_object.attr_string",
|
||||
"attr_object.attr_boolean",
|
||||
"attr_object.attr_null",
|
||||
"attr_object.attr_undefined",
|
||||
"attr_array.attr_object.attr_string",
|
||||
"attr_array.attr_object.attr_boolean",
|
||||
"attr_array.attr_object.attr_null",
|
||||
"attr_array.attr_object.attr_undefined",
|
||||
],
|
||||
relations: ["attr_object", "attr_array", "attr_array.attr_object"],
|
||||
},
|
||||
},
|
||||
]
|
||||
|
||||
expectations.forEach((expectation) => {
|
||||
expect(getSelectsAndRelationsFromObjectArray(expectation.input)).toEqual(
|
||||
expectation.output
|
||||
)
|
||||
})
|
||||
})
|
||||
})
|
||||
50
packages/core/utils/src/common/__tests__/group-by.spec.ts
Normal file
50
packages/core/utils/src/common/__tests__/group-by.spec.ts
Normal file
@@ -0,0 +1,50 @@
|
||||
import { groupBy } from "../group-by"
|
||||
|
||||
const array = [
|
||||
{
|
||||
id: "test-id-1",
|
||||
property: "test-id-1-property-1",
|
||||
},
|
||||
{
|
||||
id: "test-id-1",
|
||||
property: "test-id-1-property-2",
|
||||
},
|
||||
{
|
||||
id: "test-id-2",
|
||||
property: "test-id-2-property-1",
|
||||
},
|
||||
{
|
||||
id: "test-id-2",
|
||||
property: "test-id-2-property-2",
|
||||
},
|
||||
{
|
||||
id: "test-id-3",
|
||||
property: "test-id-3-property-1",
|
||||
},
|
||||
]
|
||||
|
||||
const mapToObject = (map: Map<any, any>) => Object.fromEntries(map.entries())
|
||||
|
||||
describe("groupBy", function () {
|
||||
it("should return a map grouped by an identifier", function () {
|
||||
const response = mapToObject(groupBy(array, "id"))
|
||||
|
||||
expect(response).toEqual({
|
||||
"test-id-1": [
|
||||
{ id: "test-id-1", property: "test-id-1-property-1" },
|
||||
{ id: "test-id-1", property: "test-id-1-property-2" },
|
||||
],
|
||||
"test-id-2": [
|
||||
{ id: "test-id-2", property: "test-id-2-property-1" },
|
||||
{ id: "test-id-2", property: "test-id-2-property-2" },
|
||||
],
|
||||
"test-id-3": [{ id: "test-id-3", property: "test-id-3-property-1" }],
|
||||
})
|
||||
})
|
||||
|
||||
it("should return empty map if identifier is not found in array", function () {
|
||||
const response = mapToObject(groupBy(array, "doesnotexist"))
|
||||
|
||||
expect(response).toEqual({})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,107 @@
|
||||
import {
|
||||
DatabaseErrorCode,
|
||||
handlePostgresDatabaseError,
|
||||
} from "../handle-postgres-database-error"
|
||||
import { EOL } from "os"
|
||||
|
||||
describe("handlePostgresDataError", function () {
|
||||
it("should throw a specific message on database does not exists", function () {
|
||||
const error = new Error("database does not exist")
|
||||
Object.assign(error, { code: DatabaseErrorCode.databaseDoesNotExist })
|
||||
|
||||
let outputError: any
|
||||
try {
|
||||
handlePostgresDatabaseError(error)
|
||||
} catch (e) {
|
||||
outputError = e
|
||||
}
|
||||
|
||||
expect(outputError.message).toEqual(
|
||||
`The specified PostgreSQL database does not exist. Please create it and try again.${EOL}${error.message}`
|
||||
)
|
||||
})
|
||||
|
||||
it("should throw a specific message on database connection failure", function () {
|
||||
const error = new Error("database does not exist")
|
||||
Object.assign(error, { code: DatabaseErrorCode.connectionFailure })
|
||||
|
||||
let outputError: any
|
||||
try {
|
||||
handlePostgresDatabaseError(error)
|
||||
} catch (e) {
|
||||
outputError = e
|
||||
}
|
||||
|
||||
expect(outputError.message).toEqual(
|
||||
`Failed to establish a connection to PostgreSQL. Please ensure the following is true and try again:
|
||||
- You have a PostgreSQL database running
|
||||
- You have passed the correct credentials in medusa-config.js
|
||||
- You have formatted the database connection string correctly. See below:
|
||||
"postgres://[username]:[password]@[host]:[post]/[db_name]" - If there is no password, you can omit it from the connection string
|
||||
${EOL}
|
||||
${error.message}`
|
||||
)
|
||||
})
|
||||
|
||||
it("should throw a specific message on database wrong credentials", function () {
|
||||
const error = new Error("database does not exist")
|
||||
Object.assign(error, { code: DatabaseErrorCode.wrongCredentials })
|
||||
|
||||
let outputError: any
|
||||
try {
|
||||
handlePostgresDatabaseError(error)
|
||||
} catch (e) {
|
||||
outputError = e
|
||||
}
|
||||
|
||||
expect(outputError.message).toEqual(
|
||||
`The specified credentials does not exists for the specified PostgreSQL database.${EOL}${error.message}`
|
||||
)
|
||||
})
|
||||
|
||||
it("should throw a specific message on database not found", function () {
|
||||
const error = new Error("database does not exist")
|
||||
Object.assign(error, { code: DatabaseErrorCode.notFound })
|
||||
|
||||
let outputError: any
|
||||
try {
|
||||
handlePostgresDatabaseError(error)
|
||||
} catch (e) {
|
||||
outputError = e
|
||||
}
|
||||
|
||||
expect(outputError.message).toEqual(
|
||||
`The specified connection string for your PostgreSQL database might have illegal characters. Please check that it only contains allowed characters [a-zA-Z0-9]${EOL}${error.message}`
|
||||
)
|
||||
})
|
||||
|
||||
it("should throw a specific message on database migration missing", function () {
|
||||
const error = new Error("database does not exist")
|
||||
Object.assign(error, { code: DatabaseErrorCode.migrationMissing })
|
||||
|
||||
let outputError: any
|
||||
try {
|
||||
handlePostgresDatabaseError(error)
|
||||
} catch (e) {
|
||||
outputError = e
|
||||
}
|
||||
|
||||
expect(outputError.message).toEqual(
|
||||
`Migrations missing. Please run 'medusa migrations run' and try again.`
|
||||
)
|
||||
})
|
||||
|
||||
it("should re throw unhandled error code", function () {
|
||||
const error = new Error("database does not exist")
|
||||
Object.assign(error, { code: "test" })
|
||||
|
||||
let outputError: any
|
||||
try {
|
||||
handlePostgresDatabaseError(error)
|
||||
} catch (e) {
|
||||
outputError = e
|
||||
}
|
||||
|
||||
expect(outputError.message).toEqual("database does not exist")
|
||||
})
|
||||
})
|
||||
21
packages/core/utils/src/common/__tests__/is-email.spec.ts
Normal file
21
packages/core/utils/src/common/__tests__/is-email.spec.ts
Normal file
@@ -0,0 +1,21 @@
|
||||
import { validateEmail } from "../is-email"
|
||||
|
||||
describe("validateEmail", () => {
|
||||
it("successfully validates an email", () => {
|
||||
expect(validateEmail("test@email.com")).toBe("test@email.com")
|
||||
expect(validateEmail("test.test@email.com")).toBe("test.test@email.com")
|
||||
expect(validateEmail("test.test123@email.com")).toBe(
|
||||
"test.test123@email.com"
|
||||
)
|
||||
})
|
||||
|
||||
it("throws on an invalidates email", () => {
|
||||
expect.assertions(1)
|
||||
|
||||
try {
|
||||
validateEmail("not-an-email")
|
||||
} catch (e) {
|
||||
expect(e.message).toBe("The email is not valid")
|
||||
}
|
||||
})
|
||||
})
|
||||
61
packages/core/utils/src/common/__tests__/is-present.spec.ts
Normal file
61
packages/core/utils/src/common/__tests__/is-present.spec.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import { isPresent } from "../is-present"
|
||||
|
||||
describe("isPresent", function () {
|
||||
it("should return true or false for different types of data", function () {
|
||||
const expectations = [
|
||||
{
|
||||
input: null,
|
||||
output: false,
|
||||
},
|
||||
{
|
||||
input: undefined,
|
||||
output: false,
|
||||
},
|
||||
{
|
||||
input: "Testing",
|
||||
output: true,
|
||||
},
|
||||
{
|
||||
input: "",
|
||||
output: false,
|
||||
},
|
||||
{
|
||||
input: {},
|
||||
output: false,
|
||||
},
|
||||
{
|
||||
input: { test: 1 },
|
||||
output: true,
|
||||
},
|
||||
{
|
||||
input: [],
|
||||
output: false,
|
||||
},
|
||||
{
|
||||
input: [{ test: 1 }],
|
||||
output: true,
|
||||
},
|
||||
{
|
||||
input: new Map([["test", "test"]]),
|
||||
output: true,
|
||||
},
|
||||
{
|
||||
input: new Map([]),
|
||||
output: false,
|
||||
},
|
||||
|
||||
{
|
||||
input: new Set(["test"]),
|
||||
output: true,
|
||||
},
|
||||
{
|
||||
input: new Set([]),
|
||||
output: false,
|
||||
},
|
||||
]
|
||||
|
||||
expectations.forEach((expectation) => {
|
||||
expect(isPresent(expectation.input)).toEqual(expectation.output)
|
||||
})
|
||||
})
|
||||
})
|
||||
43
packages/core/utils/src/common/__tests__/map-object-to.ts
Normal file
43
packages/core/utils/src/common/__tests__/map-object-to.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { mapObjectTo, MapToConfig } from "../map-object-to"
|
||||
|
||||
const input = {
|
||||
a: [{ id: "1" }, { id: "2" }],
|
||||
b: [{ id: "3" }, { id: "4", handle: "handle1" }],
|
||||
c: [{ id: "5", sku: "sku1" }, { id: "6" }],
|
||||
}
|
||||
|
||||
const mapToConfig: MapToConfig = {
|
||||
a: [{ mapTo: "a.id", valueFrom: "id" }],
|
||||
b: [
|
||||
{ mapTo: "b.id", valueFrom: "id" },
|
||||
{ mapTo: "b.handle", valueFrom: "handle" },
|
||||
],
|
||||
c: [
|
||||
{ mapTo: "c.id", valueFrom: "id" },
|
||||
{ mapTo: "c.sku", valueFrom: "sku" },
|
||||
],
|
||||
}
|
||||
|
||||
describe("mapObjectTo", function () {
|
||||
it("should return a new object with the keys remapped and the values picked from the original object based on the map config", function () {
|
||||
const remappedObject = mapObjectTo(input, mapToConfig)
|
||||
|
||||
expect(remappedObject).toEqual({
|
||||
"a.id": ["1", "2"],
|
||||
"b.id": ["3", "4"],
|
||||
"b.handle": ["handle1"],
|
||||
"c.id": ["5", "6"],
|
||||
"c.sku": ["sku1"],
|
||||
})
|
||||
})
|
||||
|
||||
it("should return a new object with only the picked properties", function () {
|
||||
const remappedObject = mapObjectTo(input, mapToConfig, {
|
||||
pick: ["a.id"],
|
||||
})
|
||||
|
||||
expect(remappedObject).toEqual({
|
||||
"a.id": ["1", "2"],
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,42 @@
|
||||
import { objectToStringPath } from "../object-to-string-path"
|
||||
|
||||
describe("objectToStringPath", function () {
|
||||
it("should return only the properties path of the properties that are set to true", function () {
|
||||
const res = objectToStringPath(
|
||||
{
|
||||
product: true,
|
||||
variants: {
|
||||
title: true,
|
||||
prices: {
|
||||
amount: true,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
includeParentPropertyFields: false,
|
||||
}
|
||||
)
|
||||
|
||||
expect(res).toEqual(["product", "variants.title", "variants.prices.amount"])
|
||||
})
|
||||
|
||||
it("should return a string path from an object including properties that are object and contains other properties set to true", function () {
|
||||
const res = objectToStringPath({
|
||||
product: true,
|
||||
variants: {
|
||||
title: true,
|
||||
prices: {
|
||||
amount: true,
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
expect(res).toEqual([
|
||||
"product",
|
||||
"variants",
|
||||
"variants.title",
|
||||
"variants.prices",
|
||||
"variants.prices.amount",
|
||||
])
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,12 @@
|
||||
import { partitionArray } from "../../../dist"
|
||||
|
||||
describe("partitionArray", function () {
|
||||
it("should split array according to predicate", function () {
|
||||
const res = partitionArray([1, 2, 3, 4, 5], (x) => x % 2 === 0)
|
||||
|
||||
expect(res).toEqual([
|
||||
[2, 4],
|
||||
[1, 3, 5],
|
||||
])
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,111 @@
|
||||
import { pickValueFromObject } from "../pick-value-from-object"
|
||||
|
||||
describe("pickValueFromObject", function () {
|
||||
it("should return true or false for different types of data", function () {
|
||||
const expectations = [
|
||||
{
|
||||
input: {
|
||||
1: "attribute.another_attribute",
|
||||
2: {
|
||||
attribute: {
|
||||
another_attribute: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
output: "test",
|
||||
},
|
||||
{
|
||||
input: {
|
||||
1: "attribute.another_attribute.array_attribute",
|
||||
2: {
|
||||
attribute: {
|
||||
another_attribute: [
|
||||
{
|
||||
array_attribute: "test 1",
|
||||
},
|
||||
{
|
||||
array_attribute: "test 2",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
output: ["test 1", "test 2"],
|
||||
},
|
||||
{
|
||||
input: {
|
||||
1: "attribute.another_attribute.array_attribute.deep_array_attribute",
|
||||
2: {
|
||||
attribute: {
|
||||
another_attribute: [
|
||||
{
|
||||
array_attribute: [
|
||||
{
|
||||
deep_array_attribute: "test 1",
|
||||
},
|
||||
{
|
||||
deep_array_attribute: "test 2",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
array_attribute: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
output: ["test 1", "test 2"],
|
||||
},
|
||||
{
|
||||
input: {
|
||||
1: "attribute.another_attribute.array_attribute",
|
||||
2: {
|
||||
attribute: {
|
||||
another_attribute: [
|
||||
{
|
||||
array_attribute: [
|
||||
{
|
||||
deep_array_attribute: "test 1",
|
||||
},
|
||||
{
|
||||
deep_array_attribute: "test 2",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
array_attribute: [],
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
output: [
|
||||
{
|
||||
deep_array_attribute: "test 1",
|
||||
},
|
||||
{
|
||||
deep_array_attribute: "test 2",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
input: {
|
||||
1: "attribute.missing_attribute",
|
||||
2: {
|
||||
attribute: {
|
||||
another_attribute: "test",
|
||||
},
|
||||
},
|
||||
},
|
||||
output: undefined,
|
||||
},
|
||||
]
|
||||
|
||||
expectations.forEach((expectation) => {
|
||||
expect(
|
||||
pickValueFromObject(expectation.input["1"], expectation.input["2"])
|
||||
).toEqual(expectation.output)
|
||||
})
|
||||
})
|
||||
})
|
||||
33
packages/core/utils/src/common/__tests__/pluralize.spec.ts
Normal file
33
packages/core/utils/src/common/__tests__/pluralize.spec.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { pluralize } from "../plurailze"
|
||||
|
||||
describe("pluralize", function () {
|
||||
it("should pluralize any words", function () {
|
||||
const words = [
|
||||
"apple",
|
||||
"box",
|
||||
"day",
|
||||
"country",
|
||||
"baby",
|
||||
"knife",
|
||||
"hero",
|
||||
"potato",
|
||||
"address",
|
||||
]
|
||||
|
||||
const expectedOutput = [
|
||||
"apples",
|
||||
"boxes",
|
||||
"days",
|
||||
"countries",
|
||||
"babies",
|
||||
"knives",
|
||||
"heroes",
|
||||
"potatoes",
|
||||
"addresses",
|
||||
]
|
||||
|
||||
words.forEach((word, index) => {
|
||||
expect(pluralize(word)).toBe(expectedOutput[index])
|
||||
})
|
||||
})
|
||||
})
|
||||
58
packages/core/utils/src/common/__tests__/promise-all.spec.ts
Normal file
58
packages/core/utils/src/common/__tests__/promise-all.spec.ts
Normal file
@@ -0,0 +1,58 @@
|
||||
import { promiseAll } from "../promise-all"
|
||||
import { EOL } from "os"
|
||||
|
||||
describe("promiseAll", function () {
|
||||
it("should throw an error if any of the promises throw", async function () {
|
||||
const res = await promiseAll([
|
||||
Promise.resolve(1),
|
||||
(async () => {
|
||||
throw new Error("error")
|
||||
})(),
|
||||
Promise.resolve(3),
|
||||
]).catch((e) => e)
|
||||
|
||||
expect(res.message).toBe("error")
|
||||
})
|
||||
|
||||
it("should throw errors if any of the promises throw and aggregate them", async function () {
|
||||
const res = await promiseAll(
|
||||
[
|
||||
Promise.resolve(1),
|
||||
(async () => {
|
||||
throw new Error("error")
|
||||
})(),
|
||||
(async () => {
|
||||
throw new Error("error2")
|
||||
})(),
|
||||
Promise.resolve(3),
|
||||
],
|
||||
{
|
||||
aggregateErrors: true,
|
||||
}
|
||||
).catch((e) => e)
|
||||
|
||||
expect(res.message).toBe(["error", "error2"].join(EOL))
|
||||
})
|
||||
|
||||
it("should return all values if all promises are fulfilled", async function () {
|
||||
const res = await promiseAll([
|
||||
Promise.resolve(1),
|
||||
Promise.resolve(2),
|
||||
Promise.resolve(3),
|
||||
])
|
||||
|
||||
expect(res).toEqual([1, 2, 3])
|
||||
})
|
||||
|
||||
it("should return all values if all promises are fulfilled including waiting for nested promises", async function () {
|
||||
const res = await promiseAll([
|
||||
Promise.resolve(1),
|
||||
(async () => {
|
||||
await promiseAll([Promise.resolve(1), Promise.resolve(2)])
|
||||
})(),
|
||||
Promise.resolve(3),
|
||||
])
|
||||
|
||||
expect(res).toEqual([1, undefined, 3])
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,148 @@
|
||||
import { remoteQueryObjectFromString } from "../remote-query-object-from-string"
|
||||
|
||||
const fields = [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"url",
|
||||
"metadata",
|
||||
"tags.id",
|
||||
"tags.created_at",
|
||||
"tags.updated_at",
|
||||
"tags.deleted_at",
|
||||
"tags.value",
|
||||
"options.id",
|
||||
"options.created_at",
|
||||
"options.updated_at",
|
||||
"options.deleted_at",
|
||||
"options.title",
|
||||
"options.product_id",
|
||||
"options.metadata",
|
||||
"options.values.id",
|
||||
"options.values.created_at",
|
||||
"options.values.updated_at",
|
||||
"options.values.deleted_at",
|
||||
"options.values.value",
|
||||
"options.values.option_id",
|
||||
"options.values.variant_id",
|
||||
"options.values.metadata",
|
||||
]
|
||||
|
||||
describe("remoteQueryObjectFromString", function () {
|
||||
it("should return a remote query object", function () {
|
||||
const output = remoteQueryObjectFromString({
|
||||
entryPoint: "product",
|
||||
variables: {
|
||||
q: "name",
|
||||
options: {
|
||||
name: "option_name",
|
||||
},
|
||||
"options.values": {
|
||||
value: 123,
|
||||
},
|
||||
},
|
||||
fields,
|
||||
})
|
||||
|
||||
expect(output).toEqual({
|
||||
product: {
|
||||
__args: {
|
||||
q: "name",
|
||||
},
|
||||
fields: [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"url",
|
||||
"metadata",
|
||||
],
|
||||
isServiceAccess: false,
|
||||
tags: {
|
||||
fields: ["id", "created_at", "updated_at", "deleted_at", "value"],
|
||||
},
|
||||
|
||||
options: {
|
||||
__args: {
|
||||
name: "option_name",
|
||||
},
|
||||
fields: [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"title",
|
||||
"product_id",
|
||||
"metadata",
|
||||
],
|
||||
values: {
|
||||
__args: {
|
||||
value: 123,
|
||||
},
|
||||
fields: [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"value",
|
||||
"option_id",
|
||||
"variant_id",
|
||||
"metadata",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
|
||||
it("should return a remote query object using service entry point", function () {
|
||||
const output = remoteQueryObjectFromString({
|
||||
service: "product",
|
||||
variables: {},
|
||||
fields,
|
||||
})
|
||||
|
||||
expect(output).toEqual({
|
||||
product: {
|
||||
__args: {},
|
||||
fields: [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"url",
|
||||
"metadata",
|
||||
],
|
||||
isServiceAccess: true,
|
||||
tags: {
|
||||
fields: ["id", "created_at", "updated_at", "deleted_at", "value"],
|
||||
},
|
||||
|
||||
options: {
|
||||
fields: [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"title",
|
||||
"product_id",
|
||||
"metadata",
|
||||
],
|
||||
values: {
|
||||
fields: [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"value",
|
||||
"option_id",
|
||||
"variant_id",
|
||||
"metadata",
|
||||
],
|
||||
},
|
||||
},
|
||||
},
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,219 @@
|
||||
import { remoteQueryObjectToString } from "../remote-query-object-to-string"

// Fixture: a nested remote-query object for a product. Each level carries a
// `fields` array; relation sub-objects (images, tags, type, collection,
// options, options.values, variants, variants.options, profile) nest further.
const remoteQueryObject = {
  fields: [
    "id",
    "title",
    "subtitle",
    "status",
    "external_id",
    "description",
    "handle",
    "is_giftcard",
    "discountable",
    "thumbnail",
    "collection_id",
    "type_id",
    "weight",
    "length",
    "height",
    "width",
    "hs_code",
    "origin_country",
    "mid_code",
    "material",
    "created_at",
    "updated_at",
    "deleted_at",
    "metadata",
  ],
  images: {
    fields: ["id", "created_at", "updated_at", "deleted_at", "url", "metadata"],
  },
  tags: {
    fields: ["id", "created_at", "updated_at", "deleted_at", "value"],
  },

  type: {
    fields: ["id", "created_at", "updated_at", "deleted_at", "value"],
  },

  collection: {
    fields: ["title", "handle", "id", "created_at", "updated_at", "deleted_at"],
  },

  options: {
    fields: [
      "id",
      "created_at",
      "updated_at",
      "deleted_at",
      "title",
      "product_id",
      "metadata",
    ],
    // Second nesting level: option values under options.
    values: {
      fields: [
        "id",
        "created_at",
        "updated_at",
        "deleted_at",
        "value",
        "option_id",
        "variant_id",
        "metadata",
      ],
    },
  },

  variants: {
    fields: [
      "id",
      "created_at",
      "updated_at",
      "deleted_at",
      "title",
      "product_id",
      "sku",
      "barcode",
      "ean",
      "upc",
      "variant_rank",
      "inventory_quantity",
      "allow_backorder",
      "manage_inventory",
      "hs_code",
      "origin_country",
      "mid_code",
      "material",
      "weight",
      "length",
      "height",
      "width",
      "metadata",
    ],

    // Second nesting level: option values under variants.
    options: {
      fields: [
        "id",
        "created_at",
        "updated_at",
        "deleted_at",
        "value",
        "option_id",
        "variant_id",
        "metadata",
      ],
    },
  },
  profile: {
    fields: ["id", "created_at", "updated_at", "deleted_at", "name", "type"],
  },
}
|
||||
|
||||
describe("remoteQueryObjectToString", function () {
|
||||
it("should return a string array of fields/relations", function () {
|
||||
const output = remoteQueryObjectToString(remoteQueryObject)
|
||||
|
||||
expect(output).toEqual([
|
||||
"id",
|
||||
"title",
|
||||
"subtitle",
|
||||
"status",
|
||||
"external_id",
|
||||
"description",
|
||||
"handle",
|
||||
"is_giftcard",
|
||||
"discountable",
|
||||
"thumbnail",
|
||||
"collection_id",
|
||||
"type_id",
|
||||
"weight",
|
||||
"length",
|
||||
"height",
|
||||
"width",
|
||||
"hs_code",
|
||||
"origin_country",
|
||||
"mid_code",
|
||||
"material",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"metadata",
|
||||
"images.id",
|
||||
"images.created_at",
|
||||
"images.updated_at",
|
||||
"images.deleted_at",
|
||||
"images.url",
|
||||
"images.metadata",
|
||||
"tags.id",
|
||||
"tags.created_at",
|
||||
"tags.updated_at",
|
||||
"tags.deleted_at",
|
||||
"tags.value",
|
||||
"type.id",
|
||||
"type.created_at",
|
||||
"type.updated_at",
|
||||
"type.deleted_at",
|
||||
"type.value",
|
||||
"collection.title",
|
||||
"collection.handle",
|
||||
"collection.id",
|
||||
"collection.created_at",
|
||||
"collection.updated_at",
|
||||
"collection.deleted_at",
|
||||
"options.id",
|
||||
"options.created_at",
|
||||
"options.updated_at",
|
||||
"options.deleted_at",
|
||||
"options.title",
|
||||
"options.product_id",
|
||||
"options.metadata",
|
||||
"options.values.id",
|
||||
"options.values.created_at",
|
||||
"options.values.updated_at",
|
||||
"options.values.deleted_at",
|
||||
"options.values.value",
|
||||
"options.values.option_id",
|
||||
"options.values.variant_id",
|
||||
"options.values.metadata",
|
||||
"variants.id",
|
||||
"variants.created_at",
|
||||
"variants.updated_at",
|
||||
"variants.deleted_at",
|
||||
"variants.title",
|
||||
"variants.product_id",
|
||||
"variants.sku",
|
||||
"variants.barcode",
|
||||
"variants.ean",
|
||||
"variants.upc",
|
||||
"variants.variant_rank",
|
||||
"variants.inventory_quantity",
|
||||
"variants.allow_backorder",
|
||||
"variants.manage_inventory",
|
||||
"variants.hs_code",
|
||||
"variants.origin_country",
|
||||
"variants.mid_code",
|
||||
"variants.material",
|
||||
"variants.weight",
|
||||
"variants.length",
|
||||
"variants.height",
|
||||
"variants.width",
|
||||
"variants.metadata",
|
||||
"variants.options.id",
|
||||
"variants.options.created_at",
|
||||
"variants.options.updated_at",
|
||||
"variants.options.deleted_at",
|
||||
"variants.options.value",
|
||||
"variants.options.option_id",
|
||||
"variants.options.variant_id",
|
||||
"variants.options.metadata",
|
||||
"profile.id",
|
||||
"profile.created_at",
|
||||
"profile.updated_at",
|
||||
"profile.deleted_at",
|
||||
"profile.name",
|
||||
"profile.type",
|
||||
])
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,22 @@
|
||||
import { removeUndefined } from "../remove-undefined"
|
||||
|
||||
describe("removeUndefined", function () {
|
||||
it("should remove all undefined fields from an object", function () {
|
||||
const withUndefined = {
|
||||
foo: undefined,
|
||||
bar: "baz",
|
||||
foo2: null,
|
||||
}
|
||||
expect(withUndefined.hasOwnProperty("foo")).toBe(true)
|
||||
|
||||
const output = removeUndefined(withUndefined)
|
||||
expect(output.hasOwnProperty("foo")).toBe(false)
|
||||
expect(output.hasOwnProperty("bar")).toBe(true)
|
||||
expect(output.hasOwnProperty("foo2")).toBe(true)
|
||||
})
|
||||
|
||||
it("should return an empty object as-is", function () {
|
||||
const output = removeUndefined({})
|
||||
expect(output).toEqual({})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,68 @@
|
||||
import { stringToSelectRelationObject } from "../string-to-select-relation-object"
|
||||
|
||||
const fields = [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"url",
|
||||
"metadata",
|
||||
"tags.id",
|
||||
"tags.created_at",
|
||||
"tags.updated_at",
|
||||
"tags.deleted_at",
|
||||
"tags.value",
|
||||
"options.id",
|
||||
"options.created_at",
|
||||
"options.updated_at",
|
||||
"options.deleted_at",
|
||||
"options.title",
|
||||
"options.product_id",
|
||||
"options.metadata",
|
||||
"options.values.id",
|
||||
"options.values.created_at",
|
||||
"options.values.updated_at",
|
||||
"options.values.deleted_at",
|
||||
"options.values.value",
|
||||
"options.values.option_id",
|
||||
"options.values.variant_id",
|
||||
"options.values.metadata",
|
||||
]
|
||||
|
||||
describe("stringToSelectRelationObject", function () {
|
||||
it("should return an object containing the select and relation properties", function () {
|
||||
const output = stringToSelectRelationObject(fields)
|
||||
|
||||
expect(output).toEqual({
|
||||
select: [
|
||||
"id",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"deleted_at",
|
||||
"url",
|
||||
"metadata",
|
||||
"tags.id",
|
||||
"tags.created_at",
|
||||
"tags.updated_at",
|
||||
"tags.deleted_at",
|
||||
"tags.value",
|
||||
"options.id",
|
||||
"options.created_at",
|
||||
"options.updated_at",
|
||||
"options.deleted_at",
|
||||
"options.title",
|
||||
"options.product_id",
|
||||
"options.metadata",
|
||||
"options.values.id",
|
||||
"options.values.created_at",
|
||||
"options.values.updated_at",
|
||||
"options.values.deleted_at",
|
||||
"options.values.value",
|
||||
"options.values.option_id",
|
||||
"options.values.variant_id",
|
||||
"options.values.metadata",
|
||||
],
|
||||
relations: ["tags", "options", "options.values"],
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,36 @@
|
||||
import { toCamelCase } from "../to-camel-case"
|
||||
|
||||
describe("toCamelCase", function () {
|
||||
it("should convert all cases to camel case", function () {
|
||||
const expectations = [
|
||||
{
|
||||
input: "testing-camelize",
|
||||
output: "testingCamelize",
|
||||
},
|
||||
{
|
||||
input: "testing-Camelize",
|
||||
output: "testingCamelize",
|
||||
},
|
||||
{
|
||||
input: "TESTING-CAMELIZE",
|
||||
output: "testingCamelize",
|
||||
},
|
||||
{
|
||||
input: "this_is-A-test",
|
||||
output: "thisIsATest",
|
||||
},
|
||||
{
|
||||
input: "this_is-A-test ANOTHER",
|
||||
output: "thisIsATestAnother",
|
||||
},
|
||||
{
|
||||
input: "testingAlreadyCamelized",
|
||||
output: "testingAlreadyCamelized",
|
||||
},
|
||||
]
|
||||
|
||||
expectations.forEach((expectation) => {
|
||||
expect(toCamelCase(expectation.input)).toEqual(expectation.output)
|
||||
})
|
||||
})
|
||||
})
|
||||
@@ -0,0 +1,36 @@
|
||||
import { upperCaseFirst } from "../upper-case-first"
|
||||
|
||||
describe("upperCaseFirst", function () {
|
||||
it("should convert first letter of the word to capital letter", function () {
|
||||
const expectations = [
|
||||
{
|
||||
input: "testing capitalize",
|
||||
output: "Testing capitalize",
|
||||
},
|
||||
{
|
||||
input: "testing",
|
||||
output: "Testing",
|
||||
},
|
||||
{
|
||||
input: "Testing",
|
||||
output: "Testing",
|
||||
},
|
||||
{
|
||||
input: "TESTING",
|
||||
output: "TESTING",
|
||||
},
|
||||
{
|
||||
input: "t",
|
||||
output: "T",
|
||||
},
|
||||
{
|
||||
input: "",
|
||||
output: "",
|
||||
},
|
||||
]
|
||||
|
||||
expectations.forEach((expectation) => {
|
||||
expect(upperCaseFirst(expectation.input)).toEqual(expectation.output)
|
||||
})
|
||||
})
|
||||
})
|
||||
32
packages/core/utils/src/common/alter-columns-helper.ts
Normal file
32
packages/core/utils/src/common/alter-columns-helper.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
/**
 * Generates a PL/pgSQL `DO` block that applies `alterExpression` to each of
 * the given columns of `tableName`, but only when the column actually exists
 * (a NOTICE is raised for missing columns instead of failing the migration).
 *
 * NOTE(review): "Colummn" in the exported name is a typo, but renaming would
 * break existing callers — leave as-is unless all call sites are updated.
 * NOTE(review): `tableName`, `columns` and `alterExpression` are interpolated
 * directly into the SQL text — only pass trusted, migration-controlled values.
 *
 * @param tableName - table whose columns are to be altered
 * @param columns - column names to (conditionally) alter
 * @param alterExpression - SQL fragment appended after `ALTER COLUMN %I`,
 *   e.g. `TYPE text` or `SET NOT NULL`
 * @returns the complete SQL script as a string
 */
export function generatePostgresAlterColummnIfExistStatement(
  tableName: string,
  columns: string[],
  alterExpression: string
) {
  let script = `
  DO $$
  DECLARE
    current_column text;
  BEGIN`

  // One existence-guarded ALTER per column, executed inside the DO block.
  columns.forEach((column) => {
    script += `
    current_column := '${column}';
    IF EXISTS (
      SELECT 1
      FROM information_schema.columns
      WHERE table_name = '${tableName}'
      AND column_name = current_column
    ) THEN
      EXECUTE format('ALTER TABLE %I ALTER COLUMN %I ${alterExpression}', '${tableName}', current_column);
    ELSE
      RAISE NOTICE 'Column % does not exist or alteration condition not met.', current_column;
    END IF;`
  })

  script += `
  END$$;
  `

  return script
}
|
||||
15
packages/core/utils/src/common/array-difference.ts
Normal file
15
packages/core/utils/src/common/array-difference.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
type ArrayDifferenceElement = string | number
|
||||
|
||||
export function arrayDifference<TElement = ArrayDifferenceElement>(
|
||||
mainArray: TElement[],
|
||||
differingArray: TElement[]
|
||||
): TElement[] {
|
||||
const mainArraySet = new Set(mainArray)
|
||||
const differingArraySet = new Set(differingArray)
|
||||
|
||||
const difference = [...mainArraySet].filter(
|
||||
(element) => !differingArraySet.has(element)
|
||||
)
|
||||
|
||||
return difference
|
||||
}
|
||||
17
packages/core/utils/src/common/array-intersection.ts
Normal file
17
packages/core/utils/src/common/array-intersection.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
type ArrayIntersectionElement = string | number
|
||||
|
||||
export function arrayIntersection<TElement = ArrayIntersectionElement>(
|
||||
firstArray: TElement[],
|
||||
secondArray: TElement[]
|
||||
): TElement[] {
|
||||
const firstArraySet = new Set(firstArray)
|
||||
const res = new Set<TElement>()
|
||||
|
||||
secondArray.forEach((element) => {
|
||||
if (firstArraySet.has(element)) {
|
||||
res.add(element)
|
||||
}
|
||||
})
|
||||
|
||||
return Array.from(res)
|
||||
}
|
||||
70
packages/core/utils/src/common/build-query.ts
Normal file
70
packages/core/utils/src/common/build-query.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
// Those utils are used in a typeorm context and we can't be sure that they can be used elsewhere
|
||||
|
||||
import { objectFromStringPath } from "./object-from-string-path"
|
||||
|
||||
|
||||
// Nested sort-direction tree, e.g. { items: { title: "ASC" } }.
type Order = {
  [key: string]: "ASC" | "DESC" | Order
}

// Nested boolean tree describing which columns to select per relation.
type Selects = {
  [key: string]: boolean | Selects
}

// Nested boolean tree describing which relations to load.
type Relations = {
  [key: string]: boolean | Relations
}
|
||||
|
||||
/**
 * Builds a nested `Selects` tree from dot-path select strings,
 * e.g. ["order", "order.items"] -> { order: { items: true } }.
 */
export function buildSelects(selectCollection: string[]): Selects {
  return buildRelationsOrSelect(selectCollection)
}

/**
 * Builds a nested `Relations` tree from dot-path relation strings.
 */
export function buildRelations(relationCollection: string[]): Relations {
  return buildRelationsOrSelect(relationCollection)
}

// Shared implementation — both shapes are the same nested-boolean tree.
function buildRelationsOrSelect(collection: string[]): Selects | Relations {
  return objectFromStringPath(collection)
}
|
||||
|
||||
/**
|
||||
* Convert an order of dot string into a nested object
|
||||
* @example
|
||||
* input: { id: "ASC", "items.title": "ASC", "items.variant.title": "ASC" }
|
||||
* output: {
|
||||
* "id": "ASC",
|
||||
* "items": {
|
||||
* "id": "ASC",
|
||||
* "variant": {
|
||||
* "title": "ASC"
|
||||
* }
|
||||
* },
|
||||
* }
|
||||
* @param orderBy
|
||||
*/
|
||||
export function buildOrder<T>(orderBy: { [k: string]: "ASC" | "DESC" }): Order {
|
||||
const output: Order = {}
|
||||
|
||||
const orderKeys = Object.keys(orderBy)
|
||||
|
||||
for (const order of orderKeys) {
|
||||
if (order.indexOf(".") > -1) {
|
||||
const nestedOrder = order.split(".")
|
||||
|
||||
let parent = output
|
||||
|
||||
while (nestedOrder.length > 1) {
|
||||
const nestedRelation = nestedOrder.shift() as string
|
||||
parent = (parent[nestedRelation] as Order) ??= {}
|
||||
}
|
||||
|
||||
parent[nestedOrder[0]] = orderBy[order]
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
output[order] = orderBy[order]
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
2
packages/core/utils/src/common/camel-to-snake-case.ts
Normal file
2
packages/core/utils/src/common/camel-to-snake-case.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
export const camelToSnakeCase = (string) =>
|
||||
string.replace(/([a-z])([A-Z])/g, "$1_$2").toLowerCase()
|
||||
9
packages/core/utils/src/common/container.ts
Normal file
9
packages/core/utils/src/common/container.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
// Well-known dependency-injection container registration keys, shared across
// packages so that services are always registered/resolved under one name.
export const ContainerRegistrationKeys = {
  PG_CONNECTION: "__pg_connection__",
  MANAGER: "manager",
  CONFIG_MODULE: "configModule",
  LOGGER: "logger",
  REMOTE_QUERY: "remoteQuery",
  REMOTE_LINK: "remoteLink",
  FEATURE_FLAG_ROUTER: "featureFlagRouter",
}
|
||||
@@ -0,0 +1,104 @@
|
||||
import { isObject } from "../common/is-object"
|
||||
|
||||
// A record that must carry an `id`; any other properties are allowed.
interface ItemRecord extends Record<string, any> {
  id: string
}
|
||||
|
||||
/**
 * Converts an item as returned from a query (selected fields plus loaded
 * relations) back into the shape of an update request:
 * `{ id, ...selected fields, ...sanitized relations }`.
 *
 * @param item - the queried record (must have an `id`)
 * @param selects - dot-notation paths of the selected fields
 * @param relations - dot-notation paths of the loaded relations
 * @param fromManyRelationships - internal recursion flag; when true the item
 *   is a child of a *-to-many relation and only its `id` is kept
 */
export function convertItemResponseToUpdateRequest(
  item: ItemRecord,
  selects: string[],
  relations: string[],
  fromManyRelationships: boolean = false
): ItemRecord {
  const newItem: ItemRecord = {
    id: item.id,
  }

  // If item is a child of a many relationship, we just need to pass in the id of the item
  if (fromManyRelationships) {
    return newItem
  }

  for (const [key, value] of Object.entries(item)) {
    if (relations.includes(key)) {
      const relation = item[key]

      // If the relationship is an object, its either a one to one or many to one relationship
      // We typically don't update the parent from the child relationship, we can skip this for now.
      // This can be focused on solely for one to one relationships
      if (isObject(relation)) {
        // If "id" is the only one in the object, underscorize the relation. This is assuming that
        // the relationship itself was changed to another item and now we need to revert it to the old item.
        if (Object.keys(relation).length === 1 && "id" in relation) {
          newItem[`${key}_id`] = relation.id
        }

        // If attributes of the relation have been updated, we can assume that this
        // was an update operation on the relation. We revert what was updated.
        if (Object.keys(relation).length > 1) {
          // The ID can be figured out from the relationship, we can delete the ID here
          // NOTE(review): this mutates the caller's `item` in place — confirm
          // callers do not reuse `item` after this conversion.
          if ("id" in relation) {
            delete relation.id
          }

          // we just need the selects for the relation, filter it out and remove the parent scope
          // NOTE(review): `!s.includes("id")` is a substring test, so it also
          // drops selects like "items.paid" or "items.identifier" — verify
          // this is intended before relying on it.
          const filteredSelects = selects
            .filter((s) => s.startsWith(key) && !s.includes("id"))
            .map(shiftFirstPath)

          // Add the filtered selects to the sanitized object
          for (const filteredSelect of filteredSelects) {
            newItem[key] = newItem[key] || {}
            newItem[key][filteredSelect] = relation[filteredSelect]
          }
        }

        continue
      }

      // If the relation is an array, we can expect this to be a one to many or many to many
      // relationships. Recursively call the function until all relations are converted
      if (Array.isArray(relation)) {
        const newRelationsArray: ItemRecord[] = []

        for (const rel of relation) {
          // Scope selects and relations to ones that are relevant to the current relation
          const filteredRelations = relations
            .filter((r) => r.startsWith(key))
            .map(shiftFirstPath)

          const filteredSelects = selects
            .filter((s) => s.startsWith(key))
            .map(shiftFirstPath)

          newRelationsArray.push(
            convertItemResponseToUpdateRequest(
              rel,
              filteredSelects,
              filteredRelations,
              true
            )
          )
        }

        newItem[key] = newRelationsArray
      }
    }

    // if the key exists in the selects, we add them to the new sanitized array.
    // sanitisation is done because MikroORM adds relationship attributes and other default attributes
    // which we do not want to add to the update request
    if (selects.includes(key) && !fromManyRelationships) {
      newItem[key] = value
    }
  }

  return newItem
}
|
||||
|
||||
function shiftFirstPath(select) {
|
||||
const selectArray = select.split(".")
|
||||
selectArray.shift()
|
||||
|
||||
return selectArray.join(".")
|
||||
}
|
||||
9
packages/core/utils/src/common/create-container-like.ts
Normal file
9
packages/core/utils/src/common/create-container-like.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
import { ContainerLike } from "@medusajs/types"
|
||||
|
||||
export function createContainerLike(obj): ContainerLike {
|
||||
return {
|
||||
resolve(key: string) {
|
||||
return obj[key]
|
||||
},
|
||||
}
|
||||
}
|
||||
74
packages/core/utils/src/common/create-psql-index-helper.ts
Normal file
74
packages/core/utils/src/common/create-psql-index-helper.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
import { Index } from "@mikro-orm/core"
|
||||
|
||||
/**
|
||||
* Create a PSQL index statement
|
||||
* @param name The name of the index, if not provided it will be generated in the format IDX_tableName_columnName
|
||||
* @param tableName The name of the table
|
||||
* @param columns The columns to index
|
||||
* @param type The type of index (e.g GIN, GIST, BTREE, etc)
|
||||
* @param where The where clause
|
||||
* @param unique If the index should be a unique index
|
||||
*
|
||||
* @example
|
||||
* createPsqlIndexStatementHelper({
|
||||
* name: "idx_user_email",
|
||||
* tableName: "user",
|
||||
* columns: "email",
|
||||
* type: "btree",
|
||||
* where: "email IS NOT NULL"
|
||||
* });
|
||||
*
|
||||
* // expression: CREATE INDEX IF NOT EXISTS idx_user_email ON user USING btree (email) WHERE email IS NOT NULL;
|
||||
*
|
||||
* createPsqlIndexStatementHelper({
|
||||
* name: "idx_user_email",
|
||||
* tableName: "user",
|
||||
* columns: "email"
|
||||
* });
|
||||
*
|
||||
* // expression: CREATE INDEX IF NOT EXISTS idx_user_email ON user (email);
|
||||
*
|
||||
*/
|
||||
export function createPsqlIndexStatementHelper({
|
||||
name,
|
||||
tableName,
|
||||
columns,
|
||||
type,
|
||||
where,
|
||||
unique,
|
||||
}: {
|
||||
name?: string
|
||||
tableName: string
|
||||
columns: string | string[]
|
||||
type?: string
|
||||
where?: string
|
||||
unique?: boolean
|
||||
}) {
|
||||
const columnsName = Array.isArray(columns) ? columns.join("_") : columns
|
||||
|
||||
columns = Array.isArray(columns) ? columns.join(", ") : columns
|
||||
name = name || `IDX_${tableName}_${columnsName}${unique ? "_unique" : ""}`
|
||||
|
||||
const typeStr = type ? ` USING ${type}` : ""
|
||||
const optionsStr = where ? ` WHERE ${where}` : ""
|
||||
const uniqueStr = unique ? "UNIQUE " : ""
|
||||
|
||||
const expression = `CREATE ${uniqueStr}INDEX IF NOT EXISTS "${name}" ON "${tableName}"${typeStr} (${columns})${optionsStr}`
|
||||
return {
|
||||
toString: () => {
|
||||
return expression
|
||||
},
|
||||
valueOf: () => {
|
||||
return expression
|
||||
},
|
||||
name,
|
||||
expression,
|
||||
MikroORMIndex: (options?: Parameters<typeof Index>[0]) => {
|
||||
return Index({
|
||||
name,
|
||||
expression,
|
||||
...options,
|
||||
})
|
||||
},
|
||||
}
|
||||
}
|
||||
3
packages/core/utils/src/common/deduplicate.ts
Normal file
3
packages/core/utils/src/common/deduplicate.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function deduplicate<T = any>(collection: T[]): T[] {
|
||||
return [...new Set(collection)]
|
||||
}
|
||||
35
packages/core/utils/src/common/deep-copy.ts
Normal file
35
packages/core/utils/src/common/deep-copy.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
/**
|
||||
* In most casees, JSON.parse(JSON.stringify(obj)) is enough to deep copy an object.
|
||||
* But in some cases, it's not enough. For example, if the object contains a function or a proxy, it will be lost after JSON.parse(JSON.stringify(obj)).
|
||||
*
|
||||
* @param obj
|
||||
*/
|
||||
export function deepCopy<T extends Record<any, any> = Record<any, any>>(
|
||||
obj: T | T[]
|
||||
): T | T[] {
|
||||
if (obj === null || typeof obj !== "object") {
|
||||
return obj
|
||||
}
|
||||
|
||||
if (Array.isArray(obj)) {
|
||||
const copy: any[] = []
|
||||
for (let i = 0; i < obj.length; i++) {
|
||||
copy[i] = deepCopy(obj[i])
|
||||
}
|
||||
return copy
|
||||
}
|
||||
|
||||
if (isObject(obj)) {
|
||||
const copy: Record<any, any> = {}
|
||||
for (let attr in obj) {
|
||||
if (obj.hasOwnProperty(attr)) {
|
||||
copy[attr] = deepCopy(obj[attr])
|
||||
}
|
||||
}
|
||||
return copy
|
||||
}
|
||||
|
||||
return obj
|
||||
}
|
||||
24
packages/core/utils/src/common/deep-equal-obj.ts
Normal file
24
packages/core/utils/src/common/deep-equal-obj.ts
Normal file
@@ -0,0 +1,24 @@
|
||||
export function deepEqualObj(obj1: object, obj2: object): boolean {
|
||||
if (typeof obj1 !== typeof obj2) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (typeof obj1 !== "object" || obj1 === null) {
|
||||
return obj1 === obj2
|
||||
}
|
||||
|
||||
const obj1Keys = Object.keys(obj1)
|
||||
const obj2Keys = Object.keys(obj2)
|
||||
|
||||
if (obj1Keys.length !== obj2Keys.length) {
|
||||
return false
|
||||
}
|
||||
|
||||
for (const key of obj1Keys) {
|
||||
if (!obj2Keys.includes(key) || !deepEqualObj(obj1[key], obj2[key])) {
|
||||
return false
|
||||
}
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
103
packages/core/utils/src/common/deep-flat-map.ts
Normal file
103
packages/core/utils/src/common/deep-flat-map.ts
Normal file
@@ -0,0 +1,103 @@
|
||||
import { isDefined } from "./is-defined"
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
/**
|
||||
* @description
|
||||
* This function is used to flatten nested objects and arrays
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* ```ts
|
||||
* const data = {
|
||||
* root_level_property: "root level",
|
||||
* products: [
|
||||
* {
|
||||
* id: "1",
|
||||
* name: "product 1",
|
||||
* variants: [
|
||||
* { id: "1.1", name: "variant 1.1" },
|
||||
* { id: "1.2", name: "variant 1.2" },
|
||||
* ],
|
||||
* },
|
||||
* {
|
||||
* id: "2",
|
||||
* name: "product 2",
|
||||
* variants: [
|
||||
* { id: "2.1", name: "variant 2.1" },
|
||||
* { id: "2.2", name: "variant 2.2" },
|
||||
* ],
|
||||
* },
|
||||
* ],
|
||||
* }
|
||||
*
|
||||
* const flat = deepFlatMap(
|
||||
* data,
|
||||
* "products.variants",
|
||||
* ({ root_, products, variants }) => {
|
||||
* return {
|
||||
* root_level_property: root_.root_level_property,
|
||||
* product_id: products.id,
|
||||
* product_name: products.name,
|
||||
* variant_id: variants.id,
|
||||
* variant_name: variants.name,
|
||||
* }
|
||||
* }
|
||||
* )
|
||||
* ```
|
||||
*/
|
||||
|
||||
export function deepFlatMap(
  data: any,
  path: string,
  callback: (context: Record<string, any>) => any
) {
  // Synthetic segment wrapping `data`, so the root object participates in
  // the walk (and is exposed to the callback as `root_`) like any other key.
  const ROOT_LEVEL = "root_"
  const keys = path.split(".")
  keys.unshift(ROOT_LEVEL)

  const lastKey = keys[keys.length - 1]
  // Iterative walk (shift() makes this a FIFO queue, i.e. breadth-first):
  // `context` accumulates the object seen at each segment, keyed by segment.
  const stack: {
    element: any
    path: string[]
    context: Record<string, any>
  }[] = [{ element: { [ROOT_LEVEL]: data }, path: keys, context: {} }]

  const results: any[] = []
  while (stack.length > 0) {
    const { element, path, context } = stack.shift()!
    const currentKey = path[0]
    const remainingPath = path.slice(1)

    // Missing segment: still invoke the callback with the context gathered
    // so far. NOTE(review): this result is NOT pushed to `results`, unlike
    // the leaf branches below — confirm the asymmetry is intentional.
    if (!isDefined(element[currentKey])) {
      callback({ ...context })
      continue
    }

    if (remainingPath.length === 0) {
      // Leaf segment: emit one callback result per array item, or a single
      // result when the leaf is a plain object.
      if (Array.isArray(element[currentKey])) {
        element[currentKey].forEach((item) => {
          results.push(callback({ ...context, [lastKey]: item }))
        })
      } else if (isObject(element[currentKey])) {
        results.push(callback({ ...context, [lastKey]: element[currentKey] }))
      }
    } else {
      // Intermediate segment: queue each child with the extended context.
      if (Array.isArray(element[currentKey])) {
        element[currentKey].forEach((item) => {
          stack.push({
            element: item,
            path: remainingPath,
            context: { ...context, [currentKey]: item },
          })
        })
      } else if (isObject(element[currentKey])) {
        stack.push({
          element: element[currentKey],
          path: remainingPath,
          context: { ...context, [currentKey]: element[currentKey] },
        })
      }
    }
  }
  return results
}
|
||||
55
packages/core/utils/src/common/errors.ts
Normal file
55
packages/core/utils/src/common/errors.ts
Normal file
@@ -0,0 +1,55 @@
|
||||
/**
|
||||
* @typedef MedusaErrorType
|
||||
*
|
||||
*/
|
||||
export const MedusaErrorTypes = {
|
||||
/** Errors stemming from the database */
|
||||
DB_ERROR: "database_error",
|
||||
DUPLICATE_ERROR: "duplicate_error",
|
||||
INVALID_ARGUMENT: "invalid_argument",
|
||||
INVALID_DATA: "invalid_data",
|
||||
UNAUTHORIZED: "unauthorized",
|
||||
NOT_FOUND: "not_found",
|
||||
NOT_ALLOWED: "not_allowed",
|
||||
UNEXPECTED_STATE: "unexpected_state",
|
||||
CONFLICT: "conflict",
|
||||
PAYMENT_AUTHORIZATION_ERROR: "payment_authorization_error",
|
||||
}
|
||||
|
||||
export const MedusaErrorCodes = {
|
||||
INSUFFICIENT_INVENTORY: "insufficient_inventory",
|
||||
CART_INCOMPATIBLE_STATE: "cart_incompatible_state",
|
||||
}
|
||||
|
||||
/**
|
||||
* Standardized error to be used across Medusa project.
|
||||
* @extends Error
|
||||
*/
|
||||
export class MedusaError extends Error {
|
||||
public type: string
|
||||
public message: string
|
||||
public code?: string
|
||||
public date: Date
|
||||
public static Types = MedusaErrorTypes
|
||||
public static Codes = MedusaErrorCodes
|
||||
|
||||
/**
|
||||
* Creates a standardized error to be used across Medusa project.
|
||||
* @param {string} type - type of error
|
||||
* @param {string} message - message to go along with error
|
||||
* @param {string} code - code of error
|
||||
* @param {Array} params - params
|
||||
*/
|
||||
constructor(type: string, message: string, code?: string, ...params: any) {
|
||||
super(...params)
|
||||
|
||||
if (Error.captureStackTrace) {
|
||||
Error.captureStackTrace(this, MedusaError)
|
||||
}
|
||||
|
||||
this.type = type
|
||||
this.code = code
|
||||
this.message = message
|
||||
this.date = new Date()
|
||||
}
|
||||
}
|
||||
16
packages/core/utils/src/common/generate-entity-id.ts
Normal file
16
packages/core/utils/src/common/generate-entity-id.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
import { ulid } from "ulid"
|
||||
|
||||
/**
|
||||
* Generate a composed id based on the input parameters and return either the is if it exists or the generated one.
|
||||
* @param idProperty
|
||||
* @param prefix
|
||||
*/
|
||||
export function generateEntityId(idProperty?: string, prefix?: string): string {
|
||||
if (idProperty) {
|
||||
return idProperty
|
||||
}
|
||||
|
||||
const id = ulid()
|
||||
prefix = prefix ? `${prefix}_` : ""
|
||||
return `${prefix}${id}`
|
||||
}
|
||||
17
packages/core/utils/src/common/generate-linkable-keys-map.ts
Normal file
17
packages/core/utils/src/common/generate-linkable-keys-map.ts
Normal file
@@ -0,0 +1,17 @@
|
||||
import { MapToConfig } from "./map-object-to"
|
||||
|
||||
export function generateLinkableKeysMap(
|
||||
linkableKeys: Record<string, string>
|
||||
): MapToConfig {
|
||||
const entityLinkableKeysMap: MapToConfig = {}
|
||||
|
||||
Object.entries(linkableKeys).forEach(([key, value]) => {
|
||||
entityLinkableKeysMap[value] ??= []
|
||||
entityLinkableKeysMap[value].push({
|
||||
mapTo: key,
|
||||
valueFrom: key.split("_").pop()!,
|
||||
})
|
||||
})
|
||||
|
||||
return entityLinkableKeysMap
|
||||
}
|
||||
32
packages/core/utils/src/common/get-config-file.ts
Normal file
32
packages/core/utils/src/common/get-config-file.ts
Normal file
@@ -0,0 +1,32 @@
|
||||
import { join } from "path"
|
||||
|
||||
/**
|
||||
* Attempts to resolve the config file in a given root directory.
|
||||
* @param {string} rootDir - the directory to find the config file in.
|
||||
* @param {string} configName - the name of the config file.
|
||||
* @return {object} an object containing the config module and its path as well as an error property if the config couldn't be loaded.
|
||||
*/
|
||||
function getConfigFile<TConfig = unknown>(
|
||||
rootDir: string,
|
||||
configName: string
|
||||
): { configModule: TConfig; configFilePath: string; error?: any } {
|
||||
const configPath = join(rootDir, configName)
|
||||
let configFilePath = ``
|
||||
let configModule
|
||||
let err
|
||||
|
||||
try {
|
||||
configFilePath = require.resolve(configPath)
|
||||
configModule = require(configFilePath)
|
||||
} catch (e) {
|
||||
err = e
|
||||
}
|
||||
|
||||
if (configModule && typeof configModule.default === "object") {
|
||||
configModule = configModule.default
|
||||
}
|
||||
|
||||
return { configModule, configFilePath, error: err }
|
||||
}
|
||||
|
||||
export default getConfigFile
|
||||
16
packages/core/utils/src/common/get-duplicates.ts
Normal file
16
packages/core/utils/src/common/get-duplicates.ts
Normal file
@@ -0,0 +1,16 @@
|
||||
// This function is intentionally not generic, as we will likely need a comparator function in that case, which will make it more complex than necessary
|
||||
// Revisit if there is such use-case in the future
|
||||
export const getDuplicates = (collection: string[]): string[] => {
|
||||
const uniqueElements = new Set<string>()
|
||||
const duplicates = new Set<string>()
|
||||
|
||||
collection.forEach((item) => {
|
||||
if (uniqueElements.has(item)) {
|
||||
duplicates.add(item)
|
||||
} else {
|
||||
uniqueElements.add(item)
|
||||
}
|
||||
})
|
||||
|
||||
return Array.from(duplicates)
|
||||
}
|
||||
15
packages/core/utils/src/common/get-iso-string-from-date.ts
Normal file
15
packages/core/utils/src/common/get-iso-string-from-date.ts
Normal file
@@ -0,0 +1,15 @@
|
||||
import { isDate } from "./is-date"
|
||||
import { MedusaError } from "./errors"
|
||||
|
||||
export const GetIsoStringFromDate = (date: Date | string) => {
|
||||
if (!isDate(date)) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.INVALID_DATA,
|
||||
`Cannot format date to ISO string: ${date}`
|
||||
)
|
||||
}
|
||||
|
||||
date = new Date(date)
|
||||
|
||||
return date.toISOString()
|
||||
}
|
||||
14
packages/core/utils/src/common/get-medusa-version.ts
Normal file
14
packages/core/utils/src/common/get-medusa-version.ts
Normal file
@@ -0,0 +1,14 @@
|
||||
import { join } from "path"
|
||||
|
||||
export const getMedusaVersion = (): string => {
|
||||
try {
|
||||
return require(join(
|
||||
process.cwd(),
|
||||
`node_modules`,
|
||||
`@medusajs/medusa`,
|
||||
`package.json`
|
||||
)).version
|
||||
} catch (e) {
|
||||
return ``
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,58 @@
|
||||
import { deduplicate } from "./deduplicate"
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
export function getSelectsAndRelationsFromObjectArray(
|
||||
dataArray: object[],
|
||||
options: { objectFields: string[] } = {
|
||||
objectFields: [],
|
||||
},
|
||||
prefix?: string
|
||||
): {
|
||||
selects: string[]
|
||||
relations: string[]
|
||||
} {
|
||||
const selects: string[] = []
|
||||
const relations: string[] = []
|
||||
|
||||
for (const data of dataArray) {
|
||||
for (const [key, value] of Object.entries(data)) {
|
||||
if (isObject(value) && !options.objectFields.includes(key)) {
|
||||
relations.push(setKey(key, prefix))
|
||||
const res = getSelectsAndRelationsFromObjectArray(
|
||||
[value],
|
||||
options,
|
||||
setKey(key, prefix)
|
||||
)
|
||||
selects.push(...res.selects)
|
||||
relations.push(...res.relations)
|
||||
} else if (Array.isArray(value)) {
|
||||
relations.push(setKey(key, prefix))
|
||||
const res = getSelectsAndRelationsFromObjectArray(
|
||||
value,
|
||||
options,
|
||||
setKey(key, prefix)
|
||||
)
|
||||
selects.push(...res.selects)
|
||||
relations.push(...res.relations)
|
||||
} else {
|
||||
selects.push(setKey(key, prefix))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
const uniqueSelects: string[] = deduplicate(selects)
|
||||
const uniqueRelations: string[] = deduplicate(relations)
|
||||
|
||||
return {
|
||||
selects: uniqueSelects,
|
||||
relations: uniqueRelations,
|
||||
}
|
||||
}
|
||||
|
||||
function setKey(key: string, prefix?: string) {
|
||||
if (prefix) {
|
||||
return `${prefix}.${key}`
|
||||
} else {
|
||||
return key
|
||||
}
|
||||
}
|
||||
19
packages/core/utils/src/common/get-set-difference.ts
Normal file
19
packages/core/utils/src/common/get-set-difference.ts
Normal file
@@ -0,0 +1,19 @@
|
||||
/**
|
||||
* Get the difference between two sets. The difference is the elements that are in the original set but not in the compare set.
|
||||
* @param orignalSet
|
||||
* @param compareSet
|
||||
*/
|
||||
export function getSetDifference<T>(
|
||||
orignalSet: Set<T>,
|
||||
compareSet: Set<T>
|
||||
): Set<T> {
|
||||
const difference = new Set<T>()
|
||||
|
||||
orignalSet.forEach((element) => {
|
||||
if (!compareSet.has(element)) {
|
||||
difference.add(element)
|
||||
}
|
||||
})
|
||||
|
||||
return difference
|
||||
}
|
||||
20
packages/core/utils/src/common/group-by.ts
Normal file
20
packages/core/utils/src/common/group-by.ts
Normal file
@@ -0,0 +1,20 @@
|
||||
export function groupBy(
|
||||
array: Record<any, any>[],
|
||||
attribute: string | number
|
||||
): Map<any, any> {
|
||||
return array.reduce<Map<any, any>>((map, obj) => {
|
||||
const key = obj[attribute]
|
||||
|
||||
if (!key) {
|
||||
return map
|
||||
}
|
||||
|
||||
if (!map.get(key)) {
|
||||
map.set(key, [])
|
||||
}
|
||||
|
||||
map.get(key).push(obj)
|
||||
|
||||
return map
|
||||
}, new Map())
|
||||
}
|
||||
@@ -0,0 +1,49 @@
|
||||
import { EOL } from "os"
|
||||
|
||||
export const DatabaseErrorCode = {
|
||||
databaseDoesNotExist: "3D000",
|
||||
connectionFailure: "ECONNREFUSED",
|
||||
wrongCredentials: "28000",
|
||||
notFound: "ENOTFOUND",
|
||||
migrationMissing: "42P01",
|
||||
}
|
||||
|
||||
export function handlePostgresDatabaseError(err: any): never {
|
||||
if (DatabaseErrorCode.databaseDoesNotExist === err.code) {
|
||||
throw new Error(
|
||||
`The specified PostgreSQL database does not exist. Please create it and try again.${EOL}${err.message}`
|
||||
)
|
||||
}
|
||||
|
||||
if (DatabaseErrorCode.connectionFailure === err.code) {
|
||||
throw new Error(
|
||||
`Failed to establish a connection to PostgreSQL. Please ensure the following is true and try again:
|
||||
- You have a PostgreSQL database running
|
||||
- You have passed the correct credentials in medusa-config.js
|
||||
- You have formatted the database connection string correctly. See below:
|
||||
"postgres://[username]:[password]@[host]:[post]/[db_name]" - If there is no password, you can omit it from the connection string
|
||||
${EOL}
|
||||
${err.message}`
|
||||
)
|
||||
}
|
||||
|
||||
if (DatabaseErrorCode.wrongCredentials === err.code) {
|
||||
throw new Error(
|
||||
`The specified credentials does not exists for the specified PostgreSQL database.${EOL}${err.message}`
|
||||
)
|
||||
}
|
||||
|
||||
if (DatabaseErrorCode.notFound === err.code) {
|
||||
throw new Error(
|
||||
`The specified connection string for your PostgreSQL database might have illegal characters. Please check that it only contains allowed characters [a-zA-Z0-9]${EOL}${err.message}`
|
||||
)
|
||||
}
|
||||
|
||||
if (DatabaseErrorCode.migrationMissing === err.code) {
|
||||
throw new Error(
|
||||
`Migrations missing. Please run 'medusa migrations run' and try again.`
|
||||
)
|
||||
}
|
||||
|
||||
throw err
|
||||
}
|
||||
60
packages/core/utils/src/common/index.ts
Normal file
60
packages/core/utils/src/common/index.ts
Normal file
@@ -0,0 +1,60 @@
|
||||
// Barrel file re-exporting every common utility module.
// NOTE(review): "./plurailze" and "./remove-nullisih" mirror the (typo'd)
// names of the actual module files; renaming them here would break module
// resolution, so the paths are kept as-is.
export * from "./alter-columns-helper"
export * from "./array-difference"
export * from "./array-intersection"
export * from "./build-query"
export * from "./camel-to-snake-case"
export * from "./container"
export * from "./convert-item-response-to-update-request"
export * from "./create-container-like"
export * from "./create-psql-index-helper"
export * from "./deduplicate"
export * from "./deep-copy"
export * from "./deep-equal-obj"
export * from "./deep-flat-map"
export * from "./errors"
export * from "./generate-entity-id"
export * from "./generate-linkable-keys-map"
export * from "./get-config-file"
export * from "./get-duplicates"
export * from "./get-iso-string-from-date"
export * from "./get-selects-and-relations-from-object-array"
export * from "./get-set-difference"
export * from "./group-by"
export * from "./handle-postgres-database-error"
export * from "./is-big-number"
export * from "./is-date"
export * from "./is-defined"
export * from "./is-email"
export * from "./is-object"
export * from "./is-present"
export * from "./is-string"
export * from "./lower-case-first"
export * from "./map-object-to"
export * from "./medusa-container"
export * from "./object-from-string-path"
export * from "./object-to-string-path"
export * from "./optional-numeric-serializer"
export * from "./partition-array"
export * from "./pick-deep"
export * from "./pick-value-from-object"
export * from "./plurailze"
export * from "./prefix-array-items"
export * from "./promise-all"
export * from "./remote-query-object-from-string"
export * from "./remote-query-object-to-string"
export * from "./remove-nullisih"
export * from "./remove-undefined"
export * from "./rules"
export * from "./selector-constraints-to-string"
export * from "./set-metadata"
export * from "./simple-hash"
export * from "./string-or-regex-equals"
export * from "./string-to-select-relation-object"
export * from "./stringify-circular"
export * from "./to-camel-case"
export * from "./to-kebab-case"
export * from "./to-pascal-case"
export * from "./transaction"
export * from "./trim-zeros"
export * from "./upper-case-first"
export * from "./wrap-handler"
||||
6
packages/core/utils/src/common/is-big-number.ts
Normal file
6
packages/core/utils/src/common/is-big-number.ts
Normal file
@@ -0,0 +1,6 @@
|
||||
import { BigNumberRawValue } from "@medusajs/types"
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
export function isBigNumber(obj: any): obj is BigNumberRawValue {
|
||||
return isObject(obj) && "value" in obj
|
||||
}
|
||||
3
packages/core/utils/src/common/is-date.ts
Normal file
3
packages/core/utils/src/common/is-date.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function isDate(value: any): value is Date {
|
||||
return value !== null && !isNaN(new Date(value).valueOf())
|
||||
}
|
||||
5
packages/core/utils/src/common/is-defined.ts
Normal file
5
packages/core/utils/src/common/is-defined.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export function isDefined<T = undefined | unknown>(
|
||||
val: T
|
||||
): val is T extends undefined ? never : T {
|
||||
return typeof val !== "undefined"
|
||||
}
|
||||
30
packages/core/utils/src/common/is-email.ts
Normal file
30
packages/core/utils/src/common/is-email.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
import { MedusaError } from "./errors"
|
||||
|
||||
const EMAIL_REGEX =
|
||||
/^(([^<>()[\]\\.,;:\s@"]+(\.[^<>()[\]\\.,;:\s@"]+)*)|.(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/
|
||||
|
||||
/**
|
||||
* Check whether provided string is an email.
|
||||
* @param email - string to check
|
||||
*/
|
||||
function isEmail(email: string) {
|
||||
return email.toLowerCase().match(EMAIL_REGEX)
|
||||
}
|
||||
|
||||
/**
|
||||
* Used to validate user email.
|
||||
* @param {string} email - email to validate
|
||||
* @return {string} the validated email
|
||||
*/
|
||||
export function validateEmail(email: string): string {
|
||||
const validatedEmail = isEmail(email)
|
||||
|
||||
if (!validatedEmail) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.INVALID_DATA,
|
||||
"The email is not valid"
|
||||
)
|
||||
}
|
||||
|
||||
return email.toLowerCase()
|
||||
}
|
||||
3
packages/core/utils/src/common/is-object.ts
Normal file
3
packages/core/utils/src/common/is-object.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function isObject(obj: any): obj is object {
|
||||
return obj != null && obj?.constructor?.name === "Object"
|
||||
}
|
||||
23
packages/core/utils/src/common/is-present.ts
Normal file
23
packages/core/utils/src/common/is-present.ts
Normal file
@@ -0,0 +1,23 @@
|
||||
import { isDefined } from "./is-defined"
|
||||
import { isObject } from "./is-object"
|
||||
import { isString } from "./is-string"
|
||||
|
||||
export function isPresent(value: any): boolean {
|
||||
if (!isDefined(value) || value === null) {
|
||||
return false
|
||||
}
|
||||
|
||||
if (isString(value) || Array.isArray(value)) {
|
||||
return value.length > 0
|
||||
}
|
||||
|
||||
if (value instanceof Map || value instanceof Set) {
|
||||
return value.size > 0
|
||||
}
|
||||
|
||||
if (isObject(value)) {
|
||||
return Object.keys(value).length > 0
|
||||
}
|
||||
|
||||
return true
|
||||
}
|
||||
3
packages/core/utils/src/common/is-string.ts
Normal file
3
packages/core/utils/src/common/is-string.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function isString(val: any): val is string {
|
||||
return val != null && typeof val === "string"
|
||||
}
|
||||
3
packages/core/utils/src/common/lower-case-first.ts
Normal file
3
packages/core/utils/src/common/lower-case-first.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function lowerCaseFirst(str: string): string {
|
||||
return str.charAt(0).toLowerCase() + str.slice(1)
|
||||
}
|
||||
53
packages/core/utils/src/common/map-object-to.ts
Normal file
53
packages/core/utils/src/common/map-object-to.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
type RemapInputObject = Record<string, unknown[]>
|
||||
type RemapConfig = { mapTo: string; valueFrom: string }
|
||||
export type MapToConfig = {
|
||||
[key: string]: RemapConfig[]
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a new object with the keys remapped and the values picked from the original object based
|
||||
* on the map config
|
||||
*
|
||||
* @param object input object
|
||||
* @param mapTo configuration to map the output object
|
||||
* @param removeIfNotRemapped if true, the keys that are not remapped will be removed from the output object
|
||||
* @param pick if provided, only the keys in the array will be picked from the output object
|
||||
*/
|
||||
export function mapObjectTo<
|
||||
TResult = any,
|
||||
T extends RemapInputObject = RemapInputObject
|
||||
>(
|
||||
object: T,
|
||||
mapTo: MapToConfig,
|
||||
{
|
||||
removeIfNotRemapped,
|
||||
pick,
|
||||
}: { removeIfNotRemapped?: boolean; pick?: string[] } = {}
|
||||
): TResult {
|
||||
removeIfNotRemapped ??= false
|
||||
|
||||
const newObject: Record<string, any> = {}
|
||||
|
||||
for (const key in object) {
|
||||
const remapConfig = mapTo[key as string]!
|
||||
|
||||
if (!remapConfig) {
|
||||
if (!removeIfNotRemapped) {
|
||||
newObject[key] = object[key]
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
remapConfig.forEach((config) => {
|
||||
if (pick?.length && !pick.includes(config.mapTo)) {
|
||||
return
|
||||
}
|
||||
|
||||
newObject[config.mapTo] = object[key]
|
||||
.map((obj: any) => obj[config.valueFrom])
|
||||
.filter(Boolean)
|
||||
})
|
||||
}
|
||||
|
||||
return newObject as TResult
|
||||
}
|
||||
57
packages/core/utils/src/common/medusa-container.ts
Normal file
57
packages/core/utils/src/common/medusa-container.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import { MedusaContainer } from "@medusajs/types"
|
||||
import {
|
||||
asFunction,
|
||||
asValue,
|
||||
AwilixContainer,
|
||||
ClassOrFunctionReturning,
|
||||
createContainer,
|
||||
Resolver,
|
||||
} from "awilix"
|
||||
|
||||
// Builds a resolver that, when resolved, constructs every registration
// currently held in `resolvers` against the resolving container. The array
// is captured by reference so later additions are visible on resolve.
function asArray(
  resolvers: (ClassOrFunctionReturning<unknown> | Resolver<unknown>)[]
): { resolve: (container: AwilixContainer) => unknown[] } {
  return {
    resolve: (container: AwilixContainer) =>
      resolvers.map((resolver) => container.build(resolver)),
  }
}

// Accumulates multiple registrations under a single name: each call pushes
// the registration into a shared "<name>_STORE" array, and `name` itself
// resolves to the array of built values. Bound to a container instance in
// createMedusaContainer below.
function registerAdd(
  this: MedusaContainer,
  name: string,
  registration: typeof asFunction | typeof asValue
) {
  const storeKey = name + "_STORE"

  // Lazily create the backing store for this name on first use.
  if (this.registrations[storeKey] === undefined) {
    this.register(storeKey, asValue([] as Resolver<unknown>[]))
  }
  const store = this.resolve(storeKey) as (
    | ClassOrFunctionReturning<unknown>
    | Resolver<unknown>
  )[]

  // Register `name` itself only once; it resolves through the shared store.
  if (this.registrations[name] === undefined) {
    this.register(name, asArray(store))
  }
  // unshift: the most recently added registration is built first on resolve.
  store.unshift(registration)

  return this
}

/**
 * Creates an awilix container augmented with `registerAdd`; scopes created
 * from it are augmented the same way so the helper is available everywhere.
 */
export function createMedusaContainer(...args): MedusaContainer {
  const container = createContainer.apply(null, args) as MedusaContainer

  container.registerAdd = registerAdd.bind(container)

  // Wrap createScope so every child scope also exposes registerAdd bound to
  // itself rather than to the parent container.
  const originalScope = container.createScope
  container.createScope = () => {
    const scoped = originalScope() as MedusaContainer
    scoped.registerAdd = registerAdd.bind(scoped)

    return scoped
  }

  return container
}
|
||||
78
packages/core/utils/src/common/object-from-string-path.ts
Normal file
78
packages/core/utils/src/common/object-from-string-path.ts
Normal file
@@ -0,0 +1,78 @@
|
||||
/**
|
||||
* Convert a collection of dot string into a nested object
|
||||
* @example
|
||||
* input: [
|
||||
* order,
|
||||
* order.items,
|
||||
* order.swaps,
|
||||
* order.swaps.additional_items,
|
||||
* order.discounts,
|
||||
* order.discounts.rule,
|
||||
* order.claims,
|
||||
* order.claims.additional_items,
|
||||
* additional_items,
|
||||
* additional_items.variant,
|
||||
* return_order,
|
||||
* return_order.items,
|
||||
* return_order.shipping_method,
|
||||
* return_order.shipping_method.tax_lines
|
||||
* ]
|
||||
* output: {
|
||||
* "order": {
|
||||
* "items": true,
|
||||
* "swaps": {
|
||||
* "additional_items": true
|
||||
* },
|
||||
* "discounts": {
|
||||
* "rule": true
|
||||
* },
|
||||
* "claims": {
|
||||
* "additional_items": true
|
||||
* }
|
||||
* },
|
||||
* "additional_items": {
|
||||
* "variant": true
|
||||
* },
|
||||
* "return_order": {
|
||||
* "items": true,
|
||||
* "shipping_method": {
|
||||
* "tax_lines": true
|
||||
* }
|
||||
* }
|
||||
* }
|
||||
* @param collection
|
||||
*/
|
||||
export function objectFromStringPath(
|
||||
collection: string[]
|
||||
): Record<string, any> {
|
||||
collection = collection.sort()
|
||||
const output: Record<string, any> = {}
|
||||
|
||||
for (const relation of collection) {
|
||||
if (!relation) {
|
||||
continue
|
||||
}
|
||||
if (relation.indexOf(".") > -1) {
|
||||
const nestedRelations = relation.split(".")
|
||||
|
||||
let parent = output
|
||||
|
||||
while (nestedRelations.length > 1) {
|
||||
const nestedRelation = nestedRelations.shift() as string
|
||||
parent = parent[nestedRelation] =
|
||||
parent[nestedRelation] !== true &&
|
||||
typeof parent[nestedRelation] === "object"
|
||||
? parent[nestedRelation]
|
||||
: {}
|
||||
}
|
||||
|
||||
parent[nestedRelations[0]] = true
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
output[relation] = output[relation] ?? true
|
||||
}
|
||||
|
||||
return output
|
||||
}
|
||||
77
packages/core/utils/src/common/object-to-string-path.ts
Normal file
77
packages/core/utils/src/common/object-to-string-path.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
/**
|
||||
* Converts a structure of find options to an
|
||||
* array of string paths
|
||||
* @example
|
||||
* // With `includeTruePropertiesOnly` default value set to false
|
||||
* const result = objectToStringPath({
|
||||
* test: {
|
||||
* test1: true,
|
||||
* test2: true,
|
||||
* test3: {
|
||||
* test4: true
|
||||
* },
|
||||
* },
|
||||
* test2: true
|
||||
* })
|
||||
* console.log(result)
|
||||
* // output: ['test', 'test.test1', 'test.test2', 'test.test3', 'test.test3.test4', 'test2']
|
||||
*
|
||||
* @example
|
||||
* // With `includeTruePropertiesOnly` set to true
|
||||
* const result = objectToStringPath({
|
||||
* test: {
|
||||
* test1: true,
|
||||
* test2: true,
|
||||
* test3: {
|
||||
* test4: true
|
||||
* },
|
||||
* },
|
||||
* test2: true
|
||||
* }, {
|
||||
* includeTruePropertiesOnly: true
|
||||
* })
|
||||
* console.log(result)
|
||||
* // output: ['test.test1', 'test.test2', 'test.test3.test4', 'test2']
|
||||
*
|
||||
* @param {InputObject} input
|
||||
* @param {boolean} includeParentPropertyFields If set to true (example 1), all properties will be included as well as the parents,
|
||||
* otherwise (example 2) all properties path set to true will included, excluded the parents
|
||||
*/
|
||||
export function objectToStringPath(
|
||||
input: object = {},
|
||||
{ includeParentPropertyFields }: { includeParentPropertyFields: boolean } = {
|
||||
includeParentPropertyFields: true,
|
||||
}
|
||||
): string[] {
|
||||
if (!isObject(input) || !Object.keys(input).length) {
|
||||
return []
|
||||
}
|
||||
|
||||
const output: Set<string> = includeParentPropertyFields
|
||||
? new Set(Object.keys(input))
|
||||
: new Set()
|
||||
|
||||
for (const key of Object.keys(input)) {
|
||||
if (isObject(input[key])) {
|
||||
const deepRes = objectToStringPath(input[key], {
|
||||
includeParentPropertyFields,
|
||||
})
|
||||
|
||||
const items = deepRes.reduce((acc, val) => {
|
||||
acc.push(`${key}.${val}`)
|
||||
return acc
|
||||
}, [] as string[])
|
||||
|
||||
items.forEach((item) => output.add(item))
|
||||
continue
|
||||
}
|
||||
|
||||
if (isObject(key) || input[key] === true) {
|
||||
output.add(key)
|
||||
}
|
||||
}
|
||||
|
||||
return Array.from(output)
|
||||
}
|
||||
@@ -0,0 +1,4 @@
|
||||
import { isDefined } from "./is-defined"
|
||||
|
||||
export const optionalNumericSerializer = (value) =>
|
||||
isDefined(value) && value !== null ? Number(value) : value
|
||||
30
packages/core/utils/src/common/partition-array.ts
Normal file
30
packages/core/utils/src/common/partition-array.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
/**
|
||||
* Partitions an array into two arrays based on a predicate function
|
||||
|
||||
* @example
|
||||
* const result = partitionArray([1, 2, 3, 4, 5], (x) => x % 2 === 0)
|
||||
*
|
||||
* console.log(result)
|
||||
*
|
||||
* // output: [[2, 4], [1, 3, 5]]
|
||||
*
|
||||
* @param {T} input input array of type T
|
||||
* @param {(T) => boolean} predicate function to use when split array elements
|
||||
*/
|
||||
export const partitionArray = <T>(
|
||||
input: T[],
|
||||
predicate: (T) => boolean
|
||||
): [T[], T[]] => {
|
||||
return input.reduce(
|
||||
([pos, neg], currentElement) => {
|
||||
if (predicate(currentElement)) {
|
||||
pos.push(currentElement)
|
||||
} else {
|
||||
neg.push(currentElement)
|
||||
}
|
||||
|
||||
return [pos, neg]
|
||||
},
|
||||
[[], []] as [T[], T[]]
|
||||
)
|
||||
}
|
||||
63
packages/core/utils/src/common/pick-deep.ts
Normal file
63
packages/core/utils/src/common/pick-deep.ts
Normal file
@@ -0,0 +1,63 @@
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
// Recursively picks the sub-tree of `input` described by the dot-path
// `fields`. A field may select a whole subtree ("a.b") or a scalar leaf;
// array elements that are plain objects are filtered recursively.
export function pickDeep<T extends object = object>(
  input: object,
  fields: Array<number | string>,
  prefix: string = ""
): T {
  // Falsy input (null/undefined) is returned as-is.
  if (!input) {
    return input
  }

  return Object.entries(input).reduce((nextInput, [key, value]) => {
    const fieldKey = withPrefix(key, prefix)
    // Exact match: this path itself was requested.
    const fieldMatches = fields.includes(fieldKey)
    // Partial match: some requested path lives deeper under this key.
    const partialKeyMatch =
      fields.filter((field) => field.toString().startsWith(`${fieldKey}.`))
        .length > 0

    const valueIsObject = isObject(value)
    const valueIsArray = Array.isArray(value)

    // Exact match on a container: keep the whole subtree untouched, even if
    // deeper paths were also requested.
    if (fieldMatches && (valueIsObject || valueIsArray)) {
      nextInput[key] = value

      return nextInput
    }

    // Neither this path nor anything under it was requested: drop the key.
    if (!fieldMatches && !partialKeyMatch) {
      return nextInput
    }

    if (valueIsArray) {
      // Recurse into object elements; non-object elements pass through.
      nextInput[key] = value.map((arrItem) => {
        if (isObject(arrItem)) {
          return pickDeep(arrItem, fields, withPrefix(key, prefix))
        }
        return arrItem
      })

      return nextInput
    } else if (valueIsObject) {
      // Only recurse into non-empty objects (an empty object is dropped).
      if (Object.keys(value).length) {
        nextInput[key] = pickDeep(value, fields, withPrefix(key, prefix))
      }

      return nextInput
    }

    // Scalar leaf: kept only when its exact path was requested.
    if (fieldMatches) {
      nextInput[key] = value
    }

    return nextInput
  }, {} as T)
}
|
||||
|
||||
function withPrefix(key: string, prefix: string): string {
|
||||
if (prefix.length) {
|
||||
return `${prefix}.${key}`
|
||||
} else {
|
||||
return key
|
||||
}
|
||||
}
|
||||
37
packages/core/utils/src/common/pick-value-from-object.ts
Normal file
37
packages/core/utils/src/common/pick-value-from-object.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
// Resolves a dot-separated `path` inside `object`, recursing through nested
// objects and flat-mapping across arrays encountered along the way.
export function pickValueFromObject(
  path: string,
  object: Record<any, any>
): any {
  const segments = path.split(".")
  let result: any = undefined

  for (const segment of segments) {
    // NOTE(review): this is always the segments after the FIRST one,
    // regardless of which loop iteration we are in — the path only truly
    // advances through the recursive calls below. Verify this is intentional;
    // in practice a non-container hit falls through and typically resolves
    // to undefined on the next iteration.
    const segmentsLeft = [...segments].splice(1, segments.length - 1)
    const segmentOutput = object[segment]

    if (segmentsLeft.length === 0) {
      // Single-segment path: this lookup is the final result.
      result = segmentOutput
      break
    }

    if (isObject(segmentOutput)) {
      // Descend into the nested object with the remaining path.
      result = pickValueFromObject(segmentsLeft.join("."), segmentOutput)
      break
    }

    if (Array.isArray(segmentOutput)) {
      // Fan out over array elements and flatten one level.
      result = segmentOutput
        .map((segmentOutput_) =>
          pickValueFromObject(segmentsLeft.join("."), segmentOutput_)
        )
        .flat()
      break
    }

    // Primitive/undefined hit before the path was exhausted: remember it and
    // continue with the next segment looked up on the SAME `object`.
    result = segmentOutput
  }

  return result
}
|
||||
27
packages/core/utils/src/common/plurailze.ts
Normal file
27
packages/core/utils/src/common/plurailze.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
/**
|
||||
* Some library provide pluralize function with language specific rules.
|
||||
* This is a simple implementation of pluralize function.
|
||||
* @param word
|
||||
*/
|
||||
export function pluralize(word: string): string {
|
||||
// Add basic rules for forming plurals
|
||||
if (
|
||||
//word.endsWith("s") ||
|
||||
word.endsWith("sh") ||
|
||||
word.endsWith("ss") ||
|
||||
word.endsWith("ch") ||
|
||||
word.endsWith("x") ||
|
||||
word.endsWith("o") ||
|
||||
word.endsWith("z")
|
||||
) {
|
||||
return word + "es"
|
||||
} else if (word.endsWith("y") && !"aeiou".includes(word[word.length - 2])) {
|
||||
return word.slice(0, -1) + "ies"
|
||||
} else if (word.endsWith("es")) {
|
||||
return word
|
||||
} else if (word.endsWith("fe")) {
|
||||
return word.slice(0, -2) + "ves"
|
||||
} else {
|
||||
return word + "s"
|
||||
}
|
||||
}
|
||||
8
packages/core/utils/src/common/prefix-array-items.ts
Normal file
8
packages/core/utils/src/common/prefix-array-items.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
/**
|
||||
* Prefixes an array of strings with a specified string
|
||||
* @param array
|
||||
* @param prefix
|
||||
*/
|
||||
export function prefixArrayItems(array: string[], prefix: string): string[] {
|
||||
return array.map((arrEl) => `${prefix}${arrEl}`)
|
||||
}
|
||||
40
packages/core/utils/src/common/promise-all.ts
Normal file
40
packages/core/utils/src/common/promise-all.ts
Normal file
@@ -0,0 +1,40 @@
|
||||
import { EOL } from "os"
|
||||
|
||||
const getMessageError = (state: PromiseRejectedResult) =>
|
||||
state.reason.message ?? state.reason
|
||||
|
||||
const isRejected = (
|
||||
state: PromiseSettledResult<unknown>
|
||||
): state is PromiseRejectedResult => {
|
||||
return state.status === "rejected"
|
||||
}
|
||||
|
||||
const getValue = (state: PromiseFulfilledResult<unknown>) => state.value
|
||||
|
||||
/**
|
||||
* Promise.allSettled with error handling, safe alternative to Promise.all
|
||||
* @param promises
|
||||
* @param aggregateErrors
|
||||
*/
|
||||
export async function promiseAll<T extends readonly unknown[] | []>(
|
||||
promises: T,
|
||||
{ aggregateErrors } = { aggregateErrors: false }
|
||||
): Promise<{ -readonly [P in keyof T]: Awaited<T[P]> }> {
|
||||
const states = await Promise.allSettled(promises)
|
||||
|
||||
const rejected = (states as PromiseSettledResult<unknown>[]).filter(
|
||||
isRejected
|
||||
)
|
||||
|
||||
if (rejected.length) {
|
||||
if (aggregateErrors) {
|
||||
throw new Error(rejected.map(getMessageError).join(EOL))
|
||||
}
|
||||
|
||||
throw rejected[0].reason // Re throw the error itself
|
||||
}
|
||||
|
||||
return (states as PromiseFulfilledResult<unknown>[]).map(
|
||||
getValue
|
||||
) as unknown as Promise<{ -readonly [P in keyof T]: Awaited<T[P]> }>
|
||||
}
|
||||
@@ -0,0 +1,155 @@
|
||||
import { isObject } from "./is-object"
|
||||
|
||||
/**
|
||||
* Convert a string fields array to a remote query object
|
||||
* @param config - The configuration object
|
||||
*
|
||||
* @example
|
||||
* const fields = [
|
||||
* "id",
|
||||
* "created_at",
|
||||
* "updated_at",
|
||||
* "deleted_at",
|
||||
* "url",
|
||||
* "metadata",
|
||||
* "tags.id",
|
||||
* "tags.created_at",
|
||||
* "tags.updated_at",
|
||||
* "tags.deleted_at",
|
||||
* "tags.value",
|
||||
* "options.id",
|
||||
* "options.created_at",
|
||||
* "options.updated_at",
|
||||
* "options.deleted_at",
|
||||
* "options.title",
|
||||
* "options.product_id",
|
||||
* "options.metadata",
|
||||
* "options.values.id",
|
||||
* "options.values.created_at",
|
||||
* "options.values.updated_at",
|
||||
* "options.values.deleted_at",
|
||||
* "options.values.value",
|
||||
* "options.values.option_id",
|
||||
* "options.values.variant_id",
|
||||
* "options.values.metadata",
|
||||
* ]
|
||||
*
|
||||
* const remoteQueryObject = remoteQueryObjectFromString({
|
||||
* entryPoint: "product",
|
||||
* variables: {},
|
||||
* fields,
|
||||
* })
|
||||
*
|
||||
* console.log(remoteQueryObject)
|
||||
* // {
|
||||
* // product: {
|
||||
* // __args: {},
|
||||
* // fields: [
|
||||
* // "id",
|
||||
* // "created_at",
|
||||
* // "updated_at",
|
||||
* // "deleted_at",
|
||||
* // "url",
|
||||
* // "metadata",
|
||||
* // ],
|
||||
* //
|
||||
* // tags: {
|
||||
* // fields: ["id", "created_at", "updated_at", "deleted_at", "value"],
|
||||
* // },
|
||||
* //
|
||||
* // options: {
|
||||
* // fields: [
|
||||
* // "id",
|
||||
* // "created_at",
|
||||
* // "updated_at",
|
||||
* // "deleted_at",
|
||||
* // "title",
|
||||
* // "product_id",
|
||||
* // "metadata",
|
||||
* // ],
|
||||
* // values: {
|
||||
* // fields: [
|
||||
* // "id",
|
||||
* // "created_at",
|
||||
* // "updated_at",
|
||||
* // "deleted_at",
|
||||
* // "value",
|
||||
* // "option_id",
|
||||
* // "variant_id",
|
||||
* // "metadata",
|
||||
* // ],
|
||||
* // },
|
||||
* // },
|
||||
* // },
|
||||
* // }
|
||||
*/
|
||||
export function remoteQueryObjectFromString(
  config:
    | {
        entryPoint: string
        variables?: any
        fields: string[]
      }
    | {
        service: string
        variables?: any
        fields: string[]
      }
): object {
  // Normalize the union: exactly one of entryPoint/service exists on
  // `config`; the missing one is forced to undefined so both destructure.
  const { entryPoint, service, variables, fields } = {
    ...config,
    entryPoint: "entryPoint" in config ? config.entryPoint : undefined,
    service: "service" in config ? config.service : undefined,
  }

  // Top-level key of the resulting remote joiner query object.
  const entryKey = (entryPoint ?? service) as string

  const remoteJoinerConfig: object = {
    [entryKey]: {
      fields: [],
      isServiceAccess: !!service, // specifies if the entry point is a service
    },
  }

  // Variable keys consumed as nested __args; the remainder is attached to
  // the top-level entry afterwards.
  const usedVariables = new Set()

  for (const field of fields) {
    // Plain (non-nested) field: goes straight onto the entry's field list.
    if (!field.includes(".")) {
      remoteJoinerConfig[entryKey]["fields"].push(field)
      continue
    }

    // Nested field: the last segment is the property name, the preceding
    // segments form the relation path to walk (and lazily create).
    const fieldSegments = field.split(".")
    const fieldProperty = fieldSegments.pop()

    let combinedPath = ""

    // Walk one nesting level per relation segment, attaching any variable
    // whose key equals the dotted path accumulated so far as __args.
    const deepConfigRef = fieldSegments.reduce((acc, curr) => {
      combinedPath = combinedPath ? combinedPath + "." + curr : curr

      if (isObject(variables) && combinedPath in variables) {
        acc[curr] ??= {}
        acc[curr]["__args"] = variables[combinedPath]
        usedVariables.add(combinedPath)
      } else {
        acc[curr] ??= {}
      }

      return acc[curr]
    }, remoteJoinerConfig[entryKey])

    // Register the leaf property on the deepest relation level reached.
    deepConfigRef["fields"] ??= []
    deepConfigRef["fields"].push(fieldProperty)
  }

  // Variables not consumed by any nested path become the entry's own __args.
  const topLevelArgs = {}
  for (const key of Object.keys(variables ?? {})) {
    if (!usedVariables.has(key)) {
      topLevelArgs[key] = variables[key]
    }
  }

  remoteJoinerConfig[entryKey]["__args"] = topLevelArgs ?? {}

  return remoteJoinerConfig
}
|
||||
@@ -0,0 +1,51 @@
|
||||
/**
|
||||
* Transform a remote query object to a string array containing the chain of fields and relations
|
||||
*
|
||||
* @param fields
|
||||
* @param parent
|
||||
*
|
||||
* @example
|
||||
*
|
||||
* const remoteQueryObject = {
|
||||
* fields: [
|
||||
* "id",
|
||||
* "title",
|
||||
* ],
|
||||
* images: {
|
||||
* fields: ["id", "created_at", "updated_at", "deleted_at", "url", "metadata"],
|
||||
* },
|
||||
* }
|
||||
*
|
||||
* const fields = remoteQueryObjectToString(remoteQueryObject)
|
||||
*
|
||||
* console.log(fields)
|
||||
* // ["id", "title", "images.id", "images.created_at", "images.updated_at", "images.deleted_at", "images.url", "images.metadata"]
|
||||
*/
|
||||
export function remoteQueryObjectToString(
|
||||
fields: object,
|
||||
parent?: string
|
||||
): string[] {
|
||||
return Object.keys(fields).reduce((acc, key) => {
|
||||
if (key === "fields") {
|
||||
if (parent) {
|
||||
fields[key].map((fieldKey) => acc.push(`${parent}.${fieldKey}`))
|
||||
} else {
|
||||
fields[key].map((fieldKey) => acc.push(fieldKey))
|
||||
}
|
||||
|
||||
return acc
|
||||
}
|
||||
|
||||
if (typeof fields[key] === "object") {
|
||||
acc = acc.concat(
|
||||
remoteQueryObjectToString(
|
||||
fields[key],
|
||||
parent ? `${parent}.${key}` : key
|
||||
)
|
||||
)
|
||||
return acc
|
||||
}
|
||||
|
||||
return acc
|
||||
}, [] as string[])
|
||||
}
|
||||
18
packages/core/utils/src/common/remove-nullisih.ts
Normal file
18
packages/core/utils/src/common/remove-nullisih.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
import { isDefined } from "./is-defined"
|
||||
|
||||
export function removeNullish<T = unknown>(
|
||||
obj: Record<string, T>
|
||||
): Record<string, T> {
|
||||
return Object.entries(obj).reduce(
|
||||
(resultObject, [currentKey, currentValue]) => {
|
||||
if (!isDefined(currentValue) || currentValue === null) {
|
||||
return resultObject
|
||||
}
|
||||
|
||||
resultObject[currentKey] = currentValue
|
||||
|
||||
return resultObject
|
||||
},
|
||||
{}
|
||||
)
|
||||
}
|
||||
4
packages/core/utils/src/common/remove-undefined.ts
Normal file
4
packages/core/utils/src/common/remove-undefined.ts
Normal file
@@ -0,0 +1,4 @@
|
||||
// useful in cases where presence of undefined is not desired (eg. in microORM operations)
|
||||
export const removeUndefined = <T extends Record<string, any>>(obj: T): T => {
|
||||
return JSON.parse(JSON.stringify(obj)) as T
|
||||
}
|
||||
10
packages/core/utils/src/common/rules.ts
Normal file
10
packages/core/utils/src/common/rules.ts
Normal file
@@ -0,0 +1,10 @@
|
||||
/**
 * Comparison operators available when expressing rule constraints
 * against attribute values.
 */
export enum RuleOperator {
  IN = "in", // value is contained in a list
  EQ = "eq", // equal
  NE = "ne", // not equal
  GT = "gt", // greater than
  GTE = "gte", // greater than or equal
  LT = "lt", // less than
  LTE = "lte", // less than or equal
  NIN = "nin", // value is not contained in a list
}
|
||||
@@ -0,0 +1,16 @@
|
||||
export function selectorConstraintsToString(
|
||||
selector: object | object[]
|
||||
): string {
|
||||
const selectors = Array.isArray(selector) ? selector : [selector]
|
||||
|
||||
return selectors
|
||||
.map((selector_) => {
|
||||
return Object.entries(selector_)
|
||||
.map(
|
||||
([key, value]: [string, any]) =>
|
||||
`${key}: ${value._type ? `${value._type}(${value._value})` : value}`
|
||||
)
|
||||
.join(", ")
|
||||
})
|
||||
.join(" or ")
|
||||
}
|
||||
45
packages/core/utils/src/common/set-metadata.ts
Normal file
45
packages/core/utils/src/common/set-metadata.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { MedusaError } from "./errors"
|
||||
|
||||
/**
|
||||
* Dedicated method to set metadata.
|
||||
* @param obj - the entity to apply metadata to.
|
||||
* @param metadata - the metadata to set
|
||||
* @return resolves to the updated result.
|
||||
*/
|
||||
export function setMetadata(
|
||||
obj: { metadata: Record<string, unknown> | null },
|
||||
metadata: Record<string, unknown>
|
||||
): Record<string, unknown> {
|
||||
const existing = obj.metadata || {}
|
||||
const newData = {}
|
||||
|
||||
for (const [key, value] of Object.entries(metadata)) {
|
||||
if (typeof key !== "string") {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.INVALID_ARGUMENT,
|
||||
"Key type is invalid. Metadata keys must be strings"
|
||||
)
|
||||
}
|
||||
|
||||
/**
|
||||
* We reserve the empty string as a way to delete a key.
|
||||
* If the value is an empty string, we don't
|
||||
* set it, and if it exists in the existing metadata, we
|
||||
* unset the field.
|
||||
*/
|
||||
if (value === "") {
|
||||
if (key in existing) {
|
||||
delete existing[key]
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
newData[key] = value
|
||||
}
|
||||
|
||||
return {
|
||||
...existing,
|
||||
...newData,
|
||||
}
|
||||
}
|
||||
8
packages/core/utils/src/common/simple-hash.ts
Normal file
8
packages/core/utils/src/common/simple-hash.ts
Normal file
@@ -0,0 +1,8 @@
|
||||
// DJB2 hash function
|
||||
export function simpleHash(text: string): string {
|
||||
let hash = 5381
|
||||
for (let i = 0; i < text.length; i++) {
|
||||
hash = (hash << 5) + hash + text.charCodeAt(i)
|
||||
}
|
||||
return hash.toString(16)
|
||||
}
|
||||
9
packages/core/utils/src/common/string-or-regex-equals.ts
Normal file
9
packages/core/utils/src/common/string-or-regex-equals.ts
Normal file
@@ -0,0 +1,9 @@
|
||||
export const stringEqualsOrRegexMatch = (
|
||||
stringOrRegex: string | RegExp,
|
||||
testString: string
|
||||
) => {
|
||||
if (stringOrRegex instanceof RegExp) {
|
||||
return stringOrRegex.test(testString)
|
||||
}
|
||||
return stringOrRegex === testString
|
||||
}
|
||||
@@ -0,0 +1,97 @@
|
||||
/**
|
||||
* Convert a string fields array to a specific object such as { select, relation }
|
||||
* @param fields
|
||||
*
|
||||
* @example
|
||||
* const fields = [
|
||||
* "id",
|
||||
* "created_at",
|
||||
* "updated_at",
|
||||
* "deleted_at",
|
||||
* "url",
|
||||
* "metadata",
|
||||
* "tags.id",
|
||||
* "tags.created_at",
|
||||
* "tags.updated_at",
|
||||
* "tags.deleted_at",
|
||||
* "tags.value",
|
||||
* "options.id",
|
||||
* "options.created_at",
|
||||
* "options.updated_at",
|
||||
* "options.deleted_at",
|
||||
* "options.title",
|
||||
* "options.product_id",
|
||||
* "options.metadata",
|
||||
* "options.values.id",
|
||||
* "options.values.created_at",
|
||||
* "options.values.updated_at",
|
||||
* "options.values.deleted_at",
|
||||
* "options.values.value",
|
||||
* "options.values.option_id",
|
||||
* "options.values.variant_id",
|
||||
* "options.values.metadata",
|
||||
* ]
|
||||
*
|
||||
* const remoteQueryObject = stringToSelectRelationObject(fields)
|
||||
*
|
||||
* console.log(remoteQueryObject)
|
||||
* // {
|
||||
* // select: [
|
||||
* // "id",
|
||||
* // "created_at",
|
||||
* // "updated_at",
|
||||
* // "deleted_at",
|
||||
* // "url",
|
||||
* // "metadata",
|
||||
* // "tags.id",
|
||||
* // "tags.created_at",
|
||||
* // "tags.updated_at",
|
||||
* // "tags.deleted_at",
|
||||
* // "tags.value",
|
||||
* // "options.id",
|
||||
* // "options.created_at",
|
||||
* // "options.updated_at",
|
||||
* // "options.deleted_at",
|
||||
* // "options.title",
|
||||
* // "options.product_id",
|
||||
* // "options.metadata",
|
||||
* // "options.values.id",
|
||||
* // "options.values.created_at",
|
||||
* // "options.values.updated_at",
|
||||
* // "options.values.deleted_at",
|
||||
* // "options.values.value",
|
||||
* // "options.values.option_id",
|
||||
* // "options.values.variant_id",
|
||||
* // "options.values.metadata",
|
||||
* // ],
|
||||
* // relations: ["tags", "options", "options.values"],
|
||||
* // }
|
||||
*/
|
||||
export function stringToSelectRelationObject(fields: string[]): {
|
||||
select: string[]
|
||||
relations: string[]
|
||||
} {
|
||||
const tempResult = {
|
||||
select: new Set<string>(),
|
||||
relations: new Set<string>(),
|
||||
}
|
||||
|
||||
for (const field of fields) {
|
||||
tempResult.select.add(field)
|
||||
|
||||
if (!field.includes(".")) {
|
||||
continue
|
||||
}
|
||||
|
||||
const segments = field.split(".")
|
||||
segments.pop()
|
||||
const relationPath = segments.join(".")
|
||||
|
||||
tempResult.relations.add(relationPath)
|
||||
}
|
||||
|
||||
return {
|
||||
select: Array.from(tempResult.select),
|
||||
relations: Array.from(tempResult.relations),
|
||||
}
|
||||
}
|
||||
62
packages/core/utils/src/common/stringify-circular.ts
Normal file
62
packages/core/utils/src/common/stringify-circular.ts
Normal file
@@ -0,0 +1,62 @@
|
||||
const isObject = (value: any): value is object =>
|
||||
typeof value === "object" &&
|
||||
value != null &&
|
||||
!(value instanceof Boolean) &&
|
||||
!(value instanceof Date) &&
|
||||
!(value instanceof Number) &&
|
||||
!(value instanceof RegExp) &&
|
||||
!(value instanceof String)
|
||||
|
||||
const isPrimitive = (val) => {
|
||||
return val !== Object(val)
|
||||
}
|
||||
|
||||
function decycle(object: any, replacer?: Function | null) {
|
||||
const objects = new WeakMap()
|
||||
|
||||
function deepCopy(value, path) {
|
||||
let oldPath
|
||||
let newObj
|
||||
|
||||
if (replacer != null) {
|
||||
value = replacer(value)
|
||||
}
|
||||
|
||||
if (isObject(value)) {
|
||||
oldPath = objects.get(value)
|
||||
if (oldPath !== undefined) {
|
||||
return { $ref: oldPath }
|
||||
}
|
||||
|
||||
objects.set(value, path)
|
||||
|
||||
if (Array.isArray(value)) {
|
||||
newObj = []
|
||||
value.forEach((el, idx) => {
|
||||
newObj[idx] = deepCopy(el, path + "[" + idx + "]")
|
||||
})
|
||||
} else {
|
||||
newObj = {}
|
||||
Object.keys(value).forEach((name) => {
|
||||
newObj[name] = deepCopy(
|
||||
value[name],
|
||||
path + "[" + JSON.stringify(name) + "]"
|
||||
)
|
||||
})
|
||||
}
|
||||
return newObj
|
||||
}
|
||||
|
||||
return !isPrimitive(value) ? value + "" : value
|
||||
}
|
||||
|
||||
return deepCopy(object, "$")
|
||||
}
|
||||
|
||||
export function stringifyCircular(
|
||||
object: any,
|
||||
replacer?: Function | null,
|
||||
space?: number
|
||||
): string {
|
||||
return JSON.stringify(decycle(object, replacer), null, space)
|
||||
}
|
||||
7
packages/core/utils/src/common/to-camel-case.ts
Normal file
7
packages/core/utils/src/common/to-camel-case.ts
Normal file
@@ -0,0 +1,7 @@
|
||||
export function toCamelCase(str: string): string {
|
||||
return /^([a-z]+)(([A-Z]([a-z]+))+)$/.test(str)
|
||||
? str
|
||||
: str
|
||||
.toLowerCase()
|
||||
.replace(/[^a-zA-Z0-9]+(.)/g, (m, chr) => chr.toUpperCase())
|
||||
}
|
||||
5
packages/core/utils/src/common/to-kebab-case.ts
Normal file
5
packages/core/utils/src/common/to-kebab-case.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export const kebabCase = (string) =>
|
||||
string
|
||||
.replace(/([a-z])([A-Z])/g, "$1-$2")
|
||||
.replace(/[\s_]+/g, "-")
|
||||
.toLowerCase()
|
||||
5
packages/core/utils/src/common/to-pascal-case.ts
Normal file
5
packages/core/utils/src/common/to-pascal-case.ts
Normal file
@@ -0,0 +1,5 @@
|
||||
export function toPascalCase(s: string): string {
|
||||
return s.replace(/(^\w|_\w)/g, (match) =>
|
||||
match.replace(/_/g, "").toUpperCase()
|
||||
)
|
||||
}
|
||||
@@ -0,0 +1,3 @@
|
||||
/**
 * Predicate used with transaction decorators to signal that a transaction
 * should never be forced. Counterpart of `shouldForceTransaction`.
 */
export function doNotForceTransaction(): boolean {
  return false
}
|
||||
2
packages/core/utils/src/common/transaction/index.ts
Normal file
2
packages/core/utils/src/common/transaction/index.ts
Normal file
@@ -0,0 +1,2 @@
|
||||
// Barrel file for the transaction predicate helpers.
export * from "./do-not-force-transaction"
export * from "./should-force-transaction"
|
||||
@@ -0,0 +1,5 @@
|
||||
import { MODULE_RESOURCE_TYPE } from "@medusajs/types"
|
||||
|
||||
export function shouldForceTransaction(target: any): boolean {
|
||||
return target.moduleDeclaration?.resources === MODULE_RESOURCE_TYPE.ISOLATED
|
||||
}
|
||||
18
packages/core/utils/src/common/trim-zeros.ts
Normal file
18
packages/core/utils/src/common/trim-zeros.ts
Normal file
@@ -0,0 +1,18 @@
|
||||
export function trimZeros(value: string) {
|
||||
const [whole, fraction] = value.split(".")
|
||||
|
||||
if (fraction) {
|
||||
const exp = fraction.split("e")
|
||||
|
||||
const decimal = exp[0].replace(/0+$/, "")
|
||||
const expStr = exp.length > 1 ? `e${exp[1]}` : ""
|
||||
|
||||
if (!decimal) {
|
||||
return whole + expStr
|
||||
}
|
||||
|
||||
return `${whole}.${decimal}` + expStr
|
||||
}
|
||||
|
||||
return whole
|
||||
}
|
||||
3
packages/core/utils/src/common/upper-case-first.ts
Normal file
3
packages/core/utils/src/common/upper-case-first.ts
Normal file
@@ -0,0 +1,3 @@
|
||||
export function upperCaseFirst(str: string): string {
|
||||
return str.charAt(0).toUpperCase() + str.slice(1)
|
||||
}
|
||||
37
packages/core/utils/src/common/wrap-handler.ts
Normal file
37
packages/core/utils/src/common/wrap-handler.ts
Normal file
@@ -0,0 +1,37 @@
|
||||
import { NextFunction, Request, RequestHandler, Response } from "express"
|
||||
|
||||
type handler = (req: Request, res: Response) => Promise<void>
|
||||
|
||||
export const wrapHandler = (fn: handler): RequestHandler => {
|
||||
return async (req: Request, res: Response, next: NextFunction) => {
|
||||
const req_ = req as Request & { errors?: Error[] }
|
||||
if (req_?.errors?.length) {
|
||||
return res.status(400).json({
|
||||
errors: req_.errors,
|
||||
message:
|
||||
"Provided request body contains errors. Please check the data and retry the request",
|
||||
})
|
||||
}
|
||||
|
||||
try {
|
||||
return await fn(req, res)
|
||||
} catch (err) {
|
||||
next(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* @schema MultipleErrors
|
||||
* title: "Multiple Errors"
|
||||
* type: object
|
||||
* properties:
|
||||
* errors:
|
||||
* type: array
|
||||
* description: Array of errors
|
||||
* items:
|
||||
* $ref: "#/components/schemas/Error"
|
||||
* message:
|
||||
* type: string
|
||||
* default: "Provided request body contains errors. Please check the data and retry the request"
|
||||
*/
|
||||
Reference in New Issue
Block a user