feat: Add support for exporting products in backend (#8214)
Closes CC-221, CC-223, CC-224
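This PR replaces the old batch-job based product export with a workflow-driven flow: POST /admin/products/export now starts the export-products workflow, which pages through all matching products, renders them to CSV via the new convertJsonToCsv utility, stores the file through the file module, and announces completion as a "feed" notification carrying the download URL. It also introduces a reusable send-notifications step, notification.notification.created events on the notification module, and switches the local file provider's private-file marker from a "-private" suffix to a "private-" prefix.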
@@ -0,0 +1,6 @@
Product Id,Product Title,Product Subtitle,Product Status,Product External Id,Product Description,Product Handle,Product Is Giftcard,Product Discountable,Product Thumbnail,Product Collection Id,Product Type Id,Product Weight,Product Length,Product Height,Product Width,Product Hs Code,Product Origin Country,Product Mid Code,Product Material,Product Created At,Product Updated At,Product Deleted At,Product Image 1,Product Image 2,Product Tag 1,Product Tag 2,Variant Id,Variant Title,Variant Sku,Variant Barcode,Variant Ean,Variant Upc,Variant Allow Backorder,Variant Manage Inventory,Variant Hs Code,Variant Origin Country,Variant Mid Code,Variant Material,Variant Weight,Variant Length,Variant Height,Variant Width,Variant Metadata,Variant Variant Rank,Variant Product Id,Variant Created At,Variant Updated At,Variant Deleted At,Variant Price USD,Variant Price EUR,Variant Price DKK,Variant Option 1 Name,Variant Option 1 Value,Variant Option 2 Name,Variant Option 2 Value
prod_01J3CRPNVGRZ01A8GH8FQYK10Z,Base product,,draft,,"test-product-description
test line 2",base-product,false,true,test-image.png,pcol_01J3CRPNT6A0G5GG34MWHWE7QD,ptyp_01J3CRPNV39E51BGGWSKT674C5,,,,,,,,,2024-07-22T08:25:06.158Z,2024-07-22T08:25:06.158Z,,test-image.png,test-image-2.png,123,456,variant_01J3CRPNW5J6EBVVQP1TN33A58,Test variant,,,,,false,true,,,,,,,,,,0,prod_01J3CRPNVGRZ01A8GH8FQYK10Z,2024-07-22T08:25:06.182Z,2024-07-22T08:25:06.182Z,,100,45,30,size,large,color,green
prod_01J3CRPNVGRZ01A8GH8FQYK10Z,Base product,,draft,,"test-product-description
test line 2",base-product,false,true,test-image.png,pcol_01J3CRPNT6A0G5GG34MWHWE7QD,ptyp_01J3CRPNV39E51BGGWSKT674C5,,,,,,,,,2024-07-22T08:25:06.158Z,2024-07-22T08:25:06.158Z,,test-image.png,test-image-2.png,123,456,variant_01J3CRPNW6NES6EN14X93F6YYB,Test variant 2,,,,,false,true,,,,,,,,,,0,prod_01J3CRPNVGRZ01A8GH8FQYK10Z,2024-07-22T08:25:06.182Z,2024-07-22T08:25:06.182Z,,200,65,50,size,small,color,green
prod_01J3CRPNYJTCAV1QKRF6H0BY3M,Proposed product,,proposed,,test-product-description,proposed-product,false,true,test-image.png,,ptyp_01J3CRPNV39E51BGGWSKT674C5,,,,,,,,,2024-07-22T08:25:06.256Z,2024-07-22T08:25:06.256Z,,test-image.png,test-image-2.png,new-tag,,variant_01J3CRPNYZ6VZ5FVJ7WHJABV54,Test variant,,,,,false,true,,,,,,,,,,0,prod_01J3CRPNYJTCAV1QKRF6H0BY3M,2024-07-22T08:25:06.271Z,2024-07-22T08:25:06.271Z,,100,45,30,size,large,color,green
@@ -0,0 +1,2 @@
Product Id,Product Title,Product Subtitle,Product Status,Product External Id,Product Description,Product Handle,Product Is Giftcard,Product Discountable,Product Thumbnail,Product Collection Id,Product Type Id,Product Weight,Product Length,Product Height,Product Width,Product Hs Code,Product Origin Country,Product Mid Code,Product Material,Product Created At,Product Updated At,Product Deleted At,Product Image 1,Product Image 2,Product Tag 1,Variant Id,Variant Title,Variant Sku,Variant Barcode,Variant Ean,Variant Upc,Variant Allow Backorder,Variant Manage Inventory,Variant Hs Code,Variant Origin Country,Variant Mid Code,Variant Material,Variant Weight,Variant Length,Variant Height,Variant Width,Variant Metadata,Variant Variant Rank,Variant Product Id,Variant Created At,Variant Updated At,Variant Deleted At,Variant Price USD,Variant Price EUR,Variant Price DKK,Variant Option 1 Name,Variant Option 1 Value,Variant Option 2 Name,Variant Option 2 Value
prod_01J3CSN791SN1RN7X155Z8S9CN,Proposed product,,proposed,,test-product-description,proposed-product,false,true,test-image.png,,ptyp_01J3CSN76GCRSCDV9V489B5FWQ,,,,,,,,,2024-07-22T08:41:47.040Z,2024-07-22T08:41:47.040Z,,test-image.png,test-image-2.png,new-tag,variant_01J3CSN79CQ2ND94SRJSXMEMNH,Test variant,,,,,false,true,,,,,,,,,,0,prod_01J3CSN791SN1RN7X155Z8S9CN,2024-07-22T08:41:47.053Z,2024-07-22T08:41:47.053Z,,100,45,30,size,large,color,green
@@ -1,12 +1,42 @@
import { medusaIntegrationTestRunner } from "medusa-test-utils"
import { IEventBusModuleService } from "@medusajs/types"
import { TestEventUtils, medusaIntegrationTestRunner } from "medusa-test-utils"
import {
  adminHeaders,
  createAdminUser,
} from "../../../../helpers/create-admin-user"
import { getProductFixture } from "../../../../helpers/fixtures"
import fs from "fs/promises"
import path from "path"
import { ModuleRegistrationName } from "@medusajs/utils"

jest.setTimeout(50000)

const compareCSVs = async (filePath, expectedFilePath) => {
  let fileContent = await fs.readFile(filePath, { encoding: "utf-8" })
  let fixturesContent = await fs.readFile(expectedFilePath, {
    encoding: "utf-8",
  })

  // Normalize csv data to get rid of dynamic data
  const idsToReplace = ["prod_", "pcol_", "variant_", "ptyp_"]
  const dateRegex =
    /(\d{4})-(\d{2})-(\d{2})T(\d{2})\:(\d{2})\:(\d{2})\.(\d{3})Z/g
  idsToReplace.forEach((prefix) => {
    fileContent = fileContent.replace(
      new RegExp(`${prefix}\\w*\\d*`, "g"),
      "<ID>"
    )
    fixturesContent = fixturesContent.replace(
      new RegExp(`${prefix}\\w*\\d*`, "g"),
      "<ID>"
    )
  })
  fileContent = fileContent.replace(dateRegex, "<DATE>")
  fixturesContent = fixturesContent.replace(dateRegex, "<DATE>")

  expect(fileContent).toEqual(fixturesContent)
}

medusaIntegrationTestRunner({
  testSuite: ({ dbConnection, getContainer, api }) => {
    let baseProduct
@@ -17,6 +47,11 @@ medusaIntegrationTestRunner({

    let baseType

    let eventBus: IEventBusModuleService
    beforeAll(async () => {
      eventBus = getContainer().resolve(ModuleRegistrationName.EVENT_BUS)
    })

    beforeEach(async () => {
      await createAdminUser(dbConnection, adminHeaders, getContainer())

@@ -49,8 +84,53 @@ medusaIntegrationTestRunner({
        "/admin/products",
        getProductFixture({
          title: "Base product",
          description: "test-product-description\ntest line 2",
          collection_id: baseCollection.id,
          type_id: baseType.id,
          variants: [
            {
              title: "Test variant",
              prices: [
                {
                  currency_code: "usd",
                  amount: 100,
                },
                {
                  currency_code: "eur",
                  amount: 45,
                },
                {
                  currency_code: "dkk",
                  amount: 30,
                },
              ],
              options: {
                size: "large",
                color: "green",
              },
            },
            {
              title: "Test variant 2",
              prices: [
                {
                  currency_code: "usd",
                  amount: 200,
                },
                {
                  currency_code: "eur",
                  amount: 65,
                },
                {
                  currency_code: "dkk",
                  amount: 50,
                },
              ],
              options: {
                size: "small",
                color: "green",
              },
            },
          ],
        }),
        adminHeaders
      )
@@ -72,6 +152,11 @@ medusaIntegrationTestRunner({

    describe("POST /admin/products/export", () => {
      it("should export a csv file containing the expected products", async () => {
        const subscriberExecution = TestEventUtils.waitSubscribersExecution(
          "notification.notification.created",
          eventBus
        )

        // BREAKING: The batch endpoints moved to the domain routes (admin/batch-jobs -> /admin/products/export). The payload and response changed as well.
        const batchJobRes = await api.post(
          "/admin/products/export",
@@ -82,205 +167,69 @@
        const workflowId = batchJobRes.data.workflow_id
        expect(workflowId).toBeTruthy()

        // Poll to check the status until it is completed
        while (true) {
          // const res = await api.get(
          //   `/admin/batch-jobs/${batchJobId}`,
          //   adminReqConfig
          // )
          // await new Promise((resolve, _) => {
          //   setTimeout(resolve, 1000)
          // })
          // batchJob = res.data.batch_job
          // shouldContinuePulling = !(
          //   batchJob.status === "completed" || batchJob.status === "failed"
          // )
          break
        }
        await subscriberExecution
        const notifications = (
          await api.get("/admin/notifications", adminHeaders)
        ).data.notifications

        // expect(batchJob.status).toBe("completed")
        expect(notifications.length).toBe(1)
        expect(notifications[0]).toEqual(
          expect.objectContaining({
            data: expect.objectContaining({
              title: "Product export",
              description: "Product export completed successfully!",
              file: expect.objectContaining({
                url: expect.stringContaining("-product-exports.csv"),
                filename: expect.any(String),
                mimeType: "text/csv",
              }),
            }),
          })
        )

        // exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
        // const isFileExists = (await fs.stat(exportFilePath)).isFile()

        // expect(isFileExists).toBeTruthy()

        // const fileSize = (await fs.stat(exportFilePath)).size
        // expect(batchJob.result?.file_size).toBe(fileSize)

        // const data = (await fs.readFile(exportFilePath)).toString()
        // const [, ...lines] = data.split("\r\n").filter((l) => l)

        // expect(lines.length).toBe(1)

        // const lineColumn = lines[0].split(";")

        // expect(lineColumn[0]).toBe(productId)
        // expect(lineColumn[2]).toBe(productPayload.title)
        // expect(lineColumn[4]).toBe(productPayload.description)
        // expect(lineColumn[23]).toBe(variantId)
        // expect(lineColumn[24]).toBe(productPayload.variants[0].title)
        // expect(lineColumn[25]).toBe(productPayload.variants[0].sku)
        await compareCSVs(
          notifications[0].data.file.url,
          path.join(__dirname, "__fixtures__", "exported-products.csv")
        )
        await fs.rm(path.dirname(notifications[0].data.file.url), {
          force: true,
          recursive: true,
        })
      })

      // it("should export a csv file containing the expected products including new line char in the cells", async () => {
      //   const api = useApi()
      it("should export a csv file filtered by specific products", async () => {
        const subscriberExecution = TestEventUtils.waitSubscribersExecution(
          "notification.notification.created",
          eventBus
        )

        // const productPayload = {
        //   title: "Test export product",
        //   description: "test-product-description\ntest line 2",
        //   type: { value: "test-type" },
        //   images: ["test-image.png", "test-image-2.png"],
        //   collection_id: "test-collection",
        //   tags: [{ value: "123" }, { value: "456" }],
        //   options: [{ title: "size" }, { title: "color" }],
        //   variants: [
        //     {
        //       title: "Test variant",
        //       inventory_quantity: 10,
        //       sku: "test-variant-sku-product-export",
        //       prices: [
        //         {
        //           currency_code: "usd",
        //           amount: 100,
        //         },
        //         {
        //           currency_code: "eur",
        //           amount: 45,
        //         },
        //         {
        //           currency_code: "dkk",
        //           amount: 30,
        //         },
        //       ],
        //       options: [{ value: "large" }, { value: "green" }],
        //     },
        //   ],
        // }
        // const createProductRes = await api.post(
        //   "/admin/products",
        //   productPayload,
        //   adminReqConfig
        // )
        // const productId = createProductRes.data.product.id
        // const variantId = createProductRes.data.product.variants[0].id
        // BREAKING: We don't support setting batch size in the export anymore
        const batchJobRes = await api.post(
          `/admin/products/export?id=${proposedProduct.id}`,
          {},
          adminHeaders
        )

        // const batchPayload = {
        //   type: "product-export",
        //   context: {
        //     filterable_fields: {
        //       title: "Test export product",
        //     },
        //   },
        // }
        // const batchJobRes = await api.post(
        //   "/admin/batch-jobs",
        //   batchPayload,
        //   adminReqConfig
        // )
        // const batchJobId = batchJobRes.data.batch_job.id
        const workflowId = batchJobRes.data.workflow_id
        expect(workflowId).toBeTruthy()

        // expect(batchJobId).toBeTruthy()
        await subscriberExecution
        const notifications = (
          await api.get("/admin/notifications", adminHeaders)
        ).data.notifications

        // // Poll to check the status until it is completed
        // let batchJob
        // let shouldContinuePulling = true
        // while (shouldContinuePulling) {
        //   const res = await api.get(
        //     `/admin/batch-jobs/${batchJobId}`,
        //     adminReqConfig
        //   )
        expect(notifications.length).toBe(1)

        //   await new Promise((resolve, _) => {
        //     setTimeout(resolve, 1000)
        //   })
        await compareCSVs(
          notifications[0].data.file.url,
          path.join(__dirname, "__fixtures__", "filtered-products.csv")
        )

        //   batchJob = res.data.batch_job
        //   shouldContinuePulling = !(
        //     batchJob.status === "completed" || batchJob.status === "failed"
        //   )
        // }

        // expect(batchJob.status).toBe("completed")

        // exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
        // const isFileExists = (await fs.stat(exportFilePath)).isFile()

        // expect(isFileExists).toBeTruthy()

        // const fileSize = (await fs.stat(exportFilePath)).size
        // expect(batchJob.result?.file_size).toBe(fileSize)

        // const data = (await fs.readFile(exportFilePath)).toString()
        // const [, ...lines] = data.split("\r\n").filter((l) => l)

        // expect(lines.length).toBe(1)

        // const lineColumn = lines[0].split(";")

        // expect(lineColumn[0]).toBe(productId)
        // expect(lineColumn[2]).toBe(productPayload.title)
        // expect(lineColumn[4]).toBe(`"${productPayload.description}"`)
        // expect(lineColumn[23]).toBe(variantId)
        // expect(lineColumn[24]).toBe(productPayload.variants[0].title)
        // expect(lineColumn[25]).toBe(productPayload.variants[0].sku)
        // })

        // it("should export a csv file containing a limited number of products", async () => {
        //   const api = useApi()

        //   const batchPayload = {
        //     type: "product-export",
        //     context: {
        //       batch_size: 1,
        //       filterable_fields: { collection_id: "test-collection" },
        //       order: "created_at",
        //     },
        //   }

        //   const batchJobRes = await api.post(
        //     "/admin/batch-jobs",
        //     batchPayload,
        //     adminReqConfig
        //   )
        //   const batchJobId = batchJobRes.data.batch_job.id

        //   expect(batchJobId).toBeTruthy()

        //   // Poll to check the status until it is completed
        //   let batchJob
        //   let shouldContinuePulling = true
        //   while (shouldContinuePulling) {
        //     const res = await api.get(
        //       `/admin/batch-jobs/${batchJobId}`,
        //       adminReqConfig
        //     )

        //     await new Promise((resolve, _) => {
        //       setTimeout(resolve, 1000)
        //     })

        //     batchJob = res.data.batch_job
        //     shouldContinuePulling = !(
        //       batchJob.status === "completed" || batchJob.status === "failed"
        //     )
        //   }

        //   expect(batchJob.status).toBe("completed")

        //   exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
        //   const isFileExists = (await fs.stat(exportFilePath)).isFile()

        //   expect(isFileExists).toBeTruthy()

        //   const data = (await fs.readFile(exportFilePath)).toString()
        //   const [, ...lines] = data.split("\r\n").filter((l) => l)

        //   expect(lines.length).toBe(4)

        //   const csvLine = lines[0].split(";")
        //   expect(csvLine[0]).toBe("test-product")
        // })
        await fs.rm(path.dirname(notifications[0].data.file.url), {
          force: true,
          recursive: true,
        })
      })

      // it("should be able to import an exported csv file", async () => {
      //   const api = useApi()
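For context when reviewing the test above, a minimal sketch of the consumer-facing flow it exercises (illustrative only; the test drives the same endpoints directly):

// 1. Kick off the export. The response carries only the workflow id;
//    the CSV is produced in the background.
const exportRes = await api.post("/admin/products/export", {}, adminHeaders)
const workflowId = exportRes.data.workflow_id

// 2. Once the workflow finishes, a "feed" notification appears whose
//    data.file.url points at the generated CSV.
const { notifications } = (await api.get("/admin/notifications", adminHeaders)).data
const { url, filename, mimeType } = notifications[0].data.file // mimeType === "text/csv"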
@@ -31,6 +31,21 @@ module.exports = defineConfig({
      providers: [customFulfillmentProvider],
    },
  },
  [Modules.NOTIFICATION]: {
    resolve: "@medusajs/notification",
    options: {
      providers: [
        {
          resolve: "@medusajs/notification-local",
          id: "local",
          options: {
            name: "Local Notification Provider",
            channels: ["feed"],
          },
        },
      ],
    },
  },
  [Modules.FILE]: {
    resolve: "@medusajs/file",
    options: {
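The local notification provider is registered on the "feed" channel here because the export workflow emits its completion notification with channel: "feed"; without a provider on that channel the integration tests above would never see a notification to assert on.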
@@ -185,7 +185,7 @@ medusaIntegrationTestRunner({
    const notifications = await service.listNotifications()

    expect(logSpy).toHaveBeenLastCalledWith(
    expect(logSpy).toHaveBeenCalledWith(
      `Attempting to send a notification to: 'test@medusajs.com' on the channel: 'email' with template: 'order-created-template' and data: '{\"order_id\":\"1234\"}'`
    )
    expect(notifications).toHaveLength(1)
@@ -1,476 +0,0 @@
import fs from "fs/promises"
import path, { resolve, sep } from "path"
import { startBootstrapApp } from "../../../../environment-helpers/bootstrap-app"
import { useApi } from "../../../../environment-helpers/use-api"
import { getContainer } from "../../../../environment-helpers/use-container"
import { initDb, useDb } from "../../../../environment-helpers/use-db"
import { simpleSalesChannelFactory } from "../../../../factories"
import productSeeder from "../../../../helpers/product-seeder"
import {
  adminHeaders,
  createAdminUser,
} from "../../../../helpers/create-admin-user"

const setupServer = require("../../../../environment-helpers/setup-server")
const userSeeder = require("../../../../helpers/user-seeder")

const adminReqConfig = {
  headers: {
    "x-medusa-access-token": "test_token",
  },
}

const env: Record<any, any> = {
  MEDUSA_FF_MEDUSA_V2: true,
}

jest.setTimeout(180000)

// TODO SEE to use new test runner medusaIntegrationTestRunner({
//   env,
//   testSuite: ({ dbConnection, getContainer, api }) => {})

describe.skip("Batch job of product-export type", () => {
  let medusaProcess
  let dbConnection
  let exportFilePath = ""
  let topDir = ""
  let shutdownServer

  beforeAll(async () => {
    const cwd = path.resolve(path.join(__dirname, "..", "..", ".."))

    dbConnection = await initDb({ cwd, env } as any)
    shutdownServer = await startBootstrapApp({ cwd, env })
    medusaProcess = await setupServer({
      cwd,
      uploadDir: __dirname,
      env,
      verbose: true,
    })
  })

  afterAll(async () => {
    if (topDir !== "") {
      await fs.rm(resolve(__dirname, topDir), { recursive: true })
    }

    const db = useDb()
    await db.shutdown()

    await medusaProcess.kill()
    await shutdownServer()
  })

  beforeEach(async () => {
    const container = getContainer()
    await productSeeder(dbConnection)
    await createAdminUser(dbConnection, adminHeaders, container)
    await userSeeder(dbConnection)

    await simpleSalesChannelFactory(dbConnection, {
      id: "test-channel",
      is_default: true,
    })
  })

  afterEach(async () => {
    const db = useDb()
    await db.teardown()

    // @ts-ignore
    try {
      const isFileExists = (await fs.stat(exportFilePath))?.isFile()

      if (isFileExists) {
        const [, relativeRoot] = exportFilePath
          .replace(__dirname, "")
          .split(sep)

        if ((await fs.stat(resolve(__dirname, relativeRoot)))?.isDirectory()) {
          topDir = relativeRoot
        }

        await fs.unlink(exportFilePath)
      }
    } catch (err) {
      // noop
    }
  })

  it("should export a csv file containing the expected products", async () => {
    const api = useApi()

    const productPayload = {
      title: "Test export product",
      description: "test-product-description",
      type: { value: "test-type" },
      images: ["test-image.png", "test-image-2.png"],
      collection_id: "test-collection",
      tags: [{ value: "123" }, { value: "456" }],
      options: [{ title: "size" }, { title: "color" }],
      variants: [
        {
          title: "Test variant",
          inventory_quantity: 10,
          sku: "test-variant-sku-product-export",
          prices: [
            {
              currency_code: "usd",
              amount: 100,
            },
            {
              currency_code: "eur",
              amount: 45,
            },
            {
              currency_code: "dkk",
              amount: 30,
            },
          ],
          options: [{ value: "large" }, { value: "green" }],
        },
      ],
    }

    const createProductRes = await api.post(
      "/admin/products",
      productPayload,
      adminReqConfig
    )

    const productId = createProductRes.data.product.id
    const variantId = createProductRes.data.product.variants[0].id

    const batchPayload = {
      type: "product-export",
      context: {
        filterable_fields: {
          title: "Test export product",
        },
      },
    }

    const batchJobRes = await api.post(
      "/admin/batch-jobs",
      batchPayload,
      adminReqConfig
    )
    const batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Poll to check the status until it is completed
    let batchJob
    let shouldContinuePulling = true
    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      await new Promise((resolve, _) => {
        setTimeout(resolve, 1000)
      })

      batchJob = res.data.batch_job

      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )
    }

    expect(batchJob.status).toBe("completed")

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

    expect(isFileExists).toBeTruthy()

    const fileSize = (await fs.stat(exportFilePath)).size
    expect(batchJob.result?.file_size).toBe(fileSize)

    const data = (await fs.readFile(exportFilePath)).toString()
    const [, ...lines] = data.split("\r\n").filter((l) => l)

    expect(lines.length).toBe(1)

    const lineColumn = lines[0].split(";")

    expect(lineColumn[0]).toBe(productId)
    expect(lineColumn[2]).toBe(productPayload.title)
    expect(lineColumn[4]).toBe(productPayload.description)
    expect(lineColumn[23]).toBe(variantId)
    expect(lineColumn[24]).toBe(productPayload.variants[0].title)
    expect(lineColumn[25]).toBe(productPayload.variants[0].sku)
  })

  it("should export a csv file containing the expected products including new line char in the cells", async () => {
    const api = useApi()

    const productPayload = {
      title: "Test export product",
      description: "test-product-description\ntest line 2",
      type: { value: "test-type" },
      images: ["test-image.png", "test-image-2.png"],
      collection_id: "test-collection",
      tags: [{ value: "123" }, { value: "456" }],
      options: [{ title: "size" }, { title: "color" }],
      variants: [
        {
          title: "Test variant",
          inventory_quantity: 10,
          sku: "test-variant-sku-product-export",
          prices: [
            {
              currency_code: "usd",
              amount: 100,
            },
            {
              currency_code: "eur",
              amount: 45,
            },
            {
              currency_code: "dkk",
              amount: 30,
            },
          ],
          options: [{ value: "large" }, { value: "green" }],
        },
      ],
    }
    const createProductRes = await api.post(
      "/admin/products",
      productPayload,
      adminReqConfig
    )
    const productId = createProductRes.data.product.id
    const variantId = createProductRes.data.product.variants[0].id

    const batchPayload = {
      type: "product-export",
      context: {
        filterable_fields: {
          title: "Test export product",
        },
      },
    }
    const batchJobRes = await api.post(
      "/admin/batch-jobs",
      batchPayload,
      adminReqConfig
    )
    const batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Poll to check the status until it is completed
    let batchJob
    let shouldContinuePulling = true
    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      await new Promise((resolve, _) => {
        setTimeout(resolve, 1000)
      })

      batchJob = res.data.batch_job
      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )
    }

    expect(batchJob.status).toBe("completed")

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

    expect(isFileExists).toBeTruthy()

    const fileSize = (await fs.stat(exportFilePath)).size
    expect(batchJob.result?.file_size).toBe(fileSize)

    const data = (await fs.readFile(exportFilePath)).toString()
    const [, ...lines] = data.split("\r\n").filter((l) => l)

    expect(lines.length).toBe(1)

    const lineColumn = lines[0].split(";")

    expect(lineColumn[0]).toBe(productId)
    expect(lineColumn[2]).toBe(productPayload.title)
    expect(lineColumn[4]).toBe(`"${productPayload.description}"`)
    expect(lineColumn[23]).toBe(variantId)
    expect(lineColumn[24]).toBe(productPayload.variants[0].title)
    expect(lineColumn[25]).toBe(productPayload.variants[0].sku)
  })

  it("should export a csv file containing a limited number of products", async () => {
    const api = useApi()

    const batchPayload = {
      type: "product-export",
      context: {
        batch_size: 1,
        filterable_fields: { collection_id: "test-collection" },
        order: "created_at",
      },
    }

    const batchJobRes = await api.post(
      "/admin/batch-jobs",
      batchPayload,
      adminReqConfig
    )
    const batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Poll to check the status until it is completed
    let batchJob
    let shouldContinuePulling = true
    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      await new Promise((resolve, _) => {
        setTimeout(resolve, 1000)
      })

      batchJob = res.data.batch_job
      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )
    }

    expect(batchJob.status).toBe("completed")

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

    expect(isFileExists).toBeTruthy()

    const data = (await fs.readFile(exportFilePath)).toString()
    const [, ...lines] = data.split("\r\n").filter((l) => l)

    expect(lines.length).toBe(4)

    const csvLine = lines[0].split(";")
    expect(csvLine[0]).toBe("test-product")
  })

  it("should be able to import an exported csv file", async () => {
    const api = useApi()

    const batchPayload = {
      type: "product-export",
      context: {
        batch_size: 1,
        filterable_fields: { collection_id: "test-collection" },
        order: "created_at",
      },
    }

    const batchJobRes = await api.post(
      "/admin/batch-jobs",
      batchPayload,
      adminReqConfig
    )
    let batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Poll to check the status until it is completed
    let batchJob
    let shouldContinuePulling = true
    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      await new Promise((resolve, _) => {
        setTimeout(resolve, 1000)
      })

      batchJob = res.data.batch_job

      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )
    }

    expect(batchJob.status).toBe("completed")

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

    expect(isFileExists).toBeTruthy()

    const data = (await fs.readFile(exportFilePath)).toString()
    const [header, ...lines] = data.split("\r\n").filter((l) => l)

    expect(lines.length).toBe(4)

    const csvLine = lines[0].split(";")
    expect(csvLine[0]).toBe("test-product")
    expect(csvLine[2]).toBe("Test product")

    csvLine[2] = "Updated test product"
    lines.splice(0, 1, csvLine.join(";"))

    await fs.writeFile(exportFilePath, [header, ...lines].join("\r\n"))

    const importBatchJobRes = await api.post(
      "/admin/batch-jobs",
      {
        type: "product-import",
        context: {
          fileKey: exportFilePath,
        },
      },
      adminReqConfig
    )

    batchJobId = importBatchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    shouldContinuePulling = true
    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      await new Promise((resolve, _) => {
        setTimeout(resolve, 1000)
      })

      batchJob = res.data.batch_job

      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )
    }

    expect(batchJob.status).toBe("completed")

    const productsResponse = await api.get("/admin/products", adminReqConfig)
    expect(productsResponse.data.count).toBe(5)
    expect(productsResponse.data.products).toEqual(
      expect.arrayContaining([
        expect.objectContaining({
          id: csvLine[0],
          handle: csvLine[1],
          title: csvLine[2],
        }),
      ])
    )
  })
})
packages/core/core-flows/src/notification/index.ts (new file)
@@ -0,0 +1 @@
export * from "./steps"
packages/core/core-flows/src/notification/steps/index.ts (new file)
@@ -0,0 +1 @@
export * from "./send-notifications"
@@ -0,0 +1,32 @@
import { INotificationModuleService } from "@medusajs/types"
import { ModuleRegistrationName } from "@medusajs/utils"
import { StepResponse, createStep } from "@medusajs/workflows-sdk"

type SendNotificationsStepInput = {
  to: string
  channel: string
  template: string
  data?: Record<string, unknown> | null
  trigger_type?: string | null
  resource_id?: string | null
  resource_type?: string | null
  receiver_id?: string | null
  original_notification_id?: string | null
  idempotency_key?: string | null
}[]

export const sendNotificationsStepId = "send-notifications"
export const sendNotificationsStep = createStep(
  sendNotificationsStepId,
  async (data: SendNotificationsStepInput, { container }) => {
    const service = container.resolve<INotificationModuleService>(
      ModuleRegistrationName.NOTIFICATION
    )
    const created = await service.createNotifications(data)
    return new StepResponse(
      created,
      created.map((notification) => notification.id)
    )
  }
  // Most of the notifications are irreversible, so we can't compensate notifications reliably
)
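The second argument to StepResponse (the created notification ids) is what a compensation handler would receive on rollback; per the trailing comment, the step deliberately registers none, since a dispatched notification cannot be reliably undone.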
@@ -0,0 +1,133 @@
import { BigNumberInput, HttpTypes, PricingTypes } from "@medusajs/types"
import { upperCaseFirst } from "@medusajs/utils"

// We want to have one row per variant, so we need to normalize the data
export const normalizeForExport = (
  product: HttpTypes.AdminProduct[]
): object[] => {
  const res = product.reduce((acc: object[], product) => {
    const variants = product.variants ?? []
    if (!variants.length) {
      acc.push(normalizeProductForExport(product))
      return acc
    }

    variants.forEach((v) => {
      const toPush = {
        ...normalizeProductForExport(product),
        ...normalizeVariantForExport(v),
      } as any
      delete toPush["Product Variants"]

      acc.push(toPush)
    })

    return acc
  }, [])

  return res
}

const normalizeProductForExport = (product: HttpTypes.AdminProduct): object => {
  const flattenedImages = product.images?.reduce(
    (acc: Record<string, string>, image, idx) => {
      acc[beautifyKey(`product_image_${idx + 1}`)] = image.url
      return acc
    },
    {}
  )

  const flattenedTags = product.tags?.reduce(
    (acc: Record<string, string>, tag, idx) => {
      acc[beautifyKey(`product_tag_${idx + 1}`)] = tag.value
      return acc
    },
    {}
  )

  const flattenedSalesChannels = product.sales_channels?.reduce(
    (acc: Record<string, string>, salesChannel, idx) => {
      acc[beautifyKey(`product_sales_channel_${idx + 1}`)] = salesChannel.id
      return acc
    },
    {}
  )

  const res = {
    ...prefixFields(product, "product"),
    ...flattenedImages,
    ...flattenedTags,
    ...flattenedSalesChannels,
  } as any

  delete res["Product Images"]
  delete res["Product Tags"]
  delete res["Product Sales Channels"]

  // We can decide if we want the metadata in the export and how that would look
  delete res["Product Metadata"]

  // We only want the IDs for the type and collection
  delete res["Product Type"]
  delete res["Product Collection"]

  // We just rely on the variant options to reconstruct the product options, so we want to
  // omit the product options to keep the file simpler
  delete res["Product Options"]

  return res
}

const normalizeVariantForExport = (
  variant: HttpTypes.AdminProductVariant & {
    price_set?: PricingTypes.PriceSetDTO
  }
): object => {
  const flattenedPrices = variant.price_set?.prices
    ?.sort((a, b) => b.currency_code!.localeCompare(a.currency_code!))
    .reduce((acc: Record<string, BigNumberInput>, price) => {
      const regionRule = price.price_rules?.find(
        (r) => r.attribute === "region"
      )
      if (regionRule) {
        acc[beautifyKey(`variant_price_${regionRule.value}`)] = price.amount!
      } else if (!price.price_rules?.length) {
        acc[
          beautifyKey(`variant_price_${price.currency_code!.toUpperCase()}`)
        ] = price.amount!
      }
      return acc
    }, {})

  const flattenedOptions = variant.options?.reduce(
    (acc: Record<string, string>, option, idx) => {
      acc[beautifyKey(`variant_option_${idx + 1}_name`)] = option.option?.title!
      acc[beautifyKey(`variant_option_${idx + 1}_value`)] = option.value
      return acc
    },
    {}
  )

  const res = {
    ...prefixFields(variant, "variant"),
    ...flattenedPrices,
    ...flattenedOptions,
  } as any
  delete res["Variant Price Set"]
  delete res["Variant Options"]

  return res
}

const prefixFields = (obj: object, prefix: string): object => {
  const res = {}
  Object.keys(obj).forEach((key) => {
    res[beautifyKey(`${prefix}_${key}`)] = obj[key]
  })

  return res
}

const beautifyKey = (key: string): string => {
  return key.split("_").map(upperCaseFirst).join(" ")
}
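To make the flattening concrete, here is roughly what one variant row comes out as (values abridged from the fixtures above; column names are produced by beautifyKey, which title-cases snake_case segments):

// normalizeForExport([baseProduct]) -> one flat object per variant, e.g.:
// {
//   "Product Id": "prod_01J3CRPNVGRZ01A8GH8FQYK10Z",
//   "Product Title": "Base product",
//   "Product Image 1": "test-image.png",   // flattened from product.images
//   "Product Tag 1": "123",                // flattened from product.tags
//   "Variant Id": "variant_01J3CRPNW5J6EBVVQP1TN33A58",
//   "Variant Price USD": 100,              // currency price without price rules
//   "Variant Option 1 Name": "size",
//   "Variant Option 1 Value": "large",
//   ...
// }
// beautifyKey("variant_price_USD") === "Variant Price USD"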
@@ -0,0 +1,36 @@
import { IFileModuleService, HttpTypes } from "@medusajs/types"
import { ModuleRegistrationName, convertJsonToCsv } from "@medusajs/utils"
import { StepResponse, createStep } from "@medusajs/workflows-sdk"
import { normalizeForExport } from "../helpers/normalize-for-export"

export const generateProductCsvStepId = "generate-product-csv"
export const generateProductCsvStep = createStep(
  generateProductCsvStepId,
  async (products: HttpTypes.AdminProduct[], { container }) => {
    const normalizedData = normalizeForExport(products)
    const csvContent = convertJsonToCsv(normalizedData)

    const fileModule: IFileModuleService = container.resolve(
      ModuleRegistrationName.FILE
    )

    const filename = `${Date.now()}-product-exports.csv`
    const file = await fileModule.createFiles({
      filename,
      mimeType: "text/csv",
      content: csvContent,
    })

    return new StepResponse({ id: file.id, filename }, file.id)
  },
  async (fileId, { container }) => {
    if (!fileId) {
      return
    }

    const fileModule: IFileModuleService = container.resolve(
      ModuleRegistrationName.FILE
    )
    await fileModule.deleteFiles(fileId)
  }
)
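Note the second function passed to createStep: it is the step's compensation handler. If a later workflow step fails, it receives the file id returned by the forward pass and deletes the half-finished export again, so no orphaned CSVs are left in the file module.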
@@ -0,0 +1,49 @@
import { FilterableProductProps, RemoteQueryFunction } from "@medusajs/types"
import {
  ContainerRegistrationKeys,
  remoteQueryObjectFromString,
} from "@medusajs/utils"
import { createStep, StepResponse } from "@medusajs/workflows-sdk"

type StepInput = {
  select: string[]
  filter?: FilterableProductProps
}

export const getAllProductsStepId = "get-all-products"
export const getAllProductsStep = createStep(
  getAllProductsStepId,
  async (data: StepInput, { container }) => {
    const remoteQuery: RemoteQueryFunction = container.resolve(
      ContainerRegistrationKeys.REMOTE_QUERY
    )

    const allProducts: any[] = []
    const pageSize = 200
    let page = 0

    // We intentionally fetch the products serially here to avoid putting too much load on the DB
    while (true) {
      const remoteQueryObject = remoteQueryObjectFromString({
        entryPoint: "product",
        variables: {
          filters: data.filter,
          skip: page * pageSize,
          take: pageSize,
        },
        fields: data.select,
      })

      const { rows: products } = await remoteQuery(remoteQueryObject)
      allProducts.push(...products)

      if (products.length < pageSize) {
        break
      }

      page += 1
    }

    return new StepResponse(allProducts, allProducts)
  }
)
@@ -2,6 +2,7 @@ export * from "./create-products"
export * from "./update-products"
export * from "./delete-products"
export * from "./get-products"
export * from "./get-all-products"
export * from "./create-variant-pricing-link"
export * from "./create-product-options"
export * from "./update-product-options"
@@ -19,3 +20,4 @@ export * from "./delete-product-types"
export * from "./create-product-tags"
export * from "./update-product-tags"
export * from "./delete-product-tags"
export * from "./generate-product-csv"
@@ -1,6 +1,12 @@
import { WorkflowData, createWorkflow } from "@medusajs/workflows-sdk"
import { useRemoteQueryStep } from "../../common"
import {
  WorkflowData,
  createWorkflow,
  transform,
} from "@medusajs/workflows-sdk"
import { WorkflowTypes } from "@medusajs/types"
import { generateProductCsvStep, getAllProductsStep } from "../steps"
import { useRemoteQueryStep } from "../../common"
import { sendNotificationsStep } from "../../notification"

export const exportProductsWorkflowId = "export-products"
export const exportProductsWorkflow = createWorkflow(
@@ -8,11 +14,39 @@ export const exportProductsWorkflow = createWorkflow(
  (
    input: WorkflowData<WorkflowTypes.ProductWorkflow.ExportProductsDTO>
  ): WorkflowData<void> => {
    const products = useRemoteQueryStep({
      entry_point: "product",
      fields: input.select,
      variables: input.filter,
      list: true,
    const products = getAllProductsStep(input).config({
      async: true,
      backgroundExecution: true,
    })

    const file = generateProductCsvStep(products)
    const fileDetails = useRemoteQueryStep({
      fields: ["id", "url"],
      entry_point: "file",
      variables: { id: file.id },
      list: false,
    })

    const notifications = transform({ fileDetails, file }, (data) => {
      return [
        {
          // We don't need the recipient here for now, but if we want to push feed notifications to a specific user we could add it.
          to: "",
          channel: "feed",
          template: "admin-ui",
          data: {
            title: "Product export",
            description: "Product export completed successfully!",
            file: {
              filename: data.file.filename,
              url: data.fileDetails.url,
              mimeType: "text/csv",
            },
          },
        },
      ]
    })

    sendNotificationsStep(notifications)
  }
)
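The API route itself is not part of this diff; a minimal sketch of how the workflow is presumably invoked from the handler (standard workflows-sdk usage; names are illustrative):

// hypothetical route handler body (not in this diff)
const { transaction } = await exportProductsWorkflow(req.scope).run({
  input: { select: ["*"], filter: req.filterableFields },
})
// Because getAllProductsStep is configured async + backgroundExecution,
// run() returns before the CSV exists, and the route can respond immediately
// with the workflow id that the integration test asserts on.
res.json({ workflow_id: transaction.transactionId })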
@@ -1,4 +1,5 @@
import { IEventBusModuleService } from "@medusajs/types"
import { EventEmitter } from "events"

// Allows you to wait for all subscribers to execute for a given event. Only works with the local event bus.
export const waitSubscribersExecution = (
@@ -6,23 +7,36 @@ export const waitSubscribersExecution = (
  eventBus: IEventBusModuleService
) => {
  const subscriberPromises: Promise<any>[] = []
  const eventEmitter: EventEmitter = (eventBus as any).eventEmitter_

  ;(eventBus as any).eventEmitter_.listeners(eventName).forEach((listener) => {
    ;(eventBus as any).eventEmitter_.removeListener("order.created", listener)

  // If there are no existing listeners, resolve once the event happens. Otherwise, wrap the existing subscribers in a promise and resolve once they are done.
  if (!eventEmitter.listeners(eventName).length) {
    let ok, nok
    const promise = new Promise((resolve, reject) => {
      ok = resolve
      nok = reject
    })

    subscriberPromises.push(promise)
    eventEmitter.on(eventName, ok)
  } else {
    eventEmitter.listeners(eventName).forEach((listener: any) => {
      eventEmitter.removeListener(eventName, listener)

    const newListener = async (...args2) => {
      return await listener.apply(eventBus, args2).then(ok).catch(nok)
    }
      let ok, nok
      const promise = new Promise((resolve, reject) => {
        ok = resolve
        nok = reject
      })
      subscriberPromises.push(promise)

    ;(eventBus as any).eventEmitter_.on("order.created", newListener)
  })
      const newListener = async (...args2) => {
        return await listener.apply(eventBus, args2).then(ok).catch(nok)
      }

      eventEmitter.on(eventName, newListener)
    })
  }

  return Promise.all(subscriberPromises)
}
@@ -329,7 +329,7 @@ export const ModulesDefinition: {
    label: upperCaseFirst(ModuleRegistrationName.NOTIFICATION),
    isRequired: false,
    isQueryable: true,
    dependencies: ["logger"],
    dependencies: [ModuleRegistrationName.EVENT_BUS, "logger"],
    defaultModuleDeclaration: {
      scope: MODULE_SCOPE.INTERNAL,
      resources: MODULE_RESOURCE_TYPE.SHARED,
@@ -1,5 +1,6 @@
import { BaseFilterable } from "../../dal"
import { BigNumberInput, BigNumberValue } from "../../totals"
import { PriceRuleDTO } from "./price-rule"

/**
 * @interface
@@ -27,6 +28,16 @@ export interface MoneyAmountDTO {
   * The maximum quantity required to be purchased for this price to be applied.
   */
  max_quantity?: BigNumberValue
  /**
   * The number of rules that apply to this price
   */
  rules_count?: number

  /**
   * The price rules that apply to this price
   */
  price_rules?: PriceRuleDTO[]

  /**
   * When the money_amount was created.
   */
@@ -38,6 +38,7 @@
    "awilix": "^8.0.1",
    "bignumber.js": "^9.1.2",
    "dotenv": "^16.4.5",
    "json-2-csv": "^5.5.4",
    "jsonwebtoken": "^9.0.2",
    "knex": "2.4.2",
    "pluralize": "^8.0.0",
packages/core/utils/src/csv/index.ts (new file)
@@ -0,0 +1 @@
export * from "./jsontocsv"
packages/core/utils/src/csv/jsontocsv.ts (new file)
@@ -0,0 +1,17 @@
import { json2csv } from "json-2-csv"

export interface ConvertJsonToCsvOptions<T> {}

export const convertJsonToCsv = <T extends object>(
  data: T[],
  options?: ConvertJsonToCsvOptions<T>
) => {
  return json2csv(data, {
    prependHeader: true,
    arrayIndexesAsKeys: true,
    expandNestedObjects: true,
    expandArrayObjects: true,
    unwindArrays: false,
    emptyFieldValue: "",
  })
}
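A quick usage sketch (json-2-csv v5's json2csv is synchronous, so the helper returns the CSV string directly):

const csv = convertJsonToCsv([
  { "Product Id": "prod_123", "Product Title": "Base product" },
  { "Product Id": "prod_456", "Product Title": "" },
])
// Product Id,Product Title
// prod_123,Base product
// prod_456,            <- emptyFieldValue keeps blank cells as empty strings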
@@ -26,5 +26,6 @@ export * from "./shipping"
export * from "./totals"
export * from "./totals/big-number"
export * from "./user"
export * from "./csv"

export const MedusaModuleType = Symbol.for("MedusaModule")
packages/core/utils/src/notification/events.ts (new file)
@@ -0,0 +1,9 @@
import { buildEventNamesFromEntityName } from "../event-bus"
import { Modules } from "../modules-sdk"

const eventBaseNames: ["notification"] = ["notification"]

export const NotificationEvents = buildEventNamesFromEntityName(
  eventBaseNames,
  Modules.NOTIFICATION
)
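buildEventNamesFromEntityName expands each base name into the module's event map, so NotificationEvents.NOTIFICATION_CREATED resolves to "notification.notification.created" — exactly the event name the new integration test subscribes to via waitSubscribersExecution.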
@@ -1 +1,2 @@
export * from "./abstract-notification-provider"
export * from "./events"
@@ -1,6 +1,16 @@
import { INotificationModuleService } from "@medusajs/types"
import { Module, Modules } from "@medusajs/utils"
import { moduleIntegrationTestRunner, SuiteOptions } from "medusa-test-utils"
import {
  CommonEvents,
  Module,
  Modules,
  NotificationEvents,
  composeMessage,
} from "@medusajs/utils"
import {
  MockEventBusService,
  moduleIntegrationTestRunner,
  SuiteOptions,
} from "medusa-test-utils"
import { resolve } from "path"
import { NotificationModuleService } from "@services"

@@ -27,6 +37,12 @@ moduleIntegrationTestRunner({
  moduleOptions,
  testSuite: ({ service }: SuiteOptions<INotificationModuleService>) =>
    describe("Notification Module Service", () => {
      let eventBusEmitSpy

      beforeEach(() => {
        eventBusEmitSpy = jest.spyOn(MockEventBusService.prototype, "emit")
      })

      it(`should export the appropriate linkable configuration`, () => {
        const linkable = Module(Modules.NOTIFICATION, {
          service: NotificationModuleService,
@@ -67,6 +83,27 @@ moduleIntegrationTestRunner({
        )
      })

      it("emits an event when a notification is created", async () => {
        const notification = {
          to: "admin@medusa.com",
          template: "some-template",
          channel: "email",
          data: {},
        }

        const result = await service.createNotifications(notification)

        expect(eventBusEmitSpy.mock.calls[0][0]).toHaveLength(1)
        expect(eventBusEmitSpy).toHaveBeenCalledWith([
          composeMessage(NotificationEvents.NOTIFICATION_CREATED, {
            data: { id: result.id },
            object: "notification",
            source: Modules.NOTIFICATION,
            action: CommonEvents.CREATED,
          }),
        ])
      })

      it("ensures the same notification is not sent twice", async () => {
        const notification = {
          to: "admin@medusa.com",
@@ -8,6 +8,7 @@ import {
  NotificationTypes,
} from "@medusajs/types"
import {
  EmitEvents,
  InjectManager,
  InjectTransactionManager,
  MedusaContext,
@@ -17,6 +18,7 @@
} from "@medusajs/utils"
import { Notification } from "@models"
import NotificationProviderService from "./notification-provider"
import { eventBuilders } from "@utils"

type InjectedDependencies = {
  baseRepository: DAL.RepositoryService
@@ -64,6 +66,7 @@ export default class NotificationModuleService
  ): Promise<NotificationTypes.NotificationDTO>

  @InjectManager("baseRepository_")
  @EmitEvents()
  async createNotifications(
    data:
      | NotificationTypes.CreateNotificationDTO
@@ -83,6 +86,11 @@ export default class NotificationModuleService
      NotificationTypes.NotificationDTO[]
    >(createdNotifications)

    eventBuilders.createdNotification({
      data: serialized,
      sharedContext,
    })

    return Array.isArray(data) ? serialized : serialized[0]
  }
packages/modules/notification/src/utils/events.ts (new file)
@@ -0,0 +1,15 @@
import {
  CommonEvents,
  eventBuilderFactory,
  Modules,
  NotificationEvents,
} from "@medusajs/utils"

export const eventBuilders = {
  createdNotification: eventBuilderFactory({
    source: Modules.NOTIFICATION,
    action: CommonEvents.CREATED,
    object: "notification",
    eventsEnum: NotificationEvents,
  }),
}
packages/modules/notification/src/utils/index.ts (new file)
@@ -0,0 +1 @@
export * from "./events"
@@ -38,9 +38,9 @@ export class LocalFileService extends AbstractFileProviderService {
    const fileKey = path.join(
      parsedFilename.dir,
      // We append "private" to the file key so deletions and presigned URLs can know which folder to look into
      `${Date.now()}-${parsedFilename.base}${
        file.access === "public" ? "" : "-private"
      // We prepend "private" to the file key so deletions and presigned URLs can know which folder to look into
      `${file.access === "public" ? "" : "private-"}${Date.now()}-${
        parsedFilename.base
      }`
    )
@@ -57,7 +57,7 @@ export class LocalFileService extends AbstractFileProviderService {
  }

  async delete(file: FileTypes.ProviderDeleteFileDTO): Promise<void> {
    const baseDir = file.fileKey.endsWith("-private")
    const baseDir = file.fileKey.startsWith("private-")
      ? this.privateUploadDir_
      : this.uploadDir_
@@ -77,7 +77,7 @@ export class LocalFileService extends AbstractFileProviderService {
  async getPresignedDownloadUrl(
    file: FileTypes.ProviderGetFileDTO
  ): Promise<string> {
    const isPrivate = file.fileKey.endsWith("-private")
    const isPrivate = file.fileKey.startsWith("private-")
    const baseDir = isPrivate ? this.privateUploadDir_ : this.uploadDir_

    const filePath = this.getUploadFilePath(baseDir, file.fileKey)
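Switching the marker from a "-private" suffix to a "private-" prefix means the stored key now ends with the file's real extension (the old scheme produced keys like 1721638029-report.csv-private), while delete and getPresignedDownloadUrl can still infer the correct folder from the key alone.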
yarn.lock
@@ -5390,6 +5390,7 @@ __metadata:
    expect-type: ^0.19.0
    express: ^4.18.2
    jest: ^29.7.0
    json-2-csv: ^5.5.4
    jsonwebtoken: ^9.0.2
    knex: 2.4.2
    pluralize: ^8.0.0
@@ -16093,6 +16094,13 @@ __metadata:
  languageName: node
  linkType: hard

"deeks@npm:3.1.0":
  version: 3.1.0
  resolution: "deeks@npm:3.1.0"
  checksum: 3173ca28466cf31d550248c034c5466d93c5aecb8ee8ca547a2c9f471e62af4ebed7456c3310503be901d982867071b4411030a6b724528739895aee1dc2b482
  languageName: node
  linkType: hard

"deep-eql@npm:^4.1.3":
  version: 4.1.3
  resolution: "deep-eql@npm:4.1.3"
@@ -16378,6 +16386,13 @@
  languageName: node
  linkType: hard

"doc-path@npm:4.1.1":
  version: 4.1.1
  resolution: "doc-path@npm:4.1.1"
  checksum: 5a908c4d0c8431fa60349cad1d5f0775cf9825d4d85e6bd7f55925c01d6278be8dd04f6858b8f8fdc8ea992a63545595ea77a2282551ff95538608f382b46f8a
  languageName: node
  linkType: hard

"doctrine@npm:^2.1.0":
  version: 2.1.0
  resolution: "doctrine@npm:2.1.0"
@@ -21417,6 +21432,16 @@
  languageName: node
  linkType: hard

"json-2-csv@npm:^5.5.4":
  version: 5.5.4
  resolution: "json-2-csv@npm:5.5.4"
  dependencies:
    deeks: 3.1.0
    doc-path: 4.1.1
  checksum: 1ecfbdb93aa3079e943bcaad280547a17240a9ecf13915a843856378612899c7c612c26ae14272e2b3372f8bccc2ecfef54530e05bcda6ae8de1b616b4b0296a
  languageName: node
  linkType: hard

"json-buffer@npm:3.0.1":
  version: 3.0.1
  resolution: "json-buffer@npm:3.0.1"