feat: Add basic support for importing products (#8266)

This commit is contained in:
Stevche Radevski
2024-07-25 11:07:24 +02:00
committed by GitHub
parent a26b7cf253
commit 444a244eab
10 changed files with 520 additions and 115 deletions

View File

@@ -234,118 +234,6 @@ medusaIntegrationTestRunner({
recursive: true,
})
})
// it("should be able to import an exported csv file", async () => {
// const api = useApi()
// const batchPayload = {
// type: "product-export",
// context: {
// batch_size: 1,
// filterable_fields: { collection_id: "test-collection" },
// order: "created_at",
// },
// }
// const batchJobRes = await api.post(
// "/admin/batch-jobs",
// batchPayload,
// adminReqConfig
// )
// let batchJobId = batchJobRes.data.batch_job.id
// expect(batchJobId).toBeTruthy()
// // Pull to check the status until it is completed
// let batchJob
// let shouldContinuePulling = true
// while (shouldContinuePulling) {
// const res = await api.get(
// `/admin/batch-jobs/${batchJobId}`,
// adminReqConfig
// )
// await new Promise((resolve, _) => {
// setTimeout(resolve, 1000)
// })
// batchJob = res.data.batch_job
// shouldContinuePulling = !(
// batchJob.status === "completed" || batchJob.status === "failed"
// )
// }
// expect(batchJob.status).toBe("completed")
// exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
// const isFileExists = (await fs.stat(exportFilePath)).isFile()
// expect(isFileExists).toBeTruthy()
// const data = (await fs.readFile(exportFilePath)).toString()
// const [header, ...lines] = data.split("\r\n").filter((l) => l)
// expect(lines.length).toBe(4)
// const csvLine = lines[0].split(";")
// expect(csvLine[0]).toBe("test-product")
// expect(csvLine[2]).toBe("Test product")
// csvLine[2] = "Updated test product"
// lines.splice(0, 1, csvLine.join(";"))
// await fs.writeFile(exportFilePath, [header, ...lines].join("\r\n"))
// const importBatchJobRes = await api.post(
// "/admin/batch-jobs",
// {
// type: "product-import",
// context: {
// fileKey: exportFilePath,
// },
// },
// adminReqConfig
// )
// batchJobId = importBatchJobRes.data.batch_job.id
// expect(batchJobId).toBeTruthy()
// shouldContinuePulling = true
// while (shouldContinuePulling) {
// const res = await api.get(
// `/admin/batch-jobs/${batchJobId}`,
// adminReqConfig
// )
// await new Promise((resolve, _) => {
// setTimeout(resolve, 1000)
// })
// batchJob = res.data.batch_job
// shouldContinuePulling = !(
// batchJob.status === "completed" || batchJob.status === "failed"
// )
// }
// expect(batchJob.status).toBe("completed")
// const productsResponse = await api.get(
// "/admin/products",
// adminReqConfig
// )
// expect(productsResponse.data.count).toBe(5)
// expect(productsResponse.data.products).toEqual(
// expect.arrayContaining([
// expect.objectContaining({
// id: csvLine[0],
// handle: csvLine[1],
// title: csvLine[2],
// }),
// ])
// )
// })
})
},
})

View File

@@ -60,7 +60,6 @@ medusaIntegrationTestRunner({
"/admin/products",
getProductFixture({
title: "Base product",
type_id: baseType.id,
}),
adminHeaders
)
@@ -72,7 +71,7 @@ medusaIntegrationTestRunner({
})
describe("POST /admin/products/export", () => {
it("should import a products CSV file", async () => {
it("should import a previously exported products CSV file", async () => {
const subscriberExecution = TestEventUtils.waitSubscribersExecution(
"notification.notification.created",
eventBus
@@ -83,6 +82,17 @@ medusaIntegrationTestRunner({
{ encoding: "utf-8" }
)
fileContent = fileContent.replace(
/prod_01J3CRPNVGRZ01A8GH8FQYK10Z/g,
baseProduct.id
)
fileContent = fileContent.replace(
/variant_01J3CRPNW5J6EBVVQP1TN33A58/g,
baseProduct.variants[0].id
)
fileContent = fileContent.replace(/pcol_\w*\d*/g, baseCollection.id)
fileContent = fileContent.replace(/ptyp_\w*\d*/g, baseType.id)
const { form, meta } = getUploadReq({
name: "test.csv",
content: fileContent,
@@ -108,7 +118,189 @@ medusaIntegrationTestRunner({
}),
})
)
const dbProducts = (await api.get("/admin/products", adminHeaders)).data
.products
expect(dbProducts).toHaveLength(2)
expect(dbProducts).toEqual(
expect.arrayContaining([
expect.objectContaining({
id: baseProduct.id,
handle: "base-product",
is_giftcard: false,
thumbnail: "test-image.png",
status: "draft",
description: "test-product-description\ntest line 2",
options: [
expect.objectContaining({
title: "size",
values: expect.arrayContaining([
expect.objectContaining({
value: "small",
}),
expect.objectContaining({
value: "large",
}),
]),
}),
expect.objectContaining({
title: "color",
values: expect.arrayContaining([
expect.objectContaining({
value: "green",
}),
]),
}),
],
images: expect.arrayContaining([
expect.objectContaining({
url: "test-image.png",
}),
expect.objectContaining({
url: "test-image-2.png",
}),
]),
tags: [
expect.objectContaining({
value: "123",
}),
expect.objectContaining({
value: "456",
}),
],
type: expect.objectContaining({
id: baseType.id,
}),
collection: expect.objectContaining({
id: baseCollection.id,
}),
variants: [
expect.objectContaining({
title: "Test variant",
allow_backorder: false,
manage_inventory: true,
prices: [
expect.objectContaining({
currency_code: "usd",
amount: 100,
}),
expect.objectContaining({
currency_code: "eur",
amount: 45,
}),
expect.objectContaining({
currency_code: "dkk",
amount: 30,
}),
],
options: [
expect.objectContaining({
value: "large",
}),
expect.objectContaining({
value: "green",
}),
],
}),
expect.objectContaining({
title: "Test variant 2",
allow_backorder: false,
manage_inventory: true,
// TODO: Since we are doing a product update, there won't be any prices created for the variant
options: [
expect.objectContaining({
value: "small",
}),
expect.objectContaining({
value: "green",
}),
],
}),
],
created_at: expect.any(String),
updated_at: expect.any(String),
}),
expect.objectContaining({
id: expect.any(String),
handle: "proposed-product",
is_giftcard: false,
thumbnail: "test-image.png",
status: "proposed",
description: "test-product-description",
options: [
expect.objectContaining({
title: "size",
values: expect.arrayContaining([
expect.objectContaining({
value: "large",
}),
]),
}),
expect.objectContaining({
title: "color",
values: expect.arrayContaining([
expect.objectContaining({
value: "green",
}),
]),
}),
],
images: expect.arrayContaining([
expect.objectContaining({
url: "test-image.png",
}),
expect.objectContaining({
url: "test-image-2.png",
}),
]),
tags: [
expect.objectContaining({
value: "new-tag",
}),
],
type: expect.objectContaining({
id: baseType.id,
}),
collection: null,
variants: [
expect.objectContaining({
title: "Test variant",
allow_backorder: false,
manage_inventory: true,
prices: [
expect.objectContaining({
currency_code: "usd",
amount: 100,
}),
expect.objectContaining({
currency_code: "eur",
amount: 45,
}),
expect.objectContaining({
currency_code: "dkk",
amount: 30,
}),
],
options: [
expect.objectContaining({
value: "large",
}),
expect.objectContaining({
value: "green",
}),
],
}),
],
created_at: expect.any(String),
updated_at: expect.any(String),
}),
])
)
})
it("should fail on invalid prices being present in the CSV", async () => {})
it("should fail on non-existent fields being present in the CSV", async () => {})
})
},
})

View File

@@ -0,0 +1,216 @@
import { HttpTypes } from "@medusajs/types"
import { MedusaError, lowerCaseFirst } from "@medusajs/utils"
// We want to convert the csv data format to a standard DTO format.
export const normalizeForImport = (
  rawProducts: object[]
): HttpTypes.AdminCreateProduct[] => {
  const groupedByHandle = new Map<
    string,
    {
      product: HttpTypes.AdminCreateProduct
      variants: HttpTypes.AdminCreateProductVariant[]
    }
  >()

  // Group rows by handle: the first row seen for a handle provides the
  // product-level fields, and every row contributes one variant.
  for (const rawProduct of rawProducts) {
    const handle = rawProduct["Product Handle"]
    const variant = normalizeVariantForImport(rawProduct)
    const existingEntry = groupedByHandle.get(handle)

    if (existingEntry) {
      existingEntry.variants.push(variant)
    } else {
      groupedByHandle.set(handle, {
        product: normalizeProductForImport(rawProduct),
        variants: [variant],
      })
    }
  }

  return Array.from(groupedByHandle.values()).map((entry) => {
    // Derive product-level option definitions from the distinct values seen
    // across all of the product's variants.
    const optionValues: Record<string, Set<string>> = {}
    for (const variant of entry.variants) {
      for (const [optionTitle, optionValue] of Object.entries(
        variant.options ?? {}
      )) {
        if (!optionValues[optionTitle]) {
          optionValues[optionTitle] = new Set()
        }
        optionValues[optionTitle].add(optionValue as string)
      }
    }

    return {
      ...entry.product,
      options: Object.entries(optionValues).map(([title, values]) => ({
        title,
        values: Array.from(values),
      })),
      variants: entry.variants,
    }
  })
}
// Product-level CSV columns to skip when copying fields onto the product DTO.
// Currently empty; kept as a Map for symmetry with variantFieldsToOmit.
const productFieldsToOmit = new Map()
// Variant-level CSV columns to skip when copying fields onto the variant DTO.
const variantFieldsToOmit = new Map([["variant_product_id", true]])
// We use an array here as we do a substring matching as a check.
// These fields can have a numeric value, but they are stored as string in the DB so we need to normalize them
const stringFields = [
  "product_tag_",
  "variant_barcode",
  "variant_sku",
  "variant_ean",
  "variant_upc",
  "variant_hs_code",
  "variant_mid_code",
]
const normalizeProductForImport = (
rawProduct: object
): HttpTypes.AdminCreateProduct => {
const response = {}
Object.entries(rawProduct).forEach(([key, value]) => {
const normalizedKey = snakecaseKey(key)
const normalizedValue = getNormalizedValue(normalizedKey, value)
// We have no way of telling if a field is set as an empty string or it was undefined, so we completely omit empty fields.
if (normalizedValue === "") {
return
}
if (normalizedKey.startsWith("product_image_")) {
response["images"] = [
...(response["images"] || []),
{ url: normalizedValue },
]
return
}
if (normalizedKey.startsWith("product_tag_")) {
response["tags"] = [
...(response["tags"] || []),
{ value: normalizedValue },
]
return
}
if (normalizedKey.startsWith("product_sales_channel_")) {
response["sales_channels"] = [
...(response["sales_channels"] || []),
{ id: normalizedValue },
]
return
}
if (
normalizedKey.startsWith("product_") &&
!productFieldsToOmit.has(normalizedKey)
) {
response[normalizedKey.replace("product_", "")] = normalizedValue
return
}
})
return response as HttpTypes.AdminCreateProduct
}
const normalizeVariantForImport = (
rawProduct: object
): HttpTypes.AdminCreateProductVariant => {
const response = {}
const options = new Map<number, { name?: string; value?: string }>()
Object.entries(rawProduct).forEach(([key, value]) => {
const normalizedKey = snakecaseKey(key)
const normalizedValue = getNormalizedValue(normalizedKey, value)
// We have no way of telling if a field is set as an empty string or it was undefined, so we completely omit empty fields.
if (normalizedValue === "") {
return
}
if (normalizedKey.startsWith("variant_price_")) {
const priceKey = normalizedKey.replace("variant_price_", "")
// Note: If we start using the region name instead of ID, this check might not always work.
if (priceKey.length === 3) {
response["prices"] = [
...(response["prices"] || []),
{ currency_code: priceKey.toLowerCase(), amount: normalizedValue },
]
} else {
response["prices"] = [
...(response["prices"] || []),
{
amount: normalizedValue,
rules: { region_id: priceKey },
},
]
}
return
}
if (normalizedKey.startsWith("variant_option_")) {
const keyBase = normalizedKey.replace("variant_option_", "")
const optionIndex = parseInt(keyBase.split("_")[0])
const optionType = keyBase.split("_")[1]
options.set(optionIndex, {
...options.get(optionIndex),
[optionType]: normalizedValue,
})
return
}
if (
normalizedKey.startsWith("variant_") &&
!variantFieldsToOmit.has(normalizedKey)
) {
response[normalizedKey.replace("variant_", "")] = normalizedValue
return
}
})
response["options"] = Array.from(options.values()).reduce(
(agg: Record<string, string>, option) => {
if (!option.name) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
`Missing option name for product with handle ${rawProduct["Product Handle"]}`
)
}
if (!option.value) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
`Missing option value for product with handle ${rawProduct["Product Handle"]} and option ${option.name}`
)
}
agg[option.name] = option.value
return agg
},
{}
)
return response as HttpTypes.AdminCreateProductVariant
}
/**
 * Normalizes a single CSV cell value for the given (already snakecased) key.
 * Keys matching a `stringFields` prefix are stored as strings in the DB, so
 * numeric values for them are coerced to strings; all other values pass
 * through unchanged.
 */
const getNormalizedValue = (key: string, value: any): any => {
  if (!stringFields.some((field) => key.startsWith(field))) {
    return value
  }

  // Guard against null/undefined cells: calling toString() on them would
  // throw a TypeError instead of letting callers skip the empty field.
  return value?.toString() ?? value
}
// Converts a CSV header such as "Product Handle" to "product_handle" —
// only the first letter of each word is lowercased (via lowerCaseFirst).
const snakecaseKey = (key: string): string => {
  const words = key.split(" ")
  return words.map((word) => lowerCaseFirst(word)).join("_")
}

View File

@@ -0,0 +1,63 @@
import { HttpTypes, IProductModuleService, ProductTypes } from "@medusajs/types"
import { MedusaError, ModuleRegistrationName } from "@medusajs/utils"
import { StepResponse, createStep } from "@medusajs/workflows-sdk"
export const groupProductsForBatchStepId = "group-products-for-batch"
/**
 * Splits the normalized import data into products to create and products to
 * update, based on whether a product with the same handle already exists.
 */
export const groupProductsForBatchStep = createStep(
  groupProductsForBatchStepId,
  async (data: HttpTypes.AdminCreateProduct[], { container }) => {
    const service = container.resolve<IProductModuleService>(
      ModuleRegistrationName.PRODUCT
    )

    const existingProducts = await service.listProducts(
      {
        // We already validate that there is handle in a previous step
        handle: data.map((product) => product.handle) as string[],
      },
      { take: null, select: ["handle"] }
    )
    const existingHandles = new Set(existingProducts.map((p) => p.handle))

    const toCreate: HttpTypes.AdminCreateProduct[] = []
    const toUpdate: (HttpTypes.AdminUpdateProduct & { id: string })[] = []

    for (const product of data) {
      // There are few data normalizations to do if we are dealing with an update.
      if (existingHandles.has(product.handle!)) {
        if (!(product as any).id) {
          throw new MedusaError(
            MedusaError.Types.INVALID_DATA,
            "Product id is required when updating products in import"
          )
        }

        // TODO: Currently the update product workflow doesn't update variant pricing, but we should probably add support for it.
        product.variants?.forEach((variant: any) => {
          delete variant.prices
        })

        toUpdate.push(product as HttpTypes.AdminUpdateProduct & { id: string })
        continue
      }

      // New products will be created with a new ID, even if there is one present in the CSV.
      // To add support for creating with predefined IDs we will need to do changes to the upsert method.
      delete (product as any).id
      toCreate.push(product)
    }

    return new StepResponse({ create: toCreate, update: toUpdate })
  }
)

View File

@@ -21,3 +21,5 @@ export * from "./create-product-tags"
export * from "./update-product-tags"
export * from "./delete-product-tags"
export * from "./generate-product-csv"
export * from "./parse-product-csv"
export * from "./group-products-for-batch"

View File

@@ -0,0 +1,23 @@
import { MedusaError, convertCsvToJson } from "@medusajs/utils"
import { StepResponse, createStep } from "@medusajs/workflows-sdk"
import { normalizeForImport } from "../helpers/normalize-for-import"
export const parseProductCsvStepId = "parse-product-csv"
/**
 * Parses a product CSV file's content and normalizes the rows into
 * create-product DTOs grouped by product handle.
 */
export const parseProductCsvStep = createStep(
  parseProductCsvStepId,
  async (fileContent: string) => {
    const csvProducts = convertCsvToJson(fileContent)

    // Every row must carry a handle — it is the grouping key for products.
    for (const product of csvProducts as any[]) {
      if (!product["Product Handle"]) {
        throw new MedusaError(
          MedusaError.Types.INVALID_DATA,
          "Product handle is required when importing products"
        )
      }
    }

    return new StepResponse(normalizeForImport(csvProducts))
  }
)

View File

@@ -5,6 +5,8 @@ import {
} from "@medusajs/workflows-sdk"
import { WorkflowTypes } from "@medusajs/types"
import { sendNotificationsStep } from "../../notification"
import { groupProductsForBatchStep, parseProductCsvStep } from "../steps"
import { batchProductsWorkflow } from "./batch-products"
export const importProductsWorkflowId = "import-products"
export const importProductsWorkflow = createWorkflow(
@@ -12,7 +14,12 @@ export const importProductsWorkflow = createWorkflow(
(
input: WorkflowData<WorkflowTypes.ProductWorkflow.ImportProductsDTO>
): WorkflowData<void> => {
// validateImportCsvStep(input.fileContent)
const products = parseProductCsvStep(input.fileContent)
const batchRequest = groupProductsForBatchStep(products)
// TODO: Add async confirmation step here
batchProductsWorkflow.runAsStep({ input: batchRequest })
const notifications = transform({ input }, (data) => {
return [

View File

@@ -0,0 +1,12 @@
import { csv2json } from "json-2-csv"
export interface ConvertCsvToJsonOptions<T> {}

/**
 * Parses CSV text into an array of plain objects, one per row, keyed by the
 * header columns, with the parser's `preventCsvInjection` protection enabled.
 * `options` is currently unused; reserved for future parser configuration.
 */
export const convertCsvToJson = <T extends object>(
  data: string,
  options?: ConvertCsvToJsonOptions<T>
): T[] => {
  const parsed = csv2json(data, {
    preventCsvInjection: true,
  })
  return parsed as T[]
}

View File

@@ -1 +1,2 @@
export * from "./jsontocsv"
export * from "./csvtojson"

View File

@@ -12,6 +12,7 @@ export const convertJsonToCsv = <T extends object>(
expandNestedObjects: true,
expandArrayObjects: true,
unwindArrays: false,
preventCsvInjection: true,
emptyFieldValue: "",
})
}