feat(medusa): PriceList import strategy (#2210)

Sebastian Rindom
2022-09-28 15:30:15 +02:00
committed by GitHub
parent 884f36e8a8
commit 7dc8d3a0c9
17 changed files with 1129 additions and 21 deletions

View File

@@ -0,0 +1,5 @@
---
"@medusajs/medusa": minor
---
Adds a BatchJob strategy for importing prices to PriceLists
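For context, a client kicks the import off through the admin batch jobs endpoint, as exercised by the integration test further down. A minimal sketch (illustrative host and token, and assuming the CSV has already been uploaded through the configured file service so that `fileKey` resolves):

await fetch("https://my-medusa-server.example/admin/batch-jobs", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: "Bearer <admin-token>",
  },
  body: JSON.stringify({
    type: "price-list-import",
    context: {
      price_list_id: "pl_my_price_list",
      fileKey: "price-list-import.csv",
    },
  }),
})

The job is then pre-processed and processed asynchronously; its progress can be polled via GET /admin/batch-jobs/:id, as the test below does.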

View File

@@ -0,0 +1,5 @@
---
"medusa-core-utils": minor
---
Adds a `computerizeAmount` utility that converts a human-readable money amount into the DB format Medusa uses (an integer of the lowest currency unit)
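Based on the implementation added later in this commit, the utility multiplies by 100 and rounds for regular currencies and leaves zero-decimal currencies untouched; a couple of illustrative calls:

computerizeAmount(11.11, "usd") // -> 1111 (two-decimal currency: multiplied by 100 and rounded)
computerizeAmount(3333, "jpy")  // -> 3333 (zero-decimal currency: kept as-is)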

View File

@@ -0,0 +1,290 @@
const fs = require("fs")
const path = require("path")
const setupServer = require("../../../../helpers/setup-server")
const { useApi } = require("../../../../helpers/use-api")
const { initDb, useDb } = require("../../../../helpers/use-db")
const adminSeeder = require("../../../helpers/admin-seeder")
const {
simpleRegionFactory,
simplePriceListFactory,
simpleProductFactory,
} = require("../../../factories")
const adminReqConfig = {
headers: {
Authorization: "Bearer test_token",
},
}
jest.setTimeout(1000000)
function cleanTempData() {
// cleanup tmp ops files
const opsFiles = path.resolve(
"__tests__",
"batch-jobs",
"price-list",
"imports"
)
fs.rmSync(opsFiles, { recursive: true, force: true })
}
function getImportFile() {
return path.resolve(
"__tests__",
"batch-jobs",
"price-list",
"price-list-import.csv"
)
}
function copyTemplateFile() {
const csvTemplate = path.resolve(
"__tests__",
"batch-jobs",
"price-list",
"price-list-import-template.csv"
)
const destination = getImportFile()
fs.copyFileSync(csvTemplate, destination)
}
describe("Price list import batch job", () => {
let medusaProcess
let dbConnection
beforeAll(async () => {
const cwd = path.resolve(path.join(__dirname, "..", "..", ".."))
dbConnection = await initDb({ cwd })
cleanTempData() // cleanup if previous process didn't manage to do it
medusaProcess = await setupServer({
cwd,
redisUrl: "redis://127.0.0.1:6379",
uploadDir: __dirname,
verbose: false,
})
})
afterAll(async () => {
const db = useDb()
await db.shutdown()
cleanTempData()
medusaProcess.kill()
})
beforeEach(async () => {
await adminSeeder(dbConnection)
})
afterEach(async () => {
const db = useDb()
await db.teardown()
})
it("should import a csv file", async () => {
jest.setTimeout(1000000)
const api = useApi()
copyTemplateFile()
const product = await simpleProductFactory(dbConnection, {
variants: [
{
id: "test-pl-variant",
},
{
id: "test-pl-sku-variant",
sku: "pl-sku",
},
],
})
await simpleRegionFactory(dbConnection, {
id: "test-pl-region",
name: "PL Region",
currency_code: "eur",
})
const priceList = await simplePriceListFactory(dbConnection, {
id: "pl_my_price_list",
name: "Test price list",
prices: [
{
variant_id: product.variants[0].id,
currency_code: "usd",
amount: 1000,
},
{
variant_id: product.variants[0].id,
currency_code: "eur",
amount: 2080,
},
],
})
const response = await api.post(
"/admin/batch-jobs",
{
type: "price-list-import",
context: {
price_list_id: priceList.id,
fileKey: "price-list-import.csv",
},
},
adminReqConfig
)
const batchJobId = response.data.batch_job.id
expect(batchJobId).toBeTruthy()
// Poll the batch job status until it is completed or failed
let batchJob
let shouldContinuePolling = true
while (shouldContinuePolling) {
const res = await api.get(
`/admin/batch-jobs/${batchJobId}`,
adminReqConfig
)
await new Promise((resolve, _) => {
setTimeout(resolve, 1000)
})
batchJob = res.data.batch_job
shouldContinuePolling = !(
batchJob.status === "completed" || batchJob.status === "failed"
)
}
expect(batchJob.status).toBe("completed")
const priceListRes = await api.get(
"/admin/price-lists/pl_my_price_list",
adminReqConfig
)
// Verify that the file service deleted the import file
const importFilePath = getImportFile()
expect(fs.existsSync(importFilePath)).toBe(false)
expect(priceListRes.data.price_list.prices.length).toEqual(5)
expect(priceListRes.data.price_list.prices).toEqual(
expect.arrayContaining([
expect.objectContaining({
variant_id: "test-pl-variant",
currency_code: "usd",
amount: 1111,
}),
expect.objectContaining({
variant_id: "test-pl-variant",
currency_code: "eur",
region_id: "test-pl-region",
amount: 2222,
}),
expect.objectContaining({
variant_id: "test-pl-variant",
currency_code: "jpy",
amount: 3333,
}),
expect.objectContaining({
variant_id: "test-pl-sku-variant",
currency_code: "usd",
amount: 4444,
}),
expect.objectContaining({
variant_id: "test-pl-sku-variant",
currency_code: "eur",
region_id: "test-pl-region",
amount: 5555,
}),
])
)
})
it("should fail with invalid import format", async () => {
jest.setTimeout(1000000)
const api = useApi()
const product = await simpleProductFactory(dbConnection, {
variants: [
{ id: "test-pl-variant" },
{ id: "test-pl-sku-variant", sku: "pl-sku" },
],
})
await simpleRegionFactory(dbConnection, {
id: "test-pl-region",
name: "PL Region",
currency_code: "eur",
})
const priceList = await simplePriceListFactory(dbConnection, {
id: "pl_my_price_list",
name: "Test price list",
prices: [
{
variant_id: product.variants[0].id,
currency_code: "usd",
amount: 1000,
},
{
variant_id: product.variants[0].id,
currency_code: "eur",
amount: 2080,
},
],
})
const response = await api.post(
"/admin/batch-jobs",
{
type: "price-list-import",
context: {
price_list_id: priceList.id,
fileKey: "invalid-format.csv",
},
},
adminReqConfig
)
const batchJobId = response.data.batch_job.id
expect(batchJobId).toBeTruthy()
// Poll the batch job status until it is completed or failed
let batchJob
let shouldContinuePolling = true
while (shouldContinuePolling) {
const res = await api.get(
`/admin/batch-jobs/${batchJobId}`,
adminReqConfig
)
await new Promise((resolve, _) => {
setTimeout(resolve, 1000)
})
batchJob = res.data.batch_job
shouldContinuePolling = !(
batchJob.status === "completed" || batchJob.status === "failed"
)
}
expect(batchJob.status).toBe("failed")
expect(batchJob.result).toEqual({
errors: [
"The csv file parsing failed due to: Unable to treat column non-descript-column from the csv file. No target column found in the provided schema",
],
})
})
})

View File

@@ -0,0 +1,3 @@
non-descript-column,SKU,Price USD,Price PL Region [EUR], Price JPY
test-pl-variant,,11.11,22.22,3333
,pl-sku,44.441,55.55,

View File

@@ -0,0 +1,3 @@
Product Variant ID,SKU,Price USD,Price PL Region [EUR], Price JPY
test-pl-variant,,11.11,22.22,3333
,pl-sku,44.441,55.55,

View File

@@ -1,4 +1,4 @@
Product id,Product Handle,Product Title,Product Subtitle,Product Description,Product Status,Product Thumbnail,Product Weight,Product Length,Product Width,Product Height,Product HS Code,Product Origin Country,Product MID Code,Product Material,Product Collection Title,Product Collection Handle,Product Type,Product Tags,Product Discountable,Product External ID,Product Profile Name,Product Profile Type,Variant id,Variant Title,Variant SKU,Variant Barcode,Variant Inventory Quantity,Variant Allow backorder,Variant Manage inventory,Variant Weight,Variant Length,Variant Width,Variant Height,Variant HS Code,Variant Origin Country,Variant MID Code,Variant Material,Price ImportLand [EUR],Price USD,Price denmark [DKK],Price Denmark [DKK],Option 1 Name,Option 1 Value,Option 2 Name,Option 2 Value,Image 1 Url,Sales Channel 1 Name,Sales Channel 2 Name,Sales Channel 1 Id,Sales Channel 2 Id
O6S1YQ6mKm,test-product-product-1,Test product,,test-product-description-1,draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,100,110,130,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png,Import Sales Channel 1,Import Sales Channel 2,,
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,110,test-option,Option 1 value 1,,,test-image.png,,,,
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,120,,,test-option,Option 1 Value blue,,,test-image.png,,,,
O6S1YQ6mKm,test-product-product-1,Test product,,test-product-description-1,draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,1.00,1.10,1.30,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png,Import Sales Channel 1,Import Sales Channel 2,,
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,1.10,test-option,Option 1 value 1,,,test-image.png,,,,
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,1.20,,,test-option,Option 1 Value blue,,,test-image.png,,,,

View File

@@ -1,4 +1,4 @@
Product id,Product Handle,Product Title,Product Subtitle,Product Description,Product Status,Product Thumbnail,Product Weight,Product Length,Product Width,Product Height,Product HS Code,Product Origin Country,Product MID Code,Product Material,Product Collection Title,Product Collection Handle,Product Type,Product Tags,Product Discountable,Product External ID,Product Profile Name,Product Profile Type,Variant id,Variant Title,Variant SKU,Variant Barcode,Variant Inventory Quantity,Variant Allow backorder,Variant Manage inventory,Variant Weight,Variant Length,Variant Width,Variant Height,Variant HS Code,Variant Origin Country,Variant MID Code,Variant Material,Price ImportLand [EUR],Price USD,Price denmark [DKK],Price Denmark [DKK],Option 1 Name,Option 1 Value,Option 2 Name,Option 2 Value,Image 1 Url
O6S1YQ6mKm,test-product-product-1,Test product,,"Hopper Stripes Bedding, available as duvet cover, pillow sham and sheet.\n100% organic cotton, soft and crisp to the touch. Made in Portugal.",draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,100,110,130,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,110,test-option,Option 1 value 1,,,test-image.png
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,120,,,test-option,Option 1 Value blue,,,test-image.png
O6S1YQ6mKm,test-product-product-1,Test product,,"Hopper Stripes Bedding, available as duvet cover, pillow sham and sheet.\n100% organic cotton, soft and crisp to the touch. Made in Portugal.",draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,1.00,1.10,1.30,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,1.10,test-option,Option 1 value 1,,,test-image.png
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,1.20,,,test-option,Option 1 Value blue,,,test-image.png

View File

@@ -10,6 +10,7 @@ export type ProductVariantFactoryData = {
product_id: string
id?: string
is_giftcard?: boolean
sku?: string
inventory_quantity?: number
title?: string
options?: { option_id: string; value: string }[]
@@ -31,6 +32,7 @@ export const simpleProductVariantFactory = async (
const toSave = manager.create(ProductVariant, {
id,
product_id: data.product_id,
sku: data.sku ?? null,
inventory_quantity:
typeof data.inventory_quantity !== "undefined"
? data.inventory_quantity

View File

@@ -0,0 +1,13 @@
import zeroDecimalCurrencies from "./zero-decimal-currencies"
const computerizeAmount = (amount, currency) => {
let divisor = 100
if (zeroDecimalCurrencies.includes(currency.toLowerCase())) {
divisor = 1
}
return Math.round(amount * divisor)
}
export default computerizeAmount

View File

@@ -4,8 +4,8 @@ export { default as createRequireFromPath } from "./create-require-from-path"
export { default as MedusaError } from "./errors"
export { default as getConfigFile } from "./get-config-file"
export { default as humanizeAmount } from "./humanize-amount"
export { default as computerizeAmount } from "./computerize-amount"
export { indexTypes } from "./index-types"
export { transformIdableFields } from "./transform-idable-fields"
export { default as Validator } from "./validator"
export { default as zeroDecimalCurrencies } from "./zero-decimal-currencies"

View File

@@ -26,6 +26,7 @@ export { default as OrderEditService } from "./order-edit"
export { default as OrderEditItemChangeService } from "./order-edit-item-change"
export { default as PaymentProviderService } from "./payment-provider"
export { default as PricingService } from "./pricing"
export { default as PriceListService } from "./price-list"
export { default as ProductCollectionService } from "./product-collection"
export { default as ProductService } from "./product"
export { default as ProductTypeService } from "./product-type"

View File

@@ -249,6 +249,19 @@ class PriceListService extends TransactionBaseService {
})
}
/**
* Removes all prices from a price list and deletes them in bulk
* @param id - id of the price list
* @returns {Promise<void>} resolves once the prices have been removed
*/
async clearPrices(id: string): Promise<void> {
return await this.atomicPhase_(async (manager: EntityManager) => {
const moneyAmountRepo = manager.getCustomRepository(this.moneyAmountRepo_)
const priceList = await this.retrieve(id, { select: ["id"] })
await moneyAmountRepo.delete({ price_list_id: priceList.id })
})
}
/**
* Deletes a Price List
* Will never fail due to delete being idempotent.

View File

@@ -0,0 +1,166 @@
import { Readable, PassThrough } from "stream"
import { EntityManager } from "typeorm"
import { FileService } from "medusa-interfaces"
import { MockManager } from "medusa-test-utils"
import { User } from "../../../../models"
import { BatchJobStatus } from "../../../../types/batch-job"
import PriceListImportStrategy from "../../../batch-jobs/price-list/import"
import {
PriceListService,
BatchJobService,
ProductVariantService,
RegionService,
} from "../../../../services"
import { InjectedProps } from "../../../batch-jobs/price-list/types"
let fakeJob = {
id: "batch_plimport",
type: "price-list-import",
context: {
price_list_id: "pl_1234",
fileKey: "csv.key",
},
results: { advancement_count: 0, count: 6 },
created_by: "usr_tester",
created_by_user: {} as User,
result: {},
dry_run: false,
status: BatchJobStatus.PROCESSING,
}
async function* generateCSVDataForStream() {
yield "Product Variant ID,SKU,Price EUR,Price NA [USD]\n"
yield ",MEDUSA-SWEAT-SMALL,15,13.5\n"
yield "5VxiEkmnPV,,15,13.5\n"
}
/* ******************** SERVICES MOCK ******************** */
const fileServiceMock = {
withTransaction: function () {
return this
},
delete: jest.fn(),
getDownloadStream: jest.fn().mockImplementation(() => {
return Promise.resolve(Readable.from(generateCSVDataForStream()))
}),
getUploadStreamDescriptor: jest.fn().mockImplementation(() => ({
writeStream: new PassThrough(),
promise: Promise.resolve(),
})),
}
const priceListServiceMock = {
withTransaction: function () {
return this
},
retrieve: jest.fn().mockImplementation(() => {
return Promise.resolve(fakeJob)
}),
}
const batchJobServiceMock = {
withTransaction: function () {
return this
},
update: jest.fn().mockImplementation((data) => {
fakeJob = {
...fakeJob,
...data,
}
return Promise.resolve(fakeJob)
}),
complete: jest.fn().mockImplementation(() => {
fakeJob.status = BatchJobStatus.COMPLETED
return Promise.resolve(fakeJob)
}),
confirmed: jest.fn().mockImplementation(() => {
fakeJob.status = BatchJobStatus.CONFIRMED
return Promise.resolve(fakeJob)
}),
retrieve: jest.fn().mockImplementation(() => {
return Promise.resolve(fakeJob)
}),
}
const productVariantServiceMock = {
withTransaction: function () {
return this
},
retrieve: jest.fn().mockImplementation(() =>
Promise.resolve({
id: "retrieved-by-id",
})
),
retrieveBySKU: jest.fn().mockImplementation(() =>
Promise.resolve({
id: "retrieved-by-sku",
})
),
}
const regionServiceMock = {
withTransaction: function () {
return this
},
retrieveByName: jest.fn().mockImplementation(() =>
Promise.resolve({
id: "reg_HMnixPlOicAs7aBlXuchAGxd",
name: "Denmark",
currency_code: "DKK",
currency: "DKK",
tax_rate: 0.25,
tax_code: null,
countries: [
{
id: "1001",
iso_2: "DK",
iso_3: "DNK",
num_code: "208",
name: "denmark",
display_name: "Denmark",
},
],
})
),
}
const managerMock = MockManager
/* ******************** PRICE LIST IMPORT STRATEGY TESTS ******************** */
describe("Price List import strategy", () => {
afterAll(() => {
jest.clearAllMocks()
})
const priceListImportStrategy = new PriceListImportStrategy({
manager: managerMock as EntityManager,
fileService: fileServiceMock as typeof FileService,
batchJobService: batchJobServiceMock as unknown as BatchJobService,
priceListService: priceListServiceMock as unknown as PriceListService,
productVariantService:
productVariantServiceMock as unknown as ProductVariantService,
regionService: regionServiceMock as unknown as RegionService,
} as unknown as InjectedProps)
it("`preProcessBatchJob` should generate import ops and upload them to a bucket using the file service", async () => {
const getImportInstructionsSpy = jest.spyOn(
priceListImportStrategy,
"getImportInstructions"
)
await priceListImportStrategy.preProcessBatchJob(fakeJob.id)
expect(getImportInstructionsSpy).toBeCalledTimes(1)
expect(fileServiceMock.getUploadStreamDescriptor).toBeCalledTimes(1)
expect(fileServiceMock.getUploadStreamDescriptor).toHaveBeenCalledWith({
ext: "json",
name: `imports/price-lists/ops/${fakeJob.id}-PRICE_LIST_PRICE_CREATE`,
})
getImportInstructionsSpy.mockRestore()
})
})

View File

@@ -0,0 +1,513 @@
import { EntityManager } from "typeorm"
import { MedusaError, computerizeAmount } from "medusa-core-utils"
import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces"
import CsvParser from "../../../services/csv-parser"
import {
BatchJobService,
ProductVariantService,
PriceListService,
RegionService,
} from "../../../services"
import { CreateBatchJobInput } from "../../../types/batch-job"
import {
InjectedProps,
OperationType,
PriceListImportOperation,
PriceListImportOperationPrice,
ParsedPriceListImportPrice,
PriceListImportBatchJob,
PriceListImportCsvSchema,
TBuiltPriceListImportLine,
TParsedPriceListImportRowData,
} from "./types"
/*
* Default strategy class used for a batch import of prices into a price list.
*/
class PriceListImportStrategy extends AbstractBatchJobStrategy {
static identifier = "price-list-import-strategy"
static batchType = "price-list-import"
private processedCounter: Record<string, number> = {}
protected manager_: EntityManager
protected transactionManager_: EntityManager | undefined
protected readonly fileService_: IFileService
protected readonly regionService_: RegionService
protected readonly priceListService_: PriceListService
protected readonly batchJobService_: BatchJobService
protected readonly productVariantService_: ProductVariantService
protected readonly csvParser_: CsvParser<
PriceListImportCsvSchema,
Record<string, string>,
Record<string, string>
>
constructor({
batchJobService,
productVariantService,
priceListService,
regionService,
fileService,
manager,
}: InjectedProps) {
// eslint-disable-next-line prefer-rest-params
super(arguments[0])
this.csvParser_ = new CsvParser(CSVSchema)
this.manager_ = manager
this.fileService_ = fileService
this.batchJobService_ = batchJobService
this.priceListService_ = priceListService
this.productVariantService_ = productVariantService
this.regionService_ = regionService
}
async buildTemplate(): Promise<string> {
throw new Error("Not implemented!")
}
/**
* Create a description of a row on which the error occurred and throw a Medusa error.
*
* @param row - Parsed CSV row data
* @param errorDescription - Concrete error
*/
protected static throwDescriptiveError(
row: TParsedPriceListImportRowData,
errorDescription?: string
): never {
const message = `Error while processing row with:
variant ID: ${row[PriceListRowKeys.VARIANT_ID]},
variant SKU: ${row[PriceListRowKeys.VARIANT_SKU]},
${errorDescription}`
throw new MedusaError(MedusaError.Types.INVALID_DATA, message)
}
async prepareBatchJobForProcessing(
batchJob: CreateBatchJobInput,
reqContext: any
): Promise<CreateBatchJobInput> {
const manager = this.transactionManager_ ?? this.manager_
if (!batchJob.context?.price_list_id) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
"Price list id is required"
)
}
// Validate that PriceList exists
const priceListId = batchJob.context.price_list_id as string
await this.priceListService_.withTransaction(manager).retrieve(priceListId)
return batchJob
}
/**
* Generate instructions for creation of prices from parsed CSV rows.
*
* @param priceListId - the ID of the price list where the prices will be created
* @param csvData - An array of parsed CSV rows.
*/
async getImportInstructions(
priceListId: string,
csvData: TParsedPriceListImportRowData[]
): Promise<Record<OperationType, PriceListImportOperation[]>> {
// Validate that PriceList exists
const manager = this.transactionManager_ ?? this.manager_
await this.priceListService_.withTransaction(manager).retrieve(priceListId)
const pricesToCreate: PriceListImportOperation[] = []
for (const row of csvData) {
let variantId = row[PriceListRowKeys.VARIANT_ID]
if (!variantId) {
if (!row[PriceListRowKeys.VARIANT_SKU]) {
PriceListImportStrategy.throwDescriptiveError(
row,
"SKU or ID is required"
)
}
const variant = await this.productVariantService_.retrieveBySKU(
`${row[PriceListRowKeys.VARIANT_SKU]}`,
{
select: ["id"],
}
)
variantId = variant.id
} else {
// Validate that the variant exists
await this.productVariantService_.retrieve(`${variantId}`, {
select: ["id"],
})
}
const pricesOperationData = await this.prepareVariantPrices(
row[PriceListRowKeys.PRICES] as ParsedPriceListImportPrice[]
)
pricesToCreate.push({
variant_id: `${variantId}`,
prices: pricesOperationData,
})
}
return {
[OperationType.PricesCreate]: pricesToCreate,
}
}
/**
* Prepare price records for insert - resolve and append region ids to records that contain a region name.
*
* @param prices - the parsed prices to prepare
* @returns the prepared prices. Each price has its amount in DB format, a currency_code and, where applicable, a region_id.
*/
protected async prepareVariantPrices(
prices: ParsedPriceListImportPrice[]
): Promise<PriceListImportOperationPrice[]> {
const transactionManager = this.transactionManager_ ?? this.manager_
const operationalPrices: PriceListImportOperationPrice[] = []
for (const price of prices) {
const record: Partial<PriceListImportOperationPrice> = {
amount: price.amount,
}
if ("region_name" in price) {
try {
const region = await this.regionService_
.withTransaction(transactionManager)
.retrieveByName(price.region_name)
record.region_id = region.id
record.currency_code = region.currency_code
} catch (e) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
`Trying to set a price for a region ${price.region_name} that doesn't exist`
)
}
} else {
// TODO: Verify that currency is activated for store
record.currency_code = price.currency_code
}
record.amount = computerizeAmount(record.amount, record.currency_code)
operationalPrices.push(record as PriceListImportOperationPrice)
}
return operationalPrices
}
/**
* A worker method called after a batch job has been created.
* The method parses a CSV file, generates sets of instructions
* for processing and stores these instructions to a JSON file
* which is uploaded to a bucket.
*
* @param batchJobId - An id of a job that is being preprocessed.
*/
async preProcessBatchJob(batchJobId: string): Promise<void> {
const transactionManager = this.transactionManager_ ?? this.manager_
const batchJob = (await this.batchJobService_
.withTransaction(transactionManager)
.retrieve(batchJobId)) as PriceListImportBatchJob
const csvFileKey = batchJob.context.fileKey
const priceListId = batchJob.context.price_list_id
const csvStream = await this.fileService_.getDownloadStream({
fileKey: csvFileKey,
})
let builtData: Record<string, string>[]
try {
const parsedData = await this.csvParser_.parse(csvStream)
builtData = await this.csvParser_.buildData(parsedData)
} catch (e) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
"The csv file parsing failed due to: " + e.message
)
}
const ops = await this.getImportInstructions(priceListId, builtData)
await this.uploadImportOpsFile(batchJobId, ops)
let totalOperationCount = 0
const operationsCounts = {}
Object.keys(ops).forEach((key) => {
operationsCounts[key] = ops[key].length
totalOperationCount += ops[key].length
})
await this.batchJobService_
.withTransaction(transactionManager)
.update(batchJobId, {
result: {
advancement_count: 0,
// number of update/create operations to execute
count: totalOperationCount,
operations: operationsCounts,
stat_descriptors: [
{
key: "price-list-import-count",
name: "PriceList to import",
message: `${
ops[OperationType.PricesCreate].length
} prices will be added`,
},
],
},
})
}
/**
* The main processing method called after a batch job
* is ready/confirmed for processing.
*
* @param batchJobId - An id of a batch job that is being processed.
*/
async processJob(batchJobId: string): Promise<void> {
return await this.atomicPhase_(async (manager) => {
const batchJob = (await this.batchJobService_
.withTransaction(manager)
.retrieve(batchJobId)) as PriceListImportBatchJob
const priceListId = batchJob.context.price_list_id
const txPriceListService = this.priceListService_.withTransaction(manager)
// Delete existing prices for the price list
await txPriceListService.clearPrices(priceListId)
// Add the new prices to the price list
const priceImportOperations = await this.downloadImportOpsFile(
batchJobId,
OperationType.PricesCreate
)
await Promise.all(
priceImportOperations.map(async (op) => {
await txPriceListService.addPrices(
priceListId,
op.prices.map((p) => {
return {
...p,
variant_id: op.variant_id,
}
})
)
})
)
await this.finalize(batchJob)
})
}
/**
* Store import ops JSON file to a bucket.
*
* @param batchJobId - An id of the current batch job being processed.
* @param results - The generated import operations, grouped by operation type.
*/
protected async uploadImportOpsFile(
batchJobId: string,
results: Record<OperationType, PriceListImportOperation[]>
): Promise<void> {
const uploadPromises: Promise<void>[] = []
const transactionManager = this.transactionManager_ ?? this.manager_
for (const op in results) {
if (results[op]?.length) {
const { writeStream, promise } = await this.fileService_
.withTransaction(transactionManager)
.getUploadStreamDescriptor({
name: PriceListImportStrategy.buildFilename(batchJobId, op),
ext: "json",
})
uploadPromises.push(promise)
writeStream.write(JSON.stringify(results[op]))
writeStream.end()
}
}
await Promise.all(uploadPromises)
}
/**
* Download the parsed ops JSON file and return the import operations it contains.
*
* @param batchJobId - An id of the current batch job being processed.
* @param op - Type of import operation.
*/
protected async downloadImportOpsFile(
batchJobId: string,
op: OperationType
): Promise<PriceListImportOperation[]> {
let data = ""
const transactionManager = this.transactionManager_ ?? this.manager_
const readableStream = await this.fileService_
.withTransaction(transactionManager)
.getDownloadStream({
fileKey: PriceListImportStrategy.buildFilename(batchJobId, op, {
appendExt: ".json",
}),
})
return await new Promise((resolve) => {
readableStream.on("data", (chunk) => {
data += chunk
})
readableStream.on("end", () => {
resolve(JSON.parse(data))
})
readableStream.on("error", () => {
// TODO: maybe should throw
resolve([] as PriceListImportOperation[])
})
})
}
/**
* Delete the parsed ops JSON files.
*
* @param batchJobId - An id of the current batch job being processed.
*/
protected async deleteOpsFiles(batchJobId: string): Promise<void> {
const transactionManager = this.transactionManager_ ?? this.manager_
const fileServiceTx = this.fileService_.withTransaction(transactionManager)
for (const op of Object.values(OperationType)) {
try {
await fileServiceTx.delete({
fileKey: PriceListImportStrategy.buildFilename(batchJobId, op, {
appendExt: ".json",
}),
})
} catch (e) {
// noop
}
}
}
/**
* Update the count of processed data in the batch job `result` column
* and clean up the temp JSON files.
*
* @param batchJob - The current batch job being processed.
*/
private async finalize(batchJob: PriceListImportBatchJob): Promise<void> {
const transactionManager = this.transactionManager_ ?? this.manager_
delete this.processedCounter[batchJob.id]
await this.batchJobService_
.withTransaction(transactionManager)
.update(batchJob.id, {
result: { advancement_count: batchJob.result.count },
})
const { fileKey } = batchJob.context
await this.fileService_
.withTransaction(transactionManager)
.delete({ fileKey })
await this.deleteOpsFiles(batchJob.id)
}
private static buildFilename(
batchJobId: string,
operation: string,
{ appendExt }: { appendExt?: string } = { appendExt: undefined }
): string {
const filename = `imports/price-lists/ops/${batchJobId}-${operation}`
return appendExt ? filename + appendExt : filename
}
}
export default PriceListImportStrategy
enum PriceListRowKeys {
VARIANT_ID = "id",
VARIANT_SKU = "sku",
PRICES = "prices",
}
/**
* Schema definition for the CSV parser.
*/
const CSVSchema: PriceListImportCsvSchema = {
columns: [
{
name: "Product Variant ID",
mapTo: PriceListRowKeys.VARIANT_ID,
},
{ name: "SKU", mapTo: PriceListRowKeys.VARIANT_SKU },
{
name: "Price Region",
match: /Price (.*) \[([A-Z]{3})\]/,
reducer: (
builtLine: TBuiltPriceListImportLine,
key: string,
value: string
): TBuiltPriceListImportLine => {
builtLine[PriceListRowKeys.PRICES] =
builtLine[PriceListRowKeys.PRICES] || []
if (typeof value === "undefined" || value === null) {
return builtLine
}
const [, regionName] =
key.trim().match(/Price (.*) \[([A-Z]{3})\]/) || []
builtLine[PriceListRowKeys.PRICES].push({
amount: parseFloat(value),
region_name: regionName,
})
return builtLine
},
},
{
name: "Price Currency",
match: /Price [A-Z]{3}/,
reducer: (
builtLine: TBuiltPriceListImportLine,
key: string,
value: string
): TBuiltPriceListImportLine => {
builtLine[PriceListRowKeys.PRICES] =
builtLine[PriceListRowKeys.PRICES] || []
if (typeof value === "undefined" || value === null) {
return builtLine
}
const currency = key.trim().split(" ")[1]
builtLine[PriceListRowKeys.PRICES].push({
amount: parseFloat(value),
currency_code: currency.toLowerCase(),
})
return builtLine
},
},
],
}
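To make the two price column rules concrete, here is an illustrative (hand-written, not captured from the parser) built line for the row `5VxiEkmnPV,,15,13.5` under the header `Product Variant ID,SKU,Price EUR,Price NA [USD]` used in the unit test above; the order of the price entries depends on column order:

{
  id: "5VxiEkmnPV",
  prices: [
    { amount: 15, currency_code: "eur" },  // "Price EUR" matched the "Price Currency" rule
    { amount: 13.5, region_name: "NA" },   // "Price NA [USD]" matched the "Price Region" rule
  ],
}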

View File

@@ -0,0 +1,89 @@
import { EntityManager } from "typeorm"
import { FileService } from "medusa-interfaces"
import {
BatchJobService,
PriceListService,
ProductVariantService,
RegionService,
} from "../../../services"
import { CsvSchema } from "../../../interfaces/csv-parser"
import { BatchJob } from "../../../models"
export type PriceListImportBatchJob = BatchJob & {
context: PriceListImportJobContext
result: Pick<BatchJob, "result"> & {
operations: {
[K in keyof typeof OperationType]: number
}
}
}
/**
* DI props for the Price List import strategy
*/
export type InjectedProps = {
priceListService: PriceListService
batchJobService: BatchJobService
productVariantService: ProductVariantService
regionService: RegionService
fileService: typeof FileService
manager: EntityManager
}
/**
* Data shape returned by the CSVParser.
*/
export type TParsedPriceListImportRowData = Record<
string,
string | number | (string | number | object)[]
>
export type PriceListImportOperationPrice = {
region_id?: string
currency_code: string
amount: number
}
export type PriceListImportOperation = {
variant_id: string
prices: PriceListImportOperationPrice[]
}
export type ParsedPriceListImportPrice =
| {
amount: number
currency_code: string
}
| {
amount: number
region_name: string
}
/**
* CSV parser's row reducer result data shape.
*/
export type TBuiltPriceListImportLine = Record<string, any>
/**
* Schema definition for an import CSV file.
*/
export type PriceListImportCsvSchema = CsvSchema<
TParsedPriceListImportRowData,
TBuiltPriceListImportLine
>
/**
* Import Batch job context column type.
*/
export type PriceListImportJobContext = {
price_list_id: string
fileKey: string
}
/**
* Supported batch job import ops.
*/
export enum OperationType {
PricesCreate = "PRICE_LIST_PRICE_CREATE",
}
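For orientation, the ops file that `uploadImportOpsFile` stores for `PRICE_LIST_PRICE_CREATE` is just the JSON-serialized `PriceListImportOperation[]`; an illustrative payload (values mirroring the integration test expectations):

[
  {
    "variant_id": "test-pl-variant",
    "prices": [
      { "currency_code": "usd", "amount": 1111 },
      { "region_id": "test-pl-region", "currency_code": "eur", "amount": 2222 }
    ]
  }
]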

View File

@@ -1,6 +1,6 @@
/* eslint-disable valid-jsdoc */
import { EntityManager } from "typeorm"
import { MedusaError } from "medusa-core-utils"
import { computerizeAmount, MedusaError } from "medusa-core-utils"
import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces"
import CsvParser from "../../../services/csv-parser"
@@ -198,11 +198,12 @@ class ProductImportStrategy extends AbstractBatchJobStrategy {
if (price.regionName) {
try {
record.region_id = (
await this.regionService_
.withTransaction(transactionManager)
.retrieveByName(price.regionName)
)?.id
const region = await this.regionService_
.withTransaction(transactionManager)
.retrieveByName(price.regionName)
record.region_id = region.id
record.currency_code = region.currency_code
} catch (e) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
@@ -213,6 +214,7 @@ class ProductImportStrategy extends AbstractBatchJobStrategy {
record.currency_code = price.currency_code
}
record.amount = computerizeAmount(record.amount, record.currency_code)
prices.push(record)
}
@@ -845,7 +847,7 @@ const CSVSchema: ProductImportCsvSchema = {
// PRICES
{
name: "Price Region",
match: /Price .* \[([A-Z]{2,4})\]/,
match: /Price (.*) \[([A-Z]{3})\]/,
reducer: (
builtLine: TParsedProductImportRowData,
key,
@@ -857,11 +859,12 @@ const CSVSchema: ProductImportCsvSchema = {
return builtLine
}
const regionName = key.split(" ")[1]
const [, regionName] =
key.trim().match(/Price (.*) \[([A-Z]{3})\]/) || []
;(
builtLine["variant.prices"] as Record<string, string | number>[]
).push({
amount: value,
amount: parseFloat(value),
regionName,
})
@@ -870,7 +873,7 @@ const CSVSchema: ProductImportCsvSchema = {
},
{
name: "Price Currency",
match: /Price [A-Z]{2,4}/,
match: /Price [A-Z]{3}/,
reducer: (
builtLine: TParsedProductImportRowData,
key,
@@ -882,11 +885,12 @@ const CSVSchema: ProductImportCsvSchema = {
return builtLine
}
const currency = key.split(" ")[1]
const currency = key.trim().split(" ")[1]
;(
builtLine["variant.prices"] as Record<string, string | number>[]
).push({
amount: value,
amount: parseFloat(value),
currency_code: currency,
})
@@ -962,3 +966,4 @@ const SalesChannelsSchema: ProductImportCsvSchema = {
},
],
}
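A quick illustration of the extraction change above, using the multi-word `Price PL Region [EUR]` header from the price list fixtures earlier in this commit:

// old extraction: only the first word after "Price" survived
"Price PL Region [EUR]".split(" ")[1] // -> "PL"
// new extraction: the capture group keeps the full region name and the currency
"Price PL Region [EUR]".match(/Price (.*) \[([A-Z]{3})\]/) // -> [..., "PL Region", "EUR"]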

View File

@@ -47,7 +47,7 @@ class BatchJobSubscriber {
.preProcessBatchJob(batchJob.id)
await batchJobServiceTx.setPreProcessingDone(batchJob.id)
} catch (e) {
await this.batchJobService_.setFailed(batchJob.id)
await this.batchJobService_.setFailed(batchJob.id, e.message)
throw e
}
})
@@ -68,7 +68,7 @@ class BatchJobSubscriber {
await batchJobStrategy.withTransaction(manager).processJob(batchJob.id)
await batchJobServiceTx.complete(batchJob.id)
} catch (e) {
await this.batchJobService_.setFailed(batchJob.id)
await this.batchJobService_.setFailed(batchJob.id, e.message)
throw e
}
})