feat: product import strategy (#1706)
* init: copy PI files
* feat: add subscribers, refactor strategies folder
* wip: strategies integration tests package
* fix: rename
* wip: use redis
* wip: use redis deps, redis setup in local tests
* fix: naming collision, medusa config
* fix: typing, update apply changes for new event ordering and reimplement interface
* feat: make redis container run in integration tests
* fix: missing yarn lock
* feat: redis setup v2
* fix: setup server imports
* fix: a lot of integration issues
* fix: a lot of integration issues v2, transform tags, fix `ops` object parsing
* wip: parsing product options
* feat: ✨ creating product and variants works, processing product/variant options, update schema
* fix: query keys, logic for finding existing variant
* fix: types
* feat: update product variant's options
* feat: parse MA records
* feat: creating/updating MA records, region detection, error handling
* feat: throw an error when creating an MA for a nonexistent region
* refactor: remove unused methods
* refactor: use provided ids to track records, extract a couple of methods
* refactor: remove unused method
* refactor/wip: add initial comment for main methods
* refactor: replace usage of RedisJSON functionality with basic k/v api
* feat: async progress report
* types: define more precise types, cleanup
* feat: error handling
* feat: unit testing preprocessing
* feat: integration testing for CI, fix legacy bug where user is unable to create a variant if regional price is also sent as payload, add csv for integration tests
* fix: error throw for logs
* feat: add product endpoint snap
* refactor: remove log
* feat: add snaps, rebase
* refactor: add comments
* feat: snap update
* refactor: typo
* refactor: change error handler
* feat: Redis cleanup after the job is done
* testing: fix product unit test, remove integration snap, add inline object matcher
* testing: fix obsolete snaps
* refactor: update comments
* fix: rebase issue
* fix: rebase issue v2, remove log from an integration test
* fix: try reverting setup server
* fix: insert variants test
* refactor: don't pass tx manager, refactor methods
* refactor: don't use regionRepo, add `retrieveByName` to region repo
* refactor: don't use productRepo
* refactor: don't use `productVariantRepo`
* refactor: remove repo mocks from unit tests
* fix: product import unit tests
* feat: file cleanup on finalize, kill test logs
* wip: use files to persist ops instead of redis, move strategy class into `batch-job` folder
* fix: minio delete method, add file cleanup method to import, fix promise coordination
* fix: replace redis methods
* feat: store import ops as a file instead of Redis
* feat: test cleanup
* fix: change unit tests after Redis logic removal
* feat: use `results` for progress reporting, add `stat_descriptors` info after preprocessing, remove redis mentions
* feat: extract to other files, use directory from property, fix strategy loader to allow other files in `strategies` directory
* feat: fix instance progress counter
* fix: mock services types
* fix: update snaps
* fix: error handling stream, fix test file service name generation
* fix: remove dir with tmp files after testing
* fix: new yarn.lock after rebase
* fix: remove log, change object shape
* fix: add DI types
* refactor: remove container as a csv parser dep
* fix: remove seeder, change typings
* refactor: reimplement `retrieveByName` in the region service
* fix: unit tests typings
* fix: remove ts-ignore, complete typings for csv parser validators
* fix: don't keep track of progress since it is redundant and only keep track of `advancement_count`
* fix: return of the batch job seeder
* fix: update find region by name method
* fix: update types for service typings
* fix: update redis type usage
* fix: update unit tests file
* fix: unit tests
* fix: remove redis from integration test
* feat: refactor region retrieval by name
* feat: refactor product option update
* fix: remove repo import
* fix: return redis in test
* fix: handle stream error
* fix: tmp data cleanup

Co-authored-by: fPolic <frane@medusajs.com>
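For reference, the flow this commit introduces is driven entirely through the admin API: create a batch job of type `product_import` pointing at an uploaded CSV, then poll it. A minimal sketch, assuming an axios client and a CSV already stored under the file-service key "product-import.csv" (as in the integration test added below):

// Hypothetical kick-off helper mirroring the integration test in this commit.
const axios = require("axios")

async function startProductImport(baseUrl, token) {
  const { data } = await axios.post(
    `${baseUrl}/admin/batch-jobs`,
    {
      type: "product_import",
      context: { fileKey: "product-import.csv" },
    },
    { headers: { Authorization: `Bearer ${token}` } }
  )
  // Poll GET /admin/batch-jobs/:id until status is "completed" or "failed".
  return data.batch_job.id
}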
@@ -1753,6 +1753,80 @@ describe("/admin/products", () => {
    })
  })

  describe("variant creation", () => {
    beforeEach(async () => {
      try {
        await productSeeder(dbConnection)
        await adminSeeder(dbConnection)
      } catch (err) {
        console.log(err)
        throw err
      }
    })

    afterEach(async () => {
      const db = useDb()
      await db.teardown()
    })

    it("create a product variant with prices (regional and currency)", async () => {
      const api = useApi()

      const payload = {
        title: "New variant",
        sku: "new-sku",
        ean: "new-ean",
        upc: "new-upc",
        barcode: "new-barcode",
        prices: [
          {
            currency_code: "usd",
            amount: 100,
          },
          {
            region_id: "test-region",
            amount: 200,
          },
        ],
        options: [{ option_id: "test-option", value: "inserted value" }],
      }

      const res = await api
        .post("/admin/products/test-product/variants", payload, {
          headers: {
            Authorization: "Bearer test_token",
          },
        })
        .catch((err) => console.log(err))

      const insertedVariant = res.data.product.variants.find(
        (v) => v.sku === "new-sku"
      )

      expect(res.status).toEqual(200)

      expect(insertedVariant.prices).toEqual([
        expect.objectContaining({
          currency_code: "usd",
          amount: 100,
          min_quantity: null,
          max_quantity: null,
          variant_id: insertedVariant.id,
          region_id: null,
        }),
        expect.objectContaining({
          currency_code: "usd",
          amount: 200,
          min_quantity: null,
          max_quantity: null,
          price_list_id: null,
          variant_id: insertedVariant.id,
          region_id: "test-region",
        }),
      ])
    })
  })

  describe("testing for soft-deletion + uniqueness on handles, collection and variant properties", () => {
    beforeEach(async () => {
      await productSeeder(dbConnection)
integration-tests/api/__tests__/batch-jobs/product/import.js (new file, 255 lines)
@@ -0,0 +1,255 @@
const fs = require("fs")
const path = require("path")

const setupServer = require("../../../../helpers/setup-server")
const { useApi } = require("../../../../helpers/use-api")
const { initDb, useDb } = require("../../../../helpers/use-db")

const adminSeeder = require("../../../helpers/admin-seeder")
const batchJobSeeder = require("../../../helpers/batch-job-seeder")
const userSeeder = require("../../../helpers/user-seeder")

const adminReqConfig = {
  headers: {
    Authorization: "Bearer test_token",
  },
}

jest.setTimeout(1000000)

function cleanTempData() {
  // cleanup tmp ops files
  const opsFiles = path.resolve("__tests__", "batch-jobs", "product", "imports")

  fs.rmSync(opsFiles, { recursive: true, force: true })
}

describe("Product import batch job", () => {
  let medusaProcess
  let dbConnection

  beforeAll(async () => {
    const cwd = path.resolve(path.join(__dirname, "..", "..", ".."))
    dbConnection = await initDb({ cwd })

    cleanTempData() // cleanup if previous process didn't manage to do it

    medusaProcess = await setupServer({
      cwd,
      redisUrl: "redis://127.0.0.1:6379",
      uploadDir: __dirname,
      verbose: false,
    })
  })

  afterAll(async () => {
    const db = useDb()
    await db.shutdown()

    cleanTempData()

    medusaProcess.kill()
  })

  beforeEach(async () => {
    try {
      await batchJobSeeder(dbConnection)
      await adminSeeder(dbConnection)
      await userSeeder(dbConnection)
    } catch (e) {
      console.log(e)
      throw e
    }
  })

  afterEach(async () => {
    const db = useDb()
    await db.teardown()
  })

  it("should import a csv file", async () => {
    jest.setTimeout(1000000)
    const api = useApi()

    const response = await api.post(
      "/admin/batch-jobs",
      {
        type: "product_import",
        context: {
          fileKey: "product-import.csv",
        },
      },
      adminReqConfig
    )

    const batchJobId = response.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Poll the status until the job is completed or failed
    let batchJob
    let shouldContinuePulling = true
    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      await new Promise((resolve, _) => {
        setTimeout(resolve, 1000)
      })

      batchJob = res.data.batch_job

      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )
    }

    expect(batchJob.status).toBe("completed")

    const productsResponse = await api.get("/admin/products", adminReqConfig)

    expect(productsResponse.data.count).toBe(2)
    expect(productsResponse.data.products).toEqual([
      expect.objectContaining({
        id: "O6S1YQ6mKm",
        title: "Test product",
        description: "test-product-description-1",
        handle: "test-product-product-1",
        is_giftcard: false,
        status: "draft",
        thumbnail: "test-image.png",
        variants: [
          expect.objectContaining({
            title: "Test variant",
            product_id: "O6S1YQ6mKm",
            sku: "test-sku-1",
            barcode: "test-barcode-1",
            ean: null,
            upc: null,
            inventory_quantity: 10,
            prices: [
              expect.objectContaining({
                currency_code: "eur",
                amount: 100,
                region_id: "region-product-import-0",
              }),
              expect.objectContaining({
                currency_code: "usd",
                amount: 110,
              }),
              expect.objectContaining({
                currency_code: "dkk",
                amount: 130,
                region_id: "region-product-import-1",
              }),
            ],
            options: [
              expect.objectContaining({
                value: "option 1 value red",
              }),
              expect.objectContaining({
                value: "option 2 value 1",
              }),
            ],
          }),
        ],
        images: [
          expect.objectContaining({
            url: "test-image.png",
          }),
        ],
        options: [
          expect.objectContaining({
            title: "test-option-1",
            product_id: "O6S1YQ6mKm",
          }),
          expect.objectContaining({
            title: "test-option-2",
            product_id: "O6S1YQ6mKm",
          }),
        ],
        tags: [
          expect.objectContaining({
            value: "123_1",
          }),
        ],
      }),
      expect.objectContaining({
        id: "5VxiEkmnPV",
        title: "Test product",
        description: "test-product-description",
        handle: "test-product-product-2",
        is_giftcard: false,
        status: "draft",
        thumbnail: "test-image.png",
        profile_id: expect.any(String),
        variants: [
          expect.objectContaining({
            title: "Test variant",
            product_id: "5VxiEkmnPV",
            sku: "test-sku-2",
            barcode: "test-barcode-2",
            ean: null,
            upc: null,
            inventory_quantity: 10,
            allow_backorder: false,
            manage_inventory: true,
            prices: [
              expect.objectContaining({
                currency_code: "dkk",
                amount: 110,
                region_id: "region-product-import-2",
              }),
            ],
            options: [
              expect.objectContaining({
                value: "Option 1 value 1",
              }),
            ],
          }),
          expect.objectContaining({
            title: "Test variant",
            product_id: "5VxiEkmnPV",
            sku: "test-sku-3",
            barcode: "test-barcode-3",
            ean: null,
            upc: null,
            inventory_quantity: 10,
            allow_backorder: false,
            manage_inventory: true,
            prices: [
              expect.objectContaining({
                currency_code: "usd",
                amount: 120,
                region_id: null,
              }),
            ],
            options: [
              expect.objectContaining({
                value: "Option 1 Value blue",
              }),
            ],
          }),
        ],
        images: [
          expect.objectContaining({
            url: "test-image.png",
          }),
        ],
        options: [
          expect.objectContaining({
            title: "test-option",
            product_id: "5VxiEkmnPV",
          }),
        ],
        tags: [
          expect.objectContaining({
            value: "123",
          }),
        ],
      }),
    ])
  })
})
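The polling loop in the test above generalizes to a small helper. A sketch, assuming the same `api` client and request config used in the test:

// Hypothetical helper extracted from the polling loop above: resolve once the
// batch job reaches a terminal status, checking once per second.
async function awaitBatchJob(api, batchJobId, config) {
  for (;;) {
    const res = await api.get(`/admin/batch-jobs/${batchJobId}`, config)
    const job = res.data.batch_job
    if (job.status === "completed" || job.status === "failed") {
      return job
    }
    await new Promise((resolve) => setTimeout(resolve, 1000))
  }
}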
@@ -0,0 +1,4 @@
Product id,Product Handle,Product Title,Product Subtitle,Product Description,Product Status,Product Thumbnail,Product Weight,Product Length,Product Width,Product Height,Product HS Code,Product Origin Country,Product Mid Code,Product Material,Product Collection Title,Product Collection Handle,Product Type,Product Tags,Product Discountable,Product External ID,Product Profile Name,Product Profile Type,Variant id,Variant Title,Variant SKU,Variant Barcode,Variant Inventory Quantity,Variant Allow backorder,Variant Manage inventory,Variant Weight,Variant Length,Variant Width,Variant Height,Variant HS Code,Variant Origin Country,Variant Mid Code,Variant Material,Price ImportLand [EUR],Price USD,Price denmark [DKK],Price Denmark [DKK],Option 1 Name,Option 1 Value,Option 2 Name,Option 2 Value,Image 1 Url
O6S1YQ6mKm,test-product-product-1,Test product,,test-product-description-1,draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,100,110,130,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,110,test-option,Option 1 value 1,,,test-image.png
5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,120,,,test-option,Option 1 Value blue,,,test-image.png
integration-tests/api/helpers/batch-job-seeder.js (new file, 26 lines)
@@ -0,0 +1,26 @@
const { Region } = require("@medusajs/medusa")

module.exports = async (connection, data = {}) => {
  const manager = connection.manager

  await manager.insert(Region, {
    id: "region-product-import-0",
    name: "ImportLand",
    currency_code: "eur",
    tax_rate: 0,
  })

  await manager.insert(Region, {
    id: "region-product-import-1",
    name: "denmark",
    currency_code: "dkk",
    tax_rate: 0,
  })

  await manager.insert(Region, {
    id: "region-product-import-2",
    name: "Denmark",
    currency_code: "dkk",
    tax_rate: 0,
  })
}
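Note that the seeder creates both "denmark" and "Denmark": region price columns in the import CSV are matched by exact region name (via the `retrieveByName` method added further down in this commit), so the headers `Price denmark [DKK]` and `Price Denmark [DKK]` resolve to different regions. A minimal sketch of that lookup, assuming the seeded data above:

// Hypothetical check mirroring the lookup the import strategy performs:
const lower = await regionService.retrieveByName("denmark") // region-product-import-1
const upper = await regionService.retrieveByName("Denmark") // region-product-import-2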
@@ -48,7 +48,7 @@ export default class LocalFileService extends AbstractFileService {
  }

  async getUploadStreamDescriptor({ name, ext }) {
-    const fileKey = `${name}-${Date.now()}.${ext}`
+    const fileKey = `${name}.${ext}`
    const filePath = path.resolve(this.upload_dir_, fileKey)

    const isFileExists = fs.existsSync(filePath)

@@ -66,4 +66,12 @@ export default class LocalFileService extends AbstractFileService {
      fileKey,
    }
  }

+  async getDownloadStream(fileData) {
+    const filePath = path.resolve(
+      this.upload_dir_,
+      fileData.fileKey + (fileData.ext ? `.${fileData.ext}` : "")
+    )
+    return fs.createReadStream(filePath)
+  }
}
integration-tests/helpers/use-redis.ts (new file, 58 lines)
@@ -0,0 +1,58 @@
const path = require("path")

const Redis = require("ioredis")
const { GenericContainer } = require("testcontainers")

require("dotenv").config({ path: path.join(__dirname, "../.env") })

const workerId = parseInt(process.env.JEST_WORKER_ID || "1")

const DB_USERNAME = process.env.DB_USERNAME || "postgres"
const DB_PASSWORD = process.env.DB_PASSWORD || ""

const DbTestUtil = {
  db_: null,

  setDb: function (connection) {
    this.db_ = connection
  },

  clear: async function () {
    /* noop */
  },

  teardown: async function () {
    /* noop */
  },

  shutdown: async function () {
    /* noop */
    // TODO: stop container
  },
}

const instance = DbTestUtil

module.exports = {
  initRedis: async function ({ cwd }) {
    // const configPath = path.resolve(path.join(cwd, `medusa-config.js`))
    // const { projectConfig } = require(configPath)

    const container = await new GenericContainer("redis")
      .withExposedPorts(6379)
      .start()

    const redisClient = new Redis({
      host: container.getHost(),
      port: container.getMappedPort(6379),
      db: workerId,
    })

    instance.setDb(redisClient)

    return redisClient
  },
  useRedis: function () {
    return instance
  },
}
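A sketch of how this helper is meant to be consumed from a test suite; only `initRedis`/`useRedis` are exported above, the surrounding jest wiring (and the require path, which depends on the test's location) is an assumption:

// Hypothetical test wiring: start the throwaway Redis container once,
// hand its address to the server under test, and tear the client down after.
const { initRedis, useRedis } = require("../helpers/use-redis")

beforeAll(async () => {
  const redisClient = await initRedis({ cwd: process.cwd() })
  // e.g. pass `redis://${redisClient.options.host}:${redisClient.options.port}`
  // to setupServer(...) as `redisUrl`
})

afterAll(async () => {
  await useRedis().shutdown()
})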
@@ -12,6 +12,7 @@
    "dotenv": "^10.0.0"
  },
  "dependencies": {
-    "@faker-js/faker": "^5.5.3"
+    "@faker-js/faker": "^5.5.3",
+    "testcontainers": "^8.10.1"
  }
}
@@ -20,7 +20,7 @@ class MinioService extends AbstractFileService {
    this.endpoint_ = options.endpoint
    this.s3ForcePathStyle_ = true
    this.signatureVersion_ = "v4"
    this.downloadUrlDuration = options.download_url_duration ?? 60 // 60 seconds
  }

  upload(file) {
@@ -45,7 +45,7 @@ class MinioService extends AbstractFileService {
          return
        }

-        resolve({ url: data.Location })
+        resolve({ url: data.Location, key: data.Key })
      })
    })
  }
@@ -55,27 +55,29 @@ class MinioService extends AbstractFileService {

    const s3 = new aws.S3()
    const params = {
-      Key: `${file}`,
      Bucket: this.bucket_,
+      Key: `${file.fileKey}`,
    }

-    return await Promise.all(
-      [
-        s3.deleteObject({...params, Bucket: this.bucket_}, (err, data) => {
+    return await Promise.all([
+      s3.deleteObject({ ...params, Bucket: this.bucket_ }, (err, data) => {
        if (err) {
          reject(err)
          return
        }
        resolve(data)
      }),
-        s3.deleteObject({...params, Bucket: this.private_bucket_}, (err, data) => {
-          if (err) {
-            reject(err)
-            return
-          }
-          resolve(data)
-        })
-      ]
-    )
+      s3.deleteObject(
+        { ...params, Bucket: this.private_bucket_ },
+        (err, data) => {
+          if (err) {
+            reject(err)
+            return
+          }
+          resolve(data)
+        }
+      ),
+    ])
  }

  async getUploadStreamDescriptor({ usePrivateBucket = true, ...fileData }) {
@@ -125,7 +127,7 @@ class MinioService extends AbstractFileService {
    const params = {
      Bucket: usePrivateBucket ? this.private_bucket_ : this.bucket_,
      Key: `${fileData.fileKey}`,
      Expires: this.downloadUrlDuration,
    }

    return await s3.getSignedUrlPromise("getObject", params)
@@ -49,6 +49,7 @@
  "dependencies": {
    "@hapi/joi": "^16.1.8",
    "@medusajs/medusa-cli": "^1.3.1",
+    "@types/ioredis": "^4.28.10",
    "@types/lodash": "^4.14.168",
    "awilix": "^4.2.3",
    "body-parser": "^1.19.0",
@@ -219,7 +219,7 @@ import { validator } from "../../../../utils/validator"
 *           description: The value to give for the Product Option at the same index in the Product's `options` field.
 *           type: string
 *   weight:
-     description: The wieght of the Product.
+     description: The weight of the Product.
 *     type: number
 *   length:
 *     description: The length of the Product.
@@ -62,7 +62,7 @@ import { EntityManager } from "typeorm"
 *     description: Whether Medusa should keep track of the inventory for this Product Variant.
 *     type: boolean
 *   weight:
-     description: The wieght of the Product Variant.
+     description: The weight of the Product Variant.
 *     type: number
 *   length:
 *     description: The length of the Product Variant.
@@ -28,7 +28,15 @@ export default ({ container, configModule, isTest }: LoaderOptions): void => {

  const core = glob.sync(coreFull, {
    cwd: __dirname,
-    ignore: ["**/__fixtures__/**", "**/index.js", "**/index.ts"],
+    ignore: [
+      "**/__fixtures__/**",
+      "**/index.js",
+      "**/index.ts",
+      "**/utils.js",
+      "**/utils.ts",
+      "**/types.js",
+      "**/types.ts",
+    ],
  })

  core.forEach((fn) => {
@@ -6,7 +6,7 @@ import { currencies } from "../../utils/currencies"

describe("CsvParser", () => {
  describe("parse", () => {
-    const csvParser = new CsvParser(createContainer(), {
+    const csvParser = new CsvParser({
      columns: [],
    })
@@ -62,7 +62,7 @@ describe("CsvParser", () => {
      ],
    }

-    const csvParser = new CsvParser(createContainer(), schema)
+    const csvParser = new CsvParser(schema)

    it("given a line containing a column which is not defined in the schema, then validation should fail", async () => {
      try {
@@ -139,7 +139,7 @@ describe("CsvParser", () => {
  })

  describe("mapTo", () => {
-    const csvParser = new CsvParser(createContainer(), {
+    const csvParser = new CsvParser({
      columns: [
        {
          name: "title",
@@ -160,7 +160,7 @@ describe("CsvParser", () => {
  })

  describe("transformer", () => {
-    const csvParser = new CsvParser(createContainer(), {
+    const csvParser = new CsvParser({
      columns: [
        {
          name: "title",
@@ -188,7 +188,7 @@ describe("CsvParser", () => {

  describe("match", () => {
    describe("regex", () => {
-      const csvParser = new CsvParser(createContainer(), {
+      const csvParser = new CsvParser({
        columns: [
          {
            name: "title",
@@ -273,7 +273,7 @@ describe("CsvParser", () => {
        },
      ],
    }
-    const csvParser = new CsvParser(createContainer(), schema)
+    const csvParser = new CsvParser(schema)

    it("given a column with match and reducer properties, when building data, should return the result of the reducer function", async () => {
      const content = await csvParser.buildData([
@@ -363,7 +363,7 @@ describe("CsvParser", () => {
        },
      ],
    }
-    const csvParser = new CsvParser(createContainer(), schema)
+    const csvParser = new CsvParser(schema)

    it("given a column with match and mapTo property, when building data, then the mapTo property should be ignored", async () => {
      const content = await csvParser.buildData([
@@ -661,6 +661,12 @@ describe("ProductVariantService", () => {
          return Promise.resolve([{ id: idOrIds, currency_code: "usd" }])
        }
      }),
+      retrieve: function () {
+        return Promise.resolve({
+          id: IdMap.getId("california"),
+          name: "California",
+        })
+      },
    }

    const productVariantService = new ProductVariantService({
@@ -313,6 +313,7 @@ class BatchJobService extends TransactionBaseService {
      batchJobOrId,
      BatchJobStatus.PRE_PROCESSED
    )

    if (batchJob.dry_run) {
      return batchJob
    }
@@ -1,6 +1,6 @@
-import { AwilixContainer } from "awilix"
import { difference } from "lodash"
import Papa, { ParseConfig } from "papaparse"

import { AbstractParser } from "../interfaces/abstract-parser"
import { CsvParserContext, CsvSchema } from "../interfaces/csv-parser"
@@ -16,11 +16,7 @@ class CsvParser<
> extends AbstractParser<TSchema, TParserResult, ParseConfig, TOutputResult> {
  protected readonly $$delimiter: string = ";"

-  constructor(
-    protected readonly container: AwilixContainer,
-    schema: TSchema,
-    delimiter?: string
-  ) {
+  constructor(schema: TSchema, delimiter?: string) {
    super(schema)
    if (delimiter) {
      this.$$delimiter = delimiter
@@ -10,8 +10,8 @@ type InjectedDependencies = {
  manager: EntityManager
  logger: Logger
  stagedJobRepository: typeof StagedJobRepository
-  redisClient: Redis
-  redisSubscriber: Redis
+  redisClient: Redis.Redis
+  redisSubscriber: Redis.Redis
}

type Subscriber<T = unknown> = (data: T, eventName: string) => Promise<void>
@@ -27,8 +27,8 @@ export default class EventBusService {
  protected readonly stagedJobRepository_: typeof StagedJobRepository
  protected readonly observers_: Map<string | symbol, Subscriber[]>
  protected readonly cronHandlers_: Map<string | symbol, Subscriber[]>
-  protected readonly redisClient_: Redis
-  protected readonly redisSubscriber_: Redis
+  protected readonly redisClient_: Redis.Redis
+  protected readonly redisSubscriber_: Redis.Redis
  protected readonly cronQueue_: Bull
  protected queue_: Bull
  protected shouldEnqueuerRun: boolean
@@ -47,7 +47,7 @@ export default class EventBusService {
    singleton = true
  ) {
    const opts = {
-      createClient: (type: string): Redis => {
+      createClient: (type: string): Redis.Redis => {
        switch (type) {
          case "client":
            return redisClient
@@ -256,9 +256,12 @@ class ProductVariantService extends BaseService {
    if (prices) {
      for (const price of prices) {
        if (price.region_id) {
+          const region = await this.regionService_.retrieve(price.region_id)
+
          await this.setRegionPrice(result.id, {
            amount: price.amount,
            region_id: price.region_id,
+            currency_code: region.currency_code,
          })
        } else {
          await this.setCurrencyPrice(result.id, price)
@@ -381,7 +384,10 @@ class ProductVariantService extends BaseService {

    for (const price of prices) {
      if (price.region_id) {
+        const region = await this.regionService_.retrieve(price.region_id)
+
        await this.setRegionPrice(variantId, {
+          currency_code: region.currency_code,
          region_id: price.region_id,
          amount: price.amount,
        })
@@ -517,8 +523,8 @@ class ProductVariantService extends BaseService {
  }

  /**
-   * Adds option value to a varaint.
-   * Fails when product with variant does not exists or
+   * Adds option value to a variant.
+   * Fails when product with variant does not exist or
   * if that product does not have an option with the given
   * option id. Fails if given variant is not found.
   * Option value must be of type string or number.
@@ -7,6 +7,7 @@ import { TransactionBaseService } from "../interfaces"
import SalesChannelFeatureFlag from "../loaders/feature-flags/sales-channels"
import {
  Product,
+  ProductOption,
  ProductTag,
  ProductType,
  ProductVariant,
@@ -770,6 +771,26 @@ class ProductService extends TransactionBaseService {
    })
  }

+  /**
+   * Retrieve product's option by title.
+   *
+   * @param title - title of the option
+   * @param productId - id of a product
+   * @return product option
+   */
+  async retrieveOptionByTitle(
+    title: string,
+    productId: string
+  ): Promise<ProductOption | undefined> {
+    const productOptionRepo = this.manager_.getCustomRepository(
+      this.productOptionRepository_
+    )
+
+    return productOptionRepo.findOne({
+      where: { title, product_id: productId },
+    })
+  }
+
  /**
   * Delete an option from a product.
   * @param productId - the product to delete an option from
@@ -436,6 +436,25 @@ class RegionService extends TransactionBaseService {
    return await this.retrieve(country.region_id, config)
  }

+  /**
+   * Retrieves a region by name.
+   *
+   * @param name - the name of the region to retrieve
+   * @return region with the matching name
+   */
+  async retrieveByName(name: string): Promise<Region | never> {
+    const [region] = await this.list({ name }, { take: 1 })
+
+    if (!region) {
+      throw new MedusaError(
+        MedusaError.Types.NOT_FOUND,
+        `Region "${name}" was not found`
+      )
+    }
+
+    return region
+  }
+
  /**
   * Retrieves a region by its id.
   *
@@ -47,7 +47,7 @@ class TaxProviderService extends TransactionBaseService {
  protected readonly taxLineRepo_: typeof LineItemTaxLineRepository
  protected readonly smTaxLineRepo_: typeof ShippingMethodTaxLineRepository
  protected readonly taxProviderRepo_: typeof TaxProviderRepository
-  protected readonly redis_: Redis
+  protected readonly redis_: Redis.Redis
  protected readonly eventBus_: EventBusService

  constructor(container: AwilixContainer) {
@@ -438,7 +438,7 @@ class TaxProviderService extends TransactionBaseService {
    productId: string,
    regionId: string,
    value: TaxServiceRate[]
-  ): Promise<void> {
+  ): Promise<null | string> {
    const cacheKey = this.getCacheKey(productId, regionId)
    return await this.redis_.set(
      cacheKey,
@@ -0,0 +1,176 @@
import { Readable, PassThrough } from "stream"
import { EntityManager } from "typeorm"

import { FileService } from "medusa-interfaces"
import { IdMap, MockManager } from "medusa-test-utils"

import { User } from "../../../../models"
import { BatchJobStatus } from "../../../../types/batch-job"
import ProductImportStrategy from "../../../batch-jobs/product/import"
import {
  BatchJobService,
  ProductService,
  ProductVariantService,
  RegionService,
  ShippingProfileService,
} from "../../../../services"
import { InjectedProps } from "../../../batch-jobs/product/types"

let fakeJob = {
  id: IdMap.getId("product-import-job"),
  type: "product-import",
  context: {
    csvFileKey: "csv.key",
  },
  results: { advancement_count: 0, count: 6 },
  created_by: IdMap.getId("product-import-creator"),
  created_by_user: {} as User,
  result: {},
  dry_run: false,
  status: BatchJobStatus.PROCESSING,
}

async function* generateCSVDataForStream() {
  yield "Product id,Product Handle,Product Title,Product Subtitle,Product Description,Product Status,Product Thumbnail,Product Weight,Product Length,Product Width,Product Height,Product HS Code,Product Origin Country,Product Mid Code,Product Material,Product Collection Title,Product Collection Handle,Product Type,Product Tags,Product Discountable,Product External ID,Product Profile Name,Product Profile Type,Variant id,Variant Title,Variant SKU,Variant Barcode,Variant Inventory Quantity,Variant Allow backorder,Variant Manage inventory,Variant Weight,Variant Length,Variant Width,Variant Height,Variant HS Code,Variant Origin Country,Variant Mid Code,Variant Material,Price france [USD],Price USD,Price denmark [DKK],Price Denmark [DKK],Option 1 Name,Option 1 Value,Option 2 Name,Option 2 Value,Image 1 Url\n"
  yield "O6S1YQ6mKm,test-product-product-1,Test product,,test-product-description-1,draft,,,,,,,,,,Test collection 1,test-collection1,test-type-1,123_1,TRUE,,profile_1,profile_type_1,SebniWTDeC,Test variant,test-sku-1,test-barcode-1,10,FALSE,TRUE,,,,,,,,,100,110,130,,test-option-1,option 1 value red,test-option-2,option 2 value 1,test-image.png\n"
  yield "5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,CaBp7amx3r,Test variant,test-sku-2,test-barcode-2,10,FALSE,TRUE,,,,,,,,,,,,110,test-option,Option 1 value 1,,,test-image.png\n"
  yield "5VxiEkmnPV,test-product-product-2,Test product,,test-product-description,draft,,,,,,,,,,Test collection,test-collection2,test-type,123,TRUE,,profile_2,profile_type_2,3SS1MHGDEJ,Test variant,test-sku-3,test-barcode-3,10,FALSE,TRUE,,,,,,,,,,120,,,test-option,Option 1 Value blue,,,test-image.png\n"
}

/* ******************** SERVICES MOCK ******************** */

const fileServiceMock = {
  withTransaction: function () {
    return this
  },
  delete: jest.fn(),
  getDownloadStream: jest.fn().mockImplementation(() => {
    return Promise.resolve(Readable.from(generateCSVDataForStream()))
  }),
  getUploadStreamDescriptor: jest.fn().mockImplementation(() => ({
    writeStream: new PassThrough(),
    promise: Promise.resolve(),
  })),
}

const batchJobServiceMock = {
  withTransaction: function () {
    return this
  },
  update: jest.fn().mockImplementation((data) => {
    fakeJob = {
      ...fakeJob,
      ...data,
    }
    return Promise.resolve(fakeJob)
  }),
  complete: jest.fn().mockImplementation(() => {
    fakeJob.status = BatchJobStatus.COMPLETED
    return Promise.resolve(fakeJob)
  }),
  confirmed: jest.fn().mockImplementation(() => {
    fakeJob.status = BatchJobStatus.CONFIRMED
    return Promise.resolve(fakeJob)
  }),
  retrieve: jest.fn().mockImplementation(() => {
    return Promise.resolve(fakeJob)
  }),
}

const productServiceMock = {
  withTransaction: function () {
    return this
  },
  count: jest.fn().mockImplementation(() => Promise.resolve()),
}

const shippingProfileServiceMock = {
  retrieveDefault: jest.fn().mockImplementation((_data) => {
    return Promise.resolve({ id: "default_shipping_profile" })
  }),
}

const productVariantServiceMock = {
  withTransaction: function () {
    return this
  },
  count: jest.fn().mockImplementation(() => Promise.resolve()),
}

const regionServiceMock = {
  withTransaction: function () {
    return this
  },
  retrieveByName: jest.fn().mockImplementation(() =>
    Promise.resolve({
      id: "reg_HMnixPlOicAs7aBlXuchAGxd",
      name: "Denmark",
      currency_code: "DKK",
      currency: "DKK",
      tax_rate: 0.25,
      tax_code: null,
      countries: [
        {
          id: "1001",
          iso_2: "DK",
          iso_3: "DNK",
          num_code: "208",
          name: "denmark",
          display_name: "Denmark",
        },
      ],
    })
  ),
}

const managerMock = MockManager

/* ******************** PRODUCT IMPORT STRATEGY TESTS ******************** */

describe("Product import strategy", () => {
  afterAll(() => {
    jest.clearAllMocks()
  })

  const productImportStrategy = new ProductImportStrategy({
    manager: managerMock as EntityManager,
    fileService: fileServiceMock as typeof FileService,
    batchJobService: batchJobServiceMock as unknown as BatchJobService,
    productService: productServiceMock as unknown as ProductService,
    shippingProfileService:
      shippingProfileServiceMock as unknown as ShippingProfileService,
    productVariantService:
      productVariantServiceMock as unknown as ProductVariantService,
    regionService: regionServiceMock as unknown as RegionService,
  } as unknown as InjectedProps)

  it("`preProcessBatchJob` should generate import ops and upload them to a bucket using the file service", async () => {
    const getImportInstructionsSpy = jest.spyOn(
      productImportStrategy,
      "getImportInstructions"
    )

    await productImportStrategy.preProcessBatchJob(fakeJob.id)

    expect(getImportInstructionsSpy).toBeCalledTimes(1)

    expect(fileServiceMock.getUploadStreamDescriptor).toBeCalledTimes(2)

    expect(fileServiceMock.getUploadStreamDescriptor).toHaveBeenNthCalledWith(
      1,
      {
        ext: "json",
        name: `imports/products/import/ops/${fakeJob.id}-PRODUCT_CREATE`,
      }
    )
    expect(fileServiceMock.getUploadStreamDescriptor).toHaveBeenNthCalledWith(
      2,
      {
        ext: "json",
        name: `imports/products/import/ops/${fakeJob.id}-VARIANT_UPDATE`, // because row data has variant.id
      }
    )

    getImportInstructionsSpy.mockRestore()
  })
})
packages/medusa/src/strategies/batch-jobs/product/import.ts (new file, 732 lines)
@@ -0,0 +1,732 @@
|
||||
/* eslint-disable valid-jsdoc */
|
||||
import { EntityManager } from "typeorm"
|
||||
import { MedusaError } from "medusa-core-utils"
|
||||
|
||||
import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces"
|
||||
import CsvParser from "../../../services/csv-parser"
|
||||
import {
|
||||
BatchJobService,
|
||||
ProductService,
|
||||
ProductVariantService,
|
||||
RegionService,
|
||||
ShippingProfileService,
|
||||
} from "../../../services"
|
||||
import { CreateProductInput } from "../../../types/product"
|
||||
import {
|
||||
CreateProductVariantInput,
|
||||
UpdateProductVariantInput,
|
||||
} from "../../../types/product-variant"
|
||||
import {
|
||||
ImportJobContext,
|
||||
InjectedProps,
|
||||
OperationType,
|
||||
ProductImportCsvSchema,
|
||||
TBuiltProductImportLine,
|
||||
TParsedProductImportRowData,
|
||||
} from "./types"
|
||||
import { transformProductData, transformVariantData } from "./utils"
|
||||
|
||||
/**
|
||||
* Process this many variant rows before reporting progress.
|
||||
*/
|
||||
const BATCH_SIZE = 100
|
||||
|
||||
/**
|
||||
* Default strategy class used for a batch import of products/variants.
|
||||
*/
|
||||
class ProductImportStrategy extends AbstractBatchJobStrategy {
|
||||
static identifier = "product-import"
|
||||
|
||||
static batchType = "product_import"
|
||||
|
||||
private processedCounter: Record<string, number> = {}
|
||||
|
||||
protected manager_: EntityManager
|
||||
protected transactionManager_: EntityManager | undefined
|
||||
|
||||
protected readonly fileService_: IFileService
|
||||
|
||||
protected readonly productService_: ProductService
|
||||
protected readonly batchJobService_: BatchJobService
|
||||
protected readonly productVariantService_: ProductVariantService
|
||||
protected readonly shippingProfileService_: ShippingProfileService
|
||||
protected readonly regionService_: RegionService
|
||||
|
||||
protected readonly csvParser_: CsvParser<
|
||||
ProductImportCsvSchema,
|
||||
Record<string, string>,
|
||||
Record<string, string>
|
||||
>
|
||||
|
||||
constructor({
|
||||
batchJobService,
|
||||
productService,
|
||||
productVariantService,
|
||||
shippingProfileService,
|
||||
regionService,
|
||||
fileService,
|
||||
manager,
|
||||
}: InjectedProps) {
|
||||
// eslint-disable-next-line prefer-rest-params
|
||||
super(arguments[0])
|
||||
|
||||
this.csvParser_ = new CsvParser(CSVSchema)
|
||||
|
||||
this.manager_ = manager
|
||||
this.fileService_ = fileService
|
||||
this.batchJobService_ = batchJobService
|
||||
this.productService_ = productService
|
||||
this.productVariantService_ = productVariantService
|
||||
this.shippingProfileService_ = shippingProfileService
|
||||
this.regionService_ = regionService
|
||||
}
|
||||
|
||||
buildTemplate(): Promise<string> {
|
||||
throw new Error("Not implemented!")
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a description of a row on which the error occurred and throw a Medusa error.
|
||||
*
|
||||
* @param row - Parsed CSV row data
|
||||
*/
|
||||
protected static throwDescriptiveError(
|
||||
row: TParsedProductImportRowData
|
||||
): never {
|
||||
const message = `Error while processing row with:
|
||||
product id: ${row["product.id"]},
|
||||
product handle: ${row["product.handle"]},
|
||||
variant id: ${row["variant.id"]}
|
||||
variant sku: ${row["variant.sku"]}`
|
||||
|
||||
throw new MedusaError(MedusaError.Types.INVALID_DATA, message)
|
||||
}
|
||||
|
||||
/**
|
||||
* Generate instructions for update/create of products/variants from parsed CSV rows.
|
||||
*
|
||||
* @param csvData - An array of parsed CSV rows.
|
||||
*/
|
||||
async getImportInstructions(
|
||||
csvData: TParsedProductImportRowData[]
|
||||
): Promise<Record<OperationType, TParsedProductImportRowData[]>> {
|
||||
const shippingProfile = await this.shippingProfileService_.retrieveDefault()
|
||||
|
||||
const seenProducts = {}
|
||||
|
||||
const productsCreate: TParsedProductImportRowData[] = []
|
||||
const productsUpdate: TParsedProductImportRowData[] = []
|
||||
|
||||
const variantsCreate: TParsedProductImportRowData[] = []
|
||||
const variantsUpdate: TParsedProductImportRowData[] = []
|
||||
|
||||
for (const row of csvData) {
|
||||
if ((row["variant.prices"] as Record<string, any>[]).length) {
|
||||
await this.prepareVariantPrices(row)
|
||||
}
|
||||
|
||||
if (row["variant.id"]) {
|
||||
variantsUpdate.push(row)
|
||||
} else {
|
||||
variantsCreate.push(row)
|
||||
}
|
||||
|
||||
// save only first occurrence
|
||||
if (!seenProducts[row["product.handle"] as string]) {
|
||||
row["product.profile_id"] = shippingProfile!.id
|
||||
if (row["product.product.id"]) {
|
||||
productsUpdate.push(row)
|
||||
} else {
|
||||
productsCreate.push(row)
|
||||
}
|
||||
|
||||
seenProducts[row["product.handle"] as string] = true
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
[OperationType.ProductCreate]: productsCreate,
|
||||
[OperationType.VariantCreate]: variantsCreate,
|
||||
[OperationType.ProductUpdate]: productsUpdate,
|
||||
[OperationType.VariantUpdate]: variantsUpdate,
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Prepare prices records for insert - find and append region ids to records that contain a region name.
|
||||
*
|
||||
* @param row - An object containing parsed row data.
|
||||
*/
|
||||
protected async prepareVariantPrices(row): Promise<void> {
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
|
||||
const prices: Record<string, string | number>[] = []
|
||||
|
||||
for (const price of row["variant.prices"]) {
|
||||
const record: Record<string, string | number> = {
|
||||
amount: price.amount,
|
||||
}
|
||||
|
||||
if (price.regionName) {
|
||||
const region = await this.regionService_
|
||||
.withTransaction(transactionManager)
|
||||
.retrieveByName(price.regionName)
|
||||
|
||||
if (!region) {
|
||||
throw new MedusaError(
|
||||
MedusaError.Types.INVALID_DATA,
|
||||
`Trying to set a price for a region ${price.regionName} that doesn't exist`
|
||||
)
|
||||
}
|
||||
|
||||
record.region_id = region!.id
|
||||
} else {
|
||||
record.currency_code = price.currency_code
|
||||
}
|
||||
|
||||
prices.push(record)
|
||||
}
|
||||
|
||||
row["variant.prices"] = prices
|
||||
}
|
||||
|
||||
/**
|
||||
* A worker method called after a batch job has been created.
|
||||
* The method parses a CSV file, generates sets of instructions
|
||||
* for processing and stores these instructions to a JSON file
|
||||
* which is uploaded to a bucket.
|
||||
*
|
||||
* @param batchJobId - An id of a job that is being preprocessed.
|
||||
*/
|
||||
async preProcessBatchJob(batchJobId: string): Promise<void> {
|
||||
const batchJob = await this.batchJobService_.retrieve(batchJobId)
|
||||
|
||||
const csvFileKey = (batchJob.context as ImportJobContext).fileKey
|
||||
const csvStream = await this.fileService_.getDownloadStream({
|
||||
fileKey: csvFileKey,
|
||||
})
|
||||
|
||||
const parsedData = await this.csvParser_.parse(csvStream)
|
||||
const builtData = await this.csvParser_.buildData(parsedData)
|
||||
|
||||
const ops = await this.getImportInstructions(builtData)
|
||||
|
||||
await this.uploadImportOpsFile(batchJobId, ops)
|
||||
|
||||
await this.batchJobService_.update(batchJobId, {
|
||||
result: {
|
||||
advancement_count: 0,
|
||||
// number of update/create operations to execute
|
||||
count: Object.keys(ops).reduce((acc, k) => acc + ops[k].length, 0),
|
||||
stat_descriptors: [
|
||||
{
|
||||
key: "product-import-count",
|
||||
name: "Products/variants to import",
|
||||
message: `There will be ${
|
||||
ops[OperationType.ProductCreate].length
|
||||
} products created (${
|
||||
ops[OperationType.ProductUpdate].length
|
||||
} updated).
|
||||
${
|
||||
ops[OperationType.VariantCreate].length
|
||||
} variants will be created and ${
|
||||
ops[OperationType.VariantUpdate].length
|
||||
} updated`,
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* The main processing method called after a batch job
|
||||
* is ready/confirmed for processing.
|
||||
*
|
||||
* @param batchJobId - An id of a batch job that is being processed.
|
||||
*/
|
||||
async processJob(batchJobId: string): Promise<void> {
|
||||
return await this.atomicPhase_(async () => {
|
||||
await this.createProducts(batchJobId)
|
||||
await this.updateProducts(batchJobId)
|
||||
await this.createVariants(batchJobId)
|
||||
await this.updateVariants(batchJobId)
|
||||
|
||||
this.finalize(batchJobId)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Method creates products using `ProductService` and parsed data from a CSV row.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
*/
|
||||
private async createProducts(batchJobId: string): Promise<void> {
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
const productOps = await this.downloadImportOpsFile(
|
||||
batchJobId,
|
||||
OperationType.ProductCreate
|
||||
)
|
||||
|
||||
for (const productOp of productOps) {
|
||||
try {
|
||||
await this.productService_
|
||||
.withTransaction(transactionManager)
|
||||
.create(
|
||||
transformProductData(productOp) as unknown as CreateProductInput
|
||||
)
|
||||
} catch (e) {
|
||||
ProductImportStrategy.throwDescriptiveError(productOp)
|
||||
}
|
||||
|
||||
this.updateProgress(batchJobId)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method updates existing products in the DB using a CSV row data.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
*/
|
||||
private async updateProducts(batchJobId: string): Promise<void> {
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
const productOps = await this.downloadImportOpsFile(
|
||||
batchJobId,
|
||||
OperationType.ProductUpdate
|
||||
)
|
||||
|
||||
for (const productOp of productOps) {
|
||||
try {
|
||||
await this.productService_
|
||||
.withTransaction(transactionManager)
|
||||
.update(
|
||||
productOp["product.id"] as string,
|
||||
transformProductData(productOp)
|
||||
)
|
||||
} catch (e) {
|
||||
ProductImportStrategy.throwDescriptiveError(productOp)
|
||||
}
|
||||
|
||||
this.updateProgress(batchJobId)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method creates product variants from a CSV data.
|
||||
* Method also handles processing of variant options.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
*/
|
||||
private async createVariants(batchJobId: string): Promise<void> {
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
|
||||
const variantOps = await this.downloadImportOpsFile(
|
||||
batchJobId,
|
||||
OperationType.VariantCreate
|
||||
)
|
||||
|
||||
for (const variantOp of variantOps) {
|
||||
try {
|
||||
const variant = transformVariantData(variantOp)
|
||||
|
||||
const product = await this.productService_
|
||||
.withTransaction(transactionManager)
|
||||
.retrieveByHandle(variantOp["product.handle"] as string, {
|
||||
relations: ["variants", "variants.options", "options"],
|
||||
})
|
||||
|
||||
const optionIds =
|
||||
(variantOp["product.options"] as Record<string, string>[])?.map(
|
||||
(variantOption) =>
|
||||
product!.options.find(
|
||||
(createdProductOption) =>
|
||||
createdProductOption.title === variantOption.title
|
||||
)!.id
|
||||
) || []
|
||||
|
||||
variant.options =
|
||||
(variant.options as Record<string, any>[])?.map((o, index) => ({
|
||||
...o,
|
||||
option_id: optionIds[index],
|
||||
})) || []
|
||||
|
||||
await this.productVariantService_
|
||||
.withTransaction(transactionManager)
|
||||
.create(product!, variant as unknown as CreateProductVariantInput)
|
||||
|
||||
this.updateProgress(batchJobId)
|
||||
} catch (e) {
|
||||
ProductImportStrategy.throwDescriptiveError(variantOp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Method updates product variants from a CSV data.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
*/
|
||||
private async updateVariants(batchJobId: string): Promise<void> {
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
|
||||
const variantOps = await this.downloadImportOpsFile(
|
||||
batchJobId,
|
||||
OperationType.VariantUpdate
|
||||
)
|
||||
|
||||
const productServiceTx =
|
||||
this.productService_.withTransaction(transactionManager)
|
||||
|
||||
for (const variantOp of variantOps) {
|
||||
try {
|
||||
const product = await productServiceTx.retrieveByHandle(
|
||||
variantOp["product.handle"] as string
|
||||
)
|
||||
|
||||
await this.prepareVariantOptions(variantOp, product.id)
|
||||
|
||||
await this.productVariantService_
|
||||
.withTransaction(transactionManager)
|
||||
.update(
|
||||
variantOp["variant.id"] as string,
|
||||
transformVariantData(variantOp) as UpdateProductVariantInput
|
||||
)
|
||||
} catch (e) {
|
||||
ProductImportStrategy.throwDescriptiveError(variantOp)
|
||||
}
|
||||
|
||||
this.updateProgress(batchJobId)
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Extend records used for creating variant options with corresponding product option ids.
|
||||
*
|
||||
* @param variantOp - Parsed row data form CSV
|
||||
* @param productId - id of variant's product
|
||||
*/
|
||||
protected async prepareVariantOptions(
|
||||
variantOp,
|
||||
productId: string
|
||||
): Promise<void> {
|
||||
const productOptions = variantOp["variant.options"] || []
|
||||
|
||||
for (const o of productOptions) {
|
||||
const option = await this.productService_.retrieveOptionByTitle(
|
||||
o._title,
|
||||
productId
|
||||
)
|
||||
o.option_id = option?.id
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Store import ops JSON file to a bucket.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
* @param results - An object containing parsed CSV data.
|
||||
*/
|
||||
protected async uploadImportOpsFile(
|
||||
batchJobId: string,
|
||||
results: Record<OperationType, TParsedProductImportRowData[]>
|
||||
): Promise<void> {
|
||||
const uploadPromises: Promise<void>[] = []
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
|
||||
for (const op in results) {
|
||||
if (results[op]?.length) {
|
||||
const { writeStream, promise } = await this.fileService_
|
||||
.withTransaction(transactionManager)
|
||||
.getUploadStreamDescriptor({
|
||||
name: `imports/products/import/ops/${batchJobId}-${op}`,
|
||||
ext: "json",
|
||||
})
|
||||
|
||||
uploadPromises.push(promise)
|
||||
|
||||
writeStream.write(JSON.stringify(results[op]))
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(uploadPromises)
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove parsed ops JSON file.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
* @param op - Type of import operation.
|
||||
*/
|
||||
protected async downloadImportOpsFile(
|
||||
batchJobId: string,
|
||||
op: OperationType
|
||||
): Promise<TParsedProductImportRowData[]> {
|
||||
let data = ""
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
|
||||
const readableStream = await this.fileService_
|
||||
.withTransaction(transactionManager)
|
||||
.getDownloadStream({
|
||||
fileKey: `imports/products/import/ops/${batchJobId}-${op}`,
|
||||
ext: "json",
|
||||
})
|
||||
|
||||
return await new Promise((resolve) => {
|
||||
readableStream.on("data", (chunk) => {
|
||||
data += chunk
|
||||
})
|
||||
readableStream.on("end", () => {
|
||||
resolve(JSON.parse(data))
|
||||
})
|
||||
readableStream.on("error", () =>
|
||||
resolve([] as TParsedProductImportRowData[])
|
||||
)
|
||||
})
|
||||
}
|
||||
|
||||
/**
|
||||
* Delete parsed CSV ops files.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
*/
|
||||
protected async deleteOpsFiles(batchJobId: string): Promise<void> {
|
||||
const transactionManager = this.transactionManager_ ?? this.manager_
|
||||
|
||||
for (const op of Object.keys(OperationType)) {
|
||||
try {
|
||||
this.fileService_.withTransaction(transactionManager).delete({
|
||||
fileKey: `imports/products/import/ops/-${batchJobId}-${op}`,
|
||||
})
|
||||
} catch (e) {
|
||||
// noop
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update count of processed data in the batch job `result` column
|
||||
* and cleanup temp JSON files.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
*/
|
||||
private async finalize(batchJobId: string): Promise<void> {
|
||||
const batchJob = await this.batchJobService_.retrieve(batchJobId)
|
||||
|
||||
delete this.processedCounter[batchJobId]
|
||||
|
||||
await this.batchJobService_.update(batchJobId, {
|
||||
result: { advancement_count: batchJob.result.count },
|
||||
})
|
||||
|
||||
const { fileKey } = batchJob.context as ImportJobContext
|
||||
|
||||
await this.fileService_.delete({ fileKey })
|
||||
|
||||
await this.deleteOpsFiles(batchJobId)
|
||||
}
|
||||
|
||||
/**
|
||||
* Store the progress in the batch job `result` column.
|
||||
* Method is called after every update/create operation,
|
||||
* but after every `BATCH_SIZE` processed rows info is written to the DB.
|
||||
*
|
||||
* @param batchJobId - An id of the current batch job being processed.
|
||||
*/
|
||||
private async updateProgress(batchJobId: string): Promise<void> {
|
||||
const newCount = (this.processedCounter[batchJobId] || 0) + 1
|
||||
this.processedCounter[batchJobId] = newCount
|
||||
|
||||
if (newCount % BATCH_SIZE !== 0) {
|
||||
return
|
||||
}
|
||||
|
||||
await this.batchJobService_.update(batchJobId, {
|
||||
result: {
|
||||
advancement_count: newCount,
|
||||
},
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export default ProductImportStrategy

/**
 * Schema definition for the CSV parser.
 */
const CSVSchema: ProductImportCsvSchema = {
  columns: [
    // PRODUCT
    {
      name: "Product id",
      mapTo: "product.id",
    },
    {
      name: "Product Handle",
      mapTo: "product.handle",
      required: true,
    },
    { name: "Product Title", mapTo: "product.title" },
    { name: "Product Subtitle", mapTo: "product.subtitle" },
    { name: "Product Description", mapTo: "product.description" },
    { name: "Product Status", mapTo: "product.status" },
    { name: "Product Thumbnail", mapTo: "product.thumbnail" },
    { name: "Product Weight", mapTo: "product.weight" },
    { name: "Product Length", mapTo: "product.length" },
    { name: "Product Width", mapTo: "product.width" },
    { name: "Product Height", mapTo: "product.height" },
    { name: "Product HS Code", mapTo: "product.hs_code" },
    { name: "Product Origin Country", mapTo: "product.origin_country" },
    { name: "Product Mid Code", mapTo: "product.mid_code" },
    { name: "Product Material", mapTo: "product.material" },
    // PRODUCT-COLLECTION
    { name: "Product Collection Title", mapTo: "product.collection.title" },
    { name: "Product Collection Handle", mapTo: "product.collection.handle" },
    // PRODUCT-TYPE
    { name: "Product Type", mapTo: "product.type.value" },
    // PRODUCT-TAGS
    {
      name: "Product Tags",
      mapTo: "product.tags",
      transform: (value: string) =>
        `${value}`.split(",").map((v) => ({ value: v })),
    },
    //
    { name: "Product Discountable", mapTo: "product.discountable" },
    { name: "Product External ID", mapTo: "product.external_id" },
    // PRODUCT-SHIPPING_PROFILE
    { name: "Product Profile Name", mapTo: "product.profile.name" },
    { name: "Product Profile Type", mapTo: "product.profile.type" },
    // VARIANTS
    {
      name: "Variant id",
      mapTo: "variant.id",
    },
    { name: "Variant Title", mapTo: "variant.title" },
    { name: "Variant SKU", mapTo: "variant.sku" },
    { name: "Variant Barcode", mapTo: "variant.barcode" },
    { name: "Variant Inventory Quantity", mapTo: "variant.inventory_quantity" },
    { name: "Variant Allow backorder", mapTo: "variant.allow_backorder" },
    { name: "Variant Manage inventory", mapTo: "variant.manage_inventory" },
    { name: "Variant Weight", mapTo: "variant.weight" },
    { name: "Variant Length", mapTo: "variant.length" },
    { name: "Variant Width", mapTo: "variant.width" },
    { name: "Variant Height", mapTo: "variant.height" },
    { name: "Variant HS Code", mapTo: "variant.hs_code" },
    { name: "Variant Origin Country", mapTo: "variant.origin_country" },
    { name: "Variant Mid Code", mapTo: "variant.mid_code" },
    { name: "Variant Material", mapTo: "variant.material" },

    // ==== DYNAMIC FIELDS ====

    // PRODUCT_OPTIONS
    {
      name: "Option Name",
      match: /Option \d+ Name/,
      reducer: (builtLine, key, value): TBuiltProductImportLine => {
        builtLine["product.options"] = builtLine["product.options"] || []

        if (typeof value === "undefined" || value === null) {
          return builtLine
        }

        ;(
          builtLine["product.options"] as Record<string, string | number>[]
        ).push({ title: value })

        return builtLine
      },
    },
    {
      name: "Option Value",
      match: /Option \d+ Value/,
      reducer: (
        builtLine: TParsedProductImportRowData,
        key: string,
        value: string,
        context: any
      ): TBuiltProductImportLine => {
        builtLine["variant.options"] = builtLine["variant.options"] || []

        if (typeof value === "undefined" || value === null) {
          return builtLine
        }

        ;(
          builtLine["variant.options"] as Record<string, string | number>[]
        ).push({
          value,
          // "Option N Value" -> look up the sibling "Option N Name" column
          _title: context.line[key.slice(0, -6) + " Name"],
        })

        return builtLine
      },
    },
    // PRICES
    {
      name: "Price Region",
      match: /Price .* \[([A-Z]{2,4})\]/,
      reducer: (
        builtLine: TParsedProductImportRowData,
        key,
        value
      ): TBuiltProductImportLine => {
        builtLine["variant.prices"] = builtLine["variant.prices"] || []

        if (typeof value === "undefined" || value === null) {
          return builtLine
        }

        const regionName = key.split(" ")[1]

        ;(
          builtLine["variant.prices"] as Record<string, string | number>[]
        ).push({
          amount: value,
          regionName,
        })

        return builtLine
      },
    },
    {
      name: "Price Currency",
      match: /Price [A-Z]{2,4}/,
      reducer: (
        builtLine: TParsedProductImportRowData,
        key,
        value
      ): TBuiltProductImportLine => {
        builtLine["variant.prices"] = builtLine["variant.prices"] || []

        if (typeof value === "undefined" || value === null) {
          return builtLine
        }

        const currency = key.split(" ")[1]

        ;(
          builtLine["variant.prices"] as Record<string, string | number>[]
        ).push({
          amount: value,
          currency_code: currency,
        })

        return builtLine
      },
    },
    // IMAGES
    {
      name: "Image Url",
      match: /Image \d+ Url/,
      reducer: (builtLine: any, key, value): TBuiltProductImportLine => {
        builtLine["product.images"] = builtLine["product.images"] || []

        if (typeof value === "undefined" || value === null) {
          return builtLine
        }

        builtLine["product.images"].push(value)

        return builtLine
      },
    },
  ],
}
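
To make the dynamic columns concrete: a hedged sketch of how one CSV row with numbered option and price columns would reduce under the schema above (input values are invented; the built line shows the expected outcome of the reducers, not captured parser output):

// Invented input row: CSV header -> cell value
const row = {
  "Product Handle": "t-shirt",
  "Option 1 Name": "Color",
  "Option 1 Value": "Blue",
  "Price USD": 1500,
  "Price france [EUR]": 1300,
}

// Expected built line after the reducers run:
const builtLine = {
  "product.handle": "t-shirt",
  "product.options": [{ title: "Color" }],
  // _title is resolved from the sibling "Option 1 Name" column
  "variant.options": [{ value: "Blue", _title: "Color" }],
  "variant.prices": [
    { amount: 1500, currency_code: "USD" },
    { amount: 1300, regionName: "france" },
  ],
}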

packages/medusa/src/strategies/batch-jobs/product/types.ts (new file, 65 lines)
@@ -0,0 +1,65 @@
import { EntityManager } from "typeorm"
import { FileService } from "medusa-interfaces"

import {
  BatchJobService,
  ProductService,
  ProductVariantService,
  RegionService,
  ShippingProfileService,
} from "../../../services"
import { ProductOptionRepository } from "../../../repositories/product-option"
import { CsvSchema } from "../../../interfaces/csv-parser"

/**
 * DI props for the Product import strategy
 */
export type InjectedProps = {
  batchJobService: BatchJobService
  productService: ProductService
  productVariantService: ProductVariantService
  shippingProfileService: ShippingProfileService
  regionService: RegionService
  fileService: typeof FileService

  manager: EntityManager
}

/**
 * Data shape returned by the CSVParser.
 */
export type TParsedProductImportRowData = Record<
  string,
  string | number | (string | number | object)[]
>

/**
 * CSV parser's row reducer result data shape.
 */
export type TBuiltProductImportLine = Record<string, any>

/**
 * Schema definition for an import CSV file.
 */
export type ProductImportCsvSchema = CsvSchema<
  TParsedProductImportRowData,
  TBuiltProductImportLine
>

/**
 * Import Batch job context column type.
 */
export type ImportJobContext = {
  total: number
  fileKey: string
}

/**
 * Supported batch job import ops.
 */
export enum OperationType {
  ProductCreate = "PRODUCT_CREATE",
  ProductUpdate = "PRODUCT_UPDATE",
  VariantCreate = "VARIANT_CREATE",
  VariantUpdate = "VARIANT_UPDATE",
}
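
The `OperationType` keys double as the suffix of the per-job ops file keys seen in the strategy above. A small sketch of how one job's four keys would be derived, reusing the `imports/products/import/ops/${batchJobId}-${op}` template from `getDownloadStream` (the helper name is illustrative, not part of this PR):

// Hypothetical helper: derive the four ops file keys for one batch job
function opsFileKeys(batchJobId: string): string[] {
  // Object.keys on a string enum yields the member names, e.g. "ProductCreate"
  return Object.keys(OperationType).map(
    (op) => `imports/products/import/ops/${batchJobId}-${op}`
  )
}

// opsFileKeys("job_123") ->
//   "imports/products/import/ops/job_123-ProductCreate", and so on for
//   ProductUpdate, VariantCreate, and VariantUpdate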

packages/medusa/src/strategies/batch-jobs/product/utils.ts (new file, 60 lines)
@@ -0,0 +1,60 @@
import { TParsedProductImportRowData } from "./types"

/**
 * Pick keys for a new object by regex.
 *
 * @param data - Initial data object
 * @param regex - A regex used to pick which keys are going to be copied to the new object
 */
export function pickObjectPropsByRegex(
  data: TParsedProductImportRowData,
  regex: RegExp
): TParsedProductImportRowData {
  const keyPredicate = (key: string): boolean => regex.test(key)
  const ret = {}

  for (const k in data) {
    if (keyPredicate(k)) {
      ret[k] = data[k]
    }
  }

  return ret
}

/**
 * Pick data from a parsed CSV object relevant for product create/update and remove prefixes from keys.
 */
export function transformProductData(
  data: TParsedProductImportRowData
): TParsedProductImportRowData {
  const ret = {}
  const productData = pickObjectPropsByRegex(data, /product\./)

  Object.keys(productData).forEach((k) => {
    const key = k.split("product.")[1]
    ret[key] = productData[k]
  })

  return ret
}

/**
 * Pick data from a parsed CSV object relevant for variant create/update and remove prefixes from keys.
 */
export function transformVariantData(
  data: TParsedProductImportRowData
): TParsedProductImportRowData {
  const ret = {}
  const variantData = pickObjectPropsByRegex(data, /variant\./)

  Object.keys(variantData).forEach((k) => {
    const key = k.split("variant.")[1]
    ret[key] = variantData[k]
  })

  // include product handle and options to keep track of the associated product
  ret["product.handle"] = data["product.handle"]
  ret["product.options"] = data["product.options"]

  return ret
}
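
A quick illustration of the two transforms on a built line; the input values are invented and the commented results are the expected outputs given the implementations above:

// Invented built line after CSV parsing
const line = {
  "product.handle": "t-shirt",
  "product.title": "Medusa T-Shirt",
  "variant.sku": "SHIRT-S-BLUE",
  "variant.prices": [{ amount: 1500, currency_code: "USD" }],
}

// transformProductData(line) strips the "product." prefix:
//   { handle: "t-shirt", title: "Medusa T-Shirt" }

// transformVariantData(line) strips the "variant." prefix but keeps the
// product handle/options keys so the variant can be matched to its product:
//   {
//     sku: "SHIRT-S-BLUE",
//     prices: [{ amount: 1500, currency_code: "USD" }],
//     "product.handle": "t-shirt",
//     "product.options": undefined,
//   }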

@@ -34,9 +34,13 @@ class BatchJobSubscriber {
       batchJob.type
     )

-    await batchJobStrategy.preProcessBatchJob(batchJob.id)
-
-    await this.batchJobService_.setPreProcessingDone(batchJob.id)
+    try {
+      await batchJobStrategy.preProcessBatchJob(batchJob.id)
+      await this.batchJobService_.setPreProcessingDone(batchJob.id)
+    } catch (e) {
+      await this.batchJobService_.setFailed(batchJob.id)
+      throw e
+    }
   }

   processBatchJob = async (data): Promise<void> => {
@@ -48,9 +52,13 @@ class BatchJobSubscriber {

     await this.batchJobService_.setProcessing(batchJob.id)

-    await batchJobStrategy.processJob(batchJob.id)
-
-    await this.batchJobService_.complete(batchJob.id)
+    try {
+      await batchJobStrategy.processJob(batchJob.id)
+      await this.batchJobService_.complete(batchJob.id)
+    } catch (e) {
+      await this.batchJobService_.setFailed(batchJob.id)
+      throw e
+    }
   }
 }

yarn.lock
@@ -5215,6 +5215,7 @@ __metadata:
     "@hapi/joi": ^16.1.8
     "@medusajs/medusa-cli": ^1.3.1
     "@types/express": ^4.17.13
+    "@types/ioredis": ^4.28.10
     "@types/jest": ^27.5.0
     "@types/jsonwebtoken": ^8.5.5
     "@types/lodash": ^4.14.168
@@ -8639,6 +8640,15 @@ __metadata:
   languageName: node
   linkType: hard

+"@types/ioredis@npm:^4.28.10":
+  version: 4.28.10
+  resolution: "@types/ioredis@npm:4.28.10"
+  dependencies:
+    "@types/node": "*"
+  checksum: ff680fef6750721c465ee9d6060d3229e49e5b217d68503c4972c4b869b8f84b91cbd9b7d5195a40fb0bff6ab9a4f5e24a2cc17a368ddb0bfe2dfd4eb5fc1872
+  languageName: node
+  linkType: hard
+
 "@types/is-ci@npm:^3.0.0":
   version: 3.0.0
   resolution: "@types/is-ci@npm:3.0.0"