feat(medusa/product-export-strategy): Implement the Product export strategy (#1688)
committed by GitHub
parent 0e34800573
commit 7b09b8c36c
9  .github/workflows/action.yml  vendored
@@ -43,6 +43,15 @@ jobs:
runs-on: ubuntu-latest
services:
redis:
image: redis
options: >-
--health-cmd "redis-cli ping"
--health-interval 10s
--health-timeout 5s
--health-retries 5
ports:
- 6379:6379
postgres:
image: postgres
env:
@@ -12,7 +12,7 @@ Object {
|
||||
"id": "job_3",
|
||||
"result": null,
|
||||
"status": "created",
|
||||
"type": "batch_2",
|
||||
"type": "product-export",
|
||||
"updated_at": Any<String>,
|
||||
},
|
||||
Object {
|
||||
@@ -24,7 +24,7 @@ Object {
|
||||
"id": "job_2",
|
||||
"result": null,
|
||||
"status": "created",
|
||||
"type": "batch_2",
|
||||
"type": "product-export",
|
||||
"updated_at": Any<String>,
|
||||
},
|
||||
Object {
|
||||
@@ -36,7 +36,7 @@ Object {
|
||||
"id": "job_1",
|
||||
"result": null,
|
||||
"status": "created",
|
||||
"type": "batch_1",
|
||||
"type": "product-export",
|
||||
"updated_at": Any<String>,
|
||||
},
|
||||
],
|
||||
@@ -51,7 +51,26 @@ Object {
|
||||
"canceled_at": null,
|
||||
"completed_at": null,
|
||||
"confirmed_at": null,
|
||||
"context": Object {},
|
||||
"context": Object {
|
||||
"list_config": Object {
|
||||
"order": Object {
|
||||
"created_at": "DESC",
|
||||
},
|
||||
"relations": Array [
|
||||
"variants",
|
||||
"variants.prices",
|
||||
"variants.options",
|
||||
"images",
|
||||
"options",
|
||||
"tags",
|
||||
"type",
|
||||
"collection",
|
||||
"variants.prices.region",
|
||||
],
|
||||
"skip": 0,
|
||||
"take": 50,
|
||||
},
|
||||
},
|
||||
"created_at": Any<String>,
|
||||
"created_by": "admin_user",
|
||||
"deleted_at": null,
|
||||
@@ -62,7 +81,7 @@ Object {
|
||||
"processing_at": null,
|
||||
"result": null,
|
||||
"status": "created",
|
||||
"type": "batch_1",
|
||||
"type": "product-export",
|
||||
"updated_at": Any<String>,
|
||||
}
|
||||
`;
|
||||
@@ -83,7 +102,7 @@ Object {
|
||||
"processing_at": null,
|
||||
"result": null,
|
||||
"status": "canceled",
|
||||
"type": "batch_1",
|
||||
"type": "product-export",
|
||||
"updated_at": Any<String>,
|
||||
}
|
||||
`;
|
||||
|
||||
@@ -24,25 +24,23 @@ const setupJobDb = async (dbConnection) => {
|
||||
|
||||
await simpleBatchJobFactory(dbConnection, {
|
||||
id: "job_1",
|
||||
type: "batch_1",
|
||||
type: "product-export",
|
||||
created_by: "admin_user",
|
||||
})
|
||||
await simpleBatchJobFactory(dbConnection, {
|
||||
id: "job_2",
|
||||
type: "batch_2",
|
||||
ready_at: new Date(),
|
||||
type: "product-export",
|
||||
created_by: "admin_user",
|
||||
})
|
||||
await simpleBatchJobFactory(dbConnection, {
|
||||
id: "job_3",
|
||||
type: "batch_2",
|
||||
ready_at: new Date(),
|
||||
type: "product-export",
|
||||
created_by: "admin_user",
|
||||
})
|
||||
await simpleBatchJobFactory(dbConnection, {
|
||||
id: "job_4",
|
||||
type: "batch_1",
|
||||
ready_at: new Date(),
|
||||
type: "product-export",
|
||||
status: "awaiting_confirmation",
|
||||
created_by: "member-user",
|
||||
})
|
||||
} catch (err) {
|
||||
@@ -142,16 +140,11 @@ describe("/admin/batch-jobs", () => {
|
||||
})
|
||||
|
||||
describe("POST /admin/batch-jobs/", () => {
|
||||
beforeEach(async() => {
|
||||
try {
|
||||
await adminSeeder(dbConnection)
|
||||
} catch (err) {
|
||||
console.log(err)
|
||||
throw err
|
||||
}
|
||||
beforeEach(async () => {
|
||||
await setupJobDb(dbConnection)
|
||||
})
|
||||
|
||||
afterEach(async() => {
|
||||
afterEach(async () => {
|
||||
const db = useDb()
|
||||
await db.teardown()
|
||||
})
|
||||
@@ -162,7 +155,7 @@ describe("/admin/batch-jobs", () => {
|
||||
const response = await api.post(
|
||||
"/admin/batch-jobs",
|
||||
{
|
||||
type: "batch_1",
|
||||
type: "product-export",
|
||||
context: {},
|
||||
},
|
||||
adminReqConfig
|
||||
@@ -212,7 +205,7 @@ describe("/admin/batch-jobs", () => {
|
||||
await setupJobDb(dbConnection)
|
||||
await simpleBatchJobFactory(dbConnection, {
|
||||
id: "job_complete",
|
||||
type: "batch_1",
|
||||
type: "product-export",
|
||||
created_by: "admin_user",
|
||||
completed_at: new Date(),
|
||||
})
|
||||
@@ -222,7 +215,7 @@ describe("/admin/batch-jobs", () => {
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async () => {
|
||||
afterEach(async() => {
|
||||
const db = useDb()
|
||||
await db.teardown()
|
||||
})
|
||||
|
||||
150  integration-tests/api/__tests__/batch-jobs/product/export.js  Normal file
@@ -0,0 +1,150 @@
|
||||
const path = require("path")
|
||||
const fs = require('fs/promises')
|
||||
|
||||
const setupServer = require("../../../../helpers/setup-server")
|
||||
const { useApi } = require("../../../../helpers/use-api")
|
||||
const { initDb, useDb } = require("../../../../helpers/use-db")
|
||||
|
||||
const adminSeeder = require("../../../helpers/admin-seeder")
|
||||
const userSeeder = require("../../../helpers/user-seeder")
|
||||
const productSeeder = require("../../../helpers/product-seeder")
|
||||
|
||||
const adminReqConfig = {
|
||||
headers: {
|
||||
Authorization: "Bearer test_token",
|
||||
},
|
||||
}
|
||||
|
||||
jest.setTimeout(1000000)
|
||||
|
||||
describe("Batch job of product-export type", () => {
|
||||
let medusaProcess
|
||||
let dbConnection
|
||||
|
||||
beforeAll(async () => {
|
||||
const cwd = path.resolve(path.join(__dirname, "..", "..", ".."))
|
||||
dbConnection = await initDb({ cwd })
|
||||
medusaProcess = await setupServer({
|
||||
cwd,
|
||||
redisUrl: "redis://127.0.0.1:6379",
|
||||
uploadDir: __dirname,
|
||||
verbose: false
|
||||
})
|
||||
})
|
||||
|
||||
let exportFilePath = ""
|
||||
|
||||
afterAll(async () => {
|
||||
const db = useDb()
|
||||
await db.shutdown()
|
||||
|
||||
medusaProcess.kill()
|
||||
})
|
||||
|
||||
beforeEach(async () => {
|
||||
try {
|
||||
await productSeeder(dbConnection)
|
||||
await adminSeeder(dbConnection)
|
||||
await userSeeder(dbConnection)
|
||||
} catch (e) {
|
||||
console.log(e)
|
||||
throw e
|
||||
}
|
||||
})
|
||||
|
||||
afterEach(async() => {
|
||||
const db = useDb()
|
||||
await db.teardown()
|
||||
|
||||
const isFileExists = (await fs.stat(exportFilePath))?.isFile()
|
||||
if (isFileExists) {
|
||||
await fs.unlink(exportFilePath)
|
||||
}
|
||||
})
|
||||
|
||||
it('should export a csv file containing the expected products', async () => {
|
||||
jest.setTimeout(1000000)
|
||||
const api = useApi()
|
||||
|
||||
const productPayload = {
|
||||
title: "Test export product",
|
||||
description: "test-product-description",
|
||||
type: { value: "test-type" },
|
||||
images: ["test-image.png", "test-image-2.png"],
|
||||
collection_id: "test-collection",
|
||||
tags: [{ value: "123" }, { value: "456" }],
|
||||
options: [{ title: "size" }, { title: "color" }],
|
||||
variants: [
|
||||
{
|
||||
title: "Test variant",
|
||||
inventory_quantity: 10,
|
||||
sku: "test-variant-sku-product-export",
|
||||
prices: [
|
||||
{
|
||||
currency_code: "usd",
|
||||
amount: 100,
|
||||
},
|
||||
{
|
||||
currency_code: "eur",
|
||||
amount: 45,
|
||||
},
|
||||
{
|
||||
currency_code: "dkk",
|
||||
amount: 30,
|
||||
},
|
||||
],
|
||||
options: [{ value: "large" }, { value: "green" }],
|
||||
},
|
||||
],
|
||||
}
|
||||
const createProductRes =
|
||||
await api.post("/admin/products", productPayload, adminReqConfig)
|
||||
const productId = createProductRes.data.product.id
|
||||
const variantId = createProductRes.data.product.variants[0].id
|
||||
|
||||
const batchPayload = {
|
||||
type: "product-export",
|
||||
context: {
|
||||
filterable_fields: { title: "Test export product" }
|
||||
},
|
||||
}
|
||||
const batchJobRes = await api.post("/admin/batch-jobs", batchPayload, adminReqConfig)
|
||||
const batchJobId = batchJobRes.data.batch_job.id
|
||||
|
||||
expect(batchJobId).toBeTruthy()
|
||||
|
||||
// Poll the status until the job is completed
|
||||
let batchJob;
|
||||
let shouldContinuePulling = true
|
||||
while (shouldContinuePulling) {
|
||||
const res = await api
|
||||
.get(`/admin/batch-jobs/${batchJobId}`, adminReqConfig)
|
||||
|
||||
await new Promise((resolve, _) => {
|
||||
setTimeout(resolve, 1000)
|
||||
})
|
||||
|
||||
batchJob = res.data.batch_job
|
||||
shouldContinuePulling = !(batchJob.status === "completed")
|
||||
}
|
||||
|
||||
exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
|
||||
const isFileExists = (await fs.stat(exportFilePath)).isFile()
|
||||
|
||||
expect(isFileExists).toBeTruthy()
|
||||
|
||||
const data = (await fs.readFile(exportFilePath)).toString()
|
||||
const [, ...lines] = data.split("\r\n").filter(l => l)
|
||||
|
||||
expect(lines.length).toBe(1)
|
||||
|
||||
const lineColumn = lines[0].split(";")
|
||||
|
||||
expect(lineColumn[0]).toBe(productId)
|
||||
expect(lineColumn[2]).toBe(productPayload.title)
|
||||
expect(lineColumn[4]).toBe(productPayload.description)
|
||||
expect(lineColumn[23]).toBe(variantId)
|
||||
expect(lineColumn[24]).toBe(productPayload.variants[0].title)
|
||||
expect(lineColumn[25]).toBe(productPayload.variants[0].sku)
|
||||
})
|
||||
})
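The test above polls GET /admin/batch-jobs/:id once a second until the job reports completed. A small helper along these lines keeps the same behaviour but fails fast on a failed job and bounds the wait; the name awaitBatchJobCompletion and the timeout value are illustrative, not part of this commit:

// Sketch of a bounded polling helper; `api` is the client returned by useApi()
// and `adminReqConfig` carries the auth header, exactly as in the test above.
async function awaitBatchJobCompletion(api: any, batchJobId: string, timeoutMs = 60000) {
  const startedAt = Date.now()
  // eslint-disable-next-line no-constant-condition
  while (true) {
    const res = await api.get(`/admin/batch-jobs/${batchJobId}`, adminReqConfig)
    const batchJob = res.data.batch_job

    if (batchJob.status === "completed") {
      return batchJob
    }
    if (batchJob.status === "failed") {
      throw new Error(`Batch job ${batchJobId} failed: ${JSON.stringify(batchJob.result)}`)
    }
    if (Date.now() - startedAt > timeoutMs) {
      throw new Error(`Batch job ${batchJobId} did not complete within ${timeoutMs}ms`)
    }

    // wait a second before polling again
    await new Promise((resolve) => setTimeout(resolve, 1000))
  }
}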
|
||||
@@ -17,15 +17,14 @@ export const simpleBatchJobFactory = async (
): Promise<BatchJob> => {
const manager = connection.manager

const job = manager.create(BatchJob, {
const job = manager.create<BatchJob>(BatchJob, {
id: data.id,
status: data.status ?? BatchJobStatus.CREATED,
awaiting_confirmation_at: data.awaiting_confirmation_at ?? null,
completed_at: data.completed_at ?? null,
type: data.type ?? "test-job",
created_by: data.created_by ?? null,
context: data.context ?? {},
})

return await manager.save(job)
return await manager.save<BatchJob>(job)
}
@@ -5,7 +5,7 @@ const workerId = parseInt(process.env.JEST_WORKER_ID || "1")
|
||||
module.exports = {
|
||||
plugins: [],
|
||||
projectConfig: {
|
||||
// redis_url: REDIS_URL,
|
||||
redis_url: process.env.REDIS_URL,
|
||||
database_url: `postgres://${DB_USERNAME}:${DB_PASSWORD}@localhost/medusa-integration-${workerId}`,
|
||||
database_type: "postgres",
|
||||
jwt_secret: 'test',
|
||||
|
||||
@@ -8,16 +8,16 @@
|
||||
"build": "babel src -d dist --extensions \".ts,.js\""
|
||||
},
|
||||
"dependencies": {
|
||||
"@medusajs/medusa": "1.2.1-dev-1650623081351",
|
||||
"@medusajs/medusa": "1.3.2-dev-1655728455189",
|
||||
"faker": "^5.5.3",
|
||||
"medusa-interfaces": "1.2.1-dev-1650623081351",
|
||||
"medusa-interfaces": "1.3.0-dev-1655728455189",
|
||||
"typeorm": "^0.2.31"
|
||||
},
|
||||
"devDependencies": {
|
||||
"@babel/cli": "^7.12.10",
|
||||
"@babel/core": "^7.12.10",
|
||||
"@babel/node": "^7.12.10",
|
||||
"babel-preset-medusa-package": "1.1.19-dev-1650623081351",
|
||||
"babel-preset-medusa-package": "1.1.19-dev-1655728455189",
|
||||
"jest": "^26.6.3"
|
||||
}
|
||||
}
|
||||
|
||||
61  integration-tests/api/src/services/local-file-service.js  Normal file
@@ -0,0 +1,61 @@
|
||||
import { AbstractFileService } from "@medusajs/medusa"
|
||||
import stream from "stream"
|
||||
import { resolve } from "path"
|
||||
import * as fs from "fs"
|
||||
|
||||
export default class LocalFileService extends AbstractFileService {
|
||||
constructor({}, options) {
|
||||
super({});
|
||||
this.upload_dir_ = process.env.UPLOAD_DIR ?? options.upload_dir ?? "uploads/images";
|
||||
|
||||
if (!fs.existsSync(this.upload_dir_)) {
|
||||
fs.mkdirSync(this.upload_dir_);
|
||||
}
|
||||
}
|
||||
|
||||
upload(file) {
|
||||
return new Promise((resolve, reject) => {
|
||||
const path = resolve(this.upload_dir_, file.originalname)
|
||||
fs.writeFile(path, "", err => {
|
||||
if (err) {
|
||||
reject(err);
|
||||
}
|
||||
|
||||
resolve({ url: path });
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
delete({ name }) {
|
||||
return new Promise((resolve, _) => {
|
||||
const path = resolve(this.upload_dir_, name)
|
||||
fs.unlink(path, err => {
|
||||
if (err) {
|
||||
throw err;
|
||||
}
|
||||
|
||||
resolve("file unlinked");
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
async getUploadStreamDescriptor({ name, ext }) {
|
||||
const fileKey = `${name}.${ext}`
|
||||
const path = resolve(this.upload_dir_, fileKey)
|
||||
|
||||
const isFileExists = fs.existsSync(path)
|
||||
if (!isFileExists) {
|
||||
await this.upload({ originalname: fileKey })
|
||||
}
|
||||
|
||||
const pass = new stream.PassThrough()
|
||||
pass.pipe(fs.createWriteStream(path))
|
||||
|
||||
return {
|
||||
writeStream: pass,
|
||||
promise: Promise.resolve(),
|
||||
url: `${this.upload_dir_}/${fileKey}`,
|
||||
fileKey,
|
||||
}
|
||||
}
|
||||
}
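getUploadStreamDescriptor above hands back a PassThrough stream piped into a file under the upload dir, together with a fileKey and a promise. A consumer such as an export strategy would write CSV lines to writeStream, end it, await the promise and record fileKey on the job result. This is a hedged sketch of that consumption (the actual ProductExportStrategy internals are not part of this diff); note that this test service resolves its promise immediately rather than on flush:

// Illustrative consumer of the local file service above.
const { writeStream, fileKey, promise } = await fileService.getUploadStreamDescriptor({
  name: "product-export",
  ext: "csv",
})

writeStream.write(header)          // CSV header line
for (const line of lines) {
  writeStream.write(line)          // one line per exported variant
}
writeStream.end()

await promise                      // Promise.resolve() here; a real service resolves on flush
await batchJobService.update(batchJobId, { result: { file_key: fileKey } })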
|
||||
@@ -1312,10 +1312,25 @@
|
||||
"@jridgewell/resolve-uri" "^3.0.3"
|
||||
"@jridgewell/sourcemap-codec" "^1.4.10"
|
||||
|
||||
"@medusajs/medusa-cli@1.2.1-dev-1650623081351":
|
||||
version "1.2.1-dev-1650623081351"
|
||||
resolved "http://localhost:4873/@medusajs%2fmedusa-cli/-/medusa-cli-1.2.1-dev-1650623081351.tgz#4c02a0989ecb2dda97839c5a4d9f70ce2c5fda6c"
|
||||
integrity sha512-afZmMq3Z6WTO77wnbQkdeq7VUxT+6bth7n5Kld3beK+4YBY7DHMTVHs+GoHxagpTBOA6jdvqBXT1llKl/UMT6Q==
|
||||
"@mapbox/node-pre-gyp@^1.0.0":
|
||||
version "1.0.9"
|
||||
resolved "http://localhost:4873/@mapbox%2fnode-pre-gyp/-/node-pre-gyp-1.0.9.tgz#09a8781a3a036151cdebbe8719d6f8b25d4058bc"
|
||||
integrity sha512-aDF3S3rK9Q2gey/WAttUlISduDItz5BU3306M9Eyv6/oS40aMprnopshtlKTykxRNIBEZuRMaZAnbrQ4QtKGyw==
|
||||
dependencies:
|
||||
detect-libc "^2.0.0"
|
||||
https-proxy-agent "^5.0.0"
|
||||
make-dir "^3.1.0"
|
||||
node-fetch "^2.6.7"
|
||||
nopt "^5.0.0"
|
||||
npmlog "^5.0.1"
|
||||
rimraf "^3.0.2"
|
||||
semver "^7.3.5"
|
||||
tar "^6.1.11"
|
||||
|
||||
"@medusajs/medusa-cli@1.3.0-dev-1655728455189":
|
||||
version "1.3.0-dev-1655728455189"
|
||||
resolved "http://localhost:4873/@medusajs%2fmedusa-cli/-/medusa-cli-1.3.0-dev-1655728455189.tgz#1d6bf606fbd96167faf0824d221646f664e06d66"
|
||||
integrity sha512-wMDVBN6X6cG6Ni/0/H5K54Hs3RmYLiGvI81VqWQtisjia0T98Hf8MuhxlJ4kS3ny34r5M7Qjs+KCoiospgzbqA==
|
||||
dependencies:
|
||||
"@babel/polyfill" "^7.8.7"
|
||||
"@babel/runtime" "^7.9.6"
|
||||
@@ -1333,8 +1348,8 @@
|
||||
is-valid-path "^0.1.1"
|
||||
joi-objectid "^3.0.1"
|
||||
meant "^1.0.1"
|
||||
medusa-core-utils "1.1.31-dev-1650623081351"
|
||||
medusa-telemetry "0.0.11-dev-1650623081351"
|
||||
medusa-core-utils "1.1.31-dev-1655728455189"
|
||||
medusa-telemetry "0.0.11-dev-1655728455189"
|
||||
netrc-parser "^3.1.6"
|
||||
open "^8.0.6"
|
||||
ora "^5.4.1"
|
||||
@@ -1348,13 +1363,13 @@
|
||||
winston "^3.3.3"
|
||||
yargs "^15.3.1"
|
||||
|
||||
"@medusajs/medusa@1.2.1-dev-1650623081351":
|
||||
version "1.2.1-dev-1650623081351"
|
||||
resolved "http://localhost:4873/@medusajs%2fmedusa/-/medusa-1.2.1-dev-1650623081351.tgz#9e887b9d8c396e08d04597b3cb0faf61e702d745"
|
||||
integrity sha512-1PNRQcqeKgSQ+LWRLGD9TkNX0csXKk+eKISw5MJKW4U0WB1fyi5TVAqtEA2mmWKMQDc0sq0H3DwaOBrwtTL/XA==
|
||||
"@medusajs/medusa@1.3.2-dev-1655728455189":
|
||||
version "1.3.2-dev-1655728455189"
|
||||
resolved "http://localhost:4873/@medusajs%2fmedusa/-/medusa-1.3.2-dev-1655728455189.tgz#c3c189fa7aebf94117349067acc0b0a0bc537a28"
|
||||
integrity sha512-oYMihSFpQKrAru2vRnCU9q6A6j6Zgq0/6153m+vibhrbuvW8NZUJSoRjEEA925c8Yprr4xWwFhYghB0jUjIAPA==
|
||||
dependencies:
|
||||
"@hapi/joi" "^16.1.8"
|
||||
"@medusajs/medusa-cli" "1.2.1-dev-1650623081351"
|
||||
"@medusajs/medusa-cli" "1.3.0-dev-1655728455189"
|
||||
"@types/lodash" "^4.14.168"
|
||||
awilix "^4.2.3"
|
||||
body-parser "^1.19.0"
|
||||
@@ -1377,10 +1392,12 @@
|
||||
joi "^17.3.0"
|
||||
joi-objectid "^3.0.1"
|
||||
jsonwebtoken "^8.5.1"
|
||||
medusa-core-utils "1.1.31-dev-1650623081351"
|
||||
medusa-test-utils "1.1.37-dev-1650623081351"
|
||||
medusa-core-utils "1.1.31-dev-1655728455189"
|
||||
medusa-test-utils "1.1.37-dev-1655728455189"
|
||||
morgan "^1.9.1"
|
||||
multer "^1.4.2"
|
||||
node-schedule "^2.1.0"
|
||||
papaparse "^5.3.2"
|
||||
passport "^0.4.0"
|
||||
passport-http-bearer "^1.0.1"
|
||||
passport-jwt "^4.0.0"
|
||||
@@ -2024,10 +2041,10 @@ babel-preset-jest@^26.6.2:
|
||||
babel-plugin-jest-hoist "^26.6.2"
|
||||
babel-preset-current-node-syntax "^1.0.0"
|
||||
|
||||
babel-preset-medusa-package@1.1.19-dev-1650623081351:
|
||||
version "1.1.19-dev-1650623081351"
|
||||
resolved "http://localhost:4873/babel-preset-medusa-package/-/babel-preset-medusa-package-1.1.19-dev-1650623081351.tgz#bd46931534637e9b6a6c37a8fd740622967a7258"
|
||||
integrity sha512-eL/xbx7BG1Yrx2WfvdDsZknR94okk1VPigp54HYEXI8kZu/NhzZlayMSTMFiOrF4NvnW0i/3DS9pUcxdG2gQEg==
|
||||
babel-preset-medusa-package@1.1.19-dev-1655728455189:
|
||||
version "1.1.19-dev-1655728455189"
|
||||
resolved "http://localhost:4873/babel-preset-medusa-package/-/babel-preset-medusa-package-1.1.19-dev-1655728455189.tgz#a7884d6869c9ac7adb23c5789d6dc858614381c7"
|
||||
integrity sha512-i8JKgbu59S+WU58tRV5iJtJ9UnJnpOPwEtU9b7WVz9X5LLDeUk6Mmhcb6XvTuYCfdC8bXJ/Hk8NlqETIZw1lPQ==
|
||||
dependencies:
|
||||
"@babel/plugin-proposal-class-properties" "^7.12.1"
|
||||
"@babel/plugin-proposal-decorators" "^7.12.1"
|
||||
@@ -2753,6 +2770,14 @@ cron-parser@^2.13.0:
|
||||
is-nan "^1.3.0"
|
||||
moment-timezone "^0.5.31"
|
||||
|
||||
cron-parser@^3.5.0:
|
||||
version "3.5.0"
|
||||
resolved "http://localhost:4873/cron-parser/-/cron-parser-3.5.0.tgz#b1a9da9514c0310aa7ef99c2f3f1d0f8c235257c"
|
||||
integrity sha512-wyVZtbRs6qDfFd8ap457w3XVntdvqcwBGxBoTvJQH9KGVKL/fB+h2k3C8AqiVxvUQKN1Ps/Ns46CNViOpVDhfQ==
|
||||
dependencies:
|
||||
is-nan "^1.3.2"
|
||||
luxon "^1.26.0"
|
||||
|
||||
cross-spawn@^6.0.0, cross-spawn@^6.0.5:
|
||||
version "6.0.5"
|
||||
resolved "http://localhost:4873/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4"
|
||||
@@ -4183,7 +4208,7 @@ is-lambda@^1.0.1:
|
||||
resolved "http://localhost:4873/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5"
|
||||
integrity sha1-PZh3iZ5qU+/AFgUEzeFfgubwYdU=
|
||||
|
||||
is-nan@^1.3.0:
|
||||
is-nan@^1.3.0, is-nan@^1.3.2:
|
||||
version "1.3.2"
|
||||
resolved "http://localhost:4873/is-nan/-/is-nan-1.3.2.tgz#043a54adea31748b55b6cd4e09aadafa69bd9e1d"
|
||||
integrity sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w==
|
||||
@@ -5068,6 +5093,11 @@ logform@^2.3.2, logform@^2.4.0:
|
||||
safe-stable-stringify "^2.3.1"
|
||||
triple-beam "^1.3.0"
|
||||
|
||||
long-timeout@0.1.1:
|
||||
version "0.1.1"
|
||||
resolved "http://localhost:4873/long-timeout/-/long-timeout-0.1.1.tgz#9721d788b47e0bcb5a24c2e2bee1a0da55dab514"
|
||||
integrity sha512-BFRuQUqc7x2NWxfJBCyUrN8iYUYznzL9JROmRz1gZ6KlOIgmoD+njPVbb+VNn2nGMKggMsK79iUNErillsrx7w==
|
||||
|
||||
lower-case@^2.0.2:
|
||||
version "2.0.2"
|
||||
resolved "http://localhost:4873/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28"
|
||||
@@ -5082,6 +5112,11 @@ lru-cache@^6.0.0:
|
||||
dependencies:
|
||||
yallist "^4.0.0"
|
||||
|
||||
luxon@^1.26.0:
|
||||
version "1.28.0"
|
||||
resolved "http://localhost:4873/luxon/-/luxon-1.28.0.tgz#e7f96daad3938c06a62de0fb027115d251251fbf"
|
||||
integrity sha512-TfTiyvZhwBYM/7QdAVDh+7dBTBA29v4ik0Ce9zda3Mnf8on1S5KJI8P2jKFZ8+5C0jhmr0KwJEO/Wdpm0VeWJQ==
|
||||
|
||||
make-dir@^2.0.0, make-dir@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "http://localhost:4873/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5"
|
||||
@@ -5153,23 +5188,23 @@ media-typer@0.3.0:
|
||||
resolved "http://localhost:4873/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
|
||||
integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
|
||||
|
||||
medusa-core-utils@1.1.31-dev-1650623081351:
|
||||
version "1.1.31-dev-1650623081351"
|
||||
resolved "http://localhost:4873/medusa-core-utils/-/medusa-core-utils-1.1.31-dev-1650623081351.tgz#ac70dfe1bf7fac52fa1d08a49bba7b522f1073f6"
|
||||
integrity sha512-PMqWGob/v+nQfkiUJyA8NfyosZIgbBAXwHrr61XSJWECtnwNYA/VoCXsqJwwnluE3mHbX1ePh5dfzHQ/FS03+g==
|
||||
medusa-core-utils@1.1.31-dev-1655728455189:
|
||||
version "1.1.31-dev-1655728455189"
|
||||
resolved "http://localhost:4873/medusa-core-utils/-/medusa-core-utils-1.1.31-dev-1655728455189.tgz#d1765c3aa5ff294722a44c7f5b90dacb185f03ee"
|
||||
integrity sha512-hsyo22Hp5A18+qA5DhfQRvY5eAM3IpzxuuY/SVk50Pxrf+r3U4aWmL4uX0qGpQeKWv6gOLmPxt+OyJIDUPNrwg==
|
||||
dependencies:
|
||||
joi "^17.3.0"
|
||||
joi-objectid "^3.0.1"
|
||||
|
||||
medusa-interfaces@1.2.1-dev-1650623081351:
|
||||
version "1.2.1-dev-1650623081351"
|
||||
resolved "http://localhost:4873/medusa-interfaces/-/medusa-interfaces-1.2.1-dev-1650623081351.tgz#1d9e2f924c79ff256c1e49693ac38b992415fc9e"
|
||||
integrity sha512-z8ESArLGSIfOwQ1MwgG3BmFkAA0FTSZFjJ313FCbWAnxY7y9KqmMWhKGJd2keybafrm9OghTZknuQhMCbgwkcw==
|
||||
medusa-interfaces@1.3.0-dev-1655728455189:
|
||||
version "1.3.0-dev-1655728455189"
|
||||
resolved "http://localhost:4873/medusa-interfaces/-/medusa-interfaces-1.3.0-dev-1655728455189.tgz#4eaa0dc41671bb24777b3630c8640124c4ca9f24"
|
||||
integrity sha512-lcYVYQUg5ImtXLtR4pHHOqXi1q1kl46Gi9rgjAUWVvldnssVDnGThjgnVCFeCkDz+qMqn7lxTHoZXaDIneadlA==
|
||||
|
||||
medusa-telemetry@0.0.11-dev-1650623081351:
|
||||
version "0.0.11-dev-1650623081351"
|
||||
resolved "http://localhost:4873/medusa-telemetry/-/medusa-telemetry-0.0.11-dev-1650623081351.tgz#6c672cb98c4f24ce8d0722020aae1b2ed75d402e"
|
||||
integrity sha512-d4ul1DZwPRhDk4tgODiCybPF7nms7QWfNj8g7Jx3yjFVV3o4GK4vl3ui5E21SMyqKMsGX3WtUeRInQ+qWWiOiA==
|
||||
medusa-telemetry@0.0.11-dev-1655728455189:
|
||||
version "0.0.11-dev-1655728455189"
|
||||
resolved "http://localhost:4873/medusa-telemetry/-/medusa-telemetry-0.0.11-dev-1655728455189.tgz#552ba1e2038641321f152b5ba7df0d88f29c26bf"
|
||||
integrity sha512-MCTDiUg62y2xmRAFeOZHdaKXd2VhVw/9JE8Ewm9IY8QWSkumQOMKVPUAfdgra8EYsp3TD7LixWRlPB2TTn7DKg==
|
||||
dependencies:
|
||||
axios "^0.21.1"
|
||||
axios-retry "^3.1.9"
|
||||
@@ -5181,13 +5216,13 @@ medusa-telemetry@0.0.11-dev-1650623081351:
|
||||
remove-trailing-slash "^0.1.1"
|
||||
uuid "^8.3.2"
|
||||
|
||||
medusa-test-utils@1.1.37-dev-1650623081351:
|
||||
version "1.1.37-dev-1650623081351"
|
||||
resolved "http://localhost:4873/medusa-test-utils/-/medusa-test-utils-1.1.37-dev-1650623081351.tgz#2bc27ea2856c355316f71ee575b277897911d151"
|
||||
integrity sha512-UUfjbj+DWUozo0Q2ozJVH5Lf5EZZOnVgGetKZ35WZ/0m7E7EFB4n3fxtMu/+c2xXdOCuWrE534XtojUXQp5xFw==
|
||||
medusa-test-utils@1.1.37-dev-1655728455189:
|
||||
version "1.1.37-dev-1655728455189"
|
||||
resolved "http://localhost:4873/medusa-test-utils/-/medusa-test-utils-1.1.37-dev-1655728455189.tgz#d6283c0075a69dee988d437f5aedfbba334aa847"
|
||||
integrity sha512-SHkPjpiC6A37Duat9HrWPz+tBbZL50zDEXr/sjDiF0N8cEYNIxVRz2Y2/kbbKegSG3UTcckk5VUtxP17LiQKBw==
|
||||
dependencies:
|
||||
"@babel/plugin-transform-classes" "^7.9.5"
|
||||
medusa-core-utils "1.1.31-dev-1650623081351"
|
||||
medusa-core-utils "1.1.31-dev-1655728455189"
|
||||
randomatic "^3.1.1"
|
||||
|
||||
merge-descriptors@1.0.1:
|
||||
@@ -5529,6 +5564,15 @@ node-releases@^2.0.3:
|
||||
resolved "http://localhost:4873/node-releases/-/node-releases-2.0.4.tgz#f38252370c43854dc48aa431c766c6c398f40476"
|
||||
integrity sha512-gbMzqQtTtDz/00jQzZ21PQzdI9PyLYqUSvD0p3naOhX4odFji0ZxYdnVwPTxmSwkmxhcFImpozceidSG+AgoPQ==
|
||||
|
||||
node-schedule@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "http://localhost:4873/node-schedule/-/node-schedule-2.1.0.tgz#068ae38d7351c330616f7fe7cdb05036f977cbaf"
|
||||
integrity sha512-nl4JTiZ7ZQDc97MmpTq9BQjYhq7gOtoh7SiPH069gBFBj0PzD8HI7zyFs6rzqL8Y5tTiEEYLxgtbx034YPrbyQ==
|
||||
dependencies:
|
||||
cron-parser "^3.5.0"
|
||||
long-timeout "0.1.1"
|
||||
sorted-array-functions "^1.3.0"
|
||||
|
||||
nopt@^5.0.0:
|
||||
version "5.0.0"
|
||||
resolved "http://localhost:4873/nopt/-/nopt-5.0.0.tgz#530942bb58a512fccafe53fe210f13a25355dc88"
|
||||
@@ -5800,6 +5844,11 @@ packet-reader@1.0.0:
|
||||
resolved "http://localhost:4873/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74"
|
||||
integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ==
|
||||
|
||||
papaparse@^5.3.2:
|
||||
version "5.3.2"
|
||||
resolved "http://localhost:4873/papaparse/-/papaparse-5.3.2.tgz#d1abed498a0ee299f103130a6109720404fbd467"
|
||||
integrity sha512-6dNZu0Ki+gyV0eBsFKJhYr+MdQYAzFUGlBMNj3GNrmHxmz1lfRa24CjFObPXtjcetlOv5Ad299MhIK0znp3afw==
|
||||
|
||||
parse-json@^5.0.0:
|
||||
version "5.2.0"
|
||||
resolved "http://localhost:4873/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd"
|
||||
@@ -6745,6 +6794,11 @@ socks@^2.6.2:
|
||||
ip "^1.1.5"
|
||||
smart-buffer "^4.2.0"
|
||||
|
||||
sorted-array-functions@^1.3.0:
|
||||
version "1.3.0"
|
||||
resolved "http://localhost:4873/sorted-array-functions/-/sorted-array-functions-1.3.0.tgz#8605695563294dffb2c9796d602bd8459f7a0dd5"
|
||||
integrity sha512-2sqgzeFlid6N4Z2fUQ1cvFmTOLRi/sEDzSQ0OKYchqgoPmQBVyM3959qYx3fpS6Esef80KjmpgPeEr028dP3OA==
|
||||
|
||||
source-map-resolve@^0.5.0:
|
||||
version "0.5.3"
|
||||
resolved "http://localhost:4873/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a"
|
||||
|
||||
@@ -2,7 +2,7 @@ const path = require("path")
|
||||
const { spawn } = require("child_process")
|
||||
const { setPort } = require("./use-api")
|
||||
|
||||
module.exports = ({ cwd, verbose }) => {
|
||||
module.exports = ({ cwd, redisUrl, uploadDir, verbose }) => {
|
||||
const serverPath = path.join(__dirname, "test-server.js")
|
||||
|
||||
return new Promise((resolve, reject) => {
|
||||
@@ -13,6 +13,8 @@ module.exports = ({ cwd, verbose }) => {
|
||||
NODE_ENV: "development",
|
||||
JWT_SECRET: "test",
|
||||
COOKIE_SECRET: "test",
|
||||
REDIS_URL: redisUrl, // If provided, will use a real instance, otherwise a fake instance
|
||||
UPLOAD_DIR: uploadDir // If provided, will be used for the fake local file service
|
||||
},
|
||||
stdio: verbose
|
||||
? ["inherit", "inherit", "inherit", "ipc"]
|
||||
|
||||
@@ -1,6 +1,7 @@
import { IsBoolean, IsObject, IsOptional, IsString } from "class-validator"
import BatchJobService from "../../../../services/batch-job"
import { validator } from "../../../../utils/validator"
import { BatchJob } from "../../../../models"

/**
* @oas [post] /batch-jobs
@@ -27,11 +28,16 @@ import { validator } from "../../../../utils/validator"
export default async (req, res) => {
const validated = await validator(AdminPostBatchesReq, req.body)

const batchJobService: BatchJobService = req.scope.resolve("batchJobService")
const toCreate = await batchJobService.prepareBatchJobForProcessing(
validated,
req
)

const userId = req.user.id ?? req.user.userId

const batchJobService: BatchJobService = req.scope.resolve("batchJobService")
const batch_job = await batchJobService.create({
...validated,
...toCreate,
created_by: userId,
})

@@ -43,7 +49,7 @@ export class AdminPostBatchesReq {
type: string

@IsObject()
context: Record<string, unknown>
context: BatchJob["context"]

@IsBoolean()
@IsOptional()
@@ -1,14 +1,18 @@
|
||||
import { TransactionBaseService } from "./transaction-base-service"
|
||||
import { BatchJobResultError, CreateBatchJobInput } from "../types/batch-job"
|
||||
import { ProductExportBatchJob } from "../strategies/batch-jobs/product"
|
||||
import { BatchJobService } from "../services"
|
||||
import { BatchJob } from "../models"
|
||||
|
||||
export interface IBatchJobStrategy<T extends TransactionBaseService<any>>
|
||||
export interface IBatchJobStrategy<T extends TransactionBaseService<never>>
|
||||
extends TransactionBaseService<T> {
|
||||
/**
|
||||
* Method for preparing a batch job for processing
|
||||
*/
|
||||
prepareBatchJobForProcessing(
|
||||
batchJobEntity: object,
|
||||
batchJobEntity: CreateBatchJobInput,
|
||||
req: Express.Request
|
||||
): Promise<object>
|
||||
): Promise<CreateBatchJobInput>
|
||||
|
||||
/**
|
||||
* Method for pre-processing a batch job
|
||||
@@ -23,34 +27,92 @@ export interface IBatchJobStrategy<T extends TransactionBaseService<any>>
|
||||
/**
|
||||
* Builds and returns a template file that can be downloaded and filled in
|
||||
*/
|
||||
buildTemplate()
|
||||
buildTemplate(): Promise<string>
|
||||
}
|
||||
|
||||
export abstract class AbstractBatchJobStrategy<
|
||||
T extends TransactionBaseService<any>
|
||||
T extends TransactionBaseService<never, TContainer>,
|
||||
TContainer = unknown
|
||||
>
|
||||
extends TransactionBaseService<T>
|
||||
extends TransactionBaseService<T, TContainer>
|
||||
implements IBatchJobStrategy<T>
|
||||
{
|
||||
static identifier: string
|
||||
static batchType: string
|
||||
|
||||
protected abstract batchJobService_: BatchJobService
|
||||
|
||||
async prepareBatchJobForProcessing(
|
||||
batchJob: object,
|
||||
batchJob: CreateBatchJobInput,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
req: Express.Request
|
||||
): Promise<object> {
|
||||
): Promise<CreateBatchJobInput> {
|
||||
return batchJob
|
||||
}
|
||||
|
||||
public abstract preProcessBatchJob(batchJobId: string): Promise<void>
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
public async preProcessBatchJob(batchJobId: string): Promise<void> {
|
||||
return
|
||||
}
|
||||
|
||||
public abstract processJob(batchJobId: string): Promise<void>
|
||||
|
||||
public abstract buildTemplate(): Promise<string>
|
||||
|
||||
protected async shouldRetryOnProcessingError(
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
batchJob: BatchJob,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
err: unknown
|
||||
): Promise<boolean> {
|
||||
return false
|
||||
}
|
||||
|
||||
protected async handleProcessingError<T>(
|
||||
batchJobId: string,
|
||||
err: unknown,
|
||||
result: T
|
||||
): Promise<void> {
|
||||
return await this.atomicPhase_(async (transactionManager) => {
|
||||
const batchJob = (await this.batchJobService_
|
||||
.withTransaction(transactionManager)
|
||||
.retrieve(batchJobId)) as ProductExportBatchJob
|
||||
|
||||
const shouldRetry = await this.shouldRetryOnProcessingError(batchJob, err)
|
||||
|
||||
const errMessage =
|
||||
(err as { message: string }).message ??
|
||||
`Something went wrong with the batchJob ${batchJob.id}`
|
||||
const errCode = (err as { code: string | number }).code ?? "unknown"
|
||||
const resultError = { message: errMessage, code: errCode, err }
|
||||
|
||||
if (shouldRetry) {
|
||||
const existingErrors =
|
||||
batchJob?.result?.errors ?? ([] as BatchJobResultError[])
|
||||
const retryCount = batchJob.context.retry_count ?? 0
|
||||
|
||||
await this.batchJobService_
|
||||
.withTransaction(transactionManager)
|
||||
.update(batchJobId, {
|
||||
context: {
|
||||
retry_count: retryCount + 1,
|
||||
},
|
||||
result: {
|
||||
...result,
|
||||
errors: [...existingErrors, resultError],
|
||||
},
|
||||
})
|
||||
} else {
|
||||
await this.batchJobService_
|
||||
.withTransaction(transactionManager)
|
||||
.setFailed(batchJob, resultError)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
export function isBatchJobStrategy(
|
||||
object: unknown
|
||||
): object is IBatchJobStrategy<any> {
|
||||
): object is IBatchJobStrategy<never> {
|
||||
return object instanceof AbstractBatchJobStrategy
|
||||
}
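With the abstract class above, a concrete strategy only needs to supply the static identifier and batchType, an injected batchJobService_, processJob and buildTemplate; prepareBatchJobForProcessing, preProcessBatchJob, shouldRetryOnProcessingError and handleProcessingError all come with usable defaults. A minimal sketch of a subclass (illustrative only, not the ProductExportStrategy added by this PR; import paths assume a file under packages/medusa/src/strategies):

import { EntityManager } from "typeorm"
import { AbstractBatchJobStrategy } from "../interfaces"
import { BatchJobService } from "../services"

type InjectedDependencies = {
  manager: EntityManager
  batchJobService: BatchJobService
}

class NoopBatchStrategy extends AbstractBatchJobStrategy<NoopBatchStrategy> {
  static identifier = "noop-strategy"
  static batchType = "noop"

  protected manager_: EntityManager
  protected transactionManager_: EntityManager | undefined
  protected batchJobService_: BatchJobService

  constructor({ manager, batchJobService }: InjectedDependencies) {
    super({ manager, batchJobService })
    this.manager_ = manager
    this.batchJobService_ = batchJobService
  }

  async processJob(batchJobId: string): Promise<void> {
    // no work to do: immediately mark the job as completed
    await this.batchJobService_
      .withTransaction(this.manager_)
      .complete(batchJobId)
  }

  async buildTemplate(): Promise<string> {
    return ""
  }
}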
|
||||
|
||||
@@ -204,8 +204,8 @@ describe("plugins loader", () => {
|
||||
})
|
||||
|
||||
it("registers price selection strategy", () => {
|
||||
const priceSelectionStrategy: (...args: unknown[]) => any =
|
||||
container.resolve("priceSelectionStrategy")
|
||||
const priceSelectionStrategy =
|
||||
container.resolve("priceSelectionStrategy") as (...args: unknown[]) => any
|
||||
|
||||
expect(priceSelectionStrategy).toBeTruthy()
|
||||
expect(priceSelectionStrategy.constructor.name).toBe(
|
||||
@@ -214,8 +214,8 @@ describe("plugins loader", () => {
|
||||
})
|
||||
|
||||
it("registers tax calculation strategy", () => {
|
||||
const taxCalculationStrategy: (...args: unknown[]) => any =
|
||||
container.resolve("taxCalculationStrategy")
|
||||
const taxCalculationStrategy =
|
||||
container.resolve("taxCalculationStrategy") as (...args: unknown[]) => any
|
||||
|
||||
expect(taxCalculationStrategy).toBeTruthy()
|
||||
expect(taxCalculationStrategy.constructor.name).toBe(
|
||||
@@ -224,8 +224,8 @@ describe("plugins loader", () => {
|
||||
})
|
||||
|
||||
it("registers batch job strategies as single array", () => {
|
||||
const batchJobStrategies: (...args: unknown[]) => any =
|
||||
container.resolve("batchJobStrategies")
|
||||
const batchJobStrategies =
|
||||
container.resolve("batchJobStrategies") as (...args: unknown[]) => any
|
||||
|
||||
expect(batchJobStrategies).toBeTruthy()
|
||||
expect(Array.isArray(batchJobStrategies)).toBeTruthy()
|
||||
@@ -233,8 +233,8 @@ describe("plugins loader", () => {
|
||||
})
|
||||
|
||||
it("registers batch job strategies by type and only keep the last", () => {
|
||||
const batchJobStrategy: (...args: unknown[]) => any =
|
||||
container.resolve("batchType_type-1")
|
||||
const batchJobStrategy =
|
||||
container.resolve("batchType_type-1") as (...args: unknown[]) => any
|
||||
|
||||
expect(batchJobStrategy).toBeTruthy()
|
||||
expect(batchJobStrategy.constructor.name).toBe("testBatch2BatchStrategy")
|
||||
@@ -245,9 +245,9 @@ describe("plugins loader", () => {
|
||||
})
|
||||
|
||||
it("registers batch job strategies by identifier", () => {
|
||||
const batchJobStrategy: (...args: unknown[]) => any = container.resolve(
|
||||
const batchJobStrategy = container.resolve(
|
||||
"batch_testBatch3-identifier"
|
||||
)
|
||||
) as (...args: unknown[]) => any
|
||||
|
||||
expect(batchJobStrategy).toBeTruthy()
|
||||
expect(Array.isArray(batchJobStrategy)).toBeFalsy()
|
||||
|
||||
@@ -159,7 +159,9 @@ export function registerStrategies(
pluginDetails: PluginDetails,
container: MedusaContainer
): void {
const files = glob.sync(`${pluginDetails.resolve}/strategies/[!__]*.js`, {})
const files = glob.sync(`${pluginDetails.resolve}/strategies/[!__]*.js`, {
ignore: ["**/__fixtures__/**", "**/index.js", "**/index.ts"],
})
const registeredServices = {}

files.map((file) => {
@@ -28,7 +28,7 @@ export default ({ container, configModule, isTest }: LoaderOptions): void => {
|
||||
|
||||
const core = glob.sync(coreFull, {
|
||||
cwd: __dirname,
|
||||
ignore: ["**/__fixtures__/**", "index.js", "index.ts"],
|
||||
ignore: ["**/__fixtures__/**", "**/index.js", "**/index.ts"],
|
||||
})
|
||||
|
||||
core.forEach((fn) => {
|
||||
|
||||
@@ -1,9 +1,10 @@
import { AfterLoad, BeforeInsert, Column, Entity, JoinColumn, ManyToOne } from "typeorm"
import { BatchJobStatus } from "../types/batch-job"
import { BatchJobResultError, BatchJobResultStatDescriptor, BatchJobStatus } from "../types/batch-job"
import { DbAwareColumn, resolveDbType } from "../utils/db-aware-column"
import { SoftDeletableEntity } from "../interfaces/models/soft-deletable-entity"
import { generateEntityId } from "../utils/generate-entity-id"
import { User } from "./user"
import { RequestQueryFields, Selector } from "../types/common"

@Entity()
export class BatchJob extends SoftDeletableEntity {
@@ -18,10 +19,17 @@ export class BatchJob extends SoftDeletableEntity {
created_by_user: User

@DbAwareColumn({ type: "jsonb", nullable: true })
context: { retry_count?: number; max_retry?: number } & Record<string, unknown>
context: Record<string, unknown>

@DbAwareColumn({ type: "jsonb", nullable: true })
result: Record<string, unknown>
result: {
count?: number
advancement_count?: number
progress?: number
errors?: BatchJobResultError[]
stat_descriptors?: BatchJobResultStatDescriptor[]
file_key?: string
} & Record<string, unknown>

@Column({ type: "boolean", nullable: false, default: false })
dry_run: boolean = false;
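Typing result this way lets a strategy report progress in a type-checked update while it processes; an illustrative call (the counter names mirror the typed fields above, the values are made up):

await this.batchJobService_
  .withTransaction(transactionManager)
  .update(batchJobId, {
    result: {
      advancement_count: processedCount, // variants written so far
      count: totalCount,                 // total variants to export
      progress: processedCount / totalCount,
    },
  })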
@@ -1,22 +1,26 @@
|
||||
import { DeepPartial, EntityManager } from "typeorm"
|
||||
import { EntityManager } from "typeorm"
|
||||
import { BatchJob } from "../models"
|
||||
import { BatchJobRepository } from "../repositories/batch-job"
|
||||
import {
|
||||
BatchJobCreateProps,
|
||||
BatchJobResultError,
|
||||
BatchJobStatus,
|
||||
BatchJobUpdateProps,
|
||||
CreateBatchJobInput,
|
||||
FilterableBatchJobProps,
|
||||
} from "../types/batch-job"
|
||||
import { FindConfig } from "../types/common"
|
||||
import { TransactionBaseService } from "../interfaces"
|
||||
import { AbstractBatchJobStrategy, TransactionBaseService } from "../interfaces"
|
||||
import { buildQuery } from "../utils"
|
||||
import { MedusaError } from "medusa-core-utils"
|
||||
import { EventBusService } from "./index"
|
||||
import { EventBusService, StrategyResolverService } from "./index"
|
||||
import { Request } from "express"
|
||||
|
||||
type InjectedDependencies = {
|
||||
manager: EntityManager
|
||||
eventBusService: EventBusService
|
||||
batchJobRepository: typeof BatchJobRepository
|
||||
strategyResolverService: StrategyResolverService
|
||||
}
|
||||
|
||||
class BatchJobService extends TransactionBaseService<BatchJobService> {
|
||||
@@ -31,10 +35,12 @@ class BatchJobService extends TransactionBaseService<BatchJobService> {
|
||||
FAILED: "batch.failed",
|
||||
}
|
||||
|
||||
protected readonly manager_: EntityManager
|
||||
protected manager_: EntityManager
|
||||
protected transactionManager_: EntityManager | undefined
|
||||
|
||||
protected readonly batchJobRepository_: typeof BatchJobRepository
|
||||
protected readonly eventBus_: EventBusService
|
||||
protected readonly strategyResolver_: StrategyResolverService
|
||||
|
||||
protected batchJobStatusMapToProps = new Map<
|
||||
BatchJobStatus,
|
||||
@@ -88,12 +94,19 @@ class BatchJobService extends TransactionBaseService<BatchJobService> {
|
||||
manager,
|
||||
batchJobRepository,
|
||||
eventBusService,
|
||||
strategyResolverService,
|
||||
}: InjectedDependencies) {
|
||||
super({ manager, batchJobRepository, eventBusService })
|
||||
super({
|
||||
manager,
|
||||
batchJobRepository,
|
||||
eventBusService,
|
||||
strategyResolverService,
|
||||
})
|
||||
|
||||
this.manager_ = manager
|
||||
this.batchJobRepository_ = batchJobRepository
|
||||
this.eventBus_ = eventBusService
|
||||
this.strategyResolver_ = strategyResolverService
|
||||
}
|
||||
|
||||
async retrieve(
|
||||
@@ -157,7 +170,7 @@ class BatchJobService extends TransactionBaseService<BatchJobService> {
|
||||
}
|
||||
|
||||
async update(
|
||||
batchJobId: string,
|
||||
batchJobOrId: BatchJob | string,
|
||||
data: BatchJobUpdateProps
|
||||
): Promise<BatchJob> {
|
||||
return await this.atomicPhase_(async (manager) => {
|
||||
@@ -165,13 +178,20 @@ class BatchJobService extends TransactionBaseService<BatchJobService> {
|
||||
this.batchJobRepository_
|
||||
)
|
||||
|
||||
let batchJob = await this.retrieve(batchJobId)
|
||||
let batchJob = batchJobOrId as BatchJob
|
||||
if (typeof batchJobOrId === "string") {
|
||||
batchJob = await this.retrieve(batchJobOrId)
|
||||
}
|
||||
|
||||
const { context, ...rest } = data
|
||||
const { context, result, ...rest } = data
|
||||
if (context) {
|
||||
batchJob.context = { ...batchJob.context, ...context }
|
||||
}
|
||||
|
||||
if (result) {
|
||||
batchJob.result = { ...batchJob.result, ...result }
|
||||
}
|
||||
|
||||
Object.keys(rest)
|
||||
.filter((key) => typeof rest[key] !== `undefined`)
|
||||
.forEach((key) => {
|
||||
@@ -218,7 +238,7 @@ class BatchJobService extends TransactionBaseService<BatchJobService> {
|
||||
batchJob = await batchJobRepo.save(batchJob)
|
||||
batchJob.loadStatus()
|
||||
|
||||
this.eventBus_.withTransaction(transactionManager).emit(eventType, {
|
||||
await this.eventBus_.withTransaction(transactionManager).emit(eventType, {
|
||||
id: batchJob.id,
|
||||
})
|
||||
|
||||
@@ -331,9 +351,41 @@ class BatchJobService extends TransactionBaseService<BatchJobService> {
|
||||
})
|
||||
}
|
||||
|
||||
async setFailed(batchJobOrId: string | BatchJob): Promise<BatchJob | never> {
|
||||
async setFailed(
|
||||
batchJobOrId: string | BatchJob,
|
||||
error?: BatchJobResultError
|
||||
): Promise<BatchJob | never> {
|
||||
return await this.atomicPhase_(async () => {
|
||||
return await this.updateStatus(batchJobOrId, BatchJobStatus.FAILED)
|
||||
let batchJob = batchJobOrId as BatchJob
|
||||
|
||||
if (error) {
|
||||
if (typeof batchJobOrId === "string") {
|
||||
batchJob = await this.retrieve(batchJobOrId)
|
||||
}
|
||||
|
||||
const result = batchJob.result ?? {}
|
||||
|
||||
await this.update(batchJob, {
|
||||
result: {
|
||||
...result,
|
||||
errors: [...(result?.errors ?? []), error],
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
return await this.updateStatus(batchJob, BatchJobStatus.FAILED)
|
||||
})
|
||||
}
|
||||
|
||||
async prepareBatchJobForProcessing(
|
||||
data: CreateBatchJobInput,
|
||||
req: Request
|
||||
): Promise<CreateBatchJobInput | never> {
|
||||
return await this.atomicPhase_(async () => {
|
||||
const batchStrategy = this.strategyResolver_.resolveBatchJobByType(
|
||||
data.type
|
||||
)
|
||||
return await batchStrategy.prepareBatchJobForProcessing(data, req)
|
||||
})
|
||||
}
|
||||
}
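End to end for a product-export job: the admin route above validates the body, asks BatchJobService.prepareBatchJobForProcessing to let the matching strategy enrich it, and persists the result. For the product-export strategy the enriched context is the list_config visible in the updated snapshot near the top of this diff; a hedged sketch of what the route effectively persists:

// Values mirror the updated list snapshot above; the strategy itself is
// responsible for filling in list_config, this only shows the hand-off.
const toCreate = await batchJobService.prepareBatchJobForProcessing(
  { type: "product-export", context: {} },
  req
)

const batchJob = await batchJobService.create({
  ...toCreate,
  created_by: req.user.id ?? req.user.userId,
})

// batchJob.context.list_config now carries the relations, order, skip and
// take the export strategy will use when listing products.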
|
||||
|
||||
@@ -41,3 +41,4 @@ export { default as TaxProviderService } from "./tax-provider"
|
||||
export { default as ProductTypeService } from "./product-type"
|
||||
export { default as PricingService } from "./pricing"
|
||||
export { default as BatchJobService } from "./batch-job"
|
||||
export { default as StrategyResolverService } from "./strategy-resolver"
|
||||
|
||||
38  packages/medusa/src/services/strategy-resolver.ts  Normal file
@@ -0,0 +1,38 @@
import { AbstractBatchJobStrategy, TransactionBaseService } from "../interfaces"
import { EntityManager } from "typeorm"
import { MedusaError } from "medusa-core-utils"

type InjectedDependencies = {
manager: EntityManager
[key: string]: unknown
}

export default class StrategyResolver extends TransactionBaseService<
StrategyResolver,
InjectedDependencies
> {
protected manager_: EntityManager
protected transactionManager_: EntityManager | undefined

constructor(container: InjectedDependencies) {
super(container)
this.manager_ = container.manager
}

resolveBatchJobByType<T extends TransactionBaseService<never>>(
type: string
): AbstractBatchJobStrategy<T> {
let resolved: AbstractBatchJobStrategy<T>
try {
resolved = this.container[
`batchType_${type}`
] as AbstractBatchJobStrategy<T>
} catch (e) {
throw new MedusaError(
MedusaError.Types.NOT_FOUND,
`Unable to find a BatchJob strategy with the type ${type}`
)
}
return resolved
}
}
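resolveBatchJobByType looks a strategy up under the container key batchType_<type>, the same key the plugins-loader tests above resolve directly ("batchType_type-1"). Illustrative use, assuming the resolver was injected as strategyResolverService:

// Pick the strategy responsible for the job's type and let it prepare the
// input before the job is persisted (mirrors BatchJobService above).
const strategy = strategyResolverService.resolveBatchJobByType("product-export")
const toCreate = await strategy.prepareBatchJobForProcessing(requestBody, req)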
@@ -0,0 +1,398 @@
|
||||
import { IdMap } from "medusa-test-utils"
|
||||
|
||||
const productIds = [
|
||||
"product-export-strategy-product-1",
|
||||
"product-export-strategy-product-2",
|
||||
]
|
||||
const variantIds = [
|
||||
"product-export-strategy-variant-1",
|
||||
"product-export-strategy-variant-2",
|
||||
"product-export-strategy-variant-3",
|
||||
]
|
||||
export const productsToExport = [
|
||||
{
|
||||
collection: {
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
handle: "test-collection1",
|
||||
id: IdMap.getId("product-export-collection_1"),
|
||||
metadata: null,
|
||||
title: "Test collection 1",
|
||||
updated_at: "randomString",
|
||||
},
|
||||
collection_id: IdMap.getId("product-export-collection_1"),
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
description: "test-product-description-1",
|
||||
discountable: true,
|
||||
external_id: null,
|
||||
handle: "test-product-product-1",
|
||||
height: null,
|
||||
hs_code: null,
|
||||
id: productIds[0],
|
||||
images: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-image_1"),
|
||||
metadata: null,
|
||||
updated_at: "randomString",
|
||||
url: "test-image.png",
|
||||
},
|
||||
],
|
||||
is_giftcard: false,
|
||||
length: null,
|
||||
material: null,
|
||||
metadata: null,
|
||||
mid_code: null,
|
||||
options: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-option_1"),
|
||||
metadata: null,
|
||||
product_id: productIds[0],
|
||||
title: "test-option-1",
|
||||
updated_at: "randomString",
|
||||
},
|
||||
{
|
||||
created_at: "randomString2",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-option_2"),
|
||||
metadata: null,
|
||||
product_id: productIds[0],
|
||||
title: "test-option-2",
|
||||
updated_at: "randomString2",
|
||||
},
|
||||
],
|
||||
origin_country: null,
|
||||
profile_id: IdMap.getId("product-export-profile_1"),
|
||||
profile: {
|
||||
id: IdMap.getId("product-export-profile_1"),
|
||||
name: "profile_1",
|
||||
type: "profile_type_1",
|
||||
},
|
||||
status: "draft",
|
||||
subtitle: null,
|
||||
tags: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-tag_1"),
|
||||
metadata: null,
|
||||
updated_at: "randomString",
|
||||
value: "123_1",
|
||||
},
|
||||
],
|
||||
thumbnail: null,
|
||||
title: "Test product",
|
||||
type: {
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-type_1"),
|
||||
metadata: null,
|
||||
updated_at: "randomString",
|
||||
value: "test-type-1",
|
||||
},
|
||||
type_id: IdMap.getId("product-export-type_1"),
|
||||
updated_at: "randomString",
|
||||
variants: [
|
||||
{
|
||||
allow_backorder: false,
|
||||
barcode: "test-barcode",
|
||||
calculated_price: null,
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
ean: "test-ean",
|
||||
height: null,
|
||||
hs_code: null,
|
||||
id: variantIds[0],
|
||||
inventory_quantity: 10,
|
||||
length: null,
|
||||
manage_inventory: true,
|
||||
material: null,
|
||||
metadata: null,
|
||||
mid_code: null,
|
||||
options: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-variant_option_1"),
|
||||
metadata: null,
|
||||
option_id: IdMap.getId("product-export-option_1"),
|
||||
updated_at: "randomString",
|
||||
value: "option 1 value 1",
|
||||
variant_id: variantIds[0],
|
||||
},
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-variant_option_2"),
|
||||
metadata: null,
|
||||
option_id: IdMap.getId("product-export-option_2"),
|
||||
updated_at: "randomString",
|
||||
value: "option 2 value 1",
|
||||
variant_id: variantIds[0],
|
||||
},
|
||||
],
|
||||
origin_country: null,
|
||||
original_price: null,
|
||||
prices: [
|
||||
{
|
||||
amount: 100,
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-price_1"),
|
||||
region_id: IdMap.getId("product-export-region_1"),
|
||||
max_quantity: null,
|
||||
min_quantity: null,
|
||||
price_list: null,
|
||||
price_list_id: null,
|
||||
region: {
|
||||
id: IdMap.getId("product-export-region_1"),
|
||||
currency_code: "usd",
|
||||
name: "france",
|
||||
},
|
||||
updated_at: "randomString",
|
||||
variant_id: variantIds[0],
|
||||
},
|
||||
{
|
||||
amount: 110,
|
||||
created_at: "randomString",
|
||||
currency_code: "usd",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-price_1"),
|
||||
region_id: null,
|
||||
max_quantity: null,
|
||||
min_quantity: null,
|
||||
price_list: null,
|
||||
price_list_id: null,
|
||||
updated_at: "randomString",
|
||||
variant_id: variantIds[0],
|
||||
},
|
||||
{
|
||||
amount: 130,
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-price_1"),
|
||||
region_id: IdMap.getId("product-export-region_1"),
|
||||
max_quantity: null,
|
||||
min_quantity: null,
|
||||
price_list: null,
|
||||
price_list_id: null,
|
||||
region: {
|
||||
id: IdMap.getId("product-export-region_3"),
|
||||
name: "denmark",
|
||||
currency_code: "dkk",
|
||||
},
|
||||
updated_at: "randomString",
|
||||
variant_id: variantIds[0],
|
||||
},
|
||||
],
|
||||
product_id: IdMap.getId("product-export-product_1"),
|
||||
sku: "test-sku",
|
||||
title: "Test variant",
|
||||
upc: "test-upc",
|
||||
updated_at: "randomString",
|
||||
weight: null,
|
||||
width: null,
|
||||
},
|
||||
],
|
||||
weight: null,
|
||||
width: null,
|
||||
},
|
||||
{
|
||||
collection: {
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
handle: "test-collection2",
|
||||
id: IdMap.getId("product-export-collection_2"),
|
||||
metadata: null,
|
||||
title: "Test collection",
|
||||
updated_at: "randomString",
|
||||
},
|
||||
collection_id: "test-collection",
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
description: "test-product-description",
|
||||
discountable: true,
|
||||
external_id: null,
|
||||
handle: "test-product-product-2",
|
||||
height: null,
|
||||
hs_code: null,
|
||||
id: productIds[1],
|
||||
images: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-image_2"),
|
||||
metadata: null,
|
||||
updated_at: "randomString",
|
||||
url: "test-image.png",
|
||||
},
|
||||
],
|
||||
is_giftcard: false,
|
||||
length: null,
|
||||
material: null,
|
||||
metadata: null,
|
||||
mid_code: null,
|
||||
options: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-option_2"),
|
||||
metadata: null,
|
||||
product_id: productIds[1],
|
||||
title: "test-option",
|
||||
updated_at: "randomString",
|
||||
},
|
||||
],
|
||||
origin_country: null,
|
||||
profile_id: IdMap.getId("product-export-profile_2"),
|
||||
profile: {
|
||||
id: IdMap.getId("product-export-profile_2"),
|
||||
name: "profile_2",
|
||||
type: "profile_type_2",
|
||||
},
|
||||
status: "draft",
|
||||
subtitle: null,
|
||||
tags: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-tag_2"),
|
||||
metadata: null,
|
||||
updated_at: "randomString",
|
||||
value: "123",
|
||||
},
|
||||
],
|
||||
thumbnail: null,
|
||||
title: "Test product",
|
||||
type: {
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-type_2"),
|
||||
metadata: null,
|
||||
updated_at: "randomString",
|
||||
value: "test-type",
|
||||
},
|
||||
type_id: "test-type",
|
||||
updated_at: "randomString",
|
||||
variants: [
|
||||
{
|
||||
allow_backorder: false,
|
||||
barcode: "test-barcode",
|
||||
calculated_price: null,
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
ean: "test-ean",
|
||||
height: null,
|
||||
hs_code: null,
|
||||
id: variantIds[1],
|
||||
inventory_quantity: 10,
|
||||
length: null,
|
||||
manage_inventory: true,
|
||||
material: null,
|
||||
metadata: null,
|
||||
mid_code: null,
|
||||
options: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-variant_option_2"),
|
||||
metadata: null,
|
||||
option_id: IdMap.getId("product-export-option_2"),
|
||||
updated_at: "randomString",
|
||||
value: "Option 1 value 1",
|
||||
variant_id: variantIds[1],
|
||||
},
|
||||
],
|
||||
origin_country: null,
|
||||
original_price: null,
|
||||
prices: [
|
||||
{
|
||||
amount: 110,
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-price_2"),
|
||||
max_quantity: null,
|
||||
min_quantity: null,
|
||||
price_list: null,
|
||||
price_list_id: null,
|
||||
region_id: IdMap.getId("product-export-region_2"),
|
||||
region: {
|
||||
id: IdMap.getId("product-export-region_2"),
|
||||
name: "Denmark",
|
||||
currency_code: "dkk",
|
||||
},
|
||||
updated_at: "randomString",
|
||||
variant_id: variantIds[1],
|
||||
},
|
||||
],
|
||||
product_id: IdMap.getId("product-export-product_2"),
|
||||
sku: "test-sku",
|
||||
title: "Test variant",
|
||||
upc: "test-upc",
|
||||
updated_at: "randomString",
|
||||
weight: null,
|
||||
width: null,
|
||||
},
|
||||
{
|
||||
allow_backorder: false,
|
||||
barcode: "test-barcode",
|
||||
calculated_price: null,
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
ean: "test-ean",
|
||||
height: null,
|
||||
hs_code: null,
|
||||
id: variantIds[2],
|
||||
inventory_quantity: 10,
|
||||
length: null,
|
||||
manage_inventory: true,
|
||||
material: null,
|
||||
metadata: null,
|
||||
mid_code: null,
|
||||
options: [
|
||||
{
|
||||
created_at: "randomString",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-variant_option_2"),
|
||||
metadata: null,
|
||||
option_id: IdMap.getId("product-export-option_2"),
|
||||
updated_at: "randomString",
|
||||
value: "Option 1 Value 1",
|
||||
variant_id: variantIds[2],
|
||||
},
|
||||
],
|
||||
origin_country: null,
|
||||
original_price: null,
|
||||
prices: [
|
||||
{
|
||||
amount: 120,
|
||||
created_at: "randomString",
|
||||
currency_code: "usd",
|
||||
deleted_at: null,
|
||||
id: IdMap.getId("product-export-price_2"),
|
||||
max_quantity: null,
|
||||
min_quantity: null,
|
||||
price_list: null,
|
||||
price_list_id: null,
|
||||
region_id: IdMap.getId("product-export-region_1"),
|
||||
updated_at: "randomString",
|
||||
variant_id: variantIds[2],
|
||||
},
|
||||
],
|
||||
product_id: productIds[1],
|
||||
sku: "test-sku",
|
||||
title: "Test variant",
|
||||
upc: "test-upc",
|
||||
updated_at: "randomString",
|
||||
weight: null,
|
||||
width: null,
|
||||
},
|
||||
],
|
||||
weight: null,
|
||||
width: null,
|
||||
},
|
||||
]
|
||||
@@ -0,0 +1,14 @@
|
||||
// Jest Snapshot v1, https://goo.gl/fbAQLP
|
||||
|
||||
exports[`Product export strategy should process the batch job and generate the appropriate output 1`] = `
|
||||
Array [
|
||||
"Product ID;Product Handle;Product Title;Product Subtitle;Product Description;Product Status;Product Thumbnail;Product Weight;Product Length;Product Width;Product Height;Product HS Code;Product Origin Country;Product MID Code;Product Material;Product Collection Title;Product Collection Handle;Product Type;Product Tags;Product Discountable;Product External ID;Product Profile Name;Product Profile Type;Variant ID;Variant Title;Variant SKU;Variant Barcode;Variant Inventory Quantity;Variant Allow backorder;Variant Manage inventory;Variant Weight;Variant Length;Variant Width;Variant Height;Variant HS Code;Variant Origin Country;Variant MID Code;Variant Material;Price france [USD];Price USD;Price denmark [DKK];Price Denmark [DKK];Option 1 Name;Option 1 Value;Option 2 Name;Option 2 Value;Image 1 Url
|
||||
",
|
||||
"product-export-strategy-product-1;test-product-product-1;Test product;;test-product-description-1;draft;;;;;;;;;;Test collection 1;test-collection1;test-type-1;123_1;true;;profile_1;profile_type_1;product-export-strategy-variant-1;Test variant;test-sku;test-barcode;10;false;true;;;;;;;;;100;110;130;;test-option-1;option 1 value 1;test-option-2;option 2 value 1;test-image.png
|
||||
",
|
||||
"product-export-strategy-product-2;test-product-product-2;Test product;;test-product-description;draft;;;;;;;;;;Test collection;test-collection2;test-type;123;true;;profile_2;profile_type_2;product-export-strategy-variant-2;Test variant;test-sku;test-barcode;10;false;true;;;;;;;;;;;;110;test-option;Option 1 value 1;;;test-image.png
|
||||
",
|
||||
"product-export-strategy-product-2;test-product-product-2;Test product;;test-product-description;draft;;;;;;;;;;Test collection;test-collection2;test-type;123;true;;profile_2;profile_type_2;product-export-strategy-variant-3;Test variant;test-sku;test-barcode;10;false;true;;;;;;;;;;120;;;test-option;Option 1 Value 1;;;test-image.png
|
||||
",
|
||||
]
|
||||
`;
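The header row in this snapshot is not fixed: the price columns are derived from the regions/currencies present on the exported variants (hence Price france [USD], Price USD, Price denmark [DKK] and Price Denmark [DKK]), and the option/image columns from the widest product. A sketch of that dynamic-column idea, assuming a first pass over the products; the helper name and shape are illustrative, not the shipped implementation:

// Scan the products once to find every distinct price column and the maximum
// number of option/image columns any product needs.
function buildDynamicColumns(products: Array<Record<string, any>>): string[] {
  const priceColumns = new Set<string>()
  let maxOptions = 0
  let maxImages = 0

  for (const product of products) {
    maxOptions = Math.max(maxOptions, product.options.length)
    maxImages = Math.max(maxImages, product.images.length)

    for (const variant of product.variants) {
      for (const price of variant.prices) {
        priceColumns.add(
          price.region
            ? `Price ${price.region.name} [${price.region.currency_code.toUpperCase()}]`
            : `Price ${price.currency_code.toUpperCase()}`
        )
      }
    }
  }

  const columns = [...priceColumns]
  for (let i = 1; i <= maxOptions; i++) {
    columns.push(`Option ${i} Name`, `Option ${i} Value`)
  }
  for (let i = 1; i <= maxImages; i++) {
    columns.push(`Image ${i} Url`)
  }
  return columns
}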
|
||||
@@ -0,0 +1,209 @@
|
||||
import ProductExportStrategy from "../../../batch-jobs/product/export"
|
||||
import { IdMap, MockManager } from "medusa-test-utils"
|
||||
import { User } from "../../../../models"
|
||||
import { BatchJobStatus } from "../../../../types/batch-job"
|
||||
import { productsToExport } from "../../../__fixtures__/product-export-data"
|
||||
import { AdminPostBatchesReq } from "../../../../api/routes/admin/batch/create-batch-job"
|
||||
import { defaultAdminProductRelations } from "../../../../api/routes/admin/products"
|
||||
import { ProductExportBatchJob } from "../../../batch-jobs/product"
|
||||
import { Request } from "express"
|
||||
|
||||
const outputDataStorage: string[] = []
|
||||
|
||||
let fakeJob = {
|
||||
id: IdMap.getId("product-export-job"),
|
||||
type: 'product-export',
|
||||
created_by: IdMap.getId("product-export-job-creator"),
|
||||
created_by_user: {} as User,
|
||||
context: {},
|
||||
result: {},
|
||||
dry_run: false,
|
||||
status: BatchJobStatus.PROCESSING as BatchJobStatus
|
||||
} as ProductExportBatchJob
|
||||
|
||||
const fileServiceMock = {
|
||||
delete: jest.fn(),
|
||||
getUploadStreamDescriptor: jest.fn().mockImplementation(() => {
|
||||
return Promise.resolve({
|
||||
writeStream: {
|
||||
write: (data: string) => {
|
||||
outputDataStorage.push(data)
|
||||
},
|
||||
end: () => void 0
|
||||
},
|
||||
promise: Promise.resolve(),
|
||||
url: 'product-export.csv'
|
||||
})
|
||||
}),
|
||||
withTransaction: function () {
|
||||
return this
|
||||
}
|
||||
}
|
||||
const batchJobServiceMock = {
|
||||
withTransaction: function () {
|
||||
return this
|
||||
},
|
||||
update: jest.fn().mockImplementation((job, data) => {
|
||||
fakeJob = {
|
||||
...fakeJob,
|
||||
...data,
|
||||
context: { ...fakeJob?.context, ...data?.context },
|
||||
result: { ...fakeJob?.result, ...data?.result }
|
||||
}
|
||||
return Promise.resolve(fakeJob)
|
||||
}),
|
||||
updateStatus: jest.fn().mockImplementation((status) => {
|
||||
fakeJob.status = status
|
||||
return Promise.resolve(fakeJob)
|
||||
}),
|
||||
complete: jest.fn().mockImplementation(() => {
|
||||
fakeJob.status = BatchJobStatus.COMPLETED
|
||||
return Promise.resolve(fakeJob)
|
||||
}),
|
||||
retrieve: jest.fn().mockImplementation(() => {
|
||||
return Promise.resolve(fakeJob)
|
||||
}),
|
||||
setFailed: jest.fn().mockImplementation((...args) => {
|
||||
console.error(...args)
|
||||
})
|
||||
}
|
||||
const productServiceMock = {
|
||||
withTransaction: function () {
|
||||
return this
|
||||
},
|
||||
list: jest.fn().mockImplementation(() => Promise.resolve(productsToExport)),
|
||||
count: jest.fn().mockImplementation(() => Promise.resolve(productsToExport.length)),
|
||||
listAndCount: jest.fn().mockImplementation(() => {
|
||||
return Promise.resolve([productsToExport, productsToExport.length])
|
||||
}),
|
||||
}
|
||||
const managerMock = MockManager
|
||||
|
||||
describe("Product export strategy", () => {
|
||||
const productExportStrategy = new ProductExportStrategy({
|
||||
manager: managerMock,
|
||||
fileService: fileServiceMock as any,
|
||||
batchJobService: batchJobServiceMock as any,
|
||||
productService: productServiceMock as any,
|
||||
})
|
||||
|
||||
it('should generate the appropriate template', async () => {
|
||||
await productExportStrategy.prepareBatchJobForProcessing(fakeJob, {} as Request)
|
||||
await productExportStrategy.preProcessBatchJob(fakeJob.id)
|
||||
const template = await productExportStrategy.buildHeader(fakeJob)
|
||||
expect(template).toMatch(/.*Product ID.*/)
|
||||
expect(template).toMatch(/.*Product Handle.*/)
|
||||
expect(template).toMatch(/.*Product Title.*/)
|
||||
expect(template).toMatch(/.*Product Subtitle.*/)
|
||||
expect(template).toMatch(/.*Product Description.*/)
|
||||
expect(template).toMatch(/.*Product Status.*/)
|
||||
expect(template).toMatch(/.*Product Thumbnail.*/)
|
||||
expect(template).toMatch(/.*Product Weight.*/)
|
||||
expect(template).toMatch(/.*Product Length.*/)
|
||||
expect(template).toMatch(/.*Product Width.*/)
|
||||
expect(template).toMatch(/.*Product Height.*/)
|
||||
expect(template).toMatch(/.*Product HS Code.*/)
|
||||
expect(template).toMatch(/.*Product Origin Country.*/)
|
||||
expect(template).toMatch(/.*Product MID Code.*/)
|
||||
expect(template).toMatch(/.*Product Material.*/)
|
||||
expect(template).toMatch(/.*Product Collection Title.*/)
|
||||
expect(template).toMatch(/.*Product Collection Handle.*/)
|
||||
expect(template).toMatch(/.*Product Type.*/)
|
||||
expect(template).toMatch(/.*Product Tags.*/)
|
||||
expect(template).toMatch(/.*Product Discountable.*/)
|
||||
expect(template).toMatch(/.*Product External ID.*/)
|
||||
expect(template).toMatch(/.*Product Profile Name.*/)
|
||||
expect(template).toMatch(/.*Product Profile Type.*/)
|
||||
|
||||
|
||||
expect(template).toMatch(/.*Variant ID.*/)
|
||||
expect(template).toMatch(/.*Variant Title.*/)
|
||||
expect(template).toMatch(/.*Variant SKU.*/)
|
||||
expect(template).toMatch(/.*Variant Barcode.*/)
|
||||
expect(template).toMatch(/.*Variant Allow backorder.*/)
|
||||
expect(template).toMatch(/.*Variant Manage inventory.*/)
|
||||
expect(template).toMatch(/.*Variant Weight.*/)
|
||||
expect(template).toMatch(/.*Variant Length.*/)
|
||||
expect(template).toMatch(/.*Variant Width.*/)
|
||||
expect(template).toMatch(/.*Variant Height.*/)
|
||||
expect(template).toMatch(/.*Variant HS Code.*/)
|
||||
expect(template).toMatch(/.*Variant Origin Country.*/)
|
||||
expect(template).toMatch(/.*Variant MID Code.*/)
|
||||
expect(template).toMatch(/.*Variant Material.*/)
|
||||
|
||||
expect(template).toMatch(/.*Option 1 Name.*/)
|
||||
expect(template).toMatch(/.*Option 1 Value.*/)
|
||||
expect(template).toMatch(/.*Option 2 Name.*/)
|
||||
expect(template).toMatch(/.*Option 2 Value.*/)
|
||||
|
||||
expect(template).toMatch(/.*Price USD.*/)
|
||||
expect(template).toMatch(/.*Price france \[USD\].*/)
|
||||
expect(template).toMatch(/.*Price denmark \[DKK\].*/)
|
||||
expect(template).toMatch(/.*Price Denmark \[DKK\].*/)
|
||||
|
||||
expect(template).toMatch(/.*Image 1 Url.*/)
|
||||
})
|
||||
|
||||
it('should process the batch job and generate the appropriate output', async () => {
|
||||
await productExportStrategy.prepareBatchJobForProcessing(fakeJob, {} as Request)
|
||||
await productExportStrategy.preProcessBatchJob(fakeJob.id)
|
||||
await productExportStrategy.processJob(fakeJob.id)
|
||||
expect(outputDataStorage).toMatchSnapshot()
|
||||
})
|
||||
|
||||
it('should prepare the job to be pre-processed', async () => {
|
||||
const fakeJob1: AdminPostBatchesReq = {
|
||||
type: 'product-export',
|
||||
context: {
|
||||
limit: 10,
|
||||
offset: 10,
|
||||
expand: "variants",
|
||||
fields: "title",
|
||||
order: "-title",
|
||||
filterable_fields: { title: "test" }
|
||||
},
|
||||
dry_run: false
|
||||
}
|
||||
|
||||
const output1 = await productExportStrategy.prepareBatchJobForProcessing(
|
||||
fakeJob1,
|
||||
{} as Express.Request
|
||||
)
|
||||
|
||||
expect(output1.context).toEqual(expect.objectContaining({
|
||||
list_config: {
|
||||
select: ["title", "created_at", "id"],
|
||||
order: { title: "DESC" },
|
||||
relations: ["variants"],
|
||||
skip: 10,
|
||||
take: 10,
|
||||
},
|
||||
filterable_fields: { title: "test" }
|
||||
}))
|
||||
|
||||
const fakeJob2: AdminPostBatchesReq = {
|
||||
type: 'product-export',
|
||||
context: {},
|
||||
dry_run: false
|
||||
}
|
||||
|
||||
const output2 = await productExportStrategy.prepareBatchJobForProcessing(
|
||||
fakeJob2,
|
||||
{} as Express.Request
|
||||
)
|
||||
|
||||
expect(output2.context).toEqual(expect.objectContaining({
|
||||
list_config: {
|
||||
select: undefined,
|
||||
order: { created_at: "DESC" },
|
||||
relations: [
|
||||
...defaultAdminProductRelations,
|
||||
"variants.prices.region"
|
||||
],
|
||||
skip: 0,
|
||||
take: 50,
|
||||
},
|
||||
filterable_fields: undefined
|
||||
}))
|
||||
})
|
||||
})
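For context, these unit tests drive the strategy directly; in a running server the same flow would normally start by creating a batch job of type "product-export" through the admin batch-job endpoint, with a body shaped like AdminPostBatchesReq above. A rough sketch follows; the route path, server URL and adminToken are assumptions for illustration, not defined in this diff.

// Sketch only: create a product-export batch job over the admin API. The
// payload mirrors AdminPostBatchesReq as used in fakeJob1 above; the URL and
// the adminToken variable are assumptions.
const response = await fetch("http://localhost:9000/admin/batch-jobs", {
  method: "POST",
  headers: {
    "Content-Type": "application/json",
    Authorization: `Bearer ${adminToken}`,
  },
  body: JSON.stringify({
    type: "product-export",
    context: { limit: 50, offset: 0, order: "-created_at" },
    dry_run: false,
  }),
})
const { batch_job } = await response.json()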
|
||||
423
packages/medusa/src/strategies/batch-jobs/product/export.ts
Normal file
@@ -0,0 +1,423 @@
|
||||
import { EntityManager } from "typeorm"
|
||||
import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces"
|
||||
import { Product, ProductVariant } from "../../../models"
|
||||
import { BatchJobService, ProductService } from "../../../services"
|
||||
import { BatchJobStatus, CreateBatchJobInput } from "../../../types/batch-job"
|
||||
import { defaultAdminProductRelations } from "../../../api/routes/admin/products"
|
||||
import { prepareListQuery } from "../../../utils/get-query-config"
|
||||
import {
|
||||
ProductExportBatchJob,
|
||||
ProductExportBatchJobContext,
|
||||
ProductExportColumnSchemaDescriptor,
|
||||
ProductExportPriceData,
|
||||
productExportSchemaDescriptors,
|
||||
} from "./index"
|
||||
import { FindProductConfig } from "../../../types/product"
|
||||
|
||||
type InjectedDependencies = {
|
||||
manager: EntityManager
|
||||
batchJobService: BatchJobService
|
||||
productService: ProductService
|
||||
fileService: IFileService<never>
|
||||
}
|
||||
|
||||
export default class ProductExportStrategy extends AbstractBatchJobStrategy<
|
||||
ProductExportStrategy,
|
||||
InjectedDependencies
|
||||
> {
|
||||
public static identifier = "product-export-strategy"
|
||||
public static batchType = "product-export"
|
||||
|
||||
protected manager_: EntityManager
|
||||
protected transactionManager_: EntityManager | undefined
|
||||
|
||||
protected readonly batchJobService_: BatchJobService
|
||||
protected readonly productService_: ProductService
|
||||
protected readonly fileService_: IFileService<never>
|
||||
|
||||
protected readonly defaultRelations_ = [
|
||||
...defaultAdminProductRelations,
|
||||
"variants.prices.region",
|
||||
]
|
||||
/*
 * The dynamic columns corresponding to the lowest level of relations are built later on.
 * Have a look at the buildHeader method, which takes care of appending the other
 * column descriptors to this map.
 */
|
||||
protected readonly columnDescriptors: Map<
|
||||
string,
|
||||
ProductExportColumnSchemaDescriptor
|
||||
> = productExportSchemaDescriptors
|
||||
|
||||
private readonly NEWLINE_ = "\r\n"
|
||||
private readonly DELIMITER_ = ";"
|
||||
private readonly DEFAULT_LIMIT = 50
|
||||
|
||||
constructor({
|
||||
manager,
|
||||
batchJobService,
|
||||
productService,
|
||||
fileService,
|
||||
}: InjectedDependencies) {
|
||||
super({
|
||||
manager,
|
||||
batchJobService,
|
||||
productService,
|
||||
fileService,
|
||||
})
|
||||
|
||||
this.manager_ = manager
|
||||
this.batchJobService_ = batchJobService
|
||||
this.productService_ = productService
|
||||
this.fileService_ = fileService
|
||||
}
|
||||
|
||||
async buildTemplate(): Promise<string> {
|
||||
return ""
|
||||
}
|
||||
|
||||
async preProcessBatchJob(batchJobId: string): Promise<void> {
|
||||
return await this.atomicPhase_(async (transactionManager) => {
|
||||
const batchJob = (await this.batchJobService_
|
||||
.withTransaction(transactionManager)
|
||||
.retrieve(batchJobId)) as ProductExportBatchJob
|
||||
|
||||
let offset = batchJob.context?.list_config?.skip ?? 0
|
||||
const limit = batchJob.context?.list_config?.take ?? this.DEFAULT_LIMIT
|
||||
|
||||
const { list_config = {}, filterable_fields = {} } = batchJob.context
|
||||
const [productList, count] = await this.productService_
|
||||
.withTransaction(transactionManager)
|
||||
.listAndCount(filterable_fields, {
|
||||
...(list_config ?? {}),
|
||||
take: Math.min(batchJob.context.batch_size ?? Infinity, limit),
|
||||
} as FindProductConfig)
|
||||
|
||||
const productCount = batchJob.context?.batch_size ?? count
|
||||
let products: Product[] = productList
|
||||
|
||||
let dynamicOptionColumnCount = 0
|
||||
let dynamicImageColumnCount = 0
|
||||
|
||||
const pricesData = new Set<string>()
|
||||
|
||||
while (offset < productCount) {
|
||||
if (!products?.length) {
|
||||
products = await this.productService_
|
||||
.withTransaction(transactionManager)
|
||||
.list(filterable_fields, {
|
||||
...list_config,
|
||||
skip: offset,
|
||||
take: Math.min(productCount - offset, limit),
|
||||
} as FindProductConfig)
|
||||
}
|
||||
|
||||
// Retrieve the highest count of each object to build the dynamic columns later
|
||||
for (const product of products) {
|
||||
const optionsCount = product?.options?.length ?? 0
|
||||
dynamicOptionColumnCount = Math.max(
|
||||
dynamicOptionColumnCount,
|
||||
optionsCount
|
||||
)
|
||||
|
||||
const imageCount = product?.images?.length ?? 0
|
||||
dynamicImageColumnCount = Math.max(
|
||||
dynamicImageColumnCount,
|
||||
imageCount
|
||||
)
|
||||
|
||||
for (const variant of product.variants) {
|
||||
if (variant.prices?.length) {
|
||||
variant.prices.forEach((price) => {
|
||||
pricesData.add(
|
||||
JSON.stringify({
|
||||
currency_code: price.currency_code,
|
||||
region: price.region
|
||||
? {
|
||||
currency_code: price.region.currency_code,
|
||||
name: price.region.name,
|
||||
id: price.region.id,
|
||||
}
|
||||
: null,
|
||||
})
|
||||
)
|
||||
})
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
offset += products.length
|
||||
products = []
|
||||
}
|
||||
|
||||
await this.batchJobService_
|
||||
.withTransaction(transactionManager)
|
||||
.update(batchJob, {
|
||||
context: {
|
||||
shape: {
|
||||
dynamicImageColumnCount,
|
||||
dynamicOptionColumnCount,
|
||||
prices: [...pricesData].map((stringifyData) =>
|
||||
JSON.parse(stringifyData)
|
||||
),
|
||||
},
|
||||
},
|
||||
result: {
|
||||
stat_descriptors: [
|
||||
{
|
||||
key: "product-export-count",
|
||||
name: "Product count to export",
|
||||
message: `There will be ${productCount} products exported by this action`,
|
||||
},
|
||||
],
|
||||
},
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
async prepareBatchJobForProcessing(
|
||||
batchJob: CreateBatchJobInput,
|
||||
// eslint-disable-next-line @typescript-eslint/no-unused-vars
|
||||
req: Express.Request
|
||||
): Promise<CreateBatchJobInput> {
|
||||
const {
|
||||
limit,
|
||||
offset,
|
||||
order,
|
||||
fields,
|
||||
expand,
|
||||
filterable_fields,
|
||||
...context
|
||||
} = (batchJob?.context ?? {}) as ProductExportBatchJobContext
|
||||
|
||||
const listConfig = prepareListQuery(
|
||||
{
|
||||
limit,
|
||||
offset,
|
||||
order,
|
||||
fields,
|
||||
expand,
|
||||
},
|
||||
{
|
||||
isList: true,
|
||||
defaultRelations: this.defaultRelations_,
|
||||
}
|
||||
)
|
||||
|
||||
batchJob.context = {
|
||||
...(context ?? {}),
|
||||
list_config: listConfig,
|
||||
filterable_fields,
|
||||
}
|
||||
|
||||
return batchJob
|
||||
}
|
||||
|
||||
async processJob(batchJobId: string): Promise<void> {
|
||||
let offset = 0
|
||||
let limit = this.DEFAULT_LIMIT
|
||||
let advancementCount = 0
|
||||
let productCount = 0
|
||||
|
||||
return await this.atomicPhase_(
|
||||
async (transactionManager) => {
|
||||
let batchJob = (await this.batchJobService_
|
||||
.withTransaction(transactionManager)
|
||||
.retrieve(batchJobId)) as ProductExportBatchJob
|
||||
|
||||
const { writeStream, fileKey, promise } = await this.fileService_
|
||||
.withTransaction(transactionManager)
|
||||
.getUploadStreamDescriptor({
|
||||
name: `product-export-${Date.now()}`,
|
||||
ext: "csv",
|
||||
})
|
||||
|
||||
const header = await this.buildHeader(batchJob)
|
||||
writeStream.write(header)
|
||||
|
||||
advancementCount =
|
||||
batchJob.result?.advancement_count ?? advancementCount
|
||||
offset = (batchJob.context?.list_config?.skip ?? 0) + advancementCount
|
||||
limit = batchJob.context?.list_config?.take ?? limit
|
||||
|
||||
const { list_config = {}, filterable_fields = {} } = batchJob.context
|
||||
const [productList, count] = await this.productService_
|
||||
.withTransaction(transactionManager)
|
||||
.listAndCount(filterable_fields, {
|
||||
...list_config,
|
||||
skip: offset,
|
||||
take: Math.min(batchJob.context.batch_size ?? Infinity, limit),
|
||||
} as FindProductConfig)
|
||||
|
||||
productCount = batchJob.context?.batch_size ?? count
|
||||
let products: Product[] = productList
|
||||
|
||||
while (offset < productCount) {
|
||||
if (!products?.length) {
|
||||
products = await this.productService_
|
||||
.withTransaction(transactionManager)
|
||||
.list(filterable_fields, {
|
||||
...list_config,
|
||||
skip: offset,
|
||||
take: Math.min(productCount - offset, limit),
|
||||
} as FindProductConfig)
|
||||
}
|
||||
|
||||
products.forEach((product: Product) => {
|
||||
const lines = this.buildProductVariantLines(product)
|
||||
lines.forEach((line) => writeStream.write(line))
|
||||
})
|
||||
|
||||
advancementCount += products.length
|
||||
offset += products.length
|
||||
products = []
|
||||
|
||||
batchJob = (await this.batchJobService_
|
||||
.withTransaction(transactionManager)
|
||||
.update(batchJobId, {
|
||||
result: {
|
||||
file_key: fileKey,
|
||||
count: productCount,
|
||||
advancement_count: advancementCount,
|
||||
progress: advancementCount / productCount,
|
||||
},
|
||||
})) as ProductExportBatchJob
|
||||
|
||||
if (batchJob.status === BatchJobStatus.CANCELED) {
|
||||
writeStream.end()
|
||||
|
||||
await this.fileService_
|
||||
.withTransaction(transactionManager)
|
||||
.delete({ key: fileKey })
|
||||
return
|
||||
}
|
||||
}
|
||||
|
||||
writeStream.end()
|
||||
|
||||
return await promise
|
||||
},
|
||||
"REPEATABLE READ",
|
||||
async (err) =>
|
||||
this.handleProcessingError(batchJobId, err, {
|
||||
count: productCount,
|
||||
advancement_count: advancementCount,
|
||||
progress: advancementCount / productCount,
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
public async buildHeader(batchJob: ProductExportBatchJob): Promise<string> {
|
||||
const {
|
||||
prices = [],
|
||||
dynamicImageColumnCount,
|
||||
dynamicOptionColumnCount,
|
||||
} = batchJob?.context?.shape ?? {}
|
||||
|
||||
this.appendMoneyAmountDescriptors(prices)
|
||||
this.appendOptionsDescriptors(dynamicOptionColumnCount)
|
||||
this.appendImagesDescriptors(dynamicImageColumnCount)
|
||||
|
||||
return (
|
||||
[...this.columnDescriptors.keys()].join(this.DELIMITER_) + this.NEWLINE_
|
||||
)
|
||||
}
|
||||
|
||||
private appendImagesDescriptors(maxImagesCount: number): void {
|
||||
for (let i = 0; i < maxImagesCount; ++i) {
|
||||
this.columnDescriptors.set(`Image ${i + 1} Url`, {
|
||||
accessor: (product: Product) => product?.images?.[i]?.url ?? "",
|
||||
entityName: "product",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private appendOptionsDescriptors(maxOptionsCount: number): void {
|
||||
for (let i = 0; i < maxOptionsCount; ++i) {
|
||||
this.columnDescriptors
|
||||
.set(`Option ${i + 1} Name`, {
|
||||
accessor: (product: Product) =>
product?.options?.[i]?.title ?? "",
|
||||
entityName: "product",
|
||||
})
|
||||
.set(`Option ${i + 1} Value`, {
|
||||
accessor: (variant: ProductVariant) =>
|
||||
variant?.options?.[i]?.value ?? "",
|
||||
entityName: "variant",
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
private appendMoneyAmountDescriptors(
|
||||
pricesData: ProductExportPriceData[]
|
||||
): void {
|
||||
for (const priceData of pricesData) {
|
||||
if (priceData.currency_code) {
|
||||
this.columnDescriptors.set(
|
||||
`Price ${priceData.currency_code?.toUpperCase()}`,
|
||||
{
|
||||
accessor: (variant: ProductVariant) => {
|
||||
const price = variant.prices.find((variantPrice) => {
|
||||
return (
|
||||
variantPrice.currency_code &&
|
||||
priceData.currency_code &&
|
||||
variantPrice.currency_code.toLowerCase() ===
|
||||
priceData.currency_code.toLowerCase()
|
||||
)
|
||||
})
|
||||
return price?.amount?.toString() ?? ""
|
||||
},
|
||||
entityName: "variant",
|
||||
}
|
||||
)
|
||||
}
|
||||
|
||||
if (priceData.region) {
|
||||
this.columnDescriptors.set(
|
||||
`Price ${priceData.region.name} ${
|
||||
priceData.region?.currency_code
|
||||
? "[" + priceData.region?.currency_code.toUpperCase() + "]"
|
||||
: ""
|
||||
}`,
|
||||
{
|
||||
accessor: (variant: ProductVariant) => {
|
||||
const price = variant.prices.find((variantPrice) => {
|
||||
return (
|
||||
variantPrice.region &&
|
||||
priceData.region &&
|
||||
variantPrice.region?.name?.toLowerCase() ===
|
||||
priceData.region?.name?.toLowerCase() &&
|
||||
variantPrice.region?.id?.toLowerCase() ===
|
||||
priceData.region?.id?.toLowerCase()
|
||||
)
|
||||
})
|
||||
return price?.amount?.toString() ?? ""
|
||||
},
|
||||
entityName: "variant",
|
||||
}
|
||||
)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
private buildProductVariantLines(product: Product): string[] {
|
||||
const outputLineData: string[] = []
|
||||
|
||||
for (const variant of product.variants) {
|
||||
const variantLineData: string[] = []
|
||||
for (const [, columnSchema] of this.columnDescriptors.entries()) {
|
||||
if (columnSchema.entityName === "product") {
|
||||
variantLineData.push(columnSchema.accessor(product))
|
||||
}
|
||||
if (columnSchema.entityName === "variant") {
|
||||
variantLineData.push(columnSchema.accessor(variant))
|
||||
}
|
||||
}
|
||||
outputLineData.push(variantLineData.join(this.DELIMITER_) + this.NEWLINE_)
|
||||
}
|
||||
|
||||
return outputLineData
|
||||
}
|
||||
}
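Taken together, the strategy is meant to be driven in three steps, which the subscriber further down wires to batch-job events. A condensed sketch of that life cycle is below; resolution through the strategy resolver is shown as it is used in the subscriber, while the surrounding variables (the resolver instance, the create input, the request and the job) are assumptions for illustration.

// Condensed life-cycle sketch; see BatchJobSubscriber below for the real wiring.
const strategy = strategyResolverService.resolveBatchJobByType("product-export")

// 1. Normalise the request context into list_config / filterable_fields.
const preparedInput = await strategy.prepareBatchJobForProcessing(createBatchJobInput, req)

// 2. Count the products and store the CSV "shape" (prices, option/image columns).
await strategy.preProcessBatchJob(batchJob.id)

// 3. Stream the header plus one line per variant to the file service.
await strategy.processJob(batchJob.id)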
|
||||
336
packages/medusa/src/strategies/batch-jobs/product/index.ts
Normal file
@@ -0,0 +1,336 @@
|
||||
import { BatchJob, Product, ProductVariant } from "../../../models"
|
||||
import { Selector } from "../../../types/common"
|
||||
|
||||
export type ProductExportBatchJobContext = {
|
||||
retry_count?: number
|
||||
max_retry?: number
|
||||
offset?: number
|
||||
limit?: number
|
||||
batch_size?: number
|
||||
order?: string
|
||||
fields?: string
|
||||
expand?: string
|
||||
shape: {
|
||||
prices: ProductExportPriceData[]
|
||||
dynamicOptionColumnCount: number
|
||||
dynamicImageColumnCount: number
|
||||
}
|
||||
list_config?: {
|
||||
select?: string[]
|
||||
relations?: string[]
|
||||
skip?: number
|
||||
take?: number
|
||||
order?: Record<string, "ASC" | "DESC">
|
||||
}
|
||||
filterable_fields?: Selector<unknown>
|
||||
}
|
||||
|
||||
export type ProductExportPriceData = {
|
||||
currency_code?: string
|
||||
region?: { name: string; currency_code: string; id: string }
|
||||
}
|
||||
|
||||
export type ProductExportBatchJob = BatchJob & {
|
||||
context: ProductExportBatchJobContext
|
||||
}
|
||||
|
||||
export type ProductExportColumnSchemaEntity = "product" | "variant"
|
||||
|
||||
export type ProductExportColumnSchemaDescriptor =
|
||||
| {
|
||||
accessor: (product: Product) => string
|
||||
entityName: Extract<ProductExportColumnSchemaEntity, "product">
|
||||
}
|
||||
| {
|
||||
accessor: (variant: ProductVariant) => string
|
||||
entityName: Extract<ProductExportColumnSchemaEntity, "variant">
|
||||
}
|
||||
|
||||
export const productExportSchemaDescriptors = new Map<
|
||||
string,
|
||||
ProductExportColumnSchemaDescriptor
|
||||
>([
|
||||
[
|
||||
"Product ID",
|
||||
{
|
||||
accessor: (product: Product): string => product?.id ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Handle",
|
||||
{
|
||||
accessor: (product: Product): string => product?.handle ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Title",
|
||||
{
|
||||
accessor: (product: Product): string => product?.title ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Subtitle",
|
||||
{
|
||||
accessor: (product: Product): string => product?.subtitle ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Description",
|
||||
{
|
||||
accessor: (product: Product): string => product?.description ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Status",
|
||||
{
|
||||
accessor: (product: Product): string => product?.status ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Thumbnail",
|
||||
{
|
||||
accessor: (product: Product): string => product?.thumbnail ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Weight",
|
||||
{
|
||||
accessor: (product: Product): string => product?.weight?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Length",
|
||||
{
|
||||
accessor: (product: Product): string => product?.length?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Width",
|
||||
{
|
||||
accessor: (product: Product): string => product?.width?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Height",
|
||||
{
|
||||
accessor: (product: Product): string => product?.height?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product HS Code",
|
||||
{
|
||||
accessor: (product: Product): string =>
|
||||
product?.hs_code?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Origin Country",
|
||||
{
|
||||
accessor: (product: Product): string =>
|
||||
product?.origin_country?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product MID Code",
|
||||
{
|
||||
accessor: (product: Product): string =>
|
||||
product?.mid_code?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Material",
|
||||
{
|
||||
accessor: (product: Product): string =>
|
||||
product?.material?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Collection Title",
|
||||
{
|
||||
accessor: (product: Product): string => product?.collection?.title ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Collection Handle",
|
||||
{
|
||||
accessor: (product: Product): string => product?.collection?.handle ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Type",
|
||||
{
|
||||
accessor: (product: Product): string => product?.type?.value ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Tags",
|
||||
{
|
||||
accessor: (product: Product): string =>
|
||||
(product?.tags?.map((t) => t.value) ?? []).join(","),
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Discountable",
|
||||
{
|
||||
accessor: (product: Product): string =>
|
||||
product?.discountable?.toString() ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product External ID",
|
||||
{
|
||||
accessor: (product: Product): string => product?.external_id ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Profile Name",
|
||||
{
|
||||
accessor: (product: Product): string => product?.profile?.name ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Product Profile Type",
|
||||
{
|
||||
accessor: (product: Product): string => product?.profile?.type ?? "",
|
||||
entityName: "product",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant ID",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string => variant?.id ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Title",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string => variant?.title ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant SKU",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string => variant?.sku ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Barcode",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string => variant?.barcode ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Inventory Quantity",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.inventory_quantity?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Allow backorder",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.allow_backorder?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Manage inventory",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.manage_inventory?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Weight",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.weight?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Length",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.length?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Width",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.width?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Height",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.height?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant HS Code",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.hs_code?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Origin Country",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.origin_country?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant MID Code",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.mid_code?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
[
|
||||
"Variant Material",
|
||||
{
|
||||
accessor: (variant: ProductVariant): string =>
|
||||
variant?.material?.toString() ?? "",
|
||||
entityName: "variant",
|
||||
},
|
||||
],
|
||||
])
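Because productExportSchemaDescriptors is an ordinary Map keyed by column title, further static columns can be registered with the same descriptor shape. The entry below only illustrates that extension point and is not part of this commit.

// Illustrative extension only: register an extra product-level column.
productExportSchemaDescriptors.set("Product Created At", {
  accessor: (product: Product): string =>
    product?.created_at?.toISOString() ?? "",
  entityName: "product",
})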
|
||||
57
packages/medusa/src/subscribers/batch-job.ts
Normal file
@@ -0,0 +1,57 @@
|
||||
import BatchJobService from "../services/batch-job"
|
||||
import EventBusService from "../services/event-bus"
|
||||
import { StrategyResolverService } from "../services"
|
||||
|
||||
type InjectedDependencies = {
|
||||
eventBusService: EventBusService
|
||||
batchJobService: BatchJobService
|
||||
strategyResolverService: StrategyResolverService
|
||||
}
|
||||
|
||||
class BatchJobSubscriber {
|
||||
private readonly eventBusService_: EventBusService
|
||||
private readonly batchJobService_: BatchJobService
|
||||
private readonly strategyResolver_: StrategyResolverService
|
||||
|
||||
constructor({
|
||||
eventBusService,
|
||||
batchJobService,
|
||||
strategyResolverService,
|
||||
}: InjectedDependencies) {
|
||||
this.eventBusService_ = eventBusService
|
||||
this.batchJobService_ = batchJobService
|
||||
this.strategyResolver_ = strategyResolverService
|
||||
|
||||
this.eventBusService_
|
||||
.subscribe(BatchJobService.Events.CREATED, this.preProcessBatchJob)
|
||||
.subscribe(BatchJobService.Events.CONFIRMED, this.processBatchJob)
|
||||
}
|
||||
|
||||
preProcessBatchJob = async (data): Promise<void> => {
|
||||
const batchJob = await this.batchJobService_.retrieve(data.id)
|
||||
|
||||
const batchJobStrategy = this.strategyResolver_.resolveBatchJobByType(
|
||||
batchJob.type
|
||||
)
|
||||
|
||||
await batchJobStrategy.preProcessBatchJob(batchJob.id)
|
||||
|
||||
await this.batchJobService_.setPreProcessingDone(batchJob.id)
|
||||
}
|
||||
|
||||
processBatchJob = async (data): Promise<void> => {
|
||||
const batchJob = await this.batchJobService_.retrieve(data.id)
|
||||
|
||||
const batchJobStrategy = this.strategyResolver_.resolveBatchJobByType(
|
||||
batchJob.type
|
||||
)
|
||||
|
||||
await this.batchJobService_.setProcessing(batchJob.id)
|
||||
|
||||
await batchJobStrategy.processJob(batchJob.id)
|
||||
|
||||
await this.batchJobService_.complete(batchJob.id)
|
||||
}
|
||||
}
|
||||
|
||||
export default BatchJobSubscriber
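The subscriber only reacts; something still has to emit the two batch-job events with a payload carrying the job id (the handlers above read data.id). A minimal sketch of that trigger, assuming the event bus exposes an emit(eventName, data) method and that in practice the batch job service would emit these itself when a job is created or confirmed:

// Sketch only: emitting the events this subscriber listens for.
await eventBusService.emit(BatchJobService.Events.CREATED, { id: batchJob.id })
await eventBusService.emit(BatchJobService.Events.CONFIRMED, { id: batchJob.id })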
|
||||
@@ -22,6 +22,24 @@ export enum BatchJobStatus {
|
||||
|
||||
export type BatchJobUpdateProps = Partial<Pick<BatchJob, "context" | "result">>
|
||||
|
||||
export type CreateBatchJobInput = {
|
||||
type: string
|
||||
context: BatchJob["context"]
|
||||
dry_run: boolean
|
||||
}
|
||||
|
||||
export type BatchJobResultError = {
|
||||
message: string
|
||||
code: string | number
|
||||
[key: string]: unknown
|
||||
}
|
||||
|
||||
export type BatchJobResultStatDescriptor = {
|
||||
key: string
|
||||
name: string
|
||||
message: string
|
||||
}
|
||||
|
||||
export class FilterableBatchJobProps {
|
||||
@IsOptional()
|
||||
@IsType([String, [String]])
|
||||
|
||||
Reference in New Issue
Block a user