feat(medusa): Add batch strategy for order exports (#1603)
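Adds an `order-export` batch job strategy that streams orders to a semicolon-delimited CSV through the configured file service, registers confirm/cancel routes behind an ownership check, reworks the test-only local file service around upload stream descriptors, and isolates Redis state per Jest worker in the integration tests.

A rough usage sketch, pieced together from the integration tests below (the `api` client and the token are illustrative, not part of this commit):

    // Kick off an export, then poll until the job settles.
    const { data } = await api.post(
      "/admin/batch-jobs",
      { type: "order-export", context: {} }, // context may carry batch_size or filterable_fields
      { headers: { Authorization: "Bearer <admin-token>" } }
    )

    let job = data.batch_job
    while (job.status !== "completed" && job.status !== "failed") {
      await new Promise((r) => setTimeout(r, 1000))
      job = (await api.get(`/admin/batch-jobs/${job.id}`)).data.batch_job
    }

    // On success, job.result.file_key names the generated CSV file.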
.github/workflows/action.yml (vendored, 6 lines changed)
@@ -1,9 +1,9 @@
 name: Medusa Pipeline
 on:
   pull_request:
     paths-ignore:
-      - 'docs/**'
-      - 'www/**'
+      - "docs/**"
+      - "www/**"

 jobs:
   unit-tests:

@@ -63,7 +63,7 @@ describe("/admin/batch-jobs", () => {
   beforeAll(async () => {
     const cwd = path.resolve(path.join(__dirname, "..", ".."))
     dbConnection = await initDb({ cwd })
-    medusaProcess = await setupServer({ cwd, verbose: false })
+    medusaProcess = await setupServer({ cwd })
   })

   afterAll(async () => {
@@ -131,19 +131,19 @@ describe("/admin/batch-jobs", () => {
           id: "job_3",
           created_at: expect.any(String),
           updated_at: expect.any(String),
-          created_by: "admin_user"
+          created_by: "admin_user",
         },
         {
           id: "job_2",
           created_at: expect.any(String),
           updated_at: expect.any(String),
-          created_by: "admin_user"
+          created_by: "admin_user",
         },
         {
           id: "job_1",
           created_at: expect.any(String),
           updated_at: expect.any(String),
-          created_by: "admin_user"
+          created_by: "admin_user",
         },
       ],
     })
@@ -165,23 +165,24 @@ describe("/admin/batch-jobs", () => {
       const response = await api.get("/admin/batch-jobs/job_1", adminReqConfig)

       expect(response.status).toEqual(200)
-      expect(response.data.batch_job).toEqual(expect.objectContaining({
-        created_at: expect.any(String),
-        updated_at: expect.any(String),
-        created_by: "admin_user"
-      }))
+      expect(response.data.batch_job).toEqual(
+        expect.objectContaining({
+          created_at: expect.any(String),
+          updated_at: expect.any(String),
+          created_by: "admin_user",
+        })
+      )
     })

     it("should fail on batch job created by other user", async () => {
       const api = useApi()
-      await api.get("/admin/batch-jobs/job_4", adminReqConfig)
-        .catch((err) => {
-          expect(err.response.status).toEqual(400)
-          expect(err.response.data.type).toEqual("not_allowed")
-          expect(err.response.data.message).toEqual(
-            "Cannot access a batch job that does not belong to the logged in user"
-          )
-        })
+      await api.get("/admin/batch-jobs/job_4", adminReqConfig).catch((err) => {
+        expect(err.response.status).toEqual(400)
+        expect(err.response.data.type).toEqual("not_allowed")
+        expect(err.response.data.message).toEqual(
+          "Cannot access a batch job that does not belong to the logged in user"
+        )
+      })
     })
   })
 })

@@ -195,7 +196,7 @@ describe("/admin/batch-jobs", () => {
       await db.teardown()
     })

-    it("Creates a batch job", async() => {
+    it("Creates a batch job", async () => {
       const api = useApi()

       const response = await api.post(
@@ -261,7 +262,7 @@ describe("/admin/batch-jobs", () => {
       }
     })

-    afterEach(async() => {
+    afterEach(async () => {
       const db = useDb()
       await db.teardown()
     })

integration-tests/api/__tests__/batch-jobs/order/export.js (new file, 251 lines)
@@ -0,0 +1,251 @@
const path = require("path")
const fs = require("fs/promises")
import { sep, resolve } from "path"

const setupServer = require("../../../../helpers/setup-server")
const { useApi } = require("../../../../helpers/use-api")
const { initDb, useDb } = require("../../../../helpers/use-db")

const adminSeeder = require("../../../helpers/admin-seeder")
const userSeeder = require("../../../helpers/user-seeder")
const orderSeeder = require("../../../helpers/order-seeder")

const adminReqConfig = {
  headers: {
    Authorization: "Bearer test_token",
  },
}

jest.setTimeout(1000000)

describe("Batchjob with type order-export", () => {
  let medusaProcess
  let dbConnection
  let exportFilePath = ""
  let topDir = ""

  beforeAll(async () => {
    const cwd = path.resolve(path.join(__dirname, "..", "..", ".."))
    dbConnection = await initDb({ cwd })
    medusaProcess = await setupServer({
      cwd,
      redisUrl: "redis://127.0.0.1:6379",
      uploadDir: __dirname,
      verbose: false,
    })
  })

  afterAll(async () => {
    if (topDir !== "") {
      await fs.rm(resolve(__dirname, topDir), { recursive: true })
    }

    const db = useDb()
    await db.shutdown()

    medusaProcess.kill()
  })

  beforeEach(async () => {
    try {
      await adminSeeder(dbConnection)
      await userSeeder(dbConnection)
      await orderSeeder(dbConnection)
    } catch (e) {
      console.log(e)
      throw e
    }
  })

  afterEach(async () => {
    const db = useDb()
    await db.teardown()

    const isFileExists = (await fs.stat(exportFilePath))?.isFile()

    if (isFileExists) {
      const [, relativeRoot] = exportFilePath.replace(__dirname, "").split(sep)

      if ((await fs.stat(resolve(__dirname, relativeRoot)))?.isDirectory()) {
        topDir = relativeRoot
      }

      await fs.unlink(exportFilePath)
    }
  })

  it("Should export a file containing all orders", async () => {
    jest.setTimeout(1000000)
    const api = useApi()

    const batchPayload = {
      type: "order-export",
      context: {},
    }

    const batchJobRes = await api.post(
      "/admin/batch-jobs",
      batchPayload,
      adminReqConfig
    )
    const batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Pull to check the status until it is completed
    let batchJob
    let shouldContinuePulling = true

    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      batchJob = res.data.batch_job
      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )

      if (shouldContinuePulling) {
        await new Promise((resolve, _) => {
          setTimeout(resolve, 1000)
        })
      }
    }

    expect(batchJob.status).toBe("completed")

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

    expect(isFileExists).toBeTruthy()

    const fileSize = (await fs.stat(exportFilePath)).size
    expect(batchJob.result?.file_size).toBe(fileSize)

    const data = (await fs.readFile(exportFilePath)).toString()
    const [, ...lines] = data.split("\r\n").filter((l) => l)

    expect(lines.length).toBe(6)

    const csvLine = lines[0].split(";")

    expect(csvLine[0]).toBe("discount-order")
    expect(csvLine[1]).toBe("6")
    expect(csvLine[14]).toBe("fulfilled")
    expect(csvLine[15]).toBe("captured")
    expect(csvLine[16]).toBe("8000")
  })

  it("Should export a file containing a limited number of orders", async () => {
    jest.setTimeout(1000000)
    const api = useApi()

    const batchPayload = {
      type: "order-export",
      context: { batch_size: 3 },
    }

    const batchJobRes = await api.post(
      "/admin/batch-jobs",
      batchPayload,
      adminReqConfig
    )
    const batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Pull to check the status until it is completed
    let batchJob
    let shouldContinuePulling = true

    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      batchJob = res.data.batch_job
      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )

      if (shouldContinuePulling) {
        await new Promise((resolve, _) => {
          setTimeout(resolve, 1000)
        })
      }
    }

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

    expect(isFileExists).toBeTruthy()

    const data = (await fs.readFile(exportFilePath)).toString()
    const [, ...lines] = data.split("\r\n").filter((l) => l)

    expect(lines.length).toBe(3)
  })

  it("Should export a file with orders from a single customer", async () => {
    jest.setTimeout(1000000)
    const api = useApi()

    const batchPayload = {
      type: "order-export",
      context: { filterable_fields: { email: "test@email.com" } },
    }

    const batchJobRes = await api.post(
      "/admin/batch-jobs",
      batchPayload,
      adminReqConfig
    )
    const batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Pull to check the status until it is completed
    let batchJob
    let shouldContinuePulling = true

    while (shouldContinuePulling) {
      const res = await api.get(
        `/admin/batch-jobs/${batchJobId}`,
        adminReqConfig
      )

      batchJob = res.data.batch_job
      shouldContinuePulling = !(
        batchJob.status === "completed" || batchJob.status === "failed"
      )

      if (shouldContinuePulling) {
        await new Promise((resolve, _) => {
          setTimeout(resolve, 1000)
        })
      }
    }

    expect(batchJob.status).toBe("completed")

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

    expect(isFileExists).toBeTruthy()

    const data = (await fs.readFile(exportFilePath)).toString()
    const [, ...lines] = data.split("\r\n").filter((l) => l)

    expect(lines.length).toBe(1)

    const csvLine = lines[0].split(";")

    expect(csvLine[0]).toBe("test-order")
    expect(csvLine[6]).toBe("test@email.com")
  })
})

@@ -1,5 +1,6 @@
 const path = require("path")
-const fs = require('fs/promises')
+const fs = require("fs/promises")
+import { sep, resolve } from "path"

 const setupServer = require("../../../../helpers/setup-server")
 const { useApi } = require("../../../../helpers/use-api")
@@ -20,6 +21,8 @@ jest.setTimeout(1000000)
 describe("Batch job of product-export type", () => {
   let medusaProcess
   let dbConnection
+  let exportFilePath = ""
+  let topDir = ""

   beforeAll(async () => {
     const cwd = path.resolve(path.join(__dirname, "..", "..", ".."))
@@ -28,13 +31,15 @@ describe("Batch job of product-export type", () => {
       cwd,
       redisUrl: "redis://127.0.0.1:6379",
       uploadDir: __dirname,
-      verbose: false
+      verbose: false,
     })
   })

-  let exportFilePath = ""
-
   afterAll(async () => {
+    if (topDir !== "") {
+      await fs.rm(resolve(__dirname, topDir), { recursive: true })
+    }
+
     const db = useDb()
     await db.shutdown()
@@ -52,17 +57,24 @@ describe("Batch job of product-export type", () => {
     }
   })

-  afterEach(async() => {
+  afterEach(async () => {
     const db = useDb()
     await db.teardown()
+
+    const isFileExists = (await fs.stat(exportFilePath))?.isFile()
+
+    if (isFileExists) {
+      const [, relativeRoot] = exportFilePath.replace(__dirname, "").split(sep)
+
+      if ((await fs.stat(resolve(__dirname, relativeRoot)))?.isDirectory()) {
+        topDir = relativeRoot
+      }
+
+      await fs.unlink(exportFilePath)
+    }
   })

-  it('should export a csv file containing the expected products', async () => {
+  it("should export a csv file containing the expected products", async () => {
     jest.setTimeout(1000000)
     const api = useApi()

@@ -97,37 +109,52 @@ describe("Batch job of product-export type", () => {
       },
     ],
   }
-    const createProductRes =
-      await api.post("/admin/products", productPayload, adminReqConfig)
+    const createProductRes = await api.post(
+      "/admin/products",
+      productPayload,
+      adminReqConfig
+    )
    const productId = createProductRes.data.product.id
    const variantId = createProductRes.data.product.variants[0].id

    const batchPayload = {
      type: "product-export",
      context: {
-        filterable_fields: { title: "Test export product" }
+        filterable_fields: {
+          title: "Test export product",
+        },
      },
    }
-    const batchJobRes = await api.post("/admin/batch-jobs", batchPayload, adminReqConfig)
+    const batchJobRes = await api.post(
+      "/admin/batch-jobs",
+      batchPayload,
+      adminReqConfig
+    )
    const batchJobId = batchJobRes.data.batch_job.id

    expect(batchJobId).toBeTruthy()

    // Pull to check the status until it is completed
-    let batchJob;
+    let batchJob
    let shouldContinuePulling = true
    while (shouldContinuePulling) {
-      const res = await api
-        .get(`/admin/batch-jobs/${batchJobId}`, adminReqConfig)
+      const res = await api.get(
+        `/admin/batch-jobs/${batchJobId}`,
+        adminReqConfig
+      )

      await new Promise((resolve, _) => {
        setTimeout(resolve, 1000)
      })

      batchJob = res.data.batch_job
-      shouldContinuePulling = !(batchJob.status === "completed")
+      shouldContinuePulling = !(
+        batchJob.status === "completed" || batchJob.status === "failed"
+      )
    }

    expect(batchJob.status).toBe("completed")

    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
    const isFileExists = (await fs.stat(exportFilePath)).isFile()

@@ -137,7 +164,7 @@ describe("Batch job of product-export type", () => {
     expect(batchJob.result?.file_size).toBe(fileSize)

     const data = (await fs.readFile(exportFilePath)).toString()
-    const [, ...lines] = data.split("\r\n").filter(l => l)
+    const [, ...lines] = data.split("\r\n").filter((l) => l)

     expect(lines.length).toBe(1)

@@ -150,4 +177,61 @@ describe("Batch job of product-export type", () => {
     expect(lineColumn[24]).toBe(productPayload.variants[0].title)
     expect(lineColumn[25]).toBe(productPayload.variants[0].sku)
   })
+
+  it("should export a csv file containing a limited number of products", async () => {
+    jest.setTimeout(1000000)
+    const api = useApi()
+
+    const batchPayload = {
+      type: "product-export",
+      context: {
+        batch_size: 1,
+        filterable_fields: { collection_id: "test-collection" },
+        order: "created_at",
+      },
+    }
+
+    const batchJobRes = await api.post(
+      "/admin/batch-jobs",
+      batchPayload,
+      adminReqConfig
+    )
+    const batchJobId = batchJobRes.data.batch_job.id
+
+    expect(batchJobId).toBeTruthy()
+
+    // Pull to check the status until it is completed
+    let batchJob
+    let shouldContinuePulling = true
+    while (shouldContinuePulling) {
+      const res = await api.get(
+        `/admin/batch-jobs/${batchJobId}`,
+        adminReqConfig
+      )
+
+      await new Promise((resolve, _) => {
+        setTimeout(resolve, 1000)
+      })
+
+      batchJob = res.data.batch_job
+      shouldContinuePulling = !(
+        batchJob.status === "completed" || batchJob.status === "failed"
+      )
+    }
+
+    expect(batchJob.status).toBe("completed")
+
+    exportFilePath = path.resolve(__dirname, batchJob.result.file_key)
+    const isFileExists = (await fs.stat(exportFilePath)).isFile()
+
+    expect(isFileExists).toBeTruthy()
+
+    const data = (await fs.readFile(exportFilePath)).toString()
+    const [, ...lines] = data.split("\r\n").filter((l) => l)
+
+    expect(lines.length).toBe(4)
+
+    const csvLine = lines[0].split(";")
+    expect(csvLine[0]).toBe("test-product")
+  })
 })

@@ -8,7 +8,7 @@ module.exports = {
     redis_url: process.env.REDIS_URL,
     database_url: `postgres://${DB_USERNAME}:${DB_PASSWORD}@localhost/medusa-integration-${workerId}`,
     database_type: "postgres",
-    jwt_secret: 'test',
-    cookie_secret: 'test'
+    jwt_secret: "test",
+    cookie_secret: "test",
   },
 }

@@ -1,55 +1,63 @@
 import { AbstractFileService } from "@medusajs/medusa"
 import stream from "stream"
 import { resolve } from "path"
 import * as fs from "fs"
+import * as path from "path"

 export default class LocalFileService extends AbstractFileService {
   // eslint-disable-next-line no-empty-pattern
   constructor({}, options) {
-    super({});
-    this.upload_dir_ = process.env.UPLOAD_DIR ?? options.upload_dir ?? "uploads/images";
+    super({})
+    this.upload_dir_ =
+      process.env.UPLOAD_DIR ?? options.upload_dir ?? "uploads/images"

     if (!fs.existsSync(this.upload_dir_)) {
-      fs.mkdirSync(this.upload_dir_);
+      fs.mkdirSync(this.upload_dir_)
     }
   }

-  upload(file) {
-    return new Promise((resolve, reject) => {
-      const path = resolve(this.upload_dir_, file.originalname)
-      fs.writeFile(path, "", err => {
-        if (err) {
-          reject(err);
-        }
-
-        resolve({ url: path });
-      });
-    });
+  async upload(file) {
+    const uploadPath = path.join(
+      this.upload_dir_,
+      path.dirname(file.originalname)
+    )
+
+    if (!fs.existsSync(uploadPath)) {
+      fs.mkdirSync(uploadPath, { recursive: true })
+    }
+
+    const filePath = path.resolve(this.upload_dir_, file.originalname)
+    fs.writeFile(filePath, "", (error) => {
+      if (error) {
+        throw error
+      }
+    })
+    return { url: filePath }
   }

-  delete({ name }) {
+  async delete({ name }) {
     return new Promise((resolve, _) => {
       const path = resolve(this.upload_dir_, name)
-      fs.unlink(path, err => {
+      fs.unlink(path, (err) => {
         if (err) {
-          throw err;
+          throw err
         }

-        resolve("file unlinked");
-      });
-    });
+        resolve("file unlinked")
+      })
+    })
   }

   async getUploadStreamDescriptor({ name, ext }) {
-    const fileKey = `${name}.${ext}`
-    const path = resolve(this.upload_dir_, fileKey)
+    const fileKey = `${name}-${Date.now()}.${ext}`
+    const filePath = path.resolve(this.upload_dir_, fileKey)

-    const isFileExists = fs.existsSync(path)
+    const isFileExists = fs.existsSync(filePath)
     if (!isFileExists) {
       await this.upload({ originalname: fileKey })
     }

     const pass = new stream.PassThrough()
-    pass.pipe(fs.createWriteStream(path))
+    pass.pipe(fs.createWriteStream(filePath))

     return {
       writeStream: pass,

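The stream descriptor contract the batch strategies build on can be seen in isolation below — a minimal sketch assuming a service instance like the one above (the name and CSV content are illustrative):

    // Ask the file service for a writable stream plus the resulting file key.
    const { writeStream, fileKey, promise } =
      await fileService.getUploadStreamDescriptor({
        name: "exports/orders/example", // illustrative name
        ext: "csv",
      })

    writeStream.write("Order_ID;Total\r\n") // stream the CSV chunk by chunk
    writeStream.end()

    await promise // resolves once the underlying write has been flushed
    console.log(`wrote ${fileKey}`)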
@@ -5,6 +5,12 @@ const { setPort } = require("./use-api")
 module.exports = ({ cwd, redisUrl, uploadDir, verbose }) => {
   const serverPath = path.join(__dirname, "test-server.js")

+  // in order to prevent conflicts in redis, use a different db for each worker
+  // same fix as for databases (works with up to 15)
+  // redis dbs are 0-indexed and jest worker ids are indexed from 1
+  const workerId = parseInt(process.env.JEST_WORKER_ID || "1")
+  const redisUrlWithDatabase = `${redisUrl}/${workerId - 1}`
+
   return new Promise((resolve, reject) => {
     const medusaProcess = spawn("node", [path.resolve(serverPath)], {
       cwd,
@@ -13,8 +19,8 @@ module.exports = ({ cwd, redisUrl, uploadDir, verbose }) => {
       NODE_ENV: "development",
       JWT_SECRET: "test",
       COOKIE_SECRET: "test",
-      REDIS_URL: redisUrl, // If provided, will use a real instance, otherwise a fake instance
-      UPLOAD_DIR: uploadDir // If provided, will be used for the fake local file service
+      REDIS_URL: redisUrl ? redisUrlWithDatabase : undefined, // If provided, will use a real instance, otherwise a fake instance
+      UPLOAD_DIR: uploadDir, // If provided, will be used for the fake local file service
     },
     stdio: verbose
       ? ["inherit", "inherit", "inherit", "ipc"]

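To make the per-worker Redis isolation concrete, a worked example with illustrative values:

    const redisUrl = "redis://127.0.0.1:6379"
    const workerId = 2 // e.g. JEST_WORKER_ID=2
    const redisUrlWithDatabase = `${redisUrl}/${workerId - 1}`
    // → "redis://127.0.0.1:6379/1"; Redis ships with 16 databases (0–15) by default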
@@ -70,7 +70,7 @@ class DigitalOceanService extends AbstractFileService {

     const pass = new stream.PassThrough()

-    const fileKey = `${fileData.name}-${Date.now()}.${fileData.ext}`
+    const fileKey = `${fileData.name}.${fileData.ext}`
     const params = {
       ACL: fileData.acl ?? "private",
       Bucket: this.bucket_,

@@ -24,12 +24,16 @@ export default (app) => {
   route.post("/", middlewares.wrap(require("./create-batch-job").default))

   const batchJobRouter = Router({ mergeParams: true })

   route.use("/:id", getRequestedBatchJob, canAccessBatchJob, batchJobRouter)

   batchJobRouter.get("/", middlewares.wrap(require("./get-batch-job").default))

+  batchJobRouter.post(
+    "/confirm",
+    middlewares.wrap(require("./confirm-batch-job").default)
+  )

   batchJobRouter.post(
     "/cancel",
     middlewares.wrap(require("./cancel-batch-job").default)

@@ -5,12 +5,16 @@ import { ConfigModule, MedusaContainer } from "../types/global"
 import { Logger } from "../types/global"

 type Options = {
-  container: MedusaContainer;
-  configModule: ConfigModule;
-  logger: Logger;
+  container: MedusaContainer
+  configModule: ConfigModule
+  logger: Logger
 }

-async function redisLoader({ container, configModule, logger }: Options): Promise<void> {
+async function redisLoader({
+  container,
+  configModule,
+  logger,
+}: Options): Promise<void> {
   if (configModule.projectConfig.redis_url) {
     // Economical way of dealing with redis clients
     const client = new RealRedis(configModule.projectConfig.redis_url)

@@ -1,6 +1,6 @@
 import glob from "glob"
 import path from "path"
-import { AwilixContainer, asFunction, aliasTo } from "awilix"
+import { asFunction, aliasTo } from "awilix"

 import formatRegistrationName from "../utils/format-registration-name"
 import { isBatchJobStrategy } from "../interfaces"

@@ -1,5 +1,16 @@
-import { AfterLoad, BeforeInsert, Column, Entity, JoinColumn, ManyToOne } from "typeorm"
-import { BatchJobResultError, BatchJobResultStatDescriptor, BatchJobStatus } from "../types/batch-job"
+import {
+  AfterLoad,
+  BeforeInsert,
+  Column,
+  Entity,
+  JoinColumn,
+  ManyToOne,
+} from "typeorm"
+import {
+  BatchJobResultError,
+  BatchJobResultStatDescriptor,
+  BatchJobStatus,
+} from "../types/batch-job"
 import { DbAwareColumn, resolveDbType } from "../utils/db-aware-column"
 import { SoftDeletableEntity } from "../interfaces/models/soft-deletable-entity"
 import { generateEntityId } from "../utils/generate-entity-id"
@@ -33,7 +44,7 @@ export class BatchJob extends SoftDeletableEntity {
   } & Record<string, unknown>

   @Column({ type: "boolean", nullable: false, default: false })
-  dry_run: boolean = false;
+  dry_run = false

   @Column({ type: resolveDbType("timestamptz"), nullable: true })
   pre_processed_at?: Date
@@ -141,7 +152,7 @@ export class BatchJob extends SoftDeletableEntity {
  *     description: "The date of the completion."
  *     type: string
  *     format: date-time
- *   canceled_at:
+ *   canceled_at:
  *     description: "The date of the concellation."
  *     type: string
  *     format: date-time

@@ -0,0 +1,74 @@
import { DeepPartial } from "typeorm"
import {
  FulfillmentStatus,
  Order,
  OrderStatus,
  PaymentStatus,
} from "../../../models"

const createdAtDate = new Date("2019-01-01T00:00:00.000Z")

export const ordersToExport: DeepPartial<Order>[] = [
  {
    id: "order_1",
    created_at: createdAtDate,
    display_id: 123,
    status: OrderStatus.PENDING,
    fulfillment_status: FulfillmentStatus.PARTIALLY_FULFILLED,
    payment_status: PaymentStatus.CAPTURED,
    subtotal: 10,
    shipping_total: 10,
    discount_total: 0,
    gift_card_total: 0,
    refunded_total: 0,
    tax_total: 5,
    total: 25,
    currency_code: "usd",
    region_id: "region_1",
    shipping_address: {
      id: "address_1",
      address_1: "123 Main St",
      address_2: "",
      city: "New York",
      country_code: "US",
      postal_code: "10001",
    },
    customer: {
      id: "customer_1",
      first_name: "John",
      last_name: "Doe",
      email: "John@Doe.com",
    },
  },
  {
    id: "order_2",
    created_at: createdAtDate,
    display_id: 124,
    status: OrderStatus.COMPLETED,
    fulfillment_status: FulfillmentStatus.FULFILLED,
    payment_status: PaymentStatus.CAPTURED,
    subtotal: 125,
    shipping_total: 10,
    discount_total: 0,
    gift_card_total: 0,
    refunded_total: 0,
    tax_total: 0,
    total: 135,
    currency_code: "eur",
    region_id: "region_2",
    shipping_address: {
      id: "address_2",
      address_1: "Hovedgaden 1",
      address_2: "",
      city: "Copenhagen",
      country_code: "DK",
      postal_code: "1150",
    },
    customer: {
      id: "customer_2",
      first_name: "Jane",
      last_name: "Doe",
      email: "Jane@Doe.com",
    },
  },
]

@@ -0,0 +1,12 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`Order export strategy should process the batch job and generate the appropriate output 1`] = `
Array [
  "Order_ID;Display_ID;Order status;Date;Customer First name;Customer Last name;Customer Email;Customer ID;Shipping Address 1;Shipping Address 2;Shipping Country Code;Shipping City;Shipping Postal Code;Shipping Region ID;Fulfillment Status;Payment Status;Subtotal;Shipping Total;Discount Total;Gift Card Total;Refunded Total;Tax Total;Total;Currency Code
",
  "order_1;123;pending;Tue, 01 Jan 2019 00:00:00 GMT;John;Doe;John@Doe.com;customer_1;123 Main St;;US;New York;10001;region_1;partially_fulfilled;captured;10;10;0;0;0;5;25;usd
",
  "order_2;124;completed;Tue, 01 Jan 2019 00:00:00 GMT;Jane;Doe;Jane@Doe.com;customer_2;Hovedgaden 1;;DK;Copenhagen;1150;region_2;fulfilled;captured;125;10;0;0;0;0;135;eur
",
]
`;

@@ -0,0 +1,138 @@
import OrderExportStrategy from "../../../batch-jobs/order/export"
import { IdMap, MockManager } from "medusa-test-utils"
import { User } from "../../../../models"
import { BatchJobStatus } from "../../../../types/batch-job"
import { ordersToExport } from "../../../__fixtures__/order-export-data"

const outputDataStorage: string[] = []

let fakeJob = {
  id: IdMap.getId("order-export-job"),
  type: "order-export",
  context: {
    params: {},
    list_config: {
      select: [
        "id",
        "display_id",
        "status",
        "created_at",
        "fulfillment_status",
        "payment_status",
        "subtotal",
        "shipping_total",
        "discount_total",
        "gift_card_total",
        "refunded_total",
        "tax_total",
        "total",
        "currency_code",
        "region_id",
      ],
      relations: ["customer", "shipping_address"],
    },
  },
  created_by: IdMap.getId("order-export-job-creator"),
  created_by_user: {} as User,
  result: {},
  dry_run: false,
  status: BatchJobStatus.PROCESSING,
}

const fileServiceMock = {
  delete: jest.fn(),
  withTransaction: function () {
    return this
  },
  getUploadStreamDescriptor: jest.fn().mockImplementation(() => {
    return Promise.resolve({
      writeStream: {
        write: (data: string) => {
          outputDataStorage.push(data)
        },
        end: () => void 0,
      },
      promise: Promise.resolve(),
      fileKey: "product-export.csv",
    })
  }),
}
const batchJobServiceMock = {
  withTransaction: function (): any {
    return this
  },
  update: jest.fn().mockImplementation(async (data) => {
    fakeJob = {
      ...fakeJob,
      ...data,
    }
    return fakeJob
  }),
  complete: jest.fn().mockImplementation(async () => {
    fakeJob.status = BatchJobStatus.COMPLETED
    return fakeJob
  }),
  ready: jest.fn().mockImplementation(async () => {
    fakeJob.status = BatchJobStatus.READY
    return fakeJob
  }),
  retrieve: jest.fn().mockImplementation(async () => {
    return fakeJob
  }),
}
const orderServiceMock = {
  withTransaction: function (): any {
    return this
  },
  listAndCount: jest
    .fn()
    .mockImplementation(() =>
      Promise.resolve([ordersToExport, ordersToExport.length])
    ),
  list: jest.fn().mockImplementation(() => Promise.resolve(ordersToExport)),
}

describe("Order export strategy", () => {
  const orderExportStrategy = new OrderExportStrategy({
    batchJobService: batchJobServiceMock as any,
    fileService: fileServiceMock as any,
    orderService: orderServiceMock as any,
    manager: MockManager,
  })

  it("Should generate header as template", async () => {
    const template = await orderExportStrategy.buildTemplate()
    expect(template.split(";")).toEqual([
      "Order_ID",
      "Display_ID",
      "Order status",
      "Date",
      "Customer First name",
      "Customer Last name",
      "Customer Email",
      "Customer ID",
      "Shipping Address 1",
      "Shipping Address 2",
      "Shipping Country Code",
      "Shipping City",
      "Shipping Postal Code",
      "Shipping Region ID",
      "Fulfillment Status",
      "Payment Status",
      "Subtotal",
      "Shipping Total",
      "Discount Total",
      "Gift Card Total",
      "Refunded Total",
      "Tax Total",
      "Total",
      "Currency Code\r\n",
    ])
  })

  it("should process the batch job and generate the appropriate output", async () => {
    await orderExportStrategy.processJob(fakeJob.id)

    expect(outputDataStorage).toMatchSnapshot()
  })
})

packages/medusa/src/strategies/batch-jobs/order/export.ts (new file, 295 lines)
@@ -0,0 +1,295 @@
import { EntityManager } from "typeorm"
import {
  OrderDescriptor,
  OrderExportBatchJob,
  OrderExportBatchJobContext,
  orderExportPropertiesDescriptors,
} from "."
import { AdminPostBatchesReq } from "../../../api/routes/admin/batch/create-batch-job"
import { IFileService } from "../../../interfaces"
import { AbstractBatchJobStrategy } from "../../../interfaces/batch-job-strategy"
import { Order } from "../../../models"
import { OrderService } from "../../../services"
import BatchJobService from "../../../services/batch-job"
import { BatchJobStatus } from "../../../types/batch-job"
import { prepareListQuery } from "../../../utils/get-query-config"

type InjectedDependencies = {
  fileService: IFileService<any>
  orderService: OrderService
  batchJobService: BatchJobService
  manager: EntityManager
}

class OrderExportStrategy extends AbstractBatchJobStrategy<OrderExportStrategy> {
  public static identifier = "order-export-strategy"
  public static batchType = "order-export"

  public defaultMaxRetry = 3

  protected readonly DEFAULT_LIMIT = 100
  protected readonly NEWLINE = "\r\n"
  protected readonly DELIMITER = ";"

  protected manager_: EntityManager
  protected transactionManager_: EntityManager | undefined
  protected readonly fileService_: IFileService<any>
  protected readonly batchJobService_: BatchJobService
  protected readonly orderService_: OrderService

  protected readonly defaultRelations_ = ["customer", "shipping_address"]
  protected readonly defaultFields_ = [
    "id",
    "display_id",
    "status",
    "created_at",
    "fulfillment_status",
    "payment_status",
    "subtotal",
    "shipping_total",
    "discount_total",
    "gift_card_total",
    "refunded_total",
    "tax_total",
    "total",
    "currency_code",
    "region_id",
  ]

  constructor({
    fileService,
    batchJobService,
    orderService,
    manager,
  }: InjectedDependencies) {
    // eslint-disable-next-line prefer-rest-params
    super(arguments[0])

    this.manager_ = manager
    this.fileService_ = fileService
    this.batchJobService_ = batchJobService
    this.orderService_ = orderService
  }

  async prepareBatchJobForProcessing(
    batchJob: AdminPostBatchesReq,
    // eslint-disable-next-line @typescript-eslint/no-unused-vars
    req: Express.Request
  ): Promise<AdminPostBatchesReq> {
    const {
      limit,
      offset,
      order,
      fields,
      expand,
      filterable_fields,
      ...context
    } = batchJob.context as OrderExportBatchJobContext

    const listConfig = prepareListQuery(
      {
        limit,
        offset,
        order,
        fields,
        expand,
      },
      {
        isList: true,
        defaultRelations: this.defaultRelations_,
        defaultFields: this.defaultFields_,
      }
    )

    batchJob.context = {
      ...(context ?? {}),
      list_config: listConfig,
      filterable_fields,
    }

    return batchJob
  }

  async preProcessBatchJob(batchJobId: string): Promise<void> {
    return await this.atomicPhase_(async (transactionManager) => {
      const batchJob = (await this.batchJobService_
        .withTransaction(transactionManager)
        .retrieve(batchJobId)) as OrderExportBatchJob

      const offset = batchJob.context?.list_config?.skip ?? 0
      const limit = batchJob.context?.list_config?.take ?? this.DEFAULT_LIMIT

      const { list_config = {}, filterable_fields = {} } = batchJob.context

      let count = batchJob.context?.batch_size

      if (!count) {
        const [, orderCount] = await this.orderService_
          .withTransaction(transactionManager)
          .listAndCount(filterable_fields, {
            ...(list_config ?? {}),
            skip: offset as number,
            order: { created_at: "DESC" },
            take: Math.min(batchJob.context.batch_size ?? Infinity, limit),
          })
        count = orderCount
      }

      await this.batchJobService_
        .withTransaction(transactionManager)
        .update(batchJob, {
          result: {
            stat_descriptors: [
              {
                key: "order-export-count",
                name: "Order count to export",
                message: `There will be ${count} orders exported by this action`,
              },
            ],
          },
        })
    })
  }

  async processJob(batchJobId: string): Promise<void> {
    let offset = 0
    let limit = this.DEFAULT_LIMIT
    let advancementCount = 0
    let orderCount = 0
    let approximateFileSize = 0

    return await this.atomicPhase_(
      async (transactionManager) => {
        let batchJob = (await this.batchJobService_
          .withTransaction(transactionManager)
          .retrieve(batchJobId)) as OrderExportBatchJob

        const { writeStream, fileKey, promise } = await this.fileService_
          .withTransaction(transactionManager)
          .getUploadStreamDescriptor({
            name: `exports/orders/order-export-${Date.now()}`,
            ext: "csv",
          })

        advancementCount =
          batchJob.result?.advancement_count ?? advancementCount
        offset = (batchJob.context?.list_config?.skip ?? 0) + advancementCount
        limit = batchJob.context?.list_config?.take ?? limit

        const { list_config = {}, filterable_fields = {} } = batchJob.context
        const [, count] = await this.orderService_.listAndCount(
          filterable_fields,
          {
            ...list_config,
            order: { created_at: "DESC" },
            skip: offset,
            take: Math.min(batchJob.context.batch_size ?? Infinity, limit),
          }
        )

        const lineDescriptor = this.getLineDescriptor(
          list_config.select as string[],
          list_config.relations as string[]
        )

        const header = this.buildHeader(lineDescriptor)
        approximateFileSize += Buffer.from(header).byteLength
        writeStream.write(header)

        orderCount = batchJob.context?.batch_size ?? count
        let orders = []

        while (offset < orderCount) {
          orders = await this.orderService_
            .withTransaction(transactionManager)
            .list(filterable_fields, {
              ...list_config,
              skip: offset,
              take: Math.min(orderCount - offset, limit),
            })

          orders.forEach((order) => {
            const line = this.buildCSVLine(order, lineDescriptor)
            approximateFileSize += Buffer.from(line).byteLength
            writeStream.write(line)
          })

          advancementCount += orders.length
          offset += orders.length

          batchJob = (await this.batchJobService_
            .withTransaction(transactionManager)
            .update(batchJobId, {
              result: {
                file_key: fileKey,
                file_size: approximateFileSize,
                count: orderCount,
                advancement_count: advancementCount,
                progress: advancementCount / orderCount,
              },
            })) as OrderExportBatchJob

          if (batchJob.status === BatchJobStatus.CANCELED) {
            writeStream.end()

            await this.fileService_
              .withTransaction(transactionManager)
              .delete({ key: fileKey })

            return
          }
        }

        writeStream.end()

        await promise
      },
      "REPEATABLE READ",
      async (err: Error) => {
        this.handleProcessingError(batchJobId, err, {
          offset,
          count: orderCount,
          progress: offset / orderCount,
        })
      }
    )
  }

  public async buildTemplate(): Promise<string> {
    return this.buildHeader(
      this.getLineDescriptor(this.defaultFields_, this.defaultRelations_)
    )
  }

  private buildHeader(
    lineDescriptor: OrderDescriptor[] = orderExportPropertiesDescriptors
  ): string {
    return (
      [...lineDescriptor.map(({ title }) => title)].join(this.DELIMITER) +
      this.NEWLINE
    )
  }

  private buildCSVLine(
    order: Order,
    lineDescriptor: OrderDescriptor[]
  ): string {
    return (
      [...lineDescriptor.map(({ accessor }) => accessor(order))].join(
        this.DELIMITER
      ) + this.NEWLINE
    )
  }

  private getLineDescriptor(
    fields: string[],
    relations: string[]
  ): OrderDescriptor[] {
    return orderExportPropertiesDescriptors.filter(
      ({ fieldName }) =>
        fields.indexOf(fieldName) !== -1 || relations.indexOf(fieldName) !== -1
    )
  }
}

export default OrderExportStrategy

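For readers wiring up their own batch type, the strategy above reduces to the following skeleton — a sketch assuming the same AbstractBatchJobStrategy base; `GreetingExportStrategy` and `greeting-export` are made-up names, and only members that OrderExportStrategy itself implements are shown:

    import { AbstractBatchJobStrategy } from "../../../interfaces/batch-job-strategy"

    // Hypothetical strategy: the loader registers strategies by identifier
    // and matches incoming jobs to a strategy via batchType.
    class GreetingExportStrategy extends AbstractBatchJobStrategy<GreetingExportStrategy> {
      public static identifier = "greeting-export-strategy"
      public static batchType = "greeting-export" // value used in POST /admin/batch-jobs

      async buildTemplate(): Promise<string> {
        return "Greeting\r\n" // header row offered as a template
      }

      async processJob(batchJobId: string): Promise<void> {
        // retrieve the job, stream output through the file service,
        // and report progress via batchJobService.update(...)
      }
    }

    export default GreetingExportStrategy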
packages/medusa/src/strategies/batch-jobs/order/index.ts (new file, 153 lines)
@@ -0,0 +1,153 @@
import { BatchJob, Order } from "../../../models"
import { Selector } from "../../../types/common"

export type OrderExportBatchJobContext = {
  offset?: number
  limit?: number
  order?: string
  fields?: string
  expand?: string

  list_config?: {
    select?: string[]
    relations?: string[]
    skip?: number
    take?: number
    order?: Record<string, "ASC" | "DESC">
  }
  filterable_fields?: Selector<Order>

  retry_count?: number
  max_retry?: number
  batch_size?: number
}

export type OrderExportBatchJob = BatchJob & {
  context: OrderExportBatchJobContext
}

export type OrderDescriptor = {
  fieldName: string
  title: string
  accessor: (entity: Order) => string
}

export const orderExportPropertiesDescriptors: OrderDescriptor[] = [
  {
    fieldName: "id",
    title: "Order_ID",
    accessor: (order: Order): string => order.id,
  },
  {
    fieldName: "display_id",
    title: "Display_ID",
    accessor: (order: Order): string => order.display_id.toString(),
  },
  {
    fieldName: "status",
    title: "Order status",
    accessor: (order: Order): string => order.status.toString(),
  },

  {
    fieldName: "created_at",
    title: "Date",
    accessor: (order: Order): string => order.created_at.toUTCString(),
  },

  {
    fieldName: "customer",
    title: [
      "Customer First name",
      "Customer Last name",
      "Customer Email",
      "Customer ID",
    ].join(";"),
    accessor: (order: Order): string =>
      [
        order.customer.first_name,
        order.customer.last_name,
        order.customer.email,
        order.customer.id,
      ].join(";"),
  },

  {
    fieldName: "shipping_address",
    title: [
      "Shipping Address 1",
      "Shipping Address 2",
      "Shipping Country Code",
      "Shipping City",
      "Shipping Postal Code",
      "Shipping Region ID",
    ].join(";"),
    accessor: (order: Order): string =>
      [
        order.shipping_address?.address_1,
        order.shipping_address?.address_2,
        order.shipping_address?.country_code,
        order.shipping_address?.city,
        order.shipping_address?.postal_code,
        order.region_id,
      ].join(";"),
  },

  {
    fieldName: "fulfillment_status",
    title: "Fulfillment Status",
    accessor: (order: Order): string => order.fulfillment_status,
  },

  {
    fieldName: "payment_status",
    title: "Payment Status",
    accessor: (order: Order): string => order.payment_status,
  },

  {
    fieldName: "subtotal",
    title: "Subtotal",
    accessor: (order: Order): string => order.subtotal.toString(),
  },

  {
    fieldName: "shipping_total",
    title: "Shipping Total",
    accessor: (order: Order): string => order.shipping_total.toString(),
  },

  {
    fieldName: "discount_total",
    title: "Discount Total",
    accessor: (order: Order): string => order.discount_total.toString(),
  },

  {
    fieldName: "gift_card_total",
    title: "Gift Card Total",
    accessor: (order: Order): string => order.gift_card_total.toString(),
  },

  {
    fieldName: "refunded_total",
    title: "Refunded Total",
    accessor: (order: Order): string => order.refunded_total.toString(),
  },
  {
    fieldName: "tax_total",
    title: "Tax Total",
    accessor: (order: Order): string => order.tax_total.toString(),
  },
  {
    fieldName: "total",
    title: "Total",
    accessor: (order: Order): string => order.total.toString(),
  },

  {
    fieldName: "currency_code",
    title: "Currency Code",
    accessor: (order: Order): string => order.currency_code,
  },
]

@@ -78,6 +78,44 @@ export default class ProductExportStrategy extends AbstractBatchJobStrategy<
     return ""
   }

+  async prepareBatchJobForProcessing(
+    batchJob: CreateBatchJobInput,
+    // eslint-disable-next-line @typescript-eslint/no-unused-vars
+    req: Express.Request
+  ): Promise<CreateBatchJobInput> {
+    const {
+      limit,
+      offset,
+      order,
+      fields,
+      expand,
+      filterable_fields,
+      ...context
+    } = (batchJob?.context ?? {}) as ProductExportBatchJobContext
+
+    const listConfig = prepareListQuery(
+      {
+        limit,
+        offset,
+        order,
+        fields,
+        expand,
+      },
+      {
+        isList: true,
+        defaultRelations: this.defaultRelations_,
+      }
+    )
+
+    batchJob.context = {
+      ...(context ?? {}),
+      list_config: listConfig,
+      filterable_fields,
+    }
+
+    return batchJob
+  }
+
   async preProcessBatchJob(batchJobId: string): Promise<void> {
     return await this.atomicPhase_(async (transactionManager) => {
       const batchJob = (await this.batchJobService_
@@ -177,44 +215,6 @@ export default class ProductExportStrategy extends AbstractBatchJobStrategy<
     })
   }

-  async prepareBatchJobForProcessing(
-    batchJob: CreateBatchJobInput,
-    // eslint-disable-next-line @typescript-eslint/no-unused-vars
-    req: Express.Request
-  ): Promise<CreateBatchJobInput> {
-    const {
-      limit,
-      offset,
-      order,
-      fields,
-      expand,
-      filterable_fields,
-      ...context
-    } = (batchJob?.context ?? {}) as ProductExportBatchJobContext
-
-    const listConfig = prepareListQuery(
-      {
-        limit,
-        offset,
-        order,
-        fields,
-        expand,
-      },
-      {
-        isList: true,
-        defaultRelations: this.defaultRelations_,
-      }
-    )
-
-    batchJob.context = {
-      ...(context ?? {}),
-      list_config: listConfig,
-      filterable_fields,
-    }
-
-    return batchJob
-  }
-
   async processJob(batchJobId: string): Promise<void> {
     let offset = 0
     let limit = this.DEFAULT_LIMIT
@@ -231,7 +231,7 @@ export default class ProductExportStrategy extends AbstractBatchJobStrategy<
         const { writeStream, fileKey, promise } = await this.fileService_
           .withTransaction(transactionManager)
           .getUploadStreamDescriptor({
-            name: `product-export-${Date.now()}`,
+            name: `exports/products/product-export-${Date.now()}`,
             ext: "csv",
           })