fix(medusa): Export data should always provide a file_key (#1785)

This commit is contained in:
Adrien de Peretti
2022-07-10 11:38:27 +02:00
committed by GitHub
parent 41a757a5f8
commit 6715eb11de
4 changed files with 136 additions and 25 deletions

View File

@@ -53,7 +53,7 @@ const fileServiceMock = {
end: () => void 0,
},
promise: Promise.resolve(),
fileKey: "product-export.csv",
fileKey: "order-export.csv",
})
}),
}
@@ -61,21 +61,19 @@ const batchJobServiceMock = {
withTransaction: function (): any {
return this
},
update: jest.fn().mockImplementation(async (data) => {
update: jest.fn().mockImplementation(async (job, data) => {
fakeJob = {
...fakeJob,
...data,
context: { ...fakeJob?.context, ...data?.context },
result: { ...fakeJob?.result, ...data?.result }
}
return fakeJob
return Promise.resolve(fakeJob)
}),
complete: jest.fn().mockImplementation(async () => {
fakeJob.status = BatchJobStatus.COMPLETED
return fakeJob
}),
ready: jest.fn().mockImplementation(async () => {
fakeJob.status = BatchJobStatus.READY
return fakeJob
}),
retrieve: jest.fn().mockImplementation(async () => {
return fakeJob
}),
@@ -91,6 +89,15 @@ const orderServiceMock = {
),
list: jest.fn().mockImplementation(() => Promise.resolve(ordersToExport)),
}
// Variant of orderServiceMock whose list/listAndCount resolve to no orders.
// Used by the "always provide a file_key even with no data" test below to
// prove the export strategy still writes a file_key for an empty result set.
const orderServiceWithoutDataMock = {
  ...orderServiceMock,
  listAndCount: jest
    .fn()
    .mockImplementation(() =>
      // [rows, count] tuple shape matches the real service's return value
      Promise.resolve([[], 0])
    ),
  list: jest.fn().mockImplementation(() => Promise.resolve([])),
}
describe("Order export strategy", () => {
const orderExportStrategy = new OrderExportStrategy({
@@ -134,5 +141,19 @@ describe("Order export strategy", () => {
await orderExportStrategy.processJob(fakeJob.id)
expect(outputDataStorage).toMatchSnapshot()
expect((fakeJob.result as any).file_key).toBeDefined()
})
// Regression test for #1785: even when the order service returns zero rows,
// processJob must still set result.file_key on the batch job.
it("should always provide a file_key even with no data", async () => {
  const orderExportStrategy = new OrderExportStrategy({
    batchJobService: batchJobServiceMock as any,
    fileService: fileServiceMock as any,
    // empty-data service variant — see orderServiceWithoutDataMock above
    orderService: orderServiceWithoutDataMock as any,
    manager: MockManager,
  })

  await orderExportStrategy.processJob(fakeJob.id)

  expect((fakeJob.result as any).file_key).toBeDefined()
})
})

View File

@@ -3,8 +3,7 @@ import { IdMap, MockManager } from "medusa-test-utils"
import { User } from "../../../../models"
import { BatchJobStatus } from "../../../../types/batch-job"
import { productsToExport } from "../../../__fixtures__/product-export-data"
import { AdminPostBatchesReq } from "../../../../api/routes/admin/batch/create-batch-job"
import { defaultAdminProductRelations } from "../../../../api/routes/admin/products"
import { AdminPostBatchesReq, defaultAdminProductRelations } from "../../../../api"
import { ProductExportBatchJob } from "../../../batch-jobs/product"
import { Request } from "express"
@@ -21,6 +20,12 @@ let fakeJob = {
status: BatchJobStatus.PROCESSING as BatchJobStatus
} as ProductExportBatchJob
// Copy of fakeJob pinned to the CANCELED status, used by the
// "remove the file_key and file_size if the job is canceled" test.
// NOTE(review): the id is "bj_failed" although the status is CANCELED —
// confirm the naming is intentional; batchJobServiceMock's update/retrieve
// dispatch on this exact id.
let canceledFakeJob = {
  ...fakeJob,
  id: "bj_failed",
  status: BatchJobStatus.CANCELED
} as ProductExportBatchJob
const fileServiceMock = {
delete: jest.fn(),
getUploadStreamDescriptor: jest.fn().mockImplementation(() => {
@@ -32,7 +37,7 @@ const fileServiceMock = {
end: () => void 0
},
promise: Promise.resolve(),
url: 'product-export.csv'
fileKey: 'product-export.csv'
})
}),
withTransaction: function () {
@@ -43,13 +48,25 @@ const batchJobServiceMock = {
withTransaction: function () {
return this
},
update: jest.fn().mockImplementation((job, data) => {
update: jest.fn().mockImplementation((jobOrId, data) => {
if ((jobOrId?.id ?? jobOrId) === "bj_failed") {
canceledFakeJob = {
...canceledFakeJob,
...data,
context: { ...canceledFakeJob?.context, ...data?.context },
result: { ...canceledFakeJob?.result, ...data?.result }
}
return Promise.resolve(canceledFakeJob)
}
fakeJob = {
...fakeJob,
...data,
context: { ...fakeJob?.context, ...data?.context },
result: { ...fakeJob?.result, ...data?.result }
}
return Promise.resolve(fakeJob)
}),
updateStatus: jest.fn().mockImplementation((status) => {
@@ -60,8 +77,11 @@ const batchJobServiceMock = {
fakeJob.status = BatchJobStatus.COMPLETED
return Promise.resolve(fakeJob)
}),
retrieve: jest.fn().mockImplementation(() => {
return Promise.resolve(fakeJob)
retrieve: jest.fn().mockImplementation((id) => {
const targetFakeJob = id === "bj_failed"
? canceledFakeJob
: fakeJob
return Promise.resolve(targetFakeJob)
}),
setFailed: jest.fn().mockImplementation((...args) => {
console.error(...args)
@@ -77,6 +97,14 @@ const productServiceMock = {
return Promise.resolve([productsToExport, productsToExport.length])
}),
}
// Variant of productServiceMock that returns zero products from
// list/count/listAndCount. Backs the "always provide a file_key even with
// no data" test for the product export strategy.
const productServiceWithNoDataMock = {
  ...productServiceMock,
  list: jest.fn().mockImplementation(() => Promise.resolve([])),
  count: jest.fn().mockImplementation(() => Promise.resolve(0)),
  listAndCount: jest.fn().mockImplementation(() => {
    // [rows, count] tuple shape matches the real service's return value
    return Promise.resolve([[], 0])
  }),
}
const managerMock = MockManager
describe("Product export strategy", () => {
@@ -149,6 +177,7 @@ describe("Product export strategy", () => {
await productExportStrategy.preProcessBatchJob(fakeJob.id)
await productExportStrategy.processJob(fakeJob.id)
expect(outputDataStorage).toMatchSnapshot()
expect((fakeJob.result as any).file_key).toBeDefined()
})
it('should prepare the job to be pre proccessed', async () => {
@@ -206,4 +235,35 @@ describe("Product export strategy", () => {
filterable_fields: undefined
}))
})
// Regression test for #1785: an export over an empty product set must still
// populate result.file_key on the batch job.
it("should always provide a file_key even with no data", async () => {
  const productExportStrategy = new ProductExportStrategy({
    batchJobService: batchJobServiceMock as any,
    fileService: fileServiceMock as any,
    // empty-data service variant — see productServiceWithNoDataMock above
    productService: productServiceWithNoDataMock as any,
    manager: MockManager,
  })

  await productExportStrategy.prepareBatchJobForProcessing(fakeJob, {} as Request)
  await productExportStrategy.preProcessBatchJob(fakeJob.id)
  await productExportStrategy.processJob(fakeJob.id)

  expect((fakeJob.result as any).file_key).toBeDefined()
})
// When the job is canceled mid-processing, the strategy's onProcessCanceled
// path must strip file_key/file_size from the result (the partial file is
// deleted, so keeping references would point at a nonexistent file).
it("should remove the file_key and file_size if the job is canceled", async () => {
  const productExportStrategy = new ProductExportStrategy({
    batchJobService: batchJobServiceMock as any,
    fileService: fileServiceMock as any,
    productService: productServiceMock as any,
    manager: MockManager,
  })

  // canceledFakeJob carries id "bj_failed"; batchJobServiceMock routes
  // update/retrieve for that id to the CANCELED job copy.
  await productExportStrategy.prepareBatchJobForProcessing(canceledFakeJob, {} as Request)
  await productExportStrategy.preProcessBatchJob(canceledFakeJob.id)
  await productExportStrategy.processJob(canceledFakeJob.id)

  expect((canceledFakeJob.result as any).file_key).not.toBeDefined()
  expect((canceledFakeJob.result as any).file_size).not.toBeDefined()
})
})

View File

@@ -187,17 +187,26 @@ class OrderExportStrategy extends AbstractBatchJobStrategy<OrderExportStrategy>
}
)
orderCount = batchJob.context?.batch_size ?? count
let orders = []
const lineDescriptor = this.getLineDescriptor(
list_config.select as string[],
list_config.relations as string[]
)
const header = this.buildHeader(lineDescriptor)
approximateFileSize += Buffer.from(header).byteLength
writeStream.write(header)
approximateFileSize += Buffer.from(header).byteLength
orderCount = batchJob.context?.batch_size ?? count
let orders = []
await this.batchJobService_
.withTransaction(transactionManager)
.update(batchJobId, {
result: {
file_key: fileKey,
file_size: approximateFileSize,
},
})
while (offset < orderCount) {
orders = await this.orderService_
@@ -221,7 +230,6 @@ class OrderExportStrategy extends AbstractBatchJobStrategy<OrderExportStrategy>
.withTransaction(transactionManager)
.update(batchJobId, {
result: {
file_key: fileKey,
file_size: approximateFileSize,
count: orderCount,
advancement_count: advancementCount,

View File

@@ -3,7 +3,7 @@ import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces"
import { Product, ProductVariant } from "../../../models"
import { BatchJobService, ProductService } from "../../../services"
import { BatchJobStatus, CreateBatchJobInput } from "../../../types/batch-job"
import { defaultAdminProductRelations } from "../../../api/routes/admin/products"
import { defaultAdminProductRelations } from "../../../api"
import { prepareListQuery } from "../../../utils/get-query-config"
import {
ProductExportBatchJob,
@@ -236,9 +236,17 @@ export default class ProductExportStrategy extends AbstractBatchJobStrategy<
})
const header = await this.buildHeader(batchJob)
approximateFileSize += Buffer.from(header).byteLength
writeStream.write(header)
approximateFileSize += Buffer.from(header).byteLength
await this.batchJobService_
.withTransaction(transactionManager)
.update(batchJobId, {
result: {
file_key: fileKey,
file_size: approximateFileSize,
},
})
advancementCount =
batchJob.result?.advancement_count ?? advancementCount
@@ -284,7 +292,6 @@ export default class ProductExportStrategy extends AbstractBatchJobStrategy<
.withTransaction(transactionManager)
.update(batchJobId, {
result: {
file_key: fileKey,
file_size: approximateFileSize,
count: productCount,
advancement_count: advancementCount,
@@ -294,10 +301,7 @@ export default class ProductExportStrategy extends AbstractBatchJobStrategy<
if (batchJob.status === BatchJobStatus.CANCELED) {
writeStream.end()
await this.fileService_
.withTransaction(transactionManager)
.delete({ fileKey })
await this.onProcessCanceled(batchJobId, fileKey)
return
}
}
@@ -427,4 +431,22 @@ export default class ProductExportStrategy extends AbstractBatchJobStrategy<
return outputLineData
}
/**
 * Cleans up after a canceled export: deletes the partially written file via
 * the file service, then clears file_key/file_size from the batch job result
 * so callers are not left with a reference to the removed file.
 *
 * @param batchJobId - id of the canceled batch job to update
 * @param fileKey - key of the partial export file to delete
 */
private async onProcessCanceled(
  batchJobId: string,
  fileKey: string
): Promise<void> {
  // Reuse an in-flight transaction when one exists, otherwise the base manager.
  const transactionManager = this.transactionManager_ ?? this.manager_

  await this.fileService_
    .withTransaction(transactionManager)
    .delete({ fileKey })

  await this.batchJobService_
    .withTransaction(transactionManager)
    .update(batchJobId, {
      result: {
        // explicitly unset — the file no longer exists
        file_key: undefined,
        file_size: undefined,
      },
    })
}
}