feat: wire up direct uploads with local file provider (#12643)

.changeset/bright-parents-know.md (new file)
@@ -0,0 +1,9 @@
+---
+"@medusajs/file-local": patch
+"@medusajs/core-flows": patch
+"@medusajs/js-sdk": patch
+"@medusajs/dashboard": patch
+"integration-tests-http": patch
+---
+
+feat: wire up direct uploads with local file provider
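
For orientation, here is a rough sketch of what the new flow looks like from an admin client built on the JS SDK. `createImport({ file })` and the `/admin/products/imports/{transactionId}/confirm` route are taken from the diff below; the `confirmImport` method name and the `transaction_id` field are assumptions used only for illustration.

```ts
// Sketch only. Assumes an initialized JS SDK instance; `confirmImport` and
// `transaction_id` are assumed names, not confirmed by this diff.
import Medusa from "@medusajs/js-sdk"

declare const sdk: Medusa

export async function importProducts(file: File) {
  // Internally the SDK asks the file provider for a pre-signed URL and either
  // PUTs the file to it directly (absolute http/https URLs) or falls back to a
  // multipart POST to /admin/uploads (the local provider's relative URL).
  const result = await sdk.admin.product.createImport({ file })

  // Confirm the import so the backend starts processing the uploaded CSV.
  const { transaction_id } = result as { transaction_id: string }
  await sdk.admin.product.confirmImport(transaction_id)
}
```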

@@ -605,6 +605,22 @@ medusaIntegrationTestRunner({
         'Invalid column name(s) "Product field"'
       )
     })
+
+    it("should handle error when the source file does not exists", async () => {
+      const { body, meta } = getUploadReq({
+        name: "test.csv",
+        key: "test.csv",
+        size: 0,
+      })
+
+      const batchJobRes = await api
+        .post("/admin/products/imports", body, meta)
+        .catch((e) => e)
+
+      expect(batchJobRes.response.data.message).toEqual(
+        "An unknown error occurred."
+      )
+    })
   })
 },
})

@@ -55,7 +55,7 @@ medusaIntegrationTestRunner({
          extension: "csv",
          mime_type: "text/csv",
          size: file.size,
-         url: expect.stringContaining(response.data.filename),
+         url: "/admin/uploads",
        })
      )
      expect(response.status).toEqual(200)

@@ -400,7 +400,7 @@ export const useImportProducts = (
   >
 ) => {
   return useMutation({
-    mutationFn: (payload) => sdk.admin.product.import(payload),
+    mutationFn: (payload) => sdk.admin.product.createImport(payload),
     onSuccess: (data, variables, context) => {
       options?.onSuccess?.(data, variables, context)
     },

@@ -14,6 +14,8 @@ export type NormalizeProductCsvV1StepInput = string
 
 export const normalizeCsvToChunksStepId = "normalize-product-csv-to-chunks"
 
+type Chunk = { id: string; toCreate: number; toUpdate: number }
+
 /**
  * Processes a chunk of products by writing them to a file. Later the
  * file will be processed after the import has been confirmed.
@@ -23,7 +25,7 @@ async function processChunk(
   fileKey: string,
   csvRows: ReturnType<(typeof CSVNormalizer)["preProcess"]>[],
   currentRowNumber: number
-) {
+): Promise<Chunk> {
   const normalizer = new CSVNormalizer(csvRows)
   const products = normalizer.proccess(currentRowNumber)
 
@@ -76,7 +78,7 @@ async function createChunks(
   file: IFileModuleService,
   fileKey: string,
   stream: Parser
-) {
+): Promise<Chunk[]> {
   /**
    * The row under process
    */
@@ -97,7 +99,7 @@ async function createChunks(
    * Validated chunks that have been written with the file
    * provider
    */
-  const chunks: { id: string; toCreate: number; toUpdate: number }[] = []
+  const chunks: Chunk[] = []
 
   /**
    * Currently collected rows to be processed as one chunk
@@ -192,36 +194,51 @@ async function createChunks(
 export const normalizeCsvToChunksStep = createStep(
   normalizeCsvToChunksStepId,
   async (fileKey: NormalizeProductCsvV1StepInput, { container }) => {
-    const file = container.resolve(Modules.FILE)
-    const contents = await file.getDownloadStream(fileKey)
-    const chunks = await createChunks(
-      file,
-      fileKey,
-      contents.pipe(
-        parse({
+    return new Promise<
+      StepResponse<{
+        chunks: Chunk[]
+        summary: Omit<Chunk, "id">
+      }>
+    >(async (resolve, reject) => {
+      try {
+        const file = container.resolve(Modules.FILE)
+        const contents = await file.getDownloadStream(fileKey)
+        const transformer = parse({
           columns: true,
           skip_empty_lines: true,
         })
-      )
-    )
 
-    const summary = chunks.reduce<{ toCreate: number; toUpdate: number }>(
-      (result, chunk) => {
-        result.toCreate = result.toCreate + chunk.toCreate
-        result.toUpdate = result.toUpdate + chunk.toUpdate
-        return result
-      },
-      { toCreate: 0, toUpdate: 0 }
-    )
+        contents.on("error", reject)
 
-    /**
-     * Delete CSV file once we have the chunks
-     */
-    await file.deleteFiles(fileKey)
+        const chunks = await createChunks(
+          file,
+          fileKey,
+          contents.pipe(transformer)
+        )
 
-    return new StepResponse({
-      chunks,
-      summary,
+        const summary = chunks.reduce<{ toCreate: number; toUpdate: number }>(
+          (result, chunk) => {
+            result.toCreate = result.toCreate + chunk.toCreate
+            result.toUpdate = result.toUpdate + chunk.toUpdate
+            return result
+          },
+          { toCreate: 0, toUpdate: 0 }
+        )
+
+        /**
+         * Delete CSV file once we have the chunks
+         */
+        await file.deleteFiles(fileKey)
+
+        resolve(
+          new StepResponse({
+            chunks,
+            summary,
+          })
+        )
+      } catch (error) {
+        reject(error)
+      }
     })
   }
 )
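
The step above now funnels all stream work through a single Promise so that errors emitted by the download stream, or thrown while chunking, reject the step instead of surfacing as unhandled `error` events. A minimal standalone sketch of that pattern, with illustrative names that are not part of the Medusa codebase:

```ts
// Illustrative only: shows the stream-to-Promise error propagation pattern,
// not the actual Medusa step.
import { Readable } from "node:stream"
import { parse } from "csv-parse"

export function countCsvRows(source: Readable): Promise<number> {
  return new Promise<number>(async (resolve, reject) => {
    try {
      const transformer = parse({ columns: true, skip_empty_lines: true })

      // Without this, an 'error' event on the source would crash the process
      // instead of failing the promise.
      source.on("error", reject)

      let rows = 0
      for await (const _record of source.pipe(transformer)) {
        rows++
      }
      resolve(rows)
    } catch (error) {
      reject(error)
    }
  })
}
```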

@@ -12,7 +12,10 @@ export const processImportChunksStepId = "process-import-chunks"
  * const data = parseProductCsvStep("products.csv")
  */
 export const processImportChunksStep = createStep(
-  processImportChunksStepId,
+  {
+    name: processImportChunksStepId,
+    async: true,
+  },
   async (input: { chunks: { id: string }[] }, { container }) => {
     const file = container.resolve(Modules.FILE)
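
`createStep` now receives an options object instead of a bare step id, with `async: true` marking the step as long-running. A rough sketch of that signature follows; the step name and handler are made up, and the comment on `async` reflects my reading of Medusa's async-step behavior rather than anything stated in this diff.

```ts
// Sketch of the options-object signature for createStep; illustrative only.
import { createStep, StepResponse } from "@medusajs/framework/workflows-sdk"

export const waitForProcessingStep = createStep(
  {
    name: "wait-for-processing",
    // Marks the step as long-running; as I understand it, the workflow engine
    // does not treat the handler's return as the final result and the step is
    // completed later (e.g. by another process reporting success or failure).
    async: true,
  },
  async (input: { resourceId: string }) => {
    // Kick off background work here (e.g. enqueue a job keyed by resourceId).
    return new StepResponse({ queued: input.resourceId })
  }
)
```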

@@ -1,5 +1,5 @@
 import { HttpTypes, SelectParams } from "@medusajs/types"
-import { Client } from "../client"
+import { Client, FetchError } from "../client"
 import { ClientHeaders } from "../types"
 
 export class Product {
@@ -114,10 +114,40 @@ export class Product {
      * special headers in this request, since external services like S3 will
      * give a CORS error.
      */
-    await fetch(response.url, {
-      method: "PUT",
-      body: body.file,
-    })
+    if (
+      response.url.startsWith("http://") ||
+      response.url.startsWith("https://")
+    ) {
+      const uploadResponse = await fetch(response.url, {
+        method: "PUT",
+        body: body.file,
+      })
+      if (uploadResponse.status >= 400) {
+        throw new FetchError(
+          uploadResponse.statusText,
+          uploadResponse.statusText,
+          uploadResponse.status
+        )
+      }
+    } else {
+      const form = new FormData()
+      form.append("files", body.file)
+
+      const localUploadResponse = await this.client.fetch<{
+        files: HttpTypes.AdminUploadFile
+      }>("admin/uploads", {
+        method: "POST",
+        headers: {
+          ...headers,
+          // Let the browser determine the content type.
+          "content-type": null,
+        },
+        body: form,
+        query,
+      })
+
+      response.filename = localUploadResponse.files[0].id
+    }
 
     /**
      * Perform products import using the uploaded file name
@@ -164,7 +194,7 @@ export class Product {
     headers?: ClientHeaders
   ) {
     return await this.client.fetch<{}>(
-      `/admin/products/import/${transactionId}/confirm`,
+      `/admin/products/imports/${transactionId}/confirm`,
      {
        method: "POST",
        headers,

@@ -138,16 +138,6 @@ export class LocalFileService extends AbstractFileProviderService {
   * Returns the pre-signed URL that the client (frontend) can use to trigger
   * a file upload. In this case, the Medusa backend will implement the
   * "/upload" endpoint to perform the file upload.
-  *
-  * Since, we do not want the client to perform link detection on the frontend
-  * and then prepare a different kind of request for cloud providers and different
-  * request for the local server, we will have to make these URLs self sufficient.
-  *
-  * What is a self sufficient URL
-  *
-  * - There should be no need to specify the MIME type or filename separately in request body (cloud providers don't allow it).
-  * - There should be no need to pass auth headers like cookies. Again cloud providers
-  *   won't allow it and will likely result in a CORS error.
   */
  async getPresignedUploadUrl(
    fileData: FileTypes.ProviderGetPresignedUploadUrlDTO
@@ -159,18 +149,8 @@ export class LocalFileService extends AbstractFileProviderService {
      )
    }
 
-    const uploadUrl = new URL(
-      "upload",
-      `${this.backendUrl_.replace(/\/$/, "")}/`
-    )
-
-    uploadUrl.searchParams.set("filename", fileData.filename)
-    if (fileData.mimeType) {
-      uploadUrl.searchParams.set("type", fileData.mimeType)
-    }
-
    return {
-      url: uploadUrl.toString(),
+      url: "/admin/uploads",
      key: fileData.filename,
    }
  }
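
With this change the local provider hands back the relative `/admin/uploads` path, which the JS SDK (see the `startsWith("http")` branch above) treats as the cue to fall back to a regular multipart upload. A provider that can issue real pre-signed URLs keeps getting the direct PUT; a hypothetical example for contrast, where the class name, identifier, and URL are made up:

```ts
// Hypothetical provider, for contrast with LocalFileService above. A real
// implementation would obtain the URL from its storage SDK's presigner.
import { AbstractFileProviderService } from "@medusajs/framework/utils"
import { FileTypes } from "@medusajs/framework/types"

class ExampleCloudFileService extends AbstractFileProviderService {
  static identifier = "example-cloud"

  async getPresignedUploadUrl(
    fileData: FileTypes.ProviderGetPresignedUploadUrlDTO
  ) {
    return {
      // Absolute http(s) URL, so the JS SDK performs a direct PUT to it.
      url: `https://files.example.com/${fileData.filename}?X-Signature=example`,
      key: fileData.filename,
    }
  }
}
```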