diff --git a/.changeset/bright-parents-know.md b/.changeset/bright-parents-know.md
new file mode 100644
index 0000000000..2a61949ae7
--- /dev/null
+++ b/.changeset/bright-parents-know.md
@@ -0,0 +1,9 @@
+---
+"@medusajs/file-local": patch
+"@medusajs/core-flows": patch
+"@medusajs/js-sdk": patch
+"@medusajs/dashboard": patch
+"integration-tests-http": patch
+---
+
+feat: wire up direct uploads with local file provider
diff --git a/integration-tests/http/__tests__/product/admin/product-imports.spec.ts b/integration-tests/http/__tests__/product/admin/product-imports.spec.ts
index 6a19814b71..ed53470435 100644
--- a/integration-tests/http/__tests__/product/admin/product-imports.spec.ts
+++ b/integration-tests/http/__tests__/product/admin/product-imports.spec.ts
@@ -605,6 +605,22 @@ medusaIntegrationTestRunner({
           'Invalid column name(s) "Product field"'
         )
       })
+
+      it("should handle error when the source file does not exist", async () => {
+        const { body, meta } = getUploadReq({
+          name: "test.csv",
+          key: "test.csv",
+          size: 0,
+        })
+
+        const batchJobRes = await api
+          .post("/admin/products/imports", body, meta)
+          .catch((e) => e)
+
+        expect(batchJobRes.response.data.message).toEqual(
+          "An unknown error occurred."
+        )
+      })
     })
   },
})
diff --git a/integration-tests/http/__tests__/upload/admin/presigned-urls.spec.ts b/integration-tests/http/__tests__/upload/admin/presigned-urls.spec.ts
index 7f731c906f..14fb432705 100644
--- a/integration-tests/http/__tests__/upload/admin/presigned-urls.spec.ts
+++ b/integration-tests/http/__tests__/upload/admin/presigned-urls.spec.ts
@@ -55,7 +55,7 @@ medusaIntegrationTestRunner({
           extension: "csv",
           mime_type: "text/csv",
           size: file.size,
-          url: expect.stringContaining(response.data.filename),
+          url: "/admin/uploads",
         })
       )
       expect(response.status).toEqual(200)
diff --git a/packages/admin/dashboard/src/hooks/api/products.tsx b/packages/admin/dashboard/src/hooks/api/products.tsx
index f054cbd5d6..f1411de957 100644
--- a/packages/admin/dashboard/src/hooks/api/products.tsx
+++ b/packages/admin/dashboard/src/hooks/api/products.tsx
@@ -400,7 +400,7 @@ export const useImportProducts = (
   >
 ) => {
   return useMutation({
-    mutationFn: (payload) => sdk.admin.product.import(payload),
+    mutationFn: (payload) => sdk.admin.product.createImport(payload),
     onSuccess: (data, variables, context) => {
       options?.onSuccess?.(data, variables, context)
     },
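The dashboard hook above now calls the renamed `createImport` method. For orientation, a minimal consumption sketch; the component wiring is hypothetical, and the destructured `transaction_id` assumes the import endpoint keeps returning one:

    // Hypothetical component code using the hook from products.tsx.
    const { mutateAsync: importProducts } = useImportProducts()

    const handleImport = async (file: File) => {
      // createImport uploads the CSV (via presigned PUT or the
      // /admin/uploads fallback added in the js-sdk below) and starts
      // the import workflow in a paused, unconfirmed state.
      const { transaction_id } = await importProducts({ file })

      // The transaction id is later passed to the confirm endpoint.
      console.log(`import ready to confirm: ${transaction_id}`)
    }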
diff --git a/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts b/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts
index e3efaeb17a..424145b58c 100644
--- a/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts
+++ b/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts
@@ -14,6 +14,8 @@ export type NormalizeProductCsvV1StepInput = string
 
 export const normalizeCsvToChunksStepId = "normalize-product-csv-to-chunks"
 
+type Chunk = { id: string; toCreate: number; toUpdate: number }
+
 /**
  * Processes a chunk of products by writing them to a file. Later the
  * file will be processed after the import has been confirmed.
@@ -23,7 +25,7 @@ async function processChunk(
   fileKey: string,
   csvRows: ReturnType<(typeof CSVNormalizer)["preProcess"]>[],
   currentRowNumber: number
-) {
+): Promise<Chunk> {
   const normalizer = new CSVNormalizer(csvRows)
   const products = normalizer.proccess(currentRowNumber)
 
@@ -76,7 +78,7 @@ async function createChunks(
   file: IFileModuleService,
   fileKey: string,
   stream: Parser
-) {
+): Promise<Chunk[]> {
   /**
    * The row under process
    */
@@ -97,7 +99,7 @@ async function createChunks(
    * Validated chunks that have been written with the file
    * provider
    */
-  const chunks: { id: string; toCreate: number; toUpdate: number }[] = []
+  const chunks: Chunk[] = []
 
   /**
    * Currently collected rows to be processed as one chunk
@@ -192,36 +194,51 @@ export const normalizeCsvToChunksStep = createStep(
   normalizeCsvToChunksStepId,
   async (fileKey: NormalizeProductCsvV1StepInput, { container }) => {
-    const file = container.resolve(Modules.FILE)
-    const contents = await file.getDownloadStream(fileKey)
-    const chunks = await createChunks(
-      file,
-      fileKey,
-      contents.pipe(
-        parse({
+    return new Promise<
+      StepResponse<{
+        chunks: Chunk[]
+        summary: Omit<Chunk, "id">
+      }>
+    >(async (resolve, reject) => {
+      try {
+        const file = container.resolve(Modules.FILE)
+        const contents = await file.getDownloadStream(fileKey)
+        const transformer = parse({
           columns: true,
           skip_empty_lines: true,
         })
-      )
-    )
-    const summary = chunks.reduce<{ toCreate: number; toUpdate: number }>(
-      (result, chunk) => {
-        result.toCreate = result.toCreate + chunk.toCreate
-        result.toUpdate = result.toUpdate + chunk.toUpdate
-        return result
-      },
-      { toCreate: 0, toUpdate: 0 }
-    )
+        contents.on("error", reject)
 
-    /**
-     * Delete CSV file once we have the chunks
-     */
-    await file.deleteFiles(fileKey)
+        const chunks = await createChunks(
+          file,
+          fileKey,
+          contents.pipe(transformer)
+        )
 
-    return new StepResponse({
-      chunks,
-      summary,
+        const summary = chunks.reduce<{ toCreate: number; toUpdate: number }>(
+          (result, chunk) => {
+            result.toCreate = result.toCreate + chunk.toCreate
+            result.toUpdate = result.toUpdate + chunk.toUpdate
+            return result
+          },
+          { toCreate: 0, toUpdate: 0 }
+        )
+
+        /**
+         * Delete CSV file once we have the chunks
+         */
+        await file.deleteFiles(fileKey)
+
+        resolve(
+          new StepResponse({
+            chunks,
+            summary,
+          })
+        )
+      } catch (error) {
+        reject(error)
+      }
     })
   }
 )
diff --git a/packages/core/core-flows/src/product/steps/process-import-chunks.ts b/packages/core/core-flows/src/product/steps/process-import-chunks.ts
index c256a84f6a..12932cf488 100644
--- a/packages/core/core-flows/src/product/steps/process-import-chunks.ts
+++ b/packages/core/core-flows/src/product/steps/process-import-chunks.ts
@@ -12,7 +12,10 @@ export const processImportChunksStepId = "process-import-chunks"
  * const data = parseProductCsvStep("products.csv")
  */
 export const processImportChunksStep = createStep(
-  processImportChunksStepId,
+  {
+    name: processImportChunksStepId,
+    async: true,
+  },
   async (input: { chunks: { id: string }[] }, { container }) => {
     const file = container.resolve(Modules.FILE)
 
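With the step registered as `async: true`, the workflow engine pauses the import transaction at this step until it is completed externally — here, when the admin confirms the import. A sketch of how a confirm handler can resume the paused step, following the long-running-workflow pattern (the workflow id and the route wiring are assumptions):

    import { Modules, TransactionHandlerType } from "@medusajs/framework/utils"
    import { StepResponse } from "@medusajs/framework/workflows-sdk"

    // Inside the confirm route handler: mark the async step as done so
    // the import workflow continues past process-import-chunks.
    const workflowEngine = req.scope.resolve(Modules.WORKFLOW_ENGINE)

    await workflowEngine.setStepSuccess({
      idempotencyKey: {
        action: TransactionHandlerType.INVOKE,
        transactionId: req.params.transaction_id,
        stepId: processImportChunksStepId, // "process-import-chunks"
        workflowId: "import-products", // assumed id of the import workflow
      },
      stepResponse: new StepResponse("confirmed"),
    })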
diff --git a/packages/core/js-sdk/src/admin/product.ts b/packages/core/js-sdk/src/admin/product.ts
index 2ce5955963..c0326b268b 100644
--- a/packages/core/js-sdk/src/admin/product.ts
+++ b/packages/core/js-sdk/src/admin/product.ts
@@ -1,5 +1,5 @@
 import { HttpTypes, SelectParams } from "@medusajs/types"
-import { Client } from "../client"
+import { Client, FetchError } from "../client"
 import { ClientHeaders } from "../types"
 
 export class Product {
@@ -114,10 +114,40 @@ export class Product {
    * special headers in this request, since external services like S3 will
    * give a CORS error.
    */
-    await fetch(response.url, {
-      method: "PUT",
-      body: body.file,
-    })
+    if (
+      response.url.startsWith("http://") ||
+      response.url.startsWith("https://")
+    ) {
+      const uploadResponse = await fetch(response.url, {
+        method: "PUT",
+        body: body.file,
+      })
+      if (uploadResponse.status >= 400) {
+        throw new FetchError(
+          uploadResponse.statusText,
+          uploadResponse.statusText,
+          uploadResponse.status
+        )
+      }
+    } else {
+      const form = new FormData()
+      form.append("files", body.file)
+
+      const localUploadResponse = await this.client.fetch<{
+        files: HttpTypes.AdminUploadFile[]
+      }>("admin/uploads", {
+        method: "POST",
+        headers: {
+          ...headers,
+          // Let the browser determine the content type.
+          "content-type": null,
+        },
+        body: form,
+        query,
+      })
+
+      response.filename = localUploadResponse.files[0].id
+    }
 
     /**
      * Perform products import using the uploaded file name
@@ -164,7 +194,7 @@
     headers?: ClientHeaders
   ) {
     return await this.client.fetch<{}>(
-      `/admin/products/import/${transactionId}/confirm`,
+      `/admin/products/imports/${transactionId}/confirm`,
       {
         method: "POST",
         headers,
diff --git a/packages/modules/providers/file-local/src/services/local-file.ts b/packages/modules/providers/file-local/src/services/local-file.ts
index 7eacc6cba2..60c464b583 100644
--- a/packages/modules/providers/file-local/src/services/local-file.ts
+++ b/packages/modules/providers/file-local/src/services/local-file.ts
@@ -138,16 +138,6 @@ export class LocalFileService extends AbstractFileProviderService {
    * Returns the pre-signed URL that the client (frontend) can use to trigger
    * a file upload. In this case, the Medusa backend will implement the
    * "/upload" endpoint to perform the file upload.
-   *
-   * Since, we do not want the client to perform link detection on the frontend
-   * and then prepare a different kind of request for cloud providers and different
-   * request for the local server, we will have to make these URLs self sufficient.
-   *
-   * What is a self sufficient URL
-   *
-   * - There should be no need to specify the MIME type or filename separately in request body (cloud providers don't allow it).
-   * - There should be no need to pass auth headers like cookies. Again cloud providers
-   *   won't allow it and will likely result in a CORS error.
   */
   async getPresignedUploadUrl(
     fileData: FileTypes.ProviderGetPresignedUploadUrlDTO
@@ -159,18 +149,8 @@ export class LocalFileService extends AbstractFileProviderService {
       )
     }
 
-    const uploadUrl = new URL(
-      "upload",
-      `${this.backendUrl_.replace(/\/$/, "")}/`
-    )
-
-    uploadUrl.searchParams.set("filename", fileData.filename)
-    if (fileData.mimeType) {
-      uploadUrl.searchParams.set("type", fileData.mimeType)
-    }
-
     return {
-      url: uploadUrl.toString(),
+      url: "/admin/uploads",
       key: fileData.filename,
     }
   }
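The local provider now returns a relative `/admin/uploads` URL, which is what makes the SDK's protocol check above take the multipart fallback. For contrast, a cloud-backed provider returns an absolute URL and so hits the direct PUT branch; a hypothetical S3 sketch (bucket and region are placeholders):

    import { PutObjectCommand, S3Client } from "@aws-sdk/client-s3"
    import { getSignedUrl } from "@aws-sdk/s3-request-presigner"

    // Hypothetical cloud counterpart to getPresignedUploadUrl: the
    // absolute https:// URL routes the SDK through the direct PUT branch.
    async function getPresignedUploadUrl(filename: string) {
      const client = new S3Client({ region: "us-east-1" })
      const command = new PutObjectCommand({
        Bucket: "my-bucket",
        Key: filename,
      })

      return {
        url: await getSignedUrl(client, command, { expiresIn: 3600 }),
        key: filename,
      }
    }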