feat: wire up direct uploads with local file provider (#12643)
.changeset/bright-parents-know.md (new file, 9 lines added)
@@ -0,0 +1,9 @@
+---
+"@medusajs/file-local": patch
+"@medusajs/core-flows": patch
+"@medusajs/js-sdk": patch
+"@medusajs/dashboard": patch
+"integration-tests-http": patch
+---
+
+feat: wire up direct uploads with local file provider
@@ -605,6 +605,22 @@ medusaIntegrationTestRunner({
           'Invalid column name(s) "Product field"'
         )
       })
+
+      it("should handle error when the source file does not exists", async () => {
+        const { body, meta } = getUploadReq({
+          name: "test.csv",
+          key: "test.csv",
+          size: 0,
+        })
+
+        const batchJobRes = await api
+          .post("/admin/products/imports", body, meta)
+          .catch((e) => e)
+
+        expect(batchJobRes.response.data.message).toEqual(
+          "An unknown error occurred."
+        )
+      })
     })
   },
 })
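Note on the new test: it drives the imports endpoint with a file key that was never uploaded and asserts that the workflow surfaces a generic error. Below is a minimal sketch of the same scenario outside the test runner; the payload field names are assumptions, since the real body is produced by the `getUploadReq` helper, which is not part of this diff.

```ts
// Hedged sketch: calling the import endpoint with a file key that does not exist.
// The body field names are assumptions; only the route and the expected error
// message come from the test above.
import axios, { AxiosError } from "axios"

const api = axios.create({
  baseURL: "http://localhost:9000",
  headers: { Authorization: "Bearer <admin-token>" }, // assumed admin auth
})

async function importMissingFile(): Promise<void> {
  try {
    await api.post("/admin/products/imports", {
      file_key: "test.csv", // assumed field name for the uploaded file reference
      originalname: "test.csv", // assumed field name
    })
  } catch (e) {
    const err = e as AxiosError<{ message: string }>
    // The missing source file is reported as a generic error.
    console.log(err.response?.data.message) // "An unknown error occurred."
  }
}
```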

@@ -55,7 +55,7 @@ medusaIntegrationTestRunner({
         extension: "csv",
         mime_type: "text/csv",
         size: file.size,
-        url: expect.stringContaining(response.data.filename),
+        url: "/admin/uploads",
       })
     )
     expect(response.status).toEqual(200)
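With the local file provider, the presigned-upload response no longer embeds the generated filename in an absolute URL; its `url` field is now the relative `/admin/uploads` route. A sketch of the response shape asserted above (the type name is an assumption):

```ts
// Illustrative type for the presigned-upload response checked by the test.
// Field names come from the assertion above; the type name is an assumption.
type PresignedUploadResponse = {
  filename: string
  extension: string // "csv"
  mime_type: string // "text/csv"
  size: number // file.size
  url: string // now "/admin/uploads" for the local provider
}
```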

@@ -400,7 +400,7 @@ export const useImportProducts = (
   >
 ) => {
   return useMutation({
-    mutationFn: (payload) => sdk.admin.product.import(payload),
+    mutationFn: (payload) => sdk.admin.product.createImport(payload),
     onSuccess: (data, variables, context) => {
       options?.onSuccess?.(data, variables, context)
     },
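The dashboard mutation now calls the renamed SDK method `createImport`. A hedged sketch of consuming the hook follows; the import path and the `{ file }` payload shape are assumptions inferred from the SDK change further below.

```ts
// Hedged sketch of using the updated hook from dashboard code.
// The import path and payload shape are assumptions, not part of this diff.
import { useImportProducts } from "../hooks/api/products"

export function useHandleImportFile() {
  const { mutateAsync, isPending } = useImportProducts()

  const handleFile = async (file: File) => {
    // Internally this now calls sdk.admin.product.createImport(payload),
    // which uploads the CSV first and then starts the import workflow.
    await mutateAsync({ file })
  }

  return { handleFile, isPending }
}
```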

@@ -14,6 +14,8 @@ export type NormalizeProductCsvV1StepInput = string
 
 export const normalizeCsvToChunksStepId = "normalize-product-csv-to-chunks"
 
+type Chunk = { id: string; toCreate: number; toUpdate: number }
+
 /**
  * Processes a chunk of products by writing them to a file. Later the
  * file will be processed after the import has been confirmed.

@@ -23,7 +25,7 @@ async function processChunk(
   fileKey: string,
   csvRows: ReturnType<(typeof CSVNormalizer)["preProcess"]>[],
   currentRowNumber: number
-) {
+): Promise<Chunk> {
   const normalizer = new CSVNormalizer(csvRows)
   const products = normalizer.proccess(currentRowNumber)
 

@@ -76,7 +78,7 @@ async function createChunks(
   file: IFileModuleService,
   fileKey: string,
   stream: Parser
-) {
+): Promise<Chunk[]> {
   /**
    * The row under process
    */

@@ -97,7 +99,7 @@ async function createChunks(
    * Validated chunks that have been written with the file
    * provider
    */
-  const chunks: { id: string; toCreate: number; toUpdate: number }[] = []
+  const chunks: Chunk[] = []
 
   /**
    * Currently collected rows to be processed as one chunk

@@ -192,36 +194,51 @@ async function createChunks(
 export const normalizeCsvToChunksStep = createStep(
   normalizeCsvToChunksStepId,
   async (fileKey: NormalizeProductCsvV1StepInput, { container }) => {
-    const file = container.resolve(Modules.FILE)
-    const contents = await file.getDownloadStream(fileKey)
-    const chunks = await createChunks(
-      file,
-      fileKey,
-      contents.pipe(
-        parse({
-          columns: true,
-          skip_empty_lines: true,
-        })
-      )
-    )
-
-    const summary = chunks.reduce<{ toCreate: number; toUpdate: number }>(
-      (result, chunk) => {
-        result.toCreate = result.toCreate + chunk.toCreate
-        result.toUpdate = result.toUpdate + chunk.toUpdate
-        return result
-      },
-      { toCreate: 0, toUpdate: 0 }
-    )
-
-    /**
-     * Delete CSV file once we have the chunks
-     */
-    await file.deleteFiles(fileKey)
-
-    return new StepResponse({
-      chunks,
-      summary,
-    })
+    return new Promise<
+      StepResponse<{
+        chunks: Chunk[]
+        summary: Omit<Chunk, "id">
+      }>
+    >(async (resolve, reject) => {
+      try {
+        const file = container.resolve(Modules.FILE)
+        const contents = await file.getDownloadStream(fileKey)
+        const transformer = parse({
+          columns: true,
+          skip_empty_lines: true,
+        })
+
+        contents.on("error", reject)
+
+        const chunks = await createChunks(
+          file,
+          fileKey,
+          contents.pipe(transformer)
+        )
+
+        const summary = chunks.reduce<{ toCreate: number; toUpdate: number }>(
+          (result, chunk) => {
+            result.toCreate = result.toCreate + chunk.toCreate
+            result.toUpdate = result.toUpdate + chunk.toUpdate
+            return result
+          },
+          { toCreate: 0, toUpdate: 0 }
+        )
+
+        /**
+         * Delete CSV file once we have the chunks
+         */
+        await file.deleteFiles(fileKey)
+
+        resolve(
+          new StepResponse({
+            chunks,
+            summary,
+          })
+        )
+      } catch (error) {
+        reject(error)
+      }
+    })
   }
 )
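The step body is now wrapped in an explicit Promise so that `"error"` events from the download stream reject the step (via `contents.on("error", reject)`) instead of escaping the async handler as unhandled stream errors. The pattern in isolation, as a hedged sketch with illustrative names:

```ts
// Standalone sketch of the error-propagation pattern used above: wrap stream
// consumption in a Promise so stream "error" events settle the promise instead
// of surfacing as unhandled events. readCsv and Row are illustrative names.
import { Readable } from "stream"
import { parse } from "csv-parse"

type Row = Record<string, string>

function readCsv(contents: Readable): Promise<Row[]> {
  return new Promise<Row[]>((resolve, reject) => {
    const rows: Row[] = []
    const transformer = parse({ columns: true, skip_empty_lines: true })

    // Without these handlers a failing stream would never settle the promise.
    contents.on("error", reject)
    transformer.on("error", reject)

    transformer.on("readable", () => {
      let row: Row | null
      while ((row = transformer.read()) !== null) {
        rows.push(row)
      }
    })
    transformer.on("end", () => resolve(rows))

    contents.pipe(transformer)
  })
}
```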

@@ -12,7 +12,10 @@ export const processImportChunksStepId = "process-import-chunks"
  * const data = parseProductCsvStep("products.csv")
  */
 export const processImportChunksStep = createStep(
-  processImportChunksStepId,
+  {
+    name: processImportChunksStepId,
+    async: true,
+  },
   async (input: { chunks: { id: string }[] }, { container }) => {
     const file = container.resolve(Modules.FILE)
 
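`processImportChunksStep` is now declared with the object form of `createStep` and `async: true`, marking it as a long-running step whose completion is reported outside the handler. A hedged sketch of the same shape with an illustrative step; the step id, input, and import path are assumptions and may differ by Medusa version.

```ts
// Hedged sketch of an async (long-running) step. Only the { name, async: true }
// config shape is taken from this diff; everything else is illustrative.
import { createStep } from "@medusajs/framework/workflows-sdk"

export const waitForExternalJobStep = createStep(
  {
    name: "wait-for-external-job", // hypothetical step id
    async: true,
  },
  async (input: { jobId: string }) => {
    // With async: true, returning from the handler does not complete the step;
    // completion is signalled later (for example by a subscriber) through the
    // workflow engine.
    console.log(`queued external job ${input.jobId}`)
  }
)
```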

@@ -1,5 +1,5 @@
 import { HttpTypes, SelectParams } from "@medusajs/types"
-import { Client } from "../client"
+import { Client, FetchError } from "../client"
 import { ClientHeaders } from "../types"
 
 export class Product {

@@ -114,10 +114,40 @@ export class Product {
      * special headers in this request, since external services like S3 will
      * give a CORS error.
      */
-    await fetch(response.url, {
-      method: "PUT",
-      body: body.file,
-    })
+    if (
+      response.url.startsWith("http://") ||
+      response.url.startsWith("https://")
+    ) {
+      const uploadResponse = await fetch(response.url, {
+        method: "PUT",
+        body: body.file,
+      })
+      if (uploadResponse.status >= 400) {
+        throw new FetchError(
+          uploadResponse.statusText,
+          uploadResponse.statusText,
+          uploadResponse.status
+        )
+      }
+    } else {
+      const form = new FormData()
+      form.append("files", body.file)
+
+      const localUploadResponse = await this.client.fetch<{
+        files: HttpTypes.AdminUploadFile
+      }>("admin/uploads", {
+        method: "POST",
+        headers: {
+          ...headers,
+          // Let the browser determine the content type.
+          "content-type": null,
+        },
+        body: form,
+        query,
+      })
+
+      response.filename = localUploadResponse.files[0].id
+    }
 
     /**
      * Perform products import using the uploaded file name
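The SDK now branches on the presigned URL: absolute `http(s)://` URLs (cloud providers) receive a bare `PUT` of the file, while the local provider's relative `/admin/uploads` route is uploaded through the authenticated client as multipart form data, and the returned file id becomes the filename used for the import. A hedged usage sketch from a browser admin app; the constructor options, the `transaction_id` field, and the `confirmImport` method name are assumptions beyond what this diff shows.

```ts
// Hedged usage sketch of the direct-upload import flow. Only createImport and
// the /admin/products/imports/:id/confirm route are confirmed by this diff;
// the rest is an assumption.
import Medusa from "@medusajs/js-sdk"

const sdk = new Medusa({
  baseUrl: "http://localhost:9000", // assumed backend URL
  auth: { type: "session" }, // assumed auth setup
})

export async function runProductImport(file: File) {
  // Uploads the CSV (direct PUT for cloud providers, multipart POST to
  // /admin/uploads for the local provider) and starts the import.
  const { transaction_id } = (await sdk.admin.product.createImport({
    file,
  })) as { transaction_id: string } // assumed response shape

  // Once the dry-run summary has been reviewed, confirm the import.
  await sdk.admin.product.confirmImport(transaction_id) // assumed method name
}
```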

@@ -164,7 +194,7 @@ export class Product {
     headers?: ClientHeaders
   ) {
     return await this.client.fetch<{}>(
-      `/admin/products/import/${transactionId}/confirm`,
+      `/admin/products/imports/${transactionId}/confirm`,
       {
         method: "POST",
         headers,
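The confirm request now targets the pluralized `/admin/products/imports/:transactionId/confirm` route. A hedged sketch of calling it directly; the session-cookie auth and error handling are assumptions.

```ts
// Hedged sketch: confirming a pending import against the corrected route.
// Only the route comes from this diff.
export async function confirmProductImport(
  backendUrl: string,
  transactionId: string
): Promise<void> {
  const res = await fetch(
    `${backendUrl}/admin/products/imports/${transactionId}/confirm`,
    {
      method: "POST",
      credentials: "include", // assumed: admin session cookie
    }
  )

  if (!res.ok) {
    throw new Error(`confirm failed with status ${res.status}`)
  }
}
```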

@@ -138,16 +138,6 @@ export class LocalFileService extends AbstractFileProviderService {
    * Returns the pre-signed URL that the client (frontend) can use to trigger
    * a file upload. In this case, the Medusa backend will implement the
    * "/upload" endpoint to perform the file upload.
-   *
-   * Since, we do not want the client to perform link detection on the frontend
-   * and then prepare a different kind of request for cloud providers and different
-   * request for the local server, we will have to make these URLs self sufficient.
-   *
-   * What is a self sufficient URL
-   *
-   * - There should be no need to specify the MIME type or filename separately in request body (cloud providers don't allow it).
-   * - There should be no need to pass auth headers like cookies. Again cloud providers
-   * won't allow it and will likely result in a CORS error.
    */
   async getPresignedUploadUrl(
     fileData: FileTypes.ProviderGetPresignedUploadUrlDTO

@@ -159,18 +149,8 @@ export class LocalFileService extends AbstractFileProviderService {
       )
     }
 
-    const uploadUrl = new URL(
-      "upload",
-      `${this.backendUrl_.replace(/\/$/, "")}/`
-    )
-
-    uploadUrl.searchParams.set("filename", fileData.filename)
-    if (fileData.mimeType) {
-      uploadUrl.searchParams.set("type", fileData.mimeType)
-    }
-
     return {
-      url: uploadUrl.toString(),
+      url: "/admin/uploads",
       key: fileData.filename,
     }
   }
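After this change the local provider's `getPresignedUploadUrl` simply returns the relative `/admin/uploads` route plus the file key, and the client decides how to use it. A hedged sketch of the branching a caller can apply, mirroring the SDK logic above (the helper and type names are illustrative):

```ts
// Hedged sketch mirroring the SDK branch above: absolute URLs get a raw PUT,
// the local provider's relative route gets an authenticated multipart POST.
type PresignedUpload = { url: string; key: string }

export async function uploadWithPresignedUrl(
  presigned: PresignedUpload,
  file: File
): Promise<void> {
  if (/^https?:\/\//.test(presigned.url)) {
    // Cloud providers (e.g. S3): plain PUT with no extra headers so the
    // request stays CORS-friendly.
    const res = await fetch(presigned.url, { method: "PUT", body: file })
    if (!res.ok) {
      throw new Error(`upload failed: ${res.status}`)
    }
    return
  }

  // Local provider: the URL is a relative backend route ("/admin/uploads"),
  // so upload through the backend as multipart form data instead.
  const form = new FormData()
  form.append("files", file)
  const res = await fetch(presigned.url, {
    method: "POST",
    body: form,
    credentials: "include", // assumed admin session auth
  })
  if (!res.ok) {
    throw new Error(`upload failed: ${res.status}`)
  }
}
```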