diff --git a/.changeset/rude-mirrors-hang.md b/.changeset/rude-mirrors-hang.md new file mode 100644 index 0000000000..019c5da5b1 --- /dev/null +++ b/.changeset/rude-mirrors-hang.md @@ -0,0 +1,8 @@ +--- +"@medusajs/medusa": patch +"@medusajs/file-local": patch +"@medusajs/file-s3": patch +"@medusajs/core-flows": patch +--- + +fix(medusa,file-local,file-s3,core-flows): fix CSV parsing of special characters diff --git a/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts b/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts index 4dabcf9100..cde0739707 100644 --- a/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts +++ b/packages/core/core-flows/src/product/steps/normalize-products-to-chunks.ts @@ -1,11 +1,11 @@ import { parse, Parser } from "csv-parse" import type { HttpTypes, IFileModuleService } from "@medusajs/framework/types" import { - Modules, CSVNormalizer, + Modules, productValidators, } from "@medusajs/framework/utils" -import { StepResponse, createStep } from "@medusajs/framework/workflows-sdk" +import { createStep, StepResponse } from "@medusajs/framework/workflows-sdk" /** * The CSV file content to parse. 
@@ -203,6 +203,7 @@ export const normalizeCsvToChunksStep = createStep( try { const file = container.resolve(Modules.FILE) const contents = await file.getDownloadStream(fileKey) + const transformer = parse({ columns: true, skip_empty_lines: true, diff --git a/packages/medusa/src/api/admin/uploads/route.ts b/packages/medusa/src/api/admin/uploads/route.ts index ef3750fd13..59673dda1f 100644 --- a/packages/medusa/src/api/admin/uploads/route.ts +++ b/packages/medusa/src/api/admin/uploads/route.ts @@ -24,7 +24,7 @@ export const POST = async ( files: input?.map((f) => ({ filename: f.originalname, mimeType: f.mimetype, - content: f.buffer.toString("binary"), + content: f.buffer.toString("base64"), access: "public", })), }, diff --git a/packages/modules/providers/file-local/src/services/local-file.ts b/packages/modules/providers/file-local/src/services/local-file.ts index 60c464b583..5ca891fbbe 100644 --- a/packages/modules/providers/file-local/src/services/local-file.ts +++ b/packages/modules/providers/file-local/src/services/local-file.ts @@ -57,7 +57,19 @@ export class LocalFileService extends AbstractFileProviderService { const filePath = this.getUploadFilePath(baseDir, fileKey) const fileUrl = this.getUploadFileUrl(fileKey) - const content = Buffer.from(file.content as string, "binary") + let content: Buffer + try { + const decoded = Buffer.from(file.content, "base64") + if (decoded.toString("base64") === file.content) { + content = decoded + } else { + content = Buffer.from(file.content, "utf8") + } + } catch { + // Last-resort fallback: binary + content = Buffer.from(file.content, "binary") + } + await fs.writeFile(filePath, content) return { diff --git a/packages/modules/providers/file-s3/src/services/s3-file.ts b/packages/modules/providers/file-s3/src/services/s3-file.ts index 53e5e4fe05..4caedd5587 100644 --- a/packages/modules/providers/file-s3/src/services/s3-file.ts +++ b/packages/modules/providers/file-s3/src/services/s3-file.ts @@ -120,7 +120,19 @@ export 
class S3FileService extends AbstractFileProviderService { parsedFilename.ext }` - const content = Buffer.from(file.content, "binary") + let content: Buffer + try { + const decoded = Buffer.from(file.content, "base64") + if (decoded.toString("base64") === file.content) { + content = decoded + } else { + content = Buffer.from(file.content, "utf8") + } + } catch { + // Last-resort fallback: binary + content = Buffer.from(file.content, "binary") + } + const command = new PutObjectCommand({ // We probably also want to support a separate bucket altogether for private files // protected private_bucket_: string