feat: Add s3-compatible plugin for the file module (#7143)

This commit is contained in:
Stevche Radevski
2024-04-25 09:06:47 +02:00
committed by GitHub
parent abf1283ab6
commit f341265f42
18 changed files with 1599 additions and 4 deletions

View File

@@ -42,7 +42,7 @@ medusaIntegrationTestRunner({
await createAdminUser(dbConnection, adminHeaders, appContainer)
})
describe("POST /admin/uploads with", () => {
describe("POST /admin/uploads", () => {
beforeEach(async () => {})
it("uploads a single file successfully", async () => {

View File

@@ -1,4 +1,4 @@
import { FileTypes } from "@medusajs/types"
import { FileTypes, LocalFileServiceOptions } from "@medusajs/types"
import { AbstractFileProviderService, MedusaError } from "@medusajs/utils"
import fs from "fs/promises"
import path from "path"
@@ -8,7 +8,7 @@ export class LocalFileService extends AbstractFileProviderService {
protected uploadDir_: string
protected backendUrl_: string
constructor(_, options) {
constructor(_, options: LocalFileServiceOptions) {
super()
this.uploadDir_ = options?.upload_dir || "uploads"
this.backendUrl_ = options?.backend_url || "http://localhost:9000"

4
packages/file-s3/.gitignore vendored Normal file
View File

@@ -0,0 +1,4 @@
dist
node_modules
.DS_Store
yarn.lock

View File

Binary file not shown.

After

Width:  |  Height:  |  Size: 40 KiB

View File

@@ -0,0 +1,79 @@
import fs from "fs/promises"
import axios from "axios"
import { S3FileService } from "../../src/services/s3-file"
// Allow up to 100s per test: each case performs real network round-trips to S3.
jest.setTimeout(100000)

// Note: This test hits the S3 service, and it is mainly meant to be run manually after setting all the envvars below.
// We can also set up some test buckets in our pipeline to run this test, but it is not really that important to do so for now.
describe.skip("S3 File Plugin", () => {
  let s3Service: S3FileService
  let fixtureImagePath: string

  beforeAll(() => {
    // Fixture is resolved from the package root, where jest is invoked.
    fixtureImagePath =
      process.cwd() + "/integration-tests/__fixtures__/catphoto.jpg"
    s3Service = new S3FileService(
      {
        // The service only needs a Logger-shaped dependency; console suffices here.
        logger: console as any,
      },
      {
        endpoint: process.env.S3_TEST_ENDPOINT ?? "",
        file_url: process.env.S3_TEST_FILE_URL ?? "",
        access_key_id: process.env.S3_TEST_ACCESS_KEY_ID ?? "",
        secret_access_key: process.env.S3_TEST_SECRET_ACCESS_KEY ?? "",
        region: process.env.S3_TEST_REGION ?? "",
        bucket: process.env.S3_TEST_BUCKET ?? "",
        // Dedicated key prefix so test objects are easy to find and clean up.
        prefix: "tests/",
        // Local S3-compatible endpoints are reached over plain HTTP and use
        // path-style bucket addressing, hence the extra client flags.
        additional_client_config: process.env.S3_TEST_ENDPOINT?.includes(
          "localhost"
        )
          ? {
              sslEnabled: false,
              s3ForcePathStyle: true,
            }
          : {},
      }
    )
  })

  it("uploads, reads, and then deletes a file successfully", async () => {
    const fileContent = await fs.readFile(fixtureImagePath)
    // The provider's upload contract carries the payload as a binary string.
    const fixtureAsBinary = fileContent.toString("binary")

    const resp = await s3Service.upload({
      filename: "catphoto.jpg",
      mimeType: "image/jpeg",
      content: fixtureAsBinary,
    })

    // Keys are built as "<prefix><name>-<ulid><ext>", hence the wildcard
    // between the base name and the extension.
    expect(resp).toEqual({
      key: expect.stringMatching(/tests\/catphoto.*\.jpg/),
      url: expect.stringMatching(/https:\/\/.*\.jpg/),
    })

    const signedUrl = await s3Service.getPresignedDownloadUrl({
      fileKey: resp.key,
    })
    const signedUrlFile = Buffer.from(
      await axios
        .get(signedUrl, { responseType: "arraybuffer" })
        .then((r) => r.data)
    )
    // Round-trip check: downloaded bytes must match what was uploaded.
    expect(signedUrlFile.toString("binary")).toEqual(fixtureAsBinary)

    await s3Service.delete({ fileKey: resp.key })

    // TODO: Currently the presignedURL will be returned even if the file doesn't exist. Should we check for existence first?
    const deletedFileUrl = await s3Service.getPresignedDownloadUrl({
      fileKey: resp.key,
    })
    // axios rejects on non-2xx; catching the error lets us assert on its response.
    const { response } = await axios
      .get(deletedFileUrl, { responseType: "arraybuffer" })
      .catch((e) => e)
    expect(response.status).toEqual(404)
  })
})

View File

@@ -0,0 +1,16 @@
module.exports = {
globals: {
"ts-jest": {
tsconfig: "tsconfig.spec.json",
isolatedModules: false,
},
},
transform: {
"^.+\\.[jt]s?$": "ts-jest",
},
testEnvironment: `node`,
moduleNameMapper: {
"^axios$": "axios/dist/node/axios.cjs",
},
moduleFileExtensions: [`js`, `jsx`, `ts`, `tsx`, `json`],
}

View File

@@ -0,0 +1,43 @@
{
"name": "@medusajs/file-s3",
"version": "0.0.2",
"description": "S3 protocol file storage for Medusa. Supports any S3-compatible storage provider",
"main": "dist/index.js",
"repository": {
"type": "git",
"url": "https://github.com/medusajs/medusa",
"directory": "packages/file-s3"
},
"files": [
"dist"
],
"engines": {
"node": ">=16"
},
"author": "Medusa",
"license": "MIT",
"scripts": {
"prepublishOnly": "cross-env NODE_ENV=production tsc --build",
"test": "jest --passWithNoTests src",
"test:integration": "jest --forceExit -- integration-tests/**/__tests__/**/*.spec.ts",
"build": "rimraf dist && tsc -p ./tsconfig.json",
"watch": "tsc --watch"
},
"devDependencies": {
"axios": "^1.6.8",
"cross-env": "^5.2.1",
"jest": "^25.5.4",
"rimraf": "^5.0.1",
"typescript": "^4.9.5"
},
"dependencies": {
"@aws-sdk/client-s3": "^3.556.0",
"@aws-sdk/s3-request-presigner": "^3.556.0",
"@medusajs/utils": "^1.11.7",
"ulid": "^2.3.0"
},
"keywords": [
"medusa-plugin",
"medusa-plugin-s3"
]
}

View File

@@ -0,0 +1,10 @@
import { ModuleProviderExports } from "@medusajs/types"
import { S3FileService } from "./services/s3-file"

// Provider entry point: exposes the S3-compatible file service to the
// Medusa file module's provider loader.
const s3ProviderExport: ModuleProviderExports = {
  services: [S3FileService],
}

export default s3ProviderExport

View File

@@ -0,0 +1,154 @@
import {
DeleteObjectCommand,
GetObjectCommand,
PutObjectCommand,
S3Client,
S3ClientConfigType,
} from "@aws-sdk/client-s3"
import { getSignedUrl } from "@aws-sdk/s3-request-presigner"
import { FileTypes, Logger, S3FileServiceOptions } from "@medusajs/types"
import { AbstractFileProviderService, MedusaError } from "@medusajs/utils"
import path from "path"
import { ulid } from "ulid"
type InjectedDependencies = {
  logger: Logger
}

/**
 * Internal, camelCased view of `S3FileServiceOptions` with defaults applied
 * by the constructor.
 */
interface S3FileServiceConfig {
  // TODO: We probably don't need this as either the service should return it or we should be able to calculate it.
  fileUrl: string
  accessKeyId: string
  secretAccessKey: string
  region: string
  bucket: string
  prefix?: string
  endpoint?: string
  cacheControl?: string
  downloadFileDuration?: number
  additionalClientConfig?: Record<string, any>
}

// FUTURE: At one point we will probably need to support authenticating with IAM roles instead.
/**
 * File provider that stores files in any S3-compatible object store via the
 * AWS SDK v3 client. Supports upload, delete, and presigned download URLs.
 */
export class S3FileService extends AbstractFileProviderService {
  // Identifier under which this provider is registered with the file module.
  static identifier = "s3"

  protected config_: S3FileServiceConfig
  protected logger_: Logger
  protected client_: S3Client

  /**
   * @param options - snake_cased module options (see `S3FileServiceOptions`).
   *   Defaults: empty key prefix, "public, max-age=31536000" cache control,
   *   and a one-hour (3600s) presigned-URL expiry.
   */
  constructor({ logger }: InjectedDependencies, options: S3FileServiceOptions) {
    super()

    this.config_ = {
      fileUrl: options.file_url,
      accessKeyId: options.access_key_id,
      secretAccessKey: options.secret_access_key,
      region: options.region,
      bucket: options.bucket,
      prefix: options.prefix ?? "",
      endpoint: options.endpoint,
      cacheControl: options.cache_control ?? "public, max-age=31536000",
      downloadFileDuration: options.download_file_duration ?? 60 * 60,
      additionalClientConfig: options.additional_client_config ?? {},
    }
    this.logger_ = logger
    this.client_ = this.getClient()
  }

  /**
   * Builds the S3 client from the resolved config. `additionalClientConfig`
   * is spread last so it can override any of the fields set above (eg.
   * path-style addressing for local S3 emulators).
   */
  protected getClient() {
    const config: S3ClientConfigType = {
      credentials: {
        accessKeyId: this.config_.accessKeyId,
        secretAccessKey: this.config_.secretAccessKey,
      },
      region: this.config_.region,
      endpoint: this.config_.endpoint,
      ...this.config_.additionalClientConfig,
    }

    return new S3Client(config)
  }

  /**
   * Uploads a file to the configured bucket.
   *
   * The object key is `<prefix><name>-<ulid()><ext>`, so repeated uploads of
   * the same filename never collide.
   *
   * @param file - upload payload; `content` is read as a binary string.
   * @returns the public URL (`<fileUrl>/<key>`) and the generated object key.
   * @throws MedusaError(INVALID_DATA) when no file or no filename is given.
   */
  async upload(
    file: FileTypes.ProviderUploadFileDTO
  ): Promise<FileTypes.ProviderFileResultDTO> {
    if (!file) {
      throw new MedusaError(MedusaError.Types.INVALID_DATA, `No file provided`)
    }

    if (!file.filename) {
      throw new MedusaError(
        MedusaError.Types.INVALID_DATA,
        `No filename provided`
      )
    }

    const parsedFilename = path.parse(file.filename)
    // TODO: Allow passing a full path for storage per request, not as a global config.
    const fileKey = `${this.config_.prefix}${parsedFilename.name}-${ulid()}${
      parsedFilename.ext
    }`
    const content = Buffer.from(file.content, "binary")

    const command = new PutObjectCommand({
      // TODO: Add support for private files
      // We probably also want to support a separate bucket altogether for private files
      // protected private_bucket_: string
      // protected private_access_key_id_: string
      // protected private_secret_access_key_: string
      // ACL: options.acl ?? (options.isProtected ? "private" : "public-read"),
      Bucket: this.config_.bucket,
      Body: content,
      Key: fileKey,
      ContentType: file.mimeType,
      CacheControl: this.config_.cacheControl,
      // Note: We could potentially set the content disposition when uploading,
      // but storing the original filename as metadata should suffice.
      Metadata: {
        "x-amz-meta-original-filename": file.filename,
      },
    })

    try {
      await this.client_.send(command)
    } catch (e) {
      // Log for operators, then rethrow so the caller sees the failed upload.
      this.logger_.error(e)
      throw e
    }

    return {
      url: `${this.config_.fileUrl}/${fileKey}`,
      key: fileKey,
    }
  }

  /**
   * Deletes an object by key. Errors are logged and swallowed (see TODO), so
   * callers are currently not informed of failed deletions.
   */
  async delete(file: FileTypes.ProviderDeleteFileDTO): Promise<void> {
    const command = new DeleteObjectCommand({
      Bucket: this.config_.bucket,
      Key: file.fileKey,
    })

    try {
      await this.client_.send(command)
    } catch (e) {
      // TODO: Rethrow depending on the error (eg. a file not found error is fine, but a failed request should be rethrown)
      this.logger_.error(e)
    }
  }

  /**
   * Returns a presigned GET URL for the given key, valid for
   * `downloadFileDuration` seconds. A URL is produced even if the object does
   * not exist; fetching it then fails (eg. with a 404).
   */
  async getPresignedDownloadUrl(
    fileData: FileTypes.ProviderGetFileDTO
  ): Promise<string> {
    // TODO: Allow passing content disposition when getting a presigned URL
    const command = new GetObjectCommand({
      Bucket: this.config_.bucket,
      Key: `${fileData.fileKey}`,
    })

    return await getSignedUrl(this.client_, command, {
      expiresIn: this.config_.downloadFileDuration,
    })
  }
}

View File

@@ -0,0 +1,36 @@
{
"compilerOptions": {
"lib": [
"es5",
"es6",
"es2019"
],
"target": "es5",
"jsx": "react-jsx" /* Specify JSX code generation: 'preserve', 'react-native', 'react', 'react-jsx', or 'react-jsxdev'. */,
"outDir": "./dist",
"esModuleInterop": true,
"declaration": true,
"module": "commonjs",
"moduleResolution": "node",
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"noImplicitReturns": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"noImplicitThis": true,
"allowJs": true,
"skipLibCheck": true,
"downlevelIteration": true, // to use ES5 specific tooling
"inlineSourceMap": true /* Embed the source map inside the emitted JavaScript instead of writing a separate .map file. */
},
"include": ["src"],
"exclude": [
"dist",
"build",
"src/**/__tests__",
"src/**/__mocks__",
"src/**/__fixtures__",
"node_modules",
".eslintrc.js"
]
}

View File

@@ -0,0 +1,5 @@
{
"extends": "./tsconfig.json",
"include": ["src"],
"exclude": ["node_modules"]
}

View File

@@ -33,6 +33,8 @@ export default class FileModuleService implements FileTypes.IFileModuleService {
data: CreateFileDTO[] | CreateFileDTO
): Promise<FileDTO[] | FileDTO> {
const input = Array.isArray(data) ? data : [data]
// TODO: Validate file mime type, have config for allowed types
const files = await Promise.all(
input.map((file) => this.fileProviderService_.upload(file))
)

View File

@@ -2,3 +2,4 @@ export * from "./common"
export * from "./mutations"
export * from "./service"
export * from "./provider"
export * from "./providers"

View File

@@ -0,0 +1,2 @@
export * from "./s3"
export * from "./local"

View File

@@ -0,0 +1,4 @@
/**
 * Options for the local (filesystem-backed) file provider.
 */
export interface LocalFileServiceOptions {
  /**
   * Directory uploads are written to. The service falls back to "uploads"
   * when unset (see LocalFileService's constructor).
   */
  upload_dir?: string
  /**
   * Base URL used when building public file URLs. The service falls back to
   * "http://localhost:9000" when unset.
   */
  backend_url?: string
}

View File

@@ -0,0 +1,12 @@
/**
 * Options for the S3-compatible file provider. Keys are snake_cased per
 * Medusa's module-options convention; S3FileService maps them onto its
 * internal camelCased config.
 */
export interface S3FileServiceOptions {
  /** Base URL files are publicly reachable under; upload results are `${file_url}/${key}`. */
  file_url: string
  /** Access key id for the S3 client credentials. */
  access_key_id: string
  /** Secret access key for the S3 client credentials. */
  secret_access_key: string
  /** Region the bucket lives in. */
  region: string
  /** Bucket that files are uploaded to and deleted from. */
  bucket: string
  /** Key prefix prepended to every generated object key. Defaults to "". */
  prefix?: string
  /** Custom endpoint — set this for non-AWS, S3-compatible providers. */
  endpoint?: string
  /** Cache-Control header stored with uploaded objects. Defaults to "public, max-age=31536000". */
  cache_control?: string
  /** Expiry of presigned download URLs, in seconds. Defaults to 3600 (one hour). */
  download_file_duration?: number
  /** Extra options spread into the S3 client config (eg. path-style addressing for local emulators). */
  additional_client_config?: Record<string, any>
}

1229
yarn.lock

File diff suppressed because it is too large Load Diff