diff --git a/.changeset/six-parrots-shave.md b/.changeset/six-parrots-shave.md new file mode 100644 index 0000000000..29cbd800bc --- /dev/null +++ b/.changeset/six-parrots-shave.md @@ -0,0 +1,5 @@ +--- +"@medusajs/framework": patch +--- + + chore(framework): Unified resource loading and exclude non js/ts files diff --git a/packages/core/framework/src/jobs/__fixtures__/plugin/jobs-with-other-files/order-summary.md b/packages/core/framework/src/jobs/__fixtures__/plugin/jobs-with-other-files/order-summary.md new file mode 100644 index 0000000000..ac8e4fbe5a --- /dev/null +++ b/packages/core/framework/src/jobs/__fixtures__/plugin/jobs-with-other-files/order-summary.md @@ -0,0 +1,11 @@ +import { MedusaContainer } from "@medusajs/types" + +export default async function handler(container: MedusaContainer) { + console.log(`You have received 5 orders today`) +} + +export const config = { + name: "summarize-orders", + schedule: "* * * * * *", + numberOfExecutions: 2, +} diff --git a/packages/core/framework/src/jobs/__fixtures__/plugin/jobs-with-other-files/order-summary.txt b/packages/core/framework/src/jobs/__fixtures__/plugin/jobs-with-other-files/order-summary.txt new file mode 100644 index 0000000000..ac8e4fbe5a --- /dev/null +++ b/packages/core/framework/src/jobs/__fixtures__/plugin/jobs-with-other-files/order-summary.txt @@ -0,0 +1,11 @@ +import { MedusaContainer } from "@medusajs/types" + +export default async function handler(container: MedusaContainer) { + console.log(`You have received 5 orders today`) +} + +export const config = { + name: "summarize-orders", + schedule: "* * * * * *", + numberOfExecutions: 2, +} diff --git a/packages/core/framework/src/jobs/__tests__/register-jobs.spec.ts b/packages/core/framework/src/jobs/__tests__/register-jobs.spec.ts index 5dbc885fac..24a8612d2f 100644 --- a/packages/core/framework/src/jobs/__tests__/register-jobs.spec.ts +++ b/packages/core/framework/src/jobs/__tests__/register-jobs.spec.ts @@ -6,13 +6,14 @@ import { 
JobLoader } from "../job-loader" describe("register jobs", () => { WorkflowScheduler.setStorage(new MockSchedulerStorage()) - let jobLoader!: JobLoader - - beforeAll(() => { - jobLoader = new JobLoader(join(__dirname, "../__fixtures__/plugin/jobs")) + afterEach(() => { + WorkflowManager.unregisterAll() }) - it("registers jobs from plugins", async () => { + it("should registers jobs from plugins", async () => { + const jobLoader: JobLoader = new JobLoader( + join(__dirname, "../__fixtures__/plugin/jobs") + ) await jobLoader.load() const workflow = WorkflowManager.getWorkflow("job-summarize-orders") expect(workflow).toBeDefined() @@ -21,4 +22,13 @@ describe("register jobs", () => { numberOfExecutions: 2, }) }) + + it("should not load non js/ts files", async () => { + const jobLoader: JobLoader = new JobLoader( + join(__dirname, "../__fixtures__/plugin/jobs-with-other-files") + ) + await jobLoader.load() + const workflow = WorkflowManager.getWorkflow("job-summarize-orders") + expect(workflow).toBeUndefined() + }) }) diff --git a/packages/core/framework/src/jobs/job-loader.ts b/packages/core/framework/src/jobs/job-loader.ts index a2059981cd..ce984f89b8 100644 --- a/packages/core/framework/src/jobs/job-loader.ts +++ b/packages/core/framework/src/jobs/job-loader.ts @@ -1,21 +1,13 @@ import type { SchedulerOptions } from "@medusajs/orchestration" import { MedusaContainer } from "@medusajs/types" -import { - dynamicImport, - isObject, - MedusaError, - promiseAll, - readDirRecursive, -} from "@medusajs/utils" +import { isObject, MedusaError } from "@medusajs/utils" import { createStep, createWorkflow, StepResponse, } from "@medusajs/workflows-sdk" -import { Dirent } from "fs" -import { access } from "fs/promises" -import { join } from "path" import { logger } from "../logger" +import { ResourceLoader } from "../utils/resource-loader" type CronJobConfig = { name: string @@ -25,27 +17,26 @@ type CronJobConfig = { type CronJobHandler = (container: MedusaContainer) => Promise 
-export class JobLoader { - /** - * The directory from which to load the jobs - * @private - */ - #sourceDir: string | string[] - - /** - * The list of file names to exclude from the subscriber scan - * @private - */ - #excludes: RegExp[] = [ - /index\.js/, - /index\.ts/, - /\.DS_Store/, - /(\.ts\.map|\.js\.map|\.d\.ts|\.md)/, - /^_[^/\\]*(\.[^/\\]+)?$/, - ] +export class JobLoader extends ResourceLoader { + protected resourceName = "job" constructor(sourceDir: string | string[]) { - this.#sourceDir = sourceDir + super(sourceDir) + } + + protected async onFileLoaded( + path: string, + fileExports: { + default: CronJobHandler + config: CronJobConfig + } + ) { + this.validateConfig(fileExports.config) + logger.debug(`Registering job from ${path}.`) + this.register({ + config: fileExports.config, + handler: fileExports.default, + }) } /** @@ -85,7 +76,7 @@ export class JobLoader { * @param handler * @protected */ - protected registerJob({ + protected register({ config, handler, }: { @@ -128,58 +119,8 @@ export class JobLoader { * Load cron jobs from one or multiple source paths */ async load() { - const normalizedSourcePath = Array.isArray(this.#sourceDir) - ? this.#sourceDir - : [this.#sourceDir] + await super.discoverResources() - const promises = normalizedSourcePath.map(async (sourcePath) => { - try { - await access(sourcePath) - } catch { - logger.info(`No job to load from ${sourcePath}. 
skipped.`) - return - } - - return await readDirRecursive(sourcePath).then(async (entries) => { - const fileEntries = entries.filter((entry: Dirent) => { - return ( - !entry.isDirectory() && - !this.#excludes.some((exclude) => exclude.test(entry.name)) - ) - }) - - logger.debug(`Registering jobs from ${sourcePath}.`) - - return await promiseAll( - fileEntries.map(async (entry: Dirent) => { - const fullPath = join(entry.path, entry.name) - - const module_ = await dynamicImport(fullPath) - - const input = { - config: module_.config, - handler: module_.default, - } - - this.validateConfig(input.config) - return input - }) - ) - }) - }) - - const jobsInputs = await promiseAll(promises) - const flatJobsInput = jobsInputs.flat(1).filter( - ( - job - ): job is { - config: CronJobConfig - handler: CronJobHandler - } => !!job - ) - - flatJobsInput.map(this.registerJob) - - logger.debug(`Job registered.`) + logger.debug(`Jobs registered.`) } } diff --git a/packages/core/framework/src/subscribers/__fixtures__/subscribers/order-notifier-md.md b/packages/core/framework/src/subscribers/__fixtures__/subscribers/order-notifier-md.md new file mode 100644 index 0000000000..5a716f4e72 --- /dev/null +++ b/packages/core/framework/src/subscribers/__fixtures__/subscribers/order-notifier-md.md @@ -0,0 +1,10 @@ +import { SubscriberArgs, SubscriberConfig } from "../../types" + +export default async function orderNotifier(\_: SubscriberArgs) { +return await Promise.resolve() +} + +export const config: SubscriberConfig = { +event: ["order.placed", "order.canceled", "order.completed"], +context: { subscriberId: "order-notifier-md" }, +} diff --git a/packages/core/framework/src/subscribers/__fixtures__/subscribers/order-notifier-txt.txt b/packages/core/framework/src/subscribers/__fixtures__/subscribers/order-notifier-txt.txt new file mode 100644 index 0000000000..df6b9e8a12 --- /dev/null +++ b/packages/core/framework/src/subscribers/__fixtures__/subscribers/order-notifier-txt.txt @@ -0,0 +1,10 
@@ +import { SubscriberArgs, SubscriberConfig } from "../../types" + +export default async function orderNotifier(_: SubscriberArgs) { + return await Promise.resolve() +} + +export const config: SubscriberConfig = { + event: ["order.placed", "order.canceled", "order.completed"], + context: { subscriberId: "order-notifier-txt" }, +} diff --git a/packages/core/framework/src/subscribers/subscriber-loader.ts b/packages/core/framework/src/subscribers/subscriber-loader.ts index a029eaae0a..ebe6551c6c 100644 --- a/packages/core/framework/src/subscribers/subscriber-loader.ts +++ b/packages/core/framework/src/subscribers/subscriber-loader.ts @@ -1,20 +1,12 @@ import { Event, IEventBusModuleService, Subscriber } from "@medusajs/types" -import { - dynamicImport, - kebabCase, - Modules, - promiseAll, - readDirRecursive, - resolveExports, -} from "@medusajs/utils" -import { access } from "fs/promises" -import { join, parse } from "path" +import { kebabCase, Modules } from "@medusajs/utils" +import { parse } from "path" -import { Dirent } from "fs" import { configManager } from "../config" import { container } from "../container" import { logger } from "../logger" import { SubscriberArgs, SubscriberConfig } from "./types" +import { ResourceLoader } from "../utils/resource-loader" type SubscriberHandler = (args: SubscriberArgs) => Promise @@ -23,31 +15,15 @@ type SubscriberModule = { handler: SubscriberHandler } -export class SubscriberLoader { +export class SubscriberLoader extends ResourceLoader { + protected resourceName = "subscriber" + /** * The options of the plugin from which the subscribers are being loaded * @private */ #pluginOptions: Record - /** - * The base directory from which to scan for the subscribers - * @private - */ - #sourceDir: string | string[] - - /** - * The list of file names to exclude from the subscriber scan - * @private - */ - #excludes: RegExp[] = [ - /index\.js/, - /index\.ts/, - /\.DS_Store/, - /(\.ts\.map|\.js\.map|\.d\.ts|\.md)/, - 
/^_[^/\\]*(\.[^/\\]+)?$/, - ] - /** * Map of subscribers descriptors to consume in the loader * @private @@ -58,10 +34,28 @@ export class SubscriberLoader { sourceDir: string | string[], options: Record = {} ) { - this.#sourceDir = sourceDir + super(sourceDir) this.#pluginOptions = options } + protected async onFileLoaded( + path: string, + fileExports: Record + ) { + const isValid = this.validateSubscriber(fileExports, path) + + logger.debug(`Registering subscribers from ${path}.`) + + if (!isValid) { + return + } + + this.#subscriberDescriptors.set(path, { + config: fileExports.config, + handler: fileExports.default, + }) + } + private validateSubscriber( subscriber: any, path: string @@ -122,42 +116,6 @@ export class SubscriberLoader { return true } - private async createDescriptor(absolutePath: string) { - return await dynamicImport(absolutePath).then((module_) => { - module_ = resolveExports(module_) - const isValid = this.validateSubscriber(module_, absolutePath) - - if (!isValid) { - return - } - - this.#subscriberDescriptors.set(absolutePath, { - config: module_.config, - handler: module_.default, - }) - }) - } - - private async createMap(dirPath: string) { - const promises = await readDirRecursive(dirPath).then(async (entries) => { - const fileEntries = entries.filter((entry) => { - return ( - !entry.isDirectory() && - !this.#excludes.some((exclude) => exclude.test(entry.name)) - ) - }) - - logger.debug(`Registering subscribers from ${dirPath}.`) - - return fileEntries.flatMap(async (entry: Dirent) => { - const fullPath = join(entry.path, entry.name) - return await this.createDescriptor(fullPath) - }) - }) - - await promiseAll(promises) - } - private inferIdentifier( fileName: string, { context }: SubscriberConfig, @@ -222,21 +180,7 @@ export class SubscriberLoader { } async load() { - const normalizeSourcePaths = Array.isArray(this.#sourceDir) - ? 
this.#sourceDir
-      : [this.#sourceDir]
-    const promises = normalizeSourcePaths.map(async (sourcePath) => {
-      try {
-        await access(sourcePath)
-      } catch {
-        logger.info(`No subscribers to load from ${sourcePath}. skipped.`)
-        return
-      }
-
-      return await this.createMap(sourcePath)
-    })
-
-    await promiseAll(promises)
+    await super.discoverResources()
 
     for (const [
       fileName,
diff --git a/packages/core/framework/src/utils/resource-loader.ts b/packages/core/framework/src/utils/resource-loader.ts
new file mode 100644
index 0000000000..d2c2f62c15
--- /dev/null
+++ b/packages/core/framework/src/utils/resource-loader.ts
@@ -0,0 +1,101 @@
+import { dynamicImport, promiseAll, readDirRecursive } from "@medusajs/utils"
+import { Dirent } from "fs"
+import { access } from "fs/promises"
+import { join, parse } from "path"
+import { logger } from "../logger"
+
+export abstract class ResourceLoader {
+  /**
+   * The name of the resource (e.g. job, subscriber, workflow)
+   */
+  protected abstract resourceName: string
+
+  /**
+   * The directory from which to load the resources
+   * @private
+   */
+  #sourceDir: string | string[]
+
+  /**
+   * The list of file names to exclude from the resource scan
+   * @private
+   */
+  #excludes: RegExp[] = [/^_[^/\\]*(\.[^/\\]+)?$/]
+
+  constructor(sourceDir: string | string[]) {
+    this.#sourceDir = sourceDir
+  }
+
+  /**
+   * Discover resources from the source directory
+   * @param exclude - custom exclusion regexes
+   * @param customFiltering - custom filtering function
+   * @returns The resources discovered
+   */
+  protected async discoverResources({
+    exclude,
+    customFiltering,
+  }: {
+    exclude?: RegExp[]
+    customFiltering?: (entry: Dirent) => boolean
+  } = {}): Promise<Record<string, unknown>[]> {
+    exclude ??= []
+    customFiltering ??= (entry: Dirent) => {
+      const parsedName = parse(entry.name)
+
+      return (
+        !entry.isDirectory() &&
+        parsedName.name !== "index" &&
+        !parsedName.base.endsWith(".d.ts") &&
+        [".js", ".ts"].includes(parsedName.ext) &&
+        !this.#excludes.some((exclude) =>
exclude.test(parsedName.base)) &&
+        !exclude.some((exclude) => exclude.test(parsedName.base))
+      )
+    }
+
+    const normalizedSourcePath = Array.isArray(this.#sourceDir)
+      ? this.#sourceDir
+      : [this.#sourceDir]
+
+    const promises = normalizedSourcePath.map(async (sourcePath) => {
+      try {
+        await access(sourcePath)
+      } catch {
+        logger.info(
+          `No ${this.resourceName} to load from ${sourcePath}. skipped.`
+        )
+        return
+      }
+
+      return await readDirRecursive(sourcePath).then(async (entries) => {
+        const fileEntries = entries.filter((entry: Dirent) =>
+          customFiltering(entry)
+        )
+
+        return await promiseAll(
+          fileEntries.map(async (entry: Dirent) => {
+            const fullPath = join(entry.path, entry.name)
+
+            const module_ = await dynamicImport(fullPath)
+
+            await this.onFileLoaded(fullPath, module_)
+            return module_
+          })
+        )
+      })
+    })
+
+    const resources = await promiseAll(promises)
+    return resources.flat()
+  }
+
+  /**
+   * Called when a file is imported
+   * @param path - The path of the file
+   * @param fileExports - The exports of the file
+   */
+  protected abstract onFileLoaded(
+    path: string,
+    fileExports: Record<string, unknown>
+  ): Promise<void> | never
+}
diff --git a/packages/core/framework/src/workflows/workflow-loader.ts b/packages/core/framework/src/workflows/workflow-loader.ts
index a8b5945327..47d19f009f 100644
--- a/packages/core/framework/src/workflows/workflow-loader.ts
+++ b/packages/core/framework/src/workflows/workflow-loader.ts
@@ -1,30 +1,18 @@
-import { dynamicImport, promiseAll, readDirRecursive } from "@medusajs/utils"
-import { Dirent } from "fs"
-import { access } from "fs/promises"
-import { join } from "path"
 import { logger } from "../logger"
+import { ResourceLoader } from "../utils/resource-loader"
 
-export class WorkflowLoader {
-  /**
-   * The directory from which to load the workflows
-   * @private
-   */
-  #sourceDir: string | string[]
-
-  /**
-   * The list of file names to exclude from the subscriber scan
-   * @private
-   */
-  #excludes: RegExp[] = [
-    /index\.js/,
-
/index\.ts/, - /\.DS_Store/, - /(\.ts\.map|\.js\.map|\.d\.ts|\.md)/, - /^_[^/\\]*(\.[^/\\]+)?$/, - ] +export class WorkflowLoader extends ResourceLoader { + protected resourceName = "workflow" constructor(sourceDir: string | string[]) { - this.#sourceDir = sourceDir + super(sourceDir) + } + + protected async onFileLoaded( + path: string, + fileExports: Record + ) { + logger.debug(`Registering workflows from ${path}.`) } /** @@ -32,38 +20,7 @@ export class WorkflowLoader { * therefore we only need to import them */ async load() { - const normalizedSourcePath = Array.isArray(this.#sourceDir) - ? this.#sourceDir - : [this.#sourceDir] - - const promises = normalizedSourcePath.map(async (sourcePath) => { - try { - await access(sourcePath) - } catch { - logger.info(`No workflow to load from ${sourcePath}. skipped.`) - return - } - - return await readDirRecursive(sourcePath).then(async (entries) => { - const fileEntries = entries.filter((entry: Dirent) => { - return ( - !entry.isDirectory() && - !this.#excludes.some((exclude) => exclude.test(entry.name)) - ) - }) - - logger.debug(`Registering workflows from ${sourcePath}.`) - - return await promiseAll( - fileEntries.map(async (entry: Dirent) => { - const fullPath = join(entry.path, entry.name) - return await dynamicImport(fullPath) - }) - ) - }) - }) - - await promiseAll(promises) + await super.discoverResources() logger.debug(`Workflows registered.`) }