feat(utils): define file config (#13283)
**What**
- Allow auto-loaded Medusa files to export a config object.
- Currently supports isDisabled to control loading.
- new instance `FeatureFlag` exported by `@medusajs/framework/utils`
- `feature-flags` is now a supported folder for medusa projects, modules, providers and plugins. They will be loaded and added to `FeatureFlag`
**Why**
- Enables conditional loading of routes, migrations, jobs, subscribers, workflows, and other files based on feature flags.
```ts
// /src/feature-flags
import { FlagSettings } from "@medusajs/framework/feature-flags"
const CustomFeatureFlag: FlagSettings = {
key: "custom_feature",
default_val: false,
env_key: "FF_MY_CUSTOM_FEATURE",
description: "Enable xyz",
}
export default CustomFeatureFlag
```
```ts
// /src/modules/my-custom-module/migration/Migration20250822135845.ts
import { defineFileConfig, FeatureFlag } from "@medusajs/framework/utils"
export class Migration20250822135845 extends Migration {
override async up(){ }
override async down(){ }
}
defineFileConfig({
isDisabled: () => !FeatureFlag.isFeatureEnabled("custom_feature")
})
```
This commit is contained in:
committed by
GitHub
parent
4cda412243
commit
e413cfefc2
@@ -167,7 +167,6 @@ describe("flattenObjectToKeyValuePairs", function () {
|
||||
}
|
||||
|
||||
const keyValueParis = flattenObjectToKeyValuePairs(cart)
|
||||
console.log(JSON.stringify(keyValueParis, null, 2))
|
||||
expect(keyValueParis).toEqual({
|
||||
id: "cart_01JRDH08QD8CZ0KJDVE410KM1J",
|
||||
currency_code: "usd",
|
||||
|
||||
28
packages/core/utils/src/common/define-file-config.ts
Normal file
28
packages/core/utils/src/common/define-file-config.ts
Normal file
@@ -0,0 +1,28 @@
|
||||
import { InputFileConfig } from "@medusajs/types"
|
||||
import { getCallerFilePath } from "./get-caller-file-path"
|
||||
|
||||
export const MEDUSA_SKIP_FILE = Symbol.for("__MEDUSA_SKIP_FILE__")
|
||||
/**
|
||||
* The "defineFileConfig" helper can be used to define the configuration
|
||||
* of any file auto-loaded by Medusa.
|
||||
*
|
||||
* It is used to avoid loading files that are not required. Like a feature flag
|
||||
* that is disabled.
|
||||
*/
|
||||
const FILE_CONFIGS = new Map()
|
||||
export function defineFileConfig(config?: InputFileConfig) {
|
||||
const filePath = config?.path ?? getCallerFilePath()
|
||||
FILE_CONFIGS.set(filePath, config)
|
||||
}
|
||||
|
||||
export function getDefinedFileConfig(path?: string) {
|
||||
return FILE_CONFIGS.get(path)
|
||||
}
|
||||
|
||||
export function isFileDisabled(path?: string) {
|
||||
return !!getDefinedFileConfig(path)?.isDisabled?.()
|
||||
}
|
||||
|
||||
export function isFileSkipped(exported: unknown) {
|
||||
return !!exported?.[MEDUSA_SKIP_FILE]
|
||||
}
|
||||
@@ -1,3 +1,4 @@
|
||||
import { isFileDisabled, MEDUSA_SKIP_FILE } from "./define-file-config"
|
||||
import { resolveExports } from "./resolve-exports"
|
||||
|
||||
/**
|
||||
@@ -13,5 +14,12 @@ import { resolveExports } from "./resolve-exports"
|
||||
*/
|
||||
export async function dynamicImport(path: string): Promise<any> {
|
||||
const module = require(path)
|
||||
return resolveExports(module)
|
||||
|
||||
const exported = resolveExports(module)
|
||||
|
||||
if (isFileDisabled(path)) {
|
||||
exported[MEDUSA_SKIP_FILE] = true
|
||||
}
|
||||
|
||||
return exported
|
||||
}
|
||||
|
||||
@@ -15,6 +15,7 @@ export * from "./deep-equal-obj"
|
||||
export * from "./deep-flat-map"
|
||||
export * from "./deep-merge"
|
||||
export * from "./define-config"
|
||||
export * from "./define-file-config"
|
||||
export * from "./dynamic-import"
|
||||
export * from "./env-editor"
|
||||
export * from "./errors"
|
||||
@@ -50,6 +51,7 @@ export * from "./map-object-to"
|
||||
export * from "./medusa-container"
|
||||
export * from "./merge-metadata"
|
||||
export * from "./merge-plugin-modules"
|
||||
export * from "./normalize-csv-value"
|
||||
export * from "./normalize-import-path-with-source"
|
||||
export * from "./object-from-string-path"
|
||||
export * from "./object-to-string-path"
|
||||
@@ -69,6 +71,7 @@ export * from "./remove-nullisih"
|
||||
export * from "./remove-undefined"
|
||||
export * from "./remove-undefined-properties"
|
||||
export * from "./resolve-exports"
|
||||
export * from "./retry-execution"
|
||||
export * from "./rules"
|
||||
export * from "./selector-constraints-to-string"
|
||||
export * from "./serialize-error"
|
||||
@@ -88,5 +91,3 @@ export * from "./upper-case-first"
|
||||
export * from "./validate-handle"
|
||||
export * from "./validate-module-name"
|
||||
export * from "./wrap-handler"
|
||||
export * from "./normalize-csv-value"
|
||||
export * from "./retry-execution"
|
||||
|
||||
@@ -5,7 +5,8 @@ import { join } from "path"
|
||||
* @param path
|
||||
*/
|
||||
export function normalizeImportPathWithSource(
|
||||
path: string | undefined
|
||||
path: string | undefined,
|
||||
cwd: string = process.cwd()
|
||||
): string {
|
||||
let normalizePath = path
|
||||
|
||||
@@ -19,7 +20,7 @@ export function normalizeImportPathWithSource(
|
||||
* "./src" directory inside it.
|
||||
*/
|
||||
let sourceDir = normalizePath.startsWith("./src") ? "./" : "./src"
|
||||
normalizePath = join(process.cwd(), sourceDir, normalizePath)
|
||||
normalizePath = join(cwd, sourceDir, normalizePath)
|
||||
}
|
||||
|
||||
return normalizePath ?? ""
|
||||
|
||||
@@ -25,10 +25,12 @@ export async function readDirRecursive(
|
||||
dir: string,
|
||||
options?: {
|
||||
ignoreMissing?: boolean
|
||||
maxDepth?: number
|
||||
}
|
||||
): Promise<Dirent[]> {
|
||||
let allEntries: Dirent[] = []
|
||||
const readRecursive = async (dir: string) => {
|
||||
const readRecursive = async (dir: string, depth: number = 1) => {
|
||||
const maxDepth = options?.maxDepth ?? Infinity
|
||||
try {
|
||||
const entries = await readdir(dir, { withFileTypes: true })
|
||||
for (const entry of entries) {
|
||||
@@ -38,14 +40,15 @@ export async function readDirRecursive(
|
||||
})
|
||||
allEntries.push(entry)
|
||||
|
||||
if (entry.isDirectory()) {
|
||||
await readRecursive(fullPath)
|
||||
if (entry.isDirectory() && depth < maxDepth) {
|
||||
await readRecursive(fullPath, depth + 1)
|
||||
}
|
||||
}
|
||||
} catch (error) {
|
||||
if (options?.ignoreMissing && error.code === "ENOENT") {
|
||||
return
|
||||
}
|
||||
|
||||
throw error
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,10 +1,11 @@
|
||||
export * from "./mikro-orm/base-entity"
|
||||
export * from "./mikro-orm/big-number-field"
|
||||
export * from "./mikro-orm/custom-db-migrator"
|
||||
export * from "./mikro-orm/decorators/searchable"
|
||||
export * from "./mikro-orm/mikro-orm-create-connection"
|
||||
export * from "./mikro-orm/mikro-orm-free-text-search-filter"
|
||||
export * from "./mikro-orm/mikro-orm-repository"
|
||||
export * from "./mikro-orm/mikro-orm-soft-deletable-filter"
|
||||
export * from "./mikro-orm/mikro-orm-serializer"
|
||||
export * from "./mikro-orm/base-entity"
|
||||
export * from "./mikro-orm/mikro-orm-soft-deletable-filter"
|
||||
export * from "./mikro-orm/utils"
|
||||
export * from "./mikro-orm/decorators/searchable"
|
||||
export * from "./utils"
|
||||
|
||||
59
packages/core/utils/src/dal/mikro-orm/custom-db-migrator.ts
Normal file
59
packages/core/utils/src/dal/mikro-orm/custom-db-migrator.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { Constructor } from "@medusajs/types"
|
||||
import { MikroORM, Utils } from "@mikro-orm/core"
|
||||
import {
|
||||
Migrator as BaseMigrator,
|
||||
Migration,
|
||||
UmzugMigration,
|
||||
} from "@mikro-orm/migrations"
|
||||
import { isFileDisabled, isFileSkipped } from "../../common/define-file-config"
|
||||
import { dynamicImport } from "../../common/dynamic-import"
|
||||
|
||||
export class CustomDBMigrator extends BaseMigrator {
|
||||
static register(orm: MikroORM): void {
|
||||
orm.config.registerExtension(
|
||||
"@mikro-orm/migrator",
|
||||
() => new CustomDBMigrator(orm.em as any)
|
||||
)
|
||||
}
|
||||
|
||||
resolve(params) {
|
||||
require(params.path)
|
||||
if (isFileDisabled(params.path)) {
|
||||
return {
|
||||
name: "Noop",
|
||||
up: () => {},
|
||||
down: () => {},
|
||||
} as any
|
||||
}
|
||||
|
||||
const $this = this as any
|
||||
const createMigrationHandler = async (method) => {
|
||||
const migration = await Utils.dynamicImport(params.path)
|
||||
const MigrationClass = Object.values(
|
||||
migration
|
||||
)[0] as Constructor<Migration>
|
||||
const instance = new MigrationClass($this.driver, $this.config)
|
||||
await $this.runner.run(instance, method)
|
||||
}
|
||||
|
||||
return {
|
||||
name: $this.storage.getMigrationName(params.name),
|
||||
up: () => createMigrationHandler("up"),
|
||||
down: () => createMigrationHandler("down"),
|
||||
}
|
||||
}
|
||||
|
||||
async getPendingMigrations(): Promise<UmzugMigration[]> {
|
||||
const pending = await super.getPendingMigrations()
|
||||
|
||||
// Filter out migrations that are disabled by file config
|
||||
return pending.filter(async (pendingFile: UmzugMigration) => {
|
||||
const migration = await dynamicImport(pendingFile.path!)
|
||||
if (isFileSkipped(migration)) {
|
||||
return false
|
||||
}
|
||||
|
||||
return true
|
||||
})
|
||||
}
|
||||
}
|
||||
@@ -3,6 +3,7 @@ import { Filter as MikroORMFilter } from "@mikro-orm/core"
|
||||
import { TSMigrationGenerator } from "@mikro-orm/migrations"
|
||||
import { isString, retryExecution, stringifyCircular } from "../../common"
|
||||
import { normalizeMigrationSQL } from "../utils"
|
||||
import { CustomDBMigrator } from "./custom-db-migrator"
|
||||
|
||||
type FilterDef = Parameters<typeof MikroORMFilter>[0]
|
||||
|
||||
@@ -107,6 +108,7 @@ export async function mikroOrmCreateConnection(
|
||||
false
|
||||
),
|
||||
},
|
||||
extensions: [CustomDBMigrator],
|
||||
// We don't want to do any DB checks when establishing the connection. This happens once when creating the pg_connection, and it can happen again explicitly if necessary.
|
||||
connect: false,
|
||||
ensureDatabase: false,
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: usage/telemetry collection. Enabled by default;
// override with the MEDUSA_FF_ANALYTICS environment variable.
export const AnalyticsFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "analytics",
  default_val: true,
  env_key: "MEDUSA_FF_ANALYTICS",
  description:
    "Enable Medusa to collect data on usage, errors and performance for the purpose of improving the product",
}
|
||||
@@ -0,0 +1,70 @@
|
||||
import { FlagSettings } from "@medusajs/types"
|
||||
import { readdir } from "fs/promises"
|
||||
import { join, normalize } from "path"
|
||||
import { dynamicImport, isString, readDirRecursive } from "../common"
|
||||
|
||||
const excludedFiles = ["index.js", "index.ts"]
|
||||
const excludedExtensions = [".d.ts", ".d.ts.map", ".js.map"]
|
||||
|
||||
function isFeatureFlag(flag: unknown): flag is FlagSettings {
|
||||
const f = flag as any
|
||||
return !!f && isString(f.key) && isString(f.env_key)
|
||||
}
|
||||
|
||||
/**
|
||||
* Discover feature flag definitions from a directory and subdirectories
|
||||
*/
|
||||
export async function discoverFeatureFlagsFromDir(
|
||||
sourcePath?: string,
|
||||
maxDepth: number = 2
|
||||
): Promise<FlagSettings[]> {
|
||||
if (!sourcePath) {
|
||||
return []
|
||||
}
|
||||
|
||||
const root = normalize(sourcePath)
|
||||
const discovered: FlagSettings[] = []
|
||||
|
||||
const allEntries = await readDirRecursive(root, {
|
||||
ignoreMissing: true,
|
||||
maxDepth,
|
||||
})
|
||||
|
||||
const featureFlagDirs = allEntries
|
||||
.filter((e) => e.isDirectory() && e.name === "feature-flags")
|
||||
.map((e) => join((e as any).path as string, e.name))
|
||||
|
||||
if (!featureFlagDirs.length) {
|
||||
return discovered
|
||||
}
|
||||
|
||||
await Promise.all(
|
||||
featureFlagDirs.map(async (scanDir) => {
|
||||
const entries = await readdir(scanDir, { withFileTypes: true })
|
||||
await Promise.all(
|
||||
entries.map(async (entry) => {
|
||||
if (entry.isDirectory()) {
|
||||
return
|
||||
}
|
||||
|
||||
if (
|
||||
excludedExtensions.some((ext) => entry.name.endsWith(ext)) ||
|
||||
excludedFiles.includes(entry.name)
|
||||
) {
|
||||
return
|
||||
}
|
||||
|
||||
const fileExports = await dynamicImport(join(scanDir, entry.name))
|
||||
const values = Object.values(fileExports)
|
||||
for (const value of values) {
|
||||
if (isFeatureFlag(value)) {
|
||||
discovered.push(value)
|
||||
}
|
||||
}
|
||||
})
|
||||
)
|
||||
})
|
||||
)
|
||||
|
||||
return discovered
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"
|
||||
import { isObject, isString } from "../../common"
|
||||
import { isObject, isString } from "../common"
|
||||
|
||||
export class FlagRouter implements FeatureFlagTypes.IFlagRouter {
|
||||
private readonly flags: Record<string, boolean | Record<string, boolean>> = {}
|
||||
@@ -75,3 +75,5 @@ export class FlagRouter implements FeatureFlagTypes.IFlagRouter {
|
||||
}))
|
||||
}
|
||||
}
|
||||
|
||||
export const FeatureFlag = new FlagRouter({})
|
||||
@@ -1,10 +1,3 @@
|
||||
export * from "./analytics"
|
||||
export * from "./many-to-many-inventory"
|
||||
export * from "./medusa-v2"
|
||||
export * from "./order-editing"
|
||||
export * from "./product-categories"
|
||||
export * from "./publishable-api-keys"
|
||||
export * from "./sales-channels"
|
||||
export * from "./tax-inclusive-pricing"
|
||||
export * from "./utils"
|
||||
export * from "./workflows"
|
||||
export * from "./discover-feature-flags"
|
||||
export * from "./flag-router"
|
||||
export * from "./register-flag"
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: many-to-many inventory-item/variant links.
// Disabled by default; override with MEDUSA_FF_MANY_TO_MANY_INVENTORY.
export const ManyToManyInventoryFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "many_to_many_inventory",
  default_val: false,
  env_key: "MEDUSA_FF_MANY_TO_MANY_INVENTORY",
  description:
    "Enable capability to have many to many relationship between inventory items and variants",
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: Medusa V2 (work in progress). Disabled by
// default; override with the MEDUSA_FF_MEDUSA_V2 environment variable.
export const MedusaV2Flag: FeatureFlagTypes.FlagSettings = {
  key: "medusa_v2",
  default_val: false,
  env_key: "MEDUSA_FF_MEDUSA_V2",
  description: "[WIP] Enable Medusa V2",
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: order editing. Enabled by default; override
// with the MEDUSA_FF_ORDER_EDITING environment variable.
export const OrderEditingFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "order_editing",
  default_val: true,
  env_key: "MEDUSA_FF_ORDER_EDITING",
  description: "[WIP] Enable the order editing feature",
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: product categories. Disabled by default;
// override with the MEDUSA_FF_PRODUCT_CATEGORIES environment variable.
export const ProductCategoryFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "product_categories",
  default_val: false,
  env_key: "MEDUSA_FF_PRODUCT_CATEGORIES",
  description: "[WIP] Enable the product categories feature",
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: publishable API keys. Enabled by default;
// override with the MEDUSA_FF_PUBLISHABLE_API_KEYS environment variable.
export const PublishableAPIKeysFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "publishable_api_keys",
  default_val: true,
  env_key: "MEDUSA_FF_PUBLISHABLE_API_KEYS",
  description: "[WIP] Enable the publishable API keys feature",
}
|
||||
66
packages/core/utils/src/feature-flags/register-flag.ts
Normal file
66
packages/core/utils/src/feature-flags/register-flag.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { FlagSettings, Logger } from "@medusajs/types"
|
||||
import {
|
||||
isDefined,
|
||||
isObject,
|
||||
isString,
|
||||
isTruthy,
|
||||
objectFromStringPath,
|
||||
} from "../common"
|
||||
import { FlagRouter } from "./flag-router"
|
||||
|
||||
export type RegisterFeatureFlagOptions = {
|
||||
flag: FlagSettings
|
||||
projectConfigFlags: Record<string, string | boolean | Record<string, boolean>>
|
||||
router: FlagRouter
|
||||
logger?: Logger
|
||||
track?: (key: string) => void
|
||||
}
|
||||
|
||||
/**
|
||||
* Registers a feature flag on the provided router.
|
||||
* Resolving precedence:
|
||||
* - env overrides
|
||||
* - project config overrides
|
||||
* - default value
|
||||
*/
|
||||
export function registerFeatureFlag(options: RegisterFeatureFlagOptions) {
|
||||
const { flag, projectConfigFlags, router, logger, track } = options
|
||||
|
||||
let value: boolean | Record<string, boolean> = isTruthy(flag.default_val)
|
||||
let from: string | undefined
|
||||
|
||||
if (isDefined(process.env[flag.env_key])) {
|
||||
from = "environment"
|
||||
const envVal = process.env[flag.env_key]
|
||||
|
||||
value = isTruthy(envVal)
|
||||
|
||||
const parsedFromEnv = isString(envVal) ? envVal.split(",") : []
|
||||
if (parsedFromEnv.length > 1) {
|
||||
value = objectFromStringPath(parsedFromEnv)
|
||||
}
|
||||
} else if (isDefined(projectConfigFlags[flag.key])) {
|
||||
from = "project config"
|
||||
|
||||
const pc = projectConfigFlags[flag.key] as string | boolean
|
||||
value = isTruthy(pc)
|
||||
|
||||
if (isObject(projectConfigFlags[flag.key])) {
|
||||
value = projectConfigFlags[flag.key] as Record<string, boolean>
|
||||
}
|
||||
}
|
||||
|
||||
if (logger && from) {
|
||||
logger.info(
|
||||
`Using flag ${flag.env_key} from ${from} with value ${JSON.stringify(
|
||||
value
|
||||
)}`
|
||||
)
|
||||
}
|
||||
|
||||
if (track && value === true) {
|
||||
track(flag.key)
|
||||
}
|
||||
|
||||
router.setFlag(flag.key, value)
|
||||
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: sales channels. Enabled by default; override
// with the MEDUSA_FF_SALES_CHANNELS environment variable.
export const SalesChannelFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "sales_channels",
  default_val: true,
  env_key: "MEDUSA_FF_SALES_CHANNELS",
  description: "[WIP] Enable the sales channels feature",
}
|
||||
@@ -1,8 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: tax-inclusive pricing. Disabled by default;
// override with the MEDUSA_FF_TAX_INCLUSIVE_PRICING environment variable.
export const TaxInclusivePricingFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "tax_inclusive_pricing",
  default_val: false,
  env_key: "MEDUSA_FF_TAX_INCLUSIVE_PRICING",
  description: "[WIP] Enable tax inclusive pricing",
}
|
||||
@@ -1 +0,0 @@
|
||||
export * from "./flag-router"
|
||||
@@ -1,8 +0,0 @@
|
||||
import { FeatureFlagTypes } from "@medusajs/types"

// Feature flag definition: workflows. Disabled by default; override with
// the MEDUSA_FF_WORKFLOWS environment variable.
export const WorkflowsFeatureFlag: FeatureFlagTypes.FlagSettings = {
  key: "workflows",
  default_val: false,
  env_key: "MEDUSA_FF_WORKFLOWS",
  description: "[WIP] Enable workflows",
}
|
||||
@@ -9,6 +9,7 @@ import { EventEmitter } from "events"
|
||||
import { access, mkdir, rename, writeFile } from "fs/promises"
|
||||
import { dirname, join } from "path"
|
||||
import { readDir } from "../common"
|
||||
import { CustomDBMigrator } from "../dal/mikro-orm/custom-db-migrator"
|
||||
|
||||
/**
|
||||
* Events emitted by the migrations class
|
||||
@@ -50,6 +51,7 @@ export class Migrations extends EventEmitter<MigrationsEvents> {
|
||||
...this.#configOrConnection.migrations,
|
||||
silent: true,
|
||||
},
|
||||
extensions: [CustomDBMigrator],
|
||||
})
|
||||
)
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
import { CustomDBMigrator } from "../../dal/mikro-orm/custom-db-migrator"
|
||||
import { defineMikroOrmCliConfig } from "../mikro-orm-cli-config-builder"
|
||||
|
||||
const moduleName = "myTestService"
|
||||
@@ -28,6 +29,7 @@ describe("defineMikroOrmCliConfig", () => {
|
||||
generator: expect.any(Function),
|
||||
snapshotName: ".snapshot-medusa-my-test",
|
||||
},
|
||||
extensions: [CustomDBMigrator],
|
||||
})
|
||||
})
|
||||
|
||||
@@ -47,6 +49,7 @@ describe("defineMikroOrmCliConfig", () => {
|
||||
generator: expect.any(Function),
|
||||
snapshotName: ".snapshot-medusa-my-test",
|
||||
},
|
||||
extensions: [CustomDBMigrator],
|
||||
})
|
||||
})
|
||||
})
|
||||
|
||||
@@ -2,9 +2,9 @@ import { LoaderOptions, Logger, ModulesSdkTypes } from "@medusajs/types"
|
||||
import { EntitySchema } from "@mikro-orm/core"
|
||||
import { EOL } from "os"
|
||||
import { resolve } from "path"
|
||||
import { dynamicImport, isFileSkipped } from "../../common"
|
||||
import { mikroOrmCreateConnection } from "../../dal"
|
||||
import { loadDatabaseConfig } from "../load-module-database-config"
|
||||
import { dynamicImport } from "../../common"
|
||||
|
||||
/**
|
||||
* Utility function to build a seed script that will insert the seed data.
|
||||
@@ -52,6 +52,10 @@ export function buildSeedScript({
|
||||
}
|
||||
)
|
||||
|
||||
if (isFileSkipped(dataSeed)) {
|
||||
return
|
||||
}
|
||||
|
||||
const dbData = loadDatabaseConfig(moduleName, options)!
|
||||
const entities = Object.values(models) as unknown as EntitySchema[]
|
||||
|
||||
|
||||
@@ -7,7 +7,7 @@ import type {
|
||||
} from "@mikro-orm/core"
|
||||
import { defineConfig } from "@mikro-orm/postgresql"
|
||||
import { kebabCase } from "../common"
|
||||
import { CustomTsMigrationGenerator } from "../dal"
|
||||
import { CustomDBMigrator, CustomTsMigrationGenerator } from "../dal"
|
||||
import { DmlEntity, toMikroOrmEntities } from "../dml"
|
||||
|
||||
type Options = Partial<Omit<MikroORMOptions, "entities" | "entitiesTs">> & {
|
||||
@@ -65,5 +65,6 @@ export function defineMikroOrmCliConfig(
|
||||
generator: CustomTsMigrationGenerator,
|
||||
...options.migrations,
|
||||
},
|
||||
extensions: [CustomDBMigrator],
|
||||
}) as ReturnedOptions
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user