Chore/test runner improvements (#12439)

**What**
Make sure there are no open handles left and that the shutdown functions are properly called. Refactor and improve the Medusa test runner. Make sure all module instances are released and cleaned up

**NOTE:**
On a separate PR we can continue the investigation into the memory growth over time while the tests execute
This commit is contained in:
Adrien de Peretti
2025-05-14 15:17:41 +02:00
committed by GitHub
parent e60d15ea84
commit ab22faaa52
13 changed files with 562 additions and 292 deletions

View File

@@ -0,0 +1,7 @@
---
"@medusajs/test-utils": patch
"@medusajs/workflow-engine-inmemory": patch
"@medusajs/workflow-engine-redis": patch
---
Chore/test runner improvements

View File

@@ -31,7 +31,7 @@ describe("waitSubscribersExecution", () => {
describe("with no existing listeners", () => {
it("should resolve when event is fired before timeout", async () => {
const waitPromise = waitSubscribersExecution(TEST_EVENT, eventBus as any)
setTimeout(() => eventBus.emit(TEST_EVENT, "test-data"), 100)
setTimeout(() => eventBus.emit(TEST_EVENT, "test-data"), 100).unref()
jest.advanceTimersByTime(100)
@@ -69,7 +69,7 @@ describe("waitSubscribersExecution", () => {
describe("with existing listeners", () => {
it("should resolve when all listeners complete successfully", async () => {
const listener = jest.fn().mockImplementation(() => {
return new Promise((resolve) => setTimeout(resolve, 200))
return new Promise((resolve) => setTimeout(resolve, 200).unref())
})
eventBus.eventEmitter_.on(TEST_EVENT, listener)
@@ -131,15 +131,15 @@ describe("waitSubscribersExecution", () => {
describe("with multiple listeners", () => {
it("should resolve when all listeners complete", async () => {
const listener1 = jest.fn().mockImplementation(() => {
return new Promise((resolve) => setTimeout(resolve, 100))
return new Promise((resolve) => setTimeout(resolve, 100).unref())
})
const listener2 = jest.fn().mockImplementation(() => {
return new Promise((resolve) => setTimeout(resolve, 200))
return new Promise((resolve) => setTimeout(resolve, 200).unref())
})
const listener3 = jest.fn().mockImplementation(() => {
return new Promise((resolve) => setTimeout(resolve, 300))
return new Promise((resolve) => setTimeout(resolve, 300).unref())
})
eventBus.eventEmitter_.on(TEST_EVENT, listener1)

View File

@@ -5,6 +5,8 @@ import {
SqlEntityManager,
} from "@mikro-orm/postgresql"
import { createDatabase, dropDatabase } from "pg-god"
import { logger } from "@medusajs/framework/logger"
import { execOrTimeout } from "./medusa-test-runner-utils"
const DB_HOST = process.env.DB_HOST ?? "localhost"
const DB_USERNAME = process.env.DB_USERNAME ?? ""
@@ -123,31 +125,43 @@ export function getMikroOrmWrapper({
schema: this.schema,
})
// Initializing the ORM
this.orm = await MikroORM.init(OrmConfig)
this.manager = this.orm.em
try {
await this.orm.getSchemaGenerator().ensureDatabase()
} catch (err) {
console.log(err)
}
this.orm = await MikroORM.init(OrmConfig)
this.manager = this.orm.em
await this.manager?.execute(
`CREATE SCHEMA IF NOT EXISTS "${this.schema ?? "public"}";`
)
try {
await this.orm.getSchemaGenerator().ensureDatabase()
} catch (err) {
logger.error("Error ensuring database:", err)
throw err
}
const pendingMigrations = await this.orm
.getMigrator()
.getPendingMigrations()
await this.manager?.execute(
`CREATE SCHEMA IF NOT EXISTS "${this.schema ?? "public"}";`
)
if (pendingMigrations && pendingMigrations.length > 0) {
await this.orm
const pendingMigrations = await this.orm
.getMigrator()
.up({ migrations: pendingMigrations.map((m) => m.name!) })
} else {
await this.orm.schema.refreshDatabase() // ensure db exists and is fresh
.getPendingMigrations()
if (pendingMigrations && pendingMigrations.length > 0) {
await this.orm
.getMigrator()
.up({ migrations: pendingMigrations.map((m) => m.name!) })
} else {
await this.orm.schema.refreshDatabase()
}
} catch (error) {
if (this.orm) {
try {
await this.orm.close()
} catch (closeError) {
logger.error("Error closing ORM:", closeError)
}
}
this.orm = null
this.manager = null
throw error
}
},
@@ -156,20 +170,30 @@ export function getMikroOrmWrapper({
throw new Error("ORM not configured")
}
await this.manager?.execute(
`DROP SCHEMA IF EXISTS "${this.schema ?? "public"}" CASCADE;`
)
await this.manager?.execute(
`CREATE SCHEMA IF NOT EXISTS "${this.schema ?? "public"}";`
)
try {
await this.orm.close()
} catch {}
await this.manager?.execute(
`DROP SCHEMA IF EXISTS "${this.schema ?? "public"}" CASCADE;`
)
this.orm = null
this.manager = null
await this.manager?.execute(
`CREATE SCHEMA IF NOT EXISTS "${this.schema ?? "public"}";`
)
const closePromise = this.orm.close()
await execOrTimeout(closePromise)
} catch (error) {
logger.error("Error clearing database:", error)
try {
await this.orm?.close()
} catch (closeError) {
logger.error("Error during forced ORM close:", closeError)
}
throw error
} finally {
this.orm = null
this.manager = null
}
},
}
}
@@ -178,10 +202,15 @@ export const dbTestUtilFactory = (): any => ({
pgConnection_: null,
create: async function (dbName: string) {
await createDatabase(
{ databaseName: dbName, errorIfExist: false },
pgGodCredentials
)
try {
await createDatabase(
{ databaseName: dbName, errorIfExist: false },
pgGodCredentials
)
} catch (error) {
logger.error("Error creating database:", error)
throw error
}
},
teardown: async function ({ schema }: { schema?: string } = {}) {
@@ -189,50 +218,77 @@ export const dbTestUtilFactory = (): any => ({
return
}
const runRawQuery = this.pgConnection_.raw.bind(this.pgConnection_)
try {
const runRawQuery = this.pgConnection_.raw.bind(this.pgConnection_)
schema ??= "public"
schema ??= "public"
await runRawQuery(`SET session_replication_role = 'replica';`)
const { rows: tableNames } = await runRawQuery(`SELECT table_name
FROM information_schema.tables
WHERE table_schema = '${schema}';`)
await runRawQuery(`SET session_replication_role = 'replica';`)
const { rows: tableNames } = await runRawQuery(`SELECT table_name
FROM information_schema.tables
WHERE table_schema = '${schema}';`)
const skipIndexPartitionPrefix = "cat_"
const mainPartitionTables = ["index_data", "index_relation"]
let hasIndexTables = false
const skipIndexPartitionPrefix = "cat_"
const mainPartitionTables = ["index_data", "index_relation"]
let hasIndexTables = false
for (const { table_name } of tableNames) {
if (mainPartitionTables.includes(table_name)) {
hasIndexTables = true
for (const { table_name } of tableNames) {
if (mainPartitionTables.includes(table_name)) {
hasIndexTables = true
}
if (
table_name.startsWith(skipIndexPartitionPrefix) ||
mainPartitionTables.includes(table_name)
) {
continue
}
await runRawQuery(`DELETE FROM ${schema}."${table_name}";`)
}
// Skipping index partition tables.
if (
table_name.startsWith(skipIndexPartitionPrefix) ||
mainPartitionTables.includes(table_name)
) {
continue
if (hasIndexTables) {
await runRawQuery(`TRUNCATE TABLE ${schema}.index_data;`)
await runRawQuery(`TRUNCATE TABLE ${schema}.index_relation;`)
}
await runRawQuery(`DELETE
FROM ${schema}."${table_name}";`)
await runRawQuery(`SET session_replication_role = 'origin';`)
} catch (error) {
logger.error("Error during database teardown:", error)
throw error
}
if (hasIndexTables) {
await runRawQuery(`TRUNCATE TABLE ${schema}.index_data;`)
await runRawQuery(`TRUNCATE TABLE ${schema}.index_relation;`)
}
await runRawQuery(`SET session_replication_role = 'origin';`)
},
shutdown: async function (dbName: string) {
await this.pgConnection_?.context?.destroy()
await this.pgConnection_?.destroy()
try {
const cleanupPromises: Promise<any>[] = []
return await dropDatabase(
{ databaseName: dbName, errorIfNonExist: false },
pgGodCredentials
)
if (this.pgConnection_?.context) {
cleanupPromises.push(
execOrTimeout(this.pgConnection_.context.destroy())
)
}
if (this.pgConnection_) {
cleanupPromises.push(execOrTimeout(this.pgConnection_.destroy()))
}
await Promise.all(cleanupPromises)
return await dropDatabase(
{ databaseName: dbName, errorIfNonExist: false },
pgGodCredentials
)
} catch (error) {
logger.error("Error during database shutdown:", error)
try {
await this.pgConnection_?.context?.destroy()
await this.pgConnection_?.destroy()
} catch (cleanupError) {
logger.error("Error during forced cleanup:", cleanupError)
}
throw error
} finally {
this.pgConnection_ = null
}
},
})

View File

@@ -25,6 +25,7 @@ export const waitSubscribersExecution = (
)
)
}, timeout)
timeoutId.unref()
})
// If there are no existing listeners, resolve once the event happens. Otherwise, wrap the existing subscribers in a promise and resolve once they are done.

View File

@@ -8,6 +8,7 @@ import {
createPgConnection,
promiseAll,
} from "@medusajs/framework/utils"
import { logger } from "@medusajs/framework/logger"
export interface InitModulesOptions {
injectedDependencies?: Record<string, unknown>
@@ -69,7 +70,7 @@ export async function initModules({
])
} else {
if (!preventConnectionDestroyWarning) {
console.info(
logger.info(
`You are using a custom shared connection. The connection won't be destroyed automatically.`
)
}

View File

@@ -2,8 +2,9 @@ import express from "express"
import getPort from "get-port"
import { resolve } from "path"
import { MedusaContainer } from "@medusajs/framework/types"
import { applyEnvVarsToProcess } from "./utils"
import { applyEnvVarsToProcess, execOrTimeout } from "./utils"
import { promiseAll, GracefulShutdownServer } from "@medusajs/framework/utils"
import { logger } from "@medusajs/framework/logger"
async function bootstrapApp({
cwd,
@@ -14,18 +15,23 @@ async function bootstrapApp({
const loaders = require("@medusajs/medusa/loaders/index").default
const { container, shutdown } = await loaders({
directory: resolve(cwd || process.cwd()),
expressApp: app,
})
try {
const { container, shutdown } = await loaders({
directory: resolve(cwd || process.cwd()),
expressApp: app,
})
const PORT = process.env.PORT ? parseInt(process.env.PORT) : await getPort()
const PORT = process.env.PORT ? parseInt(process.env.PORT) : await getPort()
return {
shutdown,
container,
app,
port: PORT,
return {
shutdown,
container,
app,
port: PORT,
}
} catch (error) {
logger.error("Error bootstrapping app:", error)
throw error
}
}
@@ -37,44 +43,84 @@ export async function startApp({
container: MedusaContainer
port: number
}> {
const {
app,
port,
container,
shutdown: medusaShutdown,
} = await bootstrapApp({
cwd,
env,
})
let expressServer: any
let medusaShutdown: () => Promise<void> = async () => void 0
let container: MedusaContainer
let expressServer
try {
const {
app,
port,
container: appContainer,
shutdown: appShutdown,
} = await bootstrapApp({
cwd,
env,
})
const shutdown = async () => {
await promiseAll([expressServer?.shutdown(), medusaShutdown()])
container = appContainer
medusaShutdown = appShutdown
if (typeof global !== "undefined" && global?.gc) {
global.gc()
const shutdown = async () => {
try {
const shutdownPromise = promiseAll([
expressServer?.shutdown(),
medusaShutdown(),
])
await execOrTimeout(shutdownPromise)
if (typeof global !== "undefined" && global?.gc) {
global.gc()
}
} catch (error) {
logger.error("Error during shutdown:", error)
try {
await expressServer?.shutdown()
await medusaShutdown()
} catch (cleanupError) {
logger.error("Error during forced cleanup:", cleanupError)
}
throw error
}
}
}
return await new Promise((resolve, reject) => {
const server = app
.listen(port)
.on("error", async (err) => {
await shutdown()
return reject(err)
})
.on("listening", () => {
process.send?.(port)
resolve({
shutdown,
container,
port,
return await new Promise((resolve, reject) => {
const server = app
.listen(port)
.on("error", async (err) => {
logger.error("Error starting server:", err)
await shutdown()
return reject(err)
})
})
.on("listening", () => {
process.send?.(port)
// TODO: fix that once we find the appropriate place to put this util
expressServer = GracefulShutdownServer.create(server)
})
resolve({
shutdown,
container,
port,
})
})
expressServer = GracefulShutdownServer.create(server)
})
} catch (error) {
logger.error("Error in startApp:", error)
if (expressServer) {
try {
await expressServer.shutdown()
} catch (cleanupError) {
logger.error("Error cleaning up express server:", cleanupError)
}
}
if (medusaShutdown) {
try {
await medusaShutdown()
} catch (cleanupError) {
logger.error("Error cleaning up medusa:", cleanupError)
}
}
throw error
}
}

View File

@@ -1,5 +1,6 @@
import type { MedusaAppLoader } from "@medusajs/framework"
import { Logger, MedusaContainer } from "@medusajs/framework/types"
import { logger } from "@medusajs/framework/logger"
import {
ContainerRegistrationKeys,
getResolvedPlugins,
@@ -27,7 +28,7 @@ export async function migrateDatabase(appLoader: MedusaAppLoader) {
try {
await appLoader.runModulesMigrations()
} catch (err) {
console.error("Something went wrong while running the migrations")
logger.error("Something went wrong while running the migrations")
throw err
}
}

View File

@@ -5,3 +5,23 @@ export function applyEnvVarsToProcess(env?: Record<any, any>) {
Object.entries(env).forEach(([k, v]) => (process.env[k] = v))
}
}
/**
 * Execute a function (or await a promise) and return a promise that resolves
 * when it resolves, or rejects when it rejects or the timeout is reached.
 *
 * @param fn - The promise to await, or a function returning the promise.
 * @param timeout - The timeout in milliseconds. Defaults to 5000.
 * @returns A promise settling with `fn`'s outcome, or rejecting with
 * `Error("Timeout")` if the timeout elapses first.
 */
export async function execOrTimeout<T = any>(
  fn: Promise<T> | (() => Promise<T>),
  timeout: number = 5000
): Promise<T> {
  // Definite assignment is safe: the Promise executor runs synchronously.
  let timeoutId!: ReturnType<typeof setTimeout>

  const timeoutPromise = new Promise<never>((_, reject) => {
    timeoutId = setTimeout(() => reject(new Error("Timeout")), timeout)
    // unref() so a pending timer never keeps the process alive (the whole
    // point of this util is avoiding open handles in tests).
    timeoutId.unref()
  })

  const fnPromise = typeof fn === "function" ? fn() : fn

  try {
    return await Promise.race([fnPromise, timeoutPromise])
  } finally {
    // Don't leave the timer pending once the race has settled.
    clearTimeout(timeoutId)
  }
}

View File

@@ -1,10 +1,11 @@
import { MedusaAppOutput } from "@medusajs/framework/modules-sdk"
import { ContainerLike, MedusaContainer } from "@medusajs/framework/types"
import { MedusaContainer } from "@medusajs/framework/types"
import {
ContainerRegistrationKeys,
createMedusaContainer,
} from "@medusajs/framework/utils"
import { asValue } from "awilix"
import { logger } from "@medusajs/framework/logger"
import { dbTestUtilFactory, getDatabaseURL } from "./database"
import {
applyEnvVarsToProcess,
@@ -33,6 +34,254 @@ export interface MedusaSuiteOptions {
getMedusaApp: () => MedusaAppOutput
}
/**
 * Configuration accepted by `MedusaTestRunner`. Every field is optional;
 * the runner derives defaults in its constructor (db name from the module
 * name + JEST_WORKER_ID, schema "public", cwd from `medusaConfigFile`).
 */
interface TestRunnerConfig {
// Used to build the test database name; defaults to a random string.
moduleName?: string
// Environment variables applied to `process.env` before boot.
env?: Record<string, any>
// Explicit database name; overrides the derived `medusa-<module>-integration-<worker>` name.
dbName?: string
// Directory containing the medusa config; also used as the app cwd.
medusaConfigFile?: string
// Postgres schema to run against; defaults to "public".
schema?: string
// Forwarded into the db config (presumably enables query logging — TODO confirm downstream usage).
debug?: boolean
// Stored on the runner but currently unused there (field is marked @ts-ignore).
inApp?: boolean
}
/**
 * Encapsulates the lifecycle of an integration-test Medusa application:
 * creates a per-worker database, boots the app, exposes lazily-bound
 * proxies for the HTTP client and db connection, and tears everything
 * down (db, server, module instances) so no open handles remain.
 *
 * Note: the diff rendering stripped the original indentation; the code
 * below is kept byte-identical to the source lines.
 */
class MedusaTestRunner {
private dbName: string
private schema: string
private cwd: string
private env: Record<string, any>
private debug: boolean
// @ts-ignore
private inApp: boolean
private dbUtils: ReturnType<typeof dbTestUtilFactory>
private dbConfig: {
dbName: string
clientUrl: string
schema: string
debug: boolean
}
// Set once the app has booted; disposed and nulled in cleanup().
private globalContainer: MedusaContainer | null = null
// Axios instance targeting the booted server; accessed through createApiProxy().
private apiUtils: any = null
// Result of appLoader.load(); exposed via getOptions().getMedusaApp().
private loadedApplication: any = null
// No-op until setupApplication() replaces it with the real shutdown.
private shutdown: () => Promise<void> = async () => void 0
// Lets beforeEach() skip re-running the module loaders right after beforeAll().
private isFirstTime = true
constructor(config: TestRunnerConfig) {
// One database per jest worker so parallel workers don't collide.
const tempName = parseInt(process.env.JEST_WORKER_ID || "1")
const moduleName =
config.moduleName ?? Math.random().toString(36).substring(7)
this.dbName =
config.dbName ??
`medusa-${moduleName.toLowerCase()}-integration-${tempName}`
this.schema = config.schema ?? "public"
this.cwd = config.medusaConfigFile ?? process.cwd()
this.env = config.env ?? {}
this.debug = config.debug ?? false
this.inApp = config.inApp ?? false
this.dbUtils = dbTestUtilFactory()
this.dbConfig = {
dbName: this.dbName,
clientUrl: getDatabaseURL(this.dbName),
schema: this.schema,
debug: this.debug,
}
this.setupProcessHandlers()
}
// Ensure cleanup() runs even when the test process is killed externally.
// Handlers are removed again at the start of cleanup() to avoid firing twice.
private setupProcessHandlers(): void {
process.on("SIGTERM", async () => {
await this.cleanup()
process.exit(0)
})
process.on("SIGINT", async () => {
await this.cleanup()
process.exit(0)
})
}
// Proxy so the api object handed to the suite stays valid even though
// apiUtils is created later (in setupApplication) and nulled in cleanup.
private createApiProxy(): any {
return new Proxy(
{},
{
get: (target, prop) => {
return this.apiUtils?.[prop]
},
}
)
}
// Same late-binding trick for the pg connection created in initializeDatabase().
private createDbConnectionProxy(): any {
return new Proxy(
{},
{
get: (target, prop) => {
return this.dbUtils.pgConnection_?.[prop]
},
}
)
}
// Creates the per-worker database and opens the shared pg connection.
// On failure, runs full cleanup before rethrowing so nothing is left open.
private async initializeDatabase(): Promise<void> {
try {
logger.info(`Creating database ${this.dbName}`)
await this.dbUtils.create(this.dbName)
// assumes initDb() picks up the db config applied via configLoaderOverride/env — TODO confirm
this.dbUtils.pgConnection_ = await initDb()
} catch (error) {
logger.error(`Error initializing database: ${error?.message}`)
await this.cleanup()
throw error
}
}
// Full boot sequence: db -> migrations -> links -> app load -> HTTP server,
// then wires up the axios client with a cancel token tied to shutdown.
private async setupApplication(): Promise<void> {
const { container, MedusaAppLoader } = await import("@medusajs/framework")
const appLoader = new MedusaAppLoader()
container.register({
[ContainerRegistrationKeys.LOGGER]: asValue(logger),
})
await this.initializeDatabase()
logger.info(
`Migrating database with core migrations and links ${this.dbName}`
)
await migrateDatabase(appLoader)
await syncLinks(appLoader, this.cwd, container, logger)
// Drop module instances created during migration/link sync before the real load.
await clearInstances()
this.loadedApplication = await appLoader.load()
try {
const {
shutdown,
container: appContainer,
port,
} = await startApp({
cwd: this.cwd,
env: this.env,
})
this.globalContainer = appContainer
// Wrap the app shutdown so in-flight HTTP requests are cancelled too.
this.shutdown = async () => {
await shutdown()
if (this.apiUtils?.cancelToken?.source) {
this.apiUtils.cancelToken.source.cancel(
"Request canceled by shutdown"
)
}
}
const { default: axios } = (await import("axios")) as any
const cancelTokenSource = axios.CancelToken.source()
this.apiUtils = axios.create({
baseURL: `http://localhost:${port}`,
cancelToken: cancelTokenSource.token,
})
// Keep a handle on the source so shutdown/cleanup can cancel pending requests.
this.apiUtils.cancelToken = { source: cancelTokenSource }
} catch (error) {
logger.error(`Error starting the app: ${error?.message}`)
await this.cleanup()
throw error
}
}
// Releases everything in a fixed order: db -> app server -> module
// instances -> pending requests -> container, then drops all references
// so the worker can be garbage-collected. Errors are logged, not thrown,
// so cleanup never masks the original test failure.
public async cleanup(): Promise<void> {
try {
// Avoid the signal handlers re-entering cleanup once it has started.
process.removeAllListeners("SIGTERM")
process.removeAllListeners("SIGINT")
await this.dbUtils.shutdown(this.dbName)
await this.shutdown()
await clearInstances()
if (this.apiUtils?.cancelToken?.source) {
this.apiUtils.cancelToken.source.cancel("Cleanup")
}
if (this.globalContainer?.dispose) {
await this.globalContainer.dispose()
}
this.apiUtils = null
this.loadedApplication = null
this.globalContainer = null
// Encourage GC when node runs with --expose-gc.
if (global.gc) {
global.gc()
}
} catch (error) {
logger.error("Error during cleanup:", error?.message)
}
}
// Entry point called once per suite; guarantees cleanup on any boot failure.
public async beforeAll(): Promise<void> {
try {
this.setupProcessHandlers()
await configLoaderOverride(this.cwd, this.dbConfig)
applyEnvVarsToProcess(this.env)
await this.setupApplication()
} catch (error) {
await this.cleanup()
throw error
}
}
// Re-runs the module loaders on a copied container before every test
// (except the first, which beforeAll already fully set up) so each test
// starts from freshly-loaded modules against a truncated database.
public async beforeEach(): Promise<void> {
if (this.isFirstTime) {
this.isFirstTime = false
return
}
await this.afterEach()
const container = this.globalContainer as MedusaContainer
const copiedContainer = createMedusaContainer({}, container)
try {
const { MedusaAppLoader } = await import("@medusajs/framework")
const medusaAppLoader = new MedusaAppLoader({
container: copiedContainer,
})
await medusaAppLoader.runModulesLoader()
} catch (error) {
await copiedContainer.dispose?.()
logger.error("Error running modules loaders:", error?.message)
throw error
}
}
// Truncates the schema's data between tests (see dbUtils.teardown).
public async afterEach(): Promise<void> {
try {
await this.dbUtils.teardown({ schema: this.schema })
} catch (error) {
logger.error("Error tearing down database:", error?.message)
throw error
}
}
// Options handed to the test suite; api/dbConnection are proxies so they
// remain usable even though the underlying objects are created later.
public getOptions(): MedusaSuiteOptions {
return {
api: this.createApiProxy(),
dbConnection: this.createDbConnectionProxy(),
getMedusaApp: () => this.loadedApplication,
getContainer: () => this.globalContainer as MedusaContainer,
dbConfig: {
dbName: this.dbName,
schema: this.schema,
clientUrl: this.dbConfig.clientUrl,
},
dbUtils: this.dbUtils,
}
}
}
export function medusaIntegrationTestRunner({
moduleName,
dbName,
@@ -52,172 +301,61 @@ export function medusaIntegrationTestRunner({
inApp?: boolean
testSuite: (options: MedusaSuiteOptions) => void
}) {
const tempName = parseInt(process.env.JEST_WORKER_ID || "1")
moduleName = moduleName ?? Math.random().toString(36).substring(7)
dbName ??= `medusa-${moduleName.toLowerCase()}-integration-${tempName}`
let dbConfig = {
const runner = new MedusaTestRunner({
moduleName,
dbName,
clientUrl: getDatabaseURL(dbName),
medusaConfigFile,
schema,
env,
debug,
}
const cwd = medusaConfigFile ?? process.cwd()
let shutdown = async () => void 0
const dbUtils = dbTestUtilFactory()
let globalContainer: ContainerLike
let apiUtils: any
let loadedApplication: any
let options = {
api: new Proxy(
{},
{
get: (target, prop) => {
return apiUtils[prop]
},
}
),
dbConnection: new Proxy(
{},
{
get: (target, prop) => {
return dbUtils.pgConnection_[prop]
},
}
),
getMedusaApp: () => loadedApplication,
getContainer: () => globalContainer,
dbConfig: {
dbName,
schema,
clientUrl: dbConfig.clientUrl,
},
dbUtils,
} as MedusaSuiteOptions
let isFirstTime = true
const beforeAll_ = async () => {
await configLoaderOverride(cwd, dbConfig)
applyEnvVarsToProcess(env)
const { logger, container, MedusaAppLoader } = await import(
"@medusajs/framework"
)
const appLoader = new MedusaAppLoader()
container.register({
[ContainerRegistrationKeys.LOGGER]: asValue(logger),
})
try {
logger.info(`Creating database ${dbName}`)
await dbUtils.create(dbName)
dbUtils.pgConnection_ = await initDb()
} catch (error) {
logger.error(`Error initializing database: ${error?.message}`)
throw error
}
logger.info(`Migrating database with core migrations and links ${dbName}`)
await migrateDatabase(appLoader)
await syncLinks(appLoader, cwd, container, logger)
await clearInstances()
let containerRes: MedusaContainer = container
let serverShutdownRes: () => any
let portRes: number
loadedApplication = await appLoader.load()
try {
const {
shutdown = () => void 0,
container: appContainer,
port,
} = await startApp({
cwd,
env,
})
containerRes = appContainer
serverShutdownRes = shutdown
portRes = port
} catch (error) {
logger.error(`Error starting the app: error?.message`)
throw error
}
/**
* Run application migrations and sync links when inside
* an application
*/
if (inApp) {
logger.info(`Migrating database with core migrations and links ${dbName}`)
await migrateDatabase(appLoader)
await syncLinks(appLoader, cwd, containerRes, logger)
}
const { default: axios } = (await import("axios")) as any
const cancelTokenSource = axios.CancelToken.source()
globalContainer = containerRes
shutdown = async () => {
await serverShutdownRes()
cancelTokenSource.cancel("Request canceled by shutdown")
}
apiUtils = axios.create({
baseURL: `http://localhost:${portRes}`,
cancelToken: cancelTokenSource.token,
})
}
const beforeEach_ = async () => {
// The beforeAll already run everything, so lets not re run the loaders for the first iteration
if (isFirstTime) {
isFirstTime = false
return
}
const container = options.getContainer()
const copiedContainer = createMedusaContainer({}, container)
try {
const { MedusaAppLoader } = await import("@medusajs/framework")
const medusaAppLoader = new MedusaAppLoader({
container: copiedContainer,
})
await medusaAppLoader.runModulesLoader()
} catch (error) {
console.error("Error runner modules loaders", error?.message)
throw error
}
}
const afterEach_ = async () => {
try {
await dbUtils.teardown({ schema })
} catch (error) {
console.error("Error tearing down database:", error?.message)
throw error
}
}
inApp,
})
return describe("", () => {
beforeAll(beforeAll_)
beforeEach(beforeEach_)
afterEach(afterEach_)
afterAll(async () => {
await dbUtils.shutdown(dbName)
await shutdown()
let testOptions: MedusaSuiteOptions
beforeAll(async () => {
await runner.beforeAll()
testOptions = runner.getOptions()
})
testSuite(options!)
beforeEach(async () => {
await runner.beforeEach()
})
afterEach(async () => {
await runner.afterEach()
})
afterAll(async () => {
// Run main cleanup
await runner.cleanup()
// Clean references to the test options
for (const key in testOptions) {
if (typeof testOptions[key] === "function") {
testOptions[key] = null
} else if (
typeof testOptions[key] === "object" &&
testOptions[key] !== null
) {
Object.keys(testOptions[key]).forEach((k) => {
testOptions[key][k] = null
})
testOptions[key] = null
}
}
// Encourage garbage collection
// @ts-ignore
testOptions = null
if (global.gc) {
global.gc()
}
})
// Run test suite with options
testSuite(runner.getOptions())
})
}

View File

@@ -268,7 +268,7 @@ async function start(args: {
if (!isShuttingDown) {
cluster.fork()
} else if (!isPresent(cluster.workers)) {
setTimeout(killMainProccess, 100)
setTimeout(killMainProccess, 100).unref()
}
})

View File

@@ -328,7 +328,7 @@ export class DataSynchronizer {
break
}
await setTimeout(0)
await setTimeout(0, undefined, { ref: false })
}
let acknoledgement: { lastCursor: string; done?: boolean; err?: Error } = {

View File

@@ -193,7 +193,7 @@ export class InMemoryLockingProvider implements ILockingProvider {
setTimeout(() => {
cancellationToken.cancelled = true
reject(new Error("Timed-out acquiring lock."))
}, seconds * 1000)
}, seconds * 1000).unref()
})
}
}

View File

@@ -70,14 +70,6 @@ export class WorkflowsModuleService<
}
__hooks = {
onApplicationShutdown: async () => {
await this.workflowOrchestratorService_.onApplicationShutdown()
await this.redisDisconnectHandler_()
clearInterval(this.clearTimeout_)
},
onApplicationPrepareShutdown: async () => {
await this.workflowOrchestratorService_.onApplicationPrepareShutdown()
},
onApplicationStart: async () => {
await this.workflowOrchestratorService_.onApplicationStart()
@@ -88,6 +80,14 @@ export class WorkflowsModuleService<
} catch {}
}, 1000 * 60 * 60)
},
onApplicationPrepareShutdown: async () => {
await this.workflowOrchestratorService_.onApplicationPrepareShutdown()
},
onApplicationShutdown: async () => {
await this.workflowOrchestratorService_.onApplicationShutdown()
await this.redisDisconnectHandler_()
clearInterval(this.clearTimeout_)
},
}
static prepareFilters<T>(filters: T & { q?: string }) {