### What
Leverage yarn workspaces and Turborepo for integration-tests in order to accelerate development and reduce DevX complexity.
### Why
The current solution for running integration tests requires using `medusa-dev-cli` in order to publish packages to a local npm repository. The package where the command is executed will have its package.json altered for any known medusa dependency in order to install from the local npm. The process is taxing on the host machine resources and prevents rapid iterations when working with integration tests.
For more information, see documentation: f0cc1b324c/docs/content/usage/local-development.md (run-api-integration-tests)
### How
By declaring `integration-tests/**/*` as a workspace, Turborepo can now be leveraged to build and run integration tests as if they were packages. The build process will take care of interdependencies between packages in order to ensure local dependencies are met.
In addition, within each integration-tests "packages", we can declare local dependencies as "*" which will translate to using the one that is part of the current build, regardless of the dependency's version number. No more fiddling with version numbers.
GitHub Actions workflows pertaining to integration-tests have been streamlined to use the new behavior.
The integration-tests packages have been marked as `private: true` in order to avoid publishing them to the public npm registry.
### Testing
```
cd root-of-medusajs-medusa-repo/
yarn install
yarn build
yarn test:integration:api
yarn test:integration:plugins
```
After a code change, `yarn build` must be run before re-running an integration test, which is the same procedure as for unit tests.
Resolves: CORE-845
153 lines
3.7 KiB
JavaScript
153 lines
3.7 KiB
JavaScript
const path = require("path")
|
|
|
|
const { dropDatabase } = require("pg-god")
|
|
const { createConnection } = require("typeorm")
|
|
const dbFactory = require("./use-template-db")
|
|
|
|
const DB_HOST = process.env.DB_HOST
|
|
const DB_USERNAME = process.env.DB_USERNAME
|
|
const DB_PASSWORD = process.env.DB_PASSWORD
|
|
const DB_NAME = process.env.DB_TEMP_NAME
|
|
const DB_URL = `postgres://${DB_USERNAME}:${DB_PASSWORD}@${DB_HOST}/${DB_NAME}`
|
|
|
|
const pgGodCredentials = {
|
|
user: DB_USERNAME,
|
|
password: DB_PASSWORD,
|
|
host: DB_HOST,
|
|
}
|
|
|
|
const keepTables = [
|
|
"store",
|
|
"staged_job",
|
|
"shipping_profile",
|
|
"fulfillment_provider",
|
|
"payment_provider",
|
|
"country",
|
|
"currency",
|
|
]
|
|
|
|
let connectionType = "postgresql"
|
|
|
|
const DbTestUtil = {
|
|
db_: null,
|
|
|
|
setDb: function (connection) {
|
|
this.db_ = connection
|
|
},
|
|
|
|
clear: async function () {
|
|
this.db_.synchronize(true)
|
|
},
|
|
|
|
teardown: async function ({ forceDelete } = {}) {
|
|
forceDelete = forceDelete || []
|
|
|
|
const entities = this.db_.entityMetadatas
|
|
const manager = this.db_.manager
|
|
|
|
if (connectionType === "sqlite") {
|
|
await manager.query(`PRAGMA foreign_keys = OFF`)
|
|
} else {
|
|
await manager.query(`SET session_replication_role = 'replica';`)
|
|
}
|
|
|
|
for (const entity of entities) {
|
|
if (
|
|
keepTables.includes(entity.tableName) &&
|
|
!forceDelete.includes(entity.tableName)
|
|
) {
|
|
continue
|
|
}
|
|
|
|
await manager.query(`DELETE
|
|
FROM "${entity.tableName}";`)
|
|
}
|
|
if (connectionType === "sqlite") {
|
|
await manager.query(`PRAGMA foreign_keys = ON`)
|
|
} else {
|
|
await manager.query(`SET session_replication_role = 'origin';`)
|
|
}
|
|
},
|
|
|
|
shutdown: async function () {
|
|
await this.db_.close()
|
|
return await dropDatabase({ DB_NAME }, pgGodCredentials)
|
|
},
|
|
}
|
|
|
|
const instance = DbTestUtil
|
|
|
|
module.exports = {
|
|
initDb: async function ({ cwd }) {
|
|
const configPath = path.resolve(path.join(cwd, `medusa-config.js`))
|
|
const { projectConfig, featureFlags } = require(configPath)
|
|
|
|
const featureFlagsLoader =
|
|
require("@medusajs/medusa/dist/loaders/feature-flags").default
|
|
|
|
const featureFlagsRouter = featureFlagsLoader({ featureFlags })
|
|
|
|
const modelsLoader = require("@medusajs/medusa/dist/loaders/models").default
|
|
|
|
const entities = modelsLoader({}, { register: false })
|
|
|
|
if (projectConfig.database_type === "sqlite") {
|
|
connectionType = "sqlite"
|
|
const dbConnection = await createConnection({
|
|
type: "sqlite",
|
|
database: projectConfig.database_database,
|
|
synchronize: true,
|
|
entities,
|
|
})
|
|
|
|
instance.setDb(dbConnection)
|
|
return dbConnection
|
|
} else {
|
|
await dbFactory.createFromTemplate(DB_NAME)
|
|
|
|
// get migrations with enabled featureflags
|
|
const migrationDir = path.resolve(
|
|
path.join(
|
|
__dirname,
|
|
`../../`,
|
|
`node_modules`,
|
|
`@medusajs`,
|
|
`medusa`,
|
|
`dist`,
|
|
`migrations`,
|
|
`*.js`
|
|
)
|
|
)
|
|
|
|
const {
|
|
getEnabledMigrations,
|
|
} = require("@medusajs/medusa/dist/commands/utils/get-migrations")
|
|
|
|
const enabledMigrations = await getEnabledMigrations(
|
|
[migrationDir],
|
|
(flag) => featureFlagsRouter.isFeatureEnabled(flag)
|
|
)
|
|
|
|
const enabledEntities = entities.filter(
|
|
(e) => typeof e.isFeatureEnabled === "undefined" || e.isFeatureEnabled()
|
|
)
|
|
|
|
const dbConnection = await createConnection({
|
|
type: "postgres",
|
|
url: DB_URL,
|
|
entities: enabledEntities,
|
|
migrations: enabledMigrations,
|
|
name: "integration-tests",
|
|
})
|
|
|
|
await dbConnection.runMigrations()
|
|
|
|
instance.setDb(dbConnection)
|
|
return dbConnection
|
|
}
|
|
},
|
|
useDb: function () {
|
|
return instance
|
|
},
|
|
}
|