feat: Development server for core + plugins (#2448)

Carlos R. L. Rodrigues
2022-10-21 10:53:06 -03:00
committed by GitHub
parent 4de4f20b46
commit b88cef2b1f
23 changed files with 762 additions and 14 deletions

View File

@@ -8,6 +8,7 @@ on:
jobs:
test-cli-with-database:
env:
NODE_ENV: CI
REDIS_URL: redis://localhost:6379
DATABASE_URL: "postgres://postgres:postgres@localhost/cli-test"

View File

@@ -125,7 +125,7 @@ module.exports = async (connection, data = {}) => {
})
await manager.save(liRma)
manager.insert(CustomShippingOption, {
await manager.insert(CustomShippingOption, {
id: "cso-test",
cart_id: cartWithCustomSo.id,
price: 0,

View File

@@ -0,0 +1,7 @@
# Default postgres credentials
DB_HOST=localhost
DB_USERNAME=postgres
DB_PASSWORD=''
DB_NAME=development
SERVER_PORT=9000

View File

@@ -0,0 +1,31 @@
const path = require("path")
require("dotenv").config({ path: path.join(__dirname, ".env.development") })
const { initDb } = require("./use-db-development")
require("./dev-require")
const seedDB = async (db) => {
const seeder = require("./database/index.js")
try {
await seeder(db)
} catch (err) {
console.log("Error", err)
}
}
const start = async () => {
console.log("Creating DB...")
const dbConnection = await initDb()
console.log("Creating DB. DONE")
console.log("Seeding DB...")
await seedDB(dbConnection)
console.log("Seeding DB... DONE")
await dbConnection.close()
process.exit()
}
start()

View File

@@ -0,0 +1,17 @@
const { Customer } = require("@medusajs/medusa")
module.exports = async (connection) => {
const manager = connection.manager
const customer = await manager.create(Customer, {
id: "customer-1",
email: "test1@email.com",
first_name: "John",
last_name: "Doe",
password_hash:
"c2NyeXB0AAEAAAABAAAAAVMdaddoGjwU1TafDLLlBKnOTQga7P2dbrfgf3fB+rCD/cJOMuGzAvRdKutbYkVpuJWTU39P7OpuWNkUVoEETOVLMJafbI8qs8Qx/7jMQXkN",
// password matching "test"
has_account: true,
})
await manager.save(customer)
}

View File

@@ -0,0 +1,9 @@
const user = require("./user")
const region = require("./region")
const customer = require("./customer")
module.exports = async (db) => {
await user(db)
await region(db)
await customer(db)
}

View File

@@ -0,0 +1,45 @@
const { Region } = require("@medusajs/medusa")
module.exports = async (connection) => {
const manager = connection.manager
const r = manager.create(Region, {
id: "test-region",
name: "Test Region",
payment_providers: [{ id: "test-pay" }],
currency_code: "usd",
tax_rate: 0,
})
await manager.save(r)
const europeRegion = manager.create(Region, {
id: "eur-region",
name: "Europe Region",
payment_providers: [{ id: "test-pay" }],
currency_code: "eur",
tax_rate: 0,
})
await manager.save(europeRegion)
// Region with multiple countries
const regionWithMultipleCountries = manager.create(Region, {
id: "test-region-multiple",
name: "Test Region",
currency_code: "eur",
tax_rate: 0,
})
await manager.save(regionWithMultipleCountries)
await manager.query(
`UPDATE "country" SET region_id='test-region-multiple' WHERE iso_2 = 'no'`
)
await manager.query(
`UPDATE "country" SET region_id='test-region-multiple' WHERE iso_2 = 'dk'`
)
await manager.query(
`UPDATE "country" SET region_id='test-region' WHERE iso_2 = 'us'`
)
}

View File

@@ -0,0 +1,17 @@
const Scrypt = require("scrypt-kdf")
const { User } = require("@medusajs/medusa")
module.exports = async (connection) => {
const manager = connection.manager
const buf = await Scrypt.kdf("secret_password", { logN: 1, r: 1, p: 1 })
const password_hash = buf.toString("base64")
await manager.insert(User, {
id: "admin_user",
email: "admin@medusa.js",
api_token: "test_token",
role: "admin",
password_hash,
})
}
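
The admin seeder above hashes a known plaintext with deliberately cheap scrypt parameters so reseeding stays fast. A minimal sketch (assuming scrypt-kdf's kdf/verify API; not part of this commit) of how such a hash can be checked against the plaintext:

const Scrypt = require("scrypt-kdf")
const checkDevPassword = async () => {
// Same intentionally weak parameters as the seeder: fine for throwaway dev data,
// never for real credentials.
const keyBuf = await Scrypt.kdf("secret_password", { logN: 1, r: 1, p: 1 })
const password_hash = keyBuf.toString("base64")
// scrypt-kdf stores its parameters inside the key, so verification does not
// need to know which cost settings were used when hashing.
const ok = await Scrypt.verify(Buffer.from(password_hash, "base64"), "secret_password")
console.log("password matches:", ok) // true
}
checkDevPassword()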

View File

@@ -0,0 +1,55 @@
if (process.env.NODE_ENV !== "development") {
return
}
const path = require("path")
const Module = require("module")
const originalRequire = Module.prototype.require
const medusaCore = path.resolve(path.join(__dirname, "../../packages"))
function replacePath(requirePath, package, concatPackage = true) {
const idx = requirePath.indexOf(package)
const packPath = requirePath.substring(idx + package.length)
let newPath = path.resolve(
medusaCore +
"/" +
(concatPackage ? package + "/" : "") +
packPath.replace("/dist", "/src").replace(".js", "")
)
if (!newPath.includes("/src")) {
newPath += "/src"
}
return newPath
}
Module.prototype.require = function (...args) {
const interfaces = "medusa-interfaces"
const utils = "medusa-core-utils"
const base = "@medusajs"
if (args[0].includes(base)) {
args[0] = replacePath(args[0], base, false)
} else if (args[0].includes(interfaces)) {
args[0] = replacePath(args[0], interfaces)
} else if (args[0].includes(utils)) {
args[0] = replacePath(args[0], utils)
}
if (args[0] === "glob") {
const glob = originalRequire.apply(this, args)
const originalGlobSync = glob.sync
glob.GlobSync = glob.sync = (pattern, options) => {
if (pattern.endsWith(".js") || pattern.endsWith(".ts")) {
pattern = pattern.replace(".js", ".{j,t}s").replace("/dist/", "/src/")
}
return originalGlobSync.apply(this, [pattern, options])
}
return glob
}
return originalRequire.apply(this, args)
}
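
This require hook is what lets the dev server run core and plugins straight from the monorepo's TypeScript sources: any specifier that points at a published package's dist build is rewritten to the corresponding packages/*/src path before Node resolves it, and glob patterns are widened to match .ts files. A small self-contained sketch of the rewrite (the /repo/packages base path and POSIX separators are illustrative; not part of this commit):

const path = require("path")
const medusaCore = "/repo/packages" // assumed monorepo layout
function replacePath(requirePath, pkg, concatPackage = true) {
// Identical logic to the hook above, with a fixed base path for demonstration.
const idx = requirePath.indexOf(pkg)
const packPath = requirePath.substring(idx + pkg.length)
let newPath = path.resolve(
medusaCore +
"/" +
(concatPackage ? pkg + "/" : "") +
packPath.replace("/dist", "/src").replace(".js", "")
)
if (!newPath.includes("/src")) {
newPath += "/src"
}
return newPath
}
console.log(replacePath("@medusajs/medusa/dist/loaders/models", "@medusajs", false))
// -> /repo/packages/medusa/src/loaders/models
console.log(replacePath("medusa-interfaces", "medusa-interfaces"))
// -> /repo/packages/medusa-interfaces/src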

View File

@@ -0,0 +1,23 @@
const DB_HOST = process.env.DB_HOST
const DB_USERNAME = process.env.DB_USERNAME
const DB_PASSWORD = process.env.DB_PASSWORD
const DB_NAME = process.env.DB_NAME
module.exports = {
plugins: [
{
resolve: `./packages/medusa-payment-stripe`,
options: {
api_key: "api_key",
webhook_secret: "api_key",
},
},
],
projectConfig: {
redis_url: process.env.REDIS_URL,
database_url: `postgres://${DB_USERNAME}:${DB_PASSWORD}@${DB_HOST}/${DB_NAME}`,
database_type: "postgres",
jwt_secret: "test",
cookie_secret: "test",
},
}

View File

@@ -0,0 +1,117 @@
const path = require("path")
const express = require("express")
const importFrom = require("import-from")
const chokidar = require("chokidar")
require("dotenv").config({ path: path.join(__dirname, ".env.development") })
process.env.DEV_MODE = !!process[Symbol.for("ts-node.register.instance")]
require("./dev-require")
const medusaCore = path
.resolve(path.join(__dirname, "../../packages"))
.replace(/\\/g, "/")
let WATCHING = false
const watchFiles = () => {
if (WATCHING) {
return
}
WATCHING = true
const watcher = chokidar.watch(medusaCore, {
ignored: (rawPath) => {
const path = rawPath.replace(/\\/g, "/")
if (
path.includes("/node_modules") ||
path.includes("/dist") ||
path.includes("/__") ||
(/\..*/i.test(path) &&
!(
path.endsWith(".js") ||
path.endsWith(".ts") ||
path.includes("/src")
))
) {
return true
}
return false
},
})
watcher.on("change", async function (rawFile) {
console.log("Reloading server...")
const start = Date.now()
const file = rawFile.replace(/\\/g, "/")
if (file.includes("/models") || file.includes("/repositories")) {
Object.keys(require.cache).forEach(function (id) {
const name = require.cache[id].filename
if (!name.includes("typeorm")) {
return
}
delete require.cache[id]
})
}
const allModules = Object.keys(module.constructor._cache)
const path = file.split("/")
const src = path.findIndex((folder) => folder === "src")
const next = path.slice(0, src + 2).join("/")
for (const rawName of allModules) {
const name = rawName.replace(/\\/g, "/")
if (name.includes("typeorm")) {
delete module.constructor._cache[rawName]
} else if (name.includes(medusaCore)) {
if (
name.includes("repositories") ||
name.includes("loaders") ||
next.endsWith(".js") ||
next.endsWith(".ts") ||
name.startsWith(next)
) {
delete module.constructor._cache[rawName]
}
}
}
await bootstrapApp()
console.log("Server reloaded in", Date.now() - start, "ms")
})
}
let server
let database
const bootstrapApp = async () => {
if (server) {
server.close()
}
const app = express()
const dir = path.resolve(
path.join(__dirname, "../../packages/medusa/src/loaders")
)
const loaders = importFrom(dir, ".").default
const configDir = __dirname
const { dbConnection } = await loaders({
directory: configDir,
expressApp: app,
})
const port = process.env.SERVER_PORT ?? 9000
server = app.listen(port, (err) => {
watchFiles()
console.log(`Server Running at localhost:${port}`)
})
database = dbConnection
}
bootstrapApp()
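
The watcher above reloads the server without restarting the process by evicting modules from Node's require cache (module.constructor._cache is the same object as require.cache) and then calling bootstrapApp() again. A stripped-down sketch of that eviction step, assuming POSIX-style paths:

const invalidate = (changedFile) => {
for (const id of Object.keys(require.cache)) {
const name = id.replace(/\\/g, "/")
// Re-evaluate the changed module itself on the next require...
if (name === changedFile) {
delete require.cache[id]
}
// ...and drop typeorm so entity metadata is rebuilt rather than duplicated
// when the models are registered again.
if (name.includes("typeorm")) {
delete require.cache[id]
}
}
}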

View File

@@ -0,0 +1,96 @@
import { AbstractFileService } from "@medusajs/medusa"
import stream from "stream"
import { resolve } from "path"
import * as fs from "fs"
import mkdirp from "mkdirp"
export default class LocalFileService extends AbstractFileService {
constructor({}, options) {
super({}, options)
this.upload_dir_ =
process.env.UPLOAD_DIR ?? options.upload_dir ?? "uploads/images"
if (!fs.existsSync(this.upload_dir_)) {
fs.mkdirSync(this.upload_dir_)
}
}
upload(file) {
return new Promise((resolvePromise, reject) => {
const path = resolve(this.upload_dir_, file.originalname)
let content = ""
if (file.filename) {
content = fs.readFileSync(
resolve(process.cwd(), "uploads", file.filename)
)
}
const pathSegments = path.split("/")
pathSegments.splice(-1)
const dirname = pathSegments.join("/")
mkdirp.sync(dirname, { recursive: true })
fs.writeFile(path, content.toString(), (err) => {
if (err) {
reject(err)
}
resolvePromise({ url: path })
})
})
}
delete({ fileKey }) {
return new Promise((resolvePromise, reject) => {
const path = resolve(this.upload_dir_, fileKey)
fs.unlink(path, (err) => {
if (err) {
reject(err)
}
resolvePromise("file unlinked")
})
})
}
async getUploadStreamDescriptor({ name, ext }) {
const fileKey = `${name}.${ext}`
const path = resolve(this.upload_dir_, fileKey)
const isFileExists = fs.existsSync(path)
if (!isFileExists) {
await this.upload({ originalname: fileKey })
}
const pass = new stream.PassThrough()
pass.pipe(fs.createWriteStream(path))
return {
writeStream: pass,
promise: Promise.resolve(),
url: `${this.upload_dir_}/${fileKey}`,
fileKey,
}
}
async getDownloadStream({ fileKey }) {
return new Promise((resolvePromise, reject) => {
try {
const path = resolve(this.upload_dir_, fileKey)
const data = fs.readFileSync(path)
const readable = stream.Readable()
readable._read = function () {}
readable.push(data.toString())
readable.push(null)
resolvePromise(readable)
} catch (e) {
reject(e)
}
})
}
async getPresignedDownloadUrl({ fileKey }) {
return `${this.upload_dir_}/${fileKey}`
}
}
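
A hypothetical consumer of the stream descriptor returned above (the fileService handle and csvStream are illustrative, not part of this commit); anything piped into writeStream lands under the configured upload directory:

const demoUpload = async (fileService, csvStream) => {
const { writeStream, url, fileKey } = await fileService.getUploadStreamDescriptor({
name: "export-orders",
ext: "csv",
})
csvStream.pipe(writeStream)
console.log(url, fileKey) // e.g. uploads/images/export-orders.csv export-orders.csv
}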

View File

@@ -0,0 +1,53 @@
import { FulfillmentService } from "medusa-interfaces"
class TestFulService extends FulfillmentService {
static identifier = "test-ful"
constructor() {
super()
}
getFulfillmentOptions() {
return [
{
id: "manual-fulfillment",
},
]
}
validateFulfillmentData(data, cart) {
return data
}
validateOption(data) {
return true
}
canCalculate() {
return true
}
calculatePrice(data) {
return data.price
}
createOrder() {
// No data is being sent anywhere
return Promise.resolve({})
}
createReturn() {
return Promise.resolve({})
}
createFulfillment() {
// No data is being sent anywhere
return Promise.resolve({})
}
cancelFulfillment() {
return Promise.resolve({})
}
}
export default TestFulService

View File

@@ -0,0 +1,19 @@
import { NotificationService } from "medusa-interfaces"
class TestNotiService extends NotificationService {
static identifier = "test-not"
constructor() {
super()
}
async sendNotification() {
return Promise.resolve()
}
async resendNotification() {
return Promise.resolve()
}
}
export default TestNotiService

View File

@@ -0,0 +1,87 @@
import { AbstractPaymentService } from "@medusajs/medusa"
class TestPayService extends AbstractPaymentService {
static identifier = "test-pay"
constructor(_) {
super(_)
}
async getStatus(paymentData) {
return "authorized"
}
async retrieveSavedMethods(customer) {
return Promise.resolve([])
}
async createPayment(cart) {
const fields = [
"total",
"subtotal",
"tax_total",
"discount_total",
"shipping_total",
"gift_card_total",
]
const data = {}
for (const k of fields) {
data[k] = cart[k]
}
return data
}
async createPaymentNew(inputData) {
return inputData
}
async retrievePayment(data) {
return {}
}
async getPaymentData(sessionData) {
return {}
}
async authorizePayment(sessionData, context = {}) {
if (
sessionData.cart_id === "cart-id-tax-line-testing-for-pending-payment"
) {
return { data: {}, status: "pending" }
}
return { data: {}, status: "authorized" }
}
async updatePaymentData(sessionData, update) {
return {}
}
async updatePayment(sessionData, cart) {
return {}
}
async updatePaymentNew(sessionData) {
return sessionData
}
async deletePayment(payment) {
return {}
}
async capturePayment(payment) {
return {}
}
async refundPayment(payment, amountToRefund) {
return {}
}
async cancelPayment(payment) {
return {}
}
}
export default TestPayService

View File

@@ -0,0 +1,85 @@
const path = require("path")
const { createConnection } = require("typeorm")
const DB_HOST = process.env.DB_HOST
const DB_USERNAME = process.env.DB_USERNAME
const DB_PASSWORD = process.env.DB_PASSWORD
const DB_NAME = process.env.DB_NAME
const DB_URL = `postgres://${DB_USERNAME}:${DB_PASSWORD}@${DB_HOST}/${DB_NAME}`
require("./dev-require")
async function createDB() {
const connection = await createConnection({
type: "postgres",
url: `postgres://${DB_USERNAME}:${DB_PASSWORD}@${DB_HOST}`,
})
await connection.query(`DROP DATABASE IF EXISTS "${DB_NAME}";`)
await connection.query(`CREATE DATABASE "${DB_NAME}";`)
await connection.close()
}
module.exports = {
initDb: async function () {
const cwd = path.resolve(path.join(__dirname, "../.."))
const configPath = path.resolve(
path.join(__dirname, "../api/medusa-config.js")
)
const { featureFlags } = require(configPath)
const basePath = path.join(cwd, "packages/medusa/src")
const featureFlagsLoader = require(path.join(
basePath,
`loaders`,
`feature-flags`
)).default
const featureFlagsRouter = featureFlagsLoader({ featureFlags })
const modelsLoader = require(path.join(
basePath,
`loaders`,
`models`
)).default
const entities = modelsLoader({}, { register: false })
// get migrations with enabled feature flags
const migrationDir = path.resolve(
path.join(basePath, `migrations`, `*.{j,t}s`)
)
const { getEnabledMigrations } = require(path.join(
basePath,
`commands`,
`utils`,
`get-migrations`
))
const enabledMigrations = await getEnabledMigrations(
[migrationDir],
(flag) => featureFlagsRouter.isFeatureEnabled(flag)
)
const enabledEntities = entities.filter(
(e) => typeof e.isFeatureEnabled === "undefined" || e.isFeatureEnabled()
)
await createDB()
const dbConnection = await createConnection({
type: "postgres",
url: DB_URL,
entities: enabledEntities,
migrations: enabledMigrations,
//logging: true,
})
await dbConnection.runMigrations()
return dbConnection
},
}

View File

@@ -125,7 +125,7 @@ module.exports = async (connection, data = {}) => {
})
await manager.save(liRma)
manager.insert(CustomShippingOption, {
await manager.insert(CustomShippingOption, {
id: "cso-test",
cart_id: cartWithCustomSo.id,
price: 0,

View File

@@ -0,0 +1,29 @@
{
"compilerOptions": {
"lib": ["es5", "es6", "es2019"],
"target": "es5",
"outDir": "./dist",
"esModuleInterop": true,
"declaration": true,
"module": "commonjs",
"moduleResolution": "node",
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"sourceMap": true,
"noImplicitReturns": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"noImplicitThis": true,
"allowJs": true,
"skipLibCheck": true,
"downlevelIteration": true // to use ES5 specific tooling
},
"include": ["./**/*"],
"exclude": [
"./dist/**/*",
"./**/factories",
"./**/helpers",
"./**/__snapshots__",
"node_modules"
]
}

View File

@@ -40,6 +40,7 @@
"prettier": "^2.7.1",
"resolve-cwd": "^3.0.0",
"ts-jest": "^26.5.6",
"ts-node": "^10.9.1",
"turbo": "^1.3.1",
"typedoc": "0.23.10",
"typedoc-frontmatter-plugin": "link:docs-util/typedoc-plugins/typedoc-frontmatter-plugin",
@@ -71,6 +72,8 @@
"generate:entities": "typedoc --options typedoc.entities.js",
"release:snapshot": "changeset publish --no-git-tags --snapshot --tag snapshot",
"generate:announcement": "node ./scripts/doc-change-release.js",
"develop": "NODE_ENV=development ts-node --transpile-only ./integration-tests/development/server.js",
"develop:create:db": "NODE_ENV=development ts-node --transpile-only ./integration-tests/development/create-database.js",
"version:staging": "yarn changeset pre enter staging && yarn changeset version",
"check:freshness": "node ./scripts/freshness-check.js"
},

View File

@@ -1,4 +1,9 @@
import { Connection, createConnection, LoggerOptions } from "typeorm"
import {
Connection,
createConnection,
getConnectionManager,
getConnection,
} from "typeorm"
import { ShortenedNamingStrategy } from "../utils/naming-strategy"
import { AwilixContainer } from "awilix"
import { ConnectionOptions } from "typeorm/connection/ConnectionOptions"
@@ -17,6 +22,11 @@ export default async ({
const isSqlite = configModule.projectConfig.database_type === "sqlite"
const cnnManager = getConnectionManager()
if (cnnManager.has("default") && getConnection().isConnected) {
await getConnection().close()
}
const connection = await createConnection({
type: configModule.projectConfig.database_type,
url: configModule.projectConfig.database_url,

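The dev server re-runs the loaders inside the same process on every file change, so the default TypeORM connection has to be closed before createConnection is called again; otherwise TypeORM refuses to register a second "default" connection. A minimal sketch of the guard added above (uses standard typeorm 0.2.x APIs; not part of this commit):

const { createConnection, getConnection, getConnectionManager } = require("typeorm")
const recreateConnection = async (options) => {
// Safe to call repeatedly within one process, which is exactly what the
// dev server's reload loop does.
const manager = getConnectionManager()
if (manager.has("default") && getConnection().isConnected) {
await getConnection().close()
}
return createConnection(options)
}
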
View File

@@ -628,7 +628,7 @@ function resolvePlugin(pluginName: string): {
// warnOnIncompatiblePeerDependency(packageJSON.name, packageJSON)
return {
resolve: resolvedPath,
resolve: resolvedPath + (process.env.DEV_MODE ? "/src" : ""),
id: createPluginId(packageJSON.name),
name: packageJSON.name,
options: {},

View File

@@ -26,18 +26,23 @@ export default ({ container, configModule, isTest }: LoaderOptions): void => {
const coreFull = path.join(__dirname, corePath)
const ignore = [
"**/__fixtures__/**",
"**/index.js",
"**/index.ts",
"**/utils.js",
"**/utils.ts",
"**/types.js",
"**/types.ts",
"**/types/**",
]
if (!useMock) {
ignore.push("**/__tests__/**", "**/__mocks__/**")
}
const core = glob.sync(coreFull, {
cwd: __dirname,
ignore: [
"**/__fixtures__/**",
"**/index.js",
"**/index.ts",
"**/utils.js",
"**/utils.ts",
"**/types.js",
"**/types.ts",
"**/types/**",
],
ignore,
})
core.forEach((fn) => {

View File

@@ -30098,6 +30098,7 @@ __metadata:
resolve-cwd: ^3.0.0
swagger-inline: ^3.2.2
ts-jest: ^26.5.6
ts-node: ^10.9.1
turbo: ^1.3.1
typedoc: 0.23.10
typedoc-frontmatter-plugin: "link:docs-util/typedoc-plugins/typedoc-frontmatter-plugin"
@@ -32943,6 +32944,44 @@ __metadata:
languageName: node
linkType: hard
"ts-node@npm:^10.9.1":
version: 10.9.1
resolution: "ts-node@npm:10.9.1"
dependencies:
"@cspotcode/source-map-support": ^0.8.0
"@tsconfig/node10": ^1.0.7
"@tsconfig/node12": ^1.0.7
"@tsconfig/node14": ^1.0.0
"@tsconfig/node16": ^1.0.2
acorn: ^8.4.1
acorn-walk: ^8.1.1
arg: ^4.1.0
create-require: ^1.1.0
diff: ^4.0.1
make-error: ^1.1.1
v8-compile-cache-lib: ^3.0.1
yn: 3.1.1
peerDependencies:
"@swc/core": ">=1.2.50"
"@swc/wasm": ">=1.2.50"
"@types/node": "*"
typescript: ">=2.7"
peerDependenciesMeta:
"@swc/core":
optional: true
"@swc/wasm":
optional: true
bin:
ts-node: dist/bin.js
ts-node-cwd: dist/bin-cwd.js
ts-node-esm: dist/bin-esm.js
ts-node-script: dist/bin-script.js
ts-node-transpile-only: dist/bin-transpile.js
ts-script: dist/bin-script-deprecated.js
checksum: 95187932fb83f3901e22546bd2feeac7d2feb4f412f42ac3a595f049a23e8dcf70516dffb51866391228ea2dbcfaea039e250fb2bb334d48a86ab2b6aea0ae2d
languageName: node
linkType: hard
"ts-node@npm:^9":
version: 9.1.1
resolution: "ts-node@npm:9.1.1"