feat: dev cli (#203)

* feat: adds dev-cli

* feat: adds dev-cli

* fix: works with org package names

* fix: medusa cli bin

* hotfix(brightpearl): rounding errors + failing customer test (#199)

* fix: verdacio publish

* fix: update yarn lock

* fix(CI): update node

* fix: update yarn lock
This commit is contained in:
Sebastian Rindom
2021-03-16 09:51:04 +01:00
committed by GitHub
parent 7c7f86e8e8
commit 695b1fd0a5
24 changed files with 6240 additions and 3 deletions

View File

@@ -7,8 +7,7 @@ executors:
parameters: parameters:
image: image:
type: string type: string
# First 10.x LTS release, but old Yarn default: "12.13"
default: "10.14"
docker: docker:
- image: circleci/node:<< parameters.image >> - image: circleci/node:<< parameters.image >>

View File

@@ -0,0 +1,3 @@
#!/usr/bin/env node
// Thin bin entry point: package.json "bin" points here; it simply loads the
// babel-compiled CLI bundle from dist/.
require("./dist/index.js")

View File

@@ -4,7 +4,7 @@
"description": "Command Line interface for Medusa Commerce", "description": "Command Line interface for Medusa Commerce",
"main": "dist/index.js", "main": "dist/index.js",
"bin": { "bin": {
"medusa": "dist/index.js" "medusa": "cli.js"
}, },
"repository": { "repository": {
"type": "git", "type": "git",

View File

@@ -0,0 +1,3 @@
{
"presets": [["babel-preset-medusa-package"]]
}

33
packages/medusa-dev-cli/.gitignore vendored Normal file
View File

@@ -0,0 +1,33 @@
# Logs
logs
*.log
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directory
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git
node_modules
decls
dist
# verdaccio local storage
verdaccio

View File

@@ -0,0 +1,3 @@
src
flow-typed
verdaccio

View File

@@ -0,0 +1,74 @@
# medusa-dev-cli
A command-line tool for local Medusa development. When doing development work on
Medusa core, this tool copies your changes to the various Medusa packages
into your Medusa projects.
## Install
`npm install -g medusa-dev-cli`
## Configuration / First time setup
The medusa-dev-cli tool needs to know where your cloned Medusa repository is
located. You typically only need to configure this once.
`medusa-dev --set-path-to-repo /path/to/my/cloned/version/medusa`
## How to use
Navigate to the project you want to link to your forked Medusa repository and
run:
`medusa-dev`
The tool will then scan your project's package.json to find its Medusa
dependencies and copy the latest source from your cloned version of Medusa into
your project's node_modules folder. A watch task is then created to re-copy any
modules that might change while you're working on the code, so you can leave
this program running.
Typically you'll also want to run `npm run watch` in the Medusa repo to set up
watchers to build Medusa source code.
## Revert to current packages
If you've recently run `medusa-dev` your `node_modules` will be out of sync with current published packages. In order to undo this, you can remove the `node_modules` directory or run:
```shell
git checkout package.json; yarn --force
```
or
```shell
git checkout package.json; npm install --force
```
### Other commands
#### `--packages`
You can prevent the automatic dependencies scan and instead specify a list of
packages you want to link by using the `--packages` option:
`medusa-dev --packages @medusajs/medusa medusa-interfaces`
#### `--scan-once`
With this flag, the tool will do an initial scan and copy and then quit. This is
useful for setting up automated testing/builds of Medusa projects from the latest
code.
#### `--quiet`
Don't output anything except for a success message when used together with
`--scan-once`.
#### `--copy-all`
Copy all modules/files under `packages/` in the Medusa source repo, not just Medusa packages.
#### `--force-install`
Disables copying files into node_modules and forces usage of local npm repository.

View File

@@ -0,0 +1,51 @@
{
"name": "medusa-dev-cli",
"description": "CLI helpers for contributors working on Medusa",
"version": "0.0.2-alpha.641+d9faeee4",
"author": "Sebastian Rindom <skrindom@gmail.com>",
"bin": {
"medusa-dev": "./dist/index.js"
},
"dependencies": {
"@babel/runtime": "^7.12.5",
"chokidar": "^3.5.0",
"configstore": "^5.0.1",
"del": "^6.0.0",
"execa": "^4.1.0",
"find-yarn-workspace-root": "^2.0.0",
"fs-extra": "^9.0.1",
"got": "^10.7.0",
"is-absolute": "^1.0.0",
"lodash": "^4.17.21",
"signal-exit": "^3.0.3",
"verdaccio": "^4.10.0",
"yargs": "^15.4.1"
},
"devDependencies": {
"@babel/cli": "^7.12.1",
"@babel/core": "^7.12.3",
"babel-preset-medusa-package": "^1.1.0",
"cross-env": "^7.0.3"
},
"homepage": "https://github.com/medusajs/medusa/tree/master/packages/medusa-dev-cli#readme",
"keywords": [
"medusa"
],
"license": "MIT",
"main": "index.js",
"repository": {
"type": "git",
"url": "https://github.com/medusajs/medusa.git",
"directory": "packages/medusa-dev-cli"
},
"scripts": {
"build": "babel src --out-dir dist --ignore \"**/__tests__\"",
"prepare": "cross-env NODE_ENV=production npm run build",
"test": "echo \"Error: no test specified\" && exit 1",
"watch": "babel -w src --out-dir dist --ignore \"**/__tests__\""
},
"engines": {
"node": ">=12.13.0"
},
"gitHead": "d9faeee4c0dd0930b85a14143443a6d9cc787ab2"
}

View File

@@ -0,0 +1,132 @@
#!/usr/bin/env node
// medusa-dev CLI entry point.
//
// Parses command-line flags, validates configuration (monorepo location) and
// the current project (must contain a package.json), computes which medusa
// packages the project depends on, then hands off to `watch` to copy files /
// publish packages.
const Configstore = require(`configstore`);
const pkg = require(`../package.json`);
const _ = require(`lodash`);
const path = require(`path`);
const os = require(`os`);
const watch = require(`./watch`);
const { getVersionInfo } = require(`./utils/version`);

const argv = require(`yargs`)
  .usage(`Usage: medusa-dev [options]`)
  .alias(`q`, `quiet`)
  .nargs(`q`, 0)
  .describe(`q`, `Do not output copy file information`)
  .alias(`s`, `scan-once`)
  .nargs(`s`, 0)
  .describe(`s`, `Scan once. Do not start file watch`)
  .alias(`p`, `set-path-to-repo`)
  .nargs(`p`, 1)
  .describe(
    `p`,
    `Set path to Medusa repository.
You typically only need to configure this once.`
  )
  .nargs(`force-install`, 0)
  .describe(
    `force-install`,
    `Disables copying files into node_modules and forces usage of local npm repository.`
  )
  .alias(`C`, `copy-all`)
  .nargs(`C`, 0)
  .describe(
    `C`,
    `Copy all contents in packages/ instead of just medusa packages`
  )
  .array(`packages`)
  .describe(`packages`, `Explicitly specify packages to copy`)
  .help(`h`)
  .alias(`h`, `help`)
  .nargs(`v`, 0)
  .alias(`v`, `version`)
  .describe(`v`, `Print the currently installed version of Medusa Dev CLI`)
  .argv;

// --version: print and quit before doing any other work.
if (argv.version) {
  console.log(getVersionInfo());
  process.exit();
}

const conf = new Configstore(pkg.name);
const fs = require(`fs-extra`);

// --set-path-to-repo: persist the monorepo location in the configstore and quit.
let pathToRepo = argv.setPathToRepo;
if (pathToRepo) {
  if (pathToRepo.includes(`~`)) {
    // Expand "~" to the user's home directory (the shell won't when quoted).
    pathToRepo = path.join(os.homedir(), pathToRepo.split(`~`).pop());
  }
  conf.set(`medusa-location`, path.resolve(pathToRepo));
  process.exit();
}

// The tool must run from the root of a package.
const havePackageJsonFile = fs.existsSync(`package.json`);
if (!havePackageJsonFile) {
  console.error(`Current folder must have a package.json file!`);
  process.exit();
}

const medusaLocation = conf.get(`medusa-location`);
if (!medusaLocation) {
  console.error(
    `
You haven't set the path yet to your cloned
version of medusa. Do so now by running:
medusa-dev --set-path-to-repo /path/to/my/cloned/version/medusa
`
  );
  process.exit();
}

// get list of packages from monorepo
const monoRepoPackages = [];
const pkgsDirs = fs.readdirSync(path.join(medusaLocation, `packages`));
for (const dir of pkgsDirs) {
  const pkgJsonPath = path.join(medusaLocation, `packages`, dir, `package.json`);
  // Skip stray entries in packages/ (e.g. .DS_Store) that aren't packages;
  // the previous code crashed on them with a read/parse error.
  if (!fs.existsSync(pkgJsonPath)) {
    continue;
  }
  const pack = JSON.parse(fs.readFileSync(pkgJsonPath));
  monoRepoPackages.push(pack.name);
}

const localPkg = JSON.parse(fs.readFileSync(`package.json`));
// intersect dependencies with monoRepoPackages to get list of packages that are used
const localPackages = _.intersection(
  monoRepoPackages,
  Object.keys(_.merge({}, localPkg.dependencies, localPkg.devDependencies))
);

if (!argv.packages && _.isEmpty(localPackages)) {
  console.error(
    `
You haven't got any medusa dependencies into your current package.json
You probably want to pass in a list of packages to start
developing on! For example:
medusa-dev --packages @medusajs/medusa
If you prefer to place them in your package.json dependencies instead,
medusa-dev will pick them up.
`
  );
  if (!argv.forceInstall) {
    process.exit();
  } else {
    // The user-facing flag is --force-install (yargs camelizes it to
    // argv.forceInstall); the old message advertised a nonexistent
    // "--forceInstall" flag.
    console.log(
      `Continuing other dependencies installation due to "--force-install" flag`
    );
  }
}

watch(medusaLocation, argv.packages, {
  localPackages,
  quiet: argv.quiet,
  scanOnce: argv.scanOnce,
  forceInstall: argv.forceInstall,
  monoRepoPackages,
});

View File

@@ -0,0 +1,19 @@
const signalExit = require(`signal-exit`);
const cleanupTasks = new Set();
exports.registerCleanupTask = (taskFn) => {
cleanupTasks.add(taskFn);
return () => {
const result = taskFn();
cleanupTasks.delete(taskFn);
return result;
};
};
signalExit(() => {
if (cleanupTasks.size) {
console.log(`Process exitted in middle of publishing - cleaning up`);
cleanupTasks.forEach((taskFn) => taskFn());
}
});

View File

@@ -0,0 +1,75 @@
const startVerdaccio = require(`verdaccio`).default;
const fs = require(`fs-extra`);
const _ = require(`lodash`);
let VerdaccioInitPromise = null;
const { verdaccioConfig } = require(`./verdaccio-config`);
const { publishPackage } = require(`./publish-package`);
const { installPackages } = require(`./install-packages`);
const startServer = () => {
if (VerdaccioInitPromise) {
return VerdaccioInitPromise;
}
console.log(`Starting local verdaccio server`);
// clear storage
fs.removeSync(verdaccioConfig.storage);
VerdaccioInitPromise = new Promise((resolve) => {
startVerdaccio(
verdaccioConfig,
verdaccioConfig.port,
verdaccioConfig.storage,
`1.0.0`,
`medusa-dev`,
(webServer, addr, pkgName, pkgVersion) => {
// console.log(webServer)
webServer.listen(addr.port || addr.path, addr.host, () => {
console.log(`Started local verdaccio server`);
resolve();
});
}
);
});
return VerdaccioInitPromise;
};
exports.startVerdaccio = startServer;
exports.publishPackagesLocallyAndInstall = async ({
packagesToPublish,
localPackages,
root,
ignorePackageJSONChanges,
yarnWorkspaceRoot,
}) => {
await startServer();
const versionPostFix = Date.now();
const newlyPublishedPackageVersions = {};
for (const packageName of packagesToPublish) {
newlyPublishedPackageVersions[packageName] = await publishPackage({
packageName,
packagesToPublish,
root,
versionPostFix,
ignorePackageJSONChanges,
});
}
const packagesToInstall = _.intersection(packagesToPublish, localPackages);
await installPackages({
packagesToInstall,
yarnWorkspaceRoot,
newlyPublishedPackageVersions,
});
};

View File

@@ -0,0 +1,151 @@
const path = require(`path`)
const fs = require(`fs-extra`)
const { promisifiedSpawn } = require(`../utils/promisified-spawn`)
const { registryUrl } = require(`./verdaccio-config`)

/**
 * Install packages from the local verdaccio registry into the current
 * project.
 *
 * Inside a yarn workspace we can't just `yarn add` in a sub-package, so the
 * workspace package.json files are rewritten to use the `medusa-dev`
 * dist-tag and a plain `yarn install` is run against the local registry.
 * Outside of a workspace we `yarn add` the exact versions just published.
 *
 * @param {Object} $0
 * @param {String[]} $0.packagesToInstall names of packages to install
 * @param {String} [$0.yarnWorkspaceRoot] path to yarn workspace root, if any
 * @param {Object} $0.newlyPublishedPackageVersions map of package name ->
 *   exact version just published to the local registry
 */
const installPackages = async ({
  packagesToInstall,
  yarnWorkspaceRoot,
  newlyPublishedPackageVersions,
}) => {
  console.log(
    `Installing packages from local registry:\n${packagesToInstall
      .map(packageAndVersion => ` - ${packageAndVersion}`)
      .join(`\n`)}`
  )
  let installCmd
  if (yarnWorkspaceRoot) {
    // this is very hacky - given root, we run `yarn workspaces info`
    // to get list of all workspaces and their locations, and manually
    // edit package.json file for packages we want to install
    // to make sure there are no mismatched versions of same package
    // in workspaces which should preserve node_modules structure
    // (packages being mostly hoisted to top-level node_modules)
    const { stdout } = await promisifiedSpawn([
      `yarn`,
      [`workspaces`, `info`, `--json`],
      { stdio: `pipe` },
    ])

    let workspacesLayout
    try {
      workspacesLayout = JSON.parse(JSON.parse(stdout).data)
    } catch (e) {
      /*
      Yarn 1.22 doesn't output pure json - it has leading and trailing text:
      ```
      $ yarn workspaces info --json
      yarn workspaces v1.22.0
      {
        "z": {
          "location": "z",
          "workspaceDependencies": [],
          "mismatchedWorkspaceDependencies": []
        },
        "y": {
          "location": "y",
          "workspaceDependencies": [],
          "mismatchedWorkspaceDependencies": []
        }
      }
      Done in 0.48s.
      ```
      So we need to do some sanitization. We find JSON by matching substring
      that starts with `{` and ends with `}`
      */
      const regex = /^[^{]*({.*})[^}]*$/gs
      const sanitizedStdOut = regex.exec(stdout)
      if (sanitizedStdOut?.length >= 2) {
        // pick content of first (and only) capturing group
        const jsonString = sanitizedStdOut[1]
        try {
          workspacesLayout = JSON.parse(jsonString)
        } catch (e) {
          // Message quote was previously unbalanced - fixed closing `"`.
          console.error(
            `Failed to parse "sanitized" output of "yarn workspaces info" command.\n\nSanitized string: "${jsonString}"`
          )
          // not exiting here, because we have general check for `workspacesLayout` being set below
        }
      }
    }

    if (!workspacesLayout) {
      console.error(
        `Couldn't parse output of "yarn workspaces info" command`,
        stdout
      )
      process.exit(1)
    }

    // Point any dependency on a to-be-installed package at the `medusa-dev`
    // dist-tag so yarn resolves it from the local registry. Returns whether
    // anything changed.
    const handleDeps = deps => {
      if (!deps) {
        return false
      }
      let changed = false
      Object.keys(deps).forEach(depName => {
        if (packagesToInstall.includes(depName)) {
          // BUGFIX: this previously wrote `gatsby-dev` (left over from the
          // gatsby-dev-cli fork); packages are published with the
          // `medusa-dev` dist-tag (see publish-package.js), so the old value
          // could never resolve.
          deps[depName] = `medusa-dev`
          changed = true
        }
      })
      return changed
    }

    Object.keys(workspacesLayout).forEach(workspaceName => {
      const { location } = workspacesLayout[workspaceName]
      const pkgJsonPath = path.join(yarnWorkspaceRoot, location, `package.json`)
      if (!fs.existsSync(pkgJsonPath)) {
        return
      }
      const pkg = JSON.parse(fs.readFileSync(pkgJsonPath, `utf8`))

      let changed = false
      changed |= handleDeps(pkg.dependencies)
      changed |= handleDeps(pkg.devDependencies)
      changed |= handleDeps(pkg.peerDependencies)

      if (changed) {
        console.log(`Changing deps in ${pkgJsonPath} to use medusa-dev`)
        fs.outputJSONSync(pkgJsonPath, pkg, {
          spaces: 2,
        })
      }
    })

    // package.json files are changed - so we just want to install
    // using verdaccio registry
    installCmd = [
      `yarn`,
      [`install`, `--registry=${registryUrl}`, `--ignore-engines`],
    ]
  } else {
    installCmd = [
      `yarn`,
      [
        `add`,
        ...packagesToInstall.map(packageName => {
          const packageVersion = newlyPublishedPackageVersions[packageName]
          return `${packageName}@${packageVersion}`
        }),
        `--registry=${registryUrl}`,
        `--exact`,
        `--ignore-engines`,
      ],
    ]
  }

  try {
    await promisifiedSpawn(installCmd)
    console.log(`Installation complete`)
  } catch (error) {
    console.error(`Installation failed`, error)
    process.exit(1)
  }
}

exports.installPackages = installPackages

View File

@@ -0,0 +1,153 @@
const fs = require(`fs-extra`);
const path = require(`path`);
const { promisifiedSpawn } = require(`../utils/promisified-spawn`);
const { registryUrl } = require(`./verdaccio-config`);
// Dummy auth token for the local registry. The protocol is stripped because
// .npmrc registry keys use the `//host:port/:_authToken=...` form.
const NPMRCContent = `${registryUrl.replace(
  /https?:/g,
  ``
)}/:_authToken="medusa-dev"`;
const {
  getMonorepoPackageJsonPath,
} = require(`../utils/get-monorepo-package-json-path`);
const { registerCleanupTask } = require(`./cleanup-tasks`);
/**
 * Edit package.json to:
 * - adjust version to temporary one
 * - change version selectors for dependencies that
 *   will be published, to make sure that yarn
 *   install them in local site
 *
 * Returns the temporary version and an "undo" function that restores the
 * original package.json (also registered as an exit cleanup task).
 */
const adjustPackageJson = ({
  monoRepoPackageJsonPath,
  packageName,
  versionPostFix,
  packagesToPublish,
  ignorePackageJSONChanges,
  root,
}) => {
  // we need to check if the package depends on any other package that will be
  // published and adjust the version selector to point to the dev version of
  // the package so the local registry is used for dependencies.
  const monorepoPKGjsonString = fs.readFileSync(
    monoRepoPackageJsonPath,
    `utf-8`
  );
  const monorepoPKGjson = JSON.parse(monorepoPKGjsonString);
  // Temporary unique version, e.g. `1.2.3-dev-1615879000000`.
  monorepoPKGjson.version = `${monorepoPKGjson.version}-dev-${versionPostFix}`;
  packagesToPublish.forEach((packageThatWillBePublished) => {
    if (
      monorepoPKGjson.dependencies &&
      monorepoPKGjson.dependencies[packageThatWillBePublished]
    ) {
      // Read the dependency's current version so its temporary dev version
      // can be pinned exactly (same postfix as used when it is published).
      const currentVersion = JSON.parse(
        fs.readFileSync(
          getMonorepoPackageJsonPath({
            packageName: packageThatWillBePublished,
            root,
          }),
          `utf-8`
        )
      ).version;
      monorepoPKGjson.dependencies[
        packageThatWillBePublished
      ] = `${currentVersion}-dev-${versionPostFix}`;
    }
  });
  const temporaryMonorepoPKGjsonString = JSON.stringify(monorepoPKGjson);
  // Tell the file watcher to ignore both the original and the temporary
  // package.json contents while publishing is in flight, to avoid
  // false-positive "deps changed" detections.
  const unignorePackageJSONChanges = ignorePackageJSONChanges(packageName, [
    monorepoPKGjsonString,
    temporaryMonorepoPKGjsonString,
  ]);
  // change version and dependency versions
  fs.outputFileSync(monoRepoPackageJsonPath, temporaryMonorepoPKGjsonString);
  return {
    newPackageVersion: monorepoPKGjson.version,
    unadjustPackageJson: registerCleanupTask(() => {
      // restore original package.json
      fs.outputFileSync(monoRepoPackageJsonPath, monorepoPKGjsonString);
      unignorePackageJSONChanges();
    }),
  };
};
/**
 * Anonymous publishing requires a dummy .npmrc
 * See https://github.com/verdaccio/verdaccio/issues/212#issuecomment-308578500
 * This is `npm publish` (as in linked comment) and `yarn publish` requirement.
 * This is not verdaccio restriction.
 *
 * Returns an "undo" function that removes the .npmrc again (also registered
 * as an exit cleanup task).
 */
const createTemporaryNPMRC = ({ pathToPackage }) => {
  const NPMRCPath = path.join(pathToPackage, `.npmrc`);
  fs.outputFileSync(NPMRCPath, NPMRCContent);
  return registerCleanupTask(() => {
    fs.removeSync(NPMRCPath);
  });
};
/**
 * Publish a single monorepo package to the local verdaccio registry under the
 * `medusa-dev` dist-tag, restoring all temporary file changes afterwards.
 * Exits the process on publish failure. Returns the temporary version that
 * was published.
 */
const publishPackage = async ({
  packageName,
  packagesToPublish,
  root,
  versionPostFix,
  ignorePackageJSONChanges,
}) => {
  const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({
    packageName,
    root,
  });
  const { unadjustPackageJson, newPackageVersion } = adjustPackageJson({
    monoRepoPackageJsonPath,
    packageName,
    root,
    versionPostFix,
    packagesToPublish,
    ignorePackageJSONChanges,
  });
  const pathToPackage = path.dirname(monoRepoPackageJsonPath);
  const uncreateTemporaryNPMRC = createTemporaryNPMRC({ pathToPackage });
  // npm publish
  const publishCmd = [
    `npm`,
    [`publish`, `--tag`, `medusa-dev`, `--registry=${registryUrl}`],
    {
      cwd: pathToPackage,
    },
  ];
  console.log(
    `Publishing ${packageName}@${newPackageVersion} to local registry`
  );
  try {
    await promisifiedSpawn(publishCmd);
    console.log(
      `Published ${packageName}@${newPackageVersion} to local registry`
    );
  } catch (e) {
    console.error(`Failed to publish ${packageName}@${newPackageVersion}`, e);
    process.exit(1);
  }
  // Normal path: undo the temporary .npmrc and package.json edits right away.
  uncreateTemporaryNPMRC();
  unadjustPackageJson();
  return newPackageVersion;
};
exports.publishPackage = publishPackage;

View File

@@ -0,0 +1,33 @@
const path = require(`path`)
const os = require(`os`)

// Configuration for the throwaway verdaccio instance used as the local npm
// registry during `medusa-dev --force-install`.
const verdaccioConfig = {
  // Tarballs/metadata live in a temp dir so each run can start clean.
  storage: path.join(os.tmpdir(), `verdaccio`, `storage`),
  port: 4873, // default
  max_body_size: `1000mb`,
  web: {
    enable: true,
    // BUGFIX: was `gatsby-dev`, a leftover from the gatsby-dev-cli fork.
    title: `medusa-dev`,
  },
  logs: [{ type: `stdout`, format: `pretty-timestamped`, level: `warn` }],
  packages: {
    "**": {
      // anyone may read from and publish to the local registry
      access: `$all`,
      publish: `$all`,
      // anything not published locally is proxied from the public registry
      proxy: `npmjs`,
    },
  },
  uplinks: {
    npmjs: {
      url: `https://registry.npmjs.org/`,
      // default is 2 max_fails - on flaky networks that cause a lot of failed installations
      max_fails: 10,
    },
  },
}
exports.verdaccioConfig = verdaccioConfig

// Base URL clients use to talk to the local registry.
const registryUrl = `http://localhost:${verdaccioConfig.port}`
exports.registryUrl = registryUrl

View File

@@ -0,0 +1,29 @@
const { getDependantPackages } = require(`../get-dependant-packages`)

describe(`getDependantPackages`, () => {
  it(`handles deep dependency chains`, () => {
    // package-a-dep1-dep1 is used by package-a-dep1, which in turn is used by
    // package-a - all three must end up in the publish set; unrelated
    // entries in the dep tree must not.
    const packagesToPublish = getDependantPackages({
      packageName: `package-a-dep1-dep1`,
      depTree: {
        "package-a-dep1": new Set([`package-a`]),
        "package-a-dep1-dep1": new Set([`package-a-dep1`]),
        "not-related": new Set([`also-not-related`]),
      },
    })
    expect(packagesToPublish).toEqual(
      new Set([`package-a`, `package-a-dep1`, `package-a-dep1-dep1`])
    )
  })
  it(`doesn't get stuck in circular dependency loops`, () => {
    // package-a and package-b depend on each other; traversal must terminate
    // and include both exactly once.
    const packagesToPublish = getDependantPackages({
      packageName: `package-a`,
      depTree: {
        "package-a": new Set([`package-b`]),
        "package-b": new Set([`package-a`]),
      },
    })
    expect(packagesToPublish).toEqual(new Set([`package-a`, `package-b`]))
  })
})

View File

@@ -0,0 +1,68 @@
const path = require(`path`)
const { traversePackagesDeps } = require(`../traverse-package-deps`)

// Mock package.json files for a fake monorepo layout. `virtual: true`
// because these module paths don't exist on disk; path.join(...split(`/`))
// keeps the mocked paths platform-correct.
jest.doMock(
  path.join(...`<monorepo-path>/packages/package-a/package.json`.split(`/`)),
  () => {
    return {
      dependencies: {
        "unrelated-package": `*`,
        "package-a-dep1": `*`,
      },
    }
  },
  { virtual: true }
)
jest.doMock(
  path.join(
    ...`<monorepo-path>/packages/package-a-dep1/package.json`.split(`/`)
  ),
  () => {
    return {
      dependencies: {
        "package-a-dep1-dep1": `*`,
      },
    }
  },
  { virtual: true }
)
jest.doMock(
  path.join(
    ...`<monorepo-path>/packages/package-a-dep1-dep1/package.json`.split(`/`)
  ),
  () => {
    return {
      dependencies: {},
    }
  },
  { virtual: true }
)

describe(`traversePackageDeps`, () => {
  it(`handles deep dependency chains`, () => {
    const { seenPackages, depTree } = traversePackagesDeps({
      root: `<monorepo-path>`,
      packages: [`package-a`, `doesnt-exist`],
      monoRepoPackages: [
        `package-a`,
        `package-a-dep1`,
        `package-a-dep1-dep1`,
        `package-not-used`,
      ],
    })
    // `doesnt-exist` is dropped; transitive deps of package-a are picked up.
    expect(seenPackages).toEqual([
      `package-a`,
      `package-a-dep1`,
      `package-a-dep1-dep1`,
    ])
    // Reverse-dependency edges used later to decide what needs republishing.
    expect(depTree).toEqual({
      "package-a-dep1": new Set([`package-a`]),
      "package-a-dep1-dep1": new Set([`package-a-dep1`]),
    })
  })
})

View File

@@ -0,0 +1,190 @@
const fs = require(`fs-extra`);
const _ = require(`lodash`);
const {
getMonorepoPackageJsonPath,
} = require(`./get-monorepo-package-json-path`);
const got = require(`got`);
/**
 * Deep diff of `object` against `base`: returns an object containing only
 * the keys whose values differ, recursing into nested objects on both sides.
 */
function difference(object, base) {
  const changes = (obj, ref) =>
    _.transform(obj, (acc, value, key) => {
      if (_.isEqual(value, ref[key])) {
        return
      }
      acc[key] =
        _.isObject(value) && _.isObject(ref[key])
          ? changes(value, ref[key])
          : value
    });
  return changes(object, base);
}
/**
 * Compare dependencies of installed packages and monorepo package.
 * It will skip dependencies that are removed in monorepo package.
 *
 * If local package is not installed, it will check unpkg.com.
 * This allows medusa-dev to skip publishing unnecessarily and
 * lets it install packages from the public npm repository if nothing changed.
 *
 * Returns { didDepsChanged, packageNotInstalled }.
 */
exports.checkDepsChanges = async ({
  newPath,
  packageName,
  monoRepoPackages,
  root,
  isInitialScan,
  ignoredPackageJSON,
}) => {
  let localPKGjson;
  let packageNotInstalled = false;
  try {
    localPKGjson = JSON.parse(fs.readFileSync(newPath, `utf-8`));
  } catch {
    packageNotInstalled = true;
    // there is no local package - so we still need to install deps
    // this is nice because devs won't need to do initial package installation - we can handle this.
    if (!isInitialScan) {
      console.log(
        `'${packageName}' doesn't seem to be installed. Restart medusa-dev to publish it`
      );
      return {
        didDepsChanged: false,
        packageNotInstalled,
      };
    }
    // if package is not installed, we will do http GET request to
    // unpkg to check if dependencies in the package published in the public
    // npm repository are different
    // this allows us to not publish to local repository
    // and save some time/work
    try {
      const response = await got(
        `https://unpkg.com/${packageName}/package.json`
      );
      if (response?.statusCode !== 200) {
        throw new Error(`No response or non 200 code`);
      }
      localPKGjson = JSON.parse(response.body);
    } catch {
      // Not on unpkg either - must publish to the local registry.
      console.log(
        `'${packageName}' doesn't seem to be installed and is not published on NPM.`
      );
      return {
        didDepsChanged: true,
        packageNotInstalled,
      };
    }
  }
  // Scoped packages live in an unscoped directory under packages/ in the
  // monorepo.
  const monoDir = packageName.startsWith("@medusajs")
    ? packageName.split("/")[1]
    : packageName;
  const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({
    packageName: monoDir,
    root,
  });
  const monorepoPKGjsonString = fs.readFileSync(
    monoRepoPackageJsonPath,
    `utf-8`
  );
  const monorepoPKGjson = JSON.parse(monorepoPKGjsonString);
  if (ignoredPackageJSON.has(packageName)) {
    if (ignoredPackageJSON.get(packageName).includes(monorepoPKGjsonString)) {
      // we are in middle of publishing and content of package.json is one set during publish process,
      // so we need to not cause false positives
      return {
        didDepsChanged: false,
        packageNotInstalled,
      };
    }
  }
  // Normalize missing dependency maps so the comparisons below are safe.
  if (!monorepoPKGjson.dependencies) monorepoPKGjson.dependencies = {};
  if (!localPKGjson.dependencies) localPKGjson.dependencies = {};
  const areDepsEqual = _.isEqual(
    monorepoPKGjson.dependencies,
    localPKGjson.dependencies
  );
  if (!areDepsEqual) {
    // Diff both directions so added, removed and changed entries are all seen.
    const diff = difference(
      monorepoPKGjson.dependencies,
      localPKGjson.dependencies
    );
    const diff2 = difference(
      localPKGjson.dependencies,
      monorepoPKGjson.dependencies
    );
    let needPublishing = false;
    let isPublishing = false;
    // Build a human-readable changelog while deciding whether a republish is
    // actually required.
    const depChangeLog = _.uniq(Object.keys({ ...diff, ...diff2 }))
      .reduce((acc, key) => {
        if (monorepoPKGjson.dependencies[key] === `medusa-dev`) {
          // if we are in middle of publishing to local repository - ignore
          isPublishing = true;
          return acc;
        }
        if (localPKGjson.dependencies[key] === `medusa-dev`) {
          // monorepo packages will restore version, but after installation
          // in local site - it will use `medusa-dev` dist tag - we need
          // to ignore changes to that
          return acc;
        }
        if (
          localPKGjson.dependencies[key] &&
          monorepoPKGjson.dependencies[key]
        ) {
          // Check only for version changes in packages
          // that are not from medusa repo.
          // Changes in medusa packages will be copied over
          // from monorepo - and if those contain other dependency
          // changes - they will be covered
          if (!monoRepoPackages.includes(key)) {
            acc.push(
              ` - '${key}' changed version from ${localPKGjson.dependencies[key]} to ${monorepoPKGjson.dependencies[key]}`
            );
            needPublishing = true;
          }
        } else if (monorepoPKGjson.dependencies[key]) {
          acc.push(
            ` - '${key}@${monorepoPKGjson.dependencies[key]}' was added`
          );
          needPublishing = true;
        } else {
          acc.push(` - '${key}@${localPKGjson.dependencies[key]}' was removed`);
          // this doesn't need publishing really, so will skip this
        }
        return acc;
      }, [])
      .join(`\n`);
    if (!isPublishing && depChangeLog.length > 0) {
      console.log(`Dependencies of '${packageName}' changed:\n${depChangeLog}`);
      if (isInitialScan) {
        console.log(
          `Will ${!needPublishing ? `not ` : ``}publish to local npm registry.`
        );
      } else {
        console.warn(
          `Installation of dependencies after initial scan is not implemented`
        );
      }
      return {
        didDepsChanged: needPublishing,
        packageNotInstalled,
      };
    }
  }
  return {
    didDepsChanged: false,
    packageNotInstalled,
  };
};

View File

@@ -0,0 +1,30 @@
/**
 * Recursively collect the set of packages that (transitively) depend on the
 * given package. The returned set always includes the package itself.
 *
 * @param {Object} $0
 * @param {String} $0.packageName package to start from
 * @param {Object} $0.depTree map of package name -> Set of direct dependants
 * @param {Set} [$0.packagesToPublish] accumulator shared by recursive calls
 * @returns {Set} every package that needs publishing
 */
const getDependantPackages = ({
  packageName,
  depTree,
  packagesToPublish = new Set(),
}) => {
  // A package already in the set was handled before - this also terminates
  // circular dependency chains.
  if (!packagesToPublish.has(packageName)) {
    packagesToPublish.add(packageName)
    for (const dependant of depTree[packageName] ?? []) {
      getDependantPackages({
        packageName: dependant,
        depTree,
        packagesToPublish,
      })
    }
  }
  return packagesToPublish
}
exports.getDependantPackages = getDependantPackages

View File

@@ -0,0 +1,11 @@
const path = require(`path`);
exports.getMonorepoPackageJsonPath = ({ packageName, root }) => {
let dirName = packageName;
if (packageName.startsWith("@medusajs")) {
const [, directory] = packageName.split("/");
dirName = directory;
}
return path.join(root, `packages`, dirName, `package.json`);
};

View File

@@ -0,0 +1,29 @@
const execa = require(`execa`)
const defaultSpawnArgs = {
cwd: process.cwd(),
stdio: `inherit`,
}
exports.setDefaultSpawnStdio = stdio => {
defaultSpawnArgs.stdio = stdio
}
exports.promisifiedSpawn = async ([cmd, args = [], spawnArgs = {}]) => {
const spawnOptions = {
...defaultSpawnArgs,
...spawnArgs,
}
try {
return await execa(cmd, args, spawnOptions)
} catch (e) {
if (spawnOptions.stdio === `ignore`) {
console.log(
`\nCommand "${cmd} ${args.join(
` `
)}" failed.\nTo see details of failed command, rerun "medusa-dev" without "--quiet" or "-q" switch\n`
)
}
throw e
}
}

View File

@@ -0,0 +1,76 @@
const _ = require(`lodash`);
const path = require(`path`);
/**
 * @typedef {Object} TraversePackagesDepsReturn
 * @property {Object} depTree Lookup table to check dependants for given package.
 * Used to determine which packages need to be published.
 */
/**
 * Compile final list of packages to watch
 * This will include packages explicitly defined packages and all their dependencies
 * Also creates dependency graph that is used later to determine which packages
 * would need to be published when their dependencies change
 * @param {Object} $0
 * @param {String} $0.root Path to root of medusa monorepo repository
 * @param {String[]} $0.packages Initial array of packages to watch
 * This can be extracted from project dependencies or explicitly set by `--packages` flag
 * @param {String[]} $0.monoRepoPackages Array of packages in medusa monorepo
 * @param {String[]} [$0.seenPackages] Array of packages that were already traversed.
 * This makes sure dependencies are extracted one time for each package and avoid any
 * infinite loops.
 * @param {DepTree} [$0.depTree] Used internally to recursively construct dependency graph.
 * @return {TraversePackagesDepsReturn}
 */
const traversePackagesDeps = ({
  root,
  packages,
  monoRepoPackages,
  seenPackages = [...packages],
  depTree = {},
}) => {
  packages.forEach((p) => {
    // Scoped names live in an unscoped directory under packages/.
    if (p.startsWith("@medusajs")) {
      p = p.split("/")[1];
    }
    let pkgJson;
    try {
      pkgJson = require(path.join(root, `packages`, p, `package.json`));
    } catch {
      console.error(`"${p}" package doesn't exist in monorepo.`);
      // remove from seenPackages
      // NOTE(review): this reassigns the local binding only - in recursive
      // calls the caller's seenPackages array is not updated, and for scoped
      // packages the stripped name `p` may not match the `@medusajs/...`
      // entry that was originally seeded into seenPackages. Confirm whether
      // this is intended (the unit test only pins the unscoped case).
      seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p);
      return;
    }
    // Which of this package's direct deps are themselves monorepo packages?
    const fromMonoRepo = _.intersection(
      Object.keys({ ...pkgJson.dependencies }),
      monoRepoPackages
    );
    fromMonoRepo.forEach((pkgName) => {
      // Record the reverse edge: pkgName is depended upon by p.
      depTree[pkgName] = (depTree[pkgName] || new Set()).add(p);
    });
    // only traverse not yet seen packages to avoid infinite loops
    const newPackages = _.difference(fromMonoRepo, seenPackages);
    if (newPackages.length) {
      newPackages.forEach((depFromMonorepo) => {
        seenPackages.push(depFromMonorepo);
      });
      traversePackagesDeps({
        root,
        packages: fromMonoRepo,
        monoRepoPackages,
        seenPackages,
        depTree,
      });
    }
  });
  return { seenPackages, depTree };
};
exports.traversePackagesDeps = traversePackagesDeps;

View File

@@ -0,0 +1,4 @@
exports.getVersionInfo = () => {
const { version: devCliVersion } = require(`../../package.json`);
return `Medusa Dev CLI version: ${devCliVersion}`;
};

View File

@@ -0,0 +1,352 @@
const chokidar = require(`chokidar`);
const _ = require(`lodash`);
const del = require(`del`);
const fs = require(`fs-extra`);
const path = require(`path`);
const findWorkspaceRoot = require(`find-yarn-workspace-root`);
const { publishPackagesLocallyAndInstall } = require(`./local-npm-registry`);
const { checkDepsChanges } = require(`./utils/check-deps-changes`);
const { getDependantPackages } = require(`./utils/get-dependant-packages`);
const {
setDefaultSpawnStdio,
promisifiedSpawn,
} = require(`./utils/promisified-spawn`);
const { traversePackagesDeps } = require(`./utils/traverse-package-deps`);
// Running count of files copied into the project's node_modules.
let numCopied = 0;
// Print a copy summary and terminate the process.
const quit = () => {
  console.log(`Copied ${numCopied} files`);
  process.exit();
};
// How many times to retry a failed file copy (with exponential backoff)
// before giving up.
const MAX_COPY_RETRIES = 3;
/*
* non-existent packages break on('ready')
* See: https://github.com/paulmillr/chokidar/issues/449
*/
/**
 * Watch local monorepo packages and mirror their build output into the
 * consuming project's node_modules, so changes can be tested without
 * publishing to NPM. When dependency changes are detected (or a yarn
 * workspace is in use), packages are instead published to a local
 * verdaccio registry and installed from there.
 *
 * @param {string} root - Absolute path to the medusa monorepo root.
 * @param {string[]|undefined} packages - Explicit package names to watch;
 *   when falsy, every package discovered via the dependency traversal is watched.
 * @param {Object} options
 * @param {boolean} options.scanOnce - Copy once and exit instead of watching.
 * @param {boolean} options.quiet - Suppress per-file copy logs and child-process output.
 * @param {boolean} options.forceInstall - Publish/install via the local registry
 *   instead of copying files, then exit.
 * @param {string[]} options.monoRepoPackages - All package names in the monorepo.
 * @param {string[]} options.localPackages - Packages the local project depends on.
 */
async function watch(
  root,
  packages,
  { scanOnce, quiet, forceInstall, monoRepoPackages, localPackages }
) {
  setDefaultSpawnStdio(quiet ? `ignore` : `inherit`);
  // determine if in yarn workspace - if in workspace, force using verdaccio
  // as current logic of copying files will not work correctly.
  const yarnWorkspaceRoot = findWorkspaceRoot();
  if (yarnWorkspaceRoot && process.env.NODE_ENV !== `test`) {
    console.log(`Yarn workspace found.`);
    forceInstall = true;
  }

  // Copies requested before the initial install finishes are queued and
  // flushed afterwards, so we never write into a half-installed node_modules.
  let afterPackageInstallation = false;
  let queuedCopies = [];

  // Copy a single file, retrying with exponential backoff (500ms * 2^retry)
  // up to MAX_COPY_RETRIES times before rejecting.
  const realCopyPath = (arg) => {
    const { oldPath, newPath, quiet, resolve, reject, retry = 0 } = arg;
    fs.copy(oldPath, newPath, (err) => {
      if (err) {
        if (retry >= MAX_COPY_RETRIES) {
          console.error(err);
          reject(err);
          return;
        } else {
          setTimeout(
            () => realCopyPath({ ...arg, retry: retry + 1 }),
            500 * Math.pow(2, retry)
          );
          return;
        }
      }

      // When the medusa binary is copied over, it is not setup with the executable
      // permissions that it is given when installed via yarn.
      // This fixes the issue where after running medusa-dev, running `yarn medusa develop`
      // fails with a permission issue.
      // NOTE: fixed typo `meduas.js` -> `medusa.js`; the old pattern could never match.
      if (/(bin\/medusa.js|medusa(-cli)?\/cli.js)$/.test(newPath)) {
        fs.chmodSync(newPath, `0755`);
      }

      numCopied += 1;
      if (!quiet) {
        console.log(`Copied ${oldPath} to ${newPath}`);
      }
      resolve();
    });
  };

  // Queue (or, after installation, immediately perform) a file copy.
  const copyPath = (oldPath, newPath, quiet, packageName) =>
    new Promise((resolve, reject) => {
      const argObj = { oldPath, newPath, quiet, packageName, resolve, reject };
      if (afterPackageInstallation) {
        realCopyPath(argObj);
      } else {
        queuedCopies.push(argObj);
      }
    });

  // Flush every queued copy; any copy requested afterwards runs immediately.
  const runQueuedCopies = () => {
    afterPackageInstallation = true;
    queuedCopies.forEach((argObj) => realCopyPath(argObj));
    queuedCopies = [];
  };

  // Delete stale compiled JS (and source maps) from node_modules for every
  // package we queued copies for, preserving each package's own node_modules
  // and src directories.
  const clearJSFilesFromNodeModules = async () => {
    const packagesToClear = queuedCopies.reduce((acc, { packageName }) => {
      if (packageName) {
        acc.add(packageName);
      }
      return acc;
    }, new Set());
    await Promise.all(
      [...packagesToClear].map(
        async (packageToClear) =>
          await del([
            `node_modules/${packageToClear}/**/*.{js,js.map}`,
            `!node_modules/${packageToClear}/node_modules/**/*.{js,js.map}`,
            `!node_modules/${packageToClear}/src/**/*.{js,js.map}`,
          ])
      )
    );
  };

  // check packages deps and if they depend on other packages from monorepo
  // add them to packages list
  const { seenPackages, depTree } = traversePackagesDeps({
    root,
    packages: _.uniq(localPackages),
    monoRepoPackages,
  });

  const allPackagesToWatch = packages
    ? _.intersection(packages, seenPackages)
    : seenPackages;

  // package.json contents expected to change during a local publish; the
  // dependency-change check consults this map to skip them.
  const ignoredPackageJSON = new Map();
  const ignorePackageJSONChanges = (packageName, contentArray) => {
    ignoredPackageJSON.set(packageName, contentArray);
    // Returned disposer removes the ignore entry once publishing is done.
    return () => {
      ignoredPackageJSON.delete(packageName);
    };
  };

  // Force-install mode: publish to the local registry (or plain `yarn` when
  // nothing needs publishing) and exit without watching.
  if (forceInstall) {
    try {
      if (allPackagesToWatch.length > 0) {
        await publishPackagesLocallyAndInstall({
          packagesToPublish: allPackagesToWatch,
          root,
          localPackages,
          ignorePackageJSONChanges,
          yarnWorkspaceRoot,
        });
      } else {
        // run `yarn`
        const yarnInstallCmd = [`yarn`];
        console.log(`Installing packages from public NPM registry`);
        await promisifiedSpawn(yarnInstallCmd);
        console.log(`Installation complete`);
      }
    } catch (e) {
      console.log(e);
    }
    process.exit();
  }

  if (allPackagesToWatch.length === 0) {
    console.error(`There are no packages to watch.`);
    return;
  }

  // Strip the @medusajs scope so package names map onto packages/<dir> paths.
  const cleanToWatch = allPackagesToWatch.map((pkgName) => {
    if (pkgName.startsWith(`@medusajs`)) {
      return pkgName.split("/")[1];
    }
    return pkgName;
  });

  // Paths the watcher should skip, plus each package's src/ directory
  // (only built output in dist/ etc. is mirrored).
  const ignored = [
    /[/\\]node_modules[/\\]/i,
    /\.git/i,
    /\.DS_Store/,
    /[/\\]__tests__[/\\]/i,
    /[/\\]__mocks__[/\\]/i,
    /\.npmrc/i,
  ].concat(
    cleanToWatch.map((p) => new RegExp(`${p}[\\/\\\\]src[\\/\\\\]`, `i`))
  );
  const watchers = _.uniq(
    cleanToWatch
      .map((p) => path.join(root, `/packages/`, p))
      .filter((p) => fs.existsSync(p))
  );

  let allCopies = [];
  const packagesToPublish = new Set();
  let isInitialScan = true;
  let isPublishing = false;
  const waitFor = new Set();
  let anyPackageNotInstalled = false;

  const watchEvents = [`change`, `add`];

  chokidar
    .watch(watchers, {
      ignored: [(filePath) => _.some(ignored, (reg) => reg.test(filePath))],
    })
    .on(`all`, async (event, filePath) => {
      if (!watchEvents.includes(event)) {
        return;
      }

      // Derive the package directory name from the changed file's path,
      // then resolve its published package name from its package.json.
      const [pack] = filePath
        .split(/packages[/\\]/)
        .pop()
        .split(/[/\\]/);

      const sourcePkg = JSON.parse(
        fs.readFileSync(path.join(root, `/packages/`, pack, `package.json`))
      );
      const packageName = sourcePkg.name;

      const prefix = path.join(root, `/packages/`, pack);

      // Copy it over local version.
      // Don't copy over the medusa bin file as that breaks the NPM symlink.
      if (_.includes(filePath, `dist/medusa-cli.js`)) {
        return;
      }

      const relativePackageFile = path.relative(prefix, filePath);

      const newPath = path.join(
        `./node_modules/${packageName}`,
        relativePackageFile
      );

      if (relativePackageFile === `package.json`) {
        // package.json files will change during publish to adjust version of package (and dependencies), so ignore
        // changes during this process
        if (isPublishing) {
          return;
        }

        // Compare dependencies with local version
        const didDepsChangedPromise = checkDepsChanges({
          newPath,
          packageName,
          monoRepoPackages,
          root,
          isInitialScan,
          ignoredPackageJSON,
        });

        if (isInitialScan) {
          // normally checkDepsChanges would be sync,
          // but because it also can do async GET request
          // to unpkg if local package is not installed
          // keep track of it to make sure all of it
          // finish before installing
          waitFor.add(didDepsChangedPromise);
        }

        const {
          didDepsChanged,
          packageNotInstalled,
        } = await didDepsChangedPromise;

        if (packageNotInstalled) {
          anyPackageNotInstalled = true;
        }

        if (didDepsChanged) {
          if (isInitialScan) {
            waitFor.delete(didDepsChangedPromise);
            // handle dependency change only in initial scan - this is for sure doable to
            // handle this in watching mode correctly - but for the sake of shipping
            // this I limit more work/time consuming edge cases.

            // Dependency changed - now we need to figure out
            // the packages that actually need to be published.
            // If package with changed dependencies is dependency of other
            // medusa package - like for example `medusa-plugin-page-creator`
            // we need to publish both `medusa-plugin-page-creator` and `medusa`
            // and install `medusa` in example site project.
            getDependantPackages({
              packageName,
              depTree,
              packages,
            }).forEach((packageToPublish) => {
              // scheduling publish - we will publish when `ready` is emitted
              // as we can do single publish then
              packagesToPublish.add(packageToPublish);
            });
          }
        }

        // don't ever copy package.json as this will mess up any future dependency
        // changes checks
        return;
      }

      const localCopies = [copyPath(filePath, newPath, quiet, packageName)];

      // If this is from "cache-dir" also copy it into the site's .cache
      if (_.includes(filePath, `cache-dir`)) {
        const newCachePath = path.join(
          `.cache/`,
          path.relative(path.join(prefix, `cache-dir`), filePath)
        );
        localCopies.push(copyPath(filePath, newCachePath, quiet));
      }

      allCopies = allCopies.concat(localCopies);
    })
    .on(`ready`, async () => {
      // wait for all async work needed to be done
      // before publishing / installing
      await Promise.all(Array.from(waitFor));

      if (isInitialScan) {
        isInitialScan = false;
        if (packagesToPublish.size > 0) {
          isPublishing = true;
          await publishPackagesLocallyAndInstall({
            packagesToPublish: Array.from(packagesToPublish),
            root,
            localPackages,
            ignorePackageJSONChanges,
          });
          packagesToPublish.clear();
          isPublishing = false;
        } else if (anyPackageNotInstalled) {
          // run `yarn`
          const yarnInstallCmd = [`yarn`];
          console.log(`Installing packages from public NPM registry`);
          await promisifiedSpawn(yarnInstallCmd);
          console.log(`Installation complete`);
        }
        await clearJSFilesFromNodeModules();
        runQueuedCopies();
      }

      // all files watched, quit once all files are copied if necessary
      // NOTE(review): floating promise by design — the watcher keeps the
      // process alive unless scanOnce triggers quit().
      Promise.all(allCopies).then(() => {
        if (scanOnce) {
          quit();
        }
      });
    });
}

module.exports = watch;

File diff suppressed because it is too large Load Diff