feat: dev cli (#203)

* feat: adds dev-cli

* feat: adds dev-cli

* fix: works with org package names

* fix: medusa cli bin

* hotfix(brightpearl): rounding errors + failing customer test (#199)

* fix: verdaccio publish

* fix: update yarn lock

* fix(CI): update node

* fix: update yarn lock
Sebastian Rindom
2021-03-16 09:51:04 +01:00
committed by GitHub
parent 7c7f86e8e8
commit 695b1fd0a5
24 changed files with 6240 additions and 3 deletions

View File

@@ -0,0 +1,29 @@
const { getDependantPackages } = require(`../get-dependant-packages`)
describe(`getDependantPackages`, () => {
it(`handles deep dependency chains`, () => {
const packagesToPublish = getDependantPackages({
packageName: `package-a-dep1-dep1`,
depTree: {
"package-a-dep1": new Set([`package-a`]),
"package-a-dep1-dep1": new Set([`package-a-dep1`]),
"not-related": new Set([`also-not-related`]),
},
})
expect(packagesToPublish).toEqual(
new Set([`package-a`, `package-a-dep1`, `package-a-dep1-dep1`])
)
})
it(`doesn't get stuck in circular dependency loops`, () => {
const packagesToPublish = getDependantPackages({
packageName: `package-a`,
depTree: {
"package-a": new Set([`package-b`]),
"package-b": new Set([`package-a`]),
},
})
expect(packagesToPublish).toEqual(new Set([`package-a`, `package-b`]))
})
})

View File

@@ -0,0 +1,68 @@
const path = require(`path`)
const { traversePackagesDeps } = require(`../traverse-package-deps`)
jest.doMock(
path.join(...`<monorepo-path>/packages/package-a/package.json`.split(`/`)),
() => {
return {
dependencies: {
"unrelated-package": `*`,
"package-a-dep1": `*`,
},
}
},
{ virtual: true }
)
jest.doMock(
path.join(
...`<monorepo-path>/packages/package-a-dep1/package.json`.split(`/`)
),
() => {
return {
dependencies: {
"package-a-dep1-dep1": `*`,
},
}
},
{ virtual: true }
)
jest.doMock(
path.join(
...`<monorepo-path>/packages/package-a-dep1-dep1/package.json`.split(`/`)
),
() => {
return {
dependencies: {},
}
},
{ virtual: true }
)
describe(`traversePackagesDeps`, () => {
it(`handles deep dependency chains`, () => {
const { seenPackages, depTree } = traversePackagesDeps({
root: `<monorepo-path>`,
packages: [`package-a`, `doesnt-exist`],
monoRepoPackages: [
`package-a`,
`package-a-dep1`,
`package-a-dep1-dep1`,
`package-not-used`,
],
})
expect(seenPackages).toEqual([
`package-a`,
`package-a-dep1`,
`package-a-dep1-dep1`,
])
expect(depTree).toEqual({
"package-a-dep1": new Set([`package-a`]),
"package-a-dep1-dep1": new Set([`package-a-dep1`]),
})
})
})

View File

@@ -0,0 +1,190 @@
const fs = require(`fs-extra`);
const _ = require(`lodash`);
const {
getMonorepoPackageJsonPath,
} = require(`./get-monorepo-package-json-path`);
const got = require(`got`);
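// Deep-diff helper: returns an object containing only the keys of `object`
// whose values differ from the corresponding values in `base`.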
function difference(object, base) {
function changes(object, base) {
return _.transform(object, function (result, value, key) {
if (!_.isEqual(value, base[key])) {
result[key] =
_.isObject(value) && _.isObject(base[key])
? changes(value, base[key])
: value;
}
});
}
return changes(object, base);
}
/**
 * Compares the dependencies of the installed package with those of the
 * monorepo package. Dependencies that were removed in the monorepo package
 * are skipped.
 *
 * If the local package is not installed, it checks unpkg.com instead.
 * This allows medusa-dev to skip publishing unnecessarily and lets packages
 * be installed from the public npm registry if nothing changed.
 */
exports.checkDepsChanges = async ({
newPath,
packageName,
monoRepoPackages,
root,
isInitialScan,
ignoredPackageJSON,
}) => {
let localPKGjson;
let packageNotInstalled = false;
try {
localPKGjson = JSON.parse(fs.readFileSync(newPath, `utf-8`));
} catch {
packageNotInstalled = true;
// there is no local package, so we still need to install deps.
// This is nice because devs won't need to do the initial package installation - we can handle it.
if (!isInitialScan) {
console.log(
`'${packageName}' doesn't seem to be installed. Restart medusa-dev to publish it`
);
return {
didDepsChanged: false,
packageNotInstalled,
};
}
// if the package is not installed, we do an HTTP GET request to
// unpkg to check whether the dependencies of the package published
// in the public npm registry are different.
// This allows us to skip publishing to the local registry
// and save some time/work.
try {
const response = await got(
`https://unpkg.com/${packageName}/package.json`
);
if (response?.statusCode !== 200) {
throw new Error(`No response or non 200 code`);
}
localPKGjson = JSON.parse(response.body);
} catch {
console.log(
`'${packageName}' doesn't seem to be installed and is not published on NPM.`
);
return {
didDepsChanged: true,
packageNotInstalled,
};
}
}
const monoDir = packageName.startsWith("@medusajs")
? packageName.split("/")[1]
: packageName;
const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({
packageName: monoDir,
root,
});
const monorepoPKGjsonString = fs.readFileSync(
monoRepoPackageJsonPath,
`utf-8`
);
const monorepoPKGjson = JSON.parse(monorepoPKGjsonString);
if (ignoredPackageJSON.has(packageName)) {
if (ignoredPackageJSON.get(packageName).includes(monorepoPKGjsonString)) {
// we are in the middle of publishing and the content of package.json is the one set during
// the publish process, so we must not report false positives
return {
didDepsChanged: false,
packageNotInstalled,
};
}
}
if (!monorepoPKGjson.dependencies) monorepoPKGjson.dependencies = {};
if (!localPKGjson.dependencies) localPKGjson.dependencies = {};
const areDepsEqual = _.isEqual(
monorepoPKGjson.dependencies,
localPKGjson.dependencies
);
if (!areDepsEqual) {
const diff = difference(
monorepoPKGjson.dependencies,
localPKGjson.dependencies
);
const diff2 = difference(
localPKGjson.dependencies,
monorepoPKGjson.dependencies
);
let needPublishing = false;
let isPublishing = false;
const depChangeLog = _.uniq(Object.keys({ ...diff, ...diff2 }))
.reduce((acc, key) => {
if (monorepoPKGjson.dependencies[key] === `medusa-dev`) {
// if we are in the middle of publishing to the local registry - ignore
isPublishing = true;
return acc;
}
if (localPKGjson.dependencies[key] === `medusa-dev`) {
// monorepo packages will restore the version, but after installation
// in the local site it will use the `medusa-dev` dist tag - we need
// to ignore changes like that
return acc;
}
if (
localPKGjson.dependencies[key] &&
monorepoPKGjson.dependencies[key]
) {
// Check only for version changes in packages
// that are not from the medusa repo.
// Changes in medusa packages will be copied over
// from the monorepo - and if those contain other
// dependency changes, they will be covered.
if (!monoRepoPackages.includes(key)) {
acc.push(
` - '${key}' changed version from ${localPKGjson.dependencies[key]} to ${monorepoPKGjson.dependencies[key]}`
);
needPublishing = true;
}
} else if (monorepoPKGjson.dependencies[key]) {
acc.push(
` - '${key}@${monorepoPKGjson.dependencies[key]}' was added`
);
needPublishing = true;
} else {
acc.push(` - '${key}@${localPKGjson.dependencies[key]}' was removed`);
// this doesn't really need publishing, so we skip it
}
return acc;
}, [])
.join(`\n`);
if (!isPublishing && depChangeLog.length > 0) {
console.log(`Dependencies of '${packageName}' changed:\n${depChangeLog}`);
if (isInitialScan) {
console.log(
`Will ${!needPublishing ? `not ` : ``}publish to local npm registry.`
);
} else {
console.warn(
`Installation of dependencies after initial scan is not implemented`
);
}
return {
didDepsChanged: needPublishing,
packageNotInstalled,
};
}
}
return {
didDepsChanged: false,
packageNotInstalled,
};
};
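For context, a minimal sketch of how the helper above might be invoked from a file watcher. This is not part of the commit; the file name, paths, package names and the empty Map are illustrative assumptions.

const { checkDepsChanges } = require(`./check-deps-changes`)

async function onPackageJsonChange() {
  const { didDepsChanged, packageNotInstalled } = await checkDepsChanges({
    // path to the copy installed in the local project (assumed)
    newPath: `/my-project/node_modules/@medusajs/medusa/package.json`,
    packageName: `@medusajs/medusa`,
    monoRepoPackages: [`@medusajs/medusa`, `medusa-interfaces`],
    root: `/path/to/medusa-monorepo`,
    isInitialScan: true,
    // package name -> package.json contents written during a publish (to be ignored)
    ignoredPackageJSON: new Map(),
  })

  if (didDepsChanged && !packageNotInstalled) {
    // dependencies differ - republish the package to the local registry
  }
}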

View File

@@ -0,0 +1,30 @@
/**
 * Recursively gets the set of packages that depend on the given package.
 * The returned set also includes the passed package itself.
 */
const getDependantPackages = ({
packageName,
depTree,
packagesToPublish = new Set(),
}) => {
if (packagesToPublish.has(packageName)) {
// bail early if package was already handled
return packagesToPublish
}
packagesToPublish.add(packageName)
const dependants = depTree[packageName]
if (dependants) {
dependants.forEach(dependant =>
getDependantPackages({
packageName: dependant,
depTree,
packagesToPublish,
})
)
}
return packagesToPublish
}
exports.getDependantPackages = getDependantPackages

View File

@@ -0,0 +1,11 @@
const path = require(`path`);
exports.getMonorepoPackageJsonPath = ({ packageName, root }) => {
let dirName = packageName;
if (packageName.startsWith("@medusajs")) {
const [, directory] = packageName.split("/");
dirName = directory;
}
return path.join(root, `packages`, dirName, `package.json`);
};
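A quick illustration of the scoped-name handling above; the `/repo` root is an assumed path and the results are shown for a POSIX filesystem.

const { getMonorepoPackageJsonPath } = require(`./get-monorepo-package-json-path`)

// Scoped names drop the `@medusajs/` prefix before resolving the directory:
getMonorepoPackageJsonPath({ packageName: `@medusajs/medusa`, root: `/repo` })
// -> /repo/packages/medusa/package.json
getMonorepoPackageJsonPath({ packageName: `medusa-interfaces`, root: `/repo` })
// -> /repo/packages/medusa-interfaces/package.json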

View File

@@ -0,0 +1,29 @@
const execa = require(`execa`)
const defaultSpawnArgs = {
cwd: process.cwd(),
stdio: `inherit`,
}
exports.setDefaultSpawnStdio = stdio => {
defaultSpawnArgs.stdio = stdio
}
exports.promisifiedSpawn = async ([cmd, args = [], spawnArgs = {}]) => {
const spawnOptions = {
...defaultSpawnArgs,
...spawnArgs,
}
try {
return await execa(cmd, args, spawnOptions)
} catch (e) {
if (spawnOptions.stdio === `ignore`) {
console.log(
`\nCommand "${cmd} ${args.join(
` `
)}" failed.\nTo see details of failed command, rerun "medusa-dev" without "--quiet" or "-q" switch\n`
)
}
throw e
}
}
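A minimal usage sketch for the spawn wrapper above; the require path and the yarn command are assumptions for illustration.

const { promisifiedSpawn, setDefaultSpawnStdio } = require(`./promisified-spawn`)

async function installDeps() {
  // silence child-process output; on failure only a short hint is printed
  setDefaultSpawnStdio(`ignore`)
  await promisifiedSpawn([`yarn`, [`install`]])
}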

View File

@@ -0,0 +1,76 @@
const _ = require(`lodash`);
const path = require(`path`);
/**
 * @typedef {Object} TraversePackagesDepsReturn
 * @property {Object} depTree Lookup table to check the dependants of a given package.
 * Used to determine which packages need to be published.
 */
/**
 * Compiles the final list of packages to watch.
 * This includes the explicitly defined packages and all of their dependencies.
 * Also creates a dependency graph that is used later to determine which packages
 * need to be published when their dependencies change.
 * @param {Object} $0
 * @param {String} $0.root Path to the root of the medusa monorepo
 * @param {String[]} $0.packages Initial array of packages to watch.
 * This can be extracted from project dependencies or explicitly set by the `--packages` flag
 * @param {String[]} $0.monoRepoPackages Array of packages in the medusa monorepo
 * @param {String[]} [$0.seenPackages] Array of packages that were already traversed.
 * This makes sure dependencies are extracted only once for each package and avoids
 * infinite loops.
 * @param {Object} [$0.depTree] Used internally to recursively construct the dependency graph.
 * @return {TraversePackagesDepsReturn}
 */
const traversePackagesDeps = ({
root,
packages,
monoRepoPackages,
seenPackages = [...packages],
depTree = {},
}) => {
packages.forEach((p) => {
if (p.startsWith("@medusajs")) {
p = p.split("/")[1];
}
let pkgJson;
try {
pkgJson = require(path.join(root, `packages`, p, `package.json`));
} catch {
console.error(`"${p}" package doesn't exist in monorepo.`);
// remove from seenPackages
seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p);
return;
}
const fromMonoRepo = _.intersection(
Object.keys({ ...pkgJson.dependencies }),
monoRepoPackages
);
fromMonoRepo.forEach((pkgName) => {
depTree[pkgName] = (depTree[pkgName] || new Set()).add(p);
});
// only traverse not yet seen packages to avoid infinite loops
const newPackages = _.difference(fromMonoRepo, seenPackages);
if (newPackages.length) {
newPackages.forEach((depFromMonorepo) => {
seenPackages.push(depFromMonorepo);
});
traversePackagesDeps({
root,
packages: fromMonoRepo,
monoRepoPackages,
seenPackages,
depTree,
});
}
});
return { seenPackages, depTree };
};
exports.traversePackagesDeps = traversePackagesDeps;
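To show how the graph is meant to be consumed, here is a small sketch combining this helper with getDependantPackages from above. The root path and package names are assumptions, and it only runs against an actual monorepo checkout.

const { traversePackagesDeps } = require(`./traverse-package-deps`)
const { getDependantPackages } = require(`./get-dependant-packages`)

// Build the dependency graph for the packages a local project uses...
const { depTree } = traversePackagesDeps({
  root: `/path/to/medusa-monorepo`,
  packages: [`@medusajs/medusa`],
  monoRepoPackages: [`@medusajs/medusa`, `medusa-interfaces`, `medusa-core-utils`],
})

// ...then, when `medusa-interfaces` changes, collect every watched package that
// (transitively) depends on it and therefore needs to be republished.
const toPublish = getDependantPackages({ packageName: `medusa-interfaces`, depTree })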

View File

@@ -0,0 +1,4 @@
exports.getVersionInfo = () => {
const { version: devCliVersion } = require(`../../package.json`);
return `Medusa Dev CLI version: ${devCliVersion}`;
};