make medusa-dev-cli compatible with yarn 2/3 (#1765)

This commit is contained in:
Zakaria El Asri
2022-07-03 10:24:41 +01:00
committed by GitHub
parent 3fb74bf512
commit 8fe5fb5503
11 changed files with 464 additions and 378 deletions

View File

@@ -1,12 +1,12 @@
#!/usr/bin/env node
const Configstore = require(`configstore`);
const pkg = require(`../package.json`);
const _ = require(`lodash`);
const path = require(`path`);
const os = require(`os`);
const watch = require(`./watch`);
const { getVersionInfo } = require(`./utils/version`);
const Configstore = require(`configstore`)
const pkg = require(`../package.json`)
const _ = require(`lodash`)
const path = require(`path`)
const os = require(`os`)
const watch = require(`./watch`)
const { getVersionInfo } = require(`./utils/version`)
const argv = require(`yargs`)
.usage(`Usage: medusa-dev [options]`)
.alias(`q`, `quiet`)
@@ -19,7 +19,7 @@ const argv = require(`yargs`)
.nargs(`p`, 1)
.describe(
`p`,
`Set path to Medusa repository.
`Set path to medusa repository.
You typically only need to configure this once.`
)
.nargs(`force-install`, 0)
@@ -27,6 +27,11 @@ You typically only need to configure this once.`
`force-install`,
`Disables copying files into node_modules and forces usage of local npm repository.`
)
.nargs(`external-registry`, 0)
.describe(
`external-registry`,
`Run 'yarn add' commands without the --registry flag.`
)
.alias(`C`, `copy-all`)
.nargs(`C`, 0)
.describe(
@@ -39,87 +44,101 @@ You typically only need to configure this once.`
.alias(`h`, `help`)
.nargs(`v`, 0)
.alias(`v`, `version`)
.describe(`v`, `Print the currently installed version of Medusa Dev CLI`)
.argv;
.describe(`v`, `Print the currently installed version of Medusa Dev CLI`).argv
if (argv.version) {
console.log(getVersionInfo());
process.exit();
console.log(getVersionInfo())
process.exit()
}
const conf = new Configstore(pkg.name);
const conf = new Configstore(pkg.name)
const fs = require(`fs-extra`);
const fs = require(`fs-extra`)
let pathToRepo = argv.setPathToRepo;
let pathToRepo = argv.setPathToRepo
if (pathToRepo) {
if (pathToRepo.includes(`~`)) {
pathToRepo = path.join(os.homedir(), pathToRepo.split(`~`).pop());
pathToRepo = path.join(os.homedir(), pathToRepo.split(`~`).pop())
}
conf.set(`medusa-location`, path.resolve(pathToRepo));
process.exit();
conf.set(`medusa-location`, path.resolve(pathToRepo))
process.exit()
}
const havePackageJsonFile = fs.existsSync(`package.json`);
const havePackageJsonFile = fs.existsSync(`package.json`)
if (!havePackageJsonFile) {
console.error(`Current folder must have a package.json file!`);
process.exit();
console.error(`Current folder must have a package.json file!`)
process.exit()
}
const medusaLocation = conf.get(`medusa-location`);
const medusaLocation = conf.get(`medusa-location`)
if (!medusaLocation) {
console.error(
`
You haven't set the path yet to your cloned
version of medusa. Do so now by running:
medusa-dev --set-path-to-repo /path/to/my/cloned/version/medusa
`
);
process.exit();
)
process.exit()
}
// get list of packages from monorepo
const monoRepoPackages = [];
const packageNameToPath = new Map()
const monoRepoPackages = fs
.readdirSync(path.join(medusaLocation, `packages`))
.map((dirName) => {
try {
const localPkg = JSON.parse(
fs.readFileSync(
path.join(medusaLocation, `packages`, dirName, `package.json`)
)
)
const pkgsDirs = fs.readdirSync(path.join(medusaLocation, `packages`));
for (const dir of pkgsDirs) {
const pack = JSON.parse(
fs.readFileSync(path.join(medusaLocation, `packages`, dir, `package.json`))
);
monoRepoPackages.push(pack.name);
}
if (localPkg?.name) {
packageNameToPath.set(
localPkg.name,
path.join(medusaLocation, `packages`, dirName)
)
return localPkg.name
}
} catch (error) {
// fallback to generic one
}
const localPkg = JSON.parse(fs.readFileSync(`package.json`));
packageNameToPath.set(
dirName,
path.join(medusaLocation, `packages`, dirName)
)
return dirName
})
const localPkg = JSON.parse(fs.readFileSync(`package.json`))
// intersect dependencies with monoRepoPackages to get list of packages that are used
const localPackages = _.intersection(
monoRepoPackages,
Object.keys(_.merge({}, localPkg.dependencies, localPkg.devDependencies))
);
)
if (!argv.packages && _.isEmpty(localPackages)) {
console.error(
`
You haven't got any medusa dependencies into your current package.json
You probably want to pass in a list of packages to start
developing on! For example:
medusa-dev --packages @medusajs/medusa
medusa-dev --packages medusa medusa-js
If you prefer to place them in your package.json dependencies instead,
medusa-dev will pick them up.
`
);
)
if (!argv.forceInstall) {
process.exit();
process.exit()
} else {
console.log(
`Continuing other dependencies installation due to "--forceInstall" flag`
);
)
}
}
@@ -129,4 +148,6 @@ watch(medusaLocation, argv.packages, {
scanOnce: argv.scanOnce,
forceInstall: argv.forceInstall,
monoRepoPackages,
});
packageNameToPath,
externalRegistry: argv.externalRegistry,
})

View File

@@ -1,23 +1,23 @@
const startVerdaccio = require(`verdaccio`).default;
const startVerdaccio = require(`verdaccio`).default
const fs = require(`fs-extra`);
const _ = require(`lodash`);
const fs = require(`fs-extra`)
const _ = require(`lodash`)
let VerdaccioInitPromise = null;
let VerdaccioInitPromise = null
const { verdaccioConfig } = require(`./verdaccio-config`);
const { publishPackage } = require(`./publish-package`);
const { installPackages } = require(`./install-packages`);
const { verdaccioConfig } = require(`./verdaccio-config`)
const { publishPackage } = require(`./publish-package`)
const { installPackages } = require(`./install-packages`)
const startServer = () => {
if (VerdaccioInitPromise) {
return VerdaccioInitPromise;
return VerdaccioInitPromise
}
console.log(`Starting local verdaccio server`);
console.log(`Starting local verdaccio server`)
// clear storage
fs.removeSync(verdaccioConfig.storage);
fs.removeSync(verdaccioConfig.storage)
VerdaccioInitPromise = new Promise((resolve) => {
startVerdaccio(
@@ -29,47 +29,49 @@ const startServer = () => {
(webServer, addr, pkgName, pkgVersion) => {
// console.log(webServer)
webServer.listen(addr.port || addr.path, addr.host, () => {
console.log(`Started local verdaccio server`);
console.log(`Started local verdaccio server`)
resolve();
});
resolve()
})
}
);
});
)
})
return VerdaccioInitPromise;
};
return VerdaccioInitPromise
}
exports.startVerdaccio = startServer;
exports.startVerdaccio = startServer
exports.publishPackagesLocallyAndInstall = async ({
packagesToPublish,
localPackages,
root,
packageNameToPath,
ignorePackageJSONChanges,
yarnWorkspaceRoot,
externalRegistry,
}) => {
await startServer();
await startServer()
const versionPostFix = Date.now();
const versionPostFix = Date.now()
const newlyPublishedPackageVersions = {};
const newlyPublishedPackageVersions = {}
for (const packageName of packagesToPublish) {
newlyPublishedPackageVersions[packageName] = await publishPackage({
packageName,
packagesToPublish,
root,
packageNameToPath,
versionPostFix,
ignorePackageJSONChanges,
});
})
}
const packagesToInstall = _.intersection(packagesToPublish, localPackages);
const packagesToInstall = _.intersection(packagesToPublish, localPackages)
await installPackages({
packagesToInstall,
yarnWorkspaceRoot,
newlyPublishedPackageVersions,
});
};
externalRegistry,
})
}

View File

@@ -8,10 +8,11 @@ const installPackages = async ({
packagesToInstall,
yarnWorkspaceRoot,
newlyPublishedPackageVersions,
externalRegistry,
}) => {
console.log(
`Installing packages from local registry:\n${packagesToInstall
.map(packageAndVersion => ` - ${packageAndVersion}`)
.map((packageAndVersion) => ` - ${packageAndVersion}`)
.join(`\n`)}`
)
let installCmd
@@ -80,22 +81,22 @@ const installPackages = async ({
process.exit(1)
}
const handleDeps = deps => {
const handleDeps = (deps) => {
if (!deps) {
return false
}
let changed = false
Object.keys(deps).forEach(depName => {
Object.keys(deps).forEach((depName) => {
if (packagesToInstall.includes(depName)) {
deps[depName] = `gatsby-dev`
deps[depName] = `medusa-dev`
changed = true
}
})
return changed
}
Object.keys(workspacesLayout).forEach(workspaceName => {
Object.keys(workspacesLayout).forEach((workspaceName) => {
const { location } = workspacesLayout[workspaceName]
const pkgJsonPath = path.join(yarnWorkspaceRoot, location, `package.json`)
if (!fs.existsSync(pkgJsonPath)) {
@@ -109,7 +110,7 @@ const installPackages = async ({
changed |= handleDeps(pkg.peerDependencies)
if (changed) {
console.log(`Changing deps in ${pkgJsonPath} to use @gatsby-dev`)
console.log(`Changing deps in ${pkgJsonPath} to use @medusa-dev`)
fs.outputJSONSync(pkgJsonPath, pkg, {
spaces: 2,
})
@@ -118,24 +119,28 @@ const installPackages = async ({
// package.json files are changed - so we just want to install
// using verdaccio registry
installCmd = [
`yarn`,
[`install`, `--registry=${registryUrl}`, `--ignore-engines`],
]
const yarnCommands = [`install`]
if (!externalRegistry) {
yarnCommands.push(`--registry=${registryUrl}`)
}
installCmd = [`yarn`, yarnCommands]
} else {
installCmd = [
`yarn`,
[
`add`,
...packagesToInstall.map(packageName => {
const packageVersion = newlyPublishedPackageVersions[packageName]
return `${packageName}@${packageVersion}`
}),
`--registry=${registryUrl}`,
`--exact`,
`--ignore-engines`,
],
const yarnCommands = [
`add`,
...packagesToInstall.map((packageName) => {
const packageVersion = newlyPublishedPackageVersions[packageName]
return `${packageName}@${packageVersion}`
}),
`--exact`,
]
if (!externalRegistry) {
yarnCommands.push(`--registry=${registryUrl}`)
}
installCmd = [`yarn`, yarnCommands]
}
try {

View File

@@ -1,18 +1,18 @@
const fs = require(`fs-extra`);
const path = require(`path`);
const fs = require(`fs-extra`)
const path = require(`path`)
const { promisifiedSpawn } = require(`../utils/promisified-spawn`);
const { registryUrl } = require(`./verdaccio-config`);
const { promisifiedSpawn } = require(`../utils/promisified-spawn`)
const { registryUrl } = require(`./verdaccio-config`)
const NPMRCContent = `${registryUrl.replace(
/https?:/g,
``
)}/:_authToken="medusa-dev"`;
)}/:_authToken="medusa-dev"`
const {
getMonorepoPackageJsonPath,
} = require(`../utils/get-monorepo-package-json-path`);
const { registerCleanupTask } = require(`./cleanup-tasks`);
} = require(`../utils/get-monorepo-package-json-path`)
const { registerCleanupTask } = require(`./cleanup-tasks`)
/**
* Edit package.json to:
@@ -27,7 +27,7 @@ const adjustPackageJson = ({
versionPostFix,
packagesToPublish,
ignorePackageJSONChanges,
root,
packageNameToPath,
}) => {
// we need to check if package depend on any other package to will be published and
// adjust version selector to point to dev version of package so local registry is used
@@ -36,10 +36,10 @@ const adjustPackageJson = ({
const monorepoPKGjsonString = fs.readFileSync(
monoRepoPackageJsonPath,
`utf-8`
);
const monorepoPKGjson = JSON.parse(monorepoPKGjsonString);
)
const monorepoPKGjson = JSON.parse(monorepoPKGjsonString)
monorepoPKGjson.version = `${monorepoPKGjson.version}-dev-${versionPostFix}`;
monorepoPKGjson.version = `${monorepoPKGjson.version}-dev-${versionPostFix}`
packagesToPublish.forEach((packageThatWillBePublished) => {
if (
monorepoPKGjson.dependencies &&
@@ -49,37 +49,37 @@ const adjustPackageJson = ({
fs.readFileSync(
getMonorepoPackageJsonPath({
packageName: packageThatWillBePublished,
root,
packageNameToPath,
}),
`utf-8`
)
).version;
).version
monorepoPKGjson.dependencies[
packageThatWillBePublished
] = `${currentVersion}-dev-${versionPostFix}`;
] = `${currentVersion}-dev-${versionPostFix}`
}
});
})
const temporaryMonorepoPKGjsonString = JSON.stringify(monorepoPKGjson);
const temporaryMonorepoPKGjsonString = JSON.stringify(monorepoPKGjson)
const unignorePackageJSONChanges = ignorePackageJSONChanges(packageName, [
monorepoPKGjsonString,
temporaryMonorepoPKGjsonString,
]);
])
// change version and dependency versions
fs.outputFileSync(monoRepoPackageJsonPath, temporaryMonorepoPKGjsonString);
fs.outputFileSync(monoRepoPackageJsonPath, temporaryMonorepoPKGjsonString)
return {
newPackageVersion: monorepoPKGjson.version,
unadjustPackageJson: registerCleanupTask(() => {
// restore original package.json
fs.outputFileSync(monoRepoPackageJsonPath, monorepoPKGjsonString);
unignorePackageJSONChanges();
fs.outputFileSync(monoRepoPackageJsonPath, monorepoPKGjsonString)
unignorePackageJSONChanges()
}),
};
};
}
}
/**
* Anonymous publishing require dummy .npmrc
@@ -88,38 +88,38 @@ const adjustPackageJson = ({
* This is not verdaccio restriction.
*/
const createTemporaryNPMRC = ({ pathToPackage }) => {
const NPMRCPath = path.join(pathToPackage, `.npmrc`);
fs.outputFileSync(NPMRCPath, NPMRCContent);
const NPMRCPath = path.join(pathToPackage, `.npmrc`)
fs.outputFileSync(NPMRCPath, NPMRCContent)
return registerCleanupTask(() => {
fs.removeSync(NPMRCPath);
});
};
fs.removeSync(NPMRCPath)
})
}
const publishPackage = async ({
packageName,
packagesToPublish,
root,
versionPostFix,
ignorePackageJSONChanges,
packageNameToPath,
}) => {
const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({
packageName,
root,
});
packageNameToPath,
})
const { unadjustPackageJson, newPackageVersion } = adjustPackageJson({
monoRepoPackageJsonPath,
packageName,
root,
packageNameToPath,
versionPostFix,
packagesToPublish,
ignorePackageJSONChanges,
});
})
const pathToPackage = path.dirname(monoRepoPackageJsonPath);
const pathToPackage = path.dirname(monoRepoPackageJsonPath)
const uncreateTemporaryNPMRC = createTemporaryNPMRC({ pathToPackage });
const uncreateTemporaryNPMRC = createTemporaryNPMRC({ pathToPackage })
// npm publish
const publishCmd = [
@@ -128,26 +128,26 @@ const publishPackage = async ({
{
cwd: pathToPackage,
},
];
]
console.log(
`Publishing ${packageName}@${newPackageVersion} to local registry`
);
)
try {
await promisifiedSpawn(publishCmd);
await promisifiedSpawn(publishCmd)
console.log(
`Published ${packageName}@${newPackageVersion} to local registry`
);
)
} catch (e) {
console.error(`Failed to publish ${packageName}@${newPackageVersion}`, e);
process.exit(1);
console.error(`Failed to publish ${packageName}@${newPackageVersion}`, e)
process.exit(1)
}
uncreateTemporaryNPMRC();
unadjustPackageJson();
uncreateTemporaryNPMRC()
unadjustPackageJson()
return newPackageVersion;
};
return newPackageVersion
}
exports.publishPackage = publishPackage;
exports.publishPackage = publishPackage

View File

@@ -1,5 +1,15 @@
const { getDependantPackages } = require(`../get-dependant-packages`)
// Build the `packageNameToPath` Map the code under test expects,
// mapping each package name to a synthetic `/test/<name>` location.
function createMockPackageNameToPath(packageNames) {
  return new Map(packageNames.map((name) => [name, `/test/${name}`]))
}
describe(`getDependantPackages`, () => {
it(`handles deep dependency chains`, () => {
const packagesToPublish = getDependantPackages({
@@ -9,6 +19,13 @@ describe(`getDependantPackages`, () => {
"package-a-dep1-dep1": new Set([`package-a-dep1`]),
"not-related": new Set([`also-not-related`]),
},
packageNameToPath: createMockPackageNameToPath([
`package-a`,
`package-a-dep1`,
`package-a-dep1-dep1`,
`not-related`,
`also-not-related`,
]),
})
expect(packagesToPublish).toEqual(
@@ -23,6 +40,10 @@ describe(`getDependantPackages`, () => {
"package-a": new Set([`package-b`]),
"package-b": new Set([`package-a`]),
},
packageNameToPath: createMockPackageNameToPath([
`package-a`,
`package-b`,
]),
})
expect(packagesToPublish).toEqual(new Set([`package-a`, `package-b`]))
})

View File

@@ -43,15 +43,25 @@ jest.doMock(
describe(`traversePackageDeps`, () => {
it(`handles deep dependency chains`, () => {
const monoRepoPackages = [
`package-a`,
`package-a-dep1`,
`package-a-dep1-dep1`,
`package-not-used`,
]
const packageNameToPath = new Map()
for (const packageName of monoRepoPackages) {
packageNameToPath.set(
packageName,
path.join(...`<monorepo-path>/packages/${packageName}`.split(`/`))
)
}
const { seenPackages, depTree } = traversePackagesDeps({
root: `<monorepo-path>`,
packages: [`package-a`, `doesnt-exist`],
monoRepoPackages: [
`package-a`,
`package-a-dep1`,
`package-a-dep1-dep1`,
`package-not-used`,
],
monoRepoPackages,
packageNameToPath,
})
expect(seenPackages).toEqual([

View File

@@ -1,9 +1,9 @@
const fs = require(`fs-extra`);
const _ = require(`lodash`);
const fs = require(`fs-extra`)
const _ = require(`lodash`)
const {
getMonorepoPackageJsonPath,
} = require(`./get-monorepo-package-json-path`);
const got = require(`got`);
} = require(`./get-monorepo-package-json-path`)
const got = require(`got`)
function difference(object, base) {
function changes(object, base) {
@@ -12,11 +12,11 @@ function difference(object, base) {
result[key] =
_.isObject(value) && _.isObject(base[key])
? changes(value, base[key])
: value;
: value
}
});
})
}
return changes(object, base);
return changes(object, base)
}
/**
@@ -24,33 +24,33 @@ function difference(object, base) {
* It will skip dependencies that are removed in monorepo package.
*
* If local package is not installed, it will check unpkg.com.
 * This allows medusa-dev to skip publishing unnecessarily and
* This allow gatsby-dev to skip publishing unnecesairly and
* let install packages from public npm repository if nothing changed.
*/
exports.checkDepsChanges = async ({
newPath,
packageName,
monoRepoPackages,
root,
isInitialScan,
ignoredPackageJSON,
packageNameToPath,
}) => {
let localPKGjson;
let packageNotInstalled = false;
let localPKGjson
let packageNotInstalled = false
try {
localPKGjson = JSON.parse(fs.readFileSync(newPath, `utf-8`));
localPKGjson = JSON.parse(fs.readFileSync(newPath, `utf-8`))
} catch {
packageNotInstalled = true;
packageNotInstalled = true
// there is no local package - so we still need to install deps
// this is nice because devs won't need to do initial package installation - we can handle this.
if (!isInitialScan) {
console.log(
`'${packageName}' doesn't seem to be installed. Restart medusa-dev to publish it`
);
`'${packageName}' doesn't seem to be installed. Restart gatsby-dev to publish it`
)
return {
didDepsChanged: false,
packageNotInstalled,
};
}
}
// if package is not installed, we will do http GET request to
@@ -60,37 +60,33 @@ exports.checkDepsChanges = async ({
// this allow us to not publish to local repository
// and save some time/work
try {
const response = await got(
`https://unpkg.com/${packageName}/package.json`
);
const version = getPackageVersion(packageName)
const url = `https://unpkg.com/${packageName}@${version}/package.json`
const response = await got(url)
if (response?.statusCode !== 200) {
throw new Error(`No response or non 200 code`);
throw new Error(`No response or non 200 code for ${url}`)
}
localPKGjson = JSON.parse(response.body);
} catch {
localPKGjson = JSON.parse(response.body)
} catch (e) {
console.log(
`'${packageName}' doesn't seem to be installed and is not published on NPM.`
);
`'${packageName}' doesn't seem to be installed and is not published on NPM. Error: ${e.message}`
)
return {
didDepsChanged: true,
packageNotInstalled,
};
}
}
}
const monoDir = packageName.startsWith("@medusajs")
? packageName.split("/")[1]
: packageName;
const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({
packageName: monoDir,
root,
});
packageName,
packageNameToPath,
})
const monorepoPKGjsonString = fs.readFileSync(
monoRepoPackageJsonPath,
`utf-8`
);
const monorepoPKGjson = JSON.parse(monorepoPKGjsonString);
)
const monorepoPKGjson = JSON.parse(monorepoPKGjsonString)
if (ignoredPackageJSON.has(packageName)) {
if (ignoredPackageJSON.get(packageName).includes(monorepoPKGjsonString)) {
// we are in middle of publishing and content of package.json is one set during publish process,
@@ -98,44 +94,44 @@ exports.checkDepsChanges = async ({
return {
didDepsChanged: false,
packageNotInstalled,
};
}
}
}
if (!monorepoPKGjson.dependencies) monorepoPKGjson.dependencies = {};
if (!localPKGjson.dependencies) localPKGjson.dependencies = {};
if (!monorepoPKGjson.dependencies) monorepoPKGjson.dependencies = {}
if (!localPKGjson.dependencies) localPKGjson.dependencies = {}
const areDepsEqual = _.isEqual(
monorepoPKGjson.dependencies,
localPKGjson.dependencies
);
)
if (!areDepsEqual) {
const diff = difference(
monorepoPKGjson.dependencies,
localPKGjson.dependencies
);
)
const diff2 = difference(
localPKGjson.dependencies,
monorepoPKGjson.dependencies
);
)
let needPublishing = false;
let isPublishing = false;
let needPublishing = false
let isPublishing = false
const depChangeLog = _.uniq(Object.keys({ ...diff, ...diff2 }))
.reduce((acc, key) => {
if (monorepoPKGjson.dependencies[key] === `medusa-dev`) {
if (monorepoPKGjson.dependencies[key] === `gatsby-dev`) {
// if we are in middle of publishing to local repository - ignore
isPublishing = true;
return acc;
isPublishing = true
return acc
}
if (localPKGjson.dependencies[key] === `medusa-dev`) {
if (localPKGjson.dependencies[key] === `gatsby-dev`) {
// monorepo packages will restore version, but after installation
// in local site - it will use `medusa-dev` dist tag - we need
// in local site - it will use `gatsby-dev` dist tag - we need
// to ignore changes that
return acc;
return acc
}
if (
@@ -143,48 +139,55 @@ exports.checkDepsChanges = async ({
monorepoPKGjson.dependencies[key]
) {
// Check only for version changes in packages
// that are not from medusa repo.
// Changes in medusa packages will be copied over
// that are not from gatsby repo.
// Changes in gatsby packages will be copied over
// from monorepo - and if those contain other dependency
// changes - they will be covered
if (!monoRepoPackages.includes(key)) {
acc.push(
` - '${key}' changed version from ${localPKGjson.dependencies[key]} to ${monorepoPKGjson.dependencies[key]}`
);
needPublishing = true;
)
needPublishing = true
}
} else if (monorepoPKGjson.dependencies[key]) {
acc.push(
` - '${key}@${monorepoPKGjson.dependencies[key]}' was added`
);
needPublishing = true;
acc.push(` - '${key}@${monorepoPKGjson.dependencies[key]}' was added`)
needPublishing = true
} else {
acc.push(` - '${key}@${localPKGjson.dependencies[key]}' was removed`);
acc.push(` - '${key}@${localPKGjson.dependencies[key]}' was removed`)
// this doesn't need publishing really, so will skip this
}
return acc;
return acc
}, [])
.join(`\n`);
.join(`\n`)
if (!isPublishing && depChangeLog.length > 0) {
console.log(`Dependencies of '${packageName}' changed:\n${depChangeLog}`);
console.log(`Dependencies of '${packageName}' changed:\n${depChangeLog}`)
if (isInitialScan) {
console.log(
`Will ${!needPublishing ? `not ` : ``}publish to local npm registry.`
);
)
} else {
console.warn(
`Installation of dependencies after initial scan is not implemented`
);
)
}
return {
didDepsChanged: needPublishing,
packageNotInstalled,
};
}
}
}
return {
didDepsChanged: false,
packageNotInstalled,
};
};
}
}
// Look up the version range the current project's package.json declares
// for `packageName`, checking `dependencies` before `devDependencies`.
// Falls back to the `latest` dist-tag when the package is not listed.
function getPackageVersion(packageName) {
  const rawPkgJson = fs.readFileSync(`./package.json`, `utf-8`)
  const { dependencies = {}, devDependencies = {} } = JSON.parse(rawPkgJson)
  return dependencies[packageName] || devDependencies[packageName] || `latest`
}

View File

@@ -1,11 +1,4 @@
const path = require(`path`);
const path = require(`path`)
exports.getMonorepoPackageJsonPath = ({ packageName, root }) => {
let dirName = packageName;
if (packageName.startsWith("@medusajs")) {
const [, directory] = packageName.split("/");
dirName = directory;
}
return path.join(root, `packages`, dirName, `package.json`);
};
exports.getMonorepoPackageJsonPath = ({ packageName, packageNameToPath }) =>
path.join(packageNameToPath.get(packageName), `package.json`)

View File

@@ -1,10 +1,17 @@
const _ = require(`lodash`);
const path = require(`path`);
const _ = require(`lodash`)
const path = require(`path`)
/**
* @typedef {Object} TraversePackagesDepsReturn
* @property {Object} depTree Lookup table to check dependants for given package.
* Used to determine which packages need to be published.
* @example
* ```
* {
* "medusa-cli": Set(["medusa"]),
* "medusa-telemetry": Set(["medusa", "medusa-cli"]),
* }
* ```
*/
/**
@@ -24,53 +31,58 @@ const path = require(`path`);
* @return {TraversePackagesDepsReturn}
*/
const traversePackagesDeps = ({
root,
packages,
monoRepoPackages,
seenPackages = [...packages],
depTree = {},
packageNameToPath,
}) => {
packages.forEach((p) => {
if (p.startsWith("@medusajs")) {
p = p.split("/")[1];
}
let pkgJson;
let pkgJson
try {
pkgJson = require(path.join(root, `packages`, p, `package.json`));
} catch {
console.error(`"${p}" package doesn't exist in monorepo.`);
const packageRoot = packageNameToPath.get(p)
if (packageRoot) {
pkgJson = require(path.join(packageRoot, `package.json`))
} else {
console.error(`"${p}" package doesn't exist in monorepo.`)
// remove from seenPackages
seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p)
return
}
} catch (e) {
console.error(`"${p}" package doesn't exist in monorepo.`, e)
// remove from seenPackages
seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p);
return;
seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p)
return
}
const fromMonoRepo = _.intersection(
Object.keys({ ...pkgJson.dependencies }),
monoRepoPackages
);
)
fromMonoRepo.forEach((pkgName) => {
depTree[pkgName] = (depTree[pkgName] || new Set()).add(p);
});
depTree[pkgName] = (depTree[pkgName] || new Set()).add(p)
})
// only traverse not yet seen packages to avoid infinite loops
const newPackages = _.difference(fromMonoRepo, seenPackages);
const newPackages = _.difference(fromMonoRepo, seenPackages)
if (newPackages.length) {
newPackages.forEach((depFromMonorepo) => {
seenPackages.push(depFromMonorepo);
});
seenPackages.push(depFromMonorepo)
})
traversePackagesDeps({
root,
packages: fromMonoRepo,
monoRepoPackages,
seenPackages,
depTree,
});
packageNameToPath,
})
}
});
return { seenPackages, depTree };
};
})
return { seenPackages, depTree }
}
exports.traversePackagesDeps = traversePackagesDeps;
exports.traversePackagesDeps = traversePackagesDeps

View File

@@ -1,27 +1,27 @@
const chokidar = require(`chokidar`);
const _ = require(`lodash`);
const del = require(`del`);
const fs = require(`fs-extra`);
const path = require(`path`);
const findWorkspaceRoot = require(`find-yarn-workspace-root`);
const chokidar = require(`chokidar`)
const _ = require(`lodash`)
const del = require(`del`)
const fs = require(`fs-extra`)
const path = require(`path`)
const findWorkspaceRoot = require(`find-yarn-workspace-root`)
const { publishPackagesLocallyAndInstall } = require(`./local-npm-registry`);
const { checkDepsChanges } = require(`./utils/check-deps-changes`);
const { getDependantPackages } = require(`./utils/get-dependant-packages`);
const { publishPackagesLocallyAndInstall } = require(`./local-npm-registry`)
const { checkDepsChanges } = require(`./utils/check-deps-changes`)
const { getDependantPackages } = require(`./utils/get-dependant-packages`)
const {
setDefaultSpawnStdio,
promisifiedSpawn,
} = require(`./utils/promisified-spawn`);
const { traversePackagesDeps } = require(`./utils/traverse-package-deps`);
} = require(`./utils/promisified-spawn`)
const { traversePackagesDeps } = require(`./utils/traverse-package-deps`)
let numCopied = 0;
let numCopied = 0
const quit = () => {
console.log(`Copied ${numCopied} files`);
process.exit();
};
console.log(`Copied ${numCopied} files`)
process.exit()
}
const MAX_COPY_RETRIES = 3;
const MAX_COPY_RETRIES = 3
/*
* non-existent packages break on('ready')
@@ -30,76 +30,89 @@ const MAX_COPY_RETRIES = 3;
async function watch(
root,
packages,
{ scanOnce, quiet, forceInstall, monoRepoPackages, localPackages }
{
scanOnce,
quiet,
forceInstall,
monoRepoPackages,
localPackages,
packageNameToPath,
externalRegistry,
}
) {
setDefaultSpawnStdio(quiet ? `ignore` : `inherit`);
setDefaultSpawnStdio(quiet ? `ignore` : `inherit`)
// determine if in yarn workspace - if in workspace, force using verdaccio
// as current logic of copying files will not work correctly.
const yarnWorkspaceRoot = findWorkspaceRoot();
const yarnWorkspaceRoot = findWorkspaceRoot()
if (yarnWorkspaceRoot && process.env.NODE_ENV !== `test`) {
console.log(`Yarn workspace found.`);
forceInstall = true;
console.log(`Yarn workspace found.`)
forceInstall = true
}
let afterPackageInstallation = false;
let queuedCopies = [];
let afterPackageInstallation = false
let queuedCopies = []
const realCopyPath = (arg) => {
const { oldPath, newPath, quiet, resolve, reject, retry = 0 } = arg;
const { oldPath, newPath, quiet, resolve, reject, retry = 0 } = arg
fs.copy(oldPath, newPath, (err) => {
if (err) {
if (retry >= MAX_COPY_RETRIES) {
console.error(err);
reject(err);
return;
console.error(err)
reject(err)
return
} else {
setTimeout(
() => realCopyPath({ ...arg, retry: retry + 1 }),
500 * Math.pow(2, retry)
);
return;
)
return
}
}
// When the medusa binary is copied over, it is not setup with the executable
// permissions that it is given when installed via yarn.
// This fixes the issue where after running meduas-dev, running `yarn medusa develop`
// This fixes the issue where after running medusa-dev, running `yarn medusa develop`
// fails with a permission issue.
if (/(bin\/meduas.js|medusa(-cli)?\/cli.js)$/.test(newPath)) {
fs.chmodSync(newPath, `0755`);
// @fixes https://github.com/medusajs/medusa/issues/18809
// Binary files we target:
// - medusa/bin/medusa.js
        // - medusa/cli.js
        // - medusa-cli/cli.js
if (/(bin\/medusa.js|medusa(-cli)?\/cli.js)$/.test(newPath)) {
fs.chmodSync(newPath, `0755`)
}
numCopied += 1;
numCopied += 1
if (!quiet) {
console.log(`Copied ${oldPath} to ${newPath}`);
console.log(`Copied ${oldPath} to ${newPath}`)
}
resolve();
});
};
resolve()
})
}
const copyPath = (oldPath, newPath, quiet, packageName) =>
new Promise((resolve, reject) => {
const argObj = { oldPath, newPath, quiet, packageName, resolve, reject };
const argObj = { oldPath, newPath, quiet, packageName, resolve, reject }
if (afterPackageInstallation) {
realCopyPath(argObj);
realCopyPath(argObj)
} else {
queuedCopies.push(argObj);
queuedCopies.push(argObj)
}
});
})
const runQueuedCopies = () => {
afterPackageInstallation = true;
queuedCopies.forEach((argObj) => realCopyPath(argObj));
queuedCopies = [];
};
afterPackageInstallation = true
queuedCopies.forEach((argObj) => realCopyPath(argObj))
queuedCopies = []
}
const clearJSFilesFromNodeModules = async () => {
const packagesToClear = queuedCopies.reduce((acc, { packageName }) => {
if (packageName) {
acc.add(packageName);
acc.add(packageName)
}
return acc;
}, new Set());
return acc
}, new Set())
await Promise.all(
[...packagesToClear].map(
@@ -110,65 +123,64 @@ async function watch(
`!node_modules/${packageToClear}/src/**/*.{js,js.map}`,
])
)
);
};
)
}
// check packages deps and if they depend on other packages from monorepo
// add them to packages list
const { seenPackages, depTree } = traversePackagesDeps({
root,
packages: _.uniq(localPackages),
monoRepoPackages,
});
packageNameToPath,
})
const allPackagesToWatch = packages
? _.intersection(packages, seenPackages)
: seenPackages;
: seenPackages
const ignoredPackageJSON = new Map();
const ignoredPackageJSON = new Map()
const ignorePackageJSONChanges = (packageName, contentArray) => {
ignoredPackageJSON.set(packageName, contentArray);
ignoredPackageJSON.set(packageName, contentArray)
return () => {
ignoredPackageJSON.delete(packageName);
};
};
ignoredPackageJSON.delete(packageName)
}
}
if (forceInstall) {
try {
if (allPackagesToWatch.length > 0) {
await publishPackagesLocallyAndInstall({
packagesToPublish: allPackagesToWatch,
root,
packageNameToPath,
localPackages,
ignorePackageJSONChanges,
yarnWorkspaceRoot,
});
externalRegistry,
})
} else {
// run `yarn`
const yarnInstallCmd = [`yarn`];
const yarnInstallCmd = [`yarn`]
console.log(`Installing packages from public NPM registry`);
await promisifiedSpawn(yarnInstallCmd);
console.log(`Installation complete`);
console.log(`Installing packages from public NPM registry`)
await promisifiedSpawn(yarnInstallCmd)
console.log(`Installation complete`)
}
} catch (e) {
console.log(e);
console.log(e)
}
process.exit();
process.exit()
}
if (allPackagesToWatch.length === 0) {
console.error(`There are no packages to watch.`);
return;
console.error(`There are no packages to watch.`)
return
}
const cleanToWatch = allPackagesToWatch.map((pkgName) => {
if (pkgName.startsWith(`@medusajs`)) {
return pkgName.split("/")[1];
}
return pkgName;
});
const allPackagesIgnoringThemesToWatch = allPackagesToWatch.filter(
(pkgName) => !pkgName.startsWith(`medusa-theme`)
)
const ignored = [
/[/\\]node_modules[/\\]/i,
@@ -178,63 +190,70 @@ async function watch(
/[/\\]__mocks__[/\\]/i,
/\.npmrc/i,
].concat(
cleanToWatch.map((p) => new RegExp(`${p}[\\/\\\\]src[\\/\\\\]`, `i`))
);
allPackagesIgnoringThemesToWatch.map(
(p) => new RegExp(`${p}[\\/\\\\]src[\\/\\\\]`, `i`)
)
)
const watchers = _.uniq(
cleanToWatch
.map((p) => path.join(root, `/packages/`, p))
allPackagesToWatch
.map((p) => path.join(packageNameToPath.get(p)))
.filter((p) => fs.existsSync(p))
);
)
let allCopies = [];
const packagesToPublish = new Set();
let isInitialScan = true;
let isPublishing = false;
let allCopies = []
const packagesToPublish = new Set()
let isInitialScan = true
let isPublishing = false
const waitFor = new Set();
let anyPackageNotInstalled = false;
const watchEvents = [`change`, `add`];
const waitFor = new Set()
let anyPackageNotInstalled = false
const watchEvents = [`change`, `add`]
const packagePathMatchingEntries = Array.from(packageNameToPath.entries())
chokidar
.watch(watchers, {
ignored: [(filePath) => _.some(ignored, (reg) => reg.test(filePath))],
})
.on(`all`, async (event, filePath) => {
if (!watchEvents.includes(event)) {
return;
return
}
const [pack] = filePath
.split(/packages[/\\]/)
.pop()
.split(/[/\\]/);
// match against paths
let packageName
const sourcePkg = JSON.parse(
fs.readFileSync(path.join(root, `/packages/`, pack, `package.json`))
);
const packageName = sourcePkg.name;
for (const [_packageName, packagePath] of packagePathMatchingEntries) {
const relativeToThisPackage = path.relative(packagePath, filePath)
if (!relativeToThisPackage.startsWith(`..`)) {
packageName = _packageName
break
}
}
const prefix = path.join(root, `/packages/`, pack);
if (!packageName) {
return
}
const prefix = packageNameToPath.get(packageName)
// Copy it over local version.
// Don't copy over the medusa bin file as that breaks the NPM symlink.
if (_.includes(filePath, `dist/medusa-cli.js`)) {
return;
return
}
const relativePackageFile = path.relative(prefix, filePath);
const relativePackageFile = path.relative(prefix, filePath)
const newPath = path.join(
`./node_modules/${packageName}`,
relativePackageFile
);
)
if (relativePackageFile === `package.json`) {
// package.json files will change during publish to adjust version of package (and dependencies), so ignore
// changes during this process
if (isPublishing) {
return;
return
}
// Compare dependencies with local version
@@ -243,10 +262,10 @@ async function watch(
newPath,
packageName,
monoRepoPackages,
root,
packageNameToPath,
isInitialScan,
ignoredPackageJSON,
});
})
if (isInitialScan) {
// normally checkDepsChanges would be sync,
@@ -255,21 +274,19 @@ async function watch(
// keep track of it to make sure all of it
// finish before installing
waitFor.add(didDepsChangedPromise);
waitFor.add(didDepsChangedPromise)
}
const {
didDepsChanged,
packageNotInstalled,
} = await didDepsChangedPromise;
const { didDepsChanged, packageNotInstalled } =
await didDepsChangedPromise
if (packageNotInstalled) {
anyPackageNotInstalled = true;
anyPackageNotInstalled = true
}
if (didDepsChanged) {
if (isInitialScan) {
waitFor.delete(didDepsChangedPromise);
waitFor.delete(didDepsChangedPromise)
// handle dependency change only in initial scan - this is for sure doable to
// handle this in watching mode correctly - but for the sake of shipping
// this I limit more work/time consuming edge cases.
@@ -287,66 +304,67 @@ async function watch(
}).forEach((packageToPublish) => {
// scheduling publish - we will publish when `ready` is emitted
// as we can do single publish then
packagesToPublish.add(packageToPublish);
});
packagesToPublish.add(packageToPublish)
})
}
}
// don't ever copy package.json as this will mess up any future dependency
// changes checks
return;
return
}
const localCopies = [copyPath(filePath, newPath, quiet, packageName)];
const localCopies = [copyPath(filePath, newPath, quiet, packageName)]
// If this is from "cache-dir" also copy it into the site's .cache
if (_.includes(filePath, `cache-dir`)) {
const newCachePath = path.join(
`.cache/`,
path.relative(path.join(prefix, `cache-dir`), filePath)
);
localCopies.push(copyPath(filePath, newCachePath, quiet));
)
localCopies.push(copyPath(filePath, newCachePath, quiet))
}
allCopies = allCopies.concat(localCopies);
allCopies = allCopies.concat(localCopies)
})
.on(`ready`, async () => {
// wait for all async work needed to be done
// before publishing / installing
await Promise.all(Array.from(waitFor));
await Promise.all(Array.from(waitFor))
if (isInitialScan) {
isInitialScan = false;
isInitialScan = false
if (packagesToPublish.size > 0) {
isPublishing = true;
isPublishing = true
await publishPackagesLocallyAndInstall({
packagesToPublish: Array.from(packagesToPublish),
root,
packageNameToPath,
localPackages,
ignorePackageJSONChanges,
});
packagesToPublish.clear();
isPublishing = false;
externalRegistry,
})
packagesToPublish.clear()
isPublishing = false
} else if (anyPackageNotInstalled) {
// run `yarn`
const yarnInstallCmd = [`yarn`];
const yarnInstallCmd = [`yarn`]
console.log(`Installing packages from public NPM registry`);
await promisifiedSpawn(yarnInstallCmd);
console.log(`Installation complete`);
console.log(`Installing packages from public NPM registry`)
await promisifiedSpawn(yarnInstallCmd)
console.log(`Installation complete`)
}
await clearJSFilesFromNodeModules();
runQueuedCopies();
await clearJSFilesFromNodeModules()
runQueuedCopies()
}
// all files watched, quit once all files are copied if necessary
Promise.all(allCopies).then(() => {
if (scanOnce) {
quit();
quit()
}
});
});
})
})
}
module.exports = watch;
module.exports = watch