Merge branch 'develop' of github.com:medusajs/medusa into develop

This commit is contained in:
Sebastian Rindom
2021-03-16 13:57:55 +01:00
51 changed files with 7273 additions and 42 deletions

View File

@@ -7,8 +7,7 @@ executors:
parameters:
image:
type: string
# First 10.x LTS release, but old Yarn
default: "10.14"
default: "12.13"
docker:
- image: circleci/node:<< parameters.image >>

View File

@@ -1,5 +1,6 @@
const { dropDatabase } = require("pg-god");
const path = require("path");
const { ReturnReason } = require("@medusajs/medusa");
const setupServer = require("../../../helpers/setup-server");
const { useApi } = require("../../../helpers/use-api");
@@ -468,6 +469,91 @@ describe("/admin/orders", () => {
});
});
describe("POST /admin/orders/:id/return", () => {
let rrId;
beforeEach(async () => {
try {
await adminSeeder(dbConnection);
await orderSeeder(dbConnection);
const created = dbConnection.manager.create(ReturnReason, {
value: "too_big",
label: "Too Big",
});
const result = await dbConnection.manager.save(created);
rrId = result.id;
} catch (err) {
console.log(err);
throw err;
}
});
afterEach(async () => {
const manager = dbConnection.manager;
await manager.query(`DELETE FROM "cart"`);
await manager.query(`DELETE FROM "fulfillment_item"`);
await manager.query(`DELETE FROM "fulfillment"`);
await manager.query(`DELETE FROM "swap"`);
await manager.query(`DELETE FROM "return_item"`);
await manager.query(`DELETE FROM "return_reason"`);
await manager.query(`DELETE FROM "return"`);
await manager.query(`DELETE FROM "claim_image"`);
await manager.query(`DELETE FROM "claim_tag"`);
await manager.query(`DELETE FROM "claim_item"`);
await manager.query(`DELETE FROM "shipping_method"`);
await manager.query(`DELETE FROM "line_item"`);
await manager.query(`DELETE FROM "claim_order"`);
await manager.query(`DELETE FROM "money_amount"`);
await manager.query(`DELETE FROM "product_variant"`);
await manager.query(`DELETE FROM "product"`);
await manager.query(`DELETE FROM "shipping_option"`);
await manager.query(`DELETE FROM "discount"`);
await manager.query(`DELETE FROM "payment"`);
await manager.query(`DELETE FROM "order"`);
await manager.query(`DELETE FROM "customer"`);
await manager.query(
`UPDATE "country" SET region_id=NULL WHERE iso_2 = 'us'`
);
await manager.query(`DELETE FROM "region"`);
await manager.query(`DELETE FROM "user"`);
});
it("creates a return", async () => {
const api = useApi();
const response = await api.post(
"/admin/orders/test-order/return",
{
items: [
{
item_id: "test-item",
quantity: 1,
reason_id: rrId,
note: "TOO SMALL",
},
],
},
{
headers: {
authorization: "Bearer test_token",
},
}
);
expect(response.status).toEqual(200);
expect(response.data.order.returns[0].refund_amount).toEqual(7200);
expect(response.data.order.returns[0].items).toEqual([
expect.objectContaining({
item_id: "test-item",
quantity: 1,
reason_id: rrId,
note: "TOO SMALL",
}),
]);
});
});
describe("GET /admin/orders", () => {
beforeEach(async () => {
try {

View File

@@ -0,0 +1,170 @@
const { dropDatabase } = require("pg-god");
const path = require("path");
const setupServer = require("../../../helpers/setup-server");
const { useApi } = require("../../../helpers/use-api");
const { initDb } = require("../../../helpers/use-db");
const adminSeeder = require("../../helpers/admin-seeder");
jest.setTimeout(30000);
// Integration tests for the /admin/return-reasons endpoints.
describe("/admin/return-reasons", () => {
  let medusaProcess;
  let dbConnection;

  beforeAll(async () => {
    // Boot a dedicated Medusa server and test database for this suite.
    const cwd = path.resolve(path.join(__dirname, "..", ".."));
    dbConnection = await initDb({ cwd });
    medusaProcess = await setupServer({ cwd });
  });

  afterAll(async () => {
    await dbConnection.close();
    await dropDatabase({ databaseName: "medusa-integration" });
    medusaProcess.kill();
  });

  describe("POST /admin/return-reasons", () => {
    beforeEach(async () => {
      try {
        await adminSeeder(dbConnection);
      } catch (err) {
        console.log(err);
        throw err;
      }
    });

    afterEach(async () => {
      // Clean up seeded rows between tests.
      const manager = dbConnection.manager;
      await manager.query(`DELETE FROM "return_reason"`);
      await manager.query(`DELETE FROM "user"`);
    });

    it("creates a return_reason", async () => {
      const api = useApi();

      const payload = {
        label: "Too Big",
        description: "Use this if the size was too big",
        value: "too_big",
      };

      // Let request failures reject so jest reports the real axios error
      // instead of a confusing TypeError on an undefined `response`.
      const response = await api.post("/admin/return-reasons", payload, {
        headers: {
          Authorization: "Bearer test_token",
        },
      });

      expect(response.status).toEqual(200);
      expect(response.data.return_reason).toEqual(
        expect.objectContaining({
          label: "Too Big",
          description: "Use this if the size was too big",
          value: "too_big",
        })
      );
    });

    it("update a return reason", async () => {
      const api = useApi();

      const payload = {
        label: "Too Big Typo",
        description: "Use this if the size was too big",
        value: "too_big",
      };

      const response = await api.post("/admin/return-reasons", payload, {
        headers: {
          Authorization: "Bearer test_token",
        },
      });

      expect(response.status).toEqual(200);
      expect(response.data.return_reason).toEqual(
        expect.objectContaining({
          label: "Too Big Typo",
          description: "Use this if the size was too big",
          value: "too_big",
        })
      );

      // Fix the label typo through the update endpoint.
      const newResponse = await api.post(
        `/admin/return-reasons/${response.data.return_reason.id}`,
        {
          label: "Too Big",
          description: "new desc",
        },
        {
          headers: {
            Authorization: "Bearer test_token",
          },
        }
      );

      expect(newResponse.data.return_reason).toEqual(
        expect.objectContaining({
          label: "Too Big",
          description: "new desc",
          value: "too_big",
        })
      );
    });

    it("list return reasons", async () => {
      const api = useApi();

      const payload = {
        label: "Too Big Typo",
        description: "Use this if the size was too big",
        value: "too_big",
      };

      await api.post("/admin/return-reasons", payload, {
        headers: {
          Authorization: "Bearer test_token",
        },
      });

      const response = await api.get("/admin/return-reasons", {
        headers: {
          Authorization: "Bearer test_token",
        },
      });

      expect(response.status).toEqual(200);
      expect(response.data.return_reasons).toEqual([
        expect.objectContaining({
          value: "too_big",
        }),
      ]);
    });
  });
});

View File

@@ -0,0 +1,69 @@
const { dropDatabase } = require("pg-god");
const path = require("path");
const { ReturnReason } = require("@medusajs/medusa");
const setupServer = require("../../../helpers/setup-server");
const { useApi } = require("../../../helpers/use-api");
const { initDb } = require("../../../helpers/use-db");
jest.setTimeout(30000);
// Integration tests for the storefront /store/return-reasons endpoint.
describe("/store/return-reasons", () => {
  let medusaProcess;
  let dbConnection;

  beforeAll(async () => {
    const cwd = path.resolve(path.join(__dirname, "..", ".."));
    dbConnection = await initDb({ cwd });
    medusaProcess = await setupServer({ cwd });
  });

  afterAll(async () => {
    await dbConnection.close();
    await dropDatabase({ databaseName: "medusa-integration" });
    medusaProcess.kill();
  });

  describe("GET /store/return-reasons", () => {
    let rrId;

    beforeEach(async () => {
      try {
        // Seed one return reason directly through the ORM.
        const created = dbConnection.manager.create(ReturnReason, {
          value: "too_big",
          label: "Too Big",
        });
        const result = await dbConnection.manager.save(created);
        rrId = result.id;
      } catch (err) {
        console.log(err);
        throw err;
      }
    });

    afterEach(async () => {
      const manager = dbConnection.manager;
      await manager.query(`DELETE FROM "return_reason"`);
    });

    it("list return reasons", async () => {
      const api = useApi();

      // No auth header: the storefront API is unauthenticated. Let request
      // failures reject so jest surfaces the real axios error instead of a
      // TypeError on an undefined `response`.
      const response = await api.get("/store/return-reasons");

      expect(response.status).toEqual(200);
      expect(response.data.return_reasons).toEqual([
        expect.objectContaining({
          id: rrId,
          value: "too_big",
        }),
      ]);
    });
  });
});

View File

@@ -2,6 +2,7 @@ const { dropDatabase } = require("pg-god");
const path = require("path");
const {
Region,
ReturnReason,
Order,
Customer,
ShippingProfile,
@@ -35,6 +36,8 @@ describe("/store/carts", () => {
});
describe("POST /store/returns", () => {
let rrId;
beforeEach(async () => {
const manager = dbConnection.manager;
await manager.query(
@@ -110,6 +113,14 @@ describe("/store/carts", () => {
amount: 1000,
is_return: true,
});
const created = dbConnection.manager.create(ReturnReason, {
value: "too_big",
label: "Too Big",
});
const result = await dbConnection.manager.save(created);
rrId = result.id;
});
afterEach(async () => {
@@ -117,6 +128,7 @@ describe("/store/carts", () => {
await manager.query(`DELETE FROM "shipping_method"`);
await manager.query(`DELETE FROM "shipping_option"`);
await manager.query(`DELETE FROM "return_item"`);
await manager.query(`DELETE FROM "return_reason"`);
await manager.query(`DELETE FROM "return"`);
await manager.query(`DELETE FROM "line_item"`);
await manager.query(`DELETE FROM "order"`);
@@ -174,5 +186,33 @@ describe("/store/carts", () => {
expect(response.data.return.refund_amount).toEqual(7000);
});
it("creates a return with reasons", async () => {
const api = useApi();
const response = await api
.post("/store/returns", {
order_id: "order_test",
items: [
{
reason_id: rrId,
note: "TOO small",
item_id: "test-item",
quantity: 1,
},
],
})
.catch((err) => {
return err.response;
});
expect(response.status).toEqual(200);
expect(response.data.return.items).toEqual([
expect.objectContaining({
reason_id: rrId,
note: "TOO small",
}),
]);
});
});
});

View File

@@ -8,15 +8,15 @@
"build": "babel src -d dist --extensions \".ts,.js\""
},
"dependencies": {
"@medusajs/medusa": "1.1.11-dev-1615546159319",
"medusa-interfaces": "1.1.1-dev-1615546159319",
"@medusajs/medusa": "1.1.11-dev-1615882960610",
"medusa-interfaces": "1.1.1-dev-1615882960610",
"typeorm": "^0.2.31"
},
"devDependencies": {
"@babel/cli": "^7.12.10",
"@babel/core": "^7.12.10",
"@babel/node": "^7.12.10",
"babel-preset-medusa-package": "1.1.0-dev-1615546159319",
"babel-preset-medusa-package": "1.1.0-dev-1615882960610",
"jest": "^26.6.3"
}
}

View File

@@ -1369,10 +1369,10 @@
"@types/yargs" "^15.0.0"
chalk "^4.0.0"
"@medusajs/medusa@1.1.11-dev-1615546159319":
version "1.1.11-dev-1615546159319"
resolved "http://localhost:4873/@medusajs%2fmedusa/-/medusa-1.1.11-dev-1615546159319.tgz#88d494cd9f765d69cfa8450aad1c459e3f358c2c"
integrity sha512-0REuafQjwykR6zbjjuIq151S3nLWdHBIFcy7fzKcR7Yhs2XkHMDFToUD01/uY2d/GqmKGvn/HsDLPx2a7Sj4SQ==
"@medusajs/medusa@1.1.11-dev-1615882960610":
version "1.1.11-dev-1615882960610"
resolved "http://localhost:4873/@medusajs%2fmedusa/-/medusa-1.1.11-dev-1615882960610.tgz#805c66ace0750f63ce8d49d89af2b08588455a1b"
integrity sha512-dvkCGMt/lM51ZW6HJoAV70uy6wuuhGS6PAiRvmuofRf331yvHO95KBGMxKbGLtsyL1SAt9Bf1V8Wm4zPYL/BNQ==
dependencies:
"@babel/plugin-transform-classes" "^7.9.5"
"@hapi/joi" "^16.1.8"
@@ -1394,8 +1394,8 @@
joi "^17.3.0"
joi-objectid "^3.0.1"
jsonwebtoken "^8.5.1"
medusa-core-utils "1.1.0-dev-1615546159319"
medusa-test-utils "1.1.3-dev-1615546159319"
medusa-core-utils "1.1.0-dev-1615882960610"
medusa-test-utils "1.1.3-dev-1615882960610"
morgan "^1.9.1"
multer "^1.4.2"
passport "^0.4.0"
@@ -1884,10 +1884,10 @@ babel-preset-jest@^26.6.2:
babel-plugin-jest-hoist "^26.6.2"
babel-preset-current-node-syntax "^1.0.0"
babel-preset-medusa-package@1.1.0-dev-1615546159319:
version "1.1.0-dev-1615546159319"
resolved "http://localhost:4873/babel-preset-medusa-package/-/babel-preset-medusa-package-1.1.0-dev-1615546159319.tgz#eaaa8908d9b04487cbfbd90cd5422c8ade611506"
integrity sha512-IS60RphbSYg+PQA5ywdjba2eL+pyHaHAUm93658fAMezSKnRaDEvtle42DK63EifQC35K1pQc0j7YUbgg8ZODg==
babel-preset-medusa-package@1.1.0-dev-1615882960610:
version "1.1.0-dev-1615882960610"
resolved "http://localhost:4873/babel-preset-medusa-package/-/babel-preset-medusa-package-1.1.0-dev-1615882960610.tgz#1cb3b658a6114759b33bb022a19865d8d5751a9a"
integrity sha512-PF+RAij3oCoXtU7/WbyJVCvassselnk4I4u2NXo7sxqg0Z6V1N9IwEVxTsg7G2PfiXmT2EYaJPPsq9st8lOvuQ==
dependencies:
"@babel/plugin-proposal-class-properties" "^7.12.1"
"@babel/plugin-proposal-decorators" "^7.12.1"
@@ -4493,28 +4493,28 @@ media-typer@0.3.0:
resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748"
integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g=
medusa-core-utils@1.1.0-dev-1615546159319:
version "1.1.0-dev-1615546159319"
resolved "http://localhost:4873/medusa-core-utils/-/medusa-core-utils-1.1.0-dev-1615546159319.tgz#34147e32463b002c51b815f7329e88881a0fbec2"
integrity sha512-t/QeZmHaFdBmn1wC+StZDq+tJescILybCSNIOFLSUlOaUaOPGwZ0UhYN/MNeMrBwAoQq9ZO5QjJsNNbQK33gWA==
medusa-core-utils@1.1.0-dev-1615882960610:
version "1.1.0-dev-1615882960610"
resolved "http://localhost:4873/medusa-core-utils/-/medusa-core-utils-1.1.0-dev-1615882960610.tgz#5358b5edc3cd571071ed5bf6c3781b397f2674f9"
integrity sha512-v6zvSdkJn97jY7JwEebIpHy5SZzj2KO+m5duIRmRTnBQUI7DWVxOOO7DRqc24W0shwCz/H51PdKjhDwdtSgFwQ==
dependencies:
joi "^17.3.0"
joi-objectid "^3.0.1"
medusa-interfaces@1.1.1-dev-1615546159319:
version "1.1.1-dev-1615546159319"
resolved "http://localhost:4873/medusa-interfaces/-/medusa-interfaces-1.1.1-dev-1615546159319.tgz#a75e2841a9c68b26bc688706d59fccfd655c8f41"
integrity sha512-BqE+ARKvJgmLHABuAGfCSquNa1XPF8+ll/dB6XMMjPAPzlGF9kV/GhQIsjq8fzoZ8ZvrLBD0ak5ndOK/BH/NEA==
medusa-interfaces@1.1.1-dev-1615882960610:
version "1.1.1-dev-1615882960610"
resolved "http://localhost:4873/medusa-interfaces/-/medusa-interfaces-1.1.1-dev-1615882960610.tgz#6062b8c6fadb33e89e7ed53b5627a182af1c4511"
integrity sha512-WdyYK1yqSUIXIJSjLpOCRotVAEIVeggnqNh+AtNuxddmMBJrS0tFUhj2RetkyMkoVsjTuEkSOI/LY9V8fK9xCA==
dependencies:
medusa-core-utils "1.1.0-dev-1615546159319"
medusa-core-utils "1.1.0-dev-1615882960610"
medusa-test-utils@1.1.3-dev-1615546159319:
version "1.1.3-dev-1615546159319"
resolved "http://localhost:4873/medusa-test-utils/-/medusa-test-utils-1.1.3-dev-1615546159319.tgz#912b4bf83e03c247bc189ebc315c9db4200f3a71"
integrity sha512-hW8xwrf52a5PAzZDL/kOk9Mlv4Jdpgfx+IPUz/F2D3D8ky3OwikwonXKvPvaRC5JaCdjI/xipPaSWUkJeb1dzg==
medusa-test-utils@1.1.3-dev-1615882960610:
version "1.1.3-dev-1615882960610"
resolved "http://localhost:4873/medusa-test-utils/-/medusa-test-utils-1.1.3-dev-1615882960610.tgz#55326f1884469e5a41221e1a029e792ed3035129"
integrity sha512-cnRbzR4I+R1NkZw1w6e8NwKPDqXodhaK7vAbIc3ogRHt8iXTYIVaU9RJTBCYnmZQ7Y/0zQCSd7NplB+PQ75ssw==
dependencies:
"@babel/plugin-transform-classes" "^7.9.5"
medusa-core-utils "1.1.0-dev-1615546159319"
medusa-core-utils "1.1.0-dev-1615882960610"
randomatic "^3.1.1"
merge-descriptors@1.0.1:

View File

@@ -0,0 +1,3 @@
#!/usr/bin/env node
// Thin launcher: loads the compiled CLI entry point from dist/.
require("./dist/index.js")

View File

@@ -4,7 +4,7 @@
"description": "Command Line interface for Medusa Commerce",
"main": "dist/index.js",
"bin": {
"medusa": "dist/index.js"
"medusa": "cli.js"
},
"repository": {
"type": "git",

View File

@@ -0,0 +1,3 @@
{
"presets": [["babel-preset-medusa-package"]]
}

33
packages/medusa-dev-cli/.gitignore vendored Normal file
View File

@@ -0,0 +1,33 @@
# Logs
logs
*.log
# Runtime data
pids
*.pid
*.seed
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
# Grunt intermediate storage (http://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# node-waf configuration
.lock-wscript
# Compiled binary addons (http://nodejs.org/api/addons.html)
build/Release
# Dependency directory
# https://www.npmjs.org/doc/misc/npm-faq.html#should-i-check-my-node_modules-folder-into-git
node_modules
decls
dist
# verdaccio local storage
verdaccio

View File

@@ -0,0 +1,3 @@
src
flow-typed
verdaccio

View File

@@ -0,0 +1,74 @@
# medusa-dev-cli
A command-line tool for local Medusa development. When doing development work on
Medusa core, this tool lets you copy changes made to the various Medusa
packages into your Medusa projects.
## Install
`npm install -g medusa-dev-cli`
## Configuration / First time setup
The medusa-dev-cli tool needs to know where your cloned Medusa repository is
located. You typically only need to configure this once.
`medusa-dev --set-path-to-repo /path/to/my/cloned/version/medusa`
## How to use
Navigate to the project you want to link to your forked Medusa repository and
run:
`medusa-dev`
The tool will then scan your project's package.json to find its Medusa
dependencies and copy the latest source from your cloned version of Medusa into
your project's node_modules folder. A watch task is then created to re-copy any
modules that might change while you're working on the code, so you can leave
this program running.
Typically you'll also want to run `npm run watch` in the Medusa repo to set up
watchers to build Medusa source code.
## Revert to current packages
If you've recently run `medusa-dev`, your `node_modules` will be out of sync with the currently published packages. To undo this, remove the `node_modules` directory or run:
```shell
git checkout package.json; yarn --force
```
or
```shell
git checkout package.json; npm install --force
```
### Other commands
#### `--packages`
You can prevent the automatic dependencies scan and instead specify a list of
packages you want to link by using the `--packages` option:
`medusa-dev --packages @medusajs/medusa medusa-interfaces`
#### `--scan-once`
With this flag, the tool will do an initial scan and copy and then quit. This is
useful for setting up automated testing/builds of Medusa projects from the latest
code.
#### `--quiet`
Don't output anything except for a success message when used together with
`--scan-once`.
#### `--copy-all`
Copy all modules/files in the medusa source repo in packages/
#### `--force-install`
Disables copying files into node_modules and forces usage of local npm repository.

View File

@@ -0,0 +1,51 @@
{
"name": "medusa-dev-cli",
"description": "CLI helpers for contributors working on Medusa",
"version": "0.0.2-alpha.641+d9faeee4",
"author": "Sebastian Rindom <skrindom@gmail.com>",
"bin": {
"medusa-dev": "./dist/index.js"
},
"dependencies": {
"@babel/runtime": "^7.12.5",
"chokidar": "^3.5.0",
"configstore": "^5.0.1",
"del": "^6.0.0",
"execa": "^4.1.0",
"find-yarn-workspace-root": "^2.0.0",
"fs-extra": "^9.0.1",
"got": "^10.7.0",
"is-absolute": "^1.0.0",
"lodash": "^4.17.21",
"signal-exit": "^3.0.3",
"verdaccio": "^4.10.0",
"yargs": "^15.4.1"
},
"devDependencies": {
"@babel/cli": "^7.12.1",
"@babel/core": "^7.12.3",
"babel-preset-medusa-package": "^1.1.0",
"cross-env": "^7.0.3"
},
"homepage": "https://github.com/medusajs/medusa/tree/master/packages/medusa-dev-cli#readme",
"keywords": [
"medusa"
],
"license": "MIT",
"main": "index.js",
"repository": {
"type": "git",
"url": "https://github.com/medusajs/medusa.git",
"directory": "packages/medusa-dev-cli"
},
"scripts": {
"build": "babel src --out-dir dist --ignore \"**/__tests__\"",
"prepare": "cross-env NODE_ENV=production npm run build",
"test": "echo \"Error: no test specified\" && exit 1",
"watch": "babel -w src --out-dir dist --ignore \"**/__tests__\""
},
"engines": {
"node": ">=12.13.0"
},
"gitHead": "d9faeee4c0dd0930b85a14143443a6d9cc787ab2"
}

View File

@@ -0,0 +1,132 @@
#!/usr/bin/env node

// medusa-dev CLI entry point.
//
// Scans the current project's package.json for Medusa monorepo dependencies
// and hands them to ./watch, which copies the latest source from a local
// Medusa clone into the project (or force-installs via a local registry).

const Configstore = require(`configstore`);
const pkg = require(`../package.json`);
const _ = require(`lodash`);
const path = require(`path`);
const os = require(`os`);
const watch = require(`./watch`);
const { getVersionInfo } = require(`./utils/version`);

const argv = require(`yargs`)
  .usage(`Usage: medusa-dev [options]`)
  .alias(`q`, `quiet`)
  .nargs(`q`, 0)
  .describe(`q`, `Do not output copy file information`)
  .alias(`s`, `scan-once`)
  .nargs(`s`, 0)
  .describe(`s`, `Scan once. Do not start file watch`)
  .alias(`p`, `set-path-to-repo`)
  .nargs(`p`, 1)
  .describe(
    `p`,
    `Set path to Medusa repository.
You typically only need to configure this once.`
  )
  .nargs(`force-install`, 0)
  .describe(
    `force-install`,
    `Disables copying files into node_modules and forces usage of local npm repository.`
  )
  .alias(`C`, `copy-all`)
  .nargs(`C`, 0)
  .describe(
    `C`,
    `Copy all contents in packages/ instead of just medusa packages`
  )
  .array(`packages`)
  .describe(`packages`, `Explicitly specify packages to copy`)
  .help(`h`)
  .alias(`h`, `help`)
  .nargs(`v`, 0)
  .alias(`v`, `version`)
  .describe(`v`, `Print the currently installed version of Medusa Dev CLI`)
  .argv;

if (argv.version) {
  console.log(getVersionInfo());
  process.exit();
}

const conf = new Configstore(pkg.name);

const fs = require(`fs-extra`);

// --set-path-to-repo: persist the monorepo location and exit.
let pathToRepo = argv.setPathToRepo;
if (pathToRepo) {
  if (pathToRepo.includes(`~`)) {
    // Expand a ~ to the user's home directory.
    pathToRepo = path.join(os.homedir(), pathToRepo.split(`~`).pop());
  }
  conf.set(`medusa-location`, path.resolve(pathToRepo));
  process.exit();
}

const havePackageJsonFile = fs.existsSync(`package.json`);
if (!havePackageJsonFile) {
  console.error(`Current folder must have a package.json file!`);
  process.exit();
}

const medusaLocation = conf.get(`medusa-location`);
if (!medusaLocation) {
  console.error(
    `
You haven't set the path yet to your cloned
version of medusa. Do so now by running:
medusa-dev --set-path-to-repo /path/to/my/cloned/version/medusa
`
  );
  process.exit();
}

// Get the list of packages that live in the monorepo.
const monoRepoPackages = [];
const pkgsDirs = fs.readdirSync(path.join(medusaLocation, `packages`));
for (const dir of pkgsDirs) {
  const pack = JSON.parse(
    fs.readFileSync(path.join(medusaLocation, `packages`, dir, `package.json`))
  );
  monoRepoPackages.push(pack.name);
}

const localPkg = JSON.parse(fs.readFileSync(`package.json`));
// Intersect dependencies with monoRepoPackages to get the list of monorepo
// packages this project actually depends on.
const localPackages = _.intersection(
  monoRepoPackages,
  Object.keys(_.merge({}, localPkg.dependencies, localPkg.devDependencies))
);

if (!argv.packages && _.isEmpty(localPackages)) {
  // BUG FIX: the message previously said "--forceInstall", which is not the
  // flag defined above (`force-install`); also fixed "into" -> "in".
  console.error(
    `
You haven't got any medusa dependencies in your current package.json
You probably want to pass in a list of packages to start
developing on! For example:
medusa-dev --packages @medusajs/medusa
If you prefer to place them in your package.json dependencies instead,
medusa-dev will pick them up.
`
  );
  if (!argv.forceInstall) {
    process.exit();
  } else {
    console.log(
      `Continuing other dependencies installation due to "--force-install" flag`
    );
  }
}

watch(medusaLocation, argv.packages, {
  localPackages,
  quiet: argv.quiet,
  scanOnce: argv.scanOnce,
  forceInstall: argv.forceInstall,
  monoRepoPackages,
});

View File

@@ -0,0 +1,19 @@
const signalExit = require(`signal-exit`);
const cleanupTasks = new Set();
exports.registerCleanupTask = (taskFn) => {
cleanupTasks.add(taskFn);
return () => {
const result = taskFn();
cleanupTasks.delete(taskFn);
return result;
};
};
// On process exit, flush any cleanup tasks that were never deregistered so
// an interrupted publish doesn't leave modified files behind.
// BUG FIX: corrected "exitted" -> "exited" in the log message.
signalExit(() => {
  if (cleanupTasks.size) {
    console.log(`Process exited in middle of publishing - cleaning up`);
    cleanupTasks.forEach((taskFn) => taskFn());
  }
});

View File

@@ -0,0 +1,75 @@
const startVerdaccio = require(`verdaccio`).default;
const fs = require(`fs-extra`);
const _ = require(`lodash`);
let VerdaccioInitPromise = null;
const { verdaccioConfig } = require(`./verdaccio-config`);
const { publishPackage } = require(`./publish-package`);
const { installPackages } = require(`./install-packages`);
// Start the local verdaccio registry at most once; the init promise is
// memoized so concurrent callers all await the same server instance.
const startServer = () => {
  if (VerdaccioInitPromise) {
    // Already starting/started - reuse the in-flight promise.
    return VerdaccioInitPromise;
  }
  console.log(`Starting local verdaccio server`);
  // clear storage so stale dev packages from a previous run are dropped
  fs.removeSync(verdaccioConfig.storage);
  VerdaccioInitPromise = new Promise((resolve) => {
    startVerdaccio(
      verdaccioConfig,
      verdaccioConfig.port,
      verdaccioConfig.storage,
      `1.0.0`,
      `medusa-dev`,
      (webServer, addr, pkgName, pkgVersion) => {
        // console.log(webServer)
        // Resolve only once the HTTP server is actually listening.
        webServer.listen(addr.port || addr.path, addr.host, () => {
          console.log(`Started local verdaccio server`);
          resolve();
        });
      }
    );
  });
  return VerdaccioInitPromise;
};
exports.startVerdaccio = startServer;
exports.publishPackagesLocallyAndInstall = async ({
packagesToPublish,
localPackages,
root,
ignorePackageJSONChanges,
yarnWorkspaceRoot,
}) => {
await startServer();
const versionPostFix = Date.now();
const newlyPublishedPackageVersions = {};
for (const packageName of packagesToPublish) {
newlyPublishedPackageVersions[packageName] = await publishPackage({
packageName,
packagesToPublish,
root,
versionPostFix,
ignorePackageJSONChanges,
});
}
const packagesToInstall = _.intersection(packagesToPublish, localPackages);
await installPackages({
packagesToInstall,
yarnWorkspaceRoot,
newlyPublishedPackageVersions,
});
};

View File

@@ -0,0 +1,151 @@
const path = require(`path`)
const fs = require(`fs-extra`)
const { promisifiedSpawn } = require(`../utils/promisified-spawn`)
const { registryUrl } = require(`./verdaccio-config`)
/**
 * Install freshly published dev packages from the local verdaccio registry.
 *
 * Two modes:
 *  - yarn workspace: rewrite workspace package.json dependencies to the
 *    `medusa-dev` dist-tag and run a registry-scoped `yarn install`.
 *  - plain project: `yarn add` each package at its exact new dev version.
 */
const installPackages = async ({
  packagesToInstall,
  yarnWorkspaceRoot,
  newlyPublishedPackageVersions,
}) => {
  console.log(
    `Installing packages from local registry:\n${packagesToInstall
      .map(packageAndVersion => ` - ${packageAndVersion}`)
      .join(`\n`)}`
  )
  let installCmd
  if (yarnWorkspaceRoot) {
    // this is very hacky - given root, we run `yarn workspaces info`
    // to get list of all workspaces and their locations, and manually
    // edit package.json file for packages we want to install
    // to make sure there are no mismatched versions of same package
    // in workspaces which should preserve node_modules structure
    // (packages being mostly hoisted to top-level node_modules)
    const { stdout } = await promisifiedSpawn([
      `yarn`,
      [`workspaces`, `info`, `--json`],
      { stdio: `pipe` },
    ])

    let workspacesLayout
    try {
      workspacesLayout = JSON.parse(JSON.parse(stdout).data)
    } catch (e) {
      /*
        Yarn 1.22 doesn't output pure json - it has leading and trailing text:
        ```
        $ yarn workspaces info --json
        yarn workspaces v1.22.0
        {
          "z": {
            "location": "z",
            "workspaceDependencies": [],
            "mismatchedWorkspaceDependencies": []
          },
          "y": {
            "location": "y",
            "workspaceDependencies": [],
            "mismatchedWorkspaceDependencies": []
          }
        }
        Done in 0.48s.
        ```
        So we need to do some sanitization. We find JSON by matching substring
        that starts with `{` and ends with `}`
      */
      const regex = /^[^{]*({.*})[^}]*$/gs
      const sanitizedStdOut = regex.exec(stdout)
      if (sanitizedStdOut?.length >= 2) {
        // pick content of first (and only) capturing group
        const jsonString = sanitizedStdOut[1]
        try {
          workspacesLayout = JSON.parse(jsonString)
        } catch (e) {
          console.error(
            `Failed to parse "sanitized" output of "yarn workspaces info" command.\n\nSanitized string: "${jsonString}`
          )
          // not exiting here, because we have a general check for `workspacesLayout` being set below
        }
      }
    }
    if (!workspacesLayout) {
      console.error(
        `Couldn't parse output of "yarn workspaces info" command`,
        stdout
      )
      process.exit(1)
    }

    // BUG FIX: dependency selectors must point at the `medusa-dev` dist-tag.
    // publish-package.js publishes with `--tag medusa-dev`, so the previous
    // `gatsby-dev` value (left over from gatsby-dev-cli) could never resolve
    // against the local registry.
    const handleDeps = deps => {
      if (!deps) {
        return false
      }
      let changed = false
      Object.keys(deps).forEach(depName => {
        if (packagesToInstall.includes(depName)) {
          deps[depName] = `medusa-dev`
          changed = true
        }
      })
      return changed
    }

    Object.keys(workspacesLayout).forEach(workspaceName => {
      const { location } = workspacesLayout[workspaceName]
      const pkgJsonPath = path.join(yarnWorkspaceRoot, location, `package.json`)
      if (!fs.existsSync(pkgJsonPath)) {
        return
      }
      const pkg = JSON.parse(fs.readFileSync(pkgJsonPath, `utf8`))
      let changed = false
      changed |= handleDeps(pkg.dependencies)
      changed |= handleDeps(pkg.devDependencies)
      changed |= handleDeps(pkg.peerDependencies)
      if (changed) {
        console.log(`Changing deps in ${pkgJsonPath} to use medusa-dev`)
        fs.outputJSONSync(pkgJsonPath, pkg, {
          spaces: 2,
        })
      }
    })

    // package.json files are changed - so we just want to install
    // using verdaccio registry
    installCmd = [
      `yarn`,
      [`install`, `--registry=${registryUrl}`, `--ignore-engines`],
    ]
  } else {
    installCmd = [
      `yarn`,
      [
        `add`,
        ...packagesToInstall.map(packageName => {
          const packageVersion = newlyPublishedPackageVersions[packageName]
          return `${packageName}@${packageVersion}`
        }),
        `--registry=${registryUrl}`,
        `--exact`,
        `--ignore-engines`,
      ],
    ]
  }
  try {
    await promisifiedSpawn(installCmd)
    console.log(`Installation complete`)
  } catch (error) {
    console.error(`Installation failed`, error)
    process.exit(1)
  }
}
exports.installPackages = installPackages

View File

@@ -0,0 +1,153 @@
const fs = require(`fs-extra`);
const path = require(`path`);
const { promisifiedSpawn } = require(`../utils/promisified-spawn`);
const { registryUrl } = require(`./verdaccio-config`);
const NPMRCContent = `${registryUrl.replace(
/https?:/g,
``
)}/:_authToken="medusa-dev"`;
const {
getMonorepoPackageJsonPath,
} = require(`../utils/get-monorepo-package-json-path`);
const { registerCleanupTask } = require(`./cleanup-tasks`);
/**
 * Edit a monorepo package's package.json to:
 * - adjust its version to a temporary `-dev-<timestamp>` one
 * - change version selectors for dependencies that will also be published,
 *   so yarn resolves them from the local registry when installing
 *
 * Returns the new (temporary) version plus an undo function that restores
 * the original package.json and is also registered as a cleanup task.
 */
const adjustPackageJson = ({
  monoRepoPackageJsonPath,
  packageName,
  versionPostFix,
  packagesToPublish,
  ignorePackageJSONChanges,
  root,
}) => {
  // If this package depends on any other package that will be published,
  // point that dependency's selector at the dev version so the local
  // registry is used for it too.
  const monorepoPKGjsonString = fs.readFileSync(
    monoRepoPackageJsonPath,
    `utf-8`
  );
  const monorepoPKGjson = JSON.parse(monorepoPKGjsonString);
  monorepoPKGjson.version = `${monorepoPKGjson.version}-dev-${versionPostFix}`;
  packagesToPublish.forEach((packageThatWillBePublished) => {
    if (
      monorepoPKGjson.dependencies &&
      monorepoPKGjson.dependencies[packageThatWillBePublished]
    ) {
      // Read the dependency's current version from its own package.json so
      // the dev selector matches the version it will be published under.
      const currentVersion = JSON.parse(
        fs.readFileSync(
          getMonorepoPackageJsonPath({
            packageName: packageThatWillBePublished,
            root,
          }),
          `utf-8`
        )
      ).version;
      monorepoPKGjson.dependencies[
        packageThatWillBePublished
      ] = `${currentVersion}-dev-${versionPostFix}`;
    }
  });
  const temporaryMonorepoPKGjsonString = JSON.stringify(monorepoPKGjson);
  // Tell the file watcher to ignore exactly this old->new content change.
  const unignorePackageJSONChanges = ignorePackageJSONChanges(packageName, [
    monorepoPKGjsonString,
    temporaryMonorepoPKGjsonString,
  ]);
  // change version and dependency versions
  fs.outputFileSync(monoRepoPackageJsonPath, temporaryMonorepoPKGjsonString);
  return {
    newPackageVersion: monorepoPKGjson.version,
    unadjustPackageJson: registerCleanupTask(() => {
      // restore original package.json
      fs.outputFileSync(monoRepoPackageJsonPath, monorepoPKGjsonString);
      unignorePackageJSONChanges();
    }),
  };
};
/**
 * Anonymous publishing requires a dummy .npmrc
 * See https://github.com/verdaccio/verdaccio/issues/212#issuecomment-308578500
 * This is an `npm publish` / `yarn publish` requirement (as in the linked
 * comment), not a verdaccio restriction.
 */
const createTemporaryNPMRC = ({ pathToPackage }) => {
  const npmrcPath = path.join(pathToPackage, `.npmrc`);
  fs.outputFileSync(npmrcPath, NPMRCContent);
  // The returned function deletes the file and deregisters the cleanup task.
  return registerCleanupTask(() => fs.removeSync(npmrcPath));
};
/**
 * Publish one monorepo package to the local verdaccio registry under the
 * `medusa-dev` dist-tag, using a temporary `-dev-<timestamp>` version.
 * The package.json and .npmrc tweaks are reverted before returning.
 * Returns the temporary version that was published.
 */
const publishPackage = async ({
  packageName,
  packagesToPublish,
  root,
  versionPostFix,
  ignorePackageJSONChanges,
}) => {
  const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({
    packageName,
    root,
  });

  // Temporarily rewrite the version and internal dependency selectors.
  const { unadjustPackageJson, newPackageVersion } = adjustPackageJson({
    monoRepoPackageJsonPath,
    packageName,
    root,
    versionPostFix,
    packagesToPublish,
    ignorePackageJSONChanges,
  });

  const pathToPackage = path.dirname(monoRepoPackageJsonPath);
  const uncreateTemporaryNPMRC = createTemporaryNPMRC({ pathToPackage });

  // npm publish
  const publishCmd = [
    `npm`,
    [`publish`, `--tag`, `medusa-dev`, `--registry=${registryUrl}`],
    { cwd: pathToPackage },
  ];

  console.log(
    `Publishing ${packageName}@${newPackageVersion} to local registry`
  );
  try {
    await promisifiedSpawn(publishCmd);
  } catch (e) {
    console.error(`Failed to publish ${packageName}@${newPackageVersion}`, e);
    process.exit(1);
  }
  console.log(
    `Published ${packageName}@${newPackageVersion} to local registry`
  );

  // Undo the temporary .npmrc and package.json edits.
  uncreateTemporaryNPMRC();
  unadjustPackageJson();

  return newPackageVersion;
};
exports.publishPackage = publishPackage;

View File

@@ -0,0 +1,33 @@
const path = require(`path`)
const os = require(`os`)
// Configuration for the throwaway local verdaccio registry used by medusa-dev.
const storagePath = path.join(os.tmpdir(), `verdaccio`, `storage`)
const verdaccioConfig = {
  storage: storagePath,
  port: 4873, // verdaccio's default port
  max_body_size: `1000mb`,
  web: {
    enable: true,
    title: `gatsby-dev`,
  },
  // Keep stdout readable: only warnings and worse.
  logs: [{ type: `stdout`, format: `pretty-timestamped`, level: `warn` }],
  packages: {
    // Anyone may read and publish any package; unknown ones proxy to npm.
    "**": {
      access: `$all`,
      publish: `$all`,
      proxy: `npmjs`,
    },
  },
  uplinks: {
    npmjs: {
      url: `https://registry.npmjs.org/`,
      // default is 2 max_fails - on flaky networks that cause a lot of failed installations
      max_fails: 10,
    },
  },
}
exports.verdaccioConfig = verdaccioConfig
// Registry endpoint derived from the configured port (default 4873).
const registryUrl = `http://localhost:${verdaccioConfig.port}`
exports.registryUrl = registryUrl

View File

@@ -0,0 +1,29 @@
const { getDependantPackages } = require(`../get-dependant-packages`)
describe(`getDependantPackages`, () => {
  it(`handles deep dependency chains`, () => {
    // a <- a-dep1 <- a-dep1-dep1; the unrelated pair must not leak in.
    const depTree = {
      "package-a-dep1": new Set([`package-a`]),
      "package-a-dep1-dep1": new Set([`package-a-dep1`]),
      "not-related": new Set([`also-not-related`]),
    }

    const result = getDependantPackages({
      packageName: `package-a-dep1-dep1`,
      depTree,
    })

    expect(result).toEqual(
      new Set([`package-a`, `package-a-dep1`, `package-a-dep1-dep1`])
    )
  })

  it(`doesn't get stuck in circular dependency loops`, () => {
    // a and b depend on each other; traversal must terminate.
    const depTree = {
      "package-a": new Set([`package-b`]),
      "package-b": new Set([`package-a`]),
    }

    const result = getDependantPackages({ packageName: `package-a`, depTree })

    expect(result).toEqual(new Set([`package-a`, `package-b`]))
  })
})

View File

@@ -0,0 +1,68 @@
const path = require(`path`)
const { traversePackagesDeps } = require(`../traverse-package-deps`)
// Registers a virtual package.json for a fake monorepo package so that
// `require(<monorepo-path>/packages/<name>/package.json)` resolves in tests.
// jest.doMock (unlike jest.mock) is not hoisted, so a helper is safe here.
const mockPackageJson = (packageName, dependencies) => {
  jest.doMock(
    path.join(
      ...`<monorepo-path>/packages/${packageName}/package.json`.split(`/`)
    ),
    () => ({ dependencies }),
    { virtual: true }
  )
}

mockPackageJson(`package-a`, {
  "unrelated-package": `*`,
  "package-a-dep1": `*`,
})
mockPackageJson(`package-a-dep1`, {
  "package-a-dep1-dep1": `*`,
})
mockPackageJson(`package-a-dep1-dep1`, {})
describe(`traversePackageDeps`, () => {
  it(`handles deep dependency chains`, () => {
    const monoRepoPackages = [
      `package-a`,
      `package-a-dep1`,
      `package-a-dep1-dep1`,
      `package-not-used`,
    ]

    const { seenPackages, depTree } = traversePackagesDeps({
      root: `<monorepo-path>`,
      packages: [`package-a`, `doesnt-exist`],
      monoRepoPackages,
    })

    // `doesnt-exist` is dropped; `package-not-used` is never reached.
    expect(seenPackages).toEqual([
      `package-a`,
      `package-a-dep1`,
      `package-a-dep1-dep1`,
    ])
    expect(depTree).toEqual({
      "package-a-dep1": new Set([`package-a`]),
      "package-a-dep1-dep1": new Set([`package-a-dep1`]),
    })
  })
})

View File

@@ -0,0 +1,190 @@
const fs = require(`fs-extra`);
const _ = require(`lodash`);
const {
getMonorepoPackageJsonPath,
} = require(`./get-monorepo-package-json-path`);
const got = require(`got`);
/**
 * Returns the subset of `object` whose values differ from the corresponding
 * values in `base` (deep comparison; nested objects are diffed recursively).
 * Keys that exist only in `base` are ignored.
 */
function difference(object, base) {
  const changes = (obj, against) =>
    _.transform(obj, (result, value, key) => {
      if (_.isEqual(value, against[key])) {
        return
      }
      // Recurse only when both sides are objects; otherwise take the new value.
      result[key] =
        _.isObject(value) && _.isObject(against[key])
          ? changes(value, against[key])
          : value
    });
  return changes(object, base);
}
/**
* Compare dependencies of installed packages and monorepo package.
* It will skip dependencies that are removed in monorepo package.
*
* If local package is not installed, it will check unpkg.com.
 * This allows medusa-dev to skip publishing unnecessarily and
 * to install packages from the public npm repository if nothing changed.
*/
/**
 * Compares the dependencies of the installed copy of `packageName` (or its
 * published package.json from unpkg when not installed) against the monorepo
 * copy, and decides whether the package needs publishing to the local registry.
 *
 * @returns {Promise<{didDepsChanged: boolean, packageNotInstalled: boolean}>}
 */
exports.checkDepsChanges = async ({
  newPath,
  packageName,
  monoRepoPackages,
  root,
  isInitialScan,
  ignoredPackageJSON,
}) => {
  let localPKGjson;
  let packageNotInstalled = false;
  try {
    localPKGjson = JSON.parse(fs.readFileSync(newPath, `utf-8`));
  } catch {
    packageNotInstalled = true;
    // there is no local package - so we still need to install deps
    // this is nice because devs won't need to do initial package installation - we can handle this.
    if (!isInitialScan) {
      console.log(
        `'${packageName}' doesn't seem to be installed. Restart medusa-dev to publish it`
      );
      return {
        didDepsChanged: false,
        packageNotInstalled,
      };
    }
    // if the package is not installed, we do an http GET request to
    // unpkg to check whether the dependencies of the package published in the
    // public npm repository are different -
    // this allows us to not publish to the local repository
    // and save some time/work
    try {
      const response = await got(
        `https://unpkg.com/${packageName}/package.json`
      );
      if (response?.statusCode !== 200) {
        throw new Error(`No response or non 200 code`);
      }
      localPKGjson = JSON.parse(response.body);
    } catch {
      // not installed AND not on npm: must publish locally.
      console.log(
        `'${packageName}' doesn't seem to be installed and is not published on NPM.`
      );
      return {
        didDepsChanged: true,
        packageNotInstalled,
      };
    }
  }
  // Scoped packages ("@medusajs/foo") live in packages/foo in the monorepo.
  const monoDir = packageName.startsWith("@medusajs")
    ? packageName.split("/")[1]
    : packageName;
  const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({
    packageName: monoDir,
    root,
  });
  const monorepoPKGjsonString = fs.readFileSync(
    monoRepoPackageJsonPath,
    `utf-8`
  );
  const monorepoPKGjson = JSON.parse(monorepoPKGjsonString);
  if (ignoredPackageJSON.has(packageName)) {
    if (ignoredPackageJSON.get(packageName).includes(monorepoPKGjsonString)) {
      // we are in middle of publishing and content of package.json is one set during publish process,
      // so we need to not cause false positives
      return {
        didDepsChanged: false,
        packageNotInstalled,
      };
    }
  }
  // Normalize so the comparison below never dereferences undefined.
  if (!monorepoPKGjson.dependencies) monorepoPKGjson.dependencies = {};
  if (!localPKGjson.dependencies) localPKGjson.dependencies = {};
  const areDepsEqual = _.isEqual(
    monorepoPKGjson.dependencies,
    localPKGjson.dependencies
  );
  if (!areDepsEqual) {
    // Diff in both directions to catch added, removed and changed entries.
    const diff = difference(
      monorepoPKGjson.dependencies,
      localPKGjson.dependencies
    );
    const diff2 = difference(
      localPKGjson.dependencies,
      monorepoPKGjson.dependencies
    );
    let needPublishing = false;
    let isPublishing = false;
    const depChangeLog = _.uniq(Object.keys({ ...diff, ...diff2 }))
      .reduce((acc, key) => {
        if (monorepoPKGjson.dependencies[key] === `medusa-dev`) {
          // if we are in middle of publishing to local repository - ignore
          isPublishing = true;
          return acc;
        }
        if (localPKGjson.dependencies[key] === `medusa-dev`) {
          // monorepo packages will restore version, but after installation
          // in local site - it will use `medusa-dev` dist tag - we need
          // to ignore changes that
          return acc;
        }
        if (
          localPKGjson.dependencies[key] &&
          monorepoPKGjson.dependencies[key]
        ) {
          // Check only for version changes in packages
          // that are not from medusa repo.
          // Changes in medusa packages will be copied over
          // from monorepo - and if those contain other dependency
          // changes - they will be covered
          if (!monoRepoPackages.includes(key)) {
            acc.push(
              ` - '${key}' changed version from ${localPKGjson.dependencies[key]} to ${monorepoPKGjson.dependencies[key]}`
            );
            needPublishing = true;
          }
        } else if (monorepoPKGjson.dependencies[key]) {
          acc.push(
            ` - '${key}@${monorepoPKGjson.dependencies[key]}' was added`
          );
          needPublishing = true;
        } else {
          acc.push(` - '${key}@${localPKGjson.dependencies[key]}' was removed`);
          // this doesn't need publishing really, so will skip this
        }
        return acc;
      }, [])
      .join(`\n`);
    if (!isPublishing && depChangeLog.length > 0) {
      console.log(`Dependencies of '${packageName}' changed:\n${depChangeLog}`);
      if (isInitialScan) {
        console.log(
          `Will ${!needPublishing ? `not ` : ``}publish to local npm registry.`
        );
      } else {
        console.warn(
          `Installation of dependencies after initial scan is not implemented`
        );
      }
      return {
        didDepsChanged: needPublishing,
        packageNotInstalled,
      };
    }
  }
  return {
    didDepsChanged: false,
    packageNotInstalled,
  };
};

View File

@@ -0,0 +1,30 @@
/**
* Recursively get set of packages that depend on given package.
* Set also includes passed package.
*/
/**
 * Recursively collects every package that (transitively) depends on
 * `packageName`, including `packageName` itself.
 * @param {String} packageName package whose dependants are wanted
 * @param {Object} depTree package name -> Set of packages depending on it
 * @param {Set} [packagesToPublish] accumulator shared across recursive calls
 * @returns {Set} accumulated package names
 */
const getDependantPackages = ({
  packageName,
  depTree,
  packagesToPublish = new Set(),
}) => {
  // Already visited — nothing to add (this also breaks dependency cycles).
  if (packagesToPublish.has(packageName)) {
    return packagesToPublish
  }
  packagesToPublish.add(packageName)
  for (const dependant of depTree[packageName] ?? []) {
    getDependantPackages({
      packageName: dependant,
      depTree,
      packagesToPublish,
    })
  }
  return packagesToPublish
}
// Public API of this module.
exports.getDependantPackages = getDependantPackages

View File

@@ -0,0 +1,11 @@
const path = require(`path`);
exports.getMonorepoPackageJsonPath = ({ packageName, root }) => {
let dirName = packageName;
if (packageName.startsWith("@medusajs")) {
const [, directory] = packageName.split("/");
dirName = directory;
}
return path.join(root, `packages`, dirName, `package.json`);
};

View File

@@ -0,0 +1,29 @@
const execa = require(`execa`)
const defaultSpawnArgs = {
cwd: process.cwd(),
stdio: `inherit`,
}
exports.setDefaultSpawnStdio = stdio => {
defaultSpawnArgs.stdio = stdio
}
exports.promisifiedSpawn = async ([cmd, args = [], spawnArgs = {}]) => {
const spawnOptions = {
...defaultSpawnArgs,
...spawnArgs,
}
try {
return await execa(cmd, args, spawnOptions)
} catch (e) {
if (spawnOptions.stdio === `ignore`) {
console.log(
`\nCommand "${cmd} ${args.join(
` `
)}" failed.\nTo see details of failed command, rerun "medusa-dev" without "--quiet" or "-q" switch\n`
)
}
throw e
}
}

View File

@@ -0,0 +1,76 @@
const _ = require(`lodash`);
const path = require(`path`);
/**
* @typedef {Object} TraversePackagesDepsReturn
* @property {Object} depTree Lookup table to check dependants for given package.
* Used to determine which packages need to be published.
*/
/**
* Compile final list of packages to watch
* This will include packages explicitly defined packages and all their dependencies
* Also creates dependency graph that is used later to determine which packages
* would need to be published when their dependencies change
* @param {Object} $0
* @param {String} $0.root Path to root of medusa monorepo repository
* @param {String[]} $0.packages Initial array of packages to watch
* This can be extracted from project dependencies or explicitly set by `--packages` flag
* @param {String[]} $0.monoRepoPackages Array of packages in medusa monorepo
* @param {String[]} [$0.seenPackages] Array of packages that were already traversed.
* This makes sure dependencies are extracted one time for each package and avoid any
* infinite loops.
* @param {DepTree} [$0.depTree] Used internally to recursively construct dependency graph.
* @return {TraversePackagesDepsReturn}
*/
const traversePackagesDeps = ({
  root,
  packages,
  monoRepoPackages,
  seenPackages = [...packages],
  depTree = {},
}) => {
  packages.forEach((p) => {
    // Scoped packages ("@medusajs/foo") live in packages/foo.
    if (p.startsWith("@medusajs")) {
      p = p.split("/")[1];
    }
    let pkgJson;
    try {
      pkgJson = require(path.join(root, `packages`, p, `package.json`));
    } catch {
      console.error(`"${p}" package doesn't exist in monorepo.`);
      // remove from seenPackages
      // NOTE(review): reassigning the parameter creates a NEW array, so this
      // removal is not visible to outer recursive frames that still hold the
      // old array — confirm that is intended.
      seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p);
      return;
    }
    // Dependencies of `p` that are themselves monorepo packages.
    const fromMonoRepo = _.intersection(
      Object.keys({ ...pkgJson.dependencies }),
      monoRepoPackages
    );
    fromMonoRepo.forEach((pkgName) => {
      // Record the reverse edge: pkgName is depended on by p.
      depTree[pkgName] = (depTree[pkgName] || new Set()).add(p);
    });
    // only traverse not yet seen packages to avoid infinite loops
    const newPackages = _.difference(fromMonoRepo, seenPackages);
    if (newPackages.length) {
      newPackages.forEach((depFromMonorepo) => {
        seenPackages.push(depFromMonorepo);
      });
      // Recurse with the shared accumulators; results merge into depTree.
      traversePackagesDeps({
        root,
        packages: fromMonoRepo,
        monoRepoPackages,
        seenPackages,
        depTree,
      });
    }
  });
  return { seenPackages, depTree };
};
exports.traversePackagesDeps = traversePackagesDeps;

View File

@@ -0,0 +1,4 @@
exports.getVersionInfo = () => {
const { version: devCliVersion } = require(`../../package.json`);
return `Medusa Dev CLI version: ${devCliVersion}`;
};

View File

@@ -0,0 +1,352 @@
const chokidar = require(`chokidar`);
const _ = require(`lodash`);
const del = require(`del`);
const fs = require(`fs-extra`);
const path = require(`path`);
const findWorkspaceRoot = require(`find-yarn-workspace-root`);
const { publishPackagesLocallyAndInstall } = require(`./local-npm-registry`);
const { checkDepsChanges } = require(`./utils/check-deps-changes`);
const { getDependantPackages } = require(`./utils/get-dependant-packages`);
const {
setDefaultSpawnStdio,
promisifiedSpawn,
} = require(`./utils/promisified-spawn`);
const { traversePackagesDeps } = require(`./utils/traverse-package-deps`);
// Running total of files copied into the consumer's node_modules.
let numCopied = 0;
// Prints a summary and terminates (used when --scan-once completes).
const quit = () => {
  console.log(`Copied ${numCopied} files`);
  process.exit();
};
// fs.copy can fail transiently; retries use exponential backoff (see realCopyPath).
const MAX_COPY_RETRIES = 3;
/*
 * Chokidar breaks its on('ready') event when asked to watch non-existent
 * paths, so those are filtered out before watching.
 * See: https://github.com/paulmillr/chokidar/issues/449
 */
/**
 * Watches monorepo package sources and mirrors compiled files into the
 * consumer project's node_modules. When package.json dependency changes (or a
 * yarn workspace / forceInstall) require a real install, packages are instead
 * published to the local verdaccio registry and installed from there.
 *
 * @param {String} root path to the medusa monorepo root
 * @param {String[]} packages packages explicitly requested (may be undefined)
 * @param {Object} options flags (scanOnce, quiet, forceInstall) and package lists
 */
async function watch(
  root,
  packages,
  { scanOnce, quiet, forceInstall, monoRepoPackages, localPackages }
) {
  setDefaultSpawnStdio(quiet ? `ignore` : `inherit`);
  // determine if in yarn workspace - if in workspace, force using verdaccio
  // as current logic of copying files will not work correctly.
  const yarnWorkspaceRoot = findWorkspaceRoot();
  if (yarnWorkspaceRoot && process.env.NODE_ENV !== `test`) {
    console.log(`Yarn workspace found.`);
    forceInstall = true;
  }
  // Copies are queued until any needed install finishes, then flushed.
  let afterPackageInstallation = false;
  let queuedCopies = [];
  // Copies one file, retrying with exponential backoff on transient fs errors.
  const realCopyPath = (arg) => {
    const { oldPath, newPath, quiet, resolve, reject, retry = 0 } = arg;
    fs.copy(oldPath, newPath, (err) => {
      if (err) {
        if (retry >= MAX_COPY_RETRIES) {
          console.error(err);
          reject(err);
          return;
        } else {
          setTimeout(
            () => realCopyPath({ ...arg, retry: retry + 1 }),
            500 * Math.pow(2, retry)
          );
          return;
        }
      }
      // When the medusa binary is copied over, it is not setup with the executable
      // permissions that it is given when installed via yarn.
      // This fixes the issue where after running medusa-dev, running `yarn medusa develop`
      // fails with a permission issue.
      // NOTE(review): the first alternative matches "bin/meduas.js" — looks
      // like a typo for "bin/medusa.js"; confirm which path is intended.
      if (/(bin\/meduas.js|medusa(-cli)?\/cli.js)$/.test(newPath)) {
        fs.chmodSync(newPath, `0755`);
      }
      numCopied += 1;
      if (!quiet) {
        console.log(`Copied ${oldPath} to ${newPath}`);
      }
      resolve();
    });
  };
  // Either copies immediately or queues the copy until installation is done.
  const copyPath = (oldPath, newPath, quiet, packageName) =>
    new Promise((resolve, reject) => {
      const argObj = { oldPath, newPath, quiet, packageName, resolve, reject };
      if (afterPackageInstallation) {
        realCopyPath(argObj);
      } else {
        queuedCopies.push(argObj);
      }
    });
  // Flushes the queue; from then on copies happen immediately.
  const runQueuedCopies = () => {
    afterPackageInstallation = true;
    queuedCopies.forEach((argObj) => realCopyPath(argObj));
    queuedCopies = [];
  };
  // Removes stale compiled JS from node_modules for packages about to be
  // re-copied, so deleted source files don't leave orphaned output behind.
  const clearJSFilesFromNodeModules = async () => {
    const packagesToClear = queuedCopies.reduce((acc, { packageName }) => {
      if (packageName) {
        acc.add(packageName);
      }
      return acc;
    }, new Set());
    await Promise.all(
      [...packagesToClear].map(
        async (packageToClear) =>
          await del([
            `node_modules/${packageToClear}/**/*.{js,js.map}`,
            `!node_modules/${packageToClear}/node_modules/**/*.{js,js.map}`,
            `!node_modules/${packageToClear}/src/**/*.{js,js.map}`,
          ])
      )
    );
  };
  // check packages deps and if they depend on other packages from monorepo
  // add them to packages list
  const { seenPackages, depTree } = traversePackagesDeps({
    root,
    packages: _.uniq(localPackages),
    monoRepoPackages,
  });
  const allPackagesToWatch = packages
    ? _.intersection(packages, seenPackages)
    : seenPackages;
  // packageName -> acceptable package.json contents while a publish is in
  // flight, so watcher events during publish don't count as dep changes.
  const ignoredPackageJSON = new Map();
  const ignorePackageJSONChanges = (packageName, contentArray) => {
    ignoredPackageJSON.set(packageName, contentArray);
    return () => {
      ignoredPackageJSON.delete(packageName);
    };
  };
  if (forceInstall) {
    // One-shot mode: publish + install everything, then exit.
    try {
      if (allPackagesToWatch.length > 0) {
        await publishPackagesLocallyAndInstall({
          packagesToPublish: allPackagesToWatch,
          root,
          localPackages,
          ignorePackageJSONChanges,
          yarnWorkspaceRoot,
        });
      } else {
        // run `yarn`
        const yarnInstallCmd = [`yarn`];
        console.log(`Installing packages from public NPM registry`);
        await promisifiedSpawn(yarnInstallCmd);
        console.log(`Installation complete`);
      }
    } catch (e) {
      console.log(e);
    }
    process.exit();
  }
  if (allPackagesToWatch.length === 0) {
    console.error(`There are no packages to watch.`);
    return;
  }
  // Strip "@medusajs/" scope: directories in packages/ are unscoped.
  const cleanToWatch = allPackagesToWatch.map((pkgName) => {
    if (pkgName.startsWith(`@medusajs`)) {
      return pkgName.split("/")[1];
    }
    return pkgName;
  });
  // Only built output is interesting; sources, tests, VCS files are ignored.
  const ignored = [
    /[/\\]node_modules[/\\]/i,
    /\.git/i,
    /\.DS_Store/,
    /[/\\]__tests__[/\\]/i,
    /[/\\]__mocks__[/\\]/i,
    /\.npmrc/i,
  ].concat(
    cleanToWatch.map((p) => new RegExp(`${p}[\\/\\\\]src[\\/\\\\]`, `i`))
  );
  // Existing package directories to watch (see chokidar note above).
  const watchers = _.uniq(
    cleanToWatch
      .map((p) => path.join(root, `/packages/`, p))
      .filter((p) => fs.existsSync(p))
  );
  let allCopies = [];
  const packagesToPublish = new Set();
  let isInitialScan = true;
  let isPublishing = false;
  // Pending async dep checks that must settle before the `ready` handler
  // decides whether to publish/install.
  const waitFor = new Set();
  let anyPackageNotInstalled = false;
  const watchEvents = [`change`, `add`];
  chokidar
    .watch(watchers, {
      ignored: [(filePath) => _.some(ignored, (reg) => reg.test(filePath))],
    })
    .on(`all`, async (event, filePath) => {
      if (!watchEvents.includes(event)) {
        return;
      }
      // Directory name of the package the changed file belongs to.
      const [pack] = filePath
        .split(/packages[/\\]/)
        .pop()
        .split(/[/\\]/);
      const sourcePkg = JSON.parse(
        fs.readFileSync(path.join(root, `/packages/`, pack, `package.json`))
      );
      const packageName = sourcePkg.name;
      const prefix = path.join(root, `/packages/`, pack);
      // Copy it over local version.
      // Don't copy over the medusa bin file as that breaks the NPM symlink.
      if (_.includes(filePath, `dist/medusa-cli.js`)) {
        return;
      }
      const relativePackageFile = path.relative(prefix, filePath);
      const newPath = path.join(
        `./node_modules/${packageName}`,
        relativePackageFile
      );
      if (relativePackageFile === `package.json`) {
        // package.json files will change during publish to adjust version of package (and dependencies), so ignore
        // changes during this process
        if (isPublishing) {
          return;
        }
        // Compare dependencies with local version
        const didDepsChangedPromise = checkDepsChanges({
          newPath,
          packageName,
          monoRepoPackages,
          root,
          isInitialScan,
          ignoredPackageJSON,
        });
        if (isInitialScan) {
          // normally checkDepsChanges would be sync,
          // but because it also can do async GET request
          // to unpkg if local package is not installed
          // keep track of it to make sure all of it
          // finish before installing
          waitFor.add(didDepsChangedPromise);
        }
        const {
          didDepsChanged,
          packageNotInstalled,
        } = await didDepsChangedPromise;
        if (packageNotInstalled) {
          anyPackageNotInstalled = true;
        }
        if (didDepsChanged) {
          if (isInitialScan) {
            waitFor.delete(didDepsChangedPromise);
            // handle dependency change only in initial scan - this is for sure doable to
            // handle this in watching mode correctly - but for the sake of shipping
            // this I limit more work/time consuming edge cases.
            // Dependency changed - now we need to figure out
            // the packages that actually need to be published.
            // If package with changed dependencies is dependency of other
            // medusa package - like for example `medusa-plugin-page-creator`
            // we need to publish both `medusa-plugin-page-creator` and `medusa`
            // and install `medusa` in example site project.
            // NOTE(review): `packages` is not a parameter accepted by
            // getDependantPackages — harmless today, but confirm.
            getDependantPackages({
              packageName,
              depTree,
              packages,
            }).forEach((packageToPublish) => {
              // scheduling publish - we will publish when `ready` is emitted
              // as we can do single publish then
              packagesToPublish.add(packageToPublish);
            });
          }
        }
        // don't ever copy package.json as this will mess up any future dependency
        // changes checks
        return;
      }
      const localCopies = [copyPath(filePath, newPath, quiet, packageName)];
      // If this is from "cache-dir" also copy it into the site's .cache
      if (_.includes(filePath, `cache-dir`)) {
        const newCachePath = path.join(
          `.cache/`,
          path.relative(path.join(prefix, `cache-dir`), filePath)
        );
        localCopies.push(copyPath(filePath, newCachePath, quiet));
      }
      allCopies = allCopies.concat(localCopies);
    })
    .on(`ready`, async () => {
      // wait for all async work needed to be done
      // before publishing / installing
      await Promise.all(Array.from(waitFor));
      if (isInitialScan) {
        isInitialScan = false;
        if (packagesToPublish.size > 0) {
          isPublishing = true;
          await publishPackagesLocallyAndInstall({
            packagesToPublish: Array.from(packagesToPublish),
            root,
            localPackages,
            ignorePackageJSONChanges,
          });
          packagesToPublish.clear();
          isPublishing = false;
        } else if (anyPackageNotInstalled) {
          // run `yarn`
          const yarnInstallCmd = [`yarn`];
          console.log(`Installing packages from public NPM registry`);
          await promisifiedSpawn(yarnInstallCmd);
          console.log(`Installation complete`);
        }
        await clearJSFilesFromNodeModules();
        runQueuedCopies();
      }
      // all files watched, quit once all files are copied if necessary
      Promise.all(allCopies).then(() => {
        if (scanOnce) {
          quit();
        }
      });
    });
}
module.exports = watch;

File diff suppressed because it is too large Load Diff

View File

@@ -11,6 +11,7 @@ import shippingProfileRoutes from "./shipping-profiles"
import discountRoutes from "./discounts"
import giftCardRoutes from "./gift-cards"
import orderRoutes from "./orders"
import returnReasonRoutes from "./return-reasons"
import storeRoutes from "./store"
import uploadRoutes from "./uploads"
import customerRoutes from "./customers"
@@ -64,6 +65,7 @@ export default (app, container, config) => {
variantRoutes(route)
collectionRoutes(route)
notificationRoutes(route)
returnReasonRoutes(route)
return app
}

View File

@@ -13,6 +13,8 @@ const defaultRelations = [
"fulfillments.tracking_links",
"fulfillments.items",
"returns",
"returns.items",
"returns.items.reason",
"gift_cards",
"gift_card_transactions",
"claims",

View File

@@ -195,6 +195,8 @@ export const defaultRelations = [
"fulfillments.tracking_links",
"fulfillments.items",
"returns",
"returns.items",
"returns.items.reason",
"gift_cards",
"gift_card_transactions",
"claims",

View File

@@ -21,6 +21,12 @@ import { defaultRelations, defaultFields } from "./"
* item_id:
* description: The id of the Line Item.
* type: string
* reason_id:
* description: The id of the Return Reason to use.
* type: string
* note:
* description: An optional note with information about the Return.
* type: string
* quantity:
* description: The quantity of the Line Item.
* type: integer
@@ -60,6 +66,8 @@ export default async (req, res) => {
.items({
item_id: Validator.string().required(),
quantity: Validator.number().required(),
reason_id: Validator.string().optional(),
note: Validator.string().optional(),
})
.required(),
return_shipping: Validator.object()

View File

@@ -0,0 +1,66 @@
import { MedusaError, Validator } from "medusa-core-utils"
import { defaultRelations, defaultFields } from "./"
/**
* @oas [post] /return-reasons
* operationId: "PostReturnReasons"
* summary: "Create a Return Reason"
* description: "Creates a Return Reason"
* requestBody:
* content:
* application/json:
* schema:
* properties:
* label:
* description: "The label to display to the Customer."
* type: string
* value:
* description: "The value that the Return Reason will be identified by. Must be unique."
* type: string
* description:
* description: "An optional description to for the Reason."
* type: string
* metadata:
* description: An optional set of key-value pairs with additional information.
* type: object
* tags:
* - Return Reason
* responses:
* 200:
* description: OK
* content:
* application/json:
* schema:
* properties:
* return_reason:
* $ref: "#/components/schemas/return_reason"
*/
/**
 * Handler for POST /admin/return-reasons.
 * Validates the body, creates the Return Reason and responds with the
 * freshly retrieved reason using the module's default fields/relations.
 * Throws MedusaError(INVALID_DATA) on validation failure; service errors
 * propagate to the route wrapper.
 */
export default async (req, res) => {
  const schema = Validator.object().keys({
    value: Validator.string().required(),
    label: Validator.string().required(),
    description: Validator.string()
      .optional()
      .allow(""),
    metadata: Validator.object().optional(),
  })

  const { value, error } = schema.validate(req.body)
  if (error) {
    throw new MedusaError(MedusaError.Types.INVALID_DATA, error.details)
  }

  // The original `try { ... } catch (err) { throw err }` was a no-op and has
  // been removed — errors still propagate unchanged.
  const returnReasonService = req.scope.resolve("returnReasonService")
  const result = await returnReasonService.create(value)

  // Re-fetch so the response uses the canonical field/relation set.
  const reason = await returnReasonService.retrieve(result.id, {
    select: defaultFields,
    relations: defaultRelations,
  })

  res.status(200).json({ return_reason: reason })
}

View File

@@ -0,0 +1,37 @@
import { MedusaError, Validator } from "medusa-core-utils"
import { defaultRelations, defaultFields } from "./"
/**
* @oas [get] /return-reasons/{id}
* operationId: "GetReturnReasonsReason"
* summary: "Retrieve a Return Reason"
* description: "Retrieves a Return Reason."
* parameters:
* - (path) id=* {string} The id of the Return Reason.
* tags:
* - Return Reason
* responses:
* 200:
* description: OK
* content:
* application/json:
* schema:
* properties:
* return_reason:
* $ref: "#/components/schemas/return_reason"
*/
/**
 * Handler for GET /admin/return-reasons/:id.
 * Retrieves a single Return Reason with the module's default fields/relations.
 * Errors from retrieve (e.g. unknown id) propagate to the route wrapper;
 * the previous `catch (err) { throw err }` was a no-op and has been removed.
 */
export default async (req, res) => {
  const { id } = req.params

  const returnReasonService = req.scope.resolve("returnReasonService")
  const data = await returnReasonService.retrieve(id, {
    select: defaultFields,
    relations: defaultRelations,
  })

  res.status(200).json({ return_reason: data })
}

View File

@@ -0,0 +1,42 @@
import { Router } from "express"
import middlewares from "../../../middlewares"
const route = Router()

/**
 * Mounts the admin Return Reason endpoints under /return-reasons.
 */
export default app => {
  app.use("/return-reasons", route)

  const listReasons = middlewares.wrap(require("./list-reasons").default)
  const getReason = middlewares.wrap(require("./get-reason").default)
  const createReason = middlewares.wrap(require("./create-reason").default)
  const updateReason = middlewares.wrap(require("./update-reason").default)

  // Read endpoints
  route.get("/", listReasons)
  route.get("/:id", getReason)

  // Write endpoints
  route.post("/", createReason)
  route.post("/:id", updateReason)

  return app
}
// Columns selected for Return Reasons unless a handler overrides them.
export const defaultFields = [
  `id`,
  `value`,
  `label`,
  `description`,
  `created_at`,
  `updated_at`,
  `deleted_at`,
]

// No relations are expanded by default.
export const defaultRelations = []

View File

@@ -0,0 +1,37 @@
import { MedusaError, Validator } from "medusa-core-utils"
import { defaultRelations, defaultFields } from "./"
/**
* @oas [get] /return-reasons
* operationId: "GetReturnReasons"
* summary: "List Return Reasons"
* description: "Retrieves a list of Return Reasons."
* tags:
* - Return Reason
* responses:
* 200:
* description: OK
* content:
* application/json:
* schema:
* properties:
* return_reasons:
* type: array
* items:
* $ref: "#/components/schemas/return_reason"
*/
/**
 * Handler for GET /admin/return-reasons.
 * Lists all Return Reasons with the module's default fields/relations.
 * The previous `catch (err) { throw err }` was a no-op and has been removed;
 * the dead `const query = {}` local is inlined.
 */
export default async (req, res) => {
  const returnReasonService = req.scope.resolve("returnReasonService")

  // No filters are supported yet — always list every reason.
  const data = await returnReasonService.list(
    {},
    {
      select: defaultFields,
      relations: defaultRelations,
    }
  )

  res.status(200).json({ return_reasons: data })
}

View File

@@ -0,0 +1,70 @@
import { MedusaError, Validator } from "medusa-core-utils"
import { defaultRelations, defaultFields } from "./"
/**
* @oas [post] /return-reasons/:id
* operationId: "PostReturnReasonsReason"
* summary: "Update a Return Reason"
* description: "Updates a Return Reason"
* parameters:
* - (path) id=* {string} The id of the Return Reason.
* requestBody:
* content:
* application/json:
* schema:
* properties:
* label:
* description: "The label to display to the Customer."
* type: string
* value:
* description: "The value that the Return Reason will be identified by. Must be unique."
* type: string
* description:
* description: "An optional description to for the Reason."
* type: string
* metadata:
* description: An optional set of key-value pairs with additional information.
* type: object
* tags:
* - Return Reason
* responses:
* 200:
* description: OK
* content:
* application/json:
* schema:
* properties:
* return_reason:
* $ref: "#/components/schemas/return_reason"
*/
/**
 * Handler for POST /admin/return-reasons/:id.
 * Validates the body, updates the Return Reason and responds with the
 * freshly retrieved reason. Throws MedusaError(INVALID_DATA) on validation
 * failure; service errors propagate (the previous catch-rethrow was a no-op).
 */
export default async (req, res) => {
  const { id } = req.params

  // `value` is deliberately absent here — presumably immutable after
  // creation since it identifies the reason; confirm before adding it.
  const schema = Validator.object().keys({
    label: Validator.string().optional(),
    description: Validator.string()
      .optional()
      .allow(""),
    metadata: Validator.object().optional(),
  })

  const { value, error } = schema.validate(req.body)
  if (error) {
    throw new MedusaError(MedusaError.Types.INVALID_DATA, error.details)
  }

  const returnReasonService = req.scope.resolve("returnReasonService")
  await returnReasonService.update(id, value)

  // Re-fetch so the response uses the canonical field/relation set.
  const reason = await returnReasonService.retrieve(id, {
    select: defaultFields,
    relations: defaultRelations,
  })

  res.status(200).json({ return_reason: reason })
}

View File

@@ -11,6 +11,7 @@ import customerRoutes from "./customers"
import shippingOptionRoutes from "./shipping-options"
import regionRoutes from "./regions"
import returnRoutes from "./returns"
import returnReasonRoutes from "./return-reasons"
import swapRoutes from "./swaps"
import variantRoutes from "./variants"
import giftCardRoutes from "./gift-cards"
@@ -41,6 +42,7 @@ export default (app, container, config) => {
variantRoutes(route)
returnRoutes(route)
giftCardRoutes(route)
returnReasonRoutes(route)
return app
}

View File

@@ -0,0 +1,37 @@
import { MedusaError, Validator } from "medusa-core-utils"
import { defaultRelations, defaultFields } from "./"
/**
* @oas [get] /return-reasons/{id}
* operationId: "GetReturnReasonsReason"
* summary: "Retrieve a Return Reason"
* description: "Retrieves a Return Reason."
* parameters:
* - (path) id=* {string} The id of the Return Reason.
* tags:
* - Return Reason
* responses:
* 200:
* description: OK
* content:
* application/json:
* schema:
* properties:
* return_reason:
* $ref: "#/components/schemas/return_reason"
*/
/**
 * Handler for GET /store/return-reasons/:id.
 * Retrieves a single Return Reason with the module's default fields/relations.
 * Errors from retrieve (e.g. unknown id) propagate to the route wrapper;
 * the previous `catch (err) { throw err }` was a no-op and has been removed.
 */
export default async (req, res) => {
  const { id } = req.params

  const returnReasonService = req.scope.resolve("returnReasonService")
  const data = await returnReasonService.retrieve(id, {
    select: defaultFields,
    relations: defaultRelations,
  })

  res.status(200).json({ return_reason: data })
}

View File

@@ -0,0 +1,32 @@
import { Router } from "express"
import middlewares from "../../../middlewares"
const route = Router()

/**
 * Mounts the storefront Return Reason endpoints under /return-reasons.
 * Only read endpoints are exposed to the storefront.
 */
export default app => {
  app.use("/return-reasons", route)

  const listReasons = middlewares.wrap(require("./list-reasons").default)
  const getReason = middlewares.wrap(require("./get-reason").default)

  route.get("/", listReasons)
  route.get("/:id", getReason)

  return app
}
// Columns selected for Return Reasons unless a handler overrides them.
export const defaultFields = [
  `id`,
  `value`,
  `label`,
  `description`,
  `created_at`,
  `updated_at`,
  `deleted_at`,
]

// No relations are expanded by default.
export const defaultRelations = []

View File

@@ -0,0 +1,37 @@
import { MedusaError, Validator } from "medusa-core-utils"
import { defaultRelations, defaultFields } from "./"
/**
* @oas [get] /return-reasons
* operationId: "GetReturnReasons"
* summary: "List Return Reasons"
* description: "Retrieves a list of Return Reasons."
* tags:
* - Return Reason
* responses:
* 200:
* description: OK
* content:
* application/json:
* schema:
* properties:
* return_reasons:
* type: array
* items:
* $ref: "#/components/schemas/return_reason"
*/
/**
 * Handler for GET /store/return-reasons.
 * Lists all Return Reasons with the module's default fields/relations.
 * The previous `catch (err) { throw err }` was a no-op and has been removed;
 * the dead `const query = {}` local is inlined.
 */
export default async (req, res) => {
  const returnReasonService = req.scope.resolve("returnReasonService")

  // No filters are supported yet — always list every reason.
  const data = await returnReasonService.list(
    {},
    {
      select: defaultFields,
      relations: defaultRelations,
    }
  )

  res.status(200).json({ return_reasons: data })
}

View File

@@ -8,6 +8,8 @@ export default async (req, res) => {
.items({
item_id: Validator.string().required(),
quantity: Validator.number().required(),
reason_id: Validator.string().optional(),
note: Validator.string().optional(),
})
.required(),
return_shipping: Validator.object()
@@ -109,13 +111,14 @@ export default async (req, res) => {
const { key, error } = await idempotencyKeyService.workStage(
idempotencyKey.idempotency_key,
async manager => {
let order = await orderService
.withTransaction(manager)
.retrieve(value.order_id, { relations: ["returns"] })
let ret = await returnService.withTransaction(manager).list({
idempotency_key: idempotencyKey.idempotency_key,
})
let ret = await returnService.withTransaction(manager).list(
{
idempotency_key: idempotencyKey.idempotency_key,
},
{
relations: ["items", "items.reason"],
}
)
if (!ret.length) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,

View File

@@ -41,3 +41,4 @@ export { StagedJob } from "./models/staged-job"
export { Store } from "./models/store"
export { Swap } from "./models/swap"
export { User } from "./models/user"
export { ReturnReason } from "./models/return-reason"

View File

@@ -0,0 +1,33 @@
import { MigrationInterface, QueryRunner } from "typeorm"
/**
 * Adds the `return_reason` table (with a unique index on `value`) and links
 * it to `return_item` via a nullable `reason_id` FK plus a free-text `note`.
 */
export class returnReason1615891636559 implements MigrationInterface {
  name = "returnReason1615891636559"

  public async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `CREATE TABLE "return_reason" ("id" character varying NOT NULL, "value" character varying NOT NULL, "label" character varying NOT NULL, "description" character varying, "created_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deleted_at" TIMESTAMP WITH TIME ZONE, "metadata" jsonb, CONSTRAINT "PK_95fd1172973165790903e65660a" PRIMARY KEY ("id"))`
    )
    // `value` identifies a reason and must be unique.
    await queryRunner.query(
      `CREATE UNIQUE INDEX "IDX_00605f9d662c06b81c1b60ce24" ON "return_reason" ("value") `
    )
    // Nullable so existing return items remain valid.
    await queryRunner.query(
      `ALTER TABLE "return_item" ADD "reason_id" character varying`
    )
    await queryRunner.query(
      `ALTER TABLE "return_item" ADD "note" character varying`
    )
    await queryRunner.query(
      `ALTER TABLE "return_item" ADD CONSTRAINT "FK_d742532378a65022e7ceb328828" FOREIGN KEY ("reason_id") REFERENCES "return_reason"("id") ON DELETE NO ACTION ON UPDATE NO ACTION`
    )
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Reverse order of `up`: drop the FK before the columns and table.
    await queryRunner.query(
      `ALTER TABLE "return_item" DROP CONSTRAINT "FK_d742532378a65022e7ceb328828"`
    )
    await queryRunner.query(`ALTER TABLE "return_item" DROP COLUMN "note"`)
    await queryRunner.query(`ALTER TABLE "return_item" DROP COLUMN "reason_id"`)
    await queryRunner.query(`DROP INDEX "IDX_00605f9d662c06b81c1b60ce24"`)
    await queryRunner.query(`DROP TABLE "return_reason"`)
  }
}

View File

@@ -16,6 +16,7 @@ import {
JoinTable,
} from "typeorm"
import { ReturnReason } from "./return-reason"
import { Return } from "./return"
import { LineItem } from "./line-item"
@@ -47,6 +48,16 @@ export class ReturnItem {
@Column({ type: "int", nullable: true })
received_quantity: number
@Column({ nullable: true })
reason_id: string
@ManyToOne(() => ReturnReason, { eager: true })
@JoinColumn({ name: "reason_id" })
reason: ReturnReason
@Column({ nullable: true })
note: string
@Column({ type: "jsonb", nullable: true })
metadata: any
}
@@ -79,6 +90,13 @@ export class ReturnItem {
 * received_quantity:
* description: "The quantity that was received in the warehouse."
* type: integer
* reason:
* description: "The reason for returning the item."
* anyOf:
* - $ref: "#/components/schemas/return_reason"
* note:
* description: "An optional note with additional details about the Return."
* type: string
* metadata:
* description: "An optional key-value map with additional information."
* type: object

View File

@@ -0,0 +1,81 @@
import {
Entity,
Index,
BeforeInsert,
Column,
DeleteDateColumn,
CreateDateColumn,
UpdateDateColumn,
PrimaryColumn,
} from "typeorm"
import { ulid } from "ulid"
/**
 * Return Reason entity.
 *
 * A configurable reason (identified by a unique `value` such as "too_big")
 * that can be attached to Return Items to explain why a Line Item was
 * returned. Ids are generated as `rr_<ulid>` on insert.
 */
@Entity()
export class ReturnReason {
  // Primary key; assigned in beforeInsert() when not provided by the caller.
  @PrimaryColumn()
  id: string

  // Machine-readable identifier for the reason; must be unique.
  @Index({ unique: true })
  @Column()
  value: string

  // Human-readable text suitable for display to a Customer.
  @Column()
  label: string

  // Optional longer explanation of the reason.
  @Column({ nullable: true })
  description: string

  @CreateDateColumn({ type: "timestamptz" })
  created_at: Date

  @UpdateDateColumn({ type: "timestamptz" })
  updated_at: Date

  // Soft-delete marker; set instead of removing the row.
  @DeleteDateColumn({ type: "timestamptz" })
  deleted_at: Date

  // Free-form key-value store for integrations and extensions.
  @Column({ type: "jsonb", nullable: true })
  metadata: any

  // Generates a prefixed ulid id unless one was explicitly supplied.
  @BeforeInsert()
  private beforeInsert() {
    if (this.id) return
    const id = ulid()
    this.id = `rr_${id}`
  }
}
/**
* @schema return_reason
* title: "Return Reason"
* description: "A Reason for why a given product is returned. A Return Reason can be used on Return Items in order to indicate why a Line Item was returned."
* x-resourceId: return_reason
* properties:
* id:
* description: "The id of the Return Reason will start with `rr_`."
* type: string
* description:
* description: "A description of the Reason."
* type: string
* label:
* description: "A text that can be displayed to the Customer as a reason."
* type: string
* value:
* description: "The value to identify the reason by."
* type: string
* created_at:
* description: "The date with timezone at which the resource was created."
* type: string
* format: date-time
* updated_at:
* description: "The date with timezone at which the resource was last updated."
* type: string
* format: date-time
* deleted_at:
* description: "The date with timezone at which the resource was deleted."
* type: string
* format: date-time
* metadata:
* description: "An optional key-value map with additional information."
* type: object
*/

View File

@@ -0,0 +1,5 @@
import { EntityRepository, Repository } from "typeorm"
import { ReturnReason } from "../models/return-reason"
// Custom TypeORM repository for the ReturnReason entity. No extra methods
// yet — it exists so services can resolve it via getCustomRepository().
@EntityRepository(ReturnReason)
export class ReturnReasonRepository extends Repository<ReturnReason> {}

View File

@@ -0,0 +1,97 @@
import _ from "lodash"
import { Validator, MedusaError } from "medusa-core-utils"
import { BaseService } from "medusa-interfaces"
/**
 * Provides CRUD-style operations for Return Reasons.
 * @extends BaseService
 */
class ReturnReasonService extends BaseService {
  /**
   * @param {object} deps - injected dependencies
   * @param {EntityManager} deps.manager - TypeORM entity manager
   * @param {ReturnReasonRepository} deps.returnReasonRepository - repository
   *   used for all Return Reason persistence
   */
  constructor({ manager, returnReasonRepository }) {
    super()

    /** @private @constant {EntityManager} */
    this.manager_ = manager

    /** @private @constant {ReturnReasonRepository} */
    this.retReasonRepo_ = returnReasonRepository
  }

  /**
   * Returns a copy of this service scoped to the given transaction manager.
   * When no manager is passed, the current instance is returned unchanged.
   *
   * @param {EntityManager} manager - the transaction manager to scope to
   * @return {ReturnReasonService} a transactional service instance
   */
  withTransaction(manager) {
    if (!manager) {
      return this
    }

    const clone = new ReturnReasonService({
      manager,
      returnReasonRepository: this.retReasonRepo_,
    })
    clone.transactionManager_ = manager
    return clone
  }

  /**
   * Creates and persists a Return Reason inside a transaction.
   *
   * @param {object} data - values for the new Return Reason
   * @return {Promise<ReturnReason>} the saved Return Reason
   */
  create(data) {
    return this.atomicPhase_(async manager => {
      const reasonRepo = manager.getCustomRepository(this.retReasonRepo_)
      return reasonRepo.save(reasonRepo.create(data))
    })
  }

  /**
   * Updates a Return Reason. Only `description` and `label` are applied;
   * all other keys in `data` are ignored.
   *
   * @param {string} id - id of the Return Reason to update
   * @param {object} data - fields to update
   * @return {Promise<ReturnReason>} the updated Return Reason
   */
  update(id, data) {
    return this.atomicPhase_(async manager => {
      const reasonRepo = manager.getCustomRepository(this.retReasonRepo_)
      const reason = await this.retrieve(id)

      // `in` checks mean explicitly provided values (including empty
      // strings) are applied, while absent keys leave fields untouched.
      if ("description" in data) {
        reason.description = data.description
      }
      if ("label" in data) {
        reason.label = data.label
      }

      await reasonRepo.save(reason)
      return reason
    })
  }

  /**
   * Lists Return Reasons matching a selector.
   *
   * @param {Object} selector - the query object for find
   * @param {Object} config - find config (defaults: first 50, newest first)
   * @return {Promise} the result of the find operation
   */
  async list(
    selector,
    config = { skip: 0, take: 50, order: { created_at: "DESC" } }
  ) {
    const reasonRepo = this.manager_.getCustomRepository(this.retReasonRepo_)
    return reasonRepo.find(this.buildQuery_(selector, config))
  }

  /**
   * Retrieves a Return Reason by id.
   *
   * @param {string} id - id of the Return Reason to retrieve
   * @param {Object} config - find config passed to buildQuery_
   * @return {Promise<ReturnReason>} the Return Reason
   * @throws {MedusaError} NOT_FOUND when no Return Reason has the given id
   */
  async retrieve(id, config = {}) {
    const reasonRepo = this.manager_.getCustomRepository(this.retReasonRepo_)

    const query = this.buildQuery_({ id: this.validateId_(id) }, config)
    const reason = await reasonRepo.findOne(query)

    if (!reason) {
      throw new MedusaError(
        MedusaError.Types.NOT_FOUND,
        `Return Reason with id: ${id} was not found.`
      )
    }

    return reason
  }
}

View File

@@ -14,6 +14,7 @@ class ReturnService extends BaseService {
returnRepository,
returnItemRepository,
shippingOptionService,
returnReasonService,
fulfillmentProviderService,
}) {
super()
@@ -38,6 +39,8 @@ class ReturnService extends BaseService {
/** @private @const {FulfillmentProviderService} */
this.fulfillmentProviderService_ = fulfillmentProviderService
this.returnReasonService_ = returnReasonService
}
withTransaction(transactionManager) {
@@ -53,6 +56,7 @@ class ReturnService extends BaseService {
returnItemRepository: this.returnItemRepository_,
shippingOptionService: this.shippingOptionService_,
fulfillmentProviderService: this.fulfillmentProviderService_,
returnReasonService: this.returnReasonService_,
})
cloned.transactionManager_ = transactionManager
@@ -72,9 +76,9 @@ class ReturnService extends BaseService {
*/
async getFulfillmentItems_(order, items, transformer) {
const toReturn = await Promise.all(
items.map(async ({ item_id, quantity }) => {
const item = order.items.find(i => i.id === item_id)
return transformer(item, quantity)
items.map(async data => {
const item = order.items.find(i => i.id === data.item_id)
return transformer(item, data.quantity, data)
})
)
@@ -127,10 +131,11 @@ class ReturnService extends BaseService {
* @param {LineItem?} item - the line item to check has sufficient returnable
* quantity.
* @param {number} quantity - the quantity that is requested to be returned.
 * @param {object} additional - optional extra return-line data; may contain `reason_id` and `note`.
* @return {LineItem} a line item where the quantity is set to the requested
* return quantity.
*/
validateReturnLineItem_(item, quantity) {
validateReturnLineItem_(item, quantity, additional) {
if (!item) {
throw new MedusaError(
MedusaError.Types.INVALID_DATA,
@@ -146,10 +151,20 @@ class ReturnService extends BaseService {
)
}
return {
const toReturn = {
...item,
quantity,
}
if ("reason_id" in additional) {
toReturn.reason_id = additional.reason_id
}
if ("note" in additional) {
toReturn.note = additional.note
}
return toReturn
}
/**
@@ -288,6 +303,8 @@ class ReturnService extends BaseService {
item_id: i.id,
quantity: i.quantity,
requested_quantity: i.quantity,
reason_id: i.reason_id,
note: i.note,
metadata: i.metadata,
})
)