diff --git a/.changeset/README.md b/.changeset/README.md new file mode 100644 index 0000000000..e5b6d8d6a6 --- /dev/null +++ b/.changeset/README.md @@ -0,0 +1,8 @@ +# Changesets + +Hello and welcome! This folder has been automatically generated by `@changesets/cli`, a build tool that works +with multi-package repos, or single-package repos to help you version and publish your code. You can +find the full documentation for it [in our repository](https://github.com/changesets/changesets) + +We have a quick list of common questions to get you started engaging with this project in +[our documentation](https://github.com/changesets/changesets/blob/main/docs/common-questions.md) diff --git a/.changeset/config.json b/.changeset/config.json new file mode 100644 index 0000000000..d5bef0f2f1 --- /dev/null +++ b/.changeset/config.json @@ -0,0 +1,11 @@ +{ + "$schema": "https://unpkg.com/@changesets/config@2.0.0/schema.json", + "changelog": ["@changesets/changelog-github", { "repo": "medusajs/medusa" }], + "commit": false, + "fixed": [], + "linked": [], + "access": "public", + "baseBranch": "master", + "updateInternalDependencies": "patch", + "ignore": [] +} diff --git a/.eslintrc.js b/.eslintrc.js index ee070ed040..82c29f3eb6 100644 --- a/.eslintrc.js +++ b/.eslintrc.js @@ -29,7 +29,7 @@ module.exports = { files: [`*.ts`], parser: `@typescript-eslint/parser`, plugins: [`@typescript-eslint/eslint-plugin`], - extends: [`plugin:@typescript-eslint/recommended`], + extends: [`plugin:@typescript-eslint/recommended`, "prettier"], rules: { "valid-jsdoc": [ "error", diff --git a/.github/.kodiak.toml b/.github/.kodiak.toml new file mode 100644 index 0000000000..f97cf85d58 --- /dev/null +++ b/.github/.kodiak.toml @@ -0,0 +1,20 @@ +#.kodiak.toml +version = 1 + +[merge] +automerge_label = "automerge" +require_automerge_label = true +blocking_labels = ["on hold", "wip", "blocked"] +method = "squash" +delete_branch_on_merge = true +optimistic_updates = false +prioritize_ready_to_merge = true +notify_on_conflict = true + +[merge.message] +title = "pull_request_title" +body = "pull_request_body" +include_coauthors= true +include_pr_number = true +body_type = "markdown" +strip_html_comments = true \ No newline at end of file diff --git a/.github/actions/setup-server/action.yml b/.github/actions/setup-server/action.yml new file mode 100644 index 0000000000..fe16033ca7 --- /dev/null +++ b/.github/actions/setup-server/action.yml @@ -0,0 +1,42 @@ +name: "Setup test env" +description: "Setup test environment for actions" + +inputs: + node-version: + description: "Node version" + required: false + default: "14" + cache-extension: + description: "Extension for fetching cached dependencies" + required: true + +runs: + using: "composite" + steps: + - name: Cancel Previous Runs + uses: styfle/cancel-workflow-action@0.9.1 + with: + access_token: ${{ github.token }} + + - name: Setup Node.js environment + uses: actions/setup-node@v2.4.1 + with: + node-version: ${{ inputs.node-version }} + cache: "yarn" + + - name: Bootstrap packages + uses: ./.github/actions/cache-bootstrap + with: + extension: ${{ inputs.cache-extension }} + + - name: Build Packages + shell: "bash" + run: yarn build + + - name: Install dev cli + shell: "bash" + run: sudo npm i -g medusa-dev-cli + + - name: Set path to medusa repo + shell: "bash" + run: medusa-dev --set-path-to-repo $(pwd) diff --git a/.github/actions/test-server/action.yml b/.github/actions/test-server/action.yml new file mode 100644 index 0000000000..5bdf141b24 --- /dev/null +++ 
b/.github/actions/test-server/action.yml @@ -0,0 +1,33 @@ +name: "Test server" +description: "Test the currently running medusa server to see if a user has been created and that the server is seeded" + +inputs: + email: + description: "email of user to log in" + required: false + default: "test@test.com" + password: + description: "password of user to log in" + required: false + default: "password" + pathToSeedData: + description: "path to seed data" + required: false + default: "../cli-test/data/seed.json" + +runs: + using: "composite" + steps: + - name: Wait for live server response + shell: "bash" + run: ./integration-tests/scripts/cli/wait-for-server-live.sh + - name: Log in with user + shell: "bash" + run: ./integration-tests/scripts/cli/login.sh ${{ inputs.email }} ${{ inputs.password }} + - name: GetProducts + shell: "bash" + run: ./integration-tests/scripts/cli/get-products.sh ${{ inputs.pathToSeedData }} + + - name: Kill server + shell: "bash" + run: kill -9 $(lsof -t -i :9000) diff --git a/.github/workflows/action.yml b/.github/workflows/action.yml index e77d96bd4e..739bfcb0ff 100644 --- a/.github/workflows/action.yml +++ b/.github/workflows/action.yml @@ -1,9 +1,9 @@ name: Medusa Pipeline -on: +on: pull_request: paths-ignore: - - 'docs/**' - - 'www/**' + - "docs/**" + - "www/**" jobs: unit-tests: @@ -43,6 +43,15 @@ jobs: runs-on: ubuntu-latest services: + redis: + image: redis + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 postgres: image: postgres env: diff --git a/.github/workflows/snapshot-this.yml b/.github/workflows/snapshot-this.yml new file mode 100644 index 0000000000..ffabc8e3f7 --- /dev/null +++ b/.github/workflows/snapshot-this.yml @@ -0,0 +1,133 @@ +# Inspired from https://github.com/Shopify/quilt/blob/main/.github/workflows/snapit.yml +name: Snapshot This + +on: + issue_comment: + types: + - created + +concurrency: ${{ github.workflow }}-${{ github.ref }} + +jobs: + snapshot: + name: Snapshot Release + if: | + github.event.issue.pull_request && + github.event.comment.body == '/snapshot-this' + runs-on: ubuntu-latest + steps: + - name: Validate pull request + uses: actions/github-script@v6 + with: + script: | + try { + // Add a rocket reaction to the comment + await github.rest.reactions.createForIssueComment({ + ...context.repo, + comment_id: context.payload.comment.id, + content: 'rocket', + }) + + // Only allow comment creators who have "write" permissions to repo + const actorPermission = (await github.rest.repos.getCollaboratorPermissionLevel({ + ...context.repo, + username: context.actor + })).data.permission + const isPermitted = ['write', 'admin'].includes(actorPermission) + if (!isPermitted) { + const errorMessage = 'Only users with write permission to the respository can run /snapshot-this' + await github.rest.issues.createComment({ + ...context.repo, + issue_number: context.issue.number, + body: errorMessage, + }) + core.setFailed(errorMessage) + return; + } + + const pullRequest = await github.rest.pulls.get({ + ...context.repo, + pull_number: context.issue.number, + }) + // Pull request from fork + if (context.payload.repository.full_name !== pullRequest.data.head.repo.full_name) { + const errorMessage = '`/snapshot-this` is not supported on pull requests from forked repositories.' 
+ await github.rest.issues.createComment({ + ...context.repo, + issue_number: context.issue.number, + body: errorMessage, + }) + core.setFailed(errorMessage) + } + } catch (err) { + core.setFailed(`Request failed with error ${err}`) + } + - name: Checkout pull request branch + uses: actions/checkout@v2 + with: + ref: ${{ format('refs/pull/{0}/merge', github.event.issue.number) }} + + # Because changeset entries are consumed and removed on the + # 'changeset-release/main' branch, we need to reset the files + # so the following 'changeset version --snapshot' command will + # regenerate the package version bumps with the snapshot releases + - name: Reset changeset entries on changeset-release/main branch + run: | + if [[ $(git branch --show-current) == 'changeset-release/main' ]]; then + git checkout origin/main -- .changeset + fi + - name: Setup Node.js + uses: actions/setup-node@v2 + with: + node-version: 16.x + cache: "yarn" + + - name: Bootstrap packages + uses: ./.github/actions/cache-bootstrap + with: + extension: snapshot-this + + - name: Create an .npmrc + env: + NPM_TOKEN: ${{ secrets.NPM_TOKEN }} + run: | + cat << EOF > "$HOME/.npmrc" + //registry.npmjs.org/:_authToken=$NPM_TOKEN + EOF + + - name: Create and publish snapshot release + uses: actions/github-script@v6 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + script: | + await exec.exec('yarn run changeset version --snapshot snapshot') + const {stdout} = await exec.getExecOutput('yarn run release:snapshot') + const newTags = Array + .from(stdout.matchAll(/New tag:\s+([^\s\n]+)/g)) + .map(([_, tag]) => tag) + if (newTags.length) { + const multiple = newTags.length > 1 + const body = ( + `#### :rocket: A snapshot release has been made for this PR\n\n` + + `Test the snapshot${multiple ? 's' : ''} by updating your \`package.json\` ` + + `with the newly published version${multiple ? 
's' : ''}:\n` + + newTags.map(tag => ( + '```sh\n' + + `yarn add ${tag}\n` + + '```' + )).join('\n') + + `\n\n> Latest commit: ${context.sha}` + + ) + await github.rest.issues.createComment({ + ...context.repo, + issue_number: context.issue.number, + body, + }) + await github.rest.reactions.createForIssueComment({ + ...context.repo, + comment_id: context.payload.comment.id, + content: 'hooray', + }) + } diff --git a/.github/workflows/test-cli-with-database.yml b/.github/workflows/test-cli-with-database.yml new file mode 100644 index 0000000000..00ce8a5e8f --- /dev/null +++ b/.github/workflows/test-cli-with-database.yml @@ -0,0 +1,100 @@ +name: CLI Pipeline +on: [pull_request] + +jobs: + test-cli-with-database: + env: + REDIS_URL: redis://localhost:6379 + DATABASE_URL: "postgres://postgres:postgres@localhost/cli-test" + + strategy: + matrix: + db: [sqlite, postgres] + + services: + redis: + image: redis + # Set health checks to wait until redis has started + options: >- + --health-cmd "redis-cli ping" + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 6379:6379 + + postgres: + image: postgres + env: + POSTGRES_PASSWORD: postgres + POSTGRES_USER: postgres + POSTGRES_DB: cli-test + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + ports: + - 5432:5432 + + runs-on: ubuntu-latest + steps: + - name: Checkout + uses: actions/checkout@v2.3.5 + with: + fetch-depth: 0 + + - name: Setup development server + uses: ./.github/actions/setup-server + with: + cache-extension: "cli-test" + + - name: Install Medusa cli + run: yarn global add @medusajs/medusa-cli + + - name: Create Medusa project + run: | + medusa new cli-test + working-directory: .. + + - name: Install postgres config + if: matrix.db == 'postgres' + run: | + curl \ + https://raw.githubusercontent.com/medusajs/medusa-starter-default/feat/postgres-config/medusa-config.js \ + --output medusa-config.js + working-directory: ../cli-test + + - name: run medusa dev + run: medusa-dev --force-install + working-directory: ../cli-test + + - name: Run migrations + run: medusa migrations run + working-directory: ../cli-test + + - name: Seed db + run: yarn seed + working-directory: ../cli-test + + - name: Create admin user + run: medusa user -e test@test.com -p password -i admin_123 + working-directory: ../cli-test + + ########################## Test medusa develop ############################### + + - name: Run development server + run: medusa develop > /dev/null 2>&1 & + working-directory: ../cli-test + + - name: Testing development server + uses: ./.github/actions/test-server + + ########################### Test medusa start ################################ + + - name: Starting medusa + run: medusa start > /dev/null 2>&1 & + working-directory: ../cli-test + + - name: Testing server + uses: ./.github/actions/test-server diff --git a/docs/content/advanced/backend/plugins/create.md b/docs/content/advanced/backend/plugins/create.md index 5a956ecd9a..50c8b4fd03 100644 --- a/docs/content/advanced/backend/plugins/create.md +++ b/docs/content/advanced/backend/plugins/create.md @@ -49,7 +49,6 @@ A basic Medusa server installed with the `medusa new` command has dependencies s "medusa-interfaces": "^1.3.0", "medusa-payment-manual": "^1.0.16", "medusa-payment-stripe": "^1.1.38", - "mongoose": "^5.13.3", "typeorm": "^0.2.36" }, "devDependencies": { @@ -169,10 +168,10 @@ const plugins = [ { resolve: `medusa-plugin-custom`, options: { - name: 'My Store' - } - } -]; + name: "My 
Store", + }, + }, +] ``` Then, you can have access to your plugin configuration in the constructor of services in your plugin: @@ -195,11 +194,11 @@ export default (rootDirectory, options) => { router.get("/hello-world", (req, res) => { res.json({ - message: `Welcome to ${options.name ? options.name : 'Medusa'}!` + message: `Welcome to ${options.name ? options.name : "Medusa"}!`, }) }) - return router; + return router } ``` @@ -249,10 +248,10 @@ const plugins = [ resolve: `medusa-plugin-custom`, //if your plugin has configurations options: { - name: 'My Store' - } - } -]; + name: "My Store", + }, + }, +] ``` :::note @@ -269,7 +268,7 @@ npm run start ## NPM Ignore File -Not all files that you use while developing your plugin are necessary to be published. +Not all files that you use while developing your plugin are necessary to be published. For example, the files you add in the `src` directory are compiled to a `dist` directory before publishing. Then, when a developer installs your plugin, they’ll just be using the files under the `dist` directory. diff --git a/integration-tests/api/__tests__/admin/__snapshots__/colllections.js.snap b/integration-tests/api/__tests__/admin/__snapshots__/colllections.js.snap index 10891cc07a..f544c24ece 100644 --- a/integration-tests/api/__tests__/admin/__snapshots__/colllections.js.snap +++ b/integration-tests/api/__tests__/admin/__snapshots__/colllections.js.snap @@ -189,20 +189,20 @@ Object { "collections": Array [ Object { "created_at": Any, - "handle": "test-collection", - "id": "test-collection", + "handle": "test-collection2", + "id": "test-collection2", "products": Array [ Object { - "collection_id": "test-collection", + "collection_id": "test-collection2", "created_at": Any, "deleted_at": null, "description": "test-product-description", "discountable": true, "external_id": null, - "handle": "test-product", + "handle": "test-product_filtering_2", "height": null, "hs_code": null, - "id": "test-product", + "id": "test-product_filtering_2", "is_giftcard": false, "length": null, "material": null, @@ -210,44 +210,17 @@ Object { "mid_code": null, "origin_country": null, "profile_id": StringMatching /\\^sp_\\*/, - "status": "draft", + "status": "published", "subtitle": null, "thumbnail": null, - "title": "Test product", - "type_id": "test-type", - "updated_at": Any, - "weight": null, - "width": null, - }, - Object { - "collection_id": "test-collection", - "created_at": Any, - "deleted_at": null, - "description": "test-product-description1", - "discountable": true, - "external_id": null, - "handle": "test-product1", - "height": null, - "hs_code": null, - "id": "test-product1", - "is_giftcard": false, - "length": null, - "material": null, - "metadata": null, - "mid_code": null, - "origin_country": null, - "profile_id": StringMatching /\\^sp_\\*/, - "status": "draft", - "subtitle": null, - "thumbnail": null, - "title": "Test product1", + "title": "Test product filtering 2", "type_id": "test-type", "updated_at": Any, "weight": null, "width": null, }, ], - "title": "Test collection", + "title": "Test collection 2", "updated_at": Any, }, Object { @@ -315,20 +288,20 @@ Object { }, Object { "created_at": Any, - "handle": "test-collection2", - "id": "test-collection2", + "handle": "test-collection", + "id": "test-collection", "products": Array [ Object { - "collection_id": "test-collection2", + "collection_id": "test-collection", "created_at": Any, "deleted_at": null, "description": "test-product-description", "discountable": true, "external_id": null, - "handle": 
"test-product_filtering_2", + "handle": "test-product", "height": null, "hs_code": null, - "id": "test-product_filtering_2", + "id": "test-product", "is_giftcard": false, "length": null, "material": null, @@ -336,17 +309,44 @@ Object { "mid_code": null, "origin_country": null, "profile_id": StringMatching /\\^sp_\\*/, - "status": "published", + "status": "draft", "subtitle": null, "thumbnail": null, - "title": "Test product filtering 2", + "title": "Test product", + "type_id": "test-type", + "updated_at": Any, + "weight": null, + "width": null, + }, + Object { + "collection_id": "test-collection", + "created_at": Any, + "deleted_at": null, + "description": "test-product-description1", + "discountable": true, + "external_id": null, + "handle": "test-product1", + "height": null, + "hs_code": null, + "id": "test-product1", + "is_giftcard": false, + "length": null, + "material": null, + "metadata": null, + "mid_code": null, + "origin_country": null, + "profile_id": StringMatching /\\^sp_\\*/, + "status": "draft", + "subtitle": null, + "thumbnail": null, + "title": "Test product1", "type_id": "test-type", "updated_at": Any, "weight": null, "width": null, }, ], - "title": "Test collection 2", + "title": "Test collection", "updated_at": Any, }, ], diff --git a/integration-tests/api/__tests__/admin/__snapshots__/product-tag.js.snap b/integration-tests/api/__tests__/admin/__snapshots__/product-tag.js.snap index 91656cacc1..50244b3fb8 100644 --- a/integration-tests/api/__tests__/admin/__snapshots__/product-tag.js.snap +++ b/integration-tests/api/__tests__/admin/__snapshots__/product-tag.js.snap @@ -12,13 +12,13 @@ Array [ "created_at": Any, "id": "tag3", "updated_at": Any, - "value": "123", + "value": "1235", }, Object { "created_at": Any, "id": "tag4", "updated_at": Any, - "value": "123", + "value": "1234", }, ] `; @@ -35,13 +35,13 @@ Array [ "created_at": Any, "id": "tag3", "updated_at": Any, - "value": "123", + "value": "1235", }, Object { "created_at": Any, "id": "tag4", "updated_at": Any, - "value": "123", + "value": "1234", }, ] `; diff --git a/integration-tests/api/__tests__/admin/__snapshots__/product.js.snap b/integration-tests/api/__tests__/admin/__snapshots__/product.js.snap index a4a7db30fd..4f26a78ef8 100644 --- a/integration-tests/api/__tests__/admin/__snapshots__/product.js.snap +++ b/integration-tests/api/__tests__/admin/__snapshots__/product.js.snap @@ -79,6 +79,8 @@ Array [ "allow_backorder": false, "barcode": "test-barcode", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean", @@ -105,6 +107,8 @@ Array [ ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -123,6 +127,7 @@ Array [ ], "product_id": StringMatching /\\^test-\\*/, "sku": "test-sku", + "tax_rates": null, "title": "Test variant", "upc": "test-upc", "updated_at": Any, @@ -133,6 +138,8 @@ Array [ "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean2", @@ -159,6 +166,8 @@ Array [ ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -177,6 +186,7 @@ Array [ ], "product_id": StringMatching /\\^test-\\*/, "sku": "test-sku2", + "tax_rates": null, "title": "Test variant rank (2)", "upc": "test-upc2", 
"updated_at": Any, @@ -187,6 +197,8 @@ Array [ "allow_backorder": false, "barcode": "test-barcode 1", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean1", @@ -213,6 +225,8 @@ Array [ ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -231,6 +245,7 @@ Array [ ], "product_id": StringMatching /\\^test-\\*/, "sku": "test-sku1", + "tax_rates": null, "title": "Test variant rank (1)", "upc": "test-upc1", "updated_at": Any, @@ -241,6 +256,8 @@ Array [ "allow_backorder": false, "barcode": "test-barcode-sale", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean-sale", @@ -267,6 +284,8 @@ Array [ ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 1000, @@ -285,6 +304,7 @@ Array [ ], "product_id": StringMatching /\\^test-\\*/, "sku": "test-sku-sale", + "tax_rates": null, "title": "Test variant", "upc": "test-upc-sale", "updated_at": Any, @@ -353,6 +373,8 @@ Array [ "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean4", @@ -379,6 +401,8 @@ Array [ ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -397,6 +421,7 @@ Array [ ], "product_id": StringMatching /\\^test-\\*/, "sku": "test-sku4", + "tax_rates": null, "title": "Test variant rank (2)", "upc": "test-upc4", "updated_at": Any, @@ -407,6 +432,8 @@ Array [ "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean3", @@ -433,6 +460,8 @@ Array [ ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -451,6 +480,7 @@ Array [ ], "product_id": StringMatching /\\^test-\\*/, "sku": "test-sku3", + "tax_rates": null, "title": "Test variant rank (2)", "upc": "test-upc3", "updated_at": Any, @@ -609,6 +639,8 @@ Array [ "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": null, @@ -635,6 +667,8 @@ Array [ ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -653,6 +687,7 @@ Array [ ], "product_id": StringMatching /\\^prod_\\*/, "sku": null, + "tax_rates": null, "title": "Test variant", "upc": null, "updated_at": Any, @@ -769,6 +804,8 @@ Object { "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": null, @@ -805,6 +842,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -851,6 +890,7 @@ Object { ], "product_id": StringMatching /\\^prod_\\*/, "sku": null, + "tax_rates": null, "title": "Test variant", "upc": null, "updated_at": Any, @@ -933,6 +973,8 @@ Object { 
"allow_backorder": false, "barcode": "test-barcode", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean", @@ -959,6 +1001,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 75, @@ -977,6 +1021,7 @@ Object { ], "product_id": "test-product", "sku": "test-sku", + "tax_rates": null, "title": "Test variant", "upc": "test-upc", "updated_at": Any, diff --git a/integration-tests/api/__tests__/admin/colllections.js b/integration-tests/api/__tests__/admin/colllections.js index dd6ede0bb4..4f47d1efa2 100644 --- a/integration-tests/api/__tests__/admin/colllections.js +++ b/integration-tests/api/__tests__/admin/colllections.js @@ -176,20 +176,14 @@ describe("/admin/collections", () => { expect(response.data).toMatchSnapshot({ collections: [ { - id: "test-collection", - handle: "test-collection", - title: "Test collection", + id: "test-collection2", + handle: "test-collection2", + title: "Test collection 2", created_at: expect.any(String), updated_at: expect.any(String), products: [ { - collection_id: "test-collection", - created_at: expect.any(String), - updated_at: expect.any(String), - profile_id: expect.stringMatching(/^sp_*/), - }, - { - collection_id: "test-collection", + collection_id: "test-collection2", created_at: expect.any(String), updated_at: expect.any(String), profile_id: expect.stringMatching(/^sp_*/), @@ -218,14 +212,20 @@ describe("/admin/collections", () => { ], }, { - id: "test-collection2", - handle: "test-collection2", - title: "Test collection 2", + id: "test-collection", + handle: "test-collection", + title: "Test collection", created_at: expect.any(String), updated_at: expect.any(String), products: [ { - collection_id: "test-collection2", + collection_id: "test-collection", + created_at: expect.any(String), + updated_at: expect.any(String), + profile_id: expect.stringMatching(/^sp_*/), + }, + { + collection_id: "test-collection", created_at: expect.any(String), updated_at: expect.any(String), profile_id: expect.stringMatching(/^sp_*/), diff --git a/integration-tests/api/__tests__/admin/draft-order.js b/integration-tests/api/__tests__/admin/draft-order.js index c010598e82..272635d149 100644 --- a/integration-tests/api/__tests__/admin/draft-order.js +++ b/integration-tests/api/__tests__/admin/draft-order.js @@ -353,6 +353,84 @@ describe("/admin/draft-orders", () => { ) }) + it("creates a draft order with discount and free shipping along the line item", async () => { + const api = useApi() + + const payload = { + email: "oli@test.dk", + shipping_address: "oli-shipping", + discounts: [{ code: "TEST" }, { code: "free-shipping"}], + items: [ + { + variant_id: "test-variant", + quantity: 2, + metadata: {}, + }, + ], + region_id: "test-region", + customer_id: "oli-test", + shipping_methods: [ + { + option_id: "test-option", + }, + ], + } + + const response = await api + .post("/admin/draft-orders", payload, { + headers: { + Authorization: "Bearer test_token", + }, + }) + .catch((err) => { + console.log(err) + }) + + const created = await api + .get(`/admin/draft-orders/${response.data.draft_order.id}`, { + headers: { + Authorization: "Bearer test_token", + }, + }) + .catch((err) => { + console.log(err) + }) + + const draftOrder = created.data.draft_order + const lineItemId = draftOrder.cart.items[0].id + + expect(response.status).toEqual(200) + expect(draftOrder.cart.items).toEqual( + 
expect.arrayContaining([ + expect.objectContaining({ + variant_id: "test-variant", + unit_price: 8000, + quantity: 2, + adjustments: expect.arrayContaining([ + expect.objectContaining({ + item_id: lineItemId, + amount: 1600, + description: "discount", + discount_id: "test-discount", + }), + ]), + }), + ]) + ) + + // Check that discounts are applied + expect(draftOrder.cart.discounts).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + code: "TEST", + }), + expect.objectContaining({ + code: "free-shipping", + }) + ]) + ) + }) + it("creates a draft order with created shipping address", async () => { const api = useApi() diff --git a/integration-tests/api/__tests__/admin/product-tag.js b/integration-tests/api/__tests__/admin/product-tag.js index 1c3de72149..b2cfffdfb2 100644 --- a/integration-tests/api/__tests__/admin/product-tag.js +++ b/integration-tests/api/__tests__/admin/product-tag.js @@ -89,11 +89,10 @@ describe("/admin/product-tags", () => { updated_at: expect.any(String), } - expect(res.data.product_tags.map((pt) => pt.value)).toEqual([ - "123", - "123", - "123", - ]) + expect(res.data.product_tags.length).toEqual(3) + expect(res.data.product_tags.map((pt) => pt.value)).toEqual( + expect.arrayContaining(["123", "1235", "1234"]) + ) expect(res.data.product_tags).toMatchSnapshot([ tagMatch, diff --git a/integration-tests/api/__tests__/admin/product.js b/integration-tests/api/__tests__/admin/product.js index 41ffc406f2..866381ad17 100644 --- a/integration-tests/api/__tests__/admin/product.js +++ b/integration-tests/api/__tests__/admin/product.js @@ -6,7 +6,11 @@ const { initDb, useDb } = require("../../../helpers/use-db") const adminSeeder = require("../../helpers/admin-seeder") const productSeeder = require("../../helpers/product-seeder") -const { ProductVariant, ProductOptionValue, MoneyAmount } = require("@medusajs/medusa") +const { + ProductVariant, + ProductOptionValue, + MoneyAmount, +} = require("@medusajs/medusa") const priceListSeeder = require("../../helpers/price-list-seeder") jest.setTimeout(50000) @@ -18,7 +22,7 @@ describe("/admin/products", () => { beforeAll(async () => { const cwd = path.resolve(path.join(__dirname, "..", "..")) dbConnection = await initDb({ cwd }) - medusaProcess = await setupServer({ cwd }) + medusaProcess = await setupServer({ cwd, verbose: false }) }) afterAll(async () => { @@ -232,6 +236,7 @@ describe("/admin/products", () => { }) expect(response.status).toEqual(200) + expect(response.data.count).toEqual(1) expect(response.data.products).toEqual([ expect.objectContaining({ id: "test-product_filtering_4", @@ -256,6 +261,36 @@ describe("/admin/products", () => { expect(response.data.products.length).toEqual(2) }) + it("returns a list of products with free text query including variant prices", async () => { + const api = useApi() + + const response = await api + .get("/admin/products?q=test+product1", { + headers: { + Authorization: "Bearer test_token", + }, + }) + .catch((err) => { + console.log(err) + }) + + const expectedVariantPrices = response.data.products[0].variants + .map((v) => v.prices) + .flat(1) + + expect(response.status).toEqual(200) + expect(expectedVariantPrices).toEqual( + expect.arrayContaining([ + expect.objectContaining({ + id: "test-price4", + }), + expect.objectContaining({ + id: "test-price3", + }), + ]) + ) + }) + it("returns a list of products with free text query and offset", async () => { const api = useApi() @@ -1388,7 +1423,7 @@ describe("/admin/products", () => { }) describe("GET 
/admin/products/:id/variants", () => { - beforeEach(async() => { + beforeEach(async () => { try { await productSeeder(dbConnection) await adminSeeder(dbConnection) @@ -1398,12 +1433,12 @@ describe("/admin/products", () => { } }) - afterEach(async() => { + afterEach(async () => { const db = useDb() await db.teardown() }) - it('should return the variants related to the requested product', async () => { + it("should return the variants related to the requested product", async () => { const api = useApi() const res = await api @@ -1420,10 +1455,22 @@ describe("/admin/products", () => { expect(res.data.variants.length).toBe(4) expect(res.data.variants).toEqual( expect.arrayContaining([ - expect.objectContaining({ id: "test-variant", product_id: "test-product" }), - expect.objectContaining({ id: "test-variant_1", product_id: "test-product" }), - expect.objectContaining({ id: "test-variant_2", product_id: "test-product" }), - expect.objectContaining({ id: "test-variant-sale", product_id: "test-product" }), + expect.objectContaining({ + id: "test-variant", + product_id: "test-product", + }), + expect.objectContaining({ + id: "test-variant_1", + product_id: "test-product", + }), + expect.objectContaining({ + id: "test-variant_2", + product_id: "test-product", + }), + expect.objectContaining({ + id: "test-variant-sale", + product_id: "test-product", + }), ]) ) }) @@ -1812,16 +1859,22 @@ describe("/admin/products", () => { expect(optValPost).toEqual(undefined) // Validate that the option still exists in the DB with deleted_at - const optValDeleted = await dbConnection.manager.findOne(ProductOptionValue, { - variant_id: "test-variant_2", - }, { - withDeleted: true, - }) + const optValDeleted = await dbConnection.manager.findOne( + ProductOptionValue, + { + variant_id: "test-variant_2", + }, + { + withDeleted: true, + } + ) - expect(optValDeleted).toEqual(expect.objectContaining({ - deleted_at: expect.any(Date), - variant_id: "test-variant_2", - })) + expect(optValDeleted).toEqual( + expect.objectContaining({ + deleted_at: expect.any(Date), + variant_id: "test-variant_2", + }) + ) }) it("successfully deletes a product and any option value associated with one of its variants", async () => { @@ -1854,16 +1907,22 @@ describe("/admin/products", () => { expect(optValPost).toEqual(undefined) // Validate that the option still exists in the DB with deleted_at - const optValDeleted = await dbConnection.manager.findOne(ProductOptionValue, { - variant_id: "test-variant_2", - }, { - withDeleted: true, - }) + const optValDeleted = await dbConnection.manager.findOne( + ProductOptionValue, + { + variant_id: "test-variant_2", + }, + { + withDeleted: true, + } + ) - expect(optValDeleted).toEqual(expect.objectContaining({ - deleted_at: expect.any(Date), - variant_id: "test-variant_2", - })) + expect(optValDeleted).toEqual( + expect.objectContaining({ + deleted_at: expect.any(Date), + variant_id: "test-variant_2", + }) + ) }) it("successfully deletes a product variant and its associated prices", async () => { @@ -1889,26 +1948,29 @@ describe("/admin/products", () => { expect(response.status).toEqual(200) // Validate that the price was deleted - const pricePost = await dbConnection.manager.findOne( - MoneyAmount, - { - id: "test-price", - } - ) + const pricePost = await dbConnection.manager.findOne(MoneyAmount, { + id: "test-price", + }) expect(pricePost).toEqual(undefined) // Validate that the price still exists in the DB with deleted_at - const optValDeleted = await dbConnection.manager.findOne(MoneyAmount, { - id: 
"test-price", - }, { - withDeleted: true, - }) + const optValDeleted = await dbConnection.manager.findOne( + MoneyAmount, + { + id: "test-price", + }, + { + withDeleted: true, + } + ) - expect(optValDeleted).toEqual(expect.objectContaining({ - deleted_at: expect.any(Date), - id: "test-price", - })) + expect(optValDeleted).toEqual( + expect.objectContaining({ + deleted_at: expect.any(Date), + id: "test-price", + }) + ) }) it("successfully deletes a product and any prices associated with one of its variants", async () => { @@ -1938,16 +2000,22 @@ describe("/admin/products", () => { expect(pricePost).toEqual(undefined) // Validate that the price still exists in the DB with deleted_at - const optValDeleted = await dbConnection.manager.findOne(MoneyAmount, { - id: "test-price", - }, { - withDeleted: true, - }) + const optValDeleted = await dbConnection.manager.findOne( + MoneyAmount, + { + id: "test-price", + }, + { + withDeleted: true, + } + ) - expect(optValDeleted).toEqual(expect.objectContaining({ - deleted_at: expect.any(Date), - id: "test-price", - })) + expect(optValDeleted).toEqual( + expect.objectContaining({ + deleted_at: expect.any(Date), + id: "test-price", + }) + ) }) it("successfully creates product with soft-deleted product handle and deletes it again", async () => { @@ -2182,4 +2250,45 @@ describe("/admin/products", () => { ) }) }) + + describe("GET /admin/products/tag-usage", () => { + beforeEach(async () => { + try { + await productSeeder(dbConnection) + await adminSeeder(dbConnection) + } catch (err) { + console.log(err) + throw err + } + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("successfully gets the tags usage", async () => { + const api = useApi() + + const res = await api + .get("/admin/products/tag-usage", { + headers: { + Authorization: "Bearer test_token", + }, + }) + .catch((err) => { + console.log(err) + }) + + expect(res.status).toEqual(200) + expect(res.data.tags.length).toEqual(3) + expect(res.data.tags).toEqual( + expect.arrayContaining([ + { id: "tag1", usage_count: "2", value: "123" }, + { id: "tag3", usage_count: "2", value: "1235" }, + { id: "tag4", usage_count: "1", value: "1234" }, + ]) + ) + }) + }) }) diff --git a/integration-tests/api/__tests__/batch-jobs/__snapshots__/api.js.snap b/integration-tests/api/__tests__/batch-jobs/__snapshots__/api.js.snap index 237dd3c33f..78911629c3 100644 --- a/integration-tests/api/__tests__/batch-jobs/__snapshots__/api.js.snap +++ b/integration-tests/api/__tests__/batch-jobs/__snapshots__/api.js.snap @@ -1,39 +1,142 @@ // Jest Snapshot v1, https://goo.gl/fbAQLP -exports[`/admin/batch GET /admin/batch lists batch jobs created by the user 1`] = ` +exports[`/admin/batch-jobs GET /admin/batch-jobs lists batch jobs created by the user 1`] = ` Object { "batch_jobs": Array [ Object { + "canceled_at": null, + "completed_at": "2022-06-27T22:00:00.000Z", + "confirmed_at": null, "context": Object {}, "created_at": Any, "created_by": "admin_user", "deleted_at": null, + "dry_run": false, + "failed_at": null, + "id": "job_5", + "pre_processed_at": null, + "processing_at": null, + "result": null, + "status": "completed", + "type": "product-export", + "updated_at": Any, + }, + Object { + "canceled_at": null, + "completed_at": null, + "confirmed_at": null, + "context": Object {}, + "created_at": Any, + "created_by": "admin_user", + "deleted_at": null, + "dry_run": false, + "failed_at": null, "id": "job_3", + "pre_processed_at": null, + "processing_at": null, "result": null, "status": "created", 
- "type": "batch_2", + "type": "product-export", "updated_at": Any, }, Object { + "canceled_at": null, + "completed_at": null, + "confirmed_at": null, "context": Object {}, "created_at": Any, "created_by": "admin_user", "deleted_at": null, + "dry_run": false, + "failed_at": null, "id": "job_2", + "pre_processed_at": null, + "processing_at": null, "result": null, "status": "created", - "type": "batch_2", + "type": "product-export", "updated_at": Any, }, Object { + "canceled_at": null, + "completed_at": null, + "confirmed_at": null, "context": Object {}, "created_at": Any, "created_by": "admin_user", "deleted_at": null, + "dry_run": false, + "failed_at": null, "id": "job_1", + "pre_processed_at": null, + "processing_at": null, "result": null, "status": "created", - "type": "batch_1", + "type": "product-export", + "updated_at": Any, + }, + ], + "count": 4, + "limit": 10, + "offset": 0, +} +`; + +exports[`/admin/batch-jobs GET /admin/batch-jobs lists batch jobs created by the user and where completed_at is null 1`] = ` +Object { + "batch_jobs": Array [ + Object { + "canceled_at": null, + "completed_at": null, + "confirmed_at": null, + "context": Object {}, + "created_at": Any, + "created_by": "admin_user", + "deleted_at": null, + "dry_run": false, + "failed_at": null, + "id": "job_3", + "pre_processed_at": null, + "processing_at": null, + "result": null, + "status": "created", + "type": "product-export", + "updated_at": Any, + }, + Object { + "canceled_at": null, + "completed_at": null, + "confirmed_at": null, + "context": Object {}, + "created_at": Any, + "created_by": "admin_user", + "deleted_at": null, + "dry_run": false, + "failed_at": null, + "id": "job_2", + "pre_processed_at": null, + "processing_at": null, + "result": null, + "status": "created", + "type": "product-export", + "updated_at": Any, + }, + Object { + "canceled_at": null, + "completed_at": null, + "confirmed_at": null, + "context": Object {}, + "created_at": Any, + "created_by": "admin_user", + "deleted_at": null, + "dry_run": false, + "failed_at": null, + "id": "job_1", + "pre_processed_at": null, + "processing_at": null, + "result": null, + "status": "created", + "type": "product-export", "updated_at": Any, }, ], @@ -42,3 +145,64 @@ Object { "offset": 0, } `; + +exports[`/admin/batch-jobs POST /admin/batch-jobs/ Creates a batch job 1`] = ` +Object { + "canceled_at": null, + "completed_at": null, + "confirmed_at": null, + "context": Object { + "list_config": Object { + "order": Object { + "created_at": "DESC", + }, + "relations": Array [ + "variants", + "variants.prices", + "variants.options", + "images", + "options", + "tags", + "type", + "collection", + "variants.prices.region", + ], + "skip": 0, + "take": 50, + }, + }, + "created_at": Any, + "created_by": "admin_user", + "deleted_at": null, + "dry_run": false, + "failed_at": null, + "id": Any, + "pre_processed_at": null, + "processing_at": null, + "result": null, + "status": "created", + "type": "product-export", + "updated_at": Any, +} +`; + +exports[`/admin/batch-jobs POST /admin/batch-jobs/:id/cancel Cancels batch job created by the user 1`] = ` +Object { + "canceled_at": Any, + "completed_at": null, + "confirmed_at": null, + "context": Object {}, + "created_at": Any, + "created_by": "admin_user", + "deleted_at": null, + "dry_run": false, + "failed_at": null, + "id": "job_1", + "pre_processed_at": null, + "processing_at": null, + "result": null, + "status": "canceled", + "type": "product-export", + "updated_at": Any, +} +`; diff --git 
a/integration-tests/api/__tests__/batch-jobs/api.js b/integration-tests/api/__tests__/batch-jobs/api.js index 5ce7972194..dbf350009f 100644 --- a/integration-tests/api/__tests__/batch-jobs/api.js +++ b/integration-tests/api/__tests__/batch-jobs/api.js @@ -5,6 +5,8 @@ const { useApi } = require("../../../helpers/use-api") const { initDb, useDb } = require("../../../helpers/use-db") const adminSeeder = require("../../helpers/admin-seeder") +const userSeeder = require("../../helpers/user-seeder") + const { simpleBatchJobFactory } = require("../../factories") jest.setTimeout(50000) @@ -15,14 +17,53 @@ const adminReqConfig = { }, } -describe("/admin/batch", () => { +const setupJobDb = async (dbConnection) => { + try { + await adminSeeder(dbConnection) + await userSeeder(dbConnection) + + await simpleBatchJobFactory(dbConnection, { + id: "job_1", + type: "product-export", + created_by: "admin_user", + }) + await simpleBatchJobFactory(dbConnection, { + id: "job_2", + type: "product-export", + created_by: "admin_user", + }) + await simpleBatchJobFactory(dbConnection, { + id: "job_3", + type: "product-export", + created_by: "admin_user", + }) + await simpleBatchJobFactory(dbConnection, { + id: "job_4", + type: "product-export", + status: "awaiting_confirmation", + created_by: "member-user", + }) + await simpleBatchJobFactory(dbConnection, { + id: "job_5", + type: "product-export", + status: "completed", + completed_at: "2022-06-27T22:00:00.000Z", + created_by: "admin_user", + }) + } catch (err) { + console.log(err) + throw err + } +} + +describe("/admin/batch-jobs", () => { let medusaProcess let dbConnection beforeAll(async () => { const cwd = path.resolve(path.join(__dirname, "..", "..")) dbConnection = await initDb({ cwd }) - medusaProcess = await setupServer({ cwd, verbose: false }) + medusaProcess = await setupServer({ cwd }) }) afterAll(async () => { @@ -32,34 +73,9 @@ describe("/admin/batch", () => { medusaProcess.kill() }) - describe("GET /admin/batch", () => { + describe("GET /admin/batch-jobs", () => { beforeEach(async () => { - try { - await simpleBatchJobFactory(dbConnection, { - id: "job_1", - type: "batch_1", - created_by: "admin_user", - }) - await simpleBatchJobFactory(dbConnection, { - id: "job_2", - type: "batch_2", - created_by: "admin_user", - }) - await simpleBatchJobFactory(dbConnection, { - id: "job_3", - type: "batch_2", - created_by: "admin_user", - }) - await simpleBatchJobFactory(dbConnection, { - id: "job_4", - type: "batch_1", - created_by: "not_this_user", - }) - await adminSeeder(dbConnection) - } catch (err) { - console.log(err) - throw err - } + await setupJobDb(dbConnection) }) afterEach(async () => { @@ -69,26 +85,243 @@ describe("/admin/batch", () => { it("lists batch jobs created by the user", async () => { const api = useApi() - const response = await api.get("/admin/batch", adminReqConfig) + const response = await api.get("/admin/batch-jobs", adminReqConfig) + + expect(response.status).toEqual(200) + expect(response.data.batch_jobs.length).toEqual(4) + expect(response.data).toMatchSnapshot({ + batch_jobs: [ + { + id: "job_5", + created_at: expect.any(String), + updated_at: expect.any(String), + created_by: "admin_user", + }, + { + id: "job_3", + created_at: expect.any(String), + updated_at: expect.any(String), + created_by: "admin_user", + }, + { + id: "job_2", + created_at: expect.any(String), + updated_at: expect.any(String), + created_by: "admin_user", + }, + { + id: "job_1", + created_at: expect.any(String), + updated_at: expect.any(String), + created_by: 
"admin_user", + }, + ], + }) + }) + + it("lists batch jobs created by the user and where completed_at is null ", async () => { + const api = useApi() + const response = await api.get( + "/admin/batch-jobs?completed_at=null", + adminReqConfig + ) expect(response.status).toEqual(200) expect(response.data.batch_jobs.length).toEqual(3) expect(response.data).toMatchSnapshot({ batch_jobs: [ { + id: "job_3", created_at: expect.any(String), updated_at: expect.any(String), + created_by: "admin_user", }, { + id: "job_2", created_at: expect.any(String), updated_at: expect.any(String), + created_by: "admin_user", }, { + id: "job_1", created_at: expect.any(String), updated_at: expect.any(String), + created_by: "admin_user", }, ], }) }) }) + + describe("GET /admin/batch-jobs/:id", () => { + beforeEach(async () => { + await setupJobDb(dbConnection) + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("return batch job created by the user", async () => { + const api = useApi() + const response = await api.get("/admin/batch-jobs/job_1", adminReqConfig) + + expect(response.status).toEqual(200) + expect(response.data.batch_job).toEqual( + expect.objectContaining({ + created_at: expect.any(String), + updated_at: expect.any(String), + created_by: "admin_user", + }) + ) + }) + + it("should fail on batch job created by other user", async () => { + const api = useApi() + await api.get("/admin/batch-jobs/job_4", adminReqConfig).catch((err) => { + expect(err.response.status).toEqual(400) + expect(err.response.data.type).toEqual("not_allowed") + expect(err.response.data.message).toEqual( + "Cannot access a batch job that does not belong to the logged in user" + ) + }) + }) + }) + + describe("POST /admin/batch-jobs/", () => { + beforeEach(async () => { + await setupJobDb(dbConnection) + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("Creates a batch job", async () => { + const api = useApi() + + const response = await api.post( + "/admin/batch-jobs", + { + type: "product-export", + context: {}, + }, + adminReqConfig + ) + + expect(response.status).toEqual(201) + expect(response.data.batch_job).toMatchSnapshot({ + created_by: "admin_user", + status: "created", + id: expect.any(String), + created_at: expect.any(String), + updated_at: expect.any(String), + }) + }) + }) + + describe("POST /admin/batch-jobs/:id/confirm", () => { + beforeEach(async () => { + await setupJobDb(dbConnection) + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("Fails to confirm a batch job created by a different user", async () => { + const api = useApi() + + const jobId = "job_4" + + api + .post(`/admin/batch-jobs/${jobId}/confirm`, {}, adminReqConfig) + .catch((err) => { + expect(err.response.status).toEqual(400) + expect(err.response.data.type).toEqual("not_allowed") + expect(err.response.data.message).toEqual( + "Cannot access a batch job that does not belong to the logged in user" + ) + }) + }) + }) + + describe("POST /admin/batch-jobs/:id/cancel", () => { + beforeEach(async () => { + try { + await setupJobDb(dbConnection) + await simpleBatchJobFactory(dbConnection, { + id: "job_complete", + type: "product-export", + created_by: "admin_user", + completed_at: new Date(), + }) + } catch (e) { + console.log(e) + throw e + } + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("Cancels batch job created by the user", async () => { + const api = useApi() + + const jobId = "job_1" + + const response = 
await api.post( + `/admin/batch-jobs/${jobId}/cancel`, + {}, + adminReqConfig + ) + + expect(response.status).toEqual(200) + expect(response.data.batch_job).toMatchSnapshot({ + created_at: expect.any(String), + updated_at: expect.any(String), + canceled_at: expect.any(String), + status: "canceled", + }) + }) + + it("Fails to cancel a batch job created by a different user", async () => { + expect.assertions(3) + const api = useApi() + + const jobId = "job_4" + + api + .post(`/admin/batch-jobs/${jobId}/cancel`, {}, adminReqConfig) + .catch((err) => { + expect(err.response.status).toEqual(400) + expect(err.response.data.type).toEqual("not_allowed") + expect(err.response.data.message).toEqual( + "Cannot access a batch job that does not belong to the logged in user" + ) + }) + }) + + it("Fails to cancel a batch job that is already complete", async () => { + expect.assertions(3) + const api = useApi() + + const jobId = "job_complete" + + await api + .post(`/admin/batch-jobs/${jobId}/cancel`, {}, adminReqConfig) + .catch((err) => { + expect(err.response.status).toEqual(400) + expect(err.response.data.type).toEqual("not_allowed") + expect(err.response.data.message).toEqual( + "Cannot cancel completed batch job" + ) + }) + }) + }) }) diff --git a/integration-tests/api/__tests__/batch-jobs/order/export.js b/integration-tests/api/__tests__/batch-jobs/order/export.js new file mode 100644 index 0000000000..3e4421f6cb --- /dev/null +++ b/integration-tests/api/__tests__/batch-jobs/order/export.js @@ -0,0 +1,251 @@ +const path = require("path") +const fs = require("fs/promises") +import { sep, resolve } from "path" + +const setupServer = require("../../../../helpers/setup-server") +const { useApi } = require("../../../../helpers/use-api") +const { initDb, useDb } = require("../../../../helpers/use-db") + +const adminSeeder = require("../../../helpers/admin-seeder") +const userSeeder = require("../../../helpers/user-seeder") +const orderSeeder = require("../../../helpers/order-seeder") + +const adminReqConfig = { + headers: { + Authorization: "Bearer test_token", + }, +} + +jest.setTimeout(1000000) + +describe("Batchjob with type order-export", () => { + let medusaProcess + let dbConnection + let exportFilePath = "" + let topDir = "" + + beforeAll(async () => { + const cwd = path.resolve(path.join(__dirname, "..", "..", "..")) + dbConnection = await initDb({ cwd }) + medusaProcess = await setupServer({ + cwd, + redisUrl: "redis://127.0.0.1:6379", + uploadDir: __dirname, + verbose: false, + }) + }) + + afterAll(async () => { + if (topDir !== "") { + await fs.rm(resolve(__dirname, topDir), { recursive: true }) + } + + const db = useDb() + await db.shutdown() + + medusaProcess.kill() + }) + + beforeEach(async () => { + try { + await adminSeeder(dbConnection) + await userSeeder(dbConnection) + await orderSeeder(dbConnection) + } catch (e) { + console.log(e) + throw e + } + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + + const isFileExists = (await fs.stat(exportFilePath))?.isFile() + + if (isFileExists) { + const [, relativeRoot] = exportFilePath.replace(__dirname, "").split(sep) + + if ((await fs.stat(resolve(__dirname, relativeRoot)))?.isDirectory()) { + topDir = relativeRoot + } + + await fs.unlink(exportFilePath) + } + }) + + it("Should export a file containing all orders", async () => { + jest.setTimeout(1000000) + const api = useApi() + + const batchPayload = { + type: "order-export", + context: {}, + } + + const batchJobRes = await api.post( + "/admin/batch-jobs", + 
batchPayload, + adminReqConfig + ) + const batchJobId = batchJobRes.data.batch_job.id + + expect(batchJobId).toBeTruthy() + + // Pull to check the status until it is completed + let batchJob + let shouldContinuePulling = true + + while (shouldContinuePulling) { + const res = await api.get( + `/admin/batch-jobs/${batchJobId}`, + adminReqConfig + ) + + batchJob = res.data.batch_job + shouldContinuePulling = !( + batchJob.status === "completed" || batchJob.status === "failed" + ) + + if (shouldContinuePulling) { + await new Promise((resolve, _) => { + setTimeout(resolve, 1000) + }) + } + } + + expect(batchJob.status).toBe("completed") + + expect(batchJob.status).toBe("completed") + + exportFilePath = path.resolve(__dirname, batchJob.result.file_key) + const isFileExists = (await fs.stat(exportFilePath)).isFile() + + expect(isFileExists).toBeTruthy() + + const fileSize = (await fs.stat(exportFilePath)).size + expect(batchJob.result?.file_size).toBe(fileSize) + + const data = (await fs.readFile(exportFilePath)).toString() + const [, ...lines] = data.split("\r\n").filter((l) => l) + + expect(lines.length).toBe(6) + + const csvLine = lines[0].split(";") + + expect(csvLine[0]).toBe("discount-order") + expect(csvLine[1]).toBe("6") + expect(csvLine[14]).toBe("fulfilled") + expect(csvLine[15]).toBe("captured") + expect(csvLine[16]).toBe("8000") + }) + + it("Should export a file containing a limited number of orders", async () => { + jest.setTimeout(1000000) + const api = useApi() + + const batchPayload = { + type: "order-export", + context: { batch_size: 3 }, + } + + const batchJobRes = await api.post( + "/admin/batch-jobs", + batchPayload, + adminReqConfig + ) + const batchJobId = batchJobRes.data.batch_job.id + + expect(batchJobId).toBeTruthy() + + // Pull to check the status until it is completed + let batchJob + let shouldContinuePulling = true + + while (shouldContinuePulling) { + const res = await api.get( + `/admin/batch-jobs/${batchJobId}`, + adminReqConfig + ) + + batchJob = res.data.batch_job + shouldContinuePulling = !( + batchJob.status === "completed" || batchJob.status === "failed" + ) + + if (shouldContinuePulling) { + await new Promise((resolve, _) => { + setTimeout(resolve, 1000) + }) + } + } + + exportFilePath = path.resolve(__dirname, batchJob.result.file_key) + const isFileExists = (await fs.stat(exportFilePath)).isFile() + + expect(isFileExists).toBeTruthy() + + const data = (await fs.readFile(exportFilePath)).toString() + const [, ...lines] = data.split("\r\n").filter((l) => l) + + expect(lines.length).toBe(3) + }) + + it("Should export a file with orders from a single customer", async () => { + jest.setTimeout(1000000) + const api = useApi() + + const batchPayload = { + type: "order-export", + context: { filterable_fields: { email: "test@email.com" } }, + } + + const batchJobRes = await api.post( + "/admin/batch-jobs", + batchPayload, + adminReqConfig + ) + const batchJobId = batchJobRes.data.batch_job.id + + expect(batchJobId).toBeTruthy() + + // Pull to check the status until it is completed + let batchJob + let shouldContinuePulling = true + + while (shouldContinuePulling) { + const res = await api.get( + `/admin/batch-jobs/${batchJobId}`, + adminReqConfig + ) + + batchJob = res.data.batch_job + shouldContinuePulling = !( + batchJob.status === "completed" || batchJob.status === "failed" + ) + + if (shouldContinuePulling) { + await new Promise((resolve, _) => { + setTimeout(resolve, 1000) + }) + } + } + + expect(batchJob.status).toBe("completed") + + exportFilePath = 
path.resolve(__dirname, batchJob.result.file_key) + const isFileExists = (await fs.stat(exportFilePath)).isFile() + + expect(isFileExists).toBeTruthy() + + const data = (await fs.readFile(exportFilePath)).toString() + const [, ...lines] = data.split("\r\n").filter((l) => l) + + expect(lines.length).toBe(1) + + const csvLine = lines[0].split(";") + + expect(csvLine[0]).toBe("test-order") + expect(csvLine[6]).toBe("test@email.com") + }) +}) diff --git a/integration-tests/api/__tests__/batch-jobs/product/export.js b/integration-tests/api/__tests__/batch-jobs/product/export.js new file mode 100644 index 0000000000..8722da43c7 --- /dev/null +++ b/integration-tests/api/__tests__/batch-jobs/product/export.js @@ -0,0 +1,237 @@ +const path = require("path") +const fs = require("fs/promises") +import { sep, resolve } from "path" + +const setupServer = require("../../../../helpers/setup-server") +const { useApi } = require("../../../../helpers/use-api") +const { initDb, useDb } = require("../../../../helpers/use-db") + +const adminSeeder = require("../../../helpers/admin-seeder") +const userSeeder = require("../../../helpers/user-seeder") +const productSeeder = require("../../../helpers/product-seeder") + +const adminReqConfig = { + headers: { + Authorization: "Bearer test_token", + }, +} + +jest.setTimeout(1000000) + +describe("Batch job of product-export type", () => { + let medusaProcess + let dbConnection + let exportFilePath = "" + let topDir = "" + + beforeAll(async () => { + const cwd = path.resolve(path.join(__dirname, "..", "..", "..")) + dbConnection = await initDb({ cwd }) + medusaProcess = await setupServer({ + cwd, + redisUrl: "redis://127.0.0.1:6379", + uploadDir: __dirname, + verbose: false, + }) + }) + + afterAll(async () => { + if (topDir !== "") { + await fs.rm(resolve(__dirname, topDir), { recursive: true }) + } + + const db = useDb() + await db.shutdown() + + medusaProcess.kill() + }) + + beforeEach(async () => { + try { + await productSeeder(dbConnection) + await adminSeeder(dbConnection) + await userSeeder(dbConnection) + } catch (e) { + console.log(e) + throw e + } + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + + const isFileExists = (await fs.stat(exportFilePath))?.isFile() + + if (isFileExists) { + const [, relativeRoot] = exportFilePath.replace(__dirname, "").split(sep) + + if ((await fs.stat(resolve(__dirname, relativeRoot)))?.isDirectory()) { + topDir = relativeRoot + } + + await fs.unlink(exportFilePath) + } + }) + + it("should export a csv file containing the expected products", async () => { + jest.setTimeout(1000000) + const api = useApi() + + const productPayload = { + title: "Test export product", + description: "test-product-description", + type: { value: "test-type" }, + images: ["test-image.png", "test-image-2.png"], + collection_id: "test-collection", + tags: [{ value: "123" }, { value: "456" }], + options: [{ title: "size" }, { title: "color" }], + variants: [ + { + title: "Test variant", + inventory_quantity: 10, + sku: "test-variant-sku-product-export", + prices: [ + { + currency_code: "usd", + amount: 100, + }, + { + currency_code: "eur", + amount: 45, + }, + { + currency_code: "dkk", + amount: 30, + }, + ], + options: [{ value: "large" }, { value: "green" }], + }, + ], + } + const createProductRes = await api.post( + "/admin/products", + productPayload, + adminReqConfig + ) + const productId = createProductRes.data.product.id + const variantId = createProductRes.data.product.variants[0].id + + const batchPayload = { + type: 
"product-export", + context: { + filterable_fields: { + title: "Test export product", + }, + }, + } + const batchJobRes = await api.post( + "/admin/batch-jobs", + batchPayload, + adminReqConfig + ) + const batchJobId = batchJobRes.data.batch_job.id + + expect(batchJobId).toBeTruthy() + + // Pull to check the status until it is completed + let batchJob + let shouldContinuePulling = true + while (shouldContinuePulling) { + const res = await api.get( + `/admin/batch-jobs/${batchJobId}`, + adminReqConfig + ) + + await new Promise((resolve, _) => { + setTimeout(resolve, 1000) + }) + + batchJob = res.data.batch_job + shouldContinuePulling = !( + batchJob.status === "completed" || batchJob.status === "failed" + ) + } + + expect(batchJob.status).toBe("completed") + + exportFilePath = path.resolve(__dirname, batchJob.result.file_key) + const isFileExists = (await fs.stat(exportFilePath)).isFile() + + expect(isFileExists).toBeTruthy() + + const fileSize = (await fs.stat(exportFilePath)).size + expect(batchJob.result?.file_size).toBe(fileSize) + + const data = (await fs.readFile(exportFilePath)).toString() + const [, ...lines] = data.split("\r\n").filter((l) => l) + + expect(lines.length).toBe(1) + + const lineColumn = lines[0].split(";") + + expect(lineColumn[0]).toBe(productId) + expect(lineColumn[2]).toBe(productPayload.title) + expect(lineColumn[4]).toBe(productPayload.description) + expect(lineColumn[23]).toBe(variantId) + expect(lineColumn[24]).toBe(productPayload.variants[0].title) + expect(lineColumn[25]).toBe(productPayload.variants[0].sku) + }) + + it("should export a csv file containing a limited number of products", async () => { + jest.setTimeout(1000000) + const api = useApi() + + const batchPayload = { + type: "product-export", + context: { + batch_size: 1, + filterable_fields: { collection_id: "test-collection" }, + order: "created_at", + }, + } + + const batchJobRes = await api.post( + "/admin/batch-jobs", + batchPayload, + adminReqConfig + ) + const batchJobId = batchJobRes.data.batch_job.id + + expect(batchJobId).toBeTruthy() + + // Pull to check the status until it is completed + let batchJob + let shouldContinuePulling = true + while (shouldContinuePulling) { + const res = await api.get( + `/admin/batch-jobs/${batchJobId}`, + adminReqConfig + ) + + await new Promise((resolve, _) => { + setTimeout(resolve, 1000) + }) + + batchJob = res.data.batch_job + shouldContinuePulling = !( + batchJob.status === "completed" || batchJob.status === "failed" + ) + } + + expect(batchJob.status).toBe("completed") + + exportFilePath = path.resolve(__dirname, batchJob.result.file_key) + const isFileExists = (await fs.stat(exportFilePath)).isFile() + + expect(isFileExists).toBeTruthy() + + const data = (await fs.readFile(exportFilePath)).toString() + const [, ...lines] = data.split("\r\n").filter((l) => l) + + expect(lines.length).toBe(4) + + const csvLine = lines[0].split(";") + expect(csvLine[0]).toBe("test-product") + }) +}) diff --git a/integration-tests/api/__tests__/store/__snapshots__/product-variants.js.snap b/integration-tests/api/__tests__/store/__snapshots__/product-variants.js.snap index 24ae2723b4..7086baa4dc 100644 --- a/integration-tests/api/__tests__/store/__snapshots__/product-variants.js.snap +++ b/integration-tests/api/__tests__/store/__snapshots__/product-variants.js.snap @@ -6,6 +6,8 @@ Object { "allow_backorder": false, "barcode": "test-barcode", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": 
"test-ean", @@ -32,6 +34,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -51,6 +55,7 @@ Object { "product": Any, "product_id": "test-product", "sku": "test-sku", + "tax_rates": null, "title": "Test variant", "upc": "test-upc", "updated_at": Any, @@ -66,7 +71,9 @@ Object { "allow_backorder": false, "barcode": "test-barcode", "calculated_price": 80, + "calculated_price_incl_tax": null, "calculated_price_type": "sale", + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean", @@ -93,6 +100,8 @@ Object { ], "origin_country": null, "original_price": 100, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -137,6 +146,7 @@ Object { "product": Any, "product_id": "test-product", "sku": "test-sku", + "tax_rates": null, "title": "Test variant", "upc": "test-upc", "updated_at": Any, @@ -153,6 +163,8 @@ Object { "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": null, @@ -179,6 +191,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -198,6 +212,7 @@ Object { "product": Any, "product_id": Any, "sku": null, + "tax_rates": null, "title": "test2", "upc": null, "updated_at": Any, @@ -214,7 +229,9 @@ Object { "allow_backorder": false, "barcode": "test-barcode", "calculated_price": 80, + "calculated_price_incl_tax": 80, "calculated_price_type": "sale", + "calculated_tax": 0, "created_at": Any, "deleted_at": null, "ean": "test-ean", @@ -241,6 +258,8 @@ Object { ], "origin_country": null, "original_price": 100, + "original_price_incl_tax": 100, + "original_tax": 0, "prices": Array [ Object { "amount": 100, @@ -285,6 +304,13 @@ Object { "product": Any, "product_id": "test-product", "sku": "test-sku", + "tax_rates": Array [ + Object { + "code": "default", + "name": "default", + "rate": 0, + }, + ], "title": "Test variant", "upc": "test-upc", "updated_at": Any, @@ -301,6 +327,8 @@ Object { "allow_backorder": false, "barcode": "test-barcode", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean", @@ -327,6 +355,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -346,6 +376,7 @@ Object { "product": Any, "product_id": "test-product", "sku": "test-sku", + "tax_rates": null, "title": "Test variant", "upc": "test-upc", "updated_at": Any, diff --git a/integration-tests/api/__tests__/store/__snapshots__/products.js.snap b/integration-tests/api/__tests__/store/__snapshots__/products.js.snap index d998ac8523..d5177b53c5 100644 --- a/integration-tests/api/__tests__/store/__snapshots__/products.js.snap +++ b/integration-tests/api/__tests__/store/__snapshots__/products.js.snap @@ -131,6 +131,8 @@ Object { "allow_backorder": false, "barcode": "test-barcode", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean", @@ -157,6 +159,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ 
-200,6 +204,7 @@ Object { ], "product_id": "test-product", "sku": "test-sku", + "tax_rates": null, "title": "Test variant", "upc": "test-upc", "updated_at": Any, @@ -210,6 +215,8 @@ Object { "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean2", @@ -236,6 +243,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -279,6 +288,7 @@ Object { ], "product_id": "test-product", "sku": "test-sku2", + "tax_rates": null, "title": "Test variant rank (2)", "upc": "test-upc2", "updated_at": Any, @@ -289,6 +299,8 @@ Object { "allow_backorder": false, "barcode": "test-barcode 1", "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": "test-ean1", @@ -315,6 +327,8 @@ Object { ], "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -358,6 +372,7 @@ Object { ], "product_id": "test-product", "sku": "test-sku1", + "tax_rates": null, "title": "Test variant rank (1)", "upc": "test-upc1", "updated_at": Any, @@ -385,6 +400,8 @@ Object { "allow_backorder": false, "barcode": null, "calculated_price": null, + "calculated_price_incl_tax": null, + "calculated_tax": null, "created_at": Any, "deleted_at": null, "ean": null, @@ -399,6 +416,8 @@ Object { "mid_code": null, "origin_country": null, "original_price": null, + "original_price_incl_tax": null, + "original_tax": null, "prices": Array [ Object { "amount": 100, @@ -417,6 +436,7 @@ Object { ], "product_id": Any, "sku": null, + "tax_rates": null, "title": "test-variant", "upc": null, "updated_at": Any, diff --git a/integration-tests/api/__tests__/store/customer.js b/integration-tests/api/__tests__/store/customer.js index 3bdf830daf..a3e8fdbdd4 100644 --- a/integration-tests/api/__tests__/store/customer.js +++ b/integration-tests/api/__tests__/store/customer.js @@ -58,6 +58,21 @@ describe("/store/customers", () => { expect(response.data.customer).not.toHaveProperty("password_hash") }) + it("normalizes email", async () => { + const api = useApi() + + const response = await api.post("/store/customers", { + first_name: "James", + last_name: "Bond", + email: "James@Bond.com", + password: "test", + }) + + expect(response.status).toEqual(200) + expect(response.data.customer).not.toHaveProperty("password_hash") + expect(response.data.customer.email).toEqual("james@bond.com") + }) + it("responds 422 on duplicate", async () => { const api = useApi() diff --git a/integration-tests/api/__tests__/store/product-variants.js b/integration-tests/api/__tests__/store/product-variants.js index b11bcfbfbc..a97508f0b0 100644 --- a/integration-tests/api/__tests__/store/product-variants.js +++ b/integration-tests/api/__tests__/store/product-variants.js @@ -15,7 +15,7 @@ describe("/store/variants", () => { beforeAll(async () => { const cwd = path.resolve(path.join(__dirname, "..", "..")) dbConnection = await initDb({ cwd }) - medusaProcess = await setupServer({ cwd }) + medusaProcess = await setupServer({ cwd, verbose: false }) }) afterAll(async () => { diff --git a/integration-tests/api/__tests__/taxes/cart.js b/integration-tests/api/__tests__/taxes/cart.js new file mode 100644 index 0000000000..ea8c3765b7 --- /dev/null +++ 
b/integration-tests/api/__tests__/taxes/cart.js @@ -0,0 +1,237 @@ +const path = require("path") + +const setupServer = require("../../../helpers/setup-server") +const { useApi } = require("../../../helpers/use-api") +const { useDb, initDb } = require("../../../helpers/use-db") +const { + simpleDiscountFactory, + simpleRegionFactory, + simpleProductFactory, + simpleProductTaxRateFactory, +} = require("../../factories") + +const adminSeeder = require("../../helpers/admin-seeder") + +jest.setTimeout(30000) + +describe("Cart Totals Calculations", () => { + let medusaProcess + let dbConnection + + beforeAll(async () => { + const cwd = path.resolve(path.join(__dirname, "..", "..")) + dbConnection = await initDb({ cwd }) + medusaProcess = await setupServer({ cwd }) + }) + + afterAll(async () => { + const db = useDb() + await db.shutdown() + + medusaProcess.kill() + }) + + beforeEach(async () => { + try { + await adminSeeder(dbConnection) + } catch (err) { + console.log(err) + throw err + } + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("sets correct line item totals for a cart with item of price 100 and tax rate 10", async () => { + const api = useApi() + + const region = await simpleRegionFactory(dbConnection) + const product = await simpleProductFactory(dbConnection) + await simpleProductTaxRateFactory(dbConnection, { + product_id: product.id, + rate: { + region_id: region.id, + rate: 10, + }, + }) + + // create cart + const { cart } = await api + .post("/store/carts", { + region_id: region.id, + }) + .then((res) => res.data) + + // add product to cart + const res = await api.post(`/store/carts/${cart.id}/line-items`, { + variant_id: product.variants[0].id, + quantity: 1, + }) + + expect(res.data.cart.items[0].unit_price).toEqual(100) + expect(res.data.cart.items[0].quantity).toEqual(1) + expect(res.data.cart.items[0].subtotal).toEqual(100) + expect(res.data.cart.items[0].tax_total).toEqual(10) + expect(res.data.cart.items[0].total).toEqual(110) + expect(res.data.cart.items[0].original_total).toEqual(110) + expect(res.data.cart.items[0].original_tax_total).toEqual(10) + expect(res.data.cart.items[0].discount_total).toEqual(0) + expect(res.data.cart.items[0].gift_card_total).toEqual(0) + }) + + it("sets correct line item totals for a cart with item of price 100; tax rate 10; discount 10", async () => { + const api = useApi() + + const region = await simpleRegionFactory(dbConnection) + const product = await simpleProductFactory(dbConnection) + const discount = await simpleDiscountFactory(dbConnection, { + regions: [region.id], + type: "percentage", + value: 10, + }) + + await simpleProductTaxRateFactory(dbConnection, { + product_id: product.id, + rate: { + region_id: region.id, + rate: 10, + }, + }) + + const { cart } = await api + .post("/store/carts", { + region_id: region.id, + }) + .then((res) => res.data) + + await api.post(`/store/carts/${cart.id}/line-items`, { + variant_id: product.variants[0].id, + quantity: 1, + }) + + const res = await api.post(`/store/carts/${cart.id}`, { + discounts: [ + { + code: discount.code, + }, + ], + }) + + expect(res.data.cart.items[0].unit_price).toEqual(100) + expect(res.data.cart.items[0].quantity).toEqual(1) + expect(res.data.cart.items[0].subtotal).toEqual(100) + expect(res.data.cart.items[0].tax_total).toEqual(9) + expect(res.data.cart.items[0].total).toEqual(99) + expect(res.data.cart.items[0].original_total).toEqual(110) + expect(res.data.cart.items[0].original_tax_total).toEqual(10) + 
expect(res.data.cart.items[0].discount_total).toEqual(10) + expect(res.data.cart.items[0].gift_card_total).toEqual(0) + }) + + it("doesn't include taxes in !automatic_taxes regions", async () => { + const api = useApi() + + const region = await simpleRegionFactory(dbConnection, { + automatic_taxes: false, + }) + const product = await simpleProductFactory(dbConnection) + const discount = await simpleDiscountFactory(dbConnection, { + regions: [region.id], + type: "percentage", + value: 10, + }) + + await simpleProductTaxRateFactory(dbConnection, { + product_id: product.id, + rate: { + region_id: region.id, + rate: 10, + }, + }) + + const { cart } = await api + .post("/store/carts", { + region_id: region.id, + }) + .then((res) => res.data) + + await api.post(`/store/carts/${cart.id}/line-items`, { + variant_id: product.variants[0].id, + quantity: 1, + }) + + const res = await api.post(`/store/carts/${cart.id}`, { + discounts: [ + { + code: discount.code, + }, + ], + }) + + expect(res.data.cart.items[0].unit_price).toEqual(100) + expect(res.data.cart.items[0].quantity).toEqual(1) + expect(res.data.cart.items[0].subtotal).toEqual(100) + expect(res.data.cart.items[0].tax_total).toEqual(0) + expect(res.data.cart.items[0].total).toEqual(90) + expect(res.data.cart.items[0].original_total).toEqual(100) + expect(res.data.cart.items[0].original_tax_total).toEqual(0) + expect(res.data.cart.items[0].discount_total).toEqual(10) + expect(res.data.cart.items[0].gift_card_total).toEqual(0) + }) + + it("includes taxes in !automatic_taxes regions when forced", async () => { + const api = useApi() + + const region = await simpleRegionFactory(dbConnection, { + automatic_taxes: false, + }) + const product = await simpleProductFactory(dbConnection) + const discount = await simpleDiscountFactory(dbConnection, { + regions: [region.id], + type: "percentage", + value: 10, + }) + + await simpleProductTaxRateFactory(dbConnection, { + product_id: product.id, + rate: { + region_id: region.id, + rate: 10, + }, + }) + + const { cart } = await api + .post("/store/carts", { + region_id: region.id, + }) + .then((res) => res.data) + + await api.post(`/store/carts/${cart.id}/line-items`, { + variant_id: product.variants[0].id, + quantity: 1, + }) + + await api.post(`/store/carts/${cart.id}`, { + discounts: [ + { + code: discount.code, + }, + ], + }) + + const res = await api.post(`/store/carts/${cart.id}/taxes`) + + expect(res.data.cart.items[0].unit_price).toEqual(100) + expect(res.data.cart.items[0].quantity).toEqual(1) + expect(res.data.cart.items[0].subtotal).toEqual(100) + expect(res.data.cart.items[0].tax_total).toEqual(9) + expect(res.data.cart.items[0].total).toEqual(99) + expect(res.data.cart.items[0].original_total).toEqual(110) + expect(res.data.cart.items[0].original_tax_total).toEqual(10) + expect(res.data.cart.items[0].discount_total).toEqual(10) + expect(res.data.cart.items[0].gift_card_total).toEqual(0) + }) +}) diff --git a/integration-tests/api/__tests__/taxes/shipping-options.js b/integration-tests/api/__tests__/taxes/shipping-options.js new file mode 100644 index 0000000000..7f91216db8 --- /dev/null +++ b/integration-tests/api/__tests__/taxes/shipping-options.js @@ -0,0 +1,109 @@ +const path = require("path") + +const setupServer = require("../../../helpers/setup-server") +const { useApi } = require("../../../helpers/use-api") +const { useDb, initDb } = require("../../../helpers/use-db") +const { + simpleRegionFactory, + simpleProductFactory, + simpleShippingTaxRateFactory, + simpleShippingOptionFactory, +} 
= require("../../factories") + +const adminSeeder = require("../../helpers/admin-seeder") + +jest.setTimeout(30000) + +describe("Shipping Options Totals Calculations", () => { + let medusaProcess + let dbConnection + + beforeAll(async () => { + const cwd = path.resolve(path.join(__dirname, "..", "..")) + dbConnection = await initDb({ cwd }) + medusaProcess = await setupServer({ cwd }) + }) + + afterAll(async () => { + const db = useDb() + await db.shutdown() + + medusaProcess.kill() + }) + + beforeEach(async () => { + try { + await adminSeeder(dbConnection) + } catch (err) { + console.log(err) + throw err + } + }) + + afterEach(async () => { + const db = useDb() + await db.teardown() + }) + + it("admin gets correct shipping prices", async () => { + const api = useApi() + + const region = await simpleRegionFactory(dbConnection, { + tax_rate: 25, + }) + const so = await simpleShippingOptionFactory(dbConnection, { + region_id: region.id, + price: 100, + }) + await simpleShippingTaxRateFactory(dbConnection, { + shipping_option_id: so.id, + rate: { + region_id: region.id, + rate: 10, + }, + }) + + const res = await api.get(`/admin/shipping-options`, { + headers: { + Authorization: `Bearer test_token`, + }, + }) + + expect(res.data.shipping_options).toEqual([ + expect.objectContaining({ + id: so.id, + amount: 100, + price_incl_tax: 110, + }), + ]) + }) + + it("gets correct shipping prices", async () => { + const api = useApi() + + const region = await simpleRegionFactory(dbConnection, { + tax_rate: 25, + }) + const so = await simpleShippingOptionFactory(dbConnection, { + region_id: region.id, + price: 100, + }) + await simpleShippingTaxRateFactory(dbConnection, { + shipping_option_id: so.id, + rate: { + region_id: region.id, + rate: 10, + }, + }) + + const res = await api.get(`/store/shipping-options?region_id=${region.id}`) + + expect(res.data.shipping_options).toEqual([ + expect.objectContaining({ + id: so.id, + amount: 100, + price_incl_tax: 110, + }), + ]) + }) +}) diff --git a/integration-tests/api/factories/index.ts b/integration-tests/api/factories/index.ts index 42621df157..56b0298a4f 100644 --- a/integration-tests/api/factories/index.ts +++ b/integration-tests/api/factories/index.ts @@ -1,5 +1,6 @@ export * from "./simple-payment-factory" export * from "./simple-batch-job-factory" +export * from "./simple-discount-factory" export * from "./simple-order-factory" export * from "./simple-cart-factory" export * from "./simple-region-factory" @@ -13,3 +14,4 @@ export * from "./simple-shipping-option-factory" export * from "./simple-shipping-method-factory" export * from "./simple-product-type-tax-rate-factory" export * from "./simple-price-list-factory" +export * from "./simple-batch-job-factory" diff --git a/integration-tests/api/factories/simple-batch-job-factory.ts b/integration-tests/api/factories/simple-batch-job-factory.ts index 837137982c..d4a61647d7 100644 --- a/integration-tests/api/factories/simple-batch-job-factory.ts +++ b/integration-tests/api/factories/simple-batch-job-factory.ts @@ -7,6 +7,8 @@ export type BatchJobFactoryData = { status?: BatchJobStatus created_by?: string context?: Record + awaiting_confirmation_at?: Date | string + completed_at?: Date | string } export const simpleBatchJobFactory = async ( @@ -15,13 +17,14 @@ export const simpleBatchJobFactory = async ( ): Promise => { const manager = connection.manager - const job = manager.create(BatchJob, { + const job = manager.create(BatchJob, { id: data.id, status: data.status ?? 
BatchJobStatus.CREATED, + completed_at: data.completed_at ?? null, type: data.type ?? "test-job", created_by: data.created_by ?? null, context: data.context ?? {}, }) - return await manager.save(job) + return await manager.save(job) } diff --git a/integration-tests/api/helpers/draft-order-seeder.js b/integration-tests/api/helpers/draft-order-seeder.js index 19b63b0d7c..e865f51eb4 100644 --- a/integration-tests/api/helpers/draft-order-seeder.js +++ b/integration-tests/api/helpers/draft-order-seeder.js @@ -119,7 +119,15 @@ module.exports = async (connection, data = {}) => { type: "percentage", }) - const d = manager.create(Discount, { + await manager.insert(DiscountRule, { + id: "free-shipping-rule", + description: "Free shipping rule", + type: "free_shipping", + value: 100, + allocation: "total", + }) + + const testDiscount = manager.create(Discount, { id: "test-discount", code: "TEST", is_dynamic: false, @@ -127,7 +135,15 @@ module.exports = async (connection, data = {}) => { rule_id: "discount_rule_id", }) - d.regions = [ + const freeShippingDiscount = manager.create(Discount, { + id: "free-shipping-discount", + code: "free-shipping", + is_dynamic: false, + is_disabled: false, + rule_id: "free-shipping-rule", + }) + + testDiscount.regions = [ { id: "test-region", name: "Test Region", @@ -136,7 +152,17 @@ module.exports = async (connection, data = {}) => { }, ] - await manager.save(d) + freeShippingDiscount.regions = [ + { + id: "test-region", + name: "Test Region", + currency_code: "usd", + tax_rate: 0, + }, + ] + + await manager.save(testDiscount) + await manager.save(freeShippingDiscount) await manager.query( `UPDATE "country" SET region_id='test-region' WHERE iso_2 = 'us'` diff --git a/integration-tests/api/helpers/product-seeder.js b/integration-tests/api/helpers/product-seeder.js index a9ce2cb21f..d66a0c862b 100644 --- a/integration-tests/api/helpers/product-seeder.js +++ b/integration-tests/api/helpers/product-seeder.js @@ -50,14 +50,14 @@ module.exports = async (connection, data = {}) => { const tag3 = await manager.create(ProductTag, { id: "tag3", - value: "123", + value: "1235", }) await manager.save(tag3) const tag4 = await manager.create(ProductTag, { id: "tag4", - value: "123", + value: "1234", }) await manager.save(tag4) diff --git a/integration-tests/api/medusa-config.js b/integration-tests/api/medusa-config.js index 05a0bc3bce..5d076946b6 100644 --- a/integration-tests/api/medusa-config.js +++ b/integration-tests/api/medusa-config.js @@ -5,10 +5,10 @@ const workerId = parseInt(process.env.JEST_WORKER_ID || "1") module.exports = { plugins: [], projectConfig: { - // redis_url: REDIS_URL, + redis_url: process.env.REDIS_URL, database_url: `postgres://${DB_USERNAME}:${DB_PASSWORD}@localhost/medusa-integration-${workerId}`, database_type: "postgres", - jwt_secret: 'test', - cookie_secret: 'test' + jwt_secret: "test", + cookie_secret: "test", }, } diff --git a/integration-tests/api/package.json b/integration-tests/api/package.json index b33b3077b5..4e6408e364 100644 --- a/integration-tests/api/package.json +++ b/integration-tests/api/package.json @@ -8,16 +8,16 @@ "build": "babel src -d dist --extensions \".ts,.js\"" }, "dependencies": { - "@medusajs/medusa": "1.3.0-dev-1652704115624", + "@medusajs/medusa": "1.3.2-dev-1655728455189", "faker": "^5.5.3", - "medusa-interfaces": "1.3.0-dev-1652704115624", + "medusa-interfaces": "1.3.0-dev-1655728455189", "typeorm": "^0.2.31" }, "devDependencies": { "@babel/cli": "^7.12.10", "@babel/core": "^7.12.10", "@babel/node": "^7.12.10", - 
"babel-preset-medusa-package": "1.1.19-dev-1652704115624", + "babel-preset-medusa-package": "1.1.19-dev-1655728455189", "jest": "^26.6.3" } } diff --git a/integration-tests/api/src/services/local-file-service.js b/integration-tests/api/src/services/local-file-service.js new file mode 100644 index 0000000000..948e507284 --- /dev/null +++ b/integration-tests/api/src/services/local-file-service.js @@ -0,0 +1,69 @@ +import { AbstractFileService } from "@medusajs/medusa" +import stream from "stream" +import * as fs from "fs" +import * as path from "path" + +export default class LocalFileService extends AbstractFileService { + // eslint-disable-next-line no-empty-pattern + constructor({}, options) { + super({}) + this.upload_dir_ = + process.env.UPLOAD_DIR ?? options.upload_dir ?? "uploads/images" + + if (!fs.existsSync(this.upload_dir_)) { + fs.mkdirSync(this.upload_dir_) + } + } + + async upload(file) { + const uploadPath = path.join( + this.upload_dir_, + path.dirname(file.originalname) + ) + + if (!fs.existsSync(uploadPath)) { + fs.mkdirSync(uploadPath, { recursive: true }) + } + + const filePath = path.resolve(this.upload_dir_, file.originalname) + fs.writeFile(filePath, "", (error) => { + if (error) { + throw error + } + }) + return { url: filePath } + } + + async delete({ name }) { + return new Promise((resolve, _) => { + const path = resolve(this.upload_dir_, name) + fs.unlink(path, (err) => { + if (err) { + throw err + } + + resolve("file unlinked") + }) + }) + } + + async getUploadStreamDescriptor({ name, ext }) { + const fileKey = `${name}-${Date.now()}.${ext}` + const filePath = path.resolve(this.upload_dir_, fileKey) + + const isFileExists = fs.existsSync(filePath) + if (!isFileExists) { + await this.upload({ originalname: fileKey }) + } + + const pass = new stream.PassThrough() + pass.pipe(fs.createWriteStream(filePath)) + + return { + writeStream: pass, + promise: Promise.resolve(), + url: `${this.upload_dir_}/${fileKey}`, + fileKey, + } + } +} diff --git a/integration-tests/api/yarn.lock b/integration-tests/api/yarn.lock index 9222f91511..122b5d8b06 100644 --- a/integration-tests/api/yarn.lock +++ b/integration-tests/api/yarn.lock @@ -1327,10 +1327,10 @@ semver "^7.3.5" tar "^6.1.11" -"@medusajs/medusa-cli@1.3.0-dev-1652704115624": - version "1.3.0-dev-1652704115624" - resolved "http://localhost:4873/@medusajs%2fmedusa-cli/-/medusa-cli-1.3.0-dev-1652704115624.tgz#9841fcc6123cd9c72d544d48316d66cf49ad8dc0" - integrity sha512-Lhk6pdvgv4UrLLUY/aYuty3TKfgDlvSUfmG4cTgZXbLehnRSbLelPcX+YKCtiM4d0NYfgF364H7p0NAMuCkBIg== +"@medusajs/medusa-cli@1.3.0-dev-1655728455189": + version "1.3.0-dev-1655728455189" + resolved "http://localhost:4873/@medusajs%2fmedusa-cli/-/medusa-cli-1.3.0-dev-1655728455189.tgz#1d6bf606fbd96167faf0824d221646f664e06d66" + integrity sha512-wMDVBN6X6cG6Ni/0/H5K54Hs3RmYLiGvI81VqWQtisjia0T98Hf8MuhxlJ4kS3ny34r5M7Qjs+KCoiospgzbqA== dependencies: "@babel/polyfill" "^7.8.7" "@babel/runtime" "^7.9.6" @@ -1348,8 +1348,8 @@ is-valid-path "^0.1.1" joi-objectid "^3.0.1" meant "^1.0.1" - medusa-core-utils "1.1.31-dev-1652704115624" - medusa-telemetry "0.0.11-dev-1652704115624" + medusa-core-utils "1.1.31-dev-1655728455189" + medusa-telemetry "0.0.11-dev-1655728455189" netrc-parser "^3.1.6" open "^8.0.6" ora "^5.4.1" @@ -1363,13 +1363,13 @@ winston "^3.3.3" yargs "^15.3.1" -"@medusajs/medusa@1.3.0-dev-1652704115624": - version "1.3.0-dev-1652704115624" - resolved "http://localhost:4873/@medusajs%2fmedusa/-/medusa-1.3.0-dev-1652704115624.tgz#040eede718d0eec6b01b4471e9af8495314c3ff0" - 
integrity sha512-x2Wg7lP5A25NMENqcZoC00O46Y3ojLaSnxa9L6E3u375nYWa99uxyT8ckXhklVqzhM3iBtReb/20H4r2+niwmw== +"@medusajs/medusa@1.3.2-dev-1655728455189": + version "1.3.2-dev-1655728455189" + resolved "http://localhost:4873/@medusajs%2fmedusa/-/medusa-1.3.2-dev-1655728455189.tgz#c3c189fa7aebf94117349067acc0b0a0bc537a28" + integrity sha512-oYMihSFpQKrAru2vRnCU9q6A6j6Zgq0/6153m+vibhrbuvW8NZUJSoRjEEA925c8Yprr4xWwFhYghB0jUjIAPA== dependencies: "@hapi/joi" "^16.1.8" - "@medusajs/medusa-cli" "1.3.0-dev-1652704115624" + "@medusajs/medusa-cli" "1.3.0-dev-1655728455189" "@types/lodash" "^4.14.168" awilix "^4.2.3" body-parser "^1.19.0" @@ -1392,10 +1392,12 @@ joi "^17.3.0" joi-objectid "^3.0.1" jsonwebtoken "^8.5.1" - medusa-core-utils "1.1.31-dev-1652704115624" - medusa-test-utils "1.1.37-dev-1652704115624" + medusa-core-utils "1.1.31-dev-1655728455189" + medusa-test-utils "1.1.37-dev-1655728455189" morgan "^1.9.1" multer "^1.4.2" + node-schedule "^2.1.0" + papaparse "^5.3.2" passport "^0.4.0" passport-http-bearer "^1.0.1" passport-jwt "^4.0.0" @@ -2039,10 +2041,10 @@ babel-preset-jest@^26.6.2: babel-plugin-jest-hoist "^26.6.2" babel-preset-current-node-syntax "^1.0.0" -babel-preset-medusa-package@1.1.19-dev-1652704115624: - version "1.1.19-dev-1652704115624" - resolved "http://localhost:4873/babel-preset-medusa-package/-/babel-preset-medusa-package-1.1.19-dev-1652704115624.tgz#fa584e39e7c0a1b808af25953f81653252225be7" - integrity sha512-Nv8Si592nO+UZmMQuOGCs1pFs5ShIsCKclbqBybahjGZIPPD6bVNaz9dujSaZnlxvA9a3gn4dvuh3H8IRitUug== +babel-preset-medusa-package@1.1.19-dev-1655728455189: + version "1.1.19-dev-1655728455189" + resolved "http://localhost:4873/babel-preset-medusa-package/-/babel-preset-medusa-package-1.1.19-dev-1655728455189.tgz#a7884d6869c9ac7adb23c5789d6dc858614381c7" + integrity sha512-i8JKgbu59S+WU58tRV5iJtJ9UnJnpOPwEtU9b7WVz9X5LLDeUk6Mmhcb6XvTuYCfdC8bXJ/Hk8NlqETIZw1lPQ== dependencies: "@babel/plugin-proposal-class-properties" "^7.12.1" "@babel/plugin-proposal-decorators" "^7.12.1" @@ -2768,6 +2770,14 @@ cron-parser@^2.13.0: is-nan "^1.3.0" moment-timezone "^0.5.31" +cron-parser@^3.5.0: + version "3.5.0" + resolved "http://localhost:4873/cron-parser/-/cron-parser-3.5.0.tgz#b1a9da9514c0310aa7ef99c2f3f1d0f8c235257c" + integrity sha512-wyVZtbRs6qDfFd8ap457w3XVntdvqcwBGxBoTvJQH9KGVKL/fB+h2k3C8AqiVxvUQKN1Ps/Ns46CNViOpVDhfQ== + dependencies: + is-nan "^1.3.2" + luxon "^1.26.0" + cross-spawn@^6.0.0, cross-spawn@^6.0.5: version "6.0.5" resolved "http://localhost:4873/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" @@ -4198,7 +4208,7 @@ is-lambda@^1.0.1: resolved "http://localhost:4873/is-lambda/-/is-lambda-1.0.1.tgz#3d9877899e6a53efc0160504cde15f82e6f061d5" integrity sha1-PZh3iZ5qU+/AFgUEzeFfgubwYdU= -is-nan@^1.3.0: +is-nan@^1.3.0, is-nan@^1.3.2: version "1.3.2" resolved "http://localhost:4873/is-nan/-/is-nan-1.3.2.tgz#043a54adea31748b55b6cd4e09aadafa69bd9e1d" integrity sha512-E+zBKpQ2t6MEo1VsonYmluk9NxGrbzpeeLC2xIViuO2EjU2xsXsBPwTr3Ykv9l08UYEVEdWeRZNouaZqF6RN0w== @@ -5083,6 +5093,11 @@ logform@^2.3.2, logform@^2.4.0: safe-stable-stringify "^2.3.1" triple-beam "^1.3.0" +long-timeout@0.1.1: + version "0.1.1" + resolved "http://localhost:4873/long-timeout/-/long-timeout-0.1.1.tgz#9721d788b47e0bcb5a24c2e2bee1a0da55dab514" + integrity sha512-BFRuQUqc7x2NWxfJBCyUrN8iYUYznzL9JROmRz1gZ6KlOIgmoD+njPVbb+VNn2nGMKggMsK79iUNErillsrx7w== + lower-case@^2.0.2: version "2.0.2" resolved "http://localhost:4873/lower-case/-/lower-case-2.0.2.tgz#6fa237c63dbdc4a82ca0fd882e4722dc5e634e28" @@ 
-5097,6 +5112,11 @@ lru-cache@^6.0.0: dependencies: yallist "^4.0.0" +luxon@^1.26.0: + version "1.28.0" + resolved "http://localhost:4873/luxon/-/luxon-1.28.0.tgz#e7f96daad3938c06a62de0fb027115d251251fbf" + integrity sha512-TfTiyvZhwBYM/7QdAVDh+7dBTBA29v4ik0Ce9zda3Mnf8on1S5KJI8P2jKFZ8+5C0jhmr0KwJEO/Wdpm0VeWJQ== + make-dir@^2.0.0, make-dir@^2.1.0: version "2.1.0" resolved "http://localhost:4873/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" @@ -5168,23 +5188,23 @@ media-typer@0.3.0: resolved "http://localhost:4873/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= -medusa-core-utils@1.1.31-dev-1652704115624: - version "1.1.31-dev-1652704115624" - resolved "http://localhost:4873/medusa-core-utils/-/medusa-core-utils-1.1.31-dev-1652704115624.tgz#cda24aa1a292a1bd0a770774aae9a970fc7ab963" - integrity sha512-evkWva10x6JaHBex5S8Zi9Ae/ZStG858nb18w5ITxbCHWvGYq6/VyLpo4vB7I6wuU3z6kDTMd/yyvsVXv33O2g== +medusa-core-utils@1.1.31-dev-1655728455189: + version "1.1.31-dev-1655728455189" + resolved "http://localhost:4873/medusa-core-utils/-/medusa-core-utils-1.1.31-dev-1655728455189.tgz#d1765c3aa5ff294722a44c7f5b90dacb185f03ee" + integrity sha512-hsyo22Hp5A18+qA5DhfQRvY5eAM3IpzxuuY/SVk50Pxrf+r3U4aWmL4uX0qGpQeKWv6gOLmPxt+OyJIDUPNrwg== dependencies: joi "^17.3.0" joi-objectid "^3.0.1" -medusa-interfaces@1.3.0-dev-1652704115624: - version "1.3.0-dev-1652704115624" - resolved "http://localhost:4873/medusa-interfaces/-/medusa-interfaces-1.3.0-dev-1652704115624.tgz#02dbbda562f99e7de96e7e6657a6af2a20855e36" - integrity sha512-aSm6gYWF0gPHoIswHllB/YrGwkrYRr4ZrBmWh3QSulDYnjmtyBFHKkatrC1pscAEFG/5h4d1LEENUHYrBQ4tQg== +medusa-interfaces@1.3.0-dev-1655728455189: + version "1.3.0-dev-1655728455189" + resolved "http://localhost:4873/medusa-interfaces/-/medusa-interfaces-1.3.0-dev-1655728455189.tgz#4eaa0dc41671bb24777b3630c8640124c4ca9f24" + integrity sha512-lcYVYQUg5ImtXLtR4pHHOqXi1q1kl46Gi9rgjAUWVvldnssVDnGThjgnVCFeCkDz+qMqn7lxTHoZXaDIneadlA== -medusa-telemetry@0.0.11-dev-1652704115624: - version "0.0.11-dev-1652704115624" - resolved "http://localhost:4873/medusa-telemetry/-/medusa-telemetry-0.0.11-dev-1652704115624.tgz#4de0f4a66a9f8bb4f61d8b7c24aa9cbd719039a7" - integrity sha512-sfCzUM4mlXpBJVaqAycKAmeEWalzyg18+gUo+As/mxoEUIUR5XtpZ5Bdx484/2GUvRP8OdOYC1uPpbuTZbLXcw== +medusa-telemetry@0.0.11-dev-1655728455189: + version "0.0.11-dev-1655728455189" + resolved "http://localhost:4873/medusa-telemetry/-/medusa-telemetry-0.0.11-dev-1655728455189.tgz#552ba1e2038641321f152b5ba7df0d88f29c26bf" + integrity sha512-MCTDiUg62y2xmRAFeOZHdaKXd2VhVw/9JE8Ewm9IY8QWSkumQOMKVPUAfdgra8EYsp3TD7LixWRlPB2TTn7DKg== dependencies: axios "^0.21.1" axios-retry "^3.1.9" @@ -5196,13 +5216,13 @@ medusa-telemetry@0.0.11-dev-1652704115624: remove-trailing-slash "^0.1.1" uuid "^8.3.2" -medusa-test-utils@1.1.37-dev-1652704115624: - version "1.1.37-dev-1652704115624" - resolved "http://localhost:4873/medusa-test-utils/-/medusa-test-utils-1.1.37-dev-1652704115624.tgz#7118753a4afd1c6ed6f1d1ceb75fdf9bfb65afa3" - integrity sha512-OnQEA/1jj4jnQGukMtbcqg2HWlKve0BqzeS8T5O2VDL5zldFG9SXFF3LupC51nh96u97dcKZk5A6JL8oHwonog== +medusa-test-utils@1.1.37-dev-1655728455189: + version "1.1.37-dev-1655728455189" + resolved "http://localhost:4873/medusa-test-utils/-/medusa-test-utils-1.1.37-dev-1655728455189.tgz#d6283c0075a69dee988d437f5aedfbba334aa847" + integrity sha512-SHkPjpiC6A37Duat9HrWPz+tBbZL50zDEXr/sjDiF0N8cEYNIxVRz2Y2/kbbKegSG3UTcckk5VUtxP17LiQKBw== dependencies: 
"@babel/plugin-transform-classes" "^7.9.5" - medusa-core-utils "1.1.31-dev-1652704115624" + medusa-core-utils "1.1.31-dev-1655728455189" randomatic "^3.1.1" merge-descriptors@1.0.1: @@ -5544,6 +5564,15 @@ node-releases@^2.0.3: resolved "http://localhost:4873/node-releases/-/node-releases-2.0.4.tgz#f38252370c43854dc48aa431c766c6c398f40476" integrity sha512-gbMzqQtTtDz/00jQzZ21PQzdI9PyLYqUSvD0p3naOhX4odFji0ZxYdnVwPTxmSwkmxhcFImpozceidSG+AgoPQ== +node-schedule@^2.1.0: + version "2.1.0" + resolved "http://localhost:4873/node-schedule/-/node-schedule-2.1.0.tgz#068ae38d7351c330616f7fe7cdb05036f977cbaf" + integrity sha512-nl4JTiZ7ZQDc97MmpTq9BQjYhq7gOtoh7SiPH069gBFBj0PzD8HI7zyFs6rzqL8Y5tTiEEYLxgtbx034YPrbyQ== + dependencies: + cron-parser "^3.5.0" + long-timeout "0.1.1" + sorted-array-functions "^1.3.0" + nopt@^5.0.0: version "5.0.0" resolved "http://localhost:4873/nopt/-/nopt-5.0.0.tgz#530942bb58a512fccafe53fe210f13a25355dc88" @@ -5815,6 +5844,11 @@ packet-reader@1.0.0: resolved "http://localhost:4873/packet-reader/-/packet-reader-1.0.0.tgz#9238e5480dedabacfe1fe3f2771063f164157d74" integrity sha512-HAKu/fG3HpHFO0AA8WE8q2g+gBJaZ9MG7fcKk+IJPLTGAD6Psw4443l+9DGRbOIh3/aXr7Phy0TjilYivJo5XQ== +papaparse@^5.3.2: + version "5.3.2" + resolved "http://localhost:4873/papaparse/-/papaparse-5.3.2.tgz#d1abed498a0ee299f103130a6109720404fbd467" + integrity sha512-6dNZu0Ki+gyV0eBsFKJhYr+MdQYAzFUGlBMNj3GNrmHxmz1lfRa24CjFObPXtjcetlOv5Ad299MhIK0znp3afw== + parse-json@^5.0.0: version "5.2.0" resolved "http://localhost:4873/parse-json/-/parse-json-5.2.0.tgz#c76fc66dee54231c962b22bcc8a72cf2f99753cd" @@ -6760,6 +6794,11 @@ socks@^2.6.2: ip "^1.1.5" smart-buffer "^4.2.0" +sorted-array-functions@^1.3.0: + version "1.3.0" + resolved "http://localhost:4873/sorted-array-functions/-/sorted-array-functions-1.3.0.tgz#8605695563294dffb2c9796d602bd8459f7a0dd5" + integrity sha512-2sqgzeFlid6N4Z2fUQ1cvFmTOLRi/sEDzSQ0OKYchqgoPmQBVyM3959qYx3fpS6Esef80KjmpgPeEr028dP3OA== + source-map-resolve@^0.5.0: version "0.5.3" resolved "http://localhost:4873/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" diff --git a/integration-tests/helpers/bootstrap-app.js b/integration-tests/helpers/bootstrap-app.js index 0dc1c221e2..52ebc998cb 100644 --- a/integration-tests/helpers/bootstrap-app.js +++ b/integration-tests/helpers/bootstrap-app.js @@ -5,26 +5,30 @@ const importFrom = require("import-from") module.exports = { bootstrapApp: async ({ cwd } = {}) => { - const app = express() + try { + const app = express() - const loaders = importFrom( - cwd || process.cwd(), - "@medusajs/medusa/dist/loaders" - ).default + const loaders = importFrom( + cwd || process.cwd(), + "@medusajs/medusa/dist/loaders" + ).default - const { container, dbConnection } = await loaders({ - directory: path.resolve(cwd || process.cwd()), - expressApp: app, - isTest: false, - }) + const { container, dbConnection } = await loaders({ + directory: path.resolve(cwd || process.cwd()), + expressApp: app, + isTest: false, + }) - const PORT = await getPort() + const PORT = await getPort() - return { - container, - db: dbConnection, - app, - port: PORT, + return { + container, + db: dbConnection, + app, + port: PORT, + } + } catch (e) { + console.log(e) } }, } diff --git a/integration-tests/helpers/setup-server.js b/integration-tests/helpers/setup-server.js index 105b90c13a..3e9f2cf232 100644 --- a/integration-tests/helpers/setup-server.js +++ b/integration-tests/helpers/setup-server.js @@ -2,9 +2,15 @@ const path = require("path") const { spawn 
} = require("child_process") const { setPort } = require("./use-api") -module.exports = ({ cwd, verbose }) => { +module.exports = ({ cwd, redisUrl, uploadDir, verbose, env }) => { const serverPath = path.join(__dirname, "test-server.js") + // in order to prevent conflicts in redis, use a different db for each worker + // same fix as for databases (works with up to 15) + // redis dbs are 0-indexed and jest worker ids are indexed from 1 + const workerId = parseInt(process.env.JEST_WORKER_ID || "1") + const redisUrlWithDatabase = `${redisUrl}/${workerId - 1}` + return new Promise((resolve, reject) => { const medusaProcess = spawn("node", [path.resolve(serverPath)], { cwd, @@ -13,6 +19,9 @@ module.exports = ({ cwd, verbose }) => { NODE_ENV: "development", JWT_SECRET: "test", COOKIE_SECRET: "test", + REDIS_URL: redisUrl ? redisUrlWithDatabase : undefined, // If provided, will use a real instance, otherwise a fake instance + UPLOAD_DIR: uploadDir, // If provided, will be used for the fake local file service + ...env, }, stdio: verbose ? ["inherit", "inherit", "inherit", "ipc"] diff --git a/integration-tests/helpers/start-server-with-environment.js b/integration-tests/helpers/start-server-with-environment.js new file mode 100644 index 0000000000..cef895fb74 --- /dev/null +++ b/integration-tests/helpers/start-server-with-environment.js @@ -0,0 +1,28 @@ +const setupServer = require("./setup-server") +const { initDb } = require("./use-db") + +const startServerWithEnvironment = async ({ cwd, verbose, env }) => { + if (env) { + Object.entries(env).forEach(([key, value]) => { + process.env[key] = value + }) + } + + const dbConnection = await initDb({ + cwd, + }) + + Object.entries(env).forEach(([key, value]) => { + delete process.env[key] + }) + + const medusaProcess = await setupServer({ + cwd, + verbose, + env, + }) + + return [medusaProcess, dbConnection] +} + +export default startServerWithEnvironment diff --git a/integration-tests/helpers/use-db.js b/integration-tests/helpers/use-db.js index 5fb1d64c58..36b301c33b 100644 --- a/integration-tests/helpers/use-db.js +++ b/integration-tests/helpers/use-db.js @@ -79,6 +79,19 @@ const instance = DbTestUtil module.exports = { initDb: async function ({ cwd }) { const configPath = path.resolve(path.join(cwd, `medusa-config.js`)) + const { projectConfig, featureFlags } = require(configPath) + + const featureFlagsLoader = require(path.join( + cwd, + `node_modules`, + `@medusajs`, + `medusa`, + `dist`, + `loaders`, + `feature-flags` + )).default + + const featureFlagsRouter = featureFlagsLoader({ featureFlags }) const modelsLoader = require(path.join( cwd, @@ -89,9 +102,9 @@ module.exports = { `loaders`, `models` )).default + const entities = modelsLoader({}, { register: false }) - const { projectConfig } = require(configPath) if (projectConfig.database_type === "sqlite") { connectionType = "sqlite" const dbConnection = await createConnection({ @@ -108,12 +121,48 @@ module.exports = { await dbFactory.createFromTemplate(databaseName) + // get migraitons with enabled featureflags + const migrationDir = path.resolve( + path.join( + cwd, + `node_modules`, + `@medusajs`, + `medusa`, + `dist`, + `migrations`, + `*.js` + ) + ) + + const { getEnabledMigrations } = require(path.join( + cwd, + `node_modules`, + `@medusajs`, + `medusa`, + `dist`, + `commands`, + `utils`, + `get-migrations` + )) + + const enabledMigrations = await getEnabledMigrations( + [migrationDir], + (flag) => featureFlagsRouter.isFeatureEnabled(flag) + ) + + const enabledEntities = entities.filter( 
+ (e) => typeof e.isFeatureEnabled === "undefined" || e.isFeatureEnabled() + ) + const dbConnection = await createConnection({ type: "postgres", url: DB_URL, - entities, + entities: enabledEntities, + migrations: enabledMigrations, }) + await dbConnection.runMigrations() + instance.setDb(dbConnection) return dbConnection } diff --git a/integration-tests/helpers/use-template-db.js b/integration-tests/helpers/use-template-db.js index e5c77a2b6d..484acda457 100644 --- a/integration-tests/helpers/use-template-db.js +++ b/integration-tests/helpers/use-template-db.js @@ -31,10 +31,28 @@ class DatabaseFactory { `@medusajs`, `medusa`, `dist`, - `migrations` + `migrations`, + `*.js` ) ) + const { getEnabledMigrations } = require(path.join( + cwd, + `node_modules`, + `@medusajs`, + `medusa`, + `dist`, + `commands`, + `utils`, + `get-migrations` + )) + + // filter migrations to only include those that dont have feature flags + const enabledMigrations = await getEnabledMigrations( + [migrationDir], + (flag) => false + ) + await dropDatabase( { databaseName: this.templateDbName, @@ -51,7 +69,7 @@ class DatabaseFactory { type: "postgres", name: "templateConnection", url: `${DB_URL}/${this.templateDbName}`, - migrations: [`${migrationDir}/*.js`], + migrations: enabledMigrations, }) await templateDbConnection.runMigrations() @@ -92,7 +110,7 @@ class DatabaseFactory { } async destroy() { - let connection = await this.getMasterConnection() + const connection = await this.getMasterConnection() await connection.query(`DROP DATABASE IF EXISTS "${this.templateDbName}";`) await connection.close() diff --git a/integration-tests/scripts/cli/get-products.sh b/integration-tests/scripts/cli/get-products.sh new file mode 100755 index 0000000000..60da1f0471 --- /dev/null +++ b/integration-tests/scripts/cli/get-products.sh @@ -0,0 +1,24 @@ +#!/bin/bash + +seedJson=$(pwd)/$1 + +res=$(curl -s 'http://localhost:9000/store/products' | \ +python3 -c " + +import sys, json; + +loadedProducts = json.load(sys.stdin)['products']; + +seededProducts = json.load(open(\""$seedJson"\"))['products']; + +result = set([product['title'] for product in loadedProducts]) == set([product['title'] for product in seededProducts]) + +print(result) +") + +if [[ "$res" != "True" ]] ; then + echo "Seed failed, products are not equal" + exit 1 +else + exit 0 +fi \ No newline at end of file diff --git a/integration-tests/scripts/cli/login.sh b/integration-tests/scripts/cli/login.sh new file mode 100755 index 0000000000..bf9ab24e2f --- /dev/null +++ b/integration-tests/scripts/cli/login.sh @@ -0,0 +1,15 @@ +#!/bin/bash + +status_code=$(curl \ + -X POST\ + -H "Content-Type: application/json"\ + -d '{"email":"'$1'", "password":"'$2'"}'\ + --write-out %{http_code}\ + http://localhost:9000/admin/auth) + +if [[ "$status_code" -ne 200 ]] ; then + echo "Site status changed to $status_code" + exit 1 +else + exit 0 +fi \ No newline at end of file diff --git a/integration-tests/scripts/cli/wait-for-server-live.sh b/integration-tests/scripts/cli/wait-for-server-live.sh new file mode 100755 index 0000000000..639de3d9d7 --- /dev/null +++ b/integration-tests/scripts/cli/wait-for-server-live.sh @@ -0,0 +1,29 @@ +#!/bin/bash + +for i in {1..6} +do + echo $i + status_code=$(curl \ + -X GET \ + --write-out %{http_code} \ + --silent\ + --output /dev/null\ + http://localhost:9000/store/products) + +echo $status_code + if [[ "$status_code" -ne 000 ]] ; then + echo "exiting" + exit 0 + else + sleep 5 + fi +done + +echo $status_code + +if [[ "$status_code" = 000 ]] ; then + 
echo "Site status changed to $status_code" + exit 1 +else + exit 0 +fi diff --git a/package.json b/package.json index 1f6a520453..1f82807537 100644 --- a/package.json +++ b/package.json @@ -40,7 +40,6 @@ "lerna": "^3.22.1", "lint-staged": "^11.2.3", "microbundle": "^0.13.3", - "mongoose": "^5.10.15", "pg-god": "^1.0.11", "prettier": "^2.1.1", "resolve-cwd": "^3.0.0", @@ -67,9 +66,12 @@ "test:integration:api": "jest --config=integration-tests/jest.config.js --projects=integration-tests/api", "test:integration:plugins": "jest --config=integration-tests/jest.config.js --projects=integration-tests/plugins", "test:fixtures": "jest --config=docs-util/jest.config.js --runInBand", - "generate:services": "typedoc --options typedoc.services.js" + "generate:services": "typedoc --options typedoc.services.js", + "release:snapshot": "changeset publish --no-git-tags --snapshot --tag snapshot" }, "dependencies": { + "@changesets/changelog-github": "^0.4.5", + "@changesets/cli": "^2.23.0", "global": "^4.4.0", "import-from": "^3.0.0", "oas-normalize": "^5.0.1", diff --git a/packages/medusa-cli/CHANGELOG.md b/packages/medusa-cli/CHANGELOG.md index 22bd3d385e..1003cf2fee 100644 --- a/packages/medusa-cli/CHANGELOG.md +++ b/packages/medusa-cli/CHANGELOG.md @@ -3,6 +3,17 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.3.1](https://github.com/medusajs/medusa/compare/@medusajs/medusa-cli@1.3.0...@medusajs/medusa-cli@1.3.1) (2022-07-05) + + +### Features + +* **medusa-cli:** Allow to revert migrations from the CLI ([#1353](https://github.com/medusajs/medusa/issues/1353)) ([012513b](https://github.com/medusajs/medusa/commit/012513b6a1e90169e9e0e53f7a59841a34fbaeb3)) + + + + + # [1.3.0](https://github.com/medusajs/medusa/compare/@medusajs/medusa-cli@1.2.1...@medusajs/medusa-cli@1.3.0) (2022-05-01) **Note:** Version bump only for package @medusajs/medusa-cli diff --git a/packages/medusa-cli/package.json b/packages/medusa-cli/package.json index c46968a0ca..613a89c7b0 100644 --- a/packages/medusa-cli/package.json +++ b/packages/medusa-cli/package.json @@ -1,6 +1,6 @@ { "name": "@medusajs/medusa-cli", - "version": "1.3.0", + "version": "1.3.1", "description": "Command Line interface for Medusa Commerce", "main": "dist/index.js", "bin": { diff --git a/packages/medusa-cli/src/create-cli.js b/packages/medusa-cli/src/create-cli.js index 13616de300..c2297f324c 100644 --- a/packages/medusa-cli/src/create-cli.js +++ b/packages/medusa-cli/src/create-cli.js @@ -166,11 +166,11 @@ function buildLocalCommands(cli, isLocalProject) { }) .command({ command: `migrations [action]`, - desc: `Migrate the database to the most recent version.`, + desc: `Manage migrations from the core and your own project`, builder: { action: { demand: true, - choices: ["run", "show"], + choices: ["run", "revert", "show"], }, }, handler: handlerP( diff --git a/packages/medusa-dev-cli/CHANGELOG.md b/packages/medusa-dev-cli/CHANGELOG.md index 0fc365bdd8..20a23dd239 100644 --- a/packages/medusa-dev-cli/CHANGELOG.md +++ b/packages/medusa-dev-cli/CHANGELOG.md @@ -3,6 +3,22 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [0.0.26](https://github.com/medusajs/medusa/compare/medusa-dev-cli@0.0.24...medusa-dev-cli@0.0.26) (2022-07-05) + +**Note:** Version bump only for package medusa-dev-cli + + + + + +## [0.0.25](https://github.com/medusajs/medusa/compare/medusa-dev-cli@0.0.24...medusa-dev-cli@0.0.25) (2022-07-05) + +**Note:** Version bump only for package medusa-dev-cli + + + + + ## [0.0.24](https://github.com/medusajs/medusa/compare/medusa-dev-cli@0.0.23...medusa-dev-cli@0.0.24) (2021-12-08) **Note:** Version bump only for package medusa-dev-cli diff --git a/packages/medusa-dev-cli/package.json b/packages/medusa-dev-cli/package.json index d6f4b62abb..18fb8f5ee2 100644 --- a/packages/medusa-dev-cli/package.json +++ b/packages/medusa-dev-cli/package.json @@ -1,7 +1,7 @@ { "name": "medusa-dev-cli", "description": "CLI helpers for contributors working on Medusa", - "version": "0.0.24", + "version": "0.0.26", "author": "Sebastian Rindom ", "bin": { "medusa-dev": "./dist/index.js" @@ -16,6 +16,7 @@ "fs-extra": "^9.0.1", "got": "^11.8.5", "is-absolute": "^1.0.0", + "jest": "^25.5.2", "lodash": "^4.17.21", "signal-exit": "^3.0.3", "verdaccio": "^4.10.0", @@ -39,10 +40,10 @@ "directory": "packages/medusa-dev-cli" }, "scripts": { - "build": "babel src --out-dir dist --ignore \"**/__tests__\"", "prepare": "cross-env NODE_ENV=production npm run build", - "test": "echo \"Error: no test specified\" && exit 1", - "watch": "babel -w src --out-dir dist --ignore \"**/__tests__\"" + "build": "babel src --out-dir dist", + "test": "jest", + "watch": "babel -w src --out-dir dist" }, "engines": { "node": ">=12.13.0" diff --git a/packages/medusa-dev-cli/src/index.js b/packages/medusa-dev-cli/src/index.js index 8748bf018a..f6e35c852a 100644 --- a/packages/medusa-dev-cli/src/index.js +++ b/packages/medusa-dev-cli/src/index.js @@ -1,12 +1,12 @@ #!/usr/bin/env node -const Configstore = require(`configstore`); -const pkg = require(`../package.json`); -const _ = require(`lodash`); -const path = require(`path`); -const os = require(`os`); -const watch = require(`./watch`); -const { getVersionInfo } = require(`./utils/version`); +const Configstore = require(`configstore`) +const pkg = require(`../package.json`) +const _ = require(`lodash`) +const path = require(`path`) +const os = require(`os`) +const watch = require(`./watch`) +const { getVersionInfo } = require(`./utils/version`) const argv = require(`yargs`) .usage(`Usage: medusa-dev [options]`) .alias(`q`, `quiet`) @@ -19,7 +19,7 @@ const argv = require(`yargs`) .nargs(`p`, 1) .describe( `p`, - `Set path to Medusa repository. + `Set path to medusa repository. 
You typically only need to configure this once.` ) .nargs(`force-install`, 0) @@ -27,6 +27,11 @@ You typically only need to configure this once.` `force-install`, `Disables copying files into node_modules and forces usage of local npm repository.` ) + .nargs(`external-registry`, 0) + .describe( + `external-registry`, + `Run 'yarn add' commands without the --registry flag.` + ) .alias(`C`, `copy-all`) .nargs(`C`, 0) .describe( @@ -39,87 +44,101 @@ You typically only need to configure this once.` .alias(`h`, `help`) .nargs(`v`, 0) .alias(`v`, `version`) - .describe(`v`, `Print the currently installed version of Medusa Dev CLI`) - .argv; + .describe(`v`, `Print the currently installed version of Medusa Dev CLI`).argv if (argv.version) { - console.log(getVersionInfo()); - process.exit(); + console.log(getVersionInfo()) + process.exit() } -const conf = new Configstore(pkg.name); +const conf = new Configstore(pkg.name) -const fs = require(`fs-extra`); +const fs = require(`fs-extra`) -let pathToRepo = argv.setPathToRepo; +let pathToRepo = argv.setPathToRepo if (pathToRepo) { if (pathToRepo.includes(`~`)) { - pathToRepo = path.join(os.homedir(), pathToRepo.split(`~`).pop()); + pathToRepo = path.join(os.homedir(), pathToRepo.split(`~`).pop()) } - conf.set(`medusa-location`, path.resolve(pathToRepo)); - process.exit(); + conf.set(`medusa-location`, path.resolve(pathToRepo)) + process.exit() } -const havePackageJsonFile = fs.existsSync(`package.json`); +const havePackageJsonFile = fs.existsSync(`package.json`) if (!havePackageJsonFile) { - console.error(`Current folder must have a package.json file!`); - process.exit(); + console.error(`Current folder must have a package.json file!`) + process.exit() } -const medusaLocation = conf.get(`medusa-location`); +const medusaLocation = conf.get(`medusa-location`) if (!medusaLocation) { console.error( ` You haven't set the path yet to your cloned version of medusa. Do so now by running: - medusa-dev --set-path-to-repo /path/to/my/cloned/version/medusa ` - ); - process.exit(); + ) + process.exit() } // get list of packages from monorepo -const monoRepoPackages = []; +const packageNameToPath = new Map() +const monoRepoPackages = fs + .readdirSync(path.join(medusaLocation, `packages`)) + .map((dirName) => { + try { + const localPkg = JSON.parse( + fs.readFileSync( + path.join(medusaLocation, `packages`, dirName, `package.json`) + ) + ) -const pkgsDirs = fs.readdirSync(path.join(medusaLocation, `packages`)); -for (const dir of pkgsDirs) { - const pack = JSON.parse( - fs.readFileSync(path.join(medusaLocation, `packages`, dir, `package.json`)) - ); - monoRepoPackages.push(pack.name); -} + if (localPkg?.name) { + packageNameToPath.set( + localPkg.name, + path.join(medusaLocation, `packages`, dirName) + ) + return localPkg.name + } + } catch (error) { + // fallback to generic one + } -const localPkg = JSON.parse(fs.readFileSync(`package.json`)); + packageNameToPath.set( + dirName, + path.join(medusaLocation, `packages`, dirName) + ) + return dirName + }) + +const localPkg = JSON.parse(fs.readFileSync(`package.json`)) // intersect dependencies with monoRepoPackages to get list of packages that are used const localPackages = _.intersection( monoRepoPackages, Object.keys(_.merge({}, localPkg.dependencies, localPkg.devDependencies)) -); +) if (!argv.packages && _.isEmpty(localPackages)) { console.error( ` You haven't got any medusa dependencies into your current package.json - You probably want to pass in a list of packages to start developing on! 
For example: - -medusa-dev --packages @medusajs/medusa - +medusa-dev --packages medusa medusa-js If you prefer to place them in your package.json dependencies instead, medusa-dev will pick them up. ` - ); + ) if (!argv.forceInstall) { - process.exit(); + process.exit() } else { console.log( `Continuing other dependencies installation due to "--forceInstall" flag` - ); + ) } } @@ -129,4 +148,6 @@ watch(medusaLocation, argv.packages, { scanOnce: argv.scanOnce, forceInstall: argv.forceInstall, monoRepoPackages, -}); + packageNameToPath, + externalRegistry: argv.externalRegistry, +}) diff --git a/packages/medusa-dev-cli/src/local-npm-registry/index.js b/packages/medusa-dev-cli/src/local-npm-registry/index.js index cb39d2465b..25e1d009e4 100644 --- a/packages/medusa-dev-cli/src/local-npm-registry/index.js +++ b/packages/medusa-dev-cli/src/local-npm-registry/index.js @@ -1,23 +1,23 @@ -const startVerdaccio = require(`verdaccio`).default; +const startVerdaccio = require(`verdaccio`).default -const fs = require(`fs-extra`); -const _ = require(`lodash`); +const fs = require(`fs-extra`) +const _ = require(`lodash`) -let VerdaccioInitPromise = null; +let VerdaccioInitPromise = null -const { verdaccioConfig } = require(`./verdaccio-config`); -const { publishPackage } = require(`./publish-package`); -const { installPackages } = require(`./install-packages`); +const { verdaccioConfig } = require(`./verdaccio-config`) +const { publishPackage } = require(`./publish-package`) +const { installPackages } = require(`./install-packages`) const startServer = () => { if (VerdaccioInitPromise) { - return VerdaccioInitPromise; + return VerdaccioInitPromise } - console.log(`Starting local verdaccio server`); + console.log(`Starting local verdaccio server`) // clear storage - fs.removeSync(verdaccioConfig.storage); + fs.removeSync(verdaccioConfig.storage) VerdaccioInitPromise = new Promise((resolve) => { startVerdaccio( @@ -29,47 +29,49 @@ const startServer = () => { (webServer, addr, pkgName, pkgVersion) => { // console.log(webServer) webServer.listen(addr.port || addr.path, addr.host, () => { - console.log(`Started local verdaccio server`); + console.log(`Started local verdaccio server`) - resolve(); - }); + resolve() + }) } - ); - }); + ) + }) - return VerdaccioInitPromise; -}; + return VerdaccioInitPromise +} -exports.startVerdaccio = startServer; +exports.startVerdaccio = startServer exports.publishPackagesLocallyAndInstall = async ({ packagesToPublish, localPackages, - root, + packageNameToPath, ignorePackageJSONChanges, yarnWorkspaceRoot, + externalRegistry, }) => { - await startServer(); + await startServer() - const versionPostFix = Date.now(); + const versionPostFix = Date.now() - const newlyPublishedPackageVersions = {}; + const newlyPublishedPackageVersions = {} for (const packageName of packagesToPublish) { newlyPublishedPackageVersions[packageName] = await publishPackage({ packageName, packagesToPublish, - root, + packageNameToPath, versionPostFix, ignorePackageJSONChanges, - }); + }) } - const packagesToInstall = _.intersection(packagesToPublish, localPackages); + const packagesToInstall = _.intersection(packagesToPublish, localPackages) await installPackages({ packagesToInstall, yarnWorkspaceRoot, newlyPublishedPackageVersions, - }); -}; + externalRegistry, + }) +} diff --git a/packages/medusa-dev-cli/src/local-npm-registry/install-packages.js b/packages/medusa-dev-cli/src/local-npm-registry/install-packages.js index 116b9fcb28..ba98996b29 100644 --- 
a/packages/medusa-dev-cli/src/local-npm-registry/install-packages.js +++ b/packages/medusa-dev-cli/src/local-npm-registry/install-packages.js @@ -8,10 +8,11 @@ const installPackages = async ({ packagesToInstall, yarnWorkspaceRoot, newlyPublishedPackageVersions, + externalRegistry, }) => { console.log( `Installing packages from local registry:\n${packagesToInstall - .map(packageAndVersion => ` - ${packageAndVersion}`) + .map((packageAndVersion) => ` - ${packageAndVersion}`) .join(`\n`)}` ) let installCmd @@ -80,22 +81,22 @@ const installPackages = async ({ process.exit(1) } - const handleDeps = deps => { + const handleDeps = (deps) => { if (!deps) { return false } let changed = false - Object.keys(deps).forEach(depName => { + Object.keys(deps).forEach((depName) => { if (packagesToInstall.includes(depName)) { - deps[depName] = `gatsby-dev` + deps[depName] = `medusa-dev` changed = true } }) return changed } - Object.keys(workspacesLayout).forEach(workspaceName => { + Object.keys(workspacesLayout).forEach((workspaceName) => { const { location } = workspacesLayout[workspaceName] const pkgJsonPath = path.join(yarnWorkspaceRoot, location, `package.json`) if (!fs.existsSync(pkgJsonPath)) { @@ -109,7 +110,7 @@ const installPackages = async ({ changed |= handleDeps(pkg.peerDependencies) if (changed) { - console.log(`Changing deps in ${pkgJsonPath} to use @gatsby-dev`) + console.log(`Changing deps in ${pkgJsonPath} to use @medusa-dev`) fs.outputJSONSync(pkgJsonPath, pkg, { spaces: 2, }) @@ -118,24 +119,28 @@ const installPackages = async ({ // package.json files are changed - so we just want to install // using verdaccio registry - installCmd = [ - `yarn`, - [`install`, `--registry=${registryUrl}`, `--ignore-engines`], - ] + const yarnCommands = [`install`] + + if (!externalRegistry) { + yarnCommands.push(`--registry=${registryUrl}`) + } + + installCmd = [`yarn`, yarnCommands] } else { - installCmd = [ - `yarn`, - [ - `add`, - ...packagesToInstall.map(packageName => { - const packageVersion = newlyPublishedPackageVersions[packageName] - return `${packageName}@${packageVersion}` - }), - `--registry=${registryUrl}`, - `--exact`, - `--ignore-engines`, - ], + const yarnCommands = [ + `add`, + ...packagesToInstall.map((packageName) => { + const packageVersion = newlyPublishedPackageVersions[packageName] + return `${packageName}@${packageVersion}` + }), + `--exact`, ] + + if (!externalRegistry) { + yarnCommands.push(`--registry=${registryUrl}`) + } + + installCmd = [`yarn`, yarnCommands] } try { diff --git a/packages/medusa-dev-cli/src/local-npm-registry/publish-package.js b/packages/medusa-dev-cli/src/local-npm-registry/publish-package.js index 83911f6fd8..b88b0d7336 100644 --- a/packages/medusa-dev-cli/src/local-npm-registry/publish-package.js +++ b/packages/medusa-dev-cli/src/local-npm-registry/publish-package.js @@ -1,18 +1,18 @@ -const fs = require(`fs-extra`); -const path = require(`path`); +const fs = require(`fs-extra`) +const path = require(`path`) -const { promisifiedSpawn } = require(`../utils/promisified-spawn`); -const { registryUrl } = require(`./verdaccio-config`); +const { promisifiedSpawn } = require(`../utils/promisified-spawn`) +const { registryUrl } = require(`./verdaccio-config`) const NPMRCContent = `${registryUrl.replace( /https?:/g, `` -)}/:_authToken="medusa-dev"`; +)}/:_authToken="medusa-dev"` const { getMonorepoPackageJsonPath, -} = require(`../utils/get-monorepo-package-json-path`); -const { registerCleanupTask } = require(`./cleanup-tasks`); +} = 
require(`../utils/get-monorepo-package-json-path`) +const { registerCleanupTask } = require(`./cleanup-tasks`) /** * Edit package.json to: @@ -27,7 +27,7 @@ const adjustPackageJson = ({ versionPostFix, packagesToPublish, ignorePackageJSONChanges, - root, + packageNameToPath, }) => { // we need to check if package depend on any other package to will be published and // adjust version selector to point to dev version of package so local registry is used @@ -36,10 +36,10 @@ const adjustPackageJson = ({ const monorepoPKGjsonString = fs.readFileSync( monoRepoPackageJsonPath, `utf-8` - ); - const monorepoPKGjson = JSON.parse(monorepoPKGjsonString); + ) + const monorepoPKGjson = JSON.parse(monorepoPKGjsonString) - monorepoPKGjson.version = `${monorepoPKGjson.version}-dev-${versionPostFix}`; + monorepoPKGjson.version = `${monorepoPKGjson.version}-dev-${versionPostFix}` packagesToPublish.forEach((packageThatWillBePublished) => { if ( monorepoPKGjson.dependencies && @@ -49,37 +49,37 @@ const adjustPackageJson = ({ fs.readFileSync( getMonorepoPackageJsonPath({ packageName: packageThatWillBePublished, - root, + packageNameToPath, }), `utf-8` ) - ).version; + ).version monorepoPKGjson.dependencies[ packageThatWillBePublished - ] = `${currentVersion}-dev-${versionPostFix}`; + ] = `${currentVersion}-dev-${versionPostFix}` } - }); + }) - const temporaryMonorepoPKGjsonString = JSON.stringify(monorepoPKGjson); + const temporaryMonorepoPKGjsonString = JSON.stringify(monorepoPKGjson) const unignorePackageJSONChanges = ignorePackageJSONChanges(packageName, [ monorepoPKGjsonString, temporaryMonorepoPKGjsonString, - ]); + ]) // change version and dependency versions - fs.outputFileSync(monoRepoPackageJsonPath, temporaryMonorepoPKGjsonString); + fs.outputFileSync(monoRepoPackageJsonPath, temporaryMonorepoPKGjsonString) return { newPackageVersion: monorepoPKGjson.version, unadjustPackageJson: registerCleanupTask(() => { // restore original package.json - fs.outputFileSync(monoRepoPackageJsonPath, monorepoPKGjsonString); - unignorePackageJSONChanges(); + fs.outputFileSync(monoRepoPackageJsonPath, monorepoPKGjsonString) + unignorePackageJSONChanges() }), - }; -}; + } +} /** * Anonymous publishing require dummy .npmrc @@ -88,38 +88,38 @@ const adjustPackageJson = ({ * This is not verdaccio restriction. 
*/ const createTemporaryNPMRC = ({ pathToPackage }) => { - const NPMRCPath = path.join(pathToPackage, `.npmrc`); - fs.outputFileSync(NPMRCPath, NPMRCContent); + const NPMRCPath = path.join(pathToPackage, `.npmrc`) + fs.outputFileSync(NPMRCPath, NPMRCContent) return registerCleanupTask(() => { - fs.removeSync(NPMRCPath); - }); -}; + fs.removeSync(NPMRCPath) + }) +} const publishPackage = async ({ packageName, packagesToPublish, - root, versionPostFix, ignorePackageJSONChanges, + packageNameToPath, }) => { const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({ packageName, - root, - }); + packageNameToPath, + }) const { unadjustPackageJson, newPackageVersion } = adjustPackageJson({ monoRepoPackageJsonPath, packageName, - root, + packageNameToPath, versionPostFix, packagesToPublish, ignorePackageJSONChanges, - }); + }) - const pathToPackage = path.dirname(monoRepoPackageJsonPath); + const pathToPackage = path.dirname(monoRepoPackageJsonPath) - const uncreateTemporaryNPMRC = createTemporaryNPMRC({ pathToPackage }); + const uncreateTemporaryNPMRC = createTemporaryNPMRC({ pathToPackage }) // npm publish const publishCmd = [ @@ -128,26 +128,26 @@ const publishPackage = async ({ { cwd: pathToPackage, }, - ]; + ] console.log( `Publishing ${packageName}@${newPackageVersion} to local registry` - ); + ) try { - await promisifiedSpawn(publishCmd); + await promisifiedSpawn(publishCmd) console.log( `Published ${packageName}@${newPackageVersion} to local registry` - ); + ) } catch (e) { - console.error(`Failed to publish ${packageName}@${newPackageVersion}`, e); - process.exit(1); + console.error(`Failed to publish ${packageName}@${newPackageVersion}`, e) + process.exit(1) } - uncreateTemporaryNPMRC(); - unadjustPackageJson(); + uncreateTemporaryNPMRC() + unadjustPackageJson() - return newPackageVersion; -}; + return newPackageVersion +} -exports.publishPackage = publishPackage; +exports.publishPackage = publishPackage diff --git a/packages/medusa-dev-cli/src/utils/__tests__/get-dependant-packages.js b/packages/medusa-dev-cli/src/utils/__tests__/get-dependant-packages.js index b24b13b5f5..280737b124 100644 --- a/packages/medusa-dev-cli/src/utils/__tests__/get-dependant-packages.js +++ b/packages/medusa-dev-cli/src/utils/__tests__/get-dependant-packages.js @@ -1,5 +1,15 @@ const { getDependantPackages } = require(`../get-dependant-packages`) +function createMockPackageNameToPath(packageNames) { + const packageNameToPath = new Map() + + for (const packageName of packageNames) { + packageNameToPath.set(packageName, `/test/${packageName}`) + } + + return packageNameToPath +} + describe(`getDependantPackages`, () => { it(`handles deep dependency chains`, () => { const packagesToPublish = getDependantPackages({ @@ -9,6 +19,13 @@ describe(`getDependantPackages`, () => { "package-a-dep1-dep1": new Set([`package-a-dep1`]), "not-related": new Set([`also-not-related`]), }, + packageNameToPath: createMockPackageNameToPath([ + `package-a`, + `package-a-dep1`, + `package-a-dep1-dep1`, + `not-related`, + `also-not-related`, + ]), }) expect(packagesToPublish).toEqual( @@ -23,6 +40,10 @@ describe(`getDependantPackages`, () => { "package-a": new Set([`package-b`]), "package-b": new Set([`package-a`]), }, + packageNameToPath: createMockPackageNameToPath([ + `package-a`, + `package-b`, + ]), }) expect(packagesToPublish).toEqual(new Set([`package-a`, `package-b`])) }) diff --git a/packages/medusa-dev-cli/src/utils/__tests__/traverse-package-deps.js b/packages/medusa-dev-cli/src/utils/__tests__/traverse-package-deps.js 
index 8724b405f2..83d66f048e 100644 --- a/packages/medusa-dev-cli/src/utils/__tests__/traverse-package-deps.js +++ b/packages/medusa-dev-cli/src/utils/__tests__/traverse-package-deps.js @@ -43,15 +43,25 @@ jest.doMock( describe(`traversePackageDeps`, () => { it(`handles deep dependency chains`, () => { + const monoRepoPackages = [ + `package-a`, + `package-a-dep1`, + `package-a-dep1-dep1`, + `package-not-used`, + ] + const packageNameToPath = new Map() + for (const packageName of monoRepoPackages) { + packageNameToPath.set( + packageName, + path.join(...`/packages/${packageName}`.split(`/`)) + ) + } + const { seenPackages, depTree } = traversePackagesDeps({ root: ``, packages: [`package-a`, `doesnt-exist`], - monoRepoPackages: [ - `package-a`, - `package-a-dep1`, - `package-a-dep1-dep1`, - `package-not-used`, - ], + monoRepoPackages, + packageNameToPath, }) expect(seenPackages).toEqual([ diff --git a/packages/medusa-dev-cli/src/utils/check-deps-changes.js b/packages/medusa-dev-cli/src/utils/check-deps-changes.js index 9ad357124c..3f135353eb 100644 --- a/packages/medusa-dev-cli/src/utils/check-deps-changes.js +++ b/packages/medusa-dev-cli/src/utils/check-deps-changes.js @@ -1,9 +1,9 @@ -const fs = require(`fs-extra`); -const _ = require(`lodash`); +const fs = require(`fs-extra`) +const _ = require(`lodash`) const { getMonorepoPackageJsonPath, -} = require(`./get-monorepo-package-json-path`); -const got = require(`got`); +} = require(`./get-monorepo-package-json-path`) +const got = require(`got`) function difference(object, base) { function changes(object, base) { @@ -12,11 +12,11 @@ function difference(object, base) { result[key] = _.isObject(value) && _.isObject(base[key]) ? changes(value, base[key]) - : value; + : value } - }); + }) } - return changes(object, base); + return changes(object, base) } /** @@ -24,33 +24,33 @@ function difference(object, base) { * It will skip dependencies that are removed in monorepo package. * * If local package is not installed, it will check unpkg.com. - * This allow medusa-dev to skip publishing unnecesairly and + * This allow gatsby-dev to skip publishing unnecesairly and * let install packages from public npm repository if nothing changed. */ exports.checkDepsChanges = async ({ newPath, packageName, monoRepoPackages, - root, isInitialScan, ignoredPackageJSON, + packageNameToPath, }) => { - let localPKGjson; - let packageNotInstalled = false; + let localPKGjson + let packageNotInstalled = false try { - localPKGjson = JSON.parse(fs.readFileSync(newPath, `utf-8`)); + localPKGjson = JSON.parse(fs.readFileSync(newPath, `utf-8`)) } catch { - packageNotInstalled = true; + packageNotInstalled = true // there is no local package - so we still need to install deps // this is nice because devs won't need to do initial package installation - we can handle this. if (!isInitialScan) { console.log( - `'${packageName}' doesn't seem to be installed. Restart medusa-dev to publish it` - ); + `'${packageName}' doesn't seem to be installed. 
Restart gatsby-dev to publish it` + ) return { didDepsChanged: false, packageNotInstalled, - }; + } } // if package is not installed, we will do http GET request to @@ -60,37 +60,33 @@ exports.checkDepsChanges = async ({ // this allow us to not publish to local repository // and save some time/work try { - const response = await got( - `https://unpkg.com/${packageName}/package.json` - ); + const version = getPackageVersion(packageName) + const url = `https://unpkg.com/${packageName}@${version}/package.json` + const response = await got(url) if (response?.statusCode !== 200) { - throw new Error(`No response or non 200 code`); + throw new Error(`No response or non 200 code for ${url}`) } - localPKGjson = JSON.parse(response.body); - } catch { + localPKGjson = JSON.parse(response.body) + } catch (e) { console.log( - `'${packageName}' doesn't seem to be installed and is not published on NPM.` - ); + `'${packageName}' doesn't seem to be installed and is not published on NPM. Error: ${e.message}` + ) return { didDepsChanged: true, packageNotInstalled, - }; + } } } - const monoDir = packageName.startsWith("@medusajs") - ? packageName.split("/")[1] - : packageName; - const monoRepoPackageJsonPath = getMonorepoPackageJsonPath({ - packageName: monoDir, - root, - }); + packageName, + packageNameToPath, + }) const monorepoPKGjsonString = fs.readFileSync( monoRepoPackageJsonPath, `utf-8` - ); - const monorepoPKGjson = JSON.parse(monorepoPKGjsonString); + ) + const monorepoPKGjson = JSON.parse(monorepoPKGjsonString) if (ignoredPackageJSON.has(packageName)) { if (ignoredPackageJSON.get(packageName).includes(monorepoPKGjsonString)) { // we are in middle of publishing and content of package.json is one set during publish process, @@ -98,44 +94,44 @@ exports.checkDepsChanges = async ({ return { didDepsChanged: false, packageNotInstalled, - }; + } } } - if (!monorepoPKGjson.dependencies) monorepoPKGjson.dependencies = {}; - if (!localPKGjson.dependencies) localPKGjson.dependencies = {}; + if (!monorepoPKGjson.dependencies) monorepoPKGjson.dependencies = {} + if (!localPKGjson.dependencies) localPKGjson.dependencies = {} const areDepsEqual = _.isEqual( monorepoPKGjson.dependencies, localPKGjson.dependencies - ); + ) if (!areDepsEqual) { const diff = difference( monorepoPKGjson.dependencies, localPKGjson.dependencies - ); + ) const diff2 = difference( localPKGjson.dependencies, monorepoPKGjson.dependencies - ); + ) - let needPublishing = false; - let isPublishing = false; + let needPublishing = false + let isPublishing = false const depChangeLog = _.uniq(Object.keys({ ...diff, ...diff2 })) .reduce((acc, key) => { - if (monorepoPKGjson.dependencies[key] === `medusa-dev`) { + if (monorepoPKGjson.dependencies[key] === `gatsby-dev`) { // if we are in middle of publishing to local repository - ignore - isPublishing = true; - return acc; + isPublishing = true + return acc } - if (localPKGjson.dependencies[key] === `medusa-dev`) { + if (localPKGjson.dependencies[key] === `gatsby-dev`) { // monorepo packages will restore version, but after installation - // in local site - it will use `medusa-dev` dist tag - we need + // in local site - it will use `gatsby-dev` dist tag - we need // to ignore changes that - return acc; + return acc } if ( @@ -143,48 +139,55 @@ exports.checkDepsChanges = async ({ monorepoPKGjson.dependencies[key] ) { // Check only for version changes in packages - // that are not from medusa repo. - // Changes in medusa packages will be copied over + // that are not from gatsby repo. 
+ // Changes in gatsby packages will be copied over // from monorepo - and if those contain other dependency // changes - they will be covered if (!monoRepoPackages.includes(key)) { acc.push( ` - '${key}' changed version from ${localPKGjson.dependencies[key]} to ${monorepoPKGjson.dependencies[key]}` - ); - needPublishing = true; + ) + needPublishing = true } } else if (monorepoPKGjson.dependencies[key]) { - acc.push( - ` - '${key}@${monorepoPKGjson.dependencies[key]}' was added` - ); - needPublishing = true; + acc.push(` - '${key}@${monorepoPKGjson.dependencies[key]}' was added`) + needPublishing = true } else { - acc.push(` - '${key}@${localPKGjson.dependencies[key]}' was removed`); + acc.push(` - '${key}@${localPKGjson.dependencies[key]}' was removed`) // this doesn't need publishing really, so will skip this } - return acc; + return acc }, []) - .join(`\n`); + .join(`\n`) if (!isPublishing && depChangeLog.length > 0) { - console.log(`Dependencies of '${packageName}' changed:\n${depChangeLog}`); + console.log(`Dependencies of '${packageName}' changed:\n${depChangeLog}`) if (isInitialScan) { console.log( `Will ${!needPublishing ? `not ` : ``}publish to local npm registry.` - ); + ) } else { console.warn( `Installation of dependencies after initial scan is not implemented` - ); + ) } return { didDepsChanged: needPublishing, packageNotInstalled, - }; + } } } return { didDepsChanged: false, packageNotInstalled, - }; -}; + } +} + +function getPackageVersion(packageName) { + const projectPackageJson = JSON.parse( + fs.readFileSync(`./package.json`, `utf-8`) + ) + const { dependencies = {}, devDependencies = {} } = projectPackageJson + const version = dependencies[packageName] || devDependencies[packageName] + return version || `latest` +} diff --git a/packages/medusa-dev-cli/src/utils/get-monorepo-package-json-path.js b/packages/medusa-dev-cli/src/utils/get-monorepo-package-json-path.js index 13523a2411..fe6ceecf63 100644 --- a/packages/medusa-dev-cli/src/utils/get-monorepo-package-json-path.js +++ b/packages/medusa-dev-cli/src/utils/get-monorepo-package-json-path.js @@ -1,11 +1,4 @@ -const path = require(`path`); +const path = require(`path`) -exports.getMonorepoPackageJsonPath = ({ packageName, root }) => { - let dirName = packageName; - if (packageName.startsWith("@medusajs")) { - const [, directory] = packageName.split("/"); - dirName = directory; - } - - return path.join(root, `packages`, dirName, `package.json`); -}; +exports.getMonorepoPackageJsonPath = ({ packageName, packageNameToPath }) => + path.join(packageNameToPath.get(packageName), `package.json`) diff --git a/packages/medusa-dev-cli/src/utils/traverse-package-deps.js b/packages/medusa-dev-cli/src/utils/traverse-package-deps.js index 3149ffd2e9..20acbeef89 100644 --- a/packages/medusa-dev-cli/src/utils/traverse-package-deps.js +++ b/packages/medusa-dev-cli/src/utils/traverse-package-deps.js @@ -1,10 +1,17 @@ -const _ = require(`lodash`); -const path = require(`path`); +const _ = require(`lodash`) +const path = require(`path`) /** * @typedef {Object} TraversePackagesDepsReturn * @property {Object} depTree Lookup table to check dependants for given package. * Used to determine which packages need to be published. 
+ * @example + * ``` + * { + * "medusa-cli": Set(["medusa"]), + * "medusa-telemetry": Set(["medusa", "medusa-cli"]), + * } + * ``` */ /** @@ -24,53 +31,58 @@ const path = require(`path`); * @return {TraversePackagesDepsReturn} */ const traversePackagesDeps = ({ - root, packages, monoRepoPackages, seenPackages = [...packages], depTree = {}, + packageNameToPath, }) => { packages.forEach((p) => { - if (p.startsWith("@medusajs")) { - p = p.split("/")[1]; - } - let pkgJson; + let pkgJson try { - pkgJson = require(path.join(root, `packages`, p, `package.json`)); - } catch { - console.error(`"${p}" package doesn't exist in monorepo.`); + const packageRoot = packageNameToPath.get(p) + if (packageRoot) { + pkgJson = require(path.join(packageRoot, `package.json`)) + } else { + console.error(`"${p}" package doesn't exist in monorepo.`) + // remove from seenPackages + seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p) + return + } + } catch (e) { + console.error(`"${p}" package doesn't exist in monorepo.`, e) // remove from seenPackages - seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p); - return; + seenPackages = seenPackages.filter((seenPkg) => seenPkg !== p) + return } const fromMonoRepo = _.intersection( Object.keys({ ...pkgJson.dependencies }), monoRepoPackages - ); + ) fromMonoRepo.forEach((pkgName) => { - depTree[pkgName] = (depTree[pkgName] || new Set()).add(p); - }); + depTree[pkgName] = (depTree[pkgName] || new Set()).add(p) + }) // only traverse not yet seen packages to avoid infinite loops - const newPackages = _.difference(fromMonoRepo, seenPackages); + const newPackages = _.difference(fromMonoRepo, seenPackages) if (newPackages.length) { newPackages.forEach((depFromMonorepo) => { - seenPackages.push(depFromMonorepo); - }); + seenPackages.push(depFromMonorepo) + }) traversePackagesDeps({ - root, packages: fromMonoRepo, monoRepoPackages, seenPackages, depTree, - }); + packageNameToPath, + }) } - }); - return { seenPackages, depTree }; -}; + }) + return { seenPackages, depTree } +} -exports.traversePackagesDeps = traversePackagesDeps; +exports.traversePackagesDeps = traversePackagesDeps diff --git a/packages/medusa-dev-cli/src/watch.js b/packages/medusa-dev-cli/src/watch.js index b038043c6b..b9cd91d069 100644 --- a/packages/medusa-dev-cli/src/watch.js +++ b/packages/medusa-dev-cli/src/watch.js @@ -1,27 +1,27 @@ -const chokidar = require(`chokidar`); -const _ = require(`lodash`); -const del = require(`del`); -const fs = require(`fs-extra`); -const path = require(`path`); -const findWorkspaceRoot = require(`find-yarn-workspace-root`); +const chokidar = require(`chokidar`) +const _ = require(`lodash`) +const del = require(`del`) +const fs = require(`fs-extra`) +const path = require(`path`) +const findWorkspaceRoot = require(`find-yarn-workspace-root`) -const { publishPackagesLocallyAndInstall } = require(`./local-npm-registry`); -const { checkDepsChanges } = require(`./utils/check-deps-changes`); -const { getDependantPackages } = require(`./utils/get-dependant-packages`); +const { publishPackagesLocallyAndInstall } = require(`./local-npm-registry`) +const { checkDepsChanges } = require(`./utils/check-deps-changes`) +const { getDependantPackages } = require(`./utils/get-dependant-packages`) const { setDefaultSpawnStdio, promisifiedSpawn, -} = require(`./utils/promisified-spawn`); -const { traversePackagesDeps } = require(`./utils/traverse-package-deps`); +} = require(`./utils/promisified-spawn`) +const { traversePackagesDeps } = require(`./utils/traverse-package-deps`) 
-let numCopied = 0; +let numCopied = 0 const quit = () => { - console.log(`Copied ${numCopied} files`); - process.exit(); -}; + console.log(`Copied ${numCopied} files`) + process.exit() +} -const MAX_COPY_RETRIES = 3; +const MAX_COPY_RETRIES = 3 /* * non-existent packages break on('ready') @@ -30,76 +30,89 @@ const MAX_COPY_RETRIES = 3; async function watch( root, packages, - { scanOnce, quiet, forceInstall, monoRepoPackages, localPackages } + { + scanOnce, + quiet, + forceInstall, + monoRepoPackages, + localPackages, + packageNameToPath, + externalRegistry, + } ) { - setDefaultSpawnStdio(quiet ? `ignore` : `inherit`); + setDefaultSpawnStdio(quiet ? `ignore` : `inherit`) // determine if in yarn workspace - if in workspace, force using verdaccio // as current logic of copying files will not work correctly. - const yarnWorkspaceRoot = findWorkspaceRoot(); + const yarnWorkspaceRoot = findWorkspaceRoot() if (yarnWorkspaceRoot && process.env.NODE_ENV !== `test`) { - console.log(`Yarn workspace found.`); - forceInstall = true; + console.log(`Yarn workspace found.`) + forceInstall = true } - let afterPackageInstallation = false; - let queuedCopies = []; + let afterPackageInstallation = false + let queuedCopies = [] const realCopyPath = (arg) => { - const { oldPath, newPath, quiet, resolve, reject, retry = 0 } = arg; + const { oldPath, newPath, quiet, resolve, reject, retry = 0 } = arg fs.copy(oldPath, newPath, (err) => { if (err) { if (retry >= MAX_COPY_RETRIES) { - console.error(err); - reject(err); - return; + console.error(err) + reject(err) + return } else { setTimeout( () => realCopyPath({ ...arg, retry: retry + 1 }), 500 * Math.pow(2, retry) - ); - return; + ) + return } } // When the medusa binary is copied over, it is not setup with the executable // permissions that it is given when installed via yarn. - // This fixes the issue where after running meduas-dev, running `yarn medusa develop` + // This fixes the issue where after running medusa-dev, running `yarn medusa develop` // fails with a permission issue. 
- if (/(bin\/meduas.js|medusa(-cli)?\/cli.js)$/.test(newPath)) { - fs.chmodSync(newPath, `0755`); + // @fixes https://github.com/medusajs/medusa/issues/18809 + // Binary files we target: + // - medusa/bin/medusa.js + // -medusa/cli.js + // -medusa-cli/cli.js + if (/(bin\/medusa.js|medusa(-cli)?\/cli.js)$/.test(newPath)) { + fs.chmodSync(newPath, `0755`) } - numCopied += 1; + numCopied += 1 if (!quiet) { - console.log(`Copied ${oldPath} to ${newPath}`); + console.log(`Copied ${oldPath} to ${newPath}`) } - resolve(); - }); - }; + resolve() + }) + } const copyPath = (oldPath, newPath, quiet, packageName) => new Promise((resolve, reject) => { - const argObj = { oldPath, newPath, quiet, packageName, resolve, reject }; + const argObj = { oldPath, newPath, quiet, packageName, resolve, reject } if (afterPackageInstallation) { - realCopyPath(argObj); + realCopyPath(argObj) } else { - queuedCopies.push(argObj); + queuedCopies.push(argObj) } - }); + }) const runQueuedCopies = () => { - afterPackageInstallation = true; - queuedCopies.forEach((argObj) => realCopyPath(argObj)); - queuedCopies = []; - }; + afterPackageInstallation = true + queuedCopies.forEach((argObj) => realCopyPath(argObj)) + queuedCopies = [] + } const clearJSFilesFromNodeModules = async () => { const packagesToClear = queuedCopies.reduce((acc, { packageName }) => { if (packageName) { - acc.add(packageName); + acc.add(packageName) } - return acc; - }, new Set()); + return acc + }, new Set()) await Promise.all( [...packagesToClear].map( @@ -110,65 +123,64 @@ async function watch( `!node_modules/${packageToClear}/src/**/*.{js,js.map}`, ]) ) - ); - }; + ) + } // check packages deps and if they depend on other packages from monorepo // add them to packages list const { seenPackages, depTree } = traversePackagesDeps({ root, packages: _.uniq(localPackages), monoRepoPackages, - }); + packageNameToPath, + }) const allPackagesToWatch = packages ? 
_.intersection(packages, seenPackages) - : seenPackages; + : seenPackages - const ignoredPackageJSON = new Map(); + const ignoredPackageJSON = new Map() const ignorePackageJSONChanges = (packageName, contentArray) => { - ignoredPackageJSON.set(packageName, contentArray); + ignoredPackageJSON.set(packageName, contentArray) return () => { - ignoredPackageJSON.delete(packageName); - }; - }; + ignoredPackageJSON.delete(packageName) + } + } if (forceInstall) { try { if (allPackagesToWatch.length > 0) { await publishPackagesLocallyAndInstall({ packagesToPublish: allPackagesToWatch, - root, + packageNameToPath, localPackages, ignorePackageJSONChanges, yarnWorkspaceRoot, - }); + externalRegistry, + }) } else { // run `yarn` - const yarnInstallCmd = [`yarn`]; + const yarnInstallCmd = [`yarn`] - console.log(`Installing packages from public NPM registry`); - await promisifiedSpawn(yarnInstallCmd); - console.log(`Installation complete`); + console.log(`Installing packages from public NPM registry`) + await promisifiedSpawn(yarnInstallCmd) + console.log(`Installation complete`) } } catch (e) { - console.log(e); + console.log(e) } - process.exit(); + process.exit() } if (allPackagesToWatch.length === 0) { - console.error(`There are no packages to watch.`); - return; + console.error(`There are no packages to watch.`) + return } - const cleanToWatch = allPackagesToWatch.map((pkgName) => { - if (pkgName.startsWith(`@medusajs`)) { - return pkgName.split("/")[1]; - } - return pkgName; - }); + const allPackagesIgnoringThemesToWatch = allPackagesToWatch.filter( + (pkgName) => !pkgName.startsWith(`medusa-theme`) + ) const ignored = [ /[/\\]node_modules[/\\]/i, @@ -178,63 +190,70 @@ async function watch( /[/\\]__mocks__[/\\]/i, /\.npmrc/i, ].concat( - cleanToWatch.map((p) => new RegExp(`${p}[\\/\\\\]src[\\/\\\\]`, `i`)) - ); + allPackagesIgnoringThemesToWatch.map( + (p) => new RegExp(`${p}[\\/\\\\]src[\\/\\\\]`, `i`) + ) + ) const watchers = _.uniq( - cleanToWatch - .map((p) => path.join(root, `/packages/`, p)) + allPackagesToWatch + .map((p) => path.join(packageNameToPath.get(p))) .filter((p) => fs.existsSync(p)) - ); + ) - let allCopies = []; - const packagesToPublish = new Set(); - let isInitialScan = true; - let isPublishing = false; + let allCopies = [] + const packagesToPublish = new Set() + let isInitialScan = true + let isPublishing = false - const waitFor = new Set(); - let anyPackageNotInstalled = false; - - const watchEvents = [`change`, `add`]; + const waitFor = new Set() + let anyPackageNotInstalled = false + const watchEvents = [`change`, `add`] + const packagePathMatchingEntries = Array.from(packageNameToPath.entries()) chokidar .watch(watchers, { ignored: [(filePath) => _.some(ignored, (reg) => reg.test(filePath))], }) .on(`all`, async (event, filePath) => { if (!watchEvents.includes(event)) { - return; + return } - const [pack] = filePath - .split(/packages[/\\]/) - .pop() - .split(/[/\\]/); + // match against paths + let packageName - const sourcePkg = JSON.parse( - fs.readFileSync(path.join(root, `/packages/`, pack, `package.json`)) - ); - const packageName = sourcePkg.name; + for (const [_packageName, packagePath] of packagePathMatchingEntries) { + const relativeToThisPackage = path.relative(packagePath, filePath) + if (!relativeToThisPackage.startsWith(`..`)) { + packageName = _packageName + break + } + } - const prefix = path.join(root, `/packages/`, pack); + if (!packageName) { + return + } + + const prefix = packageNameToPath.get(packageName) // Copy it over local version. 
// Don't copy over the medusa bin file as that breaks the NPM symlink. if (_.includes(filePath, `dist/medusa-cli.js`)) { - return; + return } - const relativePackageFile = path.relative(prefix, filePath); + const relativePackageFile = path.relative(prefix, filePath) const newPath = path.join( `./node_modules/${packageName}`, relativePackageFile - ); + ) if (relativePackageFile === `package.json`) { // package.json files will change during publish to adjust version of package (and dependencies), so ignore // changes during this process if (isPublishing) { - return; + return } // Compare dependencies with local version @@ -243,10 +262,10 @@ async function watch( newPath, packageName, monoRepoPackages, - root, + packageNameToPath, isInitialScan, ignoredPackageJSON, - }); + }) if (isInitialScan) { // normally checkDepsChanges would be sync, @@ -255,21 +274,19 @@ async function watch( // keep track of it to make sure all of it // finish before installing - waitFor.add(didDepsChangedPromise); + waitFor.add(didDepsChangedPromise) } - const { - didDepsChanged, - packageNotInstalled, - } = await didDepsChangedPromise; + const { didDepsChanged, packageNotInstalled } = + await didDepsChangedPromise if (packageNotInstalled) { - anyPackageNotInstalled = true; + anyPackageNotInstalled = true } if (didDepsChanged) { if (isInitialScan) { - waitFor.delete(didDepsChangedPromise); + waitFor.delete(didDepsChangedPromise) // handle dependency change only in initial scan - this is for sure doable to // handle this in watching mode correctly - but for the sake of shipping // this I limit more work/time consuming edge cases. @@ -287,66 +304,67 @@ async function watch( }).forEach((packageToPublish) => { // scheduling publish - we will publish when `ready` is emitted // as we can do single publish then - packagesToPublish.add(packageToPublish); - }); + packagesToPublish.add(packageToPublish) + }) } } // don't ever copy package.json as this will mess up any future dependency // changes checks - return; + return } - const localCopies = [copyPath(filePath, newPath, quiet, packageName)]; + const localCopies = [copyPath(filePath, newPath, quiet, packageName)] // If this is from "cache-dir" also copy it into the site's .cache if (_.includes(filePath, `cache-dir`)) { const newCachePath = path.join( `.cache/`, path.relative(path.join(prefix, `cache-dir`), filePath) - ); - localCopies.push(copyPath(filePath, newCachePath, quiet)); + ) + localCopies.push(copyPath(filePath, newCachePath, quiet)) } - allCopies = allCopies.concat(localCopies); + allCopies = allCopies.concat(localCopies) }) .on(`ready`, async () => { // wait for all async work needed to be done // before publishing / installing - await Promise.all(Array.from(waitFor)); + await Promise.all(Array.from(waitFor)) if (isInitialScan) { - isInitialScan = false; + isInitialScan = false if (packagesToPublish.size > 0) { - isPublishing = true; + isPublishing = true await publishPackagesLocallyAndInstall({ packagesToPublish: Array.from(packagesToPublish), - root, + packageNameToPath, localPackages, ignorePackageJSONChanges, - }); - packagesToPublish.clear(); - isPublishing = false; + externalRegistry, + }) + packagesToPublish.clear() + isPublishing = false } else if (anyPackageNotInstalled) { // run `yarn` - const yarnInstallCmd = [`yarn`]; + const yarnInstallCmd = [`yarn`] - console.log(`Installing packages from public NPM registry`); - await promisifiedSpawn(yarnInstallCmd); - console.log(`Installation complete`); + console.log(`Installing packages from public NPM 
registry`) + await promisifiedSpawn(yarnInstallCmd) + console.log(`Installation complete`) } - await clearJSFilesFromNodeModules(); - runQueuedCopies(); + await clearJSFilesFromNodeModules() + runQueuedCopies() } // all files watched, quit once all files are copied if necessary Promise.all(allCopies).then(() => { if (scanOnce) { - quit(); + quit() } - }); - }); + }) + }) } -module.exports = watch; +module.exports = watch diff --git a/packages/medusa-file-minio/CHANGELOG.md b/packages/medusa-file-minio/CHANGELOG.md index bdd957fc6e..0122cad5ee 100644 --- a/packages/medusa-file-minio/CHANGELOG.md +++ b/packages/medusa-file-minio/CHANGELOG.md @@ -3,6 +3,23 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.0.7](https://github.com/medusajs/medusa/compare/medusa-file-minio@1.0.6...medusa-file-minio@1.0.7) (2022-07-05) + + +### Bug Fixes + +* **medusa-file-spaces,medusa-file-s3,medusa-file-minio:** Add options to super call in file plugins ([#1714](https://github.com/medusajs/medusa/issues/1714)) ([a5f717b](https://github.com/medusajs/medusa/commit/a5f717be5ae1954f3dbf1e7b2edb35d11088a8c8)) + + +### Features + +* **medusa:** Delete and download url endpoints ([#1705](https://github.com/medusajs/medusa/issues/1705)) ([cc29b64](https://github.com/medusajs/medusa/commit/cc29b641c9358415b46179371988e7ddc11d2664)) +* **medusa:** Extend file-service interface + move to core ([#1577](https://github.com/medusajs/medusa/issues/1577)) ([8e42d37](https://github.com/medusajs/medusa/commit/8e42d37e84e80c003b9c0311117ab8a8871aa61b)) + + + + + ## [1.0.6](https://github.com/medusajs/medusa/compare/medusa-file-minio@1.0.4...medusa-file-minio@1.0.6) (2022-06-19) **Note:** Version bump only for package medusa-file-minio diff --git a/packages/medusa-file-minio/README.md b/packages/medusa-file-minio/README.md index e2fb95f7fc..005f1ab57e 100644 --- a/packages/medusa-file-minio/README.md +++ b/packages/medusa-file-minio/README.md @@ -13,4 +13,4 @@ Learn more about how you can use this plugin in the [documentation](https://docs access_key_id: "YOUR-ACCESS-KEY", secret_access_key: "YOUR-SECRET-KEY", } -``` \ No newline at end of file +``` diff --git a/packages/medusa-file-minio/package.json b/packages/medusa-file-minio/package.json index 2446910238..61f00614ef 100644 --- a/packages/medusa-file-minio/package.json +++ b/packages/medusa-file-minio/package.json @@ -1,6 +1,6 @@ { "name": "medusa-file-minio", - "version": "1.0.6", + "version": "1.0.7", "description": "MinIO server file connector for Medusa", "main": "index.js", "repository": { diff --git a/packages/medusa-file-minio/src/services/minio.js b/packages/medusa-file-minio/src/services/minio.js index 1f06a4b237..04186f3948 100644 --- a/packages/medusa-file-minio/src/services/minio.js +++ b/packages/medusa-file-minio/src/services/minio.js @@ -1,10 +1,11 @@ -import fs from "fs" +import { AbstractFileService } from '@medusajs/medusa' import aws from "aws-sdk" -import { FileService } from "medusa-interfaces" +import fs from "fs" -class MinioService extends FileService { +class MinioService extends AbstractFileService { + constructor({}, options) { - super() + super({}, options) this.bucket_ = options.bucket this.accessKeyId_ = options.access_key_id @@ -15,14 +16,14 @@ class MinioService extends FileService { } upload(file) { - aws.config.setPromisesDependency() + aws.config.setPromisesDependency(null) aws.config.update({ accessKeyId: this.accessKeyId_, 
secretAccessKey: this.secretAccessKey_, endpoint: this.endpoint_, s3ForcePathStyle: this.s3ForcePathStyle_, signatureVersion: this.signatureVersion_, - }) + }, true) const s3 = new aws.S3() const params = { @@ -46,14 +47,14 @@ class MinioService extends FileService { } delete(file) { - aws.config.setPromisesDependency() + aws.config.setPromisesDependency(null) aws.config.update({ accessKeyId: this.accessKeyId_, secretAccessKey: this.secretAccessKey_, endpoint: this.endpoint_, s3ForcePathStyle: this.s3ForcePathStyle_, signatureVersion: this.signatureVersion_, - }) + }, true) const s3 = new aws.S3() const params = { @@ -71,6 +72,18 @@ class MinioService extends FileService { }) }) } + + async getUploadStreamDescriptor(fileData) { + throw new Error("Method not implemented.") + } + + async getDownloadStream(fileData) { + throw new Error("Method not implemented.") + } + + async getPresignedDownloadUrl(fileData) { + throw new Error("Method not implemented.") + } } export default MinioService diff --git a/packages/medusa-file-s3/CHANGELOG.md b/packages/medusa-file-s3/CHANGELOG.md index 9c13299bc0..0f2c566677 100644 --- a/packages/medusa-file-s3/CHANGELOG.md +++ b/packages/medusa-file-s3/CHANGELOG.md @@ -3,6 +3,22 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.1.3](https://github.com/medusajs/medusa/compare/medusa-file-s3@1.1.2...medusa-file-s3@1.1.3) (2022-07-05) + + +### Bug Fixes + +* **medusa-file-spaces,medusa-file-s3,medusa-file-minio:** Add options to super call in file plugins ([#1714](https://github.com/medusajs/medusa/issues/1714)) ([a5f717b](https://github.com/medusajs/medusa/commit/a5f717be5ae1954f3dbf1e7b2edb35d11088a8c8)) + + +### Features + +* **medusa:** Extend file-service interface + move to core ([#1577](https://github.com/medusajs/medusa/issues/1577)) ([8e42d37](https://github.com/medusajs/medusa/commit/8e42d37e84e80c003b9c0311117ab8a8871aa61b)) + + + + + ## [1.1.2](https://github.com/medusajs/medusa/compare/medusa-file-s3@1.1.0...medusa-file-s3@1.1.2) (2022-06-19) **Note:** Version bump only for package medusa-file-s3 diff --git a/packages/medusa-file-s3/package.json b/packages/medusa-file-s3/package.json index da41071936..e01efa029b 100644 --- a/packages/medusa-file-s3/package.json +++ b/packages/medusa-file-s3/package.json @@ -1,6 +1,6 @@ { "name": "medusa-file-s3", - "version": "1.1.2", + "version": "1.1.3", "description": "AWS s3 file connector for Medusa", "main": "index.js", "repository": { diff --git a/packages/medusa-file-s3/src/services/s3.js b/packages/medusa-file-s3/src/services/s3.js index 85541260eb..496a11379f 100644 --- a/packages/medusa-file-s3/src/services/s3.js +++ b/packages/medusa-file-s3/src/services/s3.js @@ -1,10 +1,10 @@ import fs from "fs" import aws from "aws-sdk" -import { FileService } from "medusa-interfaces" +import { AbstractFileService } from '@medusajs/medusa' -class S3Service extends FileService { +class S3Service extends AbstractFileService { constructor({}, options) { - super() + super({}, options) this.bucket_ = options.bucket this.s3Url_ = options.s3_url @@ -15,13 +15,13 @@ class S3Service extends FileService { } upload(file) { - aws.config.setPromisesDependency() + aws.config.setPromisesDependency(null) aws.config.update({ accessKeyId: this.accessKeyId_, secretAccessKey: this.secretAccessKey_, region: this.region_, endpoint: this.endpoint_, - }) + }, true) const s3 = new aws.S3() var params = { @@ -44,13 +44,13 @@ class 
S3Service extends FileService { } delete(file) { - aws.config.setPromisesDependency() + aws.config.setPromisesDependency(null) aws.config.update({ accessKeyId: this.accessKeyId_, secretAccessKey: this.secretAccessKey_, region: this.region_, endpoint: this.endpoint_, - }) + }, true) const s3 = new aws.S3() var params = { @@ -68,6 +68,18 @@ class S3Service extends FileService { }) }) } + + async getUploadStreamDescriptor(fileData) { + throw new Error("Method not implemented.") + } + + async getDownloadStream(fileData) { + throw new Error("Method not implemented.") + } + + async getPresignedDownloadUrl(fileData) { + throw new Error("Method not implemented.") + } } export default S3Service diff --git a/packages/medusa-file-spaces/CHANGELOG.md b/packages/medusa-file-spaces/CHANGELOG.md index ddb744b9bc..76d4f29ded 100644 --- a/packages/medusa-file-spaces/CHANGELOG.md +++ b/packages/medusa-file-spaces/CHANGELOG.md @@ -3,6 +3,24 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.2.3](https://github.com/medusajs/medusa/compare/medusa-file-spaces@1.2.2...medusa-file-spaces@1.2.3) (2022-07-05) + + +### Bug Fixes + +* **medusa-file-spaces,medusa-file-s3,medusa-file-minio:** Add options to super call in file plugins ([#1714](https://github.com/medusajs/medusa/issues/1714)) ([a5f717b](https://github.com/medusajs/medusa/commit/a5f717be5ae1954f3dbf1e7b2edb35d11088a8c8)) + + +### Features + +* **medusa:** Add batch strategy for order exports ([#1603](https://github.com/medusajs/medusa/issues/1603)) ([bf47d1a](https://github.com/medusajs/medusa/commit/bf47d1aecd74f4489667609444a8b09393e894d3)) +* **medusa:** Extend file-service interface + move to core ([#1577](https://github.com/medusajs/medusa/issues/1577)) ([8e42d37](https://github.com/medusajs/medusa/commit/8e42d37e84e80c003b9c0311117ab8a8871aa61b)) +* **medusa-file-spaces:** DigitalOcean fileservice streaming ([#1585](https://github.com/medusajs/medusa/issues/1585)) ([abaf10b](https://github.com/medusajs/medusa/commit/abaf10b31d1e9a60710da87cac5c9c869195660d)), closes [#1583](https://github.com/medusajs/medusa/issues/1583) [#1580](https://github.com/medusajs/medusa/issues/1580) [#1582](https://github.com/medusajs/medusa/issues/1582) [#1583](https://github.com/medusajs/medusa/issues/1583) [#1580](https://github.com/medusajs/medusa/issues/1580) [#1582](https://github.com/medusajs/medusa/issues/1582) + + + + + ## [1.2.2](https://github.com/medusajs/medusa/compare/medusa-file-spaces@1.2.0...medusa-file-spaces@1.2.2) (2022-06-19) diff --git a/packages/medusa-file-spaces/package.json b/packages/medusa-file-spaces/package.json index 07de46418c..4a420eaca5 100644 --- a/packages/medusa-file-spaces/package.json +++ b/packages/medusa-file-spaces/package.json @@ -1,6 +1,6 @@ { "name": "medusa-file-spaces", - "version": "1.2.2", + "version": "1.2.3", "description": "Digital Ocean Spaces file connector for Medusa", "main": "index.js", "repository": { diff --git a/packages/medusa-file-spaces/src/services/digital-ocean.js b/packages/medusa-file-spaces/src/services/digital-ocean.js index d9ef58e2e7..67aae82ef6 100644 --- a/packages/medusa-file-spaces/src/services/digital-ocean.js +++ b/packages/medusa-file-spaces/src/services/digital-ocean.js @@ -1,11 +1,12 @@ -import fs from "fs" +import { AbstractFileService } from "@medusajs/medusa" import aws from "aws-sdk" +import fs from "fs" import { parse } from "path" -import { FileService } from "medusa-interfaces" 
+import stream from "stream" -class DigitalOceanService extends FileService { +class DigitalOceanService extends AbstractFileService { constructor({}, options) { - super() + super({}, options) this.bucket_ = options.bucket this.spacesUrl_ = options.spaces_url?.replace(/\/$/, "") @@ -16,18 +17,12 @@ class DigitalOceanService extends FileService { } upload(file) { - aws.config.setPromisesDependency() - aws.config.update({ - accessKeyId: this.accessKeyId_, - secretAccessKey: this.secretAccessKey_, - region: this.region_, - endpoint: this.endpoint_, - }) + this.updateAwsConfig() const parsedFilename = parse(file.originalname) const fileKey = `${parsedFilename.name}-${Date.now()}${parsedFilename.ext}` const s3 = new aws.S3() - var params = { + const params = { ACL: "public-read", Bucket: this.bucket_, Body: fs.createReadStream(file.path), @@ -51,16 +46,10 @@ class DigitalOceanService extends FileService { } delete(file) { - aws.config.setPromisesDependency() - aws.config.update({ - accessKeyId: this.accessKeyId_, - secretAccessKey: this.secretAccessKey_, - region: this.region_, - endpoint: this.endpoint_, - }) + this.updateAwsConfig() const s3 = new aws.S3() - var params = { + const params = { Bucket: this.bucket_, Key: `${file}`, } @@ -75,6 +64,71 @@ class DigitalOceanService extends FileService { }) }) } + + async getUploadStreamDescriptor(fileData) { + this.updateAwsConfig() + + const pass = new stream.PassThrough() + + const fileKey = `${fileData.name}.${fileData.ext}` + const params = { + ACL: fileData.acl ?? "private", + Bucket: this.bucket_, + Body: pass, + Key: fileKey, + } + + const s3 = new aws.S3() + return { + writeStream: pass, + promise: s3.upload(params).promise(), + url: `${this.spacesUrl_}/${fileKey}`, + fileKey, + } + } + + async getDownloadStream(fileData) { + this.updateAwsConfig() + + const s3 = new aws.S3() + + const params = { + Bucket: this.bucket_, + Key: `${fileData.fileKey}`, + } + + return s3.getObject(params).createReadStream() + } + + async getPresignedDownloadUrl(fileData) { + this.updateAwsConfig({ + signatureVersion: "v4", + }) + + const s3 = new aws.S3() + + const params = { + Bucket: this.bucket_, + Key: `${fileData.fileKey}`, + Expires: 60, // 60 seconds + } + + return await s3.getSignedUrlPromise("getObject", params) + } + + updateAwsConfig(additionalConfiguration = {}) { + aws.config.setPromisesDependency(null) + aws.config.update( + { + accessKeyId: this.accessKeyId_, + secretAccessKey: this.secretAccessKey_, + region: this.region_, + endpoint: this.endpoint_, + ...additionalConfiguration, + }, + true + ) + } } export default DigitalOceanService diff --git a/packages/medusa-fulfillment-webshipper/CHANGELOG.md b/packages/medusa-fulfillment-webshipper/CHANGELOG.md index 176dfdcbdc..c4b64c2f13 100644 --- a/packages/medusa-fulfillment-webshipper/CHANGELOG.md +++ b/packages/medusa-fulfillment-webshipper/CHANGELOG.md @@ -3,6 +3,18 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [1.3.3](https://github.com/medusajs/medusa/compare/medusa-fulfillment-webshipper@1.3.2...medusa-fulfillment-webshipper@1.3.3) (2022-07-05) + + +### Bug Fixes + +* **webshipper:** allow cancelling WS orders with error status ([#1755](https://github.com/medusajs/medusa/issues/1755)) ([1d3032d](https://github.com/medusajs/medusa/commit/1d3032dc671b477654afd13a218caa772454bde0)) +* **webshipper:** only add invoices if invoice generator produces a file ([#1749](https://github.com/medusajs/medusa/issues/1749)) ([c0e18d4](https://github.com/medusajs/medusa/commit/c0e18d473c050d143f67ed2c76fa9ec9414daf1a)) + + + + + ## [1.3.2](https://github.com/medusajs/medusa/compare/medusa-fulfillment-webshipper@1.3.0...medusa-fulfillment-webshipper@1.3.2) (2022-06-19) **Note:** Version bump only for package medusa-fulfillment-webshipper diff --git a/packages/medusa-fulfillment-webshipper/README.md b/packages/medusa-fulfillment-webshipper/README.md index 5ec1d6ec1e..130bdfe202 100644 --- a/packages/medusa-fulfillment-webshipper/README.md +++ b/packages/medusa-fulfillment-webshipper/README.md @@ -1,6 +1,6 @@ # medusa-fulfillment-webshipper -Adds Webshipper as a fulfilment provider in Medusa Commerce. +Adds Webshipper as a fulfilment provider in Medusa Commerce. On each new fulfillment an order is created in Webshipper. The plugin listens for shipment events and updated the shipment accordingly. A webhook listener is exposed at `/webshipper/shipments` to listen for shipment creations. You must create this webhook in Webshipper to have Medusa listen for shipment events. @@ -13,4 +13,5 @@ A webhook listener is exposed at `/webshipper/shipments` to listen for shipment order_channel_id: [the channel id to register orders on] (required) webhook_secret: [the webhook secret used to listen for shipments] (required) coo_countries: [an array of countries in which a Certificate of Origin will be attached] (default: "all") + delete_on_cancel [determines whether Webshipper orders are deleted when a Medusa fulfillment is canceled] (default: false) ``` diff --git a/packages/medusa-fulfillment-webshipper/package.json b/packages/medusa-fulfillment-webshipper/package.json index cc4b0680f0..c573f00cf1 100644 --- a/packages/medusa-fulfillment-webshipper/package.json +++ b/packages/medusa-fulfillment-webshipper/package.json @@ -1,6 +1,6 @@ { "name": "medusa-fulfillment-webshipper", - "version": "1.3.2", + "version": "1.3.3", "description": "Webshipper Fulfillment provider for Medusa", "main": "index.js", "repository": { @@ -18,7 +18,7 @@ "@babel/plugin-transform-runtime": "^7.7.6", "@babel/preset-env": "^7.7.5", "@babel/runtime": "^7.9.6", - "@medusajs/medusa": "^1.3.2", + "@medusajs/medusa": "^1.3.3", "client-sessions": "^0.8.0", "cross-env": "^5.2.1", "eslint": "^6.8.0", diff --git a/packages/medusa-fulfillment-webshipper/src/services/webshipper-fulfillment.js b/packages/medusa-fulfillment-webshipper/src/services/webshipper-fulfillment.js index 4ea0cd2e00..7f77ef20c0 100644 --- a/packages/medusa-fulfillment-webshipper/src/services/webshipper-fulfillment.js +++ b/packages/medusa-fulfillment-webshipper/src/services/webshipper-fulfillment.js @@ -287,19 +287,21 @@ class WebshipperFulfillmentService extends FulfillmentService { fulfillmentItems ) - invoice = await this.client_.documents - .create({ - type: "documents", - attributes: { - document_size: this.options_.document_size || "A4", - document_format: "PDF", - base64: base64Invoice, - document_type: "invoice", - }, - }) - .catch((err) => { - throw err - }) + if (base64Invoice) 
{ + invoice = await this.client_.documents + .create({ + type: "documents", + attributes: { + document_size: this.options_.document_size || "A4", + document_format: "PDF", + base64: base64Invoice, + document_type: "invoice", + }, + }) + .catch((err) => { + throw err + }) + } const cooCountries = this.options_.coo_countries if ( @@ -621,19 +623,17 @@ class WebshipperFulfillmentService extends FulfillmentService { return Promise.resolve() } - if (order) { - if ( - order.data.attributes.status !== "pending" && - order.data.attributes.status !== "missing_rate" - ) { - if (order.data.attributes.status === "cancelled") { - return Promise.resolve(order) - } - throw new Error("Cannot cancel order") - } + if (this.options_.delete_on_cancel) { + return await this.client_.orders.delete(data.id) } - return this.client_.orders.delete(data.id) + return await this.client_.orders.update(data.id, { + id: data.id, + type: "orders", + attributes: { + status: "cancelled", + }, + }) } } diff --git a/packages/medusa-fulfillment-webshipper/src/utils/webshipper.js b/packages/medusa-fulfillment-webshipper/src/utils/webshipper.js index af4b0e5212..e93b2d2204 100644 --- a/packages/medusa-fulfillment-webshipper/src/utils/webshipper.js +++ b/packages/medusa-fulfillment-webshipper/src/utils/webshipper.js @@ -83,6 +83,16 @@ class Webshipper { }, }).then(({ data }) => data) }, + update: async (id, data) => { + const path = `/v2/orders/${id}` + return this.client_({ + method: "PATCH", + url: path, + data: { + data, + }, + }).then(({ data }) => data) + }, delete: async (id) => { const path = `/v2/orders/${id}` return this.client_({ diff --git a/packages/medusa-interfaces/CHANGELOG.md b/packages/medusa-interfaces/CHANGELOG.md index 1e113123ed..c0adee0198 100644 --- a/packages/medusa-interfaces/CHANGELOG.md +++ b/packages/medusa-interfaces/CHANGELOG.md @@ -3,6 +3,17 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.3.1](https://github.com/medusajs/medusa/compare/medusa-interfaces@1.3.0...medusa-interfaces@1.3.1) (2022-07-05) + + +### Bug Fixes + +* **medusa:** Remove deps `mongoose` + `mongodb` ([#1218](https://github.com/medusajs/medusa/issues/1218)) ([c76e23e](https://github.com/medusajs/medusa/commit/c76e23e84dd8cb08c3c709f9f95c4c17b9685439)) + + + + + # [1.3.0](https://github.com/medusajs/medusa/compare/medusa-interfaces@1.2.1...medusa-interfaces@1.3.0) (2022-05-01) diff --git a/packages/medusa-interfaces/package.json b/packages/medusa-interfaces/package.json index c262ba0f46..1814d84ea8 100644 --- a/packages/medusa-interfaces/package.json +++ b/packages/medusa-interfaces/package.json @@ -1,6 +1,6 @@ { "name": "medusa-interfaces", - "version": "1.3.0", + "version": "1.3.1", "description": "Core interfaces for Medusa", "main": "dist/index.js", "repository": { diff --git a/packages/medusa-interfaces/src/base-model.js b/packages/medusa-interfaces/src/base-model.js deleted file mode 100644 index e3c63eca30..0000000000 --- a/packages/medusa-interfaces/src/base-model.js +++ /dev/null @@ -1,153 +0,0 @@ -import mongoose from "mongoose" - -/** - * Interface for data models. The default data layer uses an internal mongoose - * model and is as such compatible with MongoDB. - * @interface - */ -class BaseModel { - constructor() { - /** @const the underlying mongoose model used for queries */ - this.mongooseModel_ = this.createMongooseModel_() - } - - /** - * Returns the model schema. 
The child class must implement the static schema - * property. - * @return {string} the models schema - */ - getSchema() { - if (!this.constructor.schema) { - throw new Error("Schema not defined") - } - return this.constructor.schema - } - - /** - * Returns the model name. The child class must implement the static modelName - * property. - * @return {string} the name of the model - */ - getModelName() { - if (!this.constructor.modelName) { - throw new Error("Every model must have a static modelName property") - } - return this.constructor.modelName - } - - /** - * Returns the schema options defined in child class. - * @return {object} the schema options - */ - getSchemaOptions() { - if (!this.constructor.schemaOptions) { - return {} - } - - return this.constructor.schemaOptions - } - - /** - * @private - * Creates a mongoose model based on schema, schema options and model name. - * @return {Mongooose.Model} the mongoose model - */ - createMongooseModel_() { - const schema = this.getSchema() - const options = this.getSchemaOptions() - - const mongooseSchema = new mongoose.Schema(schema, options) - - return mongoose.model(this.getModelName(), mongooseSchema) - } - - /** - */ - startSession() { - return this.mongooseModel_.startSession() - } - - /** - * Queries the mongoose model via the mongoose's findOne. - * @param query {object} a mongoose selector query - * @param options {?object=} mongoose options - * @return {?mongoose.Document} the retreived mongoose document or null. - */ - findOne(query, options = {}) { - return this.mongooseModel_.findOne(query, options).lean() - } - - /** - * Queries the mongoose model via the mongoose's find. - * @param query {object} a mongoose selector query - * @param options {?object=} mongoose options - * @return {Array} the retreived mongoose documents or - * an empty array - */ - find(query, options, offset, limit) { - return this.mongooseModel_ - .find(query, options) - .skip(offset) - .limit(limit) - .lean() - } - - count() { - return this.mongooseModel_.count({}) - } - - /** - * Update a model via the mongoose model's updateOne. - * @param query {object} a mongoose selector query - * @param update {object} mongoose update object - * @param options {?object=} mongoose options - * @return {object} mongoose result - */ - updateOne(query, update, options = {}) { - options.new = true - return this.mongooseModel_.findOneAndUpdate(query, update, options).lean() - } - - /** - * Update a model via the mongoose model's update. - * @param query {object} a mongoose selector query - * @param update {object} mongoose update object - * @param options {?object=} mongoose options - * @return {object} mongoose result - */ - update(query, update, options) { - return this.mongooseModel_.update(query, update, options) - } - - /** - * Creates a document in the mongoose model's collection via create. 
- * @param object {object} the value of the document to be created - * @param options {?object=} mongoose options - * @return {object} mongoose result - */ - create(object, options) { - return this.mongooseModel_.create(object, options) - } - - /** - * Deletes a document in the mongoose model's collection - * @param query {object} the value of the document to be created - * @param options {?object=} mongoose options - * @return {object} mongoose result - */ - deleteOne(query, options) { - return this.mongooseModel_.deleteOne(query, options) - } - - /** - * Deletes many document in the mongoose model's collection - * @param query {object} the value of the document to be created - * @param options {?object=} mongoose options - * @return {object} mongoose result - */ - delete(query, options) { - return this.mongooseModel_.deleteMany(query, options) - } -} - -export default BaseModel diff --git a/packages/medusa-interfaces/src/index.js b/packages/medusa-interfaces/src/index.js index b88b5dca74..10cf36af65 100644 --- a/packages/medusa-interfaces/src/index.js +++ b/packages/medusa-interfaces/src/index.js @@ -1,8 +1,7 @@ export { default as BaseService } from "./base-service" -export { default as BaseModel } from "./base-model" -export { default as PaymentService } from "./payment-service" -export { default as FulfillmentService } from "./fulfillment-service" export { default as FileService } from "./file-service" +export { default as FulfillmentService } from "./fulfillment-service" export { default as NotificationService } from "./notification-service" export { default as OauthService } from "./oauth-service" +export { default as PaymentService } from "./payment-service" export { default as SearchService } from "./search-service" diff --git a/packages/medusa-js/CHANGELOG.md b/packages/medusa-js/CHANGELOG.md index 6d960a61f0..5470677f08 100644 --- a/packages/medusa-js/CHANGELOG.md +++ b/packages/medusa-js/CHANGELOG.md @@ -3,6 +3,23 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
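The `medusa-js` changes that follow add an `AdminBatchJobsResource` to the admin client (registered as `admin.batchJobs` in `resources/admin/index.ts`) and a `stringifyNullProperties` util that the new `list` method runs its query through before `qs.stringify`. A minimal usage sketch only, assuming an already-configured `Medusa` client instance; the base URL, API token, and batch job `type` below are placeholders and are not part of this diff:

```ts
import Medusa from "@medusajs/medusa-js"

// Placeholder configuration; baseUrl and apiKey are assumptions for this sketch.
const medusa = new Medusa({
  baseUrl: "http://localhost:9000",
  maxRetries: 3,
  apiKey: "<admin-api-token>",
})

async function runBatchJob() {
  // POST /admin/batch-jobs via AdminBatchJobsResource.create
  const { batch_job } = await medusa.admin.batchJobs.create({
    type: "product-export", // illustrative job type, not defined in this diff
    context: {},
    dry_run: false,
  })

  // GET /admin/batch-jobs — null filter values are stringified by stringifyNullProperties
  const { batch_jobs } = await medusa.admin.batchJobs.list({ limit: 10, offset: 0 })

  // GET /admin/batch-jobs/:id to poll the job created above
  return medusa.admin.batchJobs.retrieve(batch_job.id)
}
```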
+## [1.2.3](https://github.com/medusajs/medusa/compare/@medusajs/medusa-js@1.2.2...@medusajs/medusa-js@1.2.3) (2022-07-05) + + +### Bug Fixes + +* **medusa-js:** Fix `stringifyNullProperties` util ([#1766](https://github.com/medusajs/medusa/issues/1766)) ([7bee57f](https://github.com/medusajs/medusa/commit/7bee57f7c55e15b6a6c847dfda433e67f258ef8e)) + + +### Features + +* **medusa-js:** Create utils to stringify null values and respect object types ([#1748](https://github.com/medusajs/medusa/issues/1748)) ([fc1cbe7](https://github.com/medusajs/medusa/commit/fc1cbe72c7b0cd2879a8b112a6f63fa94c728a19)) +* **medusa,medusa-js,medusa-react:** Add BatchJob API support in `medusa-js` + `medusa-react` ([#1704](https://github.com/medusajs/medusa/issues/1704)) ([7302d76](https://github.com/medusajs/medusa/commit/7302d76e12683c989f340d2fcfaf4338dca6554a)) + + + + + ## [1.2.2](https://github.com/medusajs/medusa/compare/@medusajs/medusa-js@1.2.0...@medusajs/medusa-js@1.2.2) (2022-06-19)
diff --git a/packages/medusa-js/package.json b/packages/medusa-js/package.json index c0029861ba..965010f3d8 100644 --- a/packages/medusa-js/package.json +++ b/packages/medusa-js/package.json @@ -1,6 +1,6 @@ { "name": "@medusajs/medusa-js", - "version": "1.2.2", + "version": "1.2.3", "description": "Client for Medusa Commerce Rest API", "main": "./dist/index.js", "types": "./dist/index.d.ts", @@ -15,7 +15,7 @@ "author": "Oliver Juhl", "license": "MIT", "dependencies": { - "@medusajs/medusa": "^1.3.2", + "@medusajs/medusa": "^1.3.3", "axios": "^0.24.0", "form-data": "^4.0.0", "qs": "^6.10.3",
diff --git a/packages/medusa-js/src/resources/admin/batch-jobs.ts b/packages/medusa-js/src/resources/admin/batch-jobs.ts new file mode 100644 index 0000000000..457541ccf3 --- /dev/null +++ b/packages/medusa-js/src/resources/admin/batch-jobs.ts @@ -0,0 +1,60 @@ +import { + AdminBatchJobListRes, + AdminBatchJobRes, + AdminGetBatchParams, + AdminPostBatchesReq, +} from "@medusajs/medusa" +import qs from "qs" +import { ResponsePromise } from "../../typings" +import BaseResource from "../base" +import { stringifyNullProperties } from "../../utils" + +class AdminBatchJobsResource extends BaseResource { + create( + payload: AdminPostBatchesReq, + customHeaders: Record<string, any> = {} + ): ResponsePromise<AdminBatchJobRes> { + const path = `/admin/batch-jobs` + return this.client.request("POST", path, payload, {}, customHeaders) + } + + list( + query?: AdminGetBatchParams, + customHeaders: Record<string, any> = {} + ): ResponsePromise<AdminBatchJobListRes> { + let path = `/admin/batch-jobs` + + if (query) { + const queryString = qs.stringify(stringifyNullProperties(query)) + path = `/admin/batch-jobs?${queryString}` + } + + return this.client.request("GET", path, {}, {}, customHeaders) + } + + cancel( + batchJobId: string, + customHeaders: Record<string, any> = {} + ): ResponsePromise<AdminBatchJobRes> { + const path = `/admin/batch-jobs/${batchJobId}/cancel` + return this.client.request("POST", path, {}, {}, customHeaders) + } + + confirm( + batchJobId: string, + customHeaders: Record<string, any> = {} + ): ResponsePromise<AdminBatchJobRes> { + const path = `/admin/batch-jobs/${batchJobId}/confirm` + return this.client.request("POST", path, {}, {}, customHeaders) + } + + retrieve( + batchJobId: string, + customHeaders: Record<string, any> = {} + ): ResponsePromise<AdminBatchJobRes> { + const path = `/admin/batch-jobs/${batchJobId}` + return this.client.request("GET", path, {}, {}, customHeaders) + } +} + +export default AdminBatchJobsResource
diff --git a/packages/medusa-js/src/resources/admin/index.ts b/packages/medusa-js/src/resources/admin/index.ts index 372c69d783..25b3c8a632 100644 --- a/packages/medusa-js/src/resources/admin/index.ts +++ b/packages/medusa-js/src/resources/admin/index.ts @@ -1,33 +1,35 @@ import BaseResource from "../base" import AdminAuthResource from "./auth" -import AdminCustomersResource from "./customers" -import AdminCustomerGroupsResource from "./customer-groups" -import AdminDiscountsResource from "./discounts" +import AdminBatchJobsResource from "./batch-jobs" import CollectionsResource from "./collections" +import AdminCustomerGroupsResource from "./customer-groups" +import AdminCustomersResource from "./customers" +import AdminDiscountsResource from "./discounts" import AdminDraftOrdersResource from "./draft-orders" import AdminGiftCardsResource from "./gift-cards" import AdminInvitesResource from "./invites" import AdminNotesResource from "./notes" -import AdminProductsResource from "./products" -import AdminProductTypesResource from "./product-types" -import AdminUsersResource from "./users" -import AdminReturnsResource from "./returns" +import AdminNotificationsResource from "./notifications" import AdminOrdersResource from "./orders" +import AdminPriceListResource from "./price-lists" +import AdminProductTagsResource from "./product-tags" +import AdminProductTypesResource from "./product-types" +import AdminProductsResource from "./products" +import AdminRegionsResource from "./regions" import AdminReturnReasonsResource from "./return-reasons" -import AdminVariantsResource from "./variants" -import AdminSwapsResource from "./swaps" -import AdminTaxRatesResource from "./tax-rates" +import AdminReturnsResource from "./returns" +import AdminShippingOptionsResource from "./shipping-options" import AdminShippingProfilesResource from "./shipping-profiles" import AdminStoresResource from "./store" -import AdminShippingOptionsResource from "./shipping-options" -import AdminRegionsResource from "./regions" -import AdminNotificationsResource from "./notifications" +import AdminSwapsResource from "./swaps" +import AdminTaxRatesResource from "./tax-rates" import AdminUploadsResource from "./uploads" -import AdminProductTagsResource from "./product-tags" -import AdminPriceListResource from "./price-lists" +import AdminUsersResource from "./users" +import AdminVariantsResource from "./variants" class Admin extends BaseResource { public auth = new AdminAuthResource(this.client) + public batchJobs = new AdminBatchJobsResource(this.client) public customers = new AdminCustomersResource(this.client) public customerGroups = new AdminCustomerGroupsResource(this.client) public discounts = new AdminDiscountsResource(this.client)
diff --git a/packages/medusa-js/src/resources/admin/uploads.ts b/packages/medusa-js/src/resources/admin/uploads.ts index 2b24373259..3d83cc2f79 100644 --- a/packages/medusa-js/src/resources/admin/uploads.ts +++ b/packages/medusa-js/src/resources/admin/uploads.ts @@ -1,14 +1,21 @@ -import { AdminUploadRes, IAdminPostUploadsFile } from "@medusajs/medusa" +import { + AdminDeleteUploadsReq, + IAdminPostUploadsFileReq, + AdminDeleteUploadsRes, + AdminPostUploadsDownloadUrlReq, + AdminUploadsDownloadUrlRes, + AdminUploadsRes, +} from "@medusajs/medusa" +import FormData from "form-data" import { ResponsePromise } from "../../typings" import BaseResource from "../base" -import FormData from "form-data" class AdminUploadsResource extends BaseResource { private headers = { "Content-Type": "multipart/form-data", } - create(file: IAdminPostUploadsFile): ResponsePromise<AdminUploadRes> { + create(file: IAdminPostUploadsFileReq): ResponsePromise<AdminUploadsRes> { + const path = `/admin/uploads` const payload = new FormData() @@ -16,6 +23,24 @@ class AdminUploadsResource extends BaseResource { return this.client.request("POST", path, payload, {}, this.headers) } + + delete( + payload: AdminDeleteUploadsReq, + customHeaders: Record<string, any> = {} + ): ResponsePromise<AdminDeleteUploadsRes> { + const path = `/admin/uploads` + + return this.client.request("DELETE", path, payload, {}, customHeaders) + } + + getPresignedDownloadUrl( + payload: AdminPostUploadsDownloadUrlReq, + customHeaders: Record<string, any> = {} + ): ResponsePromise<AdminUploadsDownloadUrlRes> { + const path = `/admin/uploads/download-url` + + return this.client.request("POST", path, payload, {}, customHeaders) + } } export default AdminUploadsResource
diff --git a/packages/medusa-js/src/utils.ts b/packages/medusa-js/src/utils.ts new file mode 100644 index 0000000000..3f0b6f0097 --- /dev/null +++ b/packages/medusa-js/src/utils.ts @@ -0,0 +1,21 @@ +export function stringifyNullProperties<T extends Record<string, any>>(input: T): T { + const convertProperties = (obj: T) => { + const res = {} as T + + Object.keys(obj).reduce((acc: T, key: string) => { + if (obj[key] === null) { + acc[key] = "null" + } else if (typeof obj[key] === "object") { + acc[key] = convertProperties(obj[key]) + } else { + acc[key] = obj[key] + } + + return acc + }, res) + + return res + } + + return convertProperties(input) +}
diff --git a/packages/medusa-js/test/utils/utils.test.ts b/packages/medusa-js/test/utils/utils.test.ts new file mode 100644 index 0000000000..055bab6130 --- /dev/null +++ b/packages/medusa-js/test/utils/utils.test.ts @@ -0,0 +1,31 @@ +import { stringifyNullProperties } from "../../src/utils" + +describe("stringifyNullProperties", () => { + test("returns empty object on no props", () => { + const result = stringifyNullProperties({}) + expect(result).toEqual({}) + }) + + test("successfully stringifies null property", () => { + const result = stringifyNullProperties({ test: null }) + expect(result).toEqual({ test: "null" }) + }) + + test("successfully stringifies nested null property", () => { + const result = stringifyNullProperties({ + test: { test_2: { test_3: null } }, + another_test: "test", + }) + expect(result).toEqual({ + test: { test_2: { test_3: "null" } }, + another_test: "test", + }) + }) + + test("successfully stringifies string property", () => { + const result = stringifyNullProperties({ + test: "test", + }) + expect(result).toEqual({ test: "test" }) + }) +})
diff --git a/packages/medusa-payment-paypal/CHANGELOG.md b/packages/medusa-payment-paypal/CHANGELOG.md index dbf7c2f1db..b103aabe11 100644 --- a/packages/medusa-payment-paypal/CHANGELOG.md +++ b/packages/medusa-payment-paypal/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines.
+## [1.2.3](https://github.com/medusajs/medusa/compare/medusa-payment-paypal@1.2.2...medusa-payment-paypal@1.2.3) (2022-07-05) + +**Note:** Version bump only for package medusa-payment-paypal + + + + + ## [1.2.2](https://github.com/medusajs/medusa/compare/medusa-payment-paypal@1.2.0...medusa-payment-paypal@1.2.2) (2022-06-19) diff --git a/packages/medusa-payment-paypal/package.json b/packages/medusa-payment-paypal/package.json index 48298709d8..aef686a7f5 100644 --- a/packages/medusa-payment-paypal/package.json +++ b/packages/medusa-payment-paypal/package.json @@ -1,6 +1,6 @@ { "name": "medusa-payment-paypal", - "version": "1.2.2", + "version": "1.2.3", "description": "Paypal Payment provider for Meduas Commerce", "main": "index.js", "repository": { @@ -26,7 +26,7 @@ "cross-env": "^5.2.1", "eslint": "^6.8.0", "jest": "^25.5.2", - "medusa-interfaces": "^1.3.0", + "medusa-interfaces": "^1.3.1", "medusa-test-utils": "^1.1.37" }, "scripts": { diff --git a/packages/medusa-payment-stripe/CHANGELOG.md b/packages/medusa-payment-stripe/CHANGELOG.md index 63eef50b4b..13593ec8ba 100644 --- a/packages/medusa-payment-stripe/CHANGELOG.md +++ b/packages/medusa-payment-stripe/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.1.41](https://github.com/medusajs/medusa/compare/medusa-payment-stripe@1.1.40...medusa-payment-stripe@1.1.41) (2022-07-05) + +**Note:** Version bump only for package medusa-payment-stripe + + + + + ## [1.1.40](https://github.com/medusajs/medusa/compare/medusa-payment-stripe@1.1.38...medusa-payment-stripe@1.1.40) (2022-06-19) diff --git a/packages/medusa-payment-stripe/package.json b/packages/medusa-payment-stripe/package.json index ee97e1530a..69302f6f8b 100644 --- a/packages/medusa-payment-stripe/package.json +++ b/packages/medusa-payment-stripe/package.json @@ -1,6 +1,6 @@ { "name": "medusa-payment-stripe", - "version": "1.1.40", + "version": "1.1.41", "description": "Stripe Payment provider for Meduas Commerce", "main": "index.js", "repository": { diff --git a/packages/medusa-plugin-add-ons/.babelrc b/packages/medusa-plugin-add-ons/.babelrc deleted file mode 100644 index 4d2dfe8f09..0000000000 --- a/packages/medusa-plugin-add-ons/.babelrc +++ /dev/null @@ -1,13 +0,0 @@ -{ - "plugins": [ - "@babel/plugin-proposal-class-properties", - "@babel/plugin-transform-instanceof", - "@babel/plugin-transform-classes" - ], - "presets": ["@babel/preset-env"], - "env": { - "test": { - "plugins": ["@babel/plugin-transform-runtime"] - } - } -} diff --git a/packages/medusa-plugin-add-ons/.gitignore b/packages/medusa-plugin-add-ons/.gitignore deleted file mode 100644 index 718e6cc25c..0000000000 --- a/packages/medusa-plugin-add-ons/.gitignore +++ /dev/null @@ -1,15 +0,0 @@ -/lib -node_modules -.DS_store -.env* -/*.js -!index.js - -/dist - -/api -/services -/models -/subscribers -/loaders - diff --git a/packages/medusa-plugin-add-ons/.npmignore b/packages/medusa-plugin-add-ons/.npmignore deleted file mode 100644 index 2719a56633..0000000000 --- a/packages/medusa-plugin-add-ons/.npmignore +++ /dev/null @@ -1,8 +0,0 @@ -/lib -node_modules -.DS_store -.env* -/*.js -!index.js -yarn.lock -/src \ No newline at end of file diff --git a/packages/medusa-plugin-add-ons/CHANGELOG.md b/packages/medusa-plugin-add-ons/CHANGELOG.md deleted file mode 100644 index ab0913294d..0000000000 --- a/packages/medusa-plugin-add-ons/CHANGELOG.md +++ /dev/null @@ -1,222 +0,0 @@ -# Change Log 
- -All notable changes to this project will be documented in this file. -See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. - -# [1.2.0](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.37...medusa-plugin-add-ons@1.2.0) (2022-05-01) - -**Note:** Version bump only for package medusa-plugin-add-ons - - - - - -## [1.1.37](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.36...medusa-plugin-add-ons@1.1.37) (2022-01-11) - -**Note:** Version bump only for package medusa-plugin-add-ons - - - - - -## [1.1.36](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.35...medusa-plugin-add-ons@1.1.36) (2021-12-29) - -**Note:** Version bump only for package medusa-plugin-add-ons - - - - - -## [1.1.35](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.34...medusa-plugin-add-ons@1.1.35) (2021-12-17) - -**Note:** Version bump only for package medusa-plugin-add-ons - - - - - -## [1.1.34](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.33...medusa-plugin-add-ons@1.1.34) (2021-12-08) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.33](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.32...medusa-plugin-add-ons@1.1.33) (2021-11-23) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.32](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.31...medusa-plugin-add-ons@1.1.32) (2021-11-22) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.31](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.30...medusa-plugin-add-ons@1.1.31) (2021-11-19) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.30](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.29...medusa-plugin-add-ons@1.1.30) (2021-11-19) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.29](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.28...medusa-plugin-add-ons@1.1.29) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.28](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.27...medusa-plugin-add-ons@1.1.28) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.27](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.25...medusa-plugin-add-ons@1.1.27) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.26](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.25...medusa-plugin-add-ons@1.1.26) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.25](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.24...medusa-plugin-add-ons@1.1.25) (2021-09-15) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.24](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.23...medusa-plugin-add-ons@1.1.24) (2021-09-14) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.23](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.22...medusa-plugin-add-ons@1.1.23) (2021-08-05) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.22](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.21...medusa-plugin-add-ons@1.1.22) (2021-07-26) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## 
[1.1.21](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.19...medusa-plugin-add-ons@1.1.21) (2021-07-15) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.20](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.19...medusa-plugin-add-ons@1.1.20) (2021-07-15) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.19](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.18...medusa-plugin-add-ons@1.1.19) (2021-07-02) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.18](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.17...medusa-plugin-add-ons@1.1.18) (2021-06-22) - -### Bug Fixes - -- release assist ([668e8a7](https://github.com/medusajs/medusa/commit/668e8a740200847fc2a41c91d2979097f1392532)) - -## [1.1.17](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.16...medusa-plugin-add-ons@1.1.17) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.16](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.15...medusa-plugin-add-ons@1.1.16) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.15](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.14...medusa-plugin-add-ons@1.1.15) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.14](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.13...medusa-plugin-add-ons@1.1.14) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.13](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.12...medusa-plugin-add-ons@1.1.13) (2021-06-08) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.12](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.9...medusa-plugin-add-ons@1.1.12) (2021-04-28) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.11](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.10...medusa-plugin-add-ons@1.1.11) (2021-04-20) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.10](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.9...medusa-plugin-add-ons@1.1.10) (2021-04-20) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.9](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.8...medusa-plugin-add-ons@1.1.9) (2021-04-13) - -### Bug Fixes - -- merge develop ([2982a8e](https://github.com/medusajs/medusa/commit/2982a8e682e90beb4549d969d9d3b04d78a46a2d)) -- merge develop ([a468c45](https://github.com/medusajs/medusa/commit/a468c451e82c68f41b5005a2e480057f6124aaa6)) - -## [1.1.8](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.7...medusa-plugin-add-ons@1.1.8) (2021-04-13) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.7](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.6...medusa-plugin-add-ons@1.1.7) (2021-03-30) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.6](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.5...medusa-plugin-add-ons@1.1.6) (2021-03-17) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.5](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.3...medusa-plugin-add-ons@1.1.5) (2021-03-17) - -**Note:** Version bump only for package 
medusa-plugin-add-ons - -## [1.1.4](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.3...medusa-plugin-add-ons@1.1.4) (2021-03-17) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.3](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.2...medusa-plugin-add-ons@1.1.3) (2021-02-17) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.2](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.1...medusa-plugin-add-ons@1.1.2) (2021-02-03) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.1.1](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.1.0...medusa-plugin-add-ons@1.1.1) (2021-01-27) - -**Note:** Version bump only for package medusa-plugin-add-ons - -# [1.1.0](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.8...medusa-plugin-add-ons@1.1.0) (2021-01-26) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.0.8](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.7...medusa-plugin-add-ons@1.0.8) (2020-12-17) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.0.7](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.6...medusa-plugin-add-ons@1.0.7) (2020-11-24) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.0.6](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.5...medusa-plugin-add-ons@1.0.6) (2020-10-19) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.0.5](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.4...medusa-plugin-add-ons@1.0.5) (2020-10-12) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.0.4](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.3...medusa-plugin-add-ons@1.0.4) (2020-10-09) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.0.3](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.2...medusa-plugin-add-ons@1.0.3) (2020-10-05) - -### Bug Fixes - -- **medusa-plugin-add-ons:** fixes add-on decorator ([5071f36](https://github.com/medusajs/medusa/commit/5071f362e4e140d11a1342a5058e8ad2efaa1ed4)) -- **medusa-plugin-addon:** Fixes admin endpoints, Adds flag to avoid merging add-on line-items ([d8483cd](https://github.com/medusajs/medusa/commit/d8483cd1352ecc587112723786b7c31882f9416e)) - -## [1.0.2](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.1...medusa-plugin-add-ons@1.0.2) (2020-09-09) - -**Note:** Version bump only for package medusa-plugin-add-ons - -## [1.0.1](https://github.com/medusajs/medusa/compare/medusa-plugin-add-ons@1.0.0...medusa-plugin-add-ons@1.0.1) (2020-09-09) - -**Note:** Version bump only for package medusa-plugin-add-ons - -# 1.0.0 (2020-09-09) - -### Features - -- **plugins:** Adds add-on plugin ([3de1e6d](https://github.com/medusajs/medusa/commit/3de1e6dd4ad4a2a48d4d8116ebdd011efce2b22a)) diff --git a/packages/medusa-plugin-add-ons/index.js b/packages/medusa-plugin-add-ons/index.js deleted file mode 100644 index 172f1ae6a4..0000000000 --- a/packages/medusa-plugin-add-ons/index.js +++ /dev/null @@ -1 +0,0 @@ -// noop diff --git a/packages/medusa-plugin-add-ons/package.json b/packages/medusa-plugin-add-ons/package.json deleted file mode 100644 index 41422fcd1b..0000000000 --- a/packages/medusa-plugin-add-ons/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "medusa-plugin-add-ons", - "version": "1.2.0", - 
"description": "Add-on plugin for Medusa Commerce", - "main": "index.js", - "repository": { - "type": "git", - "url": "https://github.com/medusajs/medusa", - "directory": "packages/medusa-plugin-add-ons" - }, - "author": "Oliver Juhl", - "license": "AGPL-3.0-or-later", - "devDependencies": { - "@babel/cli": "^7.7.5", - "@babel/core": "^7.7.5", - "@babel/node": "^7.7.4", - "@babel/plugin-proposal-class-properties": "^7.7.4", - "@babel/plugin-transform-classes": "^7.9.5", - "@babel/plugin-transform-instanceof": "^7.8.3", - "@babel/plugin-transform-runtime": "^7.7.6", - "@babel/preset-env": "^7.7.5", - "@babel/register": "^7.7.4", - "@babel/runtime": "^7.9.6", - "client-sessions": "^0.8.0", - "cross-env": "^7.0.2", - "eslint": "^6.8.0", - "jest": "^25.5.2", - "medusa-test-utils": "^1.1.37" - }, - "scripts": { - "build": "babel src -d . --ignore **/__tests__", - "prepare": "cross-env NODE_ENV=production npm run build", - "watch": "babel -w src --out-dir . --ignore **/__tests__", - "test": "jest" - }, - "dependencies": { - "body-parser": "^1.19.0", - "cors": "^2.8.5", - "express": "^4.17.1", - "medusa-core-utils": "^1.1.31", - "redis": "^3.0.2" - }, - "gitHead": "81a7ff73d012fda722f6e9ef0bd9ba0232d37808" -} diff --git a/packages/medusa-plugin-add-ons/src/api/index.js b/packages/medusa-plugin-add-ons/src/api/index.js deleted file mode 100644 index 2ed442a00a..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/index.js +++ /dev/null @@ -1,12 +0,0 @@ -import { Router } from "express" -import admin from "./routes/admin" -import store from "./routes/store" - -export default (rootDirectory) => { - const app = Router() - - store(app, rootDirectory) - admin(app, rootDirectory) - - return app -} diff --git a/packages/medusa-plugin-add-ons/src/api/middlewares/await-middleware.js b/packages/medusa-plugin-add-ons/src/api/middlewares/await-middleware.js deleted file mode 100644 index 1c3692b377..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/middlewares/await-middleware.js +++ /dev/null @@ -1 +0,0 @@ -export default (fn) => (...args) => fn(...args).catch(args[2]) diff --git a/packages/medusa-plugin-add-ons/src/api/middlewares/index.js b/packages/medusa-plugin-add-ons/src/api/middlewares/index.js deleted file mode 100644 index c784e319a9..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/middlewares/index.js +++ /dev/null @@ -1,5 +0,0 @@ -import { default as wrap } from "./await-middleware" - -export default { - wrap, -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/admin/create-add-on.js b/packages/medusa-plugin-add-ons/src/api/routes/admin/create-add-on.js deleted file mode 100644 index 08343344b9..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/admin/create-add-on.js +++ /dev/null @@ -1,29 +0,0 @@ -import { Validator, MedusaError } from "medusa-core-utils" - -export default async (req, res) => { - const schema = Validator.object().keys({ - name: Validator.string().required(), - prices: Validator.array() - .items({ - currency_code: Validator.string(), - amount: Validator.number(), - }) - .required(), - valid_for: Validator.array().items(), - metadata: Validator.object().optional(), - }) - - const { value, error } = schema.validate(req.body) - if (error) { - throw new MedusaError(MedusaError.Types.INVALID_DATA, error.details) - } - try { - const addOnService = req.scope.resolve("addOnService") - - const addOn = await addOnService.create(value) - - res.status(200).json({ addOn }) - } catch (err) { - throw err - } -} diff --git 
a/packages/medusa-plugin-add-ons/src/api/routes/admin/delete-add-on.js b/packages/medusa-plugin-add-ons/src/api/routes/admin/delete-add-on.js deleted file mode 100644 index d60678b8e6..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/admin/delete-add-on.js +++ /dev/null @@ -1,16 +0,0 @@ -export default async (req, res) => { - const { id } = req.params - - const addOnService = req.scope.resolve("addOnService") - try { - await addOnService.delete(id) - - res.status(200).send({ - id, - object: "addOn", - deleted: true, - }) - } catch (err) { - throw err - } -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/admin/get-add-on.js b/packages/medusa-plugin-add-ons/src/api/routes/admin/get-add-on.js deleted file mode 100644 index a4ef0bfbf6..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/admin/get-add-on.js +++ /dev/null @@ -1,17 +0,0 @@ -export default async (req, res) => { - const { id } = req.params - - try { - const addOnService = req.scope.resolve("addOnService") - let addOn = await addOnService.retrieve(id) - addOn = await addOnService.decorate( - addOn, - ["name", "valid_for", "prices"], - ["valid_for"] - ) - - res.json({ add_on: addOn }) - } catch (err) { - throw err - } -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/admin/index.js b/packages/medusa-plugin-add-ons/src/api/routes/admin/index.js deleted file mode 100644 index 5defbd8475..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/admin/index.js +++ /dev/null @@ -1,55 +0,0 @@ -import { Router } from "express" -import bodyParser from "body-parser" -import cors from "cors" -import middlewares from "../../middlewares" -import { getConfigFile } from "medusa-core-utils" - -const route = Router() - -export default (app, rootDirectory) => { - const { configModule } = getConfigFile(rootDirectory, `medusa-config`) - const config = (configModule && configModule.projectConfig) || {} - - const adminCors = config.admin_cors || "" - - route.use( - cors({ - origin: adminCors.split(","), - credentials: true, - }) - ) - - app.use("/admin", route) - - route.post( - "/add-ons", - bodyParser.json(), - middlewares.wrap(require("./create-add-on").default) - ) - - route.post( - "/add-ons/:id", - bodyParser.json(), - middlewares.wrap(require("./update-add-on").default) - ) - - route.get( - "/add-ons", - bodyParser.json(), - middlewares.wrap(require("./list-add-ons").default) - ) - - route.get( - "/add-ons/:id", - bodyParser.json(), - middlewares.wrap(require("./get-add-on").default) - ) - - route.delete( - "/add-ons/:id", - bodyParser.json(), - middlewares.wrap(require("./delete-add-on").default) - ) - - return app -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/admin/list-add-ons.js b/packages/medusa-plugin-add-ons/src/api/routes/admin/list-add-ons.js deleted file mode 100644 index fc61f78f36..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/admin/list-add-ons.js +++ /dev/null @@ -1,20 +0,0 @@ -export default async (req, res) => { - try { - const addOnService = req.scope.resolve("addOnService") - let addOns = await addOnService.list({}) - addOns = await Promise.all( - addOns.map((ao) => - addOnService.decorate( - ao, - ["name", "valid_for", "prices"], - ["valid_for"] - ) - ) - ) - - res.status(200).json({ add_ons: addOns }) - } catch (err) { - console.log(err) - throw err - } -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/admin/update-add-on.js b/packages/medusa-plugin-add-ons/src/api/routes/admin/update-add-on.js deleted file mode 100644 index 
d5cff10c64..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/admin/update-add-on.js +++ /dev/null @@ -1,39 +0,0 @@ -import { Validator, MedusaError } from "medusa-core-utils" - -export default async (req, res) => { - const { id } = req.params - - const schema = Validator.object().keys({ - name: Validator.string().optional(), - prices: Validator.array() - .items({ - currency_code: Validator.string().required(), - amount: Validator.number().required(), - }) - .optional(), - valid_for: Validator.array().optional(), - metadata: Validator.object().optional(), - }) - - const { value, error } = schema.validate(req.body) - if (error) { - throw new MedusaError(MedusaError.Types.INVALID_DATA, error.details) - } - try { - const addOnService = req.scope.resolve("addOnService") - - if (value.metadata) { - Object.entries(value.metadata).map(([key, value]) => { - addOnService.setMetadata(id, key, value) - }) - - delete value.metadata - } - - const addOn = await addOnService.update(id, value) - - res.status(200).json({ addOn }) - } catch (err) { - throw err - } -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/store/create-line-item.js b/packages/medusa-plugin-add-ons/src/api/routes/store/create-line-item.js deleted file mode 100644 index 2cd3a4d7c9..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/store/create-line-item.js +++ /dev/null @@ -1,39 +0,0 @@ -import { Validator, MedusaError } from "medusa-core-utils" - -export default async (req, res) => { - const { id } = req.params - - const schema = Validator.object().keys({ - variant_id: Validator.string().required(), - quantity: Validator.number().required(), - add_ons: Validator.array().items(Validator.string()).optional(), - metadata: Validator.object().optional(), - }) - - const { value, error } = schema.validate(req.body) - if (error) { - throw new MedusaError(MedusaError.Types.INVALID_DATA, error.details) - } - - try { - const lineItemService = req.scope.resolve("addOnLineItemService") - const cartService = req.scope.resolve("cartService") - - let cart = await cartService.retrieve(id) - - const lineItem = await lineItemService.generate( - value.variant_id, - cart.region_id, - value.quantity, - value.add_ons, - value.metadata - ) - - cart = await cartService.addLineItem(cart._id, lineItem) - cart = await cartService.decorate(cart, [], ["region"]) - - res.status(200).json({ cart }) - } catch (err) { - throw err - } -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/store/get-by-product.js b/packages/medusa-plugin-add-ons/src/api/routes/store/get-by-product.js deleted file mode 100644 index d6746cde9d..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/store/get-by-product.js +++ /dev/null @@ -1,27 +0,0 @@ -import { Validator } from "medusa-core-utils" - -export default async (req, res) => { - const schema = Validator.object({ - product_id: Validator.string().required(), - }) - - const { value, error } = schema.validate(region_id) - - if (error) { - throw error - } - - try { - const addOnService = req.scope.resolve("addOnService") - let addOn = await addOnService.retrieveByProduct(value.product_id) - addOn = await addOnService.decorate( - addOn, - ["name", "valid_for", "prices"], - ["valid_for"] - ) - - res.json({ add_on: addOn }) - } catch (err) { - throw err - } -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/store/index.js b/packages/medusa-plugin-add-ons/src/api/routes/store/index.js deleted file mode 100644 index 0a02b4c058..0000000000 --- 
a/packages/medusa-plugin-add-ons/src/api/routes/store/index.js +++ /dev/null @@ -1,37 +0,0 @@ -import { Router } from "express" -import bodyParser from "body-parser" -import cors from "cors" -import middlewares from "../../middlewares" -import { getConfigFile } from "medusa-core-utils" - -const route = Router() - -export default (app, rootDirectory) => { - const { configModule } = getConfigFile(rootDirectory, `medusa-config`) - const config = (configModule && configModule.projectConfig) || {} - - const storeCors = config.store_cors || "" - - route.use( - cors({ - origin: storeCors.split(","), - credentials: true, - }) - ) - - app.use("/store", route) - - route.post( - "/carts/:id/line-items/add-on", - bodyParser.json(), - middlewares.wrap(require("./create-line-item").default) - ) - - route.post( - "/carts/:id/line-items/:line_id/add-on", - bodyParser.json(), - middlewares.wrap(require("./update-line-item").default) - ) - - return app -} diff --git a/packages/medusa-plugin-add-ons/src/api/routes/store/update-line-item.js b/packages/medusa-plugin-add-ons/src/api/routes/store/update-line-item.js deleted file mode 100644 index b3717325a7..0000000000 --- a/packages/medusa-plugin-add-ons/src/api/routes/store/update-line-item.js +++ /dev/null @@ -1,52 +0,0 @@ -import { Validator, MedusaError } from "medusa-core-utils" - -export default async (req, res) => { - const { id, line_id } = req.params - - const schema = Validator.object().keys({ - add_ons: Validator.array().items(Validator.string()).optional(), - quantity: Validator.number().optional(), - metadata: Validator.object().optional(), - }) - - const { value, error } = schema.validate(req.body) - if (error) { - throw new MedusaError(MedusaError.Types.INVALID_DATA, error.details) - } - - try { - const lineItemService = req.scope.resolve("addOnLineItemService") - const cartService = req.scope.resolve("cartService") - - let cart - if (value.quantity === 0) { - cart = await cartService.removeLineItem(id, line_id) - } else { - cart = await cartService.retrieve(id) - - const existing = cart.items.find((i) => i._id.equals(line_id)) - if (!existing) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - "Could not find the line item" - ) - } - - const lineItem = await lineItemService.generate( - existing.content.variant._id, - cart.region_id, - value.quantity, - value.add_ons, - value.metadata - ) - - cart = await cartService.updateLineItem(cart._id, line_id, lineItem) - } - - cart = await cartService.decorate(cart, [], ["region"]) - - res.status(200).json({ cart }) - } catch (err) { - throw err - } -} diff --git a/packages/medusa-plugin-add-ons/src/loaders/decorator.js b/packages/medusa-plugin-add-ons/src/loaders/decorator.js deleted file mode 100644 index 2a94b4f2cc..0000000000 --- a/packages/medusa-plugin-add-ons/src/loaders/decorator.js +++ /dev/null @@ -1,26 +0,0 @@ -export default (container, config) => { - const cartService = container.resolve("cartService") - const addOnLineItemService = container.resolve("addOnLineItemService") - - cartService.addDecorator(async (cart) => { - try { - cart.items = await Promise.all( - cart.items.map((item) => { - if (item.metadata && item.metadata.add_ons) { - return addOnLineItemService.decorate( - item, - ["title", "quantity", "thumbnail", "content", "should_merge"], - ["add_ons"] - ) - } else { - return item - } - }) - ) - - return cart - } catch (error) { - return cart - } - }) -} diff --git a/packages/medusa-plugin-add-ons/src/models/__mocks__/add-on.js 
b/packages/medusa-plugin-add-ons/src/models/__mocks__/add-on.js deleted file mode 100644 index 1f96791cd4..0000000000 --- a/packages/medusa-plugin-add-ons/src/models/__mocks__/add-on.js +++ /dev/null @@ -1,46 +0,0 @@ -import { IdMap } from "medusa-test-utils" - -export const addOns = { - testAddOn: { - _id: IdMap.getId("test-add-on"), - name: "Chili", - prices: [ - { - currency_code: "DKK", - amount: 20, - }, - ], - valid_for: [IdMap.getId("test-product")], - }, - testAddOn2: { - _id: IdMap.getId("test-add-on-2"), - name: "Chili", - prices: [ - { - currency_code: "DKK", - amount: 20, - }, - ], - valid_for: [IdMap.getId("test-product")], - }, -} - -export const AddOnModelMock = { - create: jest.fn().mockReturnValue(Promise.resolve()), - find: jest.fn().mockImplementation((query) => { - return Promise.resolve([addOns.testAddOn, addOns.testAddOn2]) - }), - updateOne: jest.fn().mockImplementation((query, update) => { - return Promise.resolve() - }), - deleteOne: jest.fn().mockReturnValue(Promise.resolve()), - findOne: jest.fn().mockImplementation((query) => { - if (query._id === IdMap.getId("test-add-on")) { - return Promise.resolve(addOns.testAddOn) - } - if (query._id === IdMap.getId("test-add-on-2")) { - return Promise.resolve(addOns.testAddOn2) - } - return Promise.resolve(undefined) - }), -} diff --git a/packages/medusa-plugin-add-ons/src/models/add-on.js b/packages/medusa-plugin-add-ons/src/models/add-on.js deleted file mode 100644 index 9e876547f7..0000000000 --- a/packages/medusa-plugin-add-ons/src/models/add-on.js +++ /dev/null @@ -1,15 +0,0 @@ -import mongoose from "mongoose" -import { BaseModel } from "medusa-interfaces" - -class AddOnModel extends BaseModel { - static modelName = "AddOn" - static schema = { - name: { type: String, required: true }, - prices: { type: [], required: true }, - // Valid products - valid_for: { type: [String], required: true }, - metadata: { type: mongoose.Schema.Types.Mixed, default: {} }, - } -} - -export default AddOnModel diff --git a/packages/medusa-plugin-add-ons/src/services/__mocks__/add-on.js b/packages/medusa-plugin-add-ons/src/services/__mocks__/add-on.js deleted file mode 100644 index 5fc06b9d7b..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/__mocks__/add-on.js +++ /dev/null @@ -1,70 +0,0 @@ -import { IdMap } from "medusa-test-utils" - -export const addOns = { - testAddOn: { - _id: IdMap.getId("test-add-on"), - name: "Chili", - prices: [ - { - currency_code: "DKK", - amount: 20, - }, - ], - valid_for: [IdMap.getId("test-product")], - }, - testAddOn2: { - _id: IdMap.getId("test-add-on-2"), - name: "Chili", - prices: [ - { - currency_code: "DKK", - amount: 20, - }, - ], - valid_for: [IdMap.getId("test-product")], - }, - testAddOn3: { - _id: IdMap.getId("test-add-on-3"), - name: "Herbs", - prices: [ - { - currency_code: "DKK", - amount: 20, - }, - ], - valid_for: [], - }, -} - -export const AddOnServiceMock = { - retrieve: jest.fn().mockImplementation((addOnId) => { - if (addOnId === IdMap.getId("test-add-on")) { - return Promise.resolve(addOns.testAddOn) - } - if (addOnId === IdMap.getId("test-add-on-2")) { - return Promise.resolve(addOns.testAddOn2) - } - if (addOnId === IdMap.getId("test-add-on-3")) { - return Promise.resolve(addOns.testAddOn3) - } - return Promise.resolve(undefined) - }), - getRegionPrice: jest.fn().mockImplementation((addOnId, regionId) => { - if (addOnId === IdMap.getId("test-add-on")) { - return Promise.resolve(20) - } - if (addOnId === IdMap.getId("test-add-on-2")) { - return Promise.resolve(20) - } - if 
(addOnId === IdMap.getId("test-add-on-3")) { - return Promise.resolve(20) - } - return Promise.resolve(undefined) - }), -} - -const mock = jest.fn().mockImplementation(() => { - return AddOnServiceMock -}) - -export default mock diff --git a/packages/medusa-plugin-add-ons/src/services/__mocks__/event-bus.js b/packages/medusa-plugin-add-ons/src/services/__mocks__/event-bus.js deleted file mode 100644 index e9031d9428..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/__mocks__/event-bus.js +++ /dev/null @@ -1,10 +0,0 @@ -export const EventBusServiceMock = { - emit: jest.fn(), - subscribe: jest.fn(), -} - -const mock = jest.fn().mockImplementation(() => { - return EventBusServiceMock -}) - -export default mock diff --git a/packages/medusa-plugin-add-ons/src/services/__mocks__/product-variant.js b/packages/medusa-plugin-add-ons/src/services/__mocks__/product-variant.js deleted file mode 100644 index d7f69a54ad..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/__mocks__/product-variant.js +++ /dev/null @@ -1,94 +0,0 @@ -import { IdMap } from "medusa-test-utils" - -const variant1 = { - _id: IdMap.getId("test-variant-1"), - title: "variant1", - options: [], -} - -const variant2 = { - _id: IdMap.getId("test-variant-2"), - title: "variant2", - options: [ - { - option_id: IdMap.getId("color_id"), - value: "black", - }, - { - option_id: IdMap.getId("size_id"), - value: "160", - }, - ], -} - -const variant3 = { - _id: IdMap.getId("test-variant-3"), - title: "variant3", - options: [ - { - option_id: IdMap.getId("color_id"), - value: "blue", - }, - { - option_id: IdMap.getId("size_id"), - value: "150", - }, - ], -} - -const variant4 = { - _id: IdMap.getId("test-variant-4"), - title: "variant4", - options: [ - { - option_id: IdMap.getId("color_id"), - value: "blue", - }, - { - option_id: IdMap.getId("size_id"), - value: "50", - }, - ], -} - -export const variants = { - one: variant1, - two: variant2, - three: variant3, - four: variant4, -} - -export const ProductVariantServiceMock = { - retrieve: jest.fn().mockImplementation((variantId) => { - if (variantId === IdMap.getId("test-variant-1")) { - return Promise.resolve(variant1) - } - if (variantId === IdMap.getId("test-variant-2")) { - return Promise.resolve(variant2) - } - if (variantId === IdMap.getId("test-variant-3")) { - return Promise.resolve(variant3) - } - if (variantId === IdMap.getId("test-variant-4")) { - return Promise.resolve(variant4) - } - return Promise.resolve(undefined) - }), - getRegionPrice: jest.fn().mockImplementation((variantId, context) => { - if (variantId === IdMap.getId("test-variant-1")) { - if (context.regionId === IdMap.getId("world")) { - return Promise.resolve(10) - } else { - return Promise.resolve(20) - } - } - - return Promise.reject(new Error("Not found")) - }), -} - -const mock = jest.fn().mockImplementation(() => { - return ProductVariantServiceMock -}) - -export default mock diff --git a/packages/medusa-plugin-add-ons/src/services/__mocks__/product.js b/packages/medusa-plugin-add-ons/src/services/__mocks__/product.js deleted file mode 100644 index 6717cdec9a..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/__mocks__/product.js +++ /dev/null @@ -1,39 +0,0 @@ -import { IdMap } from "medusa-test-utils" - -export const products = { - product1: { - _id: IdMap.getId("test-product"), - description: "Test description", - title: "Product 1", - variants: [IdMap.getId("test-variant-1")], - // metadata: { - // add_ons: [IdMap.getId("test-add-on"), IdMap.getId("test-add-on-2")], - // }, - }, 
- product2: { - _id: IdMap.getId("test-product-2"), - title: "Product 2", - metadata: {}, - }, -} - -export const ProductServiceMock = { - retrieve: jest.fn().mockImplementation((productId) => { - if (productId === IdMap.getId("test-product")) { - return Promise.resolve(products.product1) - } - if (productId === IdMap.getId("test-product-2")) { - return Promise.resolve(products.product2) - } - return Promise.resolve(undefined) - }), - list: jest.fn().mockImplementation((query) => { - return Promise.resolve([products.product1]) - }), -} - -const mock = jest.fn().mockImplementation(() => { - return ProductServiceMock -}) - -export default mock diff --git a/packages/medusa-plugin-add-ons/src/services/__mocks__/region.js b/packages/medusa-plugin-add-ons/src/services/__mocks__/region.js deleted file mode 100644 index 6fd9cb71f0..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/__mocks__/region.js +++ /dev/null @@ -1,28 +0,0 @@ -import { IdMap } from "medusa-test-utils" - -export const regions = { - testRegion: { - _id: IdMap.getId("world"), - name: "Test Region", - countries: ["DK", "US", "DE"], - tax_rate: 0.25, - payment_providers: ["default_provider", "unregistered"], - fulfillment_providers: ["test_shipper"], - currency_code: "DKK", - }, -} - -export const RegionServiceMock = { - retrieve: jest.fn().mockImplementation((regionId) => { - if (regionId === IdMap.getId("world")) { - return Promise.resolve(regions.testRegion) - } - throw Error(regionId + "not found") - }), -} - -const mock = jest.fn().mockImplementation(() => { - return RegionServiceMock -}) - -export default mock diff --git a/packages/medusa-plugin-add-ons/src/services/__tests__/add-on-line-item.js b/packages/medusa-plugin-add-ons/src/services/__tests__/add-on-line-item.js deleted file mode 100644 index 2b2da853be..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/__tests__/add-on-line-item.js +++ /dev/null @@ -1,106 +0,0 @@ -import { IdMap } from "medusa-test-utils" -import AddOnLineItemService from "../add-on-line-item" -import { ProductVariantServiceMock } from "../__mocks__/product-variant" -import { ProductServiceMock } from "../__mocks__/product" -import { RegionServiceMock } from "../__mocks__/region" -import { AddOnServiceMock } from "../__mocks__/add-on" - -describe("LineItemService", () => { - describe("generate", () => { - let result - - const lineItemService = new AddOnLineItemService({ - addOnService: AddOnServiceMock, - productVariantService: ProductVariantServiceMock, - productService: ProductServiceMock, - regionService: RegionServiceMock, - }) - - beforeAll(async () => { - jest.clearAllMocks() - }) - - it("generates line item and successfully calculates full unit_price", async () => { - result = await lineItemService.generate( - IdMap.getId("test-variant-1"), - IdMap.getId("world"), - 1, - [IdMap.getId("test-add-on"), IdMap.getId("test-add-on-2")] - ) - expect(result).toEqual({ - title: "Product 1", - thumbnail: undefined, - should_merge: false, - content: { - unit_price: 50, - variant: { - _id: IdMap.getId("test-variant-1"), - title: "variant1", - options: [], - }, - product: { - _id: IdMap.getId("test-product"), - description: "Test description", - title: "Product 1", - variants: [IdMap.getId("test-variant-1")], - }, - quantity: 1, - }, - metadata: { - add_ons: [IdMap.getId("test-add-on"), IdMap.getId("test-add-on-2")], - }, - quantity: 1, - }) - }) - - it("generates line item and successfully calculates full unit_price for large quantity", async () => { - result = await 
lineItemService.generate( - IdMap.getId("test-variant-1"), - IdMap.getId("world"), - 3, - [IdMap.getId("test-add-on"), IdMap.getId("test-add-on-2")] - ) - expect(result).toEqual({ - title: "Product 1", - thumbnail: undefined, - should_merge: false, - content: { - unit_price: 150, - variant: { - _id: IdMap.getId("test-variant-1"), - title: "variant1", - options: [], - }, - product: { - _id: IdMap.getId("test-product"), - description: "Test description", - title: "Product 1", - variants: [IdMap.getId("test-variant-1")], - }, - quantity: 1, - }, - metadata: { - add_ons: [IdMap.getId("test-add-on"), IdMap.getId("test-add-on-2")], - }, - quantity: 3, - }) - }) - - it("fails if variant has no associated product", async () => { - try { - await lineItemService.generate( - IdMap.getId("test-variant-1"), - IdMap.getId("world"), - 1, - [ - IdMap.getId("test-add-on"), - IdMap.getId("test-add-on-2"), - IdMap.getId("test-add-on-3"), - ] - ) - } catch (err) { - expect(err.message).toBe(`Herbs can not be added to Product 1`) - } - }) - }) -}) diff --git a/packages/medusa-plugin-add-ons/src/services/__tests__/add-on.js b/packages/medusa-plugin-add-ons/src/services/__tests__/add-on.js deleted file mode 100644 index 1ef9d42064..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/__tests__/add-on.js +++ /dev/null @@ -1,134 +0,0 @@ -import { IdMap } from "medusa-test-utils" -import { AddOnModelMock, addOns } from "../../models/__mocks__/add-on" -import AddOnService from "../add-on" -import { EventBusServiceMock } from "../__mocks__/event-bus" -import { ProductServiceMock } from "../__mocks__/product" - -describe("AddOnService", () => { - describe("create", () => { - const addOnService = new AddOnService({ - addOnModel: AddOnModelMock, - productService: ProductServiceMock, - eventBusService: EventBusServiceMock, - }) - - beforeEach(async () => { - jest.clearAllMocks() - }) - - it("calls model layer create", async () => { - await addOnService.create({ - name: "Chili", - prices: [ - { - currency_code: "DKK", - amount: 20, - }, - ], - valid_for: [IdMap.getId("test-product")], - }) - - expect(AddOnModelMock.create).toBeCalledTimes(1) - expect(AddOnModelMock.create).toBeCalledWith({ - name: "Chili", - prices: [ - { - currency_code: "DKK", - amount: 20, - }, - ], - valid_for: [IdMap.getId("test-product")], - }) - }) - }) - - describe("retrieve", () => { - let result - beforeAll(async () => { - jest.clearAllMocks() - const addOnService = new AddOnService({ - addOnModel: AddOnModelMock, - }) - result = await addOnService.retrieve(IdMap.getId("test-add-on")) - }) - - it("calls model layer retrieve", async () => { - expect(AddOnModelMock.findOne).toBeCalledTimes(1) - expect(AddOnModelMock.findOne).toBeCalledWith({ - _id: IdMap.getId("test-add-on"), - }) - }) - - it("returns the add-on", () => { - expect(result).toEqual(addOns.testAddOn) - }) - }) - - describe("update", () => { - const addOnService = new AddOnService({ - addOnModel: AddOnModelMock, - productService: ProductServiceMock, - eventBusService: EventBusServiceMock, - }) - - beforeEach(async () => { - jest.clearAllMocks() - }) - - it("calls model layer create", async () => { - await addOnService.update(IdMap.getId("test-add-on"), { - name: "Chili Spice", - valid_for: [IdMap.getId("test-product"), IdMap.getId("test-product-2")], - }) - - expect(AddOnModelMock.updateOne).toBeCalledTimes(1) - expect(AddOnModelMock.updateOne).toBeCalledWith( - { _id: IdMap.getId("test-add-on") }, - { - $set: { - name: "Chili Spice", - valid_for: [ - 
IdMap.getId("test-product"), - IdMap.getId("test-product-2"), - ], - }, - }, - { runValidators: true } - ) - }) - }) - - describe("retrieveByProduct", () => { - describe("successful retrieval", () => { - let result - beforeAll(async () => { - jest.clearAllMocks() - const addOnService = new AddOnService({ - addOnModel: AddOnModelMock, - productService: ProductServiceMock, - }) - result = await addOnService.retrieveByProduct( - IdMap.getId("test-product") - ) - }) - - it("calls ProductService retrieve", async () => { - expect(ProductServiceMock.retrieve).toBeCalledTimes(1) - expect(ProductServiceMock.retrieve).toBeCalledWith( - IdMap.getId("test-product") - ) - }) - - it("calls model layer", () => { - expect(AddOnModelMock.find).toBeCalledTimes(1) - expect(AddOnModelMock.find).toBeCalledWith({ - valid_for: IdMap.getId("test-product"), - }) - }) - - it("returns the add-ons", () => { - expect(result).toEqual([addOns.testAddOn, addOns.testAddOn2]) - }) - }) - }) -}) diff --git a/packages/medusa-plugin-add-ons/src/services/add-on-line-item.js b/packages/medusa-plugin-add-ons/src/services/add-on-line-item.js deleted file mode 100644 index e30940be2a..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/add-on-line-item.js +++ /dev/null @@ -1,120 +0,0 @@ -import _ from "lodash" -import { BaseService } from "medusa-interfaces" -import { Validator, MedusaError } from "medusa-core-utils" - -class AddOnLineItemService extends BaseService { - static Events = { - UPDATED: "add_on.updated", - CREATED: "add_on.created", - } - - constructor( - { - addOnService, - productService, - productVariantService, - regionService, - eventBusService, - }, - options - ) { - super() - - this.addOnService_ = addOnService - - this.productService_ = productService - - this.productVariantService_ = productVariantService - - this.regionService_ = regionService - - this.eventBus_ = eventBusService - - this.options_ = options - } - - /** - * Generates a line item. 
- * @param {string} variantId - id of the line item variant - * @param {*} regionId - id of the cart region - * @param {*} quantity - number of items - * @param {[string]} addOnIds - id of add-ons - */ - async generate(variantId, regionId, quantity, addOnIds, metadata = {}) { - const variant = await this.productVariantService_.retrieve(variantId) - const region = await this.regionService_.retrieve(regionId) - - const products = await this.productService_.list({ variants: variantId }) - // this should never fail, since a variant must have a product associated - // with it to exists, but better safe than sorry - if (!products.length) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Could not find product for variant with id: ${variantId}` - ) - } - - const product = products[0] - - let unitPrice = await this.productVariantService_.getRegionPrice( - variant._id, - {regionId: region._id} - ) - - const addOnPrices = await Promise.all( - addOnIds.map(async (id) => { - const addOn = await this.addOnService_.retrieve(id) - // Check if any of the add-ons can't be added to the product - if (!addOn.valid_for.includes(`${product._id}`)) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `${addOn.name} can not be added to ${product.title}` - ) - } else { - return await this.addOnService_.getRegionPrice(id, region._id) - } - }) - ) - - unitPrice += _.sum(addOnPrices) - - const line = { - title: product.title, - quantity, - thumbnail: product.thumbnail, - should_merge: false, - content: { - unit_price: unitPrice * quantity, - variant, - product, - quantity: 1, - }, - should_merge: false, - metadata: { - ...metadata, - add_ons: addOnIds, - }, - } - - return line - } - - async decorate(lineItem, fields, expandFields = []) { - const requiredFields = ["_id", "metadata"] - const decorated = _.pick(lineItem, fields.concat(requiredFields)) - if ( - expandFields.includes("add_ons") && - decorated.metadata && - decorated.metadata.add_ons - ) { - decorated.metadata.add_ons = await Promise.all( - decorated.metadata.add_ons.map( - async (ao) => await this.addOnService_.retrieve(ao) - ) - ) - } - return decorated - } -} - -export default AddOnLineItemService diff --git a/packages/medusa-plugin-add-ons/src/services/add-on.js b/packages/medusa-plugin-add-ons/src/services/add-on.js deleted file mode 100644 index f4caa33a99..0000000000 --- a/packages/medusa-plugin-add-ons/src/services/add-on.js +++ /dev/null @@ -1,244 +0,0 @@ -import _ from "lodash" -import { BaseService } from "medusa-interfaces" -import { Validator, MedusaError } from "medusa-core-utils" - -class AddOnService extends BaseService { - static Events = { - UPDATED: "add_on.updated", - CREATED: "add_on.created", - } - - constructor( - { - addOnModel, - productService, - productVariantService, - regionService, - eventBusService, - }, - options - ) { - super() - - this.addOnModel_ = addOnModel - - this.productService_ = productService - - this.productVariantService_ = productVariantService - - this.regionService_ = regionService - - this.eventBus_ = eventBusService - - this.options_ = options - } - - /** - * Used to validate add-on ids. Throws an error if the cast fails - * @param {string} rawId - the raw add-on id to validate. 
-   * @return {string} the validated id
-   */
-  validateId_(rawId) {
-    return rawId
-  }
-
-  /**
-   * @param {Object} selector - the query object for find
-   * @param {number} offset - the number of documents to skip
-   * @param {number} limit - the maximum number of documents to return
-   * @return {Promise} the result of the find operation
-   */
-  list(selector, offset, limit) {
-    return this.addOnModel_.find(selector, {}, offset, limit)
-  }
-
-  /**
-   * Gets an add-on by id.
-   * @param {string} addOnId - the id of the add-on to get.
-   * @return {Promise} the add-on document.
-   */
-  async retrieve(addOnId) {
-    const validatedId = this.validateId_(addOnId)
-    const addOn = await this.addOnModel_
-      .findOne({ _id: validatedId })
-      .catch((err) => {
-        throw new MedusaError(MedusaError.Types.DB_ERROR, err.message)
-      })
-
-    if (!addOn) {
-      throw new MedusaError(
-        MedusaError.Types.NOT_FOUND,
-        `Add-on with id: ${addOnId} was not found`
-      )
-    }
-    return addOn
-  }
-
-  /**
-   * Creates an add-on.
-   * @param {object} addOn - the add-on to create
-   * @return {Promise} resolves to the creation result.
-   */
-  async create(addOn) {
-    await Promise.all(
-      addOn.valid_for.map((prodId) => this.productService_.retrieve(prodId))
-    )
-
-    return this.addOnModel_
-      .create(addOn)
-      .then((result) => {
-        this.eventBus_.emit(AddOnService.Events.CREATED, result)
-        return result
-      })
-      .catch((err) => {
-        throw new MedusaError(MedusaError.Types.DB_ERROR, err.message)
-      })
-  }
-
-  /**
-   * Deletes an add-on.
-   * @param {string} addOnId - the id of the add-on to delete
-   * @return {Promise} resolves to the deletion result.
-   */
-  async delete(addOnId) {
-    const addOn = await this.retrieve(addOnId)
-    return this.addOnModel_.deleteOne({ _id: addOn._id })
-  }
-
-  /**
-   * Retrieves all valid add-ons for a given product.
-   * @param {string} productId - the product id to find add-ons for
-   * @return {Promise} returns a promise containing all add-ons for the product
-   */
-  async retrieveByProduct(productId) {
-    const product = await this.productService_.retrieve(productId)
-    return this.addOnModel_.find({ valid_for: product._id })
-  }
-
-  /**
-   * Updates an add-on. Metadata updates should use dedicated methods, e.g.
-   * `setMetadata`, etc. The function will throw errors if metadata updates
-   * are attempted.
-   * @param {string} addOnId - the id of the add-on. Must be a string that
-   *   can be cast to an ObjectId
-   * @param {object} update - an object with the update values.
-   * @return {Promise} resolves to the update result.
-   */
-  async update(addOnId, update) {
-    const validatedId = this.validateId_(addOnId)
-
-    if (update.valid_for) {
-      await Promise.all(
-        update.valid_for.map((prodId) => this.productService_.retrieve(prodId))
-      )
-    }
-
-    if (update.metadata) {
-      throw new MedusaError(
-        MedusaError.Types.INVALID_DATA,
-        "Use setMetadata to update metadata fields"
-      )
-    }
-
-    return this.addOnModel_
-      .updateOne(
-        { _id: validatedId },
-        { $set: update },
-        { runValidators: true }
-      )
-      .catch((err) => {
-        throw new MedusaError(MedusaError.Types.DB_ERROR, err.message)
-      })
-  }
-
-  /**
-   * Gets the price specific to a region. If no region-specific money amount
-   * exists the function will try to use a currency price. If no default
-   * currency price exists the function will throw an error.
-   * @param {string} addOnId - the id of the add-on to get price from
-   * @param {string} regionId - the id of the region to get price for
-   * @return {number} the price specific to the region
-   */
-  async getRegionPrice(addOnId, regionId) {
-    const addOn = await this.retrieve(addOnId)
-    const region = await this.regionService_.retrieve(regionId)
-
-    let price
-    addOn.prices.forEach(({ amount, currency_code, region_id }) => {
-      if (!price && currency_code === region.currency_code) {
-        // If we haven't yet found a price and the current money amount is
-        // the default money amount for the currency of the region we have found
-        // a possible price match
-        price = amount
-      } else if (region_id === region._id) {
-        // If the region matches directly with the money amount this is the best
-        // price
-        price = amount
-      }
-    })
-
-    // Return the price if we found a suitable match
-    if (price !== undefined) {
-      return price
-    }
-
-    // If we got this far no price could be found for the region
-    throw new MedusaError(
-      MedusaError.Types.NOT_FOUND,
-      `A price for region: ${region.name} could not be found`
-    )
-  }
-
-  /**
-   * Decorates an add-on with expanded fields.
-   * @param {AddOn} addOn - the add-on to decorate.
-   * @param {string[]} fields - the fields to include.
-   * @param {string[]} expandFields - fields to expand.
-   * @return {AddOn} return the decorated add-on.
-   */
-  async decorate(addOn, fields, expandFields = []) {
-    const requiredFields = ["_id", "metadata"]
-    const decorated = _.pick(addOn, fields.concat(requiredFields))
-    if (expandFields.includes("valid_for")) {
-      decorated.valid_for = await Promise.all(
-        decorated.valid_for.map(
-          async (p) => await this.productService_.retrieve(p)
-        )
-      )
-    }
-    return decorated
-  }
-
-  /**
-   * Dedicated method to set metadata for an add-on.
-   * To ensure that plugins do not overwrite each
-   * other's metadata fields, setMetadata is provided.
-   * @param {string} addOnId - the id of the add-on to update.
-   * @param {string} key - key for metadata field
-   * @param {string} value - value for metadata field.
-   * @return {Promise} resolves to the update result.
-   */
-  async setMetadata(addOnId, key, value) {
-    const validatedId = this.validateId_(addOnId)
-
-    if (typeof key !== "string") {
-      throw new MedusaError(
-        MedusaError.Types.INVALID_ARGUMENT,
-        "Key type is invalid. Metadata keys must be strings"
-      )
-    }
-
-    const keyPath = `metadata.${key}`
-    return this.addOnModel_
-      .updateOne({ _id: validatedId }, { $set: { [keyPath]: value } })
-      .then((result) => {
-        this.eventBus_.emit(AddOnService.Events.UPDATED, result)
-        return result
-      })
-      .catch((err) => {
-        throw new MedusaError(MedusaError.Types.DB_ERROR, err.message)
-      })
-  }
-}
-
-export default AddOnService
diff --git a/packages/medusa-plugin-add-ons/yarn.lock b/packages/medusa-plugin-add-ons/yarn.lock
deleted file mode 100644
index 24a368e099..0000000000
--- a/packages/medusa-plugin-add-ons/yarn.lock
+++ /dev/null
@@ -1,5448 +0,0 @@
-# THIS IS AN AUTOGENERATED FILE. DO NOT EDIT THIS FILE DIRECTLY.
-# yarn lockfile v1 - - -"@babel/cli@^7.7.5": - version "7.11.6" - resolved "https://registry.yarnpkg.com/@babel/cli/-/cli-7.11.6.tgz#1fcbe61c2a6900c3539c06ee58901141f3558482" - integrity sha512-+w7BZCvkewSmaRM6H4L2QM3RL90teqEIHDIFXAmrW33+0jhlymnDAEdqVeCZATvxhQuio1ifoGVlJJbIiH9Ffg== - dependencies: - commander "^4.0.1" - convert-source-map "^1.1.0" - fs-readdir-recursive "^1.1.0" - glob "^7.0.0" - lodash "^4.17.19" - make-dir "^2.1.0" - slash "^2.0.0" - source-map "^0.5.0" - optionalDependencies: - chokidar "^2.1.8" - -"@babel/code-frame@^7.0.0", "@babel/code-frame@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.10.4.tgz#168da1a36e90da68ae8d49c0f1b48c7c6249213a" - integrity sha512-vG6SvB6oYEhvgisZNFRmRCUkLz11c7rp+tbNTynGqc6mS1d5ATd/sGyV6W0KZZnXRKMTzZDRgQT3Ou9jhpAfUg== - dependencies: - "@babel/highlight" "^7.10.4" - -"@babel/compat-data@^7.10.4", "@babel/compat-data@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.11.0.tgz#e9f73efe09af1355b723a7f39b11bad637d7c99c" - integrity sha512-TPSvJfv73ng0pfnEOh17bYMPQbI95+nGWc71Ss4vZdRBHTDqmM9Z8ZV4rYz8Ks7sfzc95n30k6ODIq5UGnXcYQ== - dependencies: - browserslist "^4.12.0" - invariant "^2.2.4" - semver "^5.5.0" - -"@babel/core@^7.1.0", "@babel/core@^7.7.5": - version "7.11.6" - resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.11.6.tgz#3a9455dc7387ff1bac45770650bc13ba04a15651" - integrity sha512-Wpcv03AGnmkgm6uS6k8iwhIwTrcP0m17TL1n1sy7qD0qelDu4XNeW0dN0mHfa+Gei211yDaLoEe/VlbXQzM4Bg== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/generator" "^7.11.6" - "@babel/helper-module-transforms" "^7.11.0" - "@babel/helpers" "^7.10.4" - "@babel/parser" "^7.11.5" - "@babel/template" "^7.10.4" - "@babel/traverse" "^7.11.5" - "@babel/types" "^7.11.5" - convert-source-map "^1.7.0" - debug "^4.1.0" - gensync "^1.0.0-beta.1" - json5 "^2.1.2" - lodash "^4.17.19" - resolve "^1.3.2" - semver "^5.4.1" - source-map "^0.5.0" - -"@babel/generator@^7.11.5", "@babel/generator@^7.11.6": - version "7.11.6" - resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.11.6.tgz#b868900f81b163b4d464ea24545c61cbac4dc620" - integrity sha512-DWtQ1PV3r+cLbySoHrwn9RWEgKMBLLma4OBQloPRyDYvc5msJM9kvTLo1YnlJd1P/ZuKbdli3ijr5q3FvAF3uA== - dependencies: - "@babel/types" "^7.11.5" - jsesc "^2.5.1" - source-map "^0.5.0" - -"@babel/helper-annotate-as-pure@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.10.4.tgz#5bf0d495a3f757ac3bda48b5bf3b3ba309c72ba3" - integrity sha512-XQlqKQP4vXFB7BN8fEEerrmYvHp3fK/rBkRFz9jaJbzK0B1DSfej9Kc7ZzE8Z/OnId1jpJdNAZ3BFQjWG68rcA== - dependencies: - "@babel/types" "^7.10.4" - -"@babel/helper-builder-binary-assignment-operator-visitor@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.10.4.tgz#bb0b75f31bf98cbf9ff143c1ae578b87274ae1a3" - integrity sha512-L0zGlFrGWZK4PbT8AszSfLTM5sDU1+Az/En9VrdT8/LmEiJt4zXt+Jve9DCAnQcbqDhCI+29y/L93mrDzddCcg== - dependencies: - "@babel/helper-explode-assignable-expression" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/helper-compilation-targets@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-compilation-targets/-/helper-compilation-targets-7.10.4.tgz#804ae8e3f04376607cc791b9d47d540276332bd2" - integrity 
sha512-a3rYhlsGV0UHNDvrtOXBg8/OpfV0OKTkxKPzIplS1zpx7CygDcWWxckxZeDd3gzPzC4kUT0A4nVFDK0wGMh4MQ== - dependencies: - "@babel/compat-data" "^7.10.4" - browserslist "^4.12.0" - invariant "^2.2.4" - levenary "^1.1.1" - semver "^5.5.0" - -"@babel/helper-create-class-features-plugin@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.10.5.tgz#9f61446ba80e8240b0a5c85c6fdac8459d6f259d" - integrity sha512-0nkdeijB7VlZoLT3r/mY3bUkw3T8WG/hNw+FATs/6+pG2039IJWjTYL0VTISqsNHMUTEnwbVnc89WIJX9Qed0A== - dependencies: - "@babel/helper-function-name" "^7.10.4" - "@babel/helper-member-expression-to-functions" "^7.10.5" - "@babel/helper-optimise-call-expression" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-replace-supers" "^7.10.4" - "@babel/helper-split-export-declaration" "^7.10.4" - -"@babel/helper-create-regexp-features-plugin@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.10.4.tgz#fdd60d88524659a0b6959c0579925e425714f3b8" - integrity sha512-2/hu58IEPKeoLF45DBwx3XFqsbCXmkdAay4spVr2x0jYgRxrSNp+ePwvSsy9g6YSaNDcKIQVPXk1Ov8S2edk2g== - dependencies: - "@babel/helper-annotate-as-pure" "^7.10.4" - "@babel/helper-regex" "^7.10.4" - regexpu-core "^4.7.0" - -"@babel/helper-define-map@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/helper-define-map/-/helper-define-map-7.10.5.tgz#b53c10db78a640800152692b13393147acb9bb30" - integrity sha512-fMw4kgFB720aQFXSVaXr79pjjcW5puTCM16+rECJ/plGS+zByelE8l9nCpV1GibxTnFVmUuYG9U8wYfQHdzOEQ== - dependencies: - "@babel/helper-function-name" "^7.10.4" - "@babel/types" "^7.10.5" - lodash "^4.17.19" - -"@babel/helper-explode-assignable-expression@^7.10.4": - version "7.11.4" - resolved "https://registry.yarnpkg.com/@babel/helper-explode-assignable-expression/-/helper-explode-assignable-expression-7.11.4.tgz#2d8e3470252cc17aba917ede7803d4a7a276a41b" - integrity sha512-ux9hm3zR4WV1Y3xXxXkdG/0gxF9nvI0YVmKVhvK9AfMoaQkemL3sJpXw+Xbz65azo8qJiEz2XVDUpK3KYhH3ZQ== - dependencies: - "@babel/types" "^7.10.4" - -"@babel/helper-function-name@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.10.4.tgz#d2d3b20c59ad8c47112fa7d2a94bc09d5ef82f1a" - integrity sha512-YdaSyz1n8gY44EmN7x44zBn9zQ1Ry2Y+3GTA+3vH6Mizke1Vw0aWDM66FOYEPw8//qKkmqOckrGgTYa+6sceqQ== - dependencies: - "@babel/helper-get-function-arity" "^7.10.4" - "@babel/template" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/helper-get-function-arity@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-get-function-arity/-/helper-get-function-arity-7.10.4.tgz#98c1cbea0e2332f33f9a4661b8ce1505b2c19ba2" - integrity sha512-EkN3YDB+SRDgiIUnNgcmiD361ti+AVbL3f3Henf6dqqUyr5dMsorno0lJWJuLhDhkI5sYEpgj6y9kB8AOU1I2A== - dependencies: - "@babel/types" "^7.10.4" - -"@babel/helper-hoist-variables@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.10.4.tgz#d49b001d1d5a68ca5e6604dda01a6297f7c9381e" - integrity sha512-wljroF5PgCk2juF69kanHVs6vrLwIPNp6DLD+Lrl3hoQ3PpPPikaDRNFA+0t81NOoMt2DL6WW/mdU8k4k6ZzuA== - dependencies: - "@babel/types" "^7.10.4" - -"@babel/helper-member-expression-to-functions@^7.10.4", "@babel/helper-member-expression-to-functions@^7.10.5": - version "7.11.0" - resolved 
"https://registry.yarnpkg.com/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.11.0.tgz#ae69c83d84ee82f4b42f96e2a09410935a8f26df" - integrity sha512-JbFlKHFntRV5qKw3YC0CvQnDZ4XMwgzzBbld7Ly4Mj4cbFy3KywcR8NtNctRToMWJOVvLINJv525Gd6wwVEx/Q== - dependencies: - "@babel/types" "^7.11.0" - -"@babel/helper-module-imports@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-module-imports/-/helper-module-imports-7.10.4.tgz#4c5c54be04bd31670a7382797d75b9fa2e5b5620" - integrity sha512-nEQJHqYavI217oD9+s5MUBzk6x1IlvoS9WTPfgG43CbMEeStE0v+r+TucWdx8KFGowPGvyOkDT9+7DHedIDnVw== - dependencies: - "@babel/types" "^7.10.4" - -"@babel/helper-module-transforms@^7.10.4", "@babel/helper-module-transforms@^7.10.5", "@babel/helper-module-transforms@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/helper-module-transforms/-/helper-module-transforms-7.11.0.tgz#b16f250229e47211abdd84b34b64737c2ab2d359" - integrity sha512-02EVu8COMuTRO1TAzdMtpBPbe6aQ1w/8fePD2YgQmxZU4gpNWaL9gK3Jp7dxlkUlUCJOTaSeA+Hrm1BRQwqIhg== - dependencies: - "@babel/helper-module-imports" "^7.10.4" - "@babel/helper-replace-supers" "^7.10.4" - "@babel/helper-simple-access" "^7.10.4" - "@babel/helper-split-export-declaration" "^7.11.0" - "@babel/template" "^7.10.4" - "@babel/types" "^7.11.0" - lodash "^4.17.19" - -"@babel/helper-optimise-call-expression@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.10.4.tgz#50dc96413d594f995a77905905b05893cd779673" - integrity sha512-n3UGKY4VXwXThEiKrgRAoVPBMqeoPgHVqiHZOanAJCG9nQUL2pLRQirUzl0ioKclHGpGqRgIOkgcIJaIWLpygg== - dependencies: - "@babel/types" "^7.10.4" - -"@babel/helper-plugin-utils@^7.0.0", "@babel/helper-plugin-utils@^7.10.4", "@babel/helper-plugin-utils@^7.8.0", "@babel/helper-plugin-utils@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-plugin-utils/-/helper-plugin-utils-7.10.4.tgz#2f75a831269d4f677de49986dff59927533cf375" - integrity sha512-O4KCvQA6lLiMU9l2eawBPMf1xPP8xPfB3iEQw150hOVTqj/rfXz0ThTb4HEzqQfs2Bmo5Ay8BzxfzVtBrr9dVg== - -"@babel/helper-regex@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/helper-regex/-/helper-regex-7.10.5.tgz#32dfbb79899073c415557053a19bd055aae50ae0" - integrity sha512-68kdUAzDrljqBrio7DYAEgCoJHxppJOERHOgOrDN7WjOzP0ZQ1LsSDRXcemzVZaLvjaJsJEESb6qt+znNuENDg== - dependencies: - lodash "^4.17.19" - -"@babel/helper-remap-async-to-generator@^7.10.4": - version "7.11.4" - resolved "https://registry.yarnpkg.com/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.11.4.tgz#4474ea9f7438f18575e30b0cac784045b402a12d" - integrity sha512-tR5vJ/vBa9wFy3m5LLv2faapJLnDFxNWff2SAYkSE4rLUdbp7CdObYFgI7wK4T/Mj4UzpjPwzR8Pzmr5m7MHGA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.10.4" - "@babel/helper-wrap-function" "^7.10.4" - "@babel/template" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/helper-replace-supers@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-replace-supers/-/helper-replace-supers-7.10.4.tgz#d585cd9388ea06e6031e4cd44b6713cbead9e6cf" - integrity sha512-sPxZfFXocEymYTdVK1UNmFPBN+Hv5mJkLPsYWwGBxZAxaWfFu+xqp7b6qWD0yjNuNL2VKc6L5M18tOXUP7NU0A== - dependencies: - "@babel/helper-member-expression-to-functions" "^7.10.4" - "@babel/helper-optimise-call-expression" "^7.10.4" - "@babel/traverse" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/helper-simple-access@^7.10.4": 
- version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-simple-access/-/helper-simple-access-7.10.4.tgz#0f5ccda2945277a2a7a2d3a821e15395edcf3461" - integrity sha512-0fMy72ej/VEvF8ULmX6yb5MtHG4uH4Dbd6I/aHDb/JVg0bbivwt9Wg+h3uMvX+QSFtwr5MeItvazbrc4jtRAXw== - dependencies: - "@babel/template" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/helper-skip-transparent-expression-wrappers@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.11.0.tgz#eec162f112c2f58d3af0af125e3bb57665146729" - integrity sha512-0XIdiQln4Elglgjbwo9wuJpL/K7AGCY26kmEt0+pRP0TAj4jjyNq1MjoRvikrTVqKcx4Gysxt4cXvVFXP/JO2Q== - dependencies: - "@babel/types" "^7.11.0" - -"@babel/helper-split-export-declaration@^7.10.4", "@babel/helper-split-export-declaration@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/helper-split-export-declaration/-/helper-split-export-declaration-7.11.0.tgz#f8a491244acf6a676158ac42072911ba83ad099f" - integrity sha512-74Vejvp6mHkGE+m+k5vHY93FX2cAtrw1zXrZXRlG4l410Nm9PxfEiVTn1PjDPV5SnmieiueY4AFg2xqhNFuuZg== - dependencies: - "@babel/types" "^7.11.0" - -"@babel/helper-validator-identifier@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.10.4.tgz#a78c7a7251e01f616512d31b10adcf52ada5e0d2" - integrity sha512-3U9y+43hz7ZM+rzG24Qe2mufW5KhvFg/NhnNph+i9mgCtdTCtMJuI1TMkrIUiK7Ix4PYlRF9I5dhqaLYA/ADXw== - -"@babel/helper-wrap-function@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helper-wrap-function/-/helper-wrap-function-7.10.4.tgz#8a6f701eab0ff39f765b5a1cfef409990e624b87" - integrity sha512-6py45WvEF0MhiLrdxtRjKjufwLL1/ob2qDJgg5JgNdojBAZSAKnAjkyOCNug6n+OBl4VW76XjvgSFTdaMcW0Ug== - dependencies: - "@babel/helper-function-name" "^7.10.4" - "@babel/template" "^7.10.4" - "@babel/traverse" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/helpers@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/helpers/-/helpers-7.10.4.tgz#2abeb0d721aff7c0a97376b9e1f6f65d7a475044" - integrity sha512-L2gX/XeUONeEbI78dXSrJzGdz4GQ+ZTA/aazfUsFaWjSe95kiCuOZ5HsXvkiw3iwF+mFHSRUfJU8t6YavocdXA== - dependencies: - "@babel/template" "^7.10.4" - "@babel/traverse" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/highlight@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.10.4.tgz#7d1bdfd65753538fabe6c38596cdb76d9ac60143" - integrity sha512-i6rgnR/YgPEQzZZnbTHHuZdlE8qyoBNalD6F+q4vAFlcMEcqmkoG+mPqJYJCo63qPf74+Y1UZsl3l6f7/RIkmA== - dependencies: - "@babel/helper-validator-identifier" "^7.10.4" - chalk "^2.0.0" - js-tokens "^4.0.0" - -"@babel/node@^7.7.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/node/-/node-7.10.5.tgz#30866322aa2c0251a9bdd73d07a9167bd1f4ed64" - integrity sha512-suosS7zZ2roj+fYVCnDuVezUbRc0sdoyF0Gj/1FzWxD4ebbGiBGtL5qyqHH4NO34B5m4vWWYWgyNhSsrqS8vwA== - dependencies: - "@babel/register" "^7.10.5" - commander "^4.0.1" - core-js "^3.2.1" - lodash "^4.17.19" - node-environment-flags "^1.0.5" - regenerator-runtime "^0.13.4" - resolve "^1.13.1" - v8flags "^3.1.1" - -"@babel/parser@^7.1.0", "@babel/parser@^7.10.4", "@babel/parser@^7.11.5": - version "7.11.5" - resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.11.5.tgz#c7ff6303df71080ec7a4f5b8c003c58f1cf51037" - integrity 
sha512-X9rD8qqm695vgmeaQ4fvz/o3+Wk4ZzQvSHkDBgpYKxpD4qTAUm88ZKtHkVqIOsYFFbIQ6wQYhC6q7pjqVK0E0Q== - -"@babel/plugin-proposal-async-generator-functions@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-async-generator-functions/-/plugin-proposal-async-generator-functions-7.10.5.tgz#3491cabf2f7c179ab820606cec27fed15e0e8558" - integrity sha512-cNMCVezQbrRGvXJwm9fu/1sJj9bHdGAgKodZdLqOQIpfoH3raqmRPBM17+lh7CzhiKRRBrGtZL9WcjxSoGYUSg== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-remap-async-to-generator" "^7.10.4" - "@babel/plugin-syntax-async-generators" "^7.8.0" - -"@babel/plugin-proposal-class-properties@^7.10.4", "@babel/plugin-proposal-class-properties@^7.7.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-class-properties/-/plugin-proposal-class-properties-7.10.4.tgz#a33bf632da390a59c7a8c570045d1115cd778807" - integrity sha512-vhwkEROxzcHGNu2mzUC0OFFNXdZ4M23ib8aRRcJSsW8BZK9pQMD7QB7csl97NBbgGZO7ZyHUyKDnxzOaP4IrCg== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-proposal-dynamic-import@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-dynamic-import/-/plugin-proposal-dynamic-import-7.10.4.tgz#ba57a26cb98b37741e9d5bca1b8b0ddf8291f17e" - integrity sha512-up6oID1LeidOOASNXgv/CFbgBqTuKJ0cJjz6An5tWD+NVBNlp3VNSBxv2ZdU7SYl3NxJC7agAQDApZusV6uFwQ== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-dynamic-import" "^7.8.0" - -"@babel/plugin-proposal-export-namespace-from@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-export-namespace-from/-/plugin-proposal-export-namespace-from-7.10.4.tgz#570d883b91031637b3e2958eea3c438e62c05f54" - integrity sha512-aNdf0LY6/3WXkhh0Fdb6Zk9j1NMD8ovj3F6r0+3j837Pn1S1PdNtcwJ5EG9WkVPNHPxyJDaxMaAOVq4eki0qbg== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - -"@babel/plugin-proposal-json-strings@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-json-strings/-/plugin-proposal-json-strings-7.10.4.tgz#593e59c63528160233bd321b1aebe0820c2341db" - integrity sha512-fCL7QF0Jo83uy1K0P2YXrfX11tj3lkpN7l4dMv9Y9VkowkhkQDwFHFd8IiwyK5MZjE8UpbgokkgtcReH88Abaw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-json-strings" "^7.8.0" - -"@babel/plugin-proposal-logical-assignment-operators@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-logical-assignment-operators/-/plugin-proposal-logical-assignment-operators-7.11.0.tgz#9f80e482c03083c87125dee10026b58527ea20c8" - integrity sha512-/f8p4z+Auz0Uaf+i8Ekf1iM7wUNLcViFUGiPxKeXvxTSl63B875YPiVdUDdem7hREcI0E0kSpEhS8tF5RphK7Q== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - -"@babel/plugin-proposal-nullish-coalescing-operator@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-nullish-coalescing-operator/-/plugin-proposal-nullish-coalescing-operator-7.10.4.tgz#02a7e961fc32e6d5b2db0649e01bf80ddee7e04a" - integrity sha512-wq5n1M3ZUlHl9sqT2ok1T2/MTt6AXE0e1Lz4WzWBr95LsAZ5qDXe4KnFuauYyEyLiohvXFMdbsOTMyLZs91Zlw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" - 
-"@babel/plugin-proposal-numeric-separator@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-numeric-separator/-/plugin-proposal-numeric-separator-7.10.4.tgz#ce1590ff0a65ad12970a609d78855e9a4c1aef06" - integrity sha512-73/G7QoRoeNkLZFxsoCCvlg4ezE4eM+57PnOqgaPOozd5myfj7p0muD1mRVJvbUWbOzD+q3No2bWbaKy+DJ8DA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - -"@babel/plugin-proposal-object-rest-spread@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-object-rest-spread/-/plugin-proposal-object-rest-spread-7.11.0.tgz#bd81f95a1f746760ea43b6c2d3d62b11790ad0af" - integrity sha512-wzch41N4yztwoRw0ak+37wxwJM2oiIiy6huGCoqkvSTA9acYWcPfn9Y4aJqmFFJ70KTJUu29f3DQ43uJ9HXzEA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.0" - "@babel/plugin-transform-parameters" "^7.10.4" - -"@babel/plugin-proposal-optional-catch-binding@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-catch-binding/-/plugin-proposal-optional-catch-binding-7.10.4.tgz#31c938309d24a78a49d68fdabffaa863758554dd" - integrity sha512-LflT6nPh+GK2MnFiKDyLiqSqVHkQnVf7hdoAvyTnnKj9xB3docGRsdPuxp6qqqW19ifK3xgc9U5/FwrSaCNX5g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" - -"@babel/plugin-proposal-optional-chaining@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-optional-chaining/-/plugin-proposal-optional-chaining-7.11.0.tgz#de5866d0646f6afdaab8a566382fe3a221755076" - integrity sha512-v9fZIu3Y8562RRwhm1BbMRxtqZNFmFA2EG+pT2diuU8PT3H6T/KXoZ54KgYisfOFZHV6PfvAiBIZ9Rcz+/JCxA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-skip-transparent-expression-wrappers" "^7.11.0" - "@babel/plugin-syntax-optional-chaining" "^7.8.0" - -"@babel/plugin-proposal-private-methods@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-private-methods/-/plugin-proposal-private-methods-7.10.4.tgz#b160d972b8fdba5c7d111a145fc8c421fc2a6909" - integrity sha512-wh5GJleuI8k3emgTg5KkJK6kHNsGEr0uBTDBuQUBJwckk9xs1ez79ioheEVVxMLyPscB0LfkbVHslQqIzWV6Bw== - dependencies: - "@babel/helper-create-class-features-plugin" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-proposal-unicode-property-regex@^7.10.4", "@babel/plugin-proposal-unicode-property-regex@^7.4.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-proposal-unicode-property-regex/-/plugin-proposal-unicode-property-regex-7.10.4.tgz#4483cda53041ce3413b7fe2f00022665ddfaa75d" - integrity sha512-H+3fOgPnEXFL9zGYtKQe4IDOPKYlZdF1kqFDQRRb8PK4B8af1vAGK04tF5iQAAsui+mHNBQSAtd2/ndEDe9wuA== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-async-generators@^7.8.0", "@babel/plugin-syntax-async-generators@^7.8.4": - version "7.8.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz#a983fb1aeb2ec3f6ed042a210f640e90e786fe0d" - integrity sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-bigint@^7.8.3": - version "7.8.3" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz#4c9a6f669f5d0cdf1b90a1671e9a146be5300cea" - integrity sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-class-properties@^7.10.4", "@babel/plugin-syntax-class-properties@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.10.4.tgz#6644e6a0baa55a61f9e3231f6c9eeb6ee46c124c" - integrity sha512-GCSBF7iUle6rNugfURwNmCGG3Z/2+opxAMLs1nND4bhEG5PuxTIggDBoeYYSujAlLtsupzOHYJQgPS3pivwXIA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-dynamic-import@^7.8.0": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz#62bf98b2da3cd21d626154fc96ee5b3cb68eacb3" - integrity sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-export-namespace-from@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz#028964a9ba80dbc094c915c487ad7c4e7a66465a" - integrity sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.3" - -"@babel/plugin-syntax-import-meta@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz#ee601348c370fa334d2207be158777496521fd51" - integrity sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-json-strings@^7.8.0", "@babel/plugin-syntax-json-strings@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz#01ca21b668cd8218c9e640cb6dd88c5412b2c96a" - integrity sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-logical-assignment-operators@^7.10.4", "@babel/plugin-syntax-logical-assignment-operators@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz#ca91ef46303530448b906652bac2e9fe9941f699" - integrity sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-nullish-coalescing-operator@^7.8.0", "@babel/plugin-syntax-nullish-coalescing-operator@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz#167ed70368886081f74b5c36c65a88c03b66d1a9" - integrity sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-numeric-separator@^7.10.4", "@babel/plugin-syntax-numeric-separator@^7.8.3": - version "7.10.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz#b9b070b3e33570cd9fd07ba7fa91c0dd37b9af97" - integrity sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-syntax-object-rest-spread@^7.8.0", "@babel/plugin-syntax-object-rest-spread@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz#60e225edcbd98a640332a2e72dd3e66f1af55871" - integrity sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-catch-binding@^7.8.0", "@babel/plugin-syntax-optional-catch-binding@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz#6111a265bcfb020eb9efd0fdfd7d26402b9ed6c1" - integrity sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-optional-chaining@^7.8.0", "@babel/plugin-syntax-optional-chaining@^7.8.3": - version "7.8.3" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz#4f69c2ab95167e0180cd5336613f8c5788f7d48a" - integrity sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg== - dependencies: - "@babel/helper-plugin-utils" "^7.8.0" - -"@babel/plugin-syntax-top-level-await@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.10.4.tgz#4bbeb8917b54fcf768364e0a81f560e33a3ef57d" - integrity sha512-ni1brg4lXEmWyafKr0ccFWkJG0CeMt4WV1oyeBW6EFObF4oOHclbkj5cARxAPQyAQ2UTuplJyK4nfkXIMMFvsQ== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-arrow-functions@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.10.4.tgz#e22960d77e697c74f41c501d44d73dbf8a6a64cd" - integrity sha512-9J/oD1jV0ZCBcgnoFWFq1vJd4msoKb/TCpGNFyyLt0zABdcvgK3aYikZ8HjzB14c26bc7E3Q1yugpwGy2aTPNA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-async-to-generator@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.10.4.tgz#41a5017e49eb6f3cda9392a51eef29405b245a37" - integrity sha512-F6nREOan7J5UXTLsDsZG3DXmZSVofr2tGNwfdrVwkDWHfQckbQXnXSPfD7iO+c/2HGqycwyLST3DnZ16n+cBJQ== - dependencies: - "@babel/helper-module-imports" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-remap-async-to-generator" "^7.10.4" - -"@babel/plugin-transform-block-scoped-functions@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.10.4.tgz#1afa595744f75e43a91af73b0d998ecfe4ebc2e8" - integrity sha512-WzXDarQXYYfjaV1szJvN3AD7rZgZzC1JtjJZ8dMHUyiK8mxPRahynp14zzNjU3VkPqPsO38CzxiWO1c9ARZ8JA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-block-scoping@^7.10.4": - version "7.11.1" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.11.1.tgz#5b7efe98852bef8d652c0b28144cd93a9e4b5215" - integrity sha512-00dYeDE0EVEHuuM+26+0w/SCL0BH2Qy7LwHuI4Hi4MH5gkC8/AqMN5uWFJIsoXZrAphiMm1iXzBw6L2T+eA0ew== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-classes@^7.10.4", "@babel/plugin-transform-classes@^7.9.5": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-classes/-/plugin-transform-classes-7.10.4.tgz#405136af2b3e218bc4a1926228bc917ab1a0adc7" - integrity sha512-2oZ9qLjt161dn1ZE0Ms66xBncQH4In8Sqw1YWgBUZuGVJJS5c0OFZXL6dP2MRHrkU/eKhWg8CzFJhRQl50rQxA== - dependencies: - "@babel/helper-annotate-as-pure" "^7.10.4" - "@babel/helper-define-map" "^7.10.4" - "@babel/helper-function-name" "^7.10.4" - "@babel/helper-optimise-call-expression" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-replace-supers" "^7.10.4" - "@babel/helper-split-export-declaration" "^7.10.4" - globals "^11.1.0" - -"@babel/plugin-transform-computed-properties@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.10.4.tgz#9ded83a816e82ded28d52d4b4ecbdd810cdfc0eb" - integrity sha512-JFwVDXcP/hM/TbyzGq3l/XWGut7p46Z3QvqFMXTfk6/09m7xZHJUN9xHfsv7vqqD4YnfI5ueYdSJtXqqBLyjBw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-destructuring@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.10.4.tgz#70ddd2b3d1bea83d01509e9bb25ddb3a74fc85e5" - integrity sha512-+WmfvyfsyF603iPa6825mq6Qrb7uLjTOsa3XOFzlYcYDHSS4QmpOWOL0NNBY5qMbvrcf3tq0Cw+v4lxswOBpgA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-dotall-regex@^7.10.4", "@babel/plugin-transform-dotall-regex@^7.4.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.10.4.tgz#469c2062105c1eb6a040eaf4fac4b488078395ee" - integrity sha512-ZEAVvUTCMlMFAbASYSVQoxIbHm2OkG2MseW6bV2JjIygOjdVv8tuxrCTzj1+Rynh7ODb8GivUy7dzEXzEhuPaA== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-duplicate-keys@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.10.4.tgz#697e50c9fee14380fe843d1f306b295617431e47" - integrity sha512-GL0/fJnmgMclHiBTTWXNlYjYsA7rDrtsazHG6mglaGSTh0KsrW04qml+Bbz9FL0LcJIRwBWL5ZqlNHKTkU3xAA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-exponentiation-operator@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.10.4.tgz#5ae338c57f8cf4001bdb35607ae66b92d665af2e" - integrity sha512-S5HgLVgkBcRdyQAHbKj+7KyuWx8C6t5oETmUuwz1pt3WTWJhsUV0WIIXuVvfXMxl/QQyHKlSCNNtaIamG8fysw== - dependencies: - "@babel/helper-builder-binary-assignment-operator-visitor" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-for-of@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.10.4.tgz#c08892e8819d3a5db29031b115af511dbbfebae9" - integrity 
sha512-ItdQfAzu9AlEqmusA/65TqJ79eRcgGmpPPFvBnGILXZH975G0LNjP1yjHvGgfuCxqrPPueXOPe+FsvxmxKiHHQ== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-function-name@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.10.4.tgz#6a467880e0fc9638514ba369111811ddbe2644b7" - integrity sha512-OcDCq2y5+E0dVD5MagT5X+yTRbcvFjDI2ZVAottGH6tzqjx/LKpgkUepu3hp/u4tZBzxxpNGwLsAvGBvQ2mJzg== - dependencies: - "@babel/helper-function-name" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-instanceof@^7.8.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-instanceof/-/plugin-transform-instanceof-7.10.4.tgz#05dad934f26e887c0009f6685a6e14fdd962120b" - integrity sha512-wzKw5L8hP8rJg0lZiEPiIRk6qGmTUKLAJK8VgRHCSlmbsTILMHmWT1muGlPDxwCNgHwVT4E7ewWWFi1X3KE5TA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-literals@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-literals/-/plugin-transform-literals-7.10.4.tgz#9f42ba0841100a135f22712d0e391c462f571f3c" - integrity sha512-Xd/dFSTEVuUWnyZiMu76/InZxLTYilOSr1UlHV+p115Z/Le2Fi1KXkJUYz0b42DfndostYlPub3m8ZTQlMaiqQ== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-member-expression-literals@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.10.4.tgz#b1ec44fcf195afcb8db2c62cd8e551c881baf8b7" - integrity sha512-0bFOvPyAoTBhtcJLr9VcwZqKmSjFml1iVxvPL0ReomGU53CX53HsM4h2SzckNdkQcHox1bpAqzxBI1Y09LlBSw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-modules-amd@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.10.5.tgz#1b9cddaf05d9e88b3aad339cb3e445c4f020a9b1" - integrity sha512-elm5uruNio7CTLFItVC/rIzKLfQ17+fX7EVz5W0TMgIHFo1zY0Ozzx+lgwhL4plzl8OzVn6Qasx5DeEFyoNiRw== - dependencies: - "@babel/helper-module-transforms" "^7.10.5" - "@babel/helper-plugin-utils" "^7.10.4" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-commonjs@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.10.4.tgz#66667c3eeda1ebf7896d41f1f16b17105a2fbca0" - integrity sha512-Xj7Uq5o80HDLlW64rVfDBhao6OX89HKUmb+9vWYaLXBZOma4gA6tw4Ni1O5qVDoZWUV0fxMYA0aYzOawz0l+1w== - dependencies: - "@babel/helper-module-transforms" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-simple-access" "^7.10.4" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-systemjs@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.10.5.tgz#6270099c854066681bae9e05f87e1b9cadbe8c85" - integrity sha512-f4RLO/OL14/FP1AEbcsWMzpbUz6tssRaeQg11RH1BP/XnPpRoVwgeYViMFacnkaw4k4wjRSjn3ip1Uw9TaXuMw== - dependencies: - "@babel/helper-hoist-variables" "^7.10.4" - "@babel/helper-module-transforms" "^7.10.5" - "@babel/helper-plugin-utils" "^7.10.4" - babel-plugin-dynamic-import-node "^2.3.3" - -"@babel/plugin-transform-modules-umd@^7.10.4": - version "7.10.4" - resolved 
"https://registry.yarnpkg.com/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.10.4.tgz#9a8481fe81b824654b3a0b65da3df89f3d21839e" - integrity sha512-mohW5q3uAEt8T45YT7Qc5ws6mWgJAaL/8BfWD9Dodo1A3RKWli8wTS+WiQ/knF+tXlPirW/1/MqzzGfCExKECA== - dependencies: - "@babel/helper-module-transforms" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-named-capturing-groups-regex@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.10.4.tgz#78b4d978810b6f3bcf03f9e318f2fc0ed41aecb6" - integrity sha512-V6LuOnD31kTkxQPhKiVYzYC/Jgdq53irJC/xBSmqcNcqFGV+PER4l6rU5SH2Vl7bH9mLDHcc0+l9HUOe4RNGKA== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.10.4" - -"@babel/plugin-transform-new-target@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.10.4.tgz#9097d753cb7b024cb7381a3b2e52e9513a9c6888" - integrity sha512-YXwWUDAH/J6dlfwqlWsztI2Puz1NtUAubXhOPLQ5gjR/qmQ5U96DY4FQO8At33JN4XPBhrjB8I4eMmLROjjLjw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-object-super@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.10.4.tgz#d7146c4d139433e7a6526f888c667e314a093894" - integrity sha512-5iTw0JkdRdJvr7sY0vHqTpnruUpTea32JHmq/atIWqsnNussbRzjEDyWep8UNztt1B5IusBYg8Irb0bLbiEBCQ== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-replace-supers" "^7.10.4" - -"@babel/plugin-transform-parameters@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.10.5.tgz#59d339d58d0b1950435f4043e74e2510005e2c4a" - integrity sha512-xPHwUj5RdFV8l1wuYiu5S9fqWGM2DrYc24TMvUiRrPVm+SM3XeqU9BcokQX/kEUe+p2RBwy+yoiR1w/Blq6ubw== - dependencies: - "@babel/helper-get-function-arity" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-property-literals@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.10.4.tgz#f6fe54b6590352298785b83edd815d214c42e3c0" - integrity sha512-ofsAcKiUxQ8TY4sScgsGeR2vJIsfrzqvFb9GvJ5UdXDzl+MyYCaBj/FGzXuv7qE0aJcjWMILny1epqelnFlz8g== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-regenerator@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.10.4.tgz#2015e59d839074e76838de2159db421966fd8b63" - integrity sha512-3thAHwtor39A7C04XucbMg17RcZ3Qppfxr22wYzZNcVIkPHfpM9J0SO8zuCV6SZa265kxBJSrfKTvDCYqBFXGw== - dependencies: - regenerator-transform "^0.14.2" - -"@babel/plugin-transform-reserved-words@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.10.4.tgz#8f2682bcdcef9ed327e1b0861585d7013f8a54dd" - integrity sha512-hGsw1O6Rew1fkFbDImZIEqA8GoidwTAilwCyWqLBM9f+e/u/sQMQu7uX6dyokfOayRuuVfKOW4O7HvaBWM+JlQ== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-runtime@^7.7.6": - version "7.11.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-runtime/-/plugin-transform-runtime-7.11.5.tgz#f108bc8e0cf33c37da031c097d1df470b3a293fc" - integrity 
sha512-9aIoee+EhjySZ6vY5hnLjigHzunBlscx9ANKutkeWTJTx6m5Rbq6Ic01tLvO54lSusR+BxV7u4UDdCmXv5aagg== - dependencies: - "@babel/helper-module-imports" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - resolve "^1.8.1" - semver "^5.5.1" - -"@babel/plugin-transform-shorthand-properties@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.10.4.tgz#9fd25ec5cdd555bb7f473e5e6ee1c971eede4dd6" - integrity sha512-AC2K/t7o07KeTIxMoHneyX90v3zkm5cjHJEokrPEAGEy3UCp8sLKfnfOIGdZ194fyN4wfX/zZUWT9trJZ0qc+Q== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-spread@^7.11.0": - version "7.11.0" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-spread/-/plugin-transform-spread-7.11.0.tgz#fa84d300f5e4f57752fe41a6d1b3c554f13f17cc" - integrity sha512-UwQYGOqIdQJe4aWNyS7noqAnN2VbaczPLiEtln+zPowRNlD+79w3oi2TWfYe0eZgd+gjZCbsydN7lzWysDt+gw== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-skip-transparent-expression-wrappers" "^7.11.0" - -"@babel/plugin-transform-sticky-regex@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.10.4.tgz#8f3889ee8657581130a29d9cc91d7c73b7c4a28d" - integrity sha512-Ddy3QZfIbEV0VYcVtFDCjeE4xwVTJWTmUtorAJkn6u/92Z/nWJNV+mILyqHKrUxXYKA2EoCilgoPePymKL4DvQ== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/helper-regex" "^7.10.4" - -"@babel/plugin-transform-template-literals@^7.10.4": - version "7.10.5" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.10.5.tgz#78bc5d626a6642db3312d9d0f001f5e7639fde8c" - integrity sha512-V/lnPGIb+KT12OQikDvgSuesRX14ck5FfJXt6+tXhdkJ+Vsd0lDCVtF6jcB4rNClYFzaB2jusZ+lNISDk2mMMw== - dependencies: - "@babel/helper-annotate-as-pure" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-typeof-symbol@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.10.4.tgz#9509f1a7eec31c4edbffe137c16cc33ff0bc5bfc" - integrity sha512-QqNgYwuuW0y0H+kUE/GWSR45t/ccRhe14Fs/4ZRouNNQsyd4o3PG4OtHiIrepbM2WKUBDAXKCAK/Lk4VhzTaGA== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-unicode-escapes@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.10.4.tgz#feae523391c7651ddac115dae0a9d06857892007" - integrity sha512-y5XJ9waMti2J+e7ij20e+aH+fho7Wb7W8rNuu72aKRwCHFqQdhkdU2lo3uZ9tQuboEJcUFayXdARhcxLQ3+6Fg== - dependencies: - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/plugin-transform-unicode-regex@^7.10.4": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.10.4.tgz#e56d71f9282fac6db09c82742055576d5e6d80a8" - integrity sha512-wNfsc4s8N2qnIwpO/WP2ZiSyjfpTamT2C9V9FDH/Ljub9zw6P3SjkXcFmc0RQUt96k2fmIvtla2MMjgTwIAC+A== - dependencies: - "@babel/helper-create-regexp-features-plugin" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - -"@babel/preset-env@^7.7.5": - version "7.11.5" - resolved "https://registry.yarnpkg.com/@babel/preset-env/-/preset-env-7.11.5.tgz#18cb4b9379e3e92ffea92c07471a99a2914e4272" - integrity sha512-kXqmW1jVcnB2cdueV+fyBM8estd5mlNfaQi6lwLgRwCby4edpavgbFhiBNjmWA3JpB/yZGSISa7Srf+TwxDQoA== - 
dependencies: - "@babel/compat-data" "^7.11.0" - "@babel/helper-compilation-targets" "^7.10.4" - "@babel/helper-module-imports" "^7.10.4" - "@babel/helper-plugin-utils" "^7.10.4" - "@babel/plugin-proposal-async-generator-functions" "^7.10.4" - "@babel/plugin-proposal-class-properties" "^7.10.4" - "@babel/plugin-proposal-dynamic-import" "^7.10.4" - "@babel/plugin-proposal-export-namespace-from" "^7.10.4" - "@babel/plugin-proposal-json-strings" "^7.10.4" - "@babel/plugin-proposal-logical-assignment-operators" "^7.11.0" - "@babel/plugin-proposal-nullish-coalescing-operator" "^7.10.4" - "@babel/plugin-proposal-numeric-separator" "^7.10.4" - "@babel/plugin-proposal-object-rest-spread" "^7.11.0" - "@babel/plugin-proposal-optional-catch-binding" "^7.10.4" - "@babel/plugin-proposal-optional-chaining" "^7.11.0" - "@babel/plugin-proposal-private-methods" "^7.10.4" - "@babel/plugin-proposal-unicode-property-regex" "^7.10.4" - "@babel/plugin-syntax-async-generators" "^7.8.0" - "@babel/plugin-syntax-class-properties" "^7.10.4" - "@babel/plugin-syntax-dynamic-import" "^7.8.0" - "@babel/plugin-syntax-export-namespace-from" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.0" - "@babel/plugin-syntax-logical-assignment-operators" "^7.10.4" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.0" - "@babel/plugin-syntax-numeric-separator" "^7.10.4" - "@babel/plugin-syntax-object-rest-spread" "^7.8.0" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.0" - "@babel/plugin-syntax-optional-chaining" "^7.8.0" - "@babel/plugin-syntax-top-level-await" "^7.10.4" - "@babel/plugin-transform-arrow-functions" "^7.10.4" - "@babel/plugin-transform-async-to-generator" "^7.10.4" - "@babel/plugin-transform-block-scoped-functions" "^7.10.4" - "@babel/plugin-transform-block-scoping" "^7.10.4" - "@babel/plugin-transform-classes" "^7.10.4" - "@babel/plugin-transform-computed-properties" "^7.10.4" - "@babel/plugin-transform-destructuring" "^7.10.4" - "@babel/plugin-transform-dotall-regex" "^7.10.4" - "@babel/plugin-transform-duplicate-keys" "^7.10.4" - "@babel/plugin-transform-exponentiation-operator" "^7.10.4" - "@babel/plugin-transform-for-of" "^7.10.4" - "@babel/plugin-transform-function-name" "^7.10.4" - "@babel/plugin-transform-literals" "^7.10.4" - "@babel/plugin-transform-member-expression-literals" "^7.10.4" - "@babel/plugin-transform-modules-amd" "^7.10.4" - "@babel/plugin-transform-modules-commonjs" "^7.10.4" - "@babel/plugin-transform-modules-systemjs" "^7.10.4" - "@babel/plugin-transform-modules-umd" "^7.10.4" - "@babel/plugin-transform-named-capturing-groups-regex" "^7.10.4" - "@babel/plugin-transform-new-target" "^7.10.4" - "@babel/plugin-transform-object-super" "^7.10.4" - "@babel/plugin-transform-parameters" "^7.10.4" - "@babel/plugin-transform-property-literals" "^7.10.4" - "@babel/plugin-transform-regenerator" "^7.10.4" - "@babel/plugin-transform-reserved-words" "^7.10.4" - "@babel/plugin-transform-shorthand-properties" "^7.10.4" - "@babel/plugin-transform-spread" "^7.11.0" - "@babel/plugin-transform-sticky-regex" "^7.10.4" - "@babel/plugin-transform-template-literals" "^7.10.4" - "@babel/plugin-transform-typeof-symbol" "^7.10.4" - "@babel/plugin-transform-unicode-escapes" "^7.10.4" - "@babel/plugin-transform-unicode-regex" "^7.10.4" - "@babel/preset-modules" "^0.1.3" - "@babel/types" "^7.11.5" - browserslist "^4.12.0" - core-js-compat "^3.6.2" - invariant "^2.2.2" - levenary "^1.1.1" - semver "^5.5.0" - -"@babel/preset-modules@^0.1.3": - version "0.1.4" - resolved 
"https://registry.yarnpkg.com/@babel/preset-modules/-/preset-modules-0.1.4.tgz#362f2b68c662842970fdb5e254ffc8fc1c2e415e" - integrity sha512-J36NhwnfdzpmH41M1DrnkkgAqhZaqr/NBdPfQ677mLzlaXo+oDiv1deyCDtgAhz8p328otdob0Du7+xgHGZbKg== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - "@babel/plugin-proposal-unicode-property-regex" "^7.4.4" - "@babel/plugin-transform-dotall-regex" "^7.4.4" - "@babel/types" "^7.4.4" - esutils "^2.0.2" - -"@babel/register@^7.10.5", "@babel/register@^7.7.4": - version "7.11.5" - resolved "https://registry.yarnpkg.com/@babel/register/-/register-7.11.5.tgz#79becf89e0ddd0fba8b92bc279bc0f5d2d7ce2ea" - integrity sha512-CAml0ioKX+kOAvBQDHa/+t1fgOt3qkTIz0TrRtRAT6XY0m5qYZXR85k6/sLCNPMGhYDlCFHCYuU0ybTJbvlC6w== - dependencies: - find-cache-dir "^2.0.0" - lodash "^4.17.19" - make-dir "^2.1.0" - pirates "^4.0.0" - source-map-support "^0.5.16" - -"@babel/runtime@^7.8.4", "@babel/runtime@^7.9.6": - version "7.11.2" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.11.2.tgz#f549c13c754cc40b87644b9fa9f09a6a95fe0736" - integrity sha512-TeWkU52so0mPtDcaCTxNBI/IHiz0pZgr8VEFqXFtZWpYD08ZB6FaSwVAS8MKRQAP3bYKiVjwysOJgMFY28o6Tw== - dependencies: - regenerator-runtime "^0.13.4" - -"@babel/template@^7.10.4", "@babel/template@^7.3.3": - version "7.10.4" - resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.10.4.tgz#3251996c4200ebc71d1a8fc405fba940f36ba278" - integrity sha512-ZCjD27cGJFUB6nmCB1Enki3r+L5kJveX9pq1SvAUKoICy6CZ9yD8xO086YXdYhvNjBdnekm4ZnaP5yC8Cs/1tA== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/parser" "^7.10.4" - "@babel/types" "^7.10.4" - -"@babel/traverse@^7.1.0", "@babel/traverse@^7.10.4", "@babel/traverse@^7.11.5": - version "7.11.5" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.11.5.tgz#be777b93b518eb6d76ee2e1ea1d143daa11e61c3" - integrity sha512-EjiPXt+r7LiCZXEfRpSJd+jUMnBd4/9OUv7Nx3+0u9+eimMwJmG0Q98lw4/289JCoxSE8OolDMNZaaF/JZ69WQ== - dependencies: - "@babel/code-frame" "^7.10.4" - "@babel/generator" "^7.11.5" - "@babel/helper-function-name" "^7.10.4" - "@babel/helper-split-export-declaration" "^7.11.0" - "@babel/parser" "^7.11.5" - "@babel/types" "^7.11.5" - debug "^4.1.0" - globals "^11.1.0" - lodash "^4.17.19" - -"@babel/types@^7.0.0", "@babel/types@^7.10.4", "@babel/types@^7.10.5", "@babel/types@^7.11.0", "@babel/types@^7.11.5", "@babel/types@^7.3.0", "@babel/types@^7.3.3", "@babel/types@^7.4.4": - version "7.11.5" - resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.11.5.tgz#d9de577d01252d77c6800cee039ee64faf75662d" - integrity sha512-bvM7Qz6eKnJVFIn+1LPtjlBFPVN5jNDc1XmN15vWe7Q3DPBufWWsLiIvUu7xW87uTG6QoggpIDnUgLQvPheU+Q== - dependencies: - "@babel/helper-validator-identifier" "^7.10.4" - lodash "^4.17.19" - to-fast-properties "^2.0.0" - -"@bcoe/v8-coverage@^0.2.3": - version "0.2.3" - resolved "https://registry.yarnpkg.com/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz#75a2e8b51cb758a7553d6804a5932d7aace75c39" - integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw== - -"@cnakazawa/watch@^1.0.3": - version "1.0.4" - resolved "https://registry.yarnpkg.com/@cnakazawa/watch/-/watch-1.0.4.tgz#f864ae85004d0fcab6f50be9141c4da368d1656a" - integrity sha512-v9kIhKwjeZThiWrLmj0y17CWoyddASLj9O2yvbZkbvw/N3rWOYy9zkV66ursAoVr0mV15bL8g0c4QZUE6cdDoQ== - dependencies: - exec-sh "^0.3.2" - minimist "^1.2.0" - -"@istanbuljs/load-nyc-config@^1.0.0": - version "1.1.0" - resolved 
"https://registry.yarnpkg.com/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz#fd3db1d59ecf7cf121e80650bb86712f9b55eced" - integrity sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ== - dependencies: - camelcase "^5.3.1" - find-up "^4.1.0" - get-package-type "^0.1.0" - js-yaml "^3.13.1" - resolve-from "^5.0.0" - -"@istanbuljs/schema@^0.1.2": - version "0.1.2" - resolved "https://registry.yarnpkg.com/@istanbuljs/schema/-/schema-0.1.2.tgz#26520bf09abe4a5644cd5414e37125a8954241dd" - integrity sha512-tsAQNx32a8CoFhjhijUIhI4kccIAgmGhy8LZMZgGfmXcpMbPRUqn5LWmgRttILi6yeGmBJd2xsPkFMs0PzgPCw== - -"@jest/console@^25.5.0": - version "25.5.0" - resolved "https://registry.yarnpkg.com/@jest/console/-/console-25.5.0.tgz#770800799d510f37329c508a9edd0b7b447d9abb" - integrity sha512-T48kZa6MK1Y6k4b89sexwmSF4YLeZS/Udqg3Jj3jG/cHH+N/sLFCEoXEDMOKugJQ9FxPN1osxIknvKkxt6MKyw== - dependencies: - "@jest/types" "^25.5.0" - chalk "^3.0.0" - jest-message-util "^25.5.0" - jest-util "^25.5.0" - slash "^3.0.0" - -"@jest/core@^25.5.4": - version "25.5.4" - resolved "https://registry.yarnpkg.com/@jest/core/-/core-25.5.4.tgz#3ef7412f7339210f003cdf36646bbca786efe7b4" - integrity sha512-3uSo7laYxF00Dg/DMgbn4xMJKmDdWvZnf89n8Xj/5/AeQ2dOQmn6b6Hkj/MleyzZWXpwv+WSdYWl4cLsy2JsoA== - dependencies: - "@jest/console" "^25.5.0" - "@jest/reporters" "^25.5.1" - "@jest/test-result" "^25.5.0" - "@jest/transform" "^25.5.1" - "@jest/types" "^25.5.0" - ansi-escapes "^4.2.1" - chalk "^3.0.0" - exit "^0.1.2" - graceful-fs "^4.2.4" - jest-changed-files "^25.5.0" - jest-config "^25.5.4" - jest-haste-map "^25.5.1" - jest-message-util "^25.5.0" - jest-regex-util "^25.2.6" - jest-resolve "^25.5.1" - jest-resolve-dependencies "^25.5.4" - jest-runner "^25.5.4" - jest-runtime "^25.5.4" - jest-snapshot "^25.5.1" - jest-util "^25.5.0" - jest-validate "^25.5.0" - jest-watcher "^25.5.0" - micromatch "^4.0.2" - p-each-series "^2.1.0" - realpath-native "^2.0.0" - rimraf "^3.0.0" - slash "^3.0.0" - strip-ansi "^6.0.0" - -"@jest/environment@^25.5.0": - version "25.5.0" - resolved "https://registry.yarnpkg.com/@jest/environment/-/environment-25.5.0.tgz#aa33b0c21a716c65686638e7ef816c0e3a0c7b37" - integrity sha512-U2VXPEqL07E/V7pSZMSQCvV5Ea4lqOlT+0ZFijl/i316cRMHvZ4qC+jBdryd+lmRetjQo0YIQr6cVPNxxK87mA== - dependencies: - "@jest/fake-timers" "^25.5.0" - "@jest/types" "^25.5.0" - jest-mock "^25.5.0" - -"@jest/fake-timers@^25.5.0": - version "25.5.0" - resolved "https://registry.yarnpkg.com/@jest/fake-timers/-/fake-timers-25.5.0.tgz#46352e00533c024c90c2bc2ad9f2959f7f114185" - integrity sha512-9y2+uGnESw/oyOI3eww9yaxdZyHq7XvprfP/eeoCsjqKYts2yRlsHS/SgjPDV8FyMfn2nbMy8YzUk6nyvdLOpQ== - dependencies: - "@jest/types" "^25.5.0" - jest-message-util "^25.5.0" - jest-mock "^25.5.0" - jest-util "^25.5.0" - lolex "^5.0.0" - -"@jest/globals@^25.5.2": - version "25.5.2" - resolved "https://registry.yarnpkg.com/@jest/globals/-/globals-25.5.2.tgz#5e45e9de8d228716af3257eeb3991cc2e162ca88" - integrity sha512-AgAS/Ny7Q2RCIj5kZ+0MuKM1wbF0WMLxbCVl/GOMoCNbODRdJ541IxJ98xnZdVSZXivKpJlNPIWa3QmY0l4CXA== - dependencies: - "@jest/environment" "^25.5.0" - "@jest/types" "^25.5.0" - expect "^25.5.0" - -"@jest/reporters@^25.5.1": - version "25.5.1" - resolved "https://registry.yarnpkg.com/@jest/reporters/-/reporters-25.5.1.tgz#cb686bcc680f664c2dbaf7ed873e93aa6811538b" - integrity sha512-3jbd8pPDTuhYJ7vqiHXbSwTJQNavczPs+f1kRprRDxETeE3u6srJ+f0NPuwvOmk+lmunZzPkYWIFZDLHQPkviw== - dependencies: - "@bcoe/v8-coverage" "^0.2.3" - "@jest/console" "^25.5.0" - 
"@jest/test-result" "^25.5.0" - "@jest/transform" "^25.5.1" - "@jest/types" "^25.5.0" - chalk "^3.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.2" - graceful-fs "^4.2.4" - istanbul-lib-coverage "^3.0.0" - istanbul-lib-instrument "^4.0.0" - istanbul-lib-report "^3.0.0" - istanbul-lib-source-maps "^4.0.0" - istanbul-reports "^3.0.2" - jest-haste-map "^25.5.1" - jest-resolve "^25.5.1" - jest-util "^25.5.0" - jest-worker "^25.5.0" - slash "^3.0.0" - source-map "^0.6.0" - string-length "^3.1.0" - terminal-link "^2.0.0" - v8-to-istanbul "^4.1.3" - optionalDependencies: - node-notifier "^6.0.0" - -"@jest/source-map@^25.5.0": - version "25.5.0" - resolved "https://registry.yarnpkg.com/@jest/source-map/-/source-map-25.5.0.tgz#df5c20d6050aa292c2c6d3f0d2c7606af315bd1b" - integrity sha512-eIGx0xN12yVpMcPaVpjXPnn3N30QGJCJQSkEDUt9x1fI1Gdvb07Ml6K5iN2hG7NmMP6FDmtPEssE3z6doOYUwQ== - dependencies: - callsites "^3.0.0" - graceful-fs "^4.2.4" - source-map "^0.6.0" - -"@jest/test-result@^25.5.0": - version "25.5.0" - resolved "https://registry.yarnpkg.com/@jest/test-result/-/test-result-25.5.0.tgz#139a043230cdeffe9ba2d8341b27f2efc77ce87c" - integrity sha512-oV+hPJgXN7IQf/fHWkcS99y0smKLU2czLBJ9WA0jHITLst58HpQMtzSYxzaBvYc6U5U6jfoMthqsUlUlbRXs0A== - dependencies: - "@jest/console" "^25.5.0" - "@jest/types" "^25.5.0" - "@types/istanbul-lib-coverage" "^2.0.0" - collect-v8-coverage "^1.0.0" - -"@jest/test-sequencer@^25.5.4": - version "25.5.4" - resolved "https://registry.yarnpkg.com/@jest/test-sequencer/-/test-sequencer-25.5.4.tgz#9b4e685b36954c38d0f052e596d28161bdc8b737" - integrity sha512-pTJGEkSeg1EkCO2YWq6hbFvKNXk8ejqlxiOg1jBNLnWrgXOkdY6UmqZpwGFXNnRt9B8nO1uWMzLLZ4eCmhkPNA== - dependencies: - "@jest/test-result" "^25.5.0" - graceful-fs "^4.2.4" - jest-haste-map "^25.5.1" - jest-runner "^25.5.4" - jest-runtime "^25.5.4" - -"@jest/transform@^25.5.1": - version "25.5.1" - resolved "https://registry.yarnpkg.com/@jest/transform/-/transform-25.5.1.tgz#0469ddc17699dd2bf985db55fa0fb9309f5c2db3" - integrity sha512-Y8CEoVwXb4QwA6Y/9uDkn0Xfz0finGkieuV0xkdF9UtZGJeLukD5nLkaVrVsODB1ojRWlaoD0AJZpVHCSnJEvg== - dependencies: - "@babel/core" "^7.1.0" - "@jest/types" "^25.5.0" - babel-plugin-istanbul "^6.0.0" - chalk "^3.0.0" - convert-source-map "^1.4.0" - fast-json-stable-stringify "^2.0.0" - graceful-fs "^4.2.4" - jest-haste-map "^25.5.1" - jest-regex-util "^25.2.6" - jest-util "^25.5.0" - micromatch "^4.0.2" - pirates "^4.0.1" - realpath-native "^2.0.0" - slash "^3.0.0" - source-map "^0.6.1" - write-file-atomic "^3.0.0" - -"@jest/types@^25.5.0": - version "25.5.0" - resolved "https://registry.yarnpkg.com/@jest/types/-/types-25.5.0.tgz#4d6a4793f7b9599fc3680877b856a97dbccf2a9d" - integrity sha512-OXD0RgQ86Tu3MazKo8bnrkDRaDXXMGUqd+kTtLtK1Zb7CRzQcaSRPPPV37SvYTdevXEBVxe0HXylEjs8ibkmCw== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.0" - "@types/istanbul-reports" "^1.1.1" - "@types/yargs" "^15.0.0" - chalk "^3.0.0" - -"@sinonjs/commons@^1.7.0": - version "1.8.1" - resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.1.tgz#e7df00f98a203324f6dc7cc606cad9d4a8ab2217" - integrity sha512-892K+kWUUi3cl+LlqEWIDrhvLgdL79tECi8JZUyq6IviKy/DNhuzCRlbHUjxK89f4ypPMMaFnFuR9Ie6DoIMsw== - dependencies: - type-detect "4.0.8" - -"@types/babel__core@^7.1.7": - version "7.1.9" - resolved "https://registry.yarnpkg.com/@types/babel__core/-/babel__core-7.1.9.tgz#77e59d438522a6fb898fa43dc3455c6e72f3963d" - integrity sha512-sY2RsIJ5rpER1u3/aQ8OFSI7qGIy8o1NEEbgb2UaJcvOtXOMpd39ko723NBpjQFg9SIX7TXtjejZVGeIMLhoOw== - 
dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - "@types/babel__generator" "*" - "@types/babel__template" "*" - "@types/babel__traverse" "*" - -"@types/babel__generator@*": - version "7.6.1" - resolved "https://registry.yarnpkg.com/@types/babel__generator/-/babel__generator-7.6.1.tgz#4901767b397e8711aeb99df8d396d7ba7b7f0e04" - integrity sha512-bBKm+2VPJcMRVwNhxKu8W+5/zT7pwNEqeokFOmbvVSqGzFneNxYcEBro9Ac7/N9tlsaPYnZLK8J1LWKkMsLAew== - dependencies: - "@babel/types" "^7.0.0" - -"@types/babel__template@*": - version "7.0.2" - resolved "https://registry.yarnpkg.com/@types/babel__template/-/babel__template-7.0.2.tgz#4ff63d6b52eddac1de7b975a5223ed32ecea9307" - integrity sha512-/K6zCpeW7Imzgab2bLkLEbz0+1JlFSrUMdw7KoIIu+IUdu51GWaBZpd3y1VXGVXzynvGa4DaIaxNZHiON3GXUg== - dependencies: - "@babel/parser" "^7.1.0" - "@babel/types" "^7.0.0" - -"@types/babel__traverse@*", "@types/babel__traverse@^7.0.6": - version "7.0.13" - resolved "https://registry.yarnpkg.com/@types/babel__traverse/-/babel__traverse-7.0.13.tgz#1874914be974a492e1b4cb00585cabb274e8ba18" - integrity sha512-i+zS7t6/s9cdQvbqKDARrcbrPvtJGlbYsMkazo03nTAK3RX9FNrLllXys22uiTGJapPOTZTQ35nHh4ISph4SLQ== - dependencies: - "@babel/types" "^7.3.0" - -"@types/color-name@^1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@types/color-name/-/color-name-1.1.1.tgz#1c1261bbeaa10a8055bbc5d8ab84b7b2afc846a0" - integrity sha512-rr+OQyAjxze7GgWrSaJwydHStIhHq2lvY3BOC2Mj7KnzI7XK0Uw1TOOdI9lDoajEbSWLiYgoo4f1R51erQfhPQ== - -"@types/graceful-fs@^4.1.2": - version "4.1.3" - resolved "https://registry.yarnpkg.com/@types/graceful-fs/-/graceful-fs-4.1.3.tgz#039af35fe26bec35003e8d86d2ee9c586354348f" - integrity sha512-AiHRaEB50LQg0pZmm659vNBb9f4SJ0qrAnteuzhSeAUcJKxoYgEnprg/83kppCnc2zvtCKbdZry1a5pVY3lOTQ== - dependencies: - "@types/node" "*" - -"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0", "@types/istanbul-lib-coverage@^2.0.1": - version "2.0.3" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.3.tgz#4ba8ddb720221f432e443bd5f9117fd22cfd4762" - integrity sha512-sz7iLqvVUg1gIedBOvlkxPlc8/uVzyS5OwGz1cKjXzkl3FpL3al0crU8YGU1WoHkxn0Wxbw5tyi6hvzJKNzFsw== - -"@types/istanbul-lib-report@*": - version "3.0.0" - resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#c14c24f18ea8190c118ee7562b7ff99a36552686" - integrity sha512-plGgXAPfVKFoYfa9NpYDAkseG+g6Jr294RqeqcqDixSbU34MZVJRi/P+7Y8GDpzkEwLaGZZOpKIEmeVZNtKsrg== - dependencies: - "@types/istanbul-lib-coverage" "*" - -"@types/istanbul-reports@^1.1.1": - version "1.1.2" - resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-1.1.2.tgz#e875cc689e47bce549ec81f3df5e6f6f11cfaeb2" - integrity sha512-P/W9yOX/3oPZSpaYOCQzGqgCQRXn0FFO/V8bWrCQs+wLmvVVxk6CRBXALEvNs9OHIatlnlFokfhuDo2ug01ciw== - dependencies: - "@types/istanbul-lib-coverage" "*" - "@types/istanbul-lib-report" "*" - -"@types/node@*": - version "14.6.4" - resolved "https://registry.yarnpkg.com/@types/node/-/node-14.6.4.tgz#a145cc0bb14ef9c4777361b7bbafa5cf8e3acb5a" - integrity sha512-Wk7nG1JSaMfMpoMJDKUsWYugliB2Vy55pdjLpmLixeyMi7HizW2I/9QoxsPCkXl3dO+ZOVqPumKaDUv5zJu2uQ== - -"@types/normalize-package-data@^2.4.0": - version "2.4.0" - resolved "https://registry.yarnpkg.com/@types/normalize-package-data/-/normalize-package-data-2.4.0.tgz#e486d0d97396d79beedd0a6e33f4534ff6b4973e" - integrity sha512-f5j5b/Gf71L+dbqxIpQ4Z2WlmI/mPJ0fOkGGmFgtb6sAu97EPczzbS3/tJKxmcYDj55OX6ssqwDAWOHIYDRDGA== - 
-"@types/prettier@^1.19.0": - version "1.19.1" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-1.19.1.tgz#33509849f8e679e4add158959fdb086440e9553f" - integrity sha512-5qOlnZscTn4xxM5MeGXAMOsIOIKIbh9e85zJWfBRVPlRMEVawzoPhINYbRGkBZCI8LxvBe7tJCdWiarA99OZfQ== - -"@types/stack-utils@^1.0.1": - version "1.0.1" - resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-1.0.1.tgz#0a851d3bd96498fa25c33ab7278ed3bd65f06c3e" - integrity sha512-l42BggppR6zLmpfU6fq9HEa2oGPEI8yrSPL3GITjfRInppYFahObbIQOQK3UGxEnyQpltZLaPe75046NOZQikw== - -"@types/yargs-parser@*": - version "15.0.0" - resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-15.0.0.tgz#cb3f9f741869e20cce330ffbeb9271590483882d" - integrity sha512-FA/BWv8t8ZWJ+gEOnLLd8ygxH/2UFbAvgEonyfN6yWGLKc7zVjbpl2Y4CTjid9h2RfgPP6SEt6uHwEOply00yw== - -"@types/yargs@^15.0.0": - version "15.0.5" - resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-15.0.5.tgz#947e9a6561483bdee9adffc983e91a6902af8b79" - integrity sha512-Dk/IDOPtOgubt/IaevIUbTgV7doaKkoorvOyYM2CMwuDyP89bekI7H4xLIwunNYiK9jhCkmc6pUrJk3cj2AB9w== - dependencies: - "@types/yargs-parser" "*" - -abab@^2.0.0: - version "2.0.4" - resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.4.tgz#6dfa57b417ca06d21b2478f0e638302f99c2405c" - integrity sha512-Eu9ELJWCz/c1e9gTiCY+FceWxcqzjYEbqMgtndnuSqZSUCOL73TWNK2mHfIj4Cw2E/ongOp+JISVNCmovt2KYQ== - -accepts@~1.3.7: - version "1.3.7" - resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd" - integrity sha512-Il80Qs2WjYlJIBNzNkK6KYqlVMTbZLXgHx2oT0pU/fjRHyEp+PEfEPY0R3WCwAGVOtauxh1hOxNgIf5bv7dQpA== - dependencies: - mime-types "~2.1.24" - negotiator "0.6.2" - -acorn-globals@^4.3.2: - version "4.3.4" - resolved "https://registry.yarnpkg.com/acorn-globals/-/acorn-globals-4.3.4.tgz#9fa1926addc11c97308c4e66d7add0d40c3272e7" - integrity sha512-clfQEh21R+D0leSbUdWf3OcfqyaCSAQ8Ryq00bofSekfr9W8u1jyYZo6ir0xu9Gtcf7BjcHJpnbZH7JOCpP60A== - dependencies: - acorn "^6.0.1" - acorn-walk "^6.0.1" - -acorn-jsx@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/acorn-jsx/-/acorn-jsx-5.2.0.tgz#4c66069173d6fdd68ed85239fc256226182b2ebe" - integrity sha512-HiUX/+K2YpkpJ+SzBffkM/AQ2YE03S0U1kjTLVpoJdhZMOWy8qvXVN9JdLqv2QsaQ6MPYQIuNmwD8zOiYUofLQ== - -acorn-walk@^6.0.1: - version "6.2.0" - resolved "https://registry.yarnpkg.com/acorn-walk/-/acorn-walk-6.2.0.tgz#123cb8f3b84c2171f1f7fb252615b1c78a6b1a8c" - integrity sha512-7evsyfH1cLOCdAzZAd43Cic04yKydNx0cF+7tiA19p1XnLLPU4dpCQOqpjqwokFe//vS0QqfqqjCS2JkiIs0cA== - -acorn@^6.0.1: - version "6.4.1" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-6.4.1.tgz#531e58ba3f51b9dacb9a6646ca4debf5b14ca474" - integrity sha512-ZVA9k326Nwrj3Cj9jlh3wGFutC2ZornPNARZwsNYqQYgN0EsV2d53w5RN/co65Ohn4sUAUtb1rSUAOD6XN9idA== - -acorn@^7.1.0, acorn@^7.1.1: - version "7.4.0" - resolved "https://registry.yarnpkg.com/acorn/-/acorn-7.4.0.tgz#e1ad486e6c54501634c6c397c5c121daa383607c" - integrity sha512-+G7P8jJmCHr+S+cLfQxygbWhXy+8YTVGzAkpEbcLo2mLoL7tij/VG41QSHACSf5QgYRhMZYHuNc6drJaO0Da+w== - -ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.3: - version "6.12.4" - resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.4.tgz#0614facc4522127fa713445c6bfd3ebd376e2234" - integrity sha512-eienB2c9qVQs2KWexhkrdMLVDoIQCz5KSeLxwg9Lzk4DOfBtIK9PQwwufcsn1jjGuf9WZmqPMbGxOzfcuphJCQ== - dependencies: - fast-deep-equal "^3.1.1" - fast-json-stable-stringify "^2.0.0" - json-schema-traverse "^0.4.1" - uri-js "^4.2.2" - -ansi-escapes@^4.2.1: - version "4.3.1" - resolved 
"https://registry.yarnpkg.com/ansi-escapes/-/ansi-escapes-4.3.1.tgz#a5c47cc43181f1f38ffd7076837700d395522a61" - integrity sha512-JWF7ocqNrp8u9oqpgV+wH5ftbt+cfvv+PTjOvKLT3AdYly/LmORARfEVT1iyjwN+4MqE5UmVKoAdIBqeoCHgLA== - dependencies: - type-fest "^0.11.0" - -ansi-regex@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-4.1.0.tgz#8b9f8f08cf1acb843756a839ca8c7e3168c51997" - integrity sha512-1apePfXM1UOSqw0o9IiFAovVz9M5S1Dg+4TrDwfMewQ6p/rmMueb7tWZjQ1rx4Loy1ArBggoqGpfqqdI4rondg== - -ansi-regex@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/ansi-regex/-/ansi-regex-5.0.0.tgz#388539f55179bf39339c81af30a654d69f87cb75" - integrity sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg== - -ansi-styles@^3.2.0, ansi-styles@^3.2.1: - version "3.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-3.2.1.tgz#41fbb20243e50b12be0f04b8dedbf07520ce841d" - integrity sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA== - dependencies: - color-convert "^1.9.0" - -ansi-styles@^4.0.0, ansi-styles@^4.1.0: - version "4.2.1" - resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.2.1.tgz#90ae75c424d008d2624c5bf29ead3177ebfcf359" - integrity sha512-9VGjrMsG1vePxcSweQsN20KY/c4zN0h9fLjqAbwbPfahM3t+NL+M9HC8xeXG2I8pX5NoamTGNuomEUFI7fcUjA== - dependencies: - "@types/color-name" "^1.1.1" - color-convert "^2.0.1" - -anymatch@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-2.0.0.tgz#bcb24b4f37934d9aa7ac17b4adaf89e7c76ef2eb" - integrity sha512-5teOsQWABXHHBFP9y3skS5P3d/WfWXpv3FUpy+LorMrNYaT9pI4oLMQX7jzQ2KklNpGpWHzdCXTDT2Y3XGlZBw== - dependencies: - micromatch "^3.1.4" - normalize-path "^2.1.1" - -anymatch@^3.0.3: - version "3.1.1" - resolved "https://registry.yarnpkg.com/anymatch/-/anymatch-3.1.1.tgz#c55ecf02185e2469259399310c173ce31233b142" - integrity sha512-mM8522psRCqzV+6LhomX5wgp25YVibjh8Wj23I5RPkPppSVSjyKD2A2mBJmWGa+KN7f2D6LNh9jkBCeyLktzjg== - dependencies: - normalize-path "^3.0.0" - picomatch "^2.0.4" - -argparse@^1.0.7: - version "1.0.10" - resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911" - integrity sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg== - dependencies: - sprintf-js "~1.0.2" - -arr-diff@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" - integrity sha1-1kYQdP6/7HHn4VI1dhoyml3HxSA= - -arr-flatten@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" - integrity sha512-L3hKV5R/p5o81R7O02IGnwpDmkp6E982XhtbuwSe3O4qOtMMMtodicASA1Cny2U+aCXcNpml+m4dPsvsJ3jatg== - -arr-union@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" - integrity sha1-45sJrqne+Gao8gbiiK9jkZuuOcQ= - -array-equal@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" - integrity sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM= - -array-flatten@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/array-flatten/-/array-flatten-1.1.1.tgz#9a5f699051b1e7073328f2a008968b64ea2955d2" - integrity sha1-ml9pkFGx5wczKPKgCJaLZOopVdI= - -array-unique@^0.3.2: - version "0.3.2" - resolved 
"https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" - integrity sha1-qJS3XUvE9s1nnvMkSp/Y9Gri1Cg= - -asn1@~0.2.3: - version "0.2.4" - resolved "https://registry.yarnpkg.com/asn1/-/asn1-0.2.4.tgz#8d2475dfab553bb33e77b54e59e880bb8ce23136" - integrity sha512-jxwzQpLQjSmWXgwaCZE9Nz+glAG01yF1QnWgbhGwHI5A6FRIEY6IVqtHhIepHqI7/kyEyQEagBC5mBEFlIYvdg== - dependencies: - safer-buffer "~2.1.0" - -assert-plus@1.0.0, assert-plus@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assert-plus/-/assert-plus-1.0.0.tgz#f12e0f3c5d77b0b1cdd9146942e4e96c1e4dd525" - integrity sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU= - -assign-symbols@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" - integrity sha1-WWZ/QfrdTyDMvCu5a41Pf3jsA2c= - -astral-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/astral-regex/-/astral-regex-1.0.0.tgz#6c8c3fb827dd43ee3918f27b82782ab7658a6fd9" - integrity sha512-+Ryf6g3BKoRc7jfp7ad8tM4TtMiaWvbF/1/sQcZPkkS7ag3D5nMBCe2UfOTONtAkaG0tO0ij3C5Lwmf1EiyjHg== - -async-each@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/async-each/-/async-each-1.0.3.tgz#b727dbf87d7651602f06f4d4ac387f47d91b0cbf" - integrity sha512-z/WhQ5FPySLdvREByI2vZiTWwCnF0moMJ1hK9YQwDTHKh6I7/uSckMetoRGb5UBZPC1z0jlw+n/XCgjeH7y1AQ== - -asynckit@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" - integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= - -atob@^2.1.2: - version "2.1.2" - resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9" - integrity sha512-Wm6ukoaOGJi/73p/cl2GvLjTI5JM1k/O14isD73YML8StrH/7/lRFgmg8nICZgD3bZZvjwCGxtMOD3wWNAu8cg== - -aws-sign2@~0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/aws-sign2/-/aws-sign2-0.7.0.tgz#b46e890934a9591f2d2f6f86d7e6a9f1b3fe76a8" - integrity sha1-tG6JCTSpWR8tL2+G1+ap8bP+dqg= - -aws4@^1.8.0: - version "1.10.1" - resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.10.1.tgz#e1e82e4f3e999e2cfd61b161280d16a111f86428" - integrity sha512-zg7Hz2k5lI8kb7U32998pRRFin7zJlkfezGJjUc2heaD4Pw2wObakCDVzkKztTm/Ln7eiVvYsjqak0Ed4LkMDA== - -babel-jest@^25.5.1: - version "25.5.1" - resolved "https://registry.yarnpkg.com/babel-jest/-/babel-jest-25.5.1.tgz#bc2e6101f849d6f6aec09720ffc7bc5332e62853" - integrity sha512-9dA9+GmMjIzgPnYtkhBg73gOo/RHqPmLruP3BaGL4KEX3Dwz6pI8auSN8G8+iuEG90+GSswyKvslN+JYSaacaQ== - dependencies: - "@jest/transform" "^25.5.1" - "@jest/types" "^25.5.0" - "@types/babel__core" "^7.1.7" - babel-plugin-istanbul "^6.0.0" - babel-preset-jest "^25.5.0" - chalk "^3.0.0" - graceful-fs "^4.2.4" - slash "^3.0.0" - -babel-plugin-dynamic-import-node@^2.3.3: - version "2.3.3" - resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3" - integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ== - dependencies: - object.assign "^4.1.0" - -babel-plugin-istanbul@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/babel-plugin-istanbul/-/babel-plugin-istanbul-6.0.0.tgz#e159ccdc9af95e0b570c75b4573b7c34d671d765" - integrity sha512-AF55rZXpe7trmEylbaE1Gv54wn6rwU03aptvRoVIGP8YykoSxqdVLV1TfwflBCE/QtHmqtP8SWlTENqbK8GCSQ== - dependencies: - "@babel/helper-plugin-utils" "^7.0.0" - 
"@istanbuljs/load-nyc-config" "^1.0.0" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-instrument "^4.0.0" - test-exclude "^6.0.0" - -babel-plugin-jest-hoist@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-25.5.0.tgz#129c80ba5c7fc75baf3a45b93e2e372d57ca2677" - integrity sha512-u+/W+WAjMlvoocYGTwthAiQSxDcJAyHpQ6oWlHdFZaaN+Rlk8Q7iiwDPg2lN/FyJtAYnKjFxbn7xus4HCFkg5g== - dependencies: - "@babel/template" "^7.3.3" - "@babel/types" "^7.3.3" - "@types/babel__traverse" "^7.0.6" - -babel-preset-current-node-syntax@^0.1.2: - version "0.1.3" - resolved "https://registry.yarnpkg.com/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-0.1.3.tgz#b4b547acddbf963cba555ba9f9cbbb70bfd044da" - integrity sha512-uyexu1sVwcdFnyq9o8UQYsXwXflIh8LvrF5+cKrYam93ned1CStffB3+BEcsxGSgagoA3GEyjDqO4a/58hyPYQ== - dependencies: - "@babel/plugin-syntax-async-generators" "^7.8.4" - "@babel/plugin-syntax-bigint" "^7.8.3" - "@babel/plugin-syntax-class-properties" "^7.8.3" - "@babel/plugin-syntax-import-meta" "^7.8.3" - "@babel/plugin-syntax-json-strings" "^7.8.3" - "@babel/plugin-syntax-logical-assignment-operators" "^7.8.3" - "@babel/plugin-syntax-nullish-coalescing-operator" "^7.8.3" - "@babel/plugin-syntax-numeric-separator" "^7.8.3" - "@babel/plugin-syntax-object-rest-spread" "^7.8.3" - "@babel/plugin-syntax-optional-catch-binding" "^7.8.3" - "@babel/plugin-syntax-optional-chaining" "^7.8.3" - -babel-preset-jest@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/babel-preset-jest/-/babel-preset-jest-25.5.0.tgz#c1d7f191829487a907764c65307faa0e66590b49" - integrity sha512-8ZczygctQkBU+63DtSOKGh7tFL0CeCuz+1ieud9lJ1WPQ9O6A1a/r+LGn6Y705PA6whHQ3T1XuB/PmpfNYf8Fw== - dependencies: - babel-plugin-jest-hoist "^25.5.0" - babel-preset-current-node-syntax "^0.1.2" - -balanced-match@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767" - integrity sha1-ibTRmasr7kneFk6gK4nORi1xt2c= - -base@^0.11.1: - version "0.11.2" - resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" - integrity sha512-5T6P4xPgpp0YDFvSWwEZ4NoE3aM4QBQXDzmVbraCkFj8zHM+mba8SyqB5DbZWyR7mYHo6Y7BdQo3MoA4m0TeQg== - dependencies: - cache-base "^1.0.1" - class-utils "^0.3.5" - component-emitter "^1.2.1" - define-property "^1.0.0" - isobject "^3.0.1" - mixin-deep "^1.2.0" - pascalcase "^0.1.1" - -bcrypt-pbkdf@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/bcrypt-pbkdf/-/bcrypt-pbkdf-1.0.2.tgz#a4301d389b6a43f9b67ff3ca11a3f6637e360e9e" - integrity sha1-pDAdOJtqQ/m2f/PKEaP2Y342Dp4= - dependencies: - tweetnacl "^0.14.3" - -binary-extensions@^1.0.0: - version "1.13.1" - resolved "https://registry.yarnpkg.com/binary-extensions/-/binary-extensions-1.13.1.tgz#598afe54755b2868a5330d2aff9d4ebb53209b65" - integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw== - -bindings@^1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/bindings/-/bindings-1.5.0.tgz#10353c9e945334bc0511a6d90b38fbc7c9c504df" - integrity sha512-p2q/t/mhvuOj/UeLlV6566GD/guowlr0hHxClI0W9m7MWYkL1F0hLo+0Aexs9HSPCtR1SXQ0TD3MMKrXZajbiQ== - dependencies: - file-uri-to-path "1.0.0" - -body-parser@1.19.0, body-parser@^1.19.0: - version "1.19.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.19.0.tgz#96b2709e57c9c4e09a6fd66a8fd979844f69f08a" - integrity 
sha512-dhEPs72UPbDnAQJ9ZKMNTP6ptJaionhP5cBb541nXPlW60Jepo9RV/a4fX4XWW9CuFNK22krhrj1+rgzifNCsw== - dependencies: - bytes "3.1.0" - content-type "~1.0.4" - debug "2.6.9" - depd "~1.1.2" - http-errors "1.7.2" - iconv-lite "0.4.24" - on-finished "~2.3.0" - qs "6.7.0" - raw-body "2.4.0" - type-is "~1.6.17" - -brace-expansion@^1.1.7: - version "1.1.11" - resolved "https://registry.yarnpkg.com/brace-expansion/-/brace-expansion-1.1.11.tgz#3c7fcbf529d87226f3d2f52b966ff5271eb441dd" - integrity sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA== - dependencies: - balanced-match "^1.0.0" - concat-map "0.0.1" - -braces@^2.3.1, braces@^2.3.2: - version "2.3.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" - integrity sha512-aNdbnj9P8PjdXU4ybaWLK2IF3jc/EoDYbC7AazW6to3TRsfXxscC9UXOB5iDiEQrkyIbWp2SLQda4+QAa7nc3w== - dependencies: - arr-flatten "^1.1.0" - array-unique "^0.3.2" - extend-shallow "^2.0.1" - fill-range "^4.0.0" - isobject "^3.0.1" - repeat-element "^1.1.2" - snapdragon "^0.8.1" - snapdragon-node "^2.0.1" - split-string "^3.0.2" - to-regex "^3.0.1" - -braces@^3.0.1: - version "3.0.2" - resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.2.tgz#3454e1a462ee8d599e236df336cd9ea4f8afe107" - integrity sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A== - dependencies: - fill-range "^7.0.1" - -browser-process-hrtime@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/browser-process-hrtime/-/browser-process-hrtime-1.0.0.tgz#3c9b4b7d782c8121e56f10106d84c0d0ffc94626" - integrity sha512-9o5UecI3GhkpM6DrXr69PblIuWxPKk9Y0jHBRhdocZ2y7YECBFCsHm79Pr3OyR2AvjhDkabFJaDJMYRazHgsow== - -browser-resolve@^1.11.3: - version "1.11.3" - resolved "https://registry.yarnpkg.com/browser-resolve/-/browser-resolve-1.11.3.tgz#9b7cbb3d0f510e4cb86bdbd796124d28b5890af6" - integrity sha512-exDi1BYWB/6raKHmDTCicQfTkqwN5fioMFV4j8BsfMU4R2DK/QfZfK7kOVkmWCNANf0snkBzqGqAJBao9gZMdQ== - dependencies: - resolve "1.1.7" - -browserslist@^4.12.0, browserslist@^4.8.5: - version "4.14.0" - resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.14.0.tgz#2908951abfe4ec98737b72f34c3bcedc8d43b000" - integrity sha512-pUsXKAF2lVwhmtpeA3LJrZ76jXuusrNyhduuQs7CDFf9foT4Y38aQOserd2lMe5DSSrjf3fx34oHwryuvxAUgQ== - dependencies: - caniuse-lite "^1.0.30001111" - electron-to-chromium "^1.3.523" - escalade "^3.0.2" - node-releases "^1.1.60" - -bser@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/bser/-/bser-2.1.1.tgz#e6787da20ece9d07998533cfd9de6f5c38f4bc05" - integrity sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ== - dependencies: - node-int64 "^0.4.0" - -buffer-from@^1.0.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/buffer-from/-/buffer-from-1.1.1.tgz#32713bc028f75c02fdb710d7c7bcec1f2c6070ef" - integrity sha512-MQcXEUbCKtEo7bhqEs6560Hyd4XaovZlO/k9V3hjVUF/zwW7KBVdSK4gIt/bzwS9MbR5qob+F5jusZsb0YQK2A== - -bytes@3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.0.tgz#f6cf7933a360e0588fa9fde85651cdc7f805d1f6" - integrity sha512-zauLjrfCG+xvoyaqLoV8bLVXXNGC4JqlxFCutSDWA6fJrTo2ZuvLYTqZ7aHBLZSMOopbzwv8f+wZcVzfVTI2Dg== - -cache-base@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" - integrity 
sha512-AKcdTnFSWATd5/GCPRxr2ChwIJ85CeyrEyjRHlKxQ56d4XJMGym0uAiKn0xbLOGOl3+yRpOTi484dVCEc5AUzQ== - dependencies: - collection-visit "^1.0.0" - component-emitter "^1.2.1" - get-value "^2.0.6" - has-value "^1.0.0" - isobject "^3.0.1" - set-value "^2.0.0" - to-object-path "^0.3.0" - union-value "^1.0.0" - unset-value "^1.0.0" - -callsites@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/callsites/-/callsites-3.1.0.tgz#b3630abd8943432f54b3f0519238e33cd7df2f73" - integrity sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ== - -camelcase@^5.0.0, camelcase@^5.3.1: - version "5.3.1" - resolved "https://registry.yarnpkg.com/camelcase/-/camelcase-5.3.1.tgz#e3c9b31569e106811df242f715725a1f4c494320" - integrity sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg== - -caniuse-lite@^1.0.30001111: - version "1.0.30001124" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001124.tgz#5d9998190258e11630d674fc50ea8e579ae0ced2" - integrity sha512-zQW8V3CdND7GHRH6rxm6s59Ww4g/qGWTheoboW9nfeMg7sUoopIfKCcNZUjwYRCOrvereh3kwDpZj4VLQ7zGtA== - -capture-exit@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/capture-exit/-/capture-exit-2.0.0.tgz#fb953bfaebeb781f62898239dabb426d08a509a4" - integrity sha512-PiT/hQmTonHhl/HFGN+Lx3JJUznrVYJ3+AQsnthneZbvW7x+f08Tk7yLJTLEOUvBTbduLeeBkxEaYXUOUrRq6g== - dependencies: - rsvp "^4.8.4" - -caseless@~0.12.0: - version "0.12.0" - resolved "https://registry.yarnpkg.com/caseless/-/caseless-0.12.0.tgz#1b681c21ff84033c826543090689420d187151dc" - integrity sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw= - -chalk@^2.0.0, chalk@^2.1.0: - version "2.4.2" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424" - integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ== - dependencies: - ansi-styles "^3.2.1" - escape-string-regexp "^1.0.5" - supports-color "^5.3.0" - -chalk@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-3.0.0.tgz#3f73c2bf526591f574cc492c51e2456349f844e4" - integrity sha512-4D3B6Wf41KOYRFdszmDqMCGq5VV/uMAB273JILmO+3jAlh8X4qDtdtgCR3fxtbLEMzSx22QdhnDcJvu2u1fVwg== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chalk@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.0.tgz#4e14870a618d9e2edd97dd8345fd9d9dc315646a" - integrity sha512-qwx12AxXe2Q5xQ43Ac//I6v5aXTipYrSESdOgzrN+9XjgEpyjpKuvSGaN4qE93f7TQTlerQQ8S+EQ0EyDoVL1A== - dependencies: - ansi-styles "^4.1.0" - supports-color "^7.1.0" - -chardet@^0.7.0: - version "0.7.0" - resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.7.0.tgz#90094849f0937f2eedc2425d0d28a9e5f0cbad9e" - integrity sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA== - -chokidar@^2.1.8: - version "2.1.8" - resolved "https://registry.yarnpkg.com/chokidar/-/chokidar-2.1.8.tgz#804b3a7b6a99358c3c5c61e71d8728f041cff917" - integrity sha512-ZmZUazfOzf0Nve7duiCKD23PFSCs4JPoYyccjUFF3aQkQadqBhfzhjkwBH2mNOG9cTBwhamM37EIsIkZw3nRgg== - dependencies: - anymatch "^2.0.0" - async-each "^1.0.1" - braces "^2.3.2" - glob-parent "^3.1.0" - inherits "^2.0.3" - is-binary-path "^1.0.0" - is-glob "^4.0.0" - normalize-path "^3.0.0" - path-is-absolute "^1.0.0" - readdirp "^2.2.1" - upath "^1.1.1" - optionalDependencies: - fsevents "^1.2.7" - -ci-info@^2.0.0: - version "2.0.0" - resolved 
"https://registry.yarnpkg.com/ci-info/-/ci-info-2.0.0.tgz#67a9e964be31a51e15e5010d58e6f12834002f46" - integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== - -class-utils@^0.3.5: - version "0.3.6" - resolved "https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" - integrity sha512-qOhPa/Fj7s6TY8H8esGu5QNpMMQxz79h+urzrNYN6mn+9BnxlDGf5QZ+XeCDsxSjPqsSR56XOZOJmpeurnLMeg== - dependencies: - arr-union "^3.1.0" - define-property "^0.2.5" - isobject "^3.0.0" - static-extend "^0.1.1" - -cli-cursor@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-3.1.0.tgz#264305a7ae490d1d03bf0c9ba7c925d1753af307" - integrity sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw== - dependencies: - restore-cursor "^3.1.0" - -cli-width@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/cli-width/-/cli-width-3.0.0.tgz#a2f48437a2caa9a22436e794bf071ec9e61cedf6" - integrity sha512-FxqpkPPwu1HjuN93Omfm4h8uIanXofW0RxVEW3k5RKx+mJJYSthzNhp32Kzxxy3YAEZ/Dc/EWN1vZRY0+kOhbw== - -client-sessions@^0.8.0: - version "0.8.0" - resolved "https://registry.yarnpkg.com/client-sessions/-/client-sessions-0.8.0.tgz#a7d8c5558ad5d56f2a199f3533eb654b5df893fd" - integrity sha1-p9jFVYrV1W8qGZ81M+tlS134k/0= - dependencies: - cookies "^0.7.0" - -cliui@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1" - integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ== - dependencies: - string-width "^4.2.0" - strip-ansi "^6.0.0" - wrap-ansi "^6.2.0" - -co@^4.6.0: - version "4.6.0" - resolved "https://registry.yarnpkg.com/co/-/co-4.6.0.tgz#6ea6bdf3d853ae54ccb8e47bfa0bf3f9031fb184" - integrity sha1-bqa989hTrlTMuOR7+gvz+QMfsYQ= - -collect-v8-coverage@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/collect-v8-coverage/-/collect-v8-coverage-1.0.1.tgz#cc2c8e94fc18bbdffe64d6534570c8a673b27f59" - integrity sha512-iBPtljfCNcTKNAto0KEtDfZ3qzjJvqE3aTGZsbhjSBlorqpXJlaWWtPO35D+ZImoC3KWejX64o+yPGxhWSTzfg== - -collection-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" - integrity sha1-S8A3PBZLwykbTTaMgpzxqApZ3KA= - dependencies: - map-visit "^1.0.0" - object-visit "^1.0.0" - -color-convert@^1.9.0: - version "1.9.3" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.3.tgz#bb71850690e1f136567de629d2d5471deda4c1e8" - integrity sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg== - dependencies: - color-name "1.1.3" - -color-convert@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3" - integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ== - dependencies: - color-name "~1.1.4" - -color-name@1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.3.tgz#a7d0558bd89c42f795dd42328f740831ca53bc25" - integrity sha1-p9BVi9icQveV3UIyj3QIMcpTvCU= - -color-name@~1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2" - integrity 
sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA== - -combined-stream@^1.0.6, combined-stream@~1.0.6: - version "1.0.8" - resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f" - integrity sha512-FQN4MRfuJeHf7cBbBMJFXhKSDq+2kAArBlmRBvcvFE5BB1HZKXtSFASDhdlz9zOYwxh8lDdnvmMOe/+5cdoEdg== - dependencies: - delayed-stream "~1.0.0" - -commander@^4.0.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/commander/-/commander-4.1.1.tgz#9fd602bd936294e9e9ef46a3f4d6964044b18068" - integrity sha512-NOKm8xhkzAjzFx8B2v5OAHT+u5pRQc2UCa2Vq9jYL/31o2wi9mxBA7LIFs3sV5VSC49z6pEhfbMULvShKj26WA== - -commondir@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b" - integrity sha1-3dgA2gxmEnOTzKWVDqloo6rxJTs= - -component-emitter@^1.2.1: - version "1.3.0" - resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" - integrity sha512-Rd3se6QB+sO1TwqZjscQrurpEPIfO0/yYnSin6Q/rD3mOutHvUrCAhJub3r90uNb+SESBuE0QYoB90YdfatsRg== - -concat-map@0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/concat-map/-/concat-map-0.0.1.tgz#d8a96bd77fd68df7793a73036a3ba0d5405d477b" - integrity sha1-2Klr13/Wjfd5OnMDajug1UBdR3s= - -content-disposition@0.5.3: - version "0.5.3" - resolved "https://registry.yarnpkg.com/content-disposition/-/content-disposition-0.5.3.tgz#e130caf7e7279087c5616c2007d0485698984fbd" - integrity sha512-ExO0774ikEObIAEV9kDo50o+79VCUdEB6n6lzKgGwupcVeRlhrj3qGAfwq8G6uBJjkqLrhT0qEYFcWng8z1z0g== - dependencies: - safe-buffer "5.1.2" - -content-type@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/content-type/-/content-type-1.0.4.tgz#e138cc75e040c727b1966fe5e5f8c9aee256fe3b" - integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== - -convert-source-map@^1.1.0, convert-source-map@^1.4.0, convert-source-map@^1.6.0, convert-source-map@^1.7.0: - version "1.7.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.7.0.tgz#17a2cb882d7f77d3490585e2ce6c524424a3a442" - integrity sha512-4FJkXzKXEDB1snCFZlLP4gpC3JILicCpGbzG9f9G7tGqGCzETQ2hWPrcinA9oU4wtf2biUaEH5065UnMeR33oA== - dependencies: - safe-buffer "~5.1.1" - -cookie-signature@1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/cookie-signature/-/cookie-signature-1.0.6.tgz#e303a882b342cc3ee8ca513a79999734dab3ae2c" - integrity sha1-4wOogrNCzD7oylE6eZmXNNqzriw= - -cookie@0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/cookie/-/cookie-0.4.0.tgz#beb437e7022b3b6d49019d088665303ebe9c14ba" - integrity sha512-+Hp8fLp57wnUSt0tY0tHEXh4voZRDnoIrZPqlo3DPiI4y9lwg/jqx+1Om94/W6ZaPDOUbnjOt/99w66zk+l1Xg== - -cookies@^0.7.0: - version "0.7.3" - resolved "https://registry.yarnpkg.com/cookies/-/cookies-0.7.3.tgz#7912ce21fbf2e8c2da70cf1c3f351aecf59dadfa" - integrity sha512-+gixgxYSgQLTaTIilDHAdlNPZDENDQernEMiIcZpYYP14zgHsCt4Ce1FEjFtcp6GefhozebB6orvhAAWx/IS0A== - dependencies: - depd "~1.1.2" - keygrip "~1.0.3" - -copy-descriptor@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" - integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40= - -core-js-compat@^3.6.2: - version "3.6.5" - resolved 
"https://registry.yarnpkg.com/core-js-compat/-/core-js-compat-3.6.5.tgz#2a51d9a4e25dfd6e690251aa81f99e3c05481f1c" - integrity sha512-7ItTKOhOZbznhXAQ2g/slGg1PJV5zDO/WdkTwi7UEOJmkvsE32PWvx6mKtDjiMpjnR2CNf6BAD6sSxIlv7ptng== - dependencies: - browserslist "^4.8.5" - semver "7.0.0" - -core-js@^3.2.1: - version "3.6.5" - resolved "https://registry.yarnpkg.com/core-js/-/core-js-3.6.5.tgz#7395dc273af37fb2e50e9bd3d9fe841285231d1a" - integrity sha512-vZVEEwZoIsI+vPEuoF9Iqf5H7/M3eeQqWlQnYa8FSKKePuYTf5MWnxb5SDAzCa60b3JBRS5g9b+Dq7b1y/RCrA== - -core-util-is@1.0.2, core-util-is@~1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/core-util-is/-/core-util-is-1.0.2.tgz#b5fd54220aa2bc5ab57aab7140c940754503c1a7" - integrity sha1-tf1UIgqivFq1eqtxQMlAdUUDwac= - -cors@^2.8.5: - version "2.8.5" - resolved "https://registry.yarnpkg.com/cors/-/cors-2.8.5.tgz#eac11da51592dd86b9f06f6e7ac293b3df875d29" - integrity sha512-KIHbLJqu73RGr/hnbrO9uBeixNGuvSQjul/jdFvS/KFSIH1hWVd1ng7zOHx+YrEfInLG7q4n6GHQ9cDtxv/P6g== - dependencies: - object-assign "^4" - vary "^1" - -cross-env@^7.0.2: - version "7.0.2" - resolved "https://registry.yarnpkg.com/cross-env/-/cross-env-7.0.2.tgz#bd5ed31339a93a3418ac4f3ca9ca3403082ae5f9" - integrity sha512-KZP/bMEOJEDCkDQAyRhu3RL2ZO/SUVrxQVI0G3YEQ+OLbRA3c6zgixe8Mq8a/z7+HKlNEjo8oiLUs8iRijY2Rw== - dependencies: - cross-spawn "^7.0.1" - -cross-spawn@^6.0.0, cross-spawn@^6.0.5: - version "6.0.5" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-6.0.5.tgz#4a5ec7c64dfae22c3a14124dbacdee846d80cbc4" - integrity sha512-eTVLrBSt7fjbDygz805pMnstIs2VTBNkRm0qxZd+M7A5XDdxVRWO5MxGBXZhjY4cqLYLdtrGqRf8mBPmzwSpWQ== - dependencies: - nice-try "^1.0.4" - path-key "^2.0.1" - semver "^5.5.0" - shebang-command "^1.2.0" - which "^1.2.9" - -cross-spawn@^7.0.0, cross-spawn@^7.0.1: - version "7.0.3" - resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-7.0.3.tgz#f73a85b9d5d41d045551c177e2882d4ac85728a6" - integrity sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w== - dependencies: - path-key "^3.1.0" - shebang-command "^2.0.0" - which "^2.0.1" - -cssom@^0.4.1: - version "0.4.4" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.4.4.tgz#5a66cf93d2d0b661d80bf6a44fb65f5c2e4e0a10" - integrity sha512-p3pvU7r1MyyqbTk+WbNJIgJjG2VmTIaB10rI93LzVPrmDJKkzKYMtxxyAvQXR/NS6otuzveI7+7BBq3SjBS2mw== - -cssom@~0.3.6: - version "0.3.8" - resolved "https://registry.yarnpkg.com/cssom/-/cssom-0.3.8.tgz#9f1276f5b2b463f2114d3f2c75250af8c1a36f4a" - integrity sha512-b0tGHbfegbhPJpxpiBPU2sCkigAqtM9O121le6bbOlgyV+NyGyCmVfJ6QW9eRjz8CpNfWEOYBIMIGRYkLwsIYg== - -cssstyle@^2.0.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/cssstyle/-/cssstyle-2.3.0.tgz#ff665a0ddbdc31864b09647f34163443d90b0852" - integrity sha512-AZL67abkUzIuvcHqk7c09cezpGNcxUxU4Ioi/05xHk4DQeTkWmGYftIE6ctU6AEt+Gn4n1lDStOtj7FKycP71A== - dependencies: - cssom "~0.3.6" - -dashdash@^1.12.0: - version "1.14.1" - resolved "https://registry.yarnpkg.com/dashdash/-/dashdash-1.14.1.tgz#853cfa0f7cbe2fed5de20326b8dd581035f6e2f0" - integrity sha1-hTz6D3y+L+1d4gMmuN1YEDX24vA= - dependencies: - assert-plus "^1.0.0" - -data-urls@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/data-urls/-/data-urls-1.1.0.tgz#15ee0582baa5e22bb59c77140da8f9c76963bbfe" - integrity sha512-YTWYI9se1P55u58gL5GkQHW4P6VJBJ5iBT+B5a7i2Tjadhv52paJG0qHX4A0OR6/t52odI64KP2YvFpkDOi3eQ== - dependencies: - abab "^2.0.0" - whatwg-mimetype "^2.2.0" - whatwg-url "^7.0.0" - -debug@2.6.9, debug@^2.2.0, debug@^2.3.3: - 
version "2.6.9" - resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" - integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== - dependencies: - ms "2.0.0" - -debug@^4.0.1, debug@^4.1.0, debug@^4.1.1: - version "4.1.1" - resolved "https://registry.yarnpkg.com/debug/-/debug-4.1.1.tgz#3b72260255109c6b589cee050f1d516139664791" - integrity sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw== - dependencies: - ms "^2.1.1" - -decamelize@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" - integrity sha1-9lNNFRSCabIDUue+4m9QH5oZEpA= - -decode-uri-component@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" - integrity sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU= - -deep-is@~0.1.3: - version "0.1.3" - resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" - integrity sha1-s2nW+128E+7PUk+RsHD+7cNXzzQ= - -deepmerge@^4.2.2: - version "4.2.2" - resolved "https://registry.yarnpkg.com/deepmerge/-/deepmerge-4.2.2.tgz#44d2ea3679b8f4d4ffba33f03d865fc1e7bf4955" - integrity sha512-FJ3UgI4gIl+PHZm53knsuSFpE+nESMr7M4v9QcgB7S63Kj/6WqMiFQJpBBYz1Pt+66bZpP3Q7Lye0Oo9MPKEdg== - -define-properties@^1.1.2, define-properties@^1.1.3: - version "1.1.3" - resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.3.tgz#cf88da6cbee26fe6db7094f61d870cbd84cee9f1" - integrity sha512-3MqfYKj2lLzdMSf8ZIZE/V+Zuy+BgD6f164e8K2w7dgnpKArBDerGYpM46IYYcjnkdPNMjPk9A6VFB8+3SKlXQ== - dependencies: - object-keys "^1.0.12" - -define-property@^0.2.5: - version "0.2.5" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" - integrity sha1-w1se+RjsPJkPmlvFe+BKrOxcgRY= - dependencies: - is-descriptor "^0.1.0" - -define-property@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" - integrity sha1-dp66rz9KY6rTr56NMEybvnm/sOY= - dependencies: - is-descriptor "^1.0.0" - -define-property@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" - integrity sha512-jwK2UV4cnPpbcG7+VRARKTZPUWowwXA8bzH5NP6ud0oeAxyYPuGZUAC7hMugpCdz4BeSZl2Dl9k66CHJ/46ZYQ== - dependencies: - is-descriptor "^1.0.2" - isobject "^3.0.1" - -delayed-stream@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619" - integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk= - -denque@^1.4.1: - version "1.4.1" - resolved "https://registry.yarnpkg.com/denque/-/denque-1.4.1.tgz#6744ff7641c148c3f8a69c307e51235c1f4a37cf" - integrity sha512-OfzPuSZKGcgr96rf1oODnfjqBFmr1DVoc/TrItj3Ohe0Ah1C5WX5Baquw/9U9KovnQ88EqmJbD66rKYUQYN1tQ== - -depd@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9" - integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak= - -destroy@~1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80" - integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA= - -detect-newline@^3.0.0: - version 
"3.1.0" - resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-3.1.0.tgz#576f5dfc63ae1a192ff192d8ad3af6308991b651" - integrity sha512-TLz+x/vEXm/Y7P7wn1EJFNLxYpUD4TgMosxY6fAVJUnJMbupHBOncxyWUG9OpTaH9EBD7uFI5LfEgmMOc54DsA== - -diff-sequences@^25.2.6: - version "25.2.6" - resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-25.2.6.tgz#5f467c00edd35352b7bca46d7927d60e687a76dd" - integrity sha512-Hq8o7+6GaZeoFjtpgvRBUknSXNeJiCx7V9Fr94ZMljNiCr9n9L8H8aJqgWOQiDDGdyn29fRNcDdRVJ5fdyihfg== - -doctrine@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/doctrine/-/doctrine-3.0.0.tgz#addebead72a6574db783639dc87a121773973961" - integrity sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w== - dependencies: - esutils "^2.0.2" - -domexception@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/domexception/-/domexception-1.0.1.tgz#937442644ca6a31261ef36e3ec677fe805582c90" - integrity sha512-raigMkn7CJNNo6Ihro1fzG7wr3fHuYVytzquZKX5n0yizGsTcYgzdIUwj1X9pK0VvjeihV+XiclP+DjwbsSKug== - dependencies: - webidl-conversions "^4.0.2" - -ecc-jsbn@~0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz#3a83a904e54353287874c564b7549386849a98c9" - integrity sha1-OoOpBOVDUyh4dMVkt1SThoSamMk= - dependencies: - jsbn "~0.1.0" - safer-buffer "^2.1.0" - -ee-first@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d" - integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0= - -electron-to-chromium@^1.3.523: - version "1.3.562" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.562.tgz#79c20277ee1c8d0173a22af00e38433b752bc70f" - integrity sha512-WhRe6liQ2q/w1MZc8mD8INkenHivuHdrr4r5EQHNomy3NJux+incP6M6lDMd0paShP3MD0WGe5R1TWmEClf+Bg== - -emoji-regex@^7.0.1: - version "7.0.3" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-7.0.3.tgz#933a04052860c85e83c122479c4748a8e4c72156" - integrity sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA== - -emoji-regex@^8.0.0: - version "8.0.0" - resolved "https://registry.yarnpkg.com/emoji-regex/-/emoji-regex-8.0.0.tgz#e818fd69ce5ccfcb404594f842963bf53164cc37" - integrity sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A== - -encodeurl@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/encodeurl/-/encodeurl-1.0.2.tgz#ad3ff4c86ec2d029322f5a02c3a9a606c95b3f59" - integrity sha1-rT/0yG7C0CkyL1oCw6mmBslbP1k= - -end-of-stream@^1.1.0: - version "1.4.4" - resolved "https://registry.yarnpkg.com/end-of-stream/-/end-of-stream-1.4.4.tgz#5ae64a5f45057baf3626ec14da0ca5e4b2431eb0" - integrity sha512-+uw1inIHVPQoaVuHzRyXd21icM+cnt4CzD5rW+NC1wjOUSTOs+Te7FOv7AhN7vS9x/oIyhLP5PR1H+phQAHu5Q== - dependencies: - once "^1.4.0" - -error-ex@^1.3.1: - version "1.3.2" - resolved "https://registry.yarnpkg.com/error-ex/-/error-ex-1.3.2.tgz#b4ac40648107fdcdcfae242f428bea8a14d4f1bf" - integrity sha512-7dFHNmqeFSEt2ZBsCriorKnn3Z2pj+fd9kmI6QoWw4//DL+icEBfc0U7qJCisqrTsKTjw4fNFy2pW9OqStD84g== - dependencies: - is-arrayish "^0.2.1" - -es-abstract@^1.17.0-next.1, es-abstract@^1.17.5: - version "1.17.6" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.17.6.tgz#9142071707857b2cacc7b89ecb670316c3e2d52a" - integrity sha512-Fr89bON3WFyUi5EvAeI48QTWX0AyekGgLA8H+c+7fbfCkJwRWRMLd8CQedNEyJuoYYhmtEqY92pgte1FAhBlhw== - dependencies: - es-to-primitive 
"^1.2.1" - function-bind "^1.1.1" - has "^1.0.3" - has-symbols "^1.0.1" - is-callable "^1.2.0" - is-regex "^1.1.0" - object-inspect "^1.7.0" - object-keys "^1.1.1" - object.assign "^4.1.0" - string.prototype.trimend "^1.0.1" - string.prototype.trimstart "^1.0.1" - -es-to-primitive@^1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/es-to-primitive/-/es-to-primitive-1.2.1.tgz#e55cd4c9cdc188bcefb03b366c736323fc5c898a" - integrity sha512-QCOllgZJtaUo9miYBcLChTUaHNjJF3PYs1VidD7AwiEj1kYxKeQTctLAezAOH5ZKRH0g2IgPn6KwB4IT8iRpvA== - dependencies: - is-callable "^1.1.4" - is-date-object "^1.0.1" - is-symbol "^1.0.2" - -escalade@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/escalade/-/escalade-3.0.2.tgz#6a580d70edb87880f22b4c91d0d56078df6962c4" - integrity sha512-gPYAU37hYCUhW5euPeR+Y74F7BL+IBsV93j5cvGriSaD1aG6MGsqsV1yamRdrWrb2j3aiZvb0X+UBOWpx3JWtQ== - -escape-html@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/escape-html/-/escape-html-1.0.3.tgz#0258eae4d3d0c0974de1c169188ef0051d1d1988" - integrity sha1-Aljq5NPQwJdN4cFpGI7wBR0dGYg= - -escape-string-regexp@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz#1b61c0562190a8dff6ae3bb2cf0200ca130b86d4" - integrity sha1-G2HAViGQqN/2rjuyzwIAyhMLhtQ= - -escodegen@^1.11.1: - version "1.14.3" - resolved "https://registry.yarnpkg.com/escodegen/-/escodegen-1.14.3.tgz#4e7b81fba61581dc97582ed78cab7f0e8d63f503" - integrity sha512-qFcX0XJkdg+PB3xjZZG/wKSuT1PnQWx57+TVSjIMmILd2yC/6ByYElPwJnslDsuWuSAp4AwJGumarAAmJch5Kw== - dependencies: - esprima "^4.0.1" - estraverse "^4.2.0" - esutils "^2.0.2" - optionator "^0.8.1" - optionalDependencies: - source-map "~0.6.1" - -eslint-scope@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/eslint-scope/-/eslint-scope-5.1.0.tgz#d0f971dfe59c69e0cada684b23d49dbf82600ce5" - integrity sha512-iiGRvtxWqgtx5m8EyQUJihBloE4EnYeGE/bz1wSPwJE6tZuJUtHlhqDM4Xj2ukE8Dyy1+HCZ4hE0fzIVMzb58w== - dependencies: - esrecurse "^4.1.0" - estraverse "^4.1.1" - -eslint-utils@^1.4.3: - version "1.4.3" - resolved "https://registry.yarnpkg.com/eslint-utils/-/eslint-utils-1.4.3.tgz#74fec7c54d0776b6f67e0251040b5806564e981f" - integrity sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q== - dependencies: - eslint-visitor-keys "^1.1.0" - -eslint-visitor-keys@^1.1.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/eslint-visitor-keys/-/eslint-visitor-keys-1.3.0.tgz#30ebd1ef7c2fdff01c3a4f151044af25fab0523e" - integrity sha512-6J72N8UNa462wa/KFODt/PJ3IU60SDpC3QXC1Hjc1BXXpfL2C9R5+AU7jhe0F6GREqVMh4Juu+NY7xn+6dipUQ== - -eslint@^6.8.0: - version "6.8.0" - resolved "https://registry.yarnpkg.com/eslint/-/eslint-6.8.0.tgz#62262d6729739f9275723824302fb227c8c93ffb" - integrity sha512-K+Iayyo2LtyYhDSYwz5D5QdWw0hCacNzyq1Y821Xna2xSJj7cijoLLYmLxTQgcgZ9mC61nryMy9S7GRbYpI5Ig== - dependencies: - "@babel/code-frame" "^7.0.0" - ajv "^6.10.0" - chalk "^2.1.0" - cross-spawn "^6.0.5" - debug "^4.0.1" - doctrine "^3.0.0" - eslint-scope "^5.0.0" - eslint-utils "^1.4.3" - eslint-visitor-keys "^1.1.0" - espree "^6.1.2" - esquery "^1.0.1" - esutils "^2.0.2" - file-entry-cache "^5.0.1" - functional-red-black-tree "^1.0.1" - glob-parent "^5.0.0" - globals "^12.1.0" - ignore "^4.0.6" - import-fresh "^3.0.0" - imurmurhash "^0.1.4" - inquirer "^7.0.0" - is-glob "^4.0.0" - js-yaml "^3.13.1" - json-stable-stringify-without-jsonify "^1.0.1" - levn "^0.3.0" - lodash "^4.17.14" - minimatch "^3.0.4" - mkdirp 
"^0.5.1" - natural-compare "^1.4.0" - optionator "^0.8.3" - progress "^2.0.0" - regexpp "^2.0.1" - semver "^6.1.2" - strip-ansi "^5.2.0" - strip-json-comments "^3.0.1" - table "^5.2.3" - text-table "^0.2.0" - v8-compile-cache "^2.0.3" - -espree@^6.1.2: - version "6.2.1" - resolved "https://registry.yarnpkg.com/espree/-/espree-6.2.1.tgz#77fc72e1fd744a2052c20f38a5b575832e82734a" - integrity sha512-ysCxRQY3WaXJz9tdbWOwuWr5Y/XrPTGX9Kiz3yoUXwW0VZ4w30HTkQLaGx/+ttFjF8i+ACbArnB4ce68a9m5hw== - dependencies: - acorn "^7.1.1" - acorn-jsx "^5.2.0" - eslint-visitor-keys "^1.1.0" - -esprima@^4.0.0, esprima@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/esprima/-/esprima-4.0.1.tgz#13b04cdb3e6c5d19df91ab6987a8695619b0aa71" - integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A== - -esquery@^1.0.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.1.tgz#b78b5828aa8e214e29fb74c4d5b752e1c033da57" - integrity sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ== - dependencies: - estraverse "^5.1.0" - -esrecurse@^4.1.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/esrecurse/-/esrecurse-4.3.0.tgz#7ad7964d679abb28bee72cec63758b1c5d2c9921" - integrity sha512-KmfKL3b6G+RXvP8N1vr3Tq1kL/oCFgn2NYXEtqP8/L3pKapUA4G8cFVaoF3SU323CD4XypR/ffioHmkti6/Tag== - dependencies: - estraverse "^5.2.0" - -estraverse@^4.1.1, estraverse@^4.2.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d" - integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw== - -estraverse@^5.1.0, estraverse@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.2.0.tgz#307df42547e6cc7324d3cf03c155d5cdb8c53880" - integrity sha512-BxbNGGNm0RyRYvUdHpIwv9IWzeM9XClbOxwoATuFdOE7ZE6wHL+HQ5T8hoPM+zHvmKzzsEqhgy0GrQ5X13afiQ== - -esutils@^2.0.2: - version "2.0.3" - resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.3.tgz#74d2eb4de0b8da1293711910d50775b9b710ef64" - integrity sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g== - -etag@~1.8.1: - version "1.8.1" - resolved "https://registry.yarnpkg.com/etag/-/etag-1.8.1.tgz#41ae2eeb65efa62268aebfea83ac7d79299b0887" - integrity sha1-Qa4u62XvpiJorr/qg6x9eSmbCIc= - -exec-sh@^0.3.2: - version "0.3.4" - resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.3.4.tgz#3a018ceb526cc6f6df2bb504b2bfe8e3a4934ec5" - integrity sha512-sEFIkc61v75sWeOe72qyrqg2Qg0OuLESziUDk/O/z2qgS15y2gWVFrI6f2Qn/qw/0/NCfCEsmNA4zOjkwEZT1A== - -execa@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-1.0.0.tgz#c6236a5bb4df6d6f15e88e7f017798216749ddd8" - integrity sha512-adbxcyWV46qiHyvSp50TKt05tB4tK3HcmF7/nxfAdhnox83seTDbwnaqKO4sXRy7roHAIFqJP/Rw/AuEbX61LA== - dependencies: - cross-spawn "^6.0.0" - get-stream "^4.0.0" - is-stream "^1.1.0" - npm-run-path "^2.0.0" - p-finally "^1.0.0" - signal-exit "^3.0.0" - strip-eof "^1.0.0" - -execa@^3.2.0: - version "3.4.0" - resolved "https://registry.yarnpkg.com/execa/-/execa-3.4.0.tgz#c08ed4550ef65d858fac269ffc8572446f37eb89" - integrity sha512-r9vdGQk4bmCuK1yKQu1KTwcT2zwfWdbdaXfCtAh+5nU/4fSX+JAb7vZGvI5naJrQlvONrEB20jeruESI69530g== - dependencies: - cross-spawn "^7.0.0" - get-stream "^5.0.0" - human-signals "^1.1.1" - is-stream "^2.0.0" - merge-stream "^2.0.0" - npm-run-path "^4.0.0" - onetime "^5.1.0" - 
p-finally "^2.0.0" - signal-exit "^3.0.2" - strip-final-newline "^2.0.0" - -exit@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/exit/-/exit-0.1.2.tgz#0632638f8d877cc82107d30a0fff1a17cba1cd0c" - integrity sha1-BjJjj42HfMghB9MKD/8aF8uhzQw= - -expand-brackets@^2.1.4: - version "2.1.4" - resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" - integrity sha1-t3c14xXOMPa27/D4OwQVGiJEliI= - dependencies: - debug "^2.3.3" - define-property "^0.2.5" - extend-shallow "^2.0.1" - posix-character-classes "^0.1.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -expect@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/expect/-/expect-25.5.0.tgz#f07f848712a2813bb59167da3fb828ca21f58bba" - integrity sha512-w7KAXo0+6qqZZhovCaBVPSIqQp7/UTcx4M9uKt2m6pd2VB1voyC8JizLRqeEqud3AAVP02g+hbErDu5gu64tlA== - dependencies: - "@jest/types" "^25.5.0" - ansi-styles "^4.0.0" - jest-get-type "^25.2.6" - jest-matcher-utils "^25.5.0" - jest-message-util "^25.5.0" - jest-regex-util "^25.2.6" - -express@^4.17.1: - version "4.17.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.17.1.tgz#4491fc38605cf51f8629d39c2b5d026f98a4c134" - integrity sha512-mHJ9O79RqluphRrcw2X/GTh3k9tVv8YcoyY4Kkh4WDMUYKRZUq0h1o0w2rrrxBqM7VoeUVqgb27xlEMXTnYt4g== - dependencies: - accepts "~1.3.7" - array-flatten "1.1.1" - body-parser "1.19.0" - content-disposition "0.5.3" - content-type "~1.0.4" - cookie "0.4.0" - cookie-signature "1.0.6" - debug "2.6.9" - depd "~1.1.2" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - finalhandler "~1.1.2" - fresh "0.5.2" - merge-descriptors "1.0.1" - methods "~1.1.2" - on-finished "~2.3.0" - parseurl "~1.3.3" - path-to-regexp "0.1.7" - proxy-addr "~2.0.5" - qs "6.7.0" - range-parser "~1.2.1" - safe-buffer "5.1.2" - send "0.17.1" - serve-static "1.14.1" - setprototypeof "1.1.1" - statuses "~1.5.0" - type-is "~1.6.18" - utils-merge "1.0.1" - vary "~1.1.2" - -extend-shallow@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" - integrity sha1-Ua99YUrZqfYQ6huvu5idaxxWiQ8= - dependencies: - is-extendable "^0.1.0" - -extend-shallow@^3.0.0, extend-shallow@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" - integrity sha1-Jqcarwc7OfshJxcnRhMcJwQCjbg= - dependencies: - assign-symbols "^1.0.0" - is-extendable "^1.0.1" - -extend@~3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.2.tgz#f8b1136b4071fbd8eb140aff858b1019ec2915fa" - integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== - -external-editor@^3.0.3: - version "3.1.0" - resolved "https://registry.yarnpkg.com/external-editor/-/external-editor-3.1.0.tgz#cb03f740befae03ea4d283caed2741a83f335495" - integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== - dependencies: - chardet "^0.7.0" - iconv-lite "^0.4.24" - tmp "^0.0.33" - -extglob@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" - integrity sha512-Nmb6QXkELsuBr24CJSkilo6UHHgbekK5UiZgfE6UHD3Eb27YC6oD+bhcT+tJ6cl8dmsgdQxnWlcry8ksBIBLpw== - dependencies: - array-unique "^0.3.2" - define-property "^1.0.0" - expand-brackets "^2.1.4" - extend-shallow "^2.0.1" - 
fragment-cache "^0.2.1" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -extsprintf@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.3.0.tgz#96918440e3041a7a414f8c52e3c574eb3c3e1e05" - integrity sha1-lpGEQOMEGnpBT4xS48V06zw+HgU= - -extsprintf@^1.2.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/extsprintf/-/extsprintf-1.4.0.tgz#e2689f8f356fad62cca65a3a91c5df5f9551692f" - integrity sha1-4mifjzVvrWLMplo6kcXfX5VRaS8= - -fast-deep-equal@^3.1.1: - version "3.1.3" - resolved "https://registry.yarnpkg.com/fast-deep-equal/-/fast-deep-equal-3.1.3.tgz#3a7d56b559d6cbc3eb512325244e619a65c6c525" - integrity sha512-f3qQ9oQy9j2AhBe/H9VC91wLmKBCCU/gDOnKNAYG5hswO7BLKj09Hc5HYNz9cGI++xlpDCIgDaitVs03ATR84Q== - -fast-json-stable-stringify@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz#874bf69c6f404c2b5d99c481341399fd55892633" - integrity sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw== - -fast-levenshtein@~2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" - integrity sha1-PYpcZog6FqMMqGQ+hR8Zuqd5eRc= - -fb-watchman@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/fb-watchman/-/fb-watchman-2.0.1.tgz#fc84fb39d2709cf3ff6d743706157bb5708a8a85" - integrity sha512-DkPJKQeY6kKwmuMretBhr7G6Vodr7bFwDYTXIkfG1gjvNpaxBTQV3PbXg6bR1c1UP4jPOX0jHUbbHANL9vRjVg== - dependencies: - bser "2.1.1" - -figures@^3.0.0: - version "3.2.0" - resolved "https://registry.yarnpkg.com/figures/-/figures-3.2.0.tgz#625c18bd293c604dc4a8ddb2febf0c88341746af" - integrity sha512-yaduQFRKLXYOGgEn6AZau90j3ggSOyiqXU0F9JZfeXYhNa+Jk4X+s45A2zg5jns87GAFa34BBm2kXw4XpNcbdg== - dependencies: - escape-string-regexp "^1.0.5" - -file-entry-cache@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/file-entry-cache/-/file-entry-cache-5.0.1.tgz#ca0f6efa6dd3d561333fb14515065c2fafdf439c" - integrity sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g== - dependencies: - flat-cache "^2.0.1" - -file-uri-to-path@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd" - integrity sha512-0Zt+s3L7Vf1biwWZ29aARiVYLx7iMGnEUl9x33fbB/j3jR81u/O2LbqK+Bm1CDSNDKVtJ/YjwY7TUd5SkeLQLw== - -fill-range@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" - integrity sha1-1USBHUKPmOsGpj3EAtJAPDKMOPc= - dependencies: - extend-shallow "^2.0.1" - is-number "^3.0.0" - repeat-string "^1.6.1" - to-regex-range "^2.1.0" - -fill-range@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.0.1.tgz#1919a6a7c75fe38b2c7c77e5198535da9acdda40" - integrity sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ== - dependencies: - to-regex-range "^5.0.1" - -finalhandler@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.1.2.tgz#b7e7d000ffd11938d0fdb053506f6ebabe9f587d" - integrity sha512-aAWcW57uxVNrQZqFXjITpW3sIUQmHGG3qSb9mUah9MgMC4NeWhNOlNjXEYq3HjRAvL6arUviZGGJsBg6z0zsWA== - dependencies: - debug "2.6.9" - encodeurl "~1.0.2" - escape-html "~1.0.3" - on-finished "~2.3.0" - parseurl "~1.3.3" - statuses "~1.5.0" - unpipe 
"~1.0.0" - -find-cache-dir@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/find-cache-dir/-/find-cache-dir-2.1.0.tgz#8d0f94cd13fe43c6c7c261a0d86115ca918c05f7" - integrity sha512-Tq6PixE0w/VMFfCgbONnkiQIVol/JJL7nRMi20fqzA4NRs9AfeqMGeRdPi3wIhYkxjeBaWh2rxwapn5Tu3IqOQ== - dependencies: - commondir "^1.0.1" - make-dir "^2.0.0" - pkg-dir "^3.0.0" - -find-up@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-3.0.0.tgz#49169f1d7993430646da61ecc5ae355c21c97b73" - integrity sha512-1yD6RmLI1XBfxugvORwlck6f75tYL+iR0jqwsOrOxMZyGYqUuDhJ0l4AXdO1iX/FTs9cBAMEk1gWSEx1kSbylg== - dependencies: - locate-path "^3.0.0" - -find-up@^4.0.0, find-up@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19" - integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw== - dependencies: - locate-path "^5.0.0" - path-exists "^4.0.0" - -flat-cache@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/flat-cache/-/flat-cache-2.0.1.tgz#5d296d6f04bda44a4630a301413bdbc2ec085ec0" - integrity sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA== - dependencies: - flatted "^2.0.0" - rimraf "2.6.3" - write "1.0.3" - -flatted@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-2.0.2.tgz#4575b21e2bcee7434aa9be662f4b7b5f9c2b5138" - integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA== - -for-in@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" - integrity sha1-gQaNKVqBQuwKxybG4iAMMPttXoA= - -forever-agent@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" - integrity sha1-+8cfDEGt6zf5bFd60e1C2P2sypE= - -form-data@~2.3.2: - version "2.3.3" - resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.3.3.tgz#dcce52c05f644f298c6a7ab936bd724ceffbf3a6" - integrity sha512-1lLKB2Mu3aGP1Q/2eCOx0fNbRMe7XdwktwOruhfqqd0rIJWwN4Dh+E3hrPSlDCXnSR7UtZ1N38rVXm+6+MEhJQ== - dependencies: - asynckit "^0.4.0" - combined-stream "^1.0.6" - mime-types "^2.1.12" - -forwarded@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.2.tgz#98c23dab1175657b8c0573e8ceccd91b0ff18c84" - integrity sha1-mMI9qxF1ZXuMBXPozszZGw/xjIQ= - -fragment-cache@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" - integrity sha1-QpD60n8T6Jvn8zeZxrxaCr//DRk= - dependencies: - map-cache "^0.2.2" - -fresh@0.5.2: - version "0.5.2" - resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.2.tgz#3d8cadd90d976569fa835ab1f8e4b23a105605a7" - integrity sha1-PYyt2Q2XZWn6g1qx+OSyOhBWBac= - -fs-readdir-recursive@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/fs-readdir-recursive/-/fs-readdir-recursive-1.1.0.tgz#e32fc030a2ccee44a6b5371308da54be0b397d27" - integrity sha512-GNanXlVr2pf02+sPN40XN8HG+ePaNcvM0q5mZBd668Obwb0yD5GiUbZOFgwn8kGMY6I3mdyDJzieUy3PTYyTRA== - -fs.realpath@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/fs.realpath/-/fs.realpath-1.0.0.tgz#1504ad2523158caa40db4a2787cb01411994ea4f" - integrity sha1-FQStJSMVjKpA20onh8sBQRmU6k8= - -fsevents@^1.2.7: - version "1.2.13" - resolved 
"https://registry.yarnpkg.com/fsevents/-/fsevents-1.2.13.tgz#f325cb0455592428bcf11b383370ef70e3bfcc38" - integrity sha512-oWb1Z6mkHIskLzEJ/XWX0srkpkTQ7vaopMQkyaEIoq0fmtFVxOthb8cCxeT+p3ynTdkk/RZwbgG4brR5BeWECw== - dependencies: - bindings "^1.5.0" - nan "^2.12.1" - -fsevents@^2.1.2: - version "2.1.3" - resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.1.3.tgz#fb738703ae8d2f9fe900c33836ddebee8b97f23e" - integrity sha512-Auw9a4AxqWpa9GUfj370BMPzzyncfBABW8Mab7BGWBYDj4Isgq+cDKtx0i6u9jcX9pQDnswsaaOTgTmA5pEjuQ== - -function-bind@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d" - integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== - -functional-red-black-tree@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327" - integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc= - -gensync@^1.0.0-beta.1: - version "1.0.0-beta.1" - resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269" - integrity sha512-r8EC6NO1sngH/zdD9fiRDLdcgnbayXah+mLgManTaIZJqEC1MZstmnox8KpnI2/fxQwrp5OpCOYWLp4rBl4Jcg== - -get-caller-file@^2.0.1: - version "2.0.5" - resolved "https://registry.yarnpkg.com/get-caller-file/-/get-caller-file-2.0.5.tgz#4f94412a82db32f36e3b0b9741f8a97feb031f7e" - integrity sha512-DyFP3BM/3YHTQOCUL/w0OZHR0lpKeGrxotcHWcqNEdnltqFwXVfhEBQ94eIo34AfQpo0rGki4cyIiftY06h2Fg== - -get-package-type@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/get-package-type/-/get-package-type-0.1.0.tgz#8de2d803cff44df3bc6c456e6668b36c3926e11a" - integrity sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q== - -get-stream@^4.0.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" - integrity sha512-GMat4EJ5161kIy2HevLlr4luNjBgvmj413KaQA7jt4V8B4RDsfpHk7WQ9GVqfYyyx8OS/L66Kox+rJRNklLK7w== - dependencies: - pump "^3.0.0" - -get-stream@^5.0.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-5.2.0.tgz#4966a1795ee5ace65e706c4b7beb71257d6e22d3" - integrity sha512-nBF+F1rAZVCu/p7rjzgA+Yb4lfYXrpl7a6VmJrU8wF9I1CKvP/QwPNZHnOlwbTkY6dvtFIzFMSyQXbLoTQPRpA== - dependencies: - pump "^3.0.0" - -get-value@^2.0.3, get-value@^2.0.6: - version "2.0.6" - resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" - integrity sha1-3BXKHGcjh8p2vTesCjlbogQqLCg= - -getpass@^0.1.1: - version "0.1.7" - resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" - integrity sha1-Xv+OPmhNVprkyysSgmBOi6YhSfo= - dependencies: - assert-plus "^1.0.0" - -glob-parent@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-3.1.0.tgz#9e6af6299d8d3bd2bd40430832bd113df906c5ae" - integrity sha1-nmr2KZ2NO9K9QEMIMr0RPfkGxa4= - dependencies: - is-glob "^3.1.0" - path-dirname "^1.0.0" - -glob-parent@^5.0.0: - version "5.1.1" - resolved "https://registry.yarnpkg.com/glob-parent/-/glob-parent-5.1.1.tgz#b6c1ef417c4e5663ea498f1c45afac6916bbc229" - integrity sha512-FnI+VGOpnlGHWZxthPGR+QhR78fuiK0sNLkHQv+bL9fQi57lNNdquIbna/WrfROrolq8GK5Ek6BiMwqL/voRYQ== - dependencies: - is-glob "^4.0.1" - -glob@^7.0.0, glob@^7.1.1, glob@^7.1.2, glob@^7.1.3, 
glob@^7.1.4: - version "7.1.6" - resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.6.tgz#141f33b81a7c2492e125594307480c46679278a6" - integrity sha512-LwaxwyZ72Lk7vZINtNNrywX0ZuLyStrdDtabefZKAY5ZGJhVtgdznluResxNmPitE0SAO+O26sWTHeKSI2wMBA== - dependencies: - fs.realpath "^1.0.0" - inflight "^1.0.4" - inherits "2" - minimatch "^3.0.4" - once "^1.3.0" - path-is-absolute "^1.0.0" - -globals@^11.1.0: - version "11.12.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-11.12.0.tgz#ab8795338868a0babd8525758018c2a7eb95c42e" - integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== - -globals@^12.1.0: - version "12.4.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-12.4.0.tgz#a18813576a41b00a24a97e7f815918c2e19925f8" - integrity sha512-BWICuzzDvDoH54NHKCseDanAhE3CeDorgDL5MT6LMXXj2WCnd9UC2szdk4AWLfjdgNBCXLUanXYcpBBKOSWGwg== - dependencies: - type-fest "^0.8.1" - -graceful-fs@^4.1.11, graceful-fs@^4.2.4: - version "4.2.4" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.4.tgz#2256bde14d3632958c465ebc96dc467ca07a29fb" - integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== - -growly@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/growly/-/growly-1.3.0.tgz#f10748cbe76af964b7c96c93c6bcc28af120c081" - integrity sha1-8QdIy+dq+WS3yWyTxrzCivEgwIE= - -har-schema@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/har-schema/-/har-schema-2.0.0.tgz#a94c2224ebcac04782a0d9035521f24735b7ec92" - integrity sha1-qUwiJOvKwEeCoNkDVSHyRzW37JI= - -har-validator@~5.1.3: - version "5.1.5" - resolved "https://registry.yarnpkg.com/har-validator/-/har-validator-5.1.5.tgz#1f0803b9f8cb20c0fa13822df1ecddb36bde1efd" - integrity sha512-nmT2T0lljbxdQZfspsno9hgrG3Uir6Ks5afism62poxqBM6sDnMEuPmzTq8XN0OEwqKLLdh1jQI3qyE66Nzb3w== - dependencies: - ajv "^6.12.3" - har-schema "^2.0.0" - -has-flag@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd" - integrity sha1-tdRU3CGZriJWmfNGfloH87lVuv0= - -has-flag@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b" - integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ== - -has-symbols@^1.0.0, has-symbols@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8" - integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg== - -has-value@^0.3.1: - version "0.3.1" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" - integrity sha1-ex9YutpiyoJ+wKIHgCVlSEWZXh8= - dependencies: - get-value "^2.0.3" - has-values "^0.1.4" - isobject "^2.0.0" - -has-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" - integrity sha1-GLKB2lhbHFxR3vJMkw7SmgvmsXc= - dependencies: - get-value "^2.0.6" - has-values "^1.0.0" - isobject "^3.0.0" - -has-values@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" - integrity sha1-bWHeldkd/Km5oCCJrThL/49it3E= - -has-values@^1.0.0: - version "1.0.0" - resolved 
"https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" - integrity sha1-lbC2P+whRmGab+V/51Yo1aOe/k8= - dependencies: - is-number "^3.0.0" - kind-of "^4.0.0" - -has@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/has/-/has-1.0.3.tgz#722d7cbfc1f6aa8241f16dd814e011e1f41e8796" - integrity sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw== - dependencies: - function-bind "^1.1.1" - -homedir-polyfill@^1.0.1: - version "1.0.3" - resolved "https://registry.yarnpkg.com/homedir-polyfill/-/homedir-polyfill-1.0.3.tgz#743298cef4e5af3e194161fbadcc2151d3a058e8" - integrity sha512-eSmmWE5bZTK2Nou4g0AI3zZ9rswp7GRKoKXS1BLUkvPviOqs4YTN1djQIqrXy9k5gEtdLPy86JjRwsNM9tnDcA== - dependencies: - parse-passwd "^1.0.0" - -hosted-git-info@^2.1.4: - version "2.8.8" - resolved "https://registry.yarnpkg.com/hosted-git-info/-/hosted-git-info-2.8.8.tgz#7539bd4bc1e0e0a895815a2e0262420b12858488" - integrity sha512-f/wzC2QaWBs7t9IYqB4T3sR1xviIViXJRJTWBlx2Gf3g0Xi5vI7Yy4koXQ1c9OYDGHN9sBy1DQ2AB8fqZBWhUg== - -html-encoding-sniffer@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/html-encoding-sniffer/-/html-encoding-sniffer-1.0.2.tgz#e70d84b94da53aa375e11fe3a351be6642ca46f8" - integrity sha512-71lZziiDnsuabfdYiUeWdCVyKuqwWi23L8YeIgV9jSSZHCtb6wB1BKWooH7L3tn4/FuZJMVWyNaIDr4RGmaSYw== - dependencies: - whatwg-encoding "^1.0.1" - -html-escaper@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/html-escaper/-/html-escaper-2.0.2.tgz#dfd60027da36a36dfcbe236262c00a5822681453" - integrity sha512-H2iMtd0I4Mt5eYiapRdIDjp+XzelXQ0tFE4JS7YFwFevXXMmOp9myNrUvCg0D6ws8iqkRPBfKHgbwig1SmlLfg== - -http-errors@1.7.2: - version "1.7.2" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.2.tgz#4f5029cf13239f31036e5b2e55292bcfbcc85c8f" - integrity sha512-uUQBt3H/cSIVfch6i1EuPNy/YsRSOUBXTVfZ+yR7Zjez3qjBz6i9+i4zjNaoqcoFVI4lQJ5plg63TvGfRSDCRg== - dependencies: - depd "~1.1.2" - inherits "2.0.3" - setprototypeof "1.1.1" - statuses ">= 1.5.0 < 2" - toidentifier "1.0.0" - -http-errors@~1.7.2: - version "1.7.3" - resolved "https://registry.yarnpkg.com/http-errors/-/http-errors-1.7.3.tgz#6c619e4f9c60308c38519498c14fbb10aacebb06" - integrity sha512-ZTTX0MWrsQ2ZAhA1cejAwDLycFsd7I7nVtnkT3Ol0aqodaKW+0CTZDQ1uBv5whptCnc8e8HeRRJxRs0kmm/Qfw== - dependencies: - depd "~1.1.2" - inherits "2.0.4" - setprototypeof "1.1.1" - statuses ">= 1.5.0 < 2" - toidentifier "1.0.0" - -http-signature@~1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/http-signature/-/http-signature-1.2.0.tgz#9aecd925114772f3d95b65a60abb8f7c18fbace1" - integrity sha1-muzZJRFHcvPZW2WmCruPfBj7rOE= - dependencies: - assert-plus "^1.0.0" - jsprim "^1.2.2" - sshpk "^1.7.0" - -human-signals@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/human-signals/-/human-signals-1.1.1.tgz#c5b1cd14f50aeae09ab6c59fe63ba3395fe4dfa3" - integrity sha512-SEQu7vl8KjNL2eoGBLF3+wAjpsNfA9XMlXAYj/3EdaNfAlxKthD1xjEQfGOUhllCGGJVNY34bRr6lPINhNjyZw== - -iconv-lite@0.4.24, iconv-lite@^0.4.24: - version "0.4.24" - resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b" - integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA== - dependencies: - safer-buffer ">= 2.1.2 < 3" - -ignore@^4.0.6: - version "4.0.6" - resolved "https://registry.yarnpkg.com/ignore/-/ignore-4.0.6.tgz#750e3db5862087b4737ebac8207ffd1ef27b25fc" - integrity 
sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg== - -import-fresh@^3.0.0: - version "3.2.1" - resolved "https://registry.yarnpkg.com/import-fresh/-/import-fresh-3.2.1.tgz#633ff618506e793af5ac91bf48b72677e15cbe66" - integrity sha512-6e1q1cnWP2RXD9/keSkxHScg508CdXqXWgWBaETNhyuBFz+kUZlKboh+ISK+bU++DmbHimVBrOz/zzPe0sZ3sQ== - dependencies: - parent-module "^1.0.0" - resolve-from "^4.0.0" - -import-local@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/import-local/-/import-local-3.0.2.tgz#a8cfd0431d1de4a2199703d003e3e62364fa6db6" - integrity sha512-vjL3+w0oulAVZ0hBHnxa/Nm5TAurf9YLQJDhqRZyqb+VKGOB6LU8t9H1Nr5CIo16vh9XfJTOoHwU0B71S557gA== - dependencies: - pkg-dir "^4.2.0" - resolve-cwd "^3.0.0" - -imurmurhash@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/imurmurhash/-/imurmurhash-0.1.4.tgz#9218b9b2b928a238b13dc4fb6b6d576f231453ea" - integrity sha1-khi5srkoojixPcT7a21XbyMUU+o= - -inflight@^1.0.4: - version "1.0.6" - resolved "https://registry.yarnpkg.com/inflight/-/inflight-1.0.6.tgz#49bd6331d7d02d0c09bc910a1075ba8165b56df9" - integrity sha1-Sb1jMdfQLQwJvJEKEHW6gWW1bfk= - dependencies: - once "^1.3.0" - wrappy "1" - -inherits@2, inherits@2.0.4, inherits@^2.0.3, inherits@~2.0.3: - version "2.0.4" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.4.tgz#0fa2c64f932917c3433a0ded55363aae37416b7c" - integrity sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ== - -inherits@2.0.3: - version "2.0.3" - resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de" - integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4= - -inquirer@^7.0.0: - version "7.3.3" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-7.3.3.tgz#04d176b2af04afc157a83fd7c100e98ee0aad003" - integrity sha512-JG3eIAj5V9CwcGvuOmoo6LB9kbAYT8HXffUl6memuszlwDC/qvFAJw49XJ5NROSFNPxp3iQg1GqkFhaY/CR0IA== - dependencies: - ansi-escapes "^4.2.1" - chalk "^4.1.0" - cli-cursor "^3.1.0" - cli-width "^3.0.0" - external-editor "^3.0.3" - figures "^3.0.0" - lodash "^4.17.19" - mute-stream "0.0.8" - run-async "^2.4.0" - rxjs "^6.6.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - through "^2.3.6" - -invariant@^2.2.2, invariant@^2.2.4: - version "2.2.4" - resolved "https://registry.yarnpkg.com/invariant/-/invariant-2.2.4.tgz#610f3c92c9359ce1db616e538008d23ff35158e6" - integrity sha512-phJfQVBuaJM5raOpJjSfkiD6BpbCE4Ns//LaXl6wGYtUBY83nWS6Rf9tXm2e8VaK60JEjYldbPif/A2B1C2gNA== - dependencies: - loose-envify "^1.0.0" - -ip-regex@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/ip-regex/-/ip-regex-2.1.0.tgz#fa78bf5d2e6913c911ce9f819ee5146bb6d844e9" - integrity sha1-+ni/XS5pE8kRzp+BnuUUa7bYROk= - -ipaddr.js@1.9.1: - version "1.9.1" - resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.9.1.tgz#bff38543eeb8984825079ff3a2a8e6cbd46781b3" - integrity sha512-0KI/607xoxSToH7GjN1FfSbLoU0+btTicjsQSWQlh/hZykN8KpmMf7uYwPW3R+akZ6R/w18ZlXSHBYXiYUPO3g== - -is-accessor-descriptor@^0.1.6: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" - integrity sha1-qeEss66Nh2cn7u84Q/igiXtcmNY= - dependencies: - kind-of "^3.0.2" - -is-accessor-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" - integrity 
sha512-m5hnHTkcVsPfqx3AKlyttIPb7J+XykHvJP2B9bZDjlhLIoEq4XoK64Vg7boZlVWYK6LUY94dYPEE7Lh0ZkZKcQ== - dependencies: - kind-of "^6.0.0" - -is-arrayish@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" - integrity sha1-d8mYQFJ6qOyxqLppe4BkWnqSap0= - -is-binary-path@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-binary-path/-/is-binary-path-1.0.1.tgz#75f16642b480f187a711c814161fd3a4a7655898" - integrity sha1-dfFmQrSA8YenEcgUFh/TpKdlWJg= - dependencies: - binary-extensions "^1.0.0" - -is-buffer@^1.1.5: - version "1.1.6" - resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be" - integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w== - -is-callable@^1.1.4, is-callable@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.0.tgz#83336560b54a38e35e3a2df7afd0454d691468bb" - integrity sha512-pyVD9AaGLxtg6srb2Ng6ynWJqkHU9bEM087AKck0w8QwDarTfNcpIYoU8x8Hv2Icm8u6kFJM18Dag8lyqGkviw== - -is-ci@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-2.0.0.tgz#6bc6334181810e04b5c22b3d589fdca55026404c" - integrity sha512-YfJT7rkpQB0updsdHLGWrvhBJfcfzNNawYDNIyQXJz0IViGf75O8EBPKSdvw2rF+LGCsX4FZ8tcr3b19LcZq4w== - dependencies: - ci-info "^2.0.0" - -is-data-descriptor@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" - integrity sha1-C17mSDiOLIYCgueT8YVv7D8wG1Y= - dependencies: - kind-of "^3.0.2" - -is-data-descriptor@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" - integrity sha512-jbRXy1FmtAoCjQkVmIVYwuuqDFUbaOeDjmed1tOGPrsMhtJA4rD9tkgA0F1qJ3gRFRXcHYVkdeaP50Q5rE/jLQ== - dependencies: - kind-of "^6.0.0" - -is-date-object@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-date-object/-/is-date-object-1.0.2.tgz#bda736f2cd8fd06d32844e7743bfa7494c3bfd7e" - integrity sha512-USlDT524woQ08aoZFzh3/Z6ch9Y/EWXEHQ/AaRN0SkKq4t2Jw2R2339tSXmwuVoY7LLlBCbOIlx2myP/L5zk0g== - -is-descriptor@^0.1.0: - version "0.1.6" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" - integrity sha512-avDYr0SB3DwO9zsMov0gKCESFYqCnE4hq/4z3TdUlukEy5t9C0YRq7HLrsN52NAcqXKaepeCD0n+B0arnVG3Hg== - dependencies: - is-accessor-descriptor "^0.1.6" - is-data-descriptor "^0.1.4" - kind-of "^5.0.0" - -is-descriptor@^1.0.0, is-descriptor@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" - integrity sha512-2eis5WqQGV7peooDyLmNEPUrps9+SXX5c9pL3xEB+4e9HnGuDa7mB7kHxHw4CbqS9k1T2hOH3miL8n8WtiYVtg== - dependencies: - is-accessor-descriptor "^1.0.0" - is-data-descriptor "^1.0.0" - kind-of "^6.0.2" - -is-docker@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-docker/-/is-docker-2.1.1.tgz#4125a88e44e450d384e09047ede71adc2d144156" - integrity sha512-ZOoqiXfEwtGknTiuDEy8pN2CfE3TxMHprvNer1mXiqwkOT77Rw3YVrUQ52EqAOU3QAWDQ+bQdx7HJzrv7LS2Hw== - -is-extendable@^0.1.0, is-extendable@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" - integrity 
sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik= - -is-extendable@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" - integrity sha512-arnXMxT1hhoKo9k1LZdmlNyJdDDfy2v0fXjFlmok4+i8ul/6WlbVge9bhM74OpNPQPMGUToDtz+KXa1PneJxOA== - dependencies: - is-plain-object "^2.0.4" - -is-extglob@^2.1.0, is-extglob@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-2.1.1.tgz#a88c02535791f02ed37c76a1b9ea9773c833f8c2" - integrity sha1-qIwCU1eR8C7TfHahueqXc8gz+MI= - -is-fullwidth-code-point@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz#a3b30a5c4f199183167aaab93beefae3ddfb654f" - integrity sha1-o7MKXE8ZkYMWeqq5O+764937ZU8= - -is-fullwidth-code-point@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz#f116f8064fe90b3f7844a38997c0b75051269f1d" - integrity sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg== - -is-generator-fn@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/is-generator-fn/-/is-generator-fn-2.1.0.tgz#7d140adc389aaf3011a8f2a2a4cfa6faadffb118" - integrity sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ== - -is-glob@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-3.1.0.tgz#7ba5ae24217804ac70707b96922567486cc3e84a" - integrity sha1-e6WuJCF4BKxwcHuWkiVnSGzD6Eo= - dependencies: - is-extglob "^2.1.0" - -is-glob@^4.0.0, is-glob@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/is-glob/-/is-glob-4.0.1.tgz#7567dbe9f2f5e2467bc77ab83c4a29482407a5dc" - integrity sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg== - dependencies: - is-extglob "^2.1.1" - -is-number@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-3.0.0.tgz#24fd6201a4782cf50561c810276afc7d12d71195" - integrity sha1-JP1iAaR4LPUFYcgQJ2r8fRLXEZU= - dependencies: - kind-of "^3.0.2" - -is-number@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" - integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== - -is-plain-object@^2.0.3, is-plain-object@^2.0.4: - version "2.0.4" - resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" - integrity sha512-h5PpgXkWitc38BBMYawTYMWJHFZJVnBquFE57xFpjB8pJFiF6gZ+bU+WyI/yqXiFR5mdLsgYNaPe8uao6Uv9Og== - dependencies: - isobject "^3.0.1" - -is-regex@^1.1.0: - version "1.1.1" - resolved "https://registry.yarnpkg.com/is-regex/-/is-regex-1.1.1.tgz#c6f98aacc546f6cec5468a07b7b153ab564a57b9" - integrity sha512-1+QkEcxiLlB7VEyFtyBg94e08OAsvq7FUBgApTq/w2ymCLyKJgDPsybBENVtA7XCQEgEXxKPonG+mvYRxh/LIg== - dependencies: - has-symbols "^1.0.1" - -is-stream@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" - integrity sha1-EtSj3U5o4Lec6428hBc66A2RykQ= - -is-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-2.0.0.tgz#bde9c32680d6fae04129d6ac9d921ce7815f78e3" - integrity sha512-XCoy+WlUr7d1+Z8GgSuXmpuUFC9fOhRXglJMx+dwLKTkL44Cjd4W1Z5P+BQZpr+cR93aGP4S/s7Ftw6Nd/kiEw== - 
-is-symbol@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/is-symbol/-/is-symbol-1.0.3.tgz#38e1014b9e6329be0de9d24a414fd7441ec61937" - integrity sha512-OwijhaRSgqvhm/0ZdAcXNZt9lYdKFpcRDT5ULUuYXPoT794UNOdU+gpT6Rzo7b4V2HUl/op6GqY894AZwv9faQ== - dependencies: - has-symbols "^1.0.1" - -is-typedarray@^1.0.0, is-typedarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/is-typedarray/-/is-typedarray-1.0.0.tgz#e479c80858df0c1b11ddda6940f96011fcda4a9a" - integrity sha1-5HnICFjfDBsR3dppQPlgEfzaSpo= - -is-windows@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" - integrity sha512-eXK1UInq2bPmjyX6e3VHIzMLobc4J94i4AWn+Hpq3OU5KkrRC96OAcR3PRJ/pGu6m8TRnBHP9dkXQVsT/COVIA== - -is-wsl@^2.1.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/is-wsl/-/is-wsl-2.2.0.tgz#74a4c76e77ca9fd3f932f290c17ea326cd157271" - integrity sha512-fKzAra0rGJUUBwGBgNkHZuToZcn+TtXHpeCgmkMJMMYx1sQDYaCSyjJBSCa2nH1DGm7s3n1oBnohoVTBaN7Lww== - dependencies: - is-docker "^2.0.0" - -isarray@1.0.0, isarray@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isarray/-/isarray-1.0.0.tgz#bb935d48582cba168c06834957a54a3e07124f11" - integrity sha1-u5NdSFgsuhaMBoNJV6VKPgcSTxE= - -isexe@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/isexe/-/isexe-2.0.0.tgz#e8fbf374dc556ff8947a10dcb0572d633f2cfa10" - integrity sha1-6PvzdNxVb/iUehDcsFctYz8s+hA= - -isobject@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-2.1.0.tgz#f065561096a3f1da2ef46272f815c840d87e0c89" - integrity sha1-8GVWEJaj8dou9GJy+BXIQNh+DIk= - dependencies: - isarray "1.0.0" - -isobject@^3.0.0, isobject@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" - integrity sha1-TkMekrEalzFjaqH5yNHMvP2reN8= - -isstream@~0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" - integrity sha1-R+Y/evVa+m+S4VAOaQ64uFKcCZo= - -istanbul-lib-coverage@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-coverage/-/istanbul-lib-coverage-3.0.0.tgz#f5944a37c70b550b02a78a5c3b2055b280cec8ec" - integrity sha512-UiUIqxMgRDET6eR+o5HbfRYP1l0hqkWOs7vNxC/mggutCMUIhWMm8gAHb8tHlyfD3/l6rlgNA5cKdDzEAf6hEg== - -istanbul-lib-instrument@^4.0.0: - version "4.0.3" - resolved "https://registry.yarnpkg.com/istanbul-lib-instrument/-/istanbul-lib-instrument-4.0.3.tgz#873c6fff897450118222774696a3f28902d77c1d" - integrity sha512-BXgQl9kf4WTCPCCpmFGoJkz/+uhvm7h7PFKUYxh7qarQd3ER33vHG//qaE8eN25l07YqZPpHXU9I09l/RD5aGQ== - dependencies: - "@babel/core" "^7.7.5" - "@istanbuljs/schema" "^0.1.2" - istanbul-lib-coverage "^3.0.0" - semver "^6.3.0" - -istanbul-lib-report@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-report/-/istanbul-lib-report-3.0.0.tgz#7518fe52ea44de372f460a76b5ecda9ffb73d8a6" - integrity sha512-wcdi+uAKzfiGT2abPpKZ0hSU1rGQjUQnLvtY5MpQ7QCTahD3VODhcu4wcfY1YtkGaDD5yuydOLINXsfbus9ROw== - dependencies: - istanbul-lib-coverage "^3.0.0" - make-dir "^3.0.0" - supports-color "^7.1.0" - -istanbul-lib-source-maps@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/istanbul-lib-source-maps/-/istanbul-lib-source-maps-4.0.0.tgz#75743ce6d96bb86dc7ee4352cf6366a23f0b1ad9" - integrity sha512-c16LpFRkR8vQXyHZ5nLpY35JZtzj1PQY1iZmesUbf1FZHbIupcWfjgOXBY9YHkLEQ6puz1u4Dgj6qmU/DisrZg== - 
dependencies: - debug "^4.1.1" - istanbul-lib-coverage "^3.0.0" - source-map "^0.6.1" - -istanbul-reports@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/istanbul-reports/-/istanbul-reports-3.0.2.tgz#d593210e5000683750cb09fc0644e4b6e27fd53b" - integrity sha512-9tZvz7AiR3PEDNGiV9vIouQ/EAcqMXFmkcA1CDFTwOB98OZVDL0PH9glHotf5Ugp6GCOTypfzGWI/OqjWNCRUw== - dependencies: - html-escaper "^2.0.0" - istanbul-lib-report "^3.0.0" - -jest-changed-files@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-25.5.0.tgz#141cc23567ceb3f534526f8614ba39421383634c" - integrity sha512-EOw9QEqapsDT7mKF162m8HFzRPbmP8qJQny6ldVOdOVBz3ACgPm/1nAn5fPQ/NDaYhX/AHkrGwwkCncpAVSXcw== - dependencies: - "@jest/types" "^25.5.0" - execa "^3.2.0" - throat "^5.0.0" - -jest-cli@^25.5.4: - version "25.5.4" - resolved "https://registry.yarnpkg.com/jest-cli/-/jest-cli-25.5.4.tgz#b9f1a84d1301a92c5c217684cb79840831db9f0d" - integrity sha512-rG8uJkIiOUpnREh1768/N3n27Cm+xPFkSNFO91tgg+8o2rXeVLStz+vkXkGr4UtzH6t1SNbjwoiswd7p4AhHTw== - dependencies: - "@jest/core" "^25.5.4" - "@jest/test-result" "^25.5.0" - "@jest/types" "^25.5.0" - chalk "^3.0.0" - exit "^0.1.2" - graceful-fs "^4.2.4" - import-local "^3.0.2" - is-ci "^2.0.0" - jest-config "^25.5.4" - jest-util "^25.5.0" - jest-validate "^25.5.0" - prompts "^2.0.1" - realpath-native "^2.0.0" - yargs "^15.3.1" - -jest-config@^25.5.4: - version "25.5.4" - resolved "https://registry.yarnpkg.com/jest-config/-/jest-config-25.5.4.tgz#38e2057b3f976ef7309b2b2c8dcd2a708a67f02c" - integrity sha512-SZwR91SwcdK6bz7Gco8qL7YY2sx8tFJYzvg216DLihTWf+LKY/DoJXpM9nTzYakSyfblbqeU48p/p7Jzy05Atg== - dependencies: - "@babel/core" "^7.1.0" - "@jest/test-sequencer" "^25.5.4" - "@jest/types" "^25.5.0" - babel-jest "^25.5.1" - chalk "^3.0.0" - deepmerge "^4.2.2" - glob "^7.1.1" - graceful-fs "^4.2.4" - jest-environment-jsdom "^25.5.0" - jest-environment-node "^25.5.0" - jest-get-type "^25.2.6" - jest-jasmine2 "^25.5.4" - jest-regex-util "^25.2.6" - jest-resolve "^25.5.1" - jest-util "^25.5.0" - jest-validate "^25.5.0" - micromatch "^4.0.2" - pretty-format "^25.5.0" - realpath-native "^2.0.0" - -jest-diff@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-25.5.0.tgz#1dd26ed64f96667c068cef026b677dfa01afcfa9" - integrity sha512-z1kygetuPiREYdNIumRpAHY6RXiGmp70YHptjdaxTWGmA085W3iCnXNx0DhflK3vwrKmrRWyY1wUpkPMVxMK7A== - dependencies: - chalk "^3.0.0" - diff-sequences "^25.2.6" - jest-get-type "^25.2.6" - pretty-format "^25.5.0" - -jest-docblock@^25.3.0: - version "25.3.0" - resolved "https://registry.yarnpkg.com/jest-docblock/-/jest-docblock-25.3.0.tgz#8b777a27e3477cd77a168c05290c471a575623ef" - integrity sha512-aktF0kCar8+zxRHxQZwxMy70stc9R1mOmrLsT5VO3pIT0uzGRSDAXxSlz4NqQWpuLjPpuMhPRl7H+5FRsvIQAg== - dependencies: - detect-newline "^3.0.0" - -jest-each@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-each/-/jest-each-25.5.0.tgz#0c3c2797e8225cb7bec7e4d249dcd96b934be516" - integrity sha512-QBogUxna3D8vtiItvn54xXde7+vuzqRrEeaw8r1s+1TG9eZLVJE5ZkKoSUlqFwRjnlaA4hyKGiu9OlkFIuKnjA== - dependencies: - "@jest/types" "^25.5.0" - chalk "^3.0.0" - jest-get-type "^25.2.6" - jest-util "^25.5.0" - pretty-format "^25.5.0" - -jest-environment-jsdom@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-environment-jsdom/-/jest-environment-jsdom-25.5.0.tgz#dcbe4da2ea997707997040ecf6e2560aec4e9834" - integrity 
sha512-7Jr02ydaq4jaWMZLY+Skn8wL5nVIYpWvmeatOHL3tOcV3Zw8sjnPpx+ZdeBfc457p8jCR9J6YCc+Lga0oIy62A== - dependencies: - "@jest/environment" "^25.5.0" - "@jest/fake-timers" "^25.5.0" - "@jest/types" "^25.5.0" - jest-mock "^25.5.0" - jest-util "^25.5.0" - jsdom "^15.2.1" - -jest-environment-node@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-environment-node/-/jest-environment-node-25.5.0.tgz#0f55270d94804902988e64adca37c6ce0f7d07a1" - integrity sha512-iuxK6rQR2En9EID+2k+IBs5fCFd919gVVK5BeND82fYeLWPqvRcFNPKu9+gxTwfB5XwBGBvZ0HFQa+cHtIoslA== - dependencies: - "@jest/environment" "^25.5.0" - "@jest/fake-timers" "^25.5.0" - "@jest/types" "^25.5.0" - jest-mock "^25.5.0" - jest-util "^25.5.0" - semver "^6.3.0" - -jest-get-type@^25.2.6: - version "25.2.6" - resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-25.2.6.tgz#0b0a32fab8908b44d508be81681487dbabb8d877" - integrity sha512-DxjtyzOHjObRM+sM1knti6or+eOgcGU4xVSb2HNP1TqO4ahsT+rqZg+nyqHWJSvWgKC5cG3QjGFBqxLghiF/Ig== - -jest-haste-map@^25.5.1: - version "25.5.1" - resolved "https://registry.yarnpkg.com/jest-haste-map/-/jest-haste-map-25.5.1.tgz#1df10f716c1d94e60a1ebf7798c9fb3da2620943" - integrity sha512-dddgh9UZjV7SCDQUrQ+5t9yy8iEgKc1AKqZR9YDww8xsVOtzPQSMVLDChc21+g29oTRexb9/B0bIlZL+sWmvAQ== - dependencies: - "@jest/types" "^25.5.0" - "@types/graceful-fs" "^4.1.2" - anymatch "^3.0.3" - fb-watchman "^2.0.0" - graceful-fs "^4.2.4" - jest-serializer "^25.5.0" - jest-util "^25.5.0" - jest-worker "^25.5.0" - micromatch "^4.0.2" - sane "^4.0.3" - walker "^1.0.7" - which "^2.0.2" - optionalDependencies: - fsevents "^2.1.2" - -jest-jasmine2@^25.5.4: - version "25.5.4" - resolved "https://registry.yarnpkg.com/jest-jasmine2/-/jest-jasmine2-25.5.4.tgz#66ca8b328fb1a3c5364816f8958f6970a8526968" - integrity sha512-9acbWEfbmS8UpdcfqnDO+uBUgKa/9hcRh983IHdM+pKmJPL77G0sWAAK0V0kr5LK3a8cSBfkFSoncXwQlRZfkQ== - dependencies: - "@babel/traverse" "^7.1.0" - "@jest/environment" "^25.5.0" - "@jest/source-map" "^25.5.0" - "@jest/test-result" "^25.5.0" - "@jest/types" "^25.5.0" - chalk "^3.0.0" - co "^4.6.0" - expect "^25.5.0" - is-generator-fn "^2.0.0" - jest-each "^25.5.0" - jest-matcher-utils "^25.5.0" - jest-message-util "^25.5.0" - jest-runtime "^25.5.4" - jest-snapshot "^25.5.1" - jest-util "^25.5.0" - pretty-format "^25.5.0" - throat "^5.0.0" - -jest-leak-detector@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-leak-detector/-/jest-leak-detector-25.5.0.tgz#2291c6294b0ce404241bb56fe60e2d0c3e34f0bb" - integrity sha512-rV7JdLsanS8OkdDpZtgBf61L5xZ4NnYLBq72r6ldxahJWWczZjXawRsoHyXzibM5ed7C2QRjpp6ypgwGdKyoVA== - dependencies: - jest-get-type "^25.2.6" - pretty-format "^25.5.0" - -jest-matcher-utils@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-25.5.0.tgz#fbc98a12d730e5d2453d7f1ed4a4d948e34b7867" - integrity sha512-VWI269+9JS5cpndnpCwm7dy7JtGQT30UHfrnM3mXl22gHGt/b7NkjBqXfbhZ8V4B7ANUsjK18PlSBmG0YH7gjw== - dependencies: - chalk "^3.0.0" - jest-diff "^25.5.0" - jest-get-type "^25.2.6" - pretty-format "^25.5.0" - -jest-message-util@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-25.5.0.tgz#ea11d93204cc7ae97456e1d8716251185b8880ea" - integrity sha512-ezddz3YCT/LT0SKAmylVyWWIGYoKHOFOFXx3/nA4m794lfVUskMcwhip6vTgdVrOtYdjeQeis2ypzes9mZb4EA== - dependencies: - "@babel/code-frame" "^7.0.0" - "@jest/types" "^25.5.0" - "@types/stack-utils" "^1.0.1" - chalk "^3.0.0" - graceful-fs "^4.2.4" - micromatch 
"^4.0.2" - slash "^3.0.0" - stack-utils "^1.0.1" - -jest-mock@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-mock/-/jest-mock-25.5.0.tgz#a91a54dabd14e37ecd61665d6b6e06360a55387a" - integrity sha512-eXWuTV8mKzp/ovHc5+3USJMYsTBhyQ+5A1Mak35dey/RG8GlM4YWVylZuGgVXinaW6tpvk/RSecmF37FKUlpXA== - dependencies: - "@jest/types" "^25.5.0" - -jest-pnp-resolver@^1.2.1: - version "1.2.2" - resolved "https://registry.yarnpkg.com/jest-pnp-resolver/-/jest-pnp-resolver-1.2.2.tgz#b704ac0ae028a89108a4d040b3f919dfddc8e33c" - integrity sha512-olV41bKSMm8BdnuMsewT4jqlZ8+3TCARAXjZGT9jcoSnrfUnRCqnMoF9XEeoWjbzObpqF9dRhHQj0Xb9QdF6/w== - -jest-regex-util@^25.2.6: - version "25.2.6" - resolved "https://registry.yarnpkg.com/jest-regex-util/-/jest-regex-util-25.2.6.tgz#d847d38ba15d2118d3b06390056028d0f2fd3964" - integrity sha512-KQqf7a0NrtCkYmZZzodPftn7fL1cq3GQAFVMn5Hg8uKx/fIenLEobNanUxb7abQ1sjADHBseG/2FGpsv/wr+Qw== - -jest-resolve-dependencies@^25.5.4: - version "25.5.4" - resolved "https://registry.yarnpkg.com/jest-resolve-dependencies/-/jest-resolve-dependencies-25.5.4.tgz#85501f53957c8e3be446e863a74777b5a17397a7" - integrity sha512-yFmbPd+DAQjJQg88HveObcGBA32nqNZ02fjYmtL16t1xw9bAttSn5UGRRhzMHIQbsep7znWvAvnD4kDqOFM0Uw== - dependencies: - "@jest/types" "^25.5.0" - jest-regex-util "^25.2.6" - jest-snapshot "^25.5.1" - -jest-resolve@^25.5.1: - version "25.5.1" - resolved "https://registry.yarnpkg.com/jest-resolve/-/jest-resolve-25.5.1.tgz#0e6fbcfa7c26d2a5fe8f456088dc332a79266829" - integrity sha512-Hc09hYch5aWdtejsUZhA+vSzcotf7fajSlPA6EZPE1RmPBAD39XtJhvHWFStid58iit4IPDLI/Da4cwdDmAHiQ== - dependencies: - "@jest/types" "^25.5.0" - browser-resolve "^1.11.3" - chalk "^3.0.0" - graceful-fs "^4.2.4" - jest-pnp-resolver "^1.2.1" - read-pkg-up "^7.0.1" - realpath-native "^2.0.0" - resolve "^1.17.0" - slash "^3.0.0" - -jest-runner@^25.5.4: - version "25.5.4" - resolved "https://registry.yarnpkg.com/jest-runner/-/jest-runner-25.5.4.tgz#ffec5df3875da5f5c878ae6d0a17b8e4ecd7c71d" - integrity sha512-V/2R7fKZo6blP8E9BL9vJ8aTU4TH2beuqGNxHbxi6t14XzTb+x90B3FRgdvuHm41GY8ch4xxvf0ATH4hdpjTqg== - dependencies: - "@jest/console" "^25.5.0" - "@jest/environment" "^25.5.0" - "@jest/test-result" "^25.5.0" - "@jest/types" "^25.5.0" - chalk "^3.0.0" - exit "^0.1.2" - graceful-fs "^4.2.4" - jest-config "^25.5.4" - jest-docblock "^25.3.0" - jest-haste-map "^25.5.1" - jest-jasmine2 "^25.5.4" - jest-leak-detector "^25.5.0" - jest-message-util "^25.5.0" - jest-resolve "^25.5.1" - jest-runtime "^25.5.4" - jest-util "^25.5.0" - jest-worker "^25.5.0" - source-map-support "^0.5.6" - throat "^5.0.0" - -jest-runtime@^25.5.4: - version "25.5.4" - resolved "https://registry.yarnpkg.com/jest-runtime/-/jest-runtime-25.5.4.tgz#dc981fe2cb2137abcd319e74ccae7f7eeffbfaab" - integrity sha512-RWTt8LeWh3GvjYtASH2eezkc8AehVoWKK20udV6n3/gC87wlTbE1kIA+opCvNWyyPeBs6ptYsc6nyHUb1GlUVQ== - dependencies: - "@jest/console" "^25.5.0" - "@jest/environment" "^25.5.0" - "@jest/globals" "^25.5.2" - "@jest/source-map" "^25.5.0" - "@jest/test-result" "^25.5.0" - "@jest/transform" "^25.5.1" - "@jest/types" "^25.5.0" - "@types/yargs" "^15.0.0" - chalk "^3.0.0" - collect-v8-coverage "^1.0.0" - exit "^0.1.2" - glob "^7.1.3" - graceful-fs "^4.2.4" - jest-config "^25.5.4" - jest-haste-map "^25.5.1" - jest-message-util "^25.5.0" - jest-mock "^25.5.0" - jest-regex-util "^25.2.6" - jest-resolve "^25.5.1" - jest-snapshot "^25.5.1" - jest-util "^25.5.0" - jest-validate "^25.5.0" - realpath-native "^2.0.0" - slash "^3.0.0" - strip-bom "^4.0.0" - yargs "^15.3.1" - 
-jest-serializer@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-serializer/-/jest-serializer-25.5.0.tgz#a993f484e769b4ed54e70e0efdb74007f503072b" - integrity sha512-LxD8fY1lByomEPflwur9o4e2a5twSQ7TaVNLlFUuToIdoJuBt8tzHfCsZ42Ok6LkKXWzFWf3AGmheuLAA7LcCA== - dependencies: - graceful-fs "^4.2.4" - -jest-snapshot@^25.5.1: - version "25.5.1" - resolved "https://registry.yarnpkg.com/jest-snapshot/-/jest-snapshot-25.5.1.tgz#1a2a576491f9961eb8d00c2e5fd479bc28e5ff7f" - integrity sha512-C02JE1TUe64p2v1auUJ2ze5vcuv32tkv9PyhEb318e8XOKF7MOyXdJ7kdjbvrp3ChPLU2usI7Rjxs97Dj5P0uQ== - dependencies: - "@babel/types" "^7.0.0" - "@jest/types" "^25.5.0" - "@types/prettier" "^1.19.0" - chalk "^3.0.0" - expect "^25.5.0" - graceful-fs "^4.2.4" - jest-diff "^25.5.0" - jest-get-type "^25.2.6" - jest-matcher-utils "^25.5.0" - jest-message-util "^25.5.0" - jest-resolve "^25.5.1" - make-dir "^3.0.0" - natural-compare "^1.4.0" - pretty-format "^25.5.0" - semver "^6.3.0" - -jest-util@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-25.5.0.tgz#31c63b5d6e901274d264a4fec849230aa3fa35b0" - integrity sha512-KVlX+WWg1zUTB9ktvhsg2PXZVdkI1NBevOJSkTKYAyXyH4QSvh+Lay/e/v+bmaFfrkfx43xD8QTfgobzlEXdIA== - dependencies: - "@jest/types" "^25.5.0" - chalk "^3.0.0" - graceful-fs "^4.2.4" - is-ci "^2.0.0" - make-dir "^3.0.0" - -jest-validate@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-validate/-/jest-validate-25.5.0.tgz#fb4c93f332c2e4cf70151a628e58a35e459a413a" - integrity sha512-okUFKqhZIpo3jDdtUXUZ2LxGUZJIlfdYBvZb1aczzxrlyMlqdnnws9MOxezoLGhSaFc2XYaHNReNQfj5zPIWyQ== - dependencies: - "@jest/types" "^25.5.0" - camelcase "^5.3.1" - chalk "^3.0.0" - jest-get-type "^25.2.6" - leven "^3.1.0" - pretty-format "^25.5.0" - -jest-watcher@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-watcher/-/jest-watcher-25.5.0.tgz#d6110d101df98badebe435003956fd4a465e8456" - integrity sha512-XrSfJnVASEl+5+bb51V0Q7WQx65dTSk7NL4yDdVjPnRNpM0hG+ncFmDYJo9O8jaSRcAitVbuVawyXCRoxGrT5Q== - dependencies: - "@jest/test-result" "^25.5.0" - "@jest/types" "^25.5.0" - ansi-escapes "^4.2.1" - chalk "^3.0.0" - jest-util "^25.5.0" - string-length "^3.1.0" - -jest-worker@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-25.5.0.tgz#2611d071b79cea0f43ee57a3d118593ac1547db1" - integrity sha512-/dsSmUkIy5EBGfv/IjjqmFxrNAUpBERfGs1oHROyD7yxjG/w+t0GOJDX8O1k32ySmd7+a5IhnJU2qQFcJ4n1vw== - dependencies: - merge-stream "^2.0.0" - supports-color "^7.0.0" - -jest@^25.5.2: - version "25.5.4" - resolved "https://registry.yarnpkg.com/jest/-/jest-25.5.4.tgz#f21107b6489cfe32b076ce2adcadee3587acb9db" - integrity sha512-hHFJROBTqZahnO+X+PMtT6G2/ztqAZJveGqz//FnWWHurizkD05PQGzRZOhF3XP6z7SJmL+5tCfW8qV06JypwQ== - dependencies: - "@jest/core" "^25.5.4" - import-local "^3.0.2" - jest-cli "^25.5.4" - -"js-tokens@^3.0.0 || ^4.0.0", js-tokens@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499" - integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== - -js-yaml@^3.13.1: - version "3.14.0" - resolved "https://registry.yarnpkg.com/js-yaml/-/js-yaml-3.14.0.tgz#a7a34170f26a21bb162424d8adacb4113a69e482" - integrity sha512-/4IbIeHcD9VMHFqDR/gQ7EdZdLimOvW2DdcxFjdyyZ9NsbS+ccrXqVWDtab/lRl5AlUqmpBx8EhPaWR+OtY17A== - dependencies: - argparse "^1.0.7" - esprima "^4.0.0" - -jsbn@~0.1.0: - version "0.1.1" - resolved 
"https://registry.yarnpkg.com/jsbn/-/jsbn-0.1.1.tgz#a5e654c2e5a2deb5f201d96cefbca80c0ef2f513" - integrity sha1-peZUwuWi3rXyAdls77yoDA7y9RM= - -jsdom@^15.2.1: - version "15.2.1" - resolved "https://registry.yarnpkg.com/jsdom/-/jsdom-15.2.1.tgz#d2feb1aef7183f86be521b8c6833ff5296d07ec5" - integrity sha512-fAl1W0/7T2G5vURSyxBzrJ1LSdQn6Tr5UX/xD4PXDx/PDgwygedfW6El/KIj3xJ7FU61TTYnc/l/B7P49Eqt6g== - dependencies: - abab "^2.0.0" - acorn "^7.1.0" - acorn-globals "^4.3.2" - array-equal "^1.0.0" - cssom "^0.4.1" - cssstyle "^2.0.0" - data-urls "^1.1.0" - domexception "^1.0.1" - escodegen "^1.11.1" - html-encoding-sniffer "^1.0.2" - nwsapi "^2.2.0" - parse5 "5.1.0" - pn "^1.1.0" - request "^2.88.0" - request-promise-native "^1.0.7" - saxes "^3.1.9" - symbol-tree "^3.2.2" - tough-cookie "^3.0.1" - w3c-hr-time "^1.0.1" - w3c-xmlserializer "^1.1.2" - webidl-conversions "^4.0.2" - whatwg-encoding "^1.0.5" - whatwg-mimetype "^2.3.0" - whatwg-url "^7.0.0" - ws "^7.0.0" - xml-name-validator "^3.0.0" - -jsesc@^2.5.1: - version "2.5.2" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-2.5.2.tgz#80564d2e483dacf6e8ef209650a67df3f0c283a4" - integrity sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA== - -jsesc@~0.5.0: - version "0.5.0" - resolved "https://registry.yarnpkg.com/jsesc/-/jsesc-0.5.0.tgz#e7dee66e35d6fc16f710fe91d5cf69f70f08911d" - integrity sha1-597mbjXW/Bb3EP6R1c9p9w8IkR0= - -json-parse-even-better-errors@^2.3.0: - version "2.3.1" - resolved "https://registry.yarnpkg.com/json-parse-even-better-errors/-/json-parse-even-better-errors-2.3.1.tgz#7c47805a94319928e05777405dc12e1f7a4ee02d" - integrity sha512-xyFwyhro/JEof6Ghe2iz2NcXoj2sloNsWr/XsERDK/oiPCfaNhl5ONfp+jQdAZRQQ0IJWNzH9zIZF7li91kh2w== - -json-schema-traverse@^0.4.1: - version "0.4.1" - resolved "https://registry.yarnpkg.com/json-schema-traverse/-/json-schema-traverse-0.4.1.tgz#69f6a87d9513ab8bb8fe63bdb0979c448e684660" - integrity sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg== - -json-schema@0.2.3: - version "0.2.3" - resolved "https://registry.yarnpkg.com/json-schema/-/json-schema-0.2.3.tgz#b480c892e59a2f05954ce727bd3f2a4e882f9e13" - integrity sha1-tIDIkuWaLwWVTOcnvT8qTogvnhM= - -json-stable-stringify-without-jsonify@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz#9db7b59496ad3f3cfef30a75142d2d930ad72651" - integrity sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE= - -json-stringify-safe@~5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz#1296a2d58fd45f19a0f6ce01d65701e2c735b6eb" - integrity sha1-Epai1Y/UXxmg9s4B1lcB4sc1tus= - -json5@^2.1.2: - version "2.1.3" - resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.3.tgz#c9b0f7fa9233bfe5807fe66fcf3a5617ed597d43" - integrity sha512-KXPvOm8K9IJKFM0bmdn8QXh7udDh1g/giieX0NLCaMnb4hEiVFqnop2ImTXCc5e0/oHz3LTqmHGtExn5hfMkOA== - dependencies: - minimist "^1.2.5" - -jsprim@^1.2.2: - version "1.4.1" - resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2" - integrity sha1-MT5mvB5cwG5Di8G3SZwuXFastqI= - dependencies: - assert-plus "1.0.0" - extsprintf "1.3.0" - json-schema "0.2.3" - verror "1.10.0" - -keygrip@~1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/keygrip/-/keygrip-1.0.3.tgz#399d709f0aed2bab0a059e0cdd3a5023a053e1dc" - integrity 
sha512-/PpesirAIfaklxUzp4Yb7xBper9MwP6hNRA6BGGUFCgbJ+BM5CKBtsoxinNXkLHAr+GXS1/lSlF2rP7cv5Fl+g== - -kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: - version "3.2.2" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" - integrity sha1-MeohpzS6ubuw8yRm2JOupR5KPGQ= - dependencies: - is-buffer "^1.1.5" - -kind-of@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-4.0.0.tgz#20813df3d712928b207378691a45066fae72dd57" - integrity sha1-IIE989cSkosgc3hpGkUGb65y3Vc= - dependencies: - is-buffer "^1.1.5" - -kind-of@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" - integrity sha512-NGEErnH6F2vUuXDh+OlbcKW7/wOcfdRHaZ7VWtqCztfHri/++YKmP51OdWeGPuqCOba6kk2OTe5d02VmTB80Pw== - -kind-of@^6.0.0, kind-of@^6.0.2: - version "6.0.3" - resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.3.tgz#07c05034a6c349fa06e24fa35aa76db4580ce4dd" - integrity sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw== - -kleur@^3.0.3: - version "3.0.3" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-3.0.3.tgz#a79c9ecc86ee1ce3fa6206d1216c501f147fc07e" - integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== - -leven@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/leven/-/leven-3.1.0.tgz#77891de834064cccba82ae7842bb6b14a13ed7f2" - integrity sha512-qsda+H8jTaUaN/x5vzW2rzc+8Rw4TAQ/4KjB46IwK5VH+IlVeeeje/EoZRpiXvIqjFgK84QffqPztGI3VBLG1A== - -levenary@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/levenary/-/levenary-1.1.1.tgz#842a9ee98d2075aa7faeedbe32679e9205f46f77" - integrity sha512-mkAdOIt79FD6irqjYSs4rdbnlT5vRonMEvBVPVb3XmevfS8kgRXwfes0dhPdEtzTWD/1eNE/Bm/G1iRt6DcnQQ== - dependencies: - leven "^3.1.0" - -levn@^0.3.0, levn@~0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/levn/-/levn-0.3.0.tgz#3b09924edf9f083c0490fdd4c0bc4421e04764ee" - integrity sha1-OwmSTt+fCDwEkP3UwLxEIeBHZO4= - dependencies: - prelude-ls "~1.1.2" - type-check "~0.3.2" - -lines-and-columns@^1.1.6: - version "1.1.6" - resolved "https://registry.yarnpkg.com/lines-and-columns/-/lines-and-columns-1.1.6.tgz#1c00c743b433cd0a4e80758f7b64a57440d9ff00" - integrity sha1-HADHQ7QzzQpOgHWPe2SldEDZ/wA= - -locate-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-3.0.0.tgz#dbec3b3ab759758071b58fe59fc41871af21400e" - integrity sha512-7AO748wWnIhNqAuaty2ZWHkQHRSNfPVIsPIfwEOWO22AmaoVrWavlOcMR5nzTLNYvp36X220/maaRsrec1G65A== - dependencies: - p-locate "^3.0.0" - path-exists "^3.0.0" - -locate-path@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/locate-path/-/locate-path-5.0.0.tgz#1afba396afd676a6d42504d0a67a3a7eb9f62aa0" - integrity sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g== - dependencies: - p-locate "^4.1.0" - -lodash.sortby@^4.7.0: - version "4.7.0" - resolved "https://registry.yarnpkg.com/lodash.sortby/-/lodash.sortby-4.7.0.tgz#edd14c824e2cc9c1e0b0a1b42bb5210516a42438" - integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= - -lodash@^4.17.14, lodash@^4.17.19: - version "4.17.20" - resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.20.tgz#b44a9b6297bcb698f1c51a3545a2b3b368d59c52" - integrity sha512-PlhdFcillOINfeV7Ni6oF1TAEayyZBoZ8bcshTHqOYJYlrqzRK5hagpagky5o4HfCzzd1TRkXPMFq6cKk9rGmA== - -lolex@^5.0.0: - version "5.1.2" - 
resolved "https://registry.yarnpkg.com/lolex/-/lolex-5.1.2.tgz#953694d098ce7c07bc5ed6d0e42bc6c0c6d5a367" - integrity sha512-h4hmjAvHTmd+25JSwrtTIuwbKdwg5NzZVRMLn9saij4SZaepCrTCxPr35H/3bjwfMJtN+t3CX8672UIkglz28A== - dependencies: - "@sinonjs/commons" "^1.7.0" - -loose-envify@^1.0.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/loose-envify/-/loose-envify-1.4.0.tgz#71ee51fa7be4caec1a63839f7e682d8132d30caf" - integrity sha512-lyuxPGr/Wfhrlem2CL/UcnUc1zcqKAImBDzukY7Y5F/yQiNdko6+fRLevlw1HgMySw7f611UIY408EtxRSoK3Q== - dependencies: - js-tokens "^3.0.0 || ^4.0.0" - -make-dir@^2.0.0, make-dir@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-2.1.0.tgz#5f0310e18b8be898cc07009295a30ae41e91e6f5" - integrity sha512-LS9X+dc8KLxXCb8dni79fLIIUA5VyZoyjSMCwTluaXA0o27cCK0bhXkpgw+sTXVpPy/lSO57ilRixqk0vDmtRA== - dependencies: - pify "^4.0.1" - semver "^5.6.0" - -make-dir@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-3.1.0.tgz#415e967046b3a7f1d185277d84aa58203726a13f" - integrity sha512-g3FeP20LNwhALb/6Cz6Dd4F2ngze0jz7tbzrD2wAV+o9FeNHe4rL+yK2md0J/fiSf1sa1ADhXqi5+oVwOM/eGw== - dependencies: - semver "^6.0.0" - -makeerror@1.0.x: - version "1.0.11" - resolved "https://registry.yarnpkg.com/makeerror/-/makeerror-1.0.11.tgz#e01a5c9109f2af79660e4e8b9587790184f5a96c" - integrity sha1-4BpckQnyr3lmDk6LlYd5AYT1qWw= - dependencies: - tmpl "1.0.x" - -map-cache@^0.2.2: - version "0.2.2" - resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" - integrity sha1-wyq9C9ZSXZsFFkW7TyasXcmKDb8= - -map-visit@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" - integrity sha1-7Nyo8TFE5mDxtb1B8S80edmN+48= - dependencies: - object-visit "^1.0.0" - -media-typer@0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" - integrity sha1-hxDXrwqmJvj/+hzgAWhUUmMlV0g= - -merge-descriptors@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61" - integrity sha1-sAqqVW3YtEVoFQ7J0blT8/kMu2E= - -merge-stream@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/merge-stream/-/merge-stream-2.0.0.tgz#52823629a14dd00c9770fb6ad47dc6310f2c1f60" - integrity sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w== - -methods@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/methods/-/methods-1.1.2.tgz#5529a4d67654134edcc5266656835b0f851afcee" - integrity sha1-VSmk1nZUE07cxSZmVoNbD4Ua/O4= - -micromatch@^3.1.10, micromatch@^3.1.4: - version "3.1.10" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" - integrity sha512-MWikgl9n9M3w+bpsY3He8L+w9eF9338xRl8IAO5viDizwSzziFEyUzo2xrrloB64ADbTf8uA8vRqqttDTOmccg== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - braces "^2.3.1" - define-property "^2.0.2" - extend-shallow "^3.0.2" - extglob "^2.0.4" - fragment-cache "^0.2.1" - kind-of "^6.0.2" - nanomatch "^1.2.9" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.2" - -micromatch@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.2.tgz#4fcb0999bf9fbc2fcbdd212f6d629b9a56c39259" - integrity 
sha512-y7FpHSbMUMoyPbYUSzO6PaZ6FyRnQOpHuKwbo1G+Knck95XVU4QAiKdGEnj5wwoS7PlOgthX/09u5iFJ+aYf5Q== - dependencies: - braces "^3.0.1" - picomatch "^2.0.5" - -mime-db@1.44.0: - version "1.44.0" - resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.44.0.tgz#fa11c5eb0aca1334b4233cb4d52f10c5a6272f92" - integrity sha512-/NOTfLrsPBVeH7YtFPgsVWveuL+4SjjYxaQ1xtM1KMFj7HdxlBlxeyNLzhyJVx7r4rZGJAZ/6lkKCitSc/Nmpg== - -mime-types@^2.1.12, mime-types@~2.1.19, mime-types@~2.1.24: - version "2.1.27" - resolved "https://registry.yarnpkg.com/mime-types/-/mime-types-2.1.27.tgz#47949f98e279ea53119f5722e0f34e529bec009f" - integrity sha512-JIhqnCasI9yD+SsmkquHBxTSEuZdQX5BuQnS2Vc7puQQQ+8yiP5AY5uWhpdv4YL4VM5c6iliiYWPgJ/nJQLp7w== - dependencies: - mime-db "1.44.0" - -mime@1.6.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/mime/-/mime-1.6.0.tgz#32cd9e5c64553bd58d19a568af452acff04981b1" - integrity sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg== - -mimic-fn@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/mimic-fn/-/mimic-fn-2.1.0.tgz#7ed2c2ccccaf84d3ffcb7a69b57711fc2083401b" - integrity sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg== - -minimatch@^3.0.4: - version "3.0.4" - resolved "https://registry.yarnpkg.com/minimatch/-/minimatch-3.0.4.tgz#5166e286457f03306064be5497e8dbb0c3d32083" - integrity sha512-yJHVQEhyqPLUTgt9B83PXu6W3rx4MvvHvSUvToogpwoGDOUQ+yDrR0HRot+yOCdCO7u4hX3pWft6kWBBcqh0UA== - dependencies: - brace-expansion "^1.1.7" - -minimist@^1.1.1, minimist@^1.2.0, minimist@^1.2.5: - version "1.2.5" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.5.tgz#67d66014b66a6a8aaa0c083c5fd58df4e4e97602" - integrity sha512-FM9nNUYrRBAELZQT3xeZQ7fmMOBg6nWNmJKTcgsJeaLstP/UODVpGsr5OhXhhXg6f+qtJ8uiZ+PUxkDWcgIXLw== - -mixin-deep@^1.2.0: - version "1.3.2" - resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.2.tgz#1120b43dc359a785dce65b55b82e257ccf479566" - integrity sha512-WRoDn//mXBiJ1H40rqa3vH0toePwSsGb45iInWlTySa+Uu4k3tYUSxa2v1KqAiLtvlrSzaExqS1gtk96A9zvEA== - dependencies: - for-in "^1.0.2" - is-extendable "^1.0.1" - -mkdirp@^0.5.1: - version "0.5.5" - resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def" - integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ== - dependencies: - minimist "^1.2.5" - -ms@2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.0.0.tgz#5608aeadfc00be6c2901df5f9861788de0d597c8" - integrity sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g= - -ms@2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.1.tgz#30a5864eb3ebb0a66f2ebe6d727af06a09d86e0a" - integrity sha512-tgp+dl5cGk28utYktBsrFqA7HKgrhgPsg6Z/EfhWI4gl1Hwq8B/GmY/0oXZ6nF8hDVesS/FpnYaD/kOWhYQvyg== - -ms@^2.1.1: - version "2.1.2" - resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.2.tgz#d09d1f357b443f493382a8eb3ccd183872ae6009" - integrity sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w== - -mute-stream@0.0.8: - version "0.0.8" - resolved "https://registry.yarnpkg.com/mute-stream/-/mute-stream-0.0.8.tgz#1630c42b2251ff81e2a283de96a5497ea92e5e0d" - integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA== - -nan@^2.12.1: - version "2.14.1" - resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01" - integrity 
sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw== - -nanomatch@^1.2.9: - version "1.2.13" - resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.13.tgz#b87a8aa4fc0de8fe6be88895b38983ff265bd119" - integrity sha512-fpoe2T0RbHwBTBUOftAfBPaDEi06ufaUai0mE6Yn1kacc3SnTErfb/h+X94VXzI64rKFHYImXSvdwGGCmwOqCA== - dependencies: - arr-diff "^4.0.0" - array-unique "^0.3.2" - define-property "^2.0.2" - extend-shallow "^3.0.2" - fragment-cache "^0.2.1" - is-windows "^1.0.2" - kind-of "^6.0.2" - object.pick "^1.3.0" - regex-not "^1.0.0" - snapdragon "^0.8.1" - to-regex "^3.0.1" - -natural-compare@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7" - integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc= - -negotiator@0.6.2: - version "0.6.2" - resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb" - integrity sha512-hZXc7K2e+PgeI1eDBe/10Ard4ekbfrrqG8Ep+8Jmf4JID2bNg7NvCPOZN+kfF574pFQI7mum2AUqDidoKqcTOw== - -nice-try@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/nice-try/-/nice-try-1.0.5.tgz#a3378a7696ce7d223e88fc9b764bd7ef1089e366" - integrity sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ== - -node-environment-flags@^1.0.5: - version "1.0.6" - resolved "https://registry.yarnpkg.com/node-environment-flags/-/node-environment-flags-1.0.6.tgz#a30ac13621f6f7d674260a54dede048c3982c088" - integrity sha512-5Evy2epuL+6TM0lCQGpFIj6KwiEsGh1SrHUhTbNX+sLbBtjidPZFAnVK9y5yU1+h//RitLbRHTIMyxQPtxMdHw== - dependencies: - object.getownpropertydescriptors "^2.0.3" - semver "^5.7.0" - -node-int64@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/node-int64/-/node-int64-0.4.0.tgz#87a9065cdb355d3182d8f94ce11188b825c68a3b" - integrity sha1-h6kGXNs1XTGC2PlM4RGIuCXGijs= - -node-modules-regexp@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/node-modules-regexp/-/node-modules-regexp-1.0.0.tgz#8d9dbe28964a4ac5712e9131642107c71e90ec40" - integrity sha1-jZ2+KJZKSsVxLpExZCEHxx6Q7EA= - -node-notifier@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/node-notifier/-/node-notifier-6.0.0.tgz#cea319e06baa16deec8ce5cd7f133c4a46b68e12" - integrity sha512-SVfQ/wMw+DesunOm5cKqr6yDcvUTDl/yc97ybGHMrteNEY6oekXpNpS3lZwgLlwz0FLgHoiW28ZpmBHUDg37cw== - dependencies: - growly "^1.3.0" - is-wsl "^2.1.1" - semver "^6.3.0" - shellwords "^0.1.1" - which "^1.3.1" - -node-releases@^1.1.60: - version "1.1.60" - resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.60.tgz#6948bdfce8286f0b5d0e5a88e8384e954dfe7084" - integrity sha512-gsO4vjEdQaTusZAEebUWp2a5d7dF5DYoIpDG7WySnk7BuZDW+GPpHXoXXuYawRBr/9t5q54tirPz79kFIWg4dA== - -normalize-package-data@^2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8" - integrity sha512-/5CMN3T0R4XTj4DcGaexo+roZSdSFW/0AOOTROrjxzCG1wrWXEsGbRKevjlIL+ZDE4sZlJr5ED4YW0yqmkK+eA== - dependencies: - hosted-git-info "^2.1.4" - resolve "^1.10.0" - semver "2 || 3 || 4 || 5" - validate-npm-package-license "^3.0.1" - -normalize-path@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-2.1.1.tgz#1ab28b556e198363a8c1a6f7e6fa20137fe6aed9" - integrity sha1-GrKLVW4Zg2Oowab35vogE3/mrtk= - dependencies: - remove-trailing-separator "^1.0.1" - 
-normalize-path@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/normalize-path/-/normalize-path-3.0.0.tgz#0dcd69ff23a1c9b11fd0978316644a0388216a65" - integrity sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA== - -npm-run-path@^2.0.0: - version "2.0.2" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" - integrity sha1-NakjLfo11wZ7TLLd8jV7GHFTbF8= - dependencies: - path-key "^2.0.0" - -npm-run-path@^4.0.0: - version "4.0.1" - resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-4.0.1.tgz#b7ecd1e5ed53da8e37a55e1c2269e0b97ed748ea" - integrity sha512-S48WzZW777zhNIrn7gxOlISNAqi9ZC/uQFnRdbeIHhZhCA6UqpkOT8T1G7BvfdgP4Er8gF4sUbaS0i7QvIfCWw== - dependencies: - path-key "^3.0.0" - -nwsapi@^2.2.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/nwsapi/-/nwsapi-2.2.0.tgz#204879a9e3d068ff2a55139c2c772780681a38b7" - integrity sha512-h2AatdwYH+JHiZpv7pt/gSX1XoRGb7L/qSIeuqA6GwYoF9w1vP1cw42TO0aI2pNyshRK5893hNSl+1//vHK7hQ== - -oauth-sign@~0.9.0: - version "0.9.0" - resolved "https://registry.yarnpkg.com/oauth-sign/-/oauth-sign-0.9.0.tgz#47a7b016baa68b5fa0ecf3dee08a85c679ac6455" - integrity sha512-fexhUFFPTGV8ybAtSIGbV6gOkSv8UtRbDBnAyLQw4QPKkgNlsH2ByPGtMUqdWkos6YCRmAqViwgZrJc/mRDzZQ== - -object-assign@^4: - version "4.1.1" - resolved "https://registry.yarnpkg.com/object-assign/-/object-assign-4.1.1.tgz#2109adc7965887cfc05cbbd442cac8bfbb360863" - integrity sha1-IQmtx5ZYh8/AXLvUQsrIv7s2CGM= - -object-copy@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" - integrity sha1-fn2Fi3gb18mRpBupde04EnVOmYw= - dependencies: - copy-descriptor "^0.1.0" - define-property "^0.2.5" - kind-of "^3.0.3" - -object-inspect@^1.7.0: - version "1.8.0" - resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.8.0.tgz#df807e5ecf53a609cc6bfe93eac3cc7be5b3a9d0" - integrity sha512-jLdtEOB112fORuypAyl/50VRVIBIdVQOSUUGQHzJ4xBSbit81zRarz7GThkEFZy1RceYrWYcPcBFPQwHyAc1gA== - -object-keys@^1.0.11, object-keys@^1.0.12, object-keys@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/object-keys/-/object-keys-1.1.1.tgz#1c47f272df277f3b1daf061677d9c82e2322c60e" - integrity sha512-NuAESUOUMrlIXOfHKzD6bpPu3tYt3xvjNdRIQ+FeT0lNb4K8WR70CaDxhuNguS2XG+GjkyMwOzsN5ZktImfhLA== - -object-visit@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" - integrity sha1-95xEk68MU3e1n+OdOV5BBC3QRbs= - dependencies: - isobject "^3.0.0" - -object.assign@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.0.tgz#968bf1100d7956bb3ca086f006f846b3bc4008da" - integrity sha512-exHJeq6kBKj58mqGyTQ9DFvrZC/eR6OwxzoM9YRoGBqrXYonaFyGiFMuc9VZrXf7DarreEwMpurG3dd+CNyW5w== - dependencies: - define-properties "^1.1.2" - function-bind "^1.1.1" - has-symbols "^1.0.0" - object-keys "^1.0.11" - -object.getownpropertydescriptors@^2.0.3: - version "2.1.0" - resolved "https://registry.yarnpkg.com/object.getownpropertydescriptors/-/object.getownpropertydescriptors-2.1.0.tgz#369bf1f9592d8ab89d712dced5cb81c7c5352649" - integrity sha512-Z53Oah9A3TdLoblT7VKJaTDdXdT+lQO+cNpKVnya5JDe9uLvzu1YyY1yFDFrcxrlRgWrEFH0jJtD/IbuwjcEVg== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.0-next.1" - -object.pick@^1.3.0: - version "1.3.0" - resolved 
"https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" - integrity sha1-h6EKxMFpS9Lhy/U1kaZhQftd10c= - dependencies: - isobject "^3.0.1" - -on-finished@~2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" - integrity sha1-IPEzZIGwg811M3mSoWlxqi2QaUc= - dependencies: - ee-first "1.1.1" - -once@^1.3.0, once@^1.3.1, once@^1.4.0: - version "1.4.0" - resolved "https://registry.yarnpkg.com/once/-/once-1.4.0.tgz#583b1aa775961d4b113ac17d9c50baef9dd76bd1" - integrity sha1-WDsap3WWHUsROsF9nFC6753Xa9E= - dependencies: - wrappy "1" - -onetime@^5.1.0: - version "5.1.2" - resolved "https://registry.yarnpkg.com/onetime/-/onetime-5.1.2.tgz#d0e96ebb56b07476df1dd9c4806e5237985ca45e" - integrity sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg== - dependencies: - mimic-fn "^2.1.0" - -optionator@^0.8.1, optionator@^0.8.3: - version "0.8.3" - resolved "https://registry.yarnpkg.com/optionator/-/optionator-0.8.3.tgz#84fa1d036fe9d3c7e21d99884b601167ec8fb495" - integrity sha512-+IW9pACdk3XWmmTXG8m3upGUJst5XRGzxMRjXzAuJ1XnIFNvfhjjIuYkDvysnPQ7qzqVzLt78BCruntqRhWQbA== - dependencies: - deep-is "~0.1.3" - fast-levenshtein "~2.0.6" - levn "~0.3.0" - prelude-ls "~1.1.2" - type-check "~0.3.2" - word-wrap "~1.2.3" - -os-tmpdir@~1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274" - integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ= - -p-each-series@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/p-each-series/-/p-each-series-2.1.0.tgz#961c8dd3f195ea96c747e636b262b800a6b1af48" - integrity sha512-ZuRs1miPT4HrjFa+9fRfOFXxGJfORgelKV9f9nNOWw2gl6gVsRaVDOQP0+MI0G0wGKns1Yacsu0GjOFbTK0JFQ== - -p-finally@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-1.0.0.tgz#3fbcfb15b899a44123b34b6dcc18b724336a2cae" - integrity sha1-P7z7FbiZpEEjs0ttzBi3JDNqLK4= - -p-finally@^2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/p-finally/-/p-finally-2.0.1.tgz#bd6fcaa9c559a096b680806f4d657b3f0f240561" - integrity sha512-vpm09aKwq6H9phqRQzecoDpD8TmVyGw70qmWlyq5onxY7tqyTTFVvxMykxQSQKILBSFlbXpypIw2T1Ml7+DDtw== - -p-limit@^2.0.0, p-limit@^2.2.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/p-limit/-/p-limit-2.3.0.tgz#3dd33c647a214fdfffd835933eb086da0dc21db1" - integrity sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w== - dependencies: - p-try "^2.0.0" - -p-locate@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-3.0.0.tgz#322d69a05c0264b25997d9f40cd8a891ab0064a4" - integrity sha512-x+12w/To+4GFfgJhBEpiDcLozRJGegY+Ei7/z0tSLkMmxGZNybVMSfWj9aJn8Z5Fc7dBUNJOOVgPv2H7IwulSQ== - dependencies: - p-limit "^2.0.0" - -p-locate@^4.1.0: - version "4.1.0" - resolved "https://registry.yarnpkg.com/p-locate/-/p-locate-4.1.0.tgz#a3428bb7088b3a60292f66919278b7c297ad4f07" - integrity sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A== - dependencies: - p-limit "^2.2.0" - -p-try@^2.0.0: - version "2.2.0" - resolved "https://registry.yarnpkg.com/p-try/-/p-try-2.2.0.tgz#cb2868540e313d61de58fafbe35ce9004d5540e6" - integrity sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ== - -parent-module@^1.0.0: - version "1.0.1" - resolved 
"https://registry.yarnpkg.com/parent-module/-/parent-module-1.0.1.tgz#691d2709e78c79fae3a156622452d00762caaaa2" - integrity sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g== - dependencies: - callsites "^3.0.0" - -parse-json@^5.0.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/parse-json/-/parse-json-5.1.0.tgz#f96088cdf24a8faa9aea9a009f2d9d942c999646" - integrity sha512-+mi/lmVVNKFNVyLXV31ERiy2CY5E1/F6QtJFEzoChPRwwngMNXRDQ9GJ5WdE2Z2P4AujsOi0/+2qHID68KwfIQ== - dependencies: - "@babel/code-frame" "^7.0.0" - error-ex "^1.3.1" - json-parse-even-better-errors "^2.3.0" - lines-and-columns "^1.1.6" - -parse-passwd@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/parse-passwd/-/parse-passwd-1.0.0.tgz#6d5b934a456993b23d37f40a382d6f1666a8e5c6" - integrity sha1-bVuTSkVpk7I9N/QKOC1vFmao5cY= - -parse5@5.1.0: - version "5.1.0" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-5.1.0.tgz#c59341c9723f414c452975564c7c00a68d58acd2" - integrity sha512-fxNG2sQjHvlVAYmzBZS9YlDp6PTSSDwa98vkD4QgVDDCAo84z5X1t5XyJQ62ImdLXx5NdIIfihey6xpum9/gRQ== - -parseurl@~1.3.3: - version "1.3.3" - resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.3.tgz#9da19e7bee8d12dff0513ed5b76957793bc2e8d4" - integrity sha512-CiyeOxFT/JZyN5m0z9PfXw4SCBJ6Sygz1Dpl0wqjlhDEGGBP1GnsUVEL0p63hoG1fcj3fHynXi9NYO4nWOL+qQ== - -pascalcase@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" - integrity sha1-s2PlXoAGym/iF4TS2yK9FdeRfxQ= - -path-dirname@^1.0.0: - version "1.0.2" - resolved "https://registry.yarnpkg.com/path-dirname/-/path-dirname-1.0.2.tgz#cc33d24d525e099a5388c0336c6e32b9160609e0" - integrity sha1-zDPSTVJeCZpTiMAzbG4yuRYGCeA= - -path-exists@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-3.0.0.tgz#ce0ebeaa5f78cb18925ea7d810d7b59b010fd515" - integrity sha1-zg6+ql94yxiSXqfYENe1mwEP1RU= - -path-exists@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-4.0.0.tgz#513bdbe2d3b95d7762e8c1137efa195c6c61b5b3" - integrity sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w== - -path-is-absolute@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/path-is-absolute/-/path-is-absolute-1.0.1.tgz#174b9268735534ffbc7ace6bf53a5a9e1b5c5f5f" - integrity sha1-F0uSaHNVNP+8es5r9TpanhtcX18= - -path-key@^2.0.0, path-key@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-2.0.1.tgz#411cadb574c5a140d3a4b1910d40d80cc9f40b40" - integrity sha1-QRyttXTFoUDTpLGRDUDYDMn0C0A= - -path-key@^3.0.0, path-key@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/path-key/-/path-key-3.1.1.tgz#581f6ade658cbba65a0d3380de7753295054f375" - integrity sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q== - -path-parse@^1.0.6: - version "1.0.6" - resolved "https://registry.yarnpkg.com/path-parse/-/path-parse-1.0.6.tgz#d62dbb5679405d72c4737ec58600e9ddcf06d24c" - integrity sha512-GSmOT2EbHrINBf9SR7CDELwlJ8AENk3Qn7OikK4nFYAu3Ote2+JYNVvkpAEQm3/TLNEJFD/xZJjzyxg3KBWOzw== - -path-to-regexp@0.1.7: - version "0.1.7" - resolved "https://registry.yarnpkg.com/path-to-regexp/-/path-to-regexp-0.1.7.tgz#df604178005f522f15eb4490e7247a1bfaa67f8c" - integrity sha1-32BBeABfUi8V60SQ5yR6G/qmf4w= - -performance-now@^2.1.0: - version "2.1.0" - resolved 
"https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b" - integrity sha1-Ywn04OX6kT7BxpMHrjZLSzd8nns= - -picomatch@^2.0.4, picomatch@^2.0.5: - version "2.2.2" - resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad" - integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg== - -pify@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/pify/-/pify-4.0.1.tgz#4b2cd25c50d598735c50292224fd8c6df41e3231" - integrity sha512-uB80kBFb/tfd68bVleG9T5GGsGPjJrLAUpR5PZIrhBnIaRTQRjqdJSsIKkOP6OAIFbj7GOrcudc5pNjZ+geV2g== - -pirates@^4.0.0, pirates@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/pirates/-/pirates-4.0.1.tgz#643a92caf894566f91b2b986d2c66950a8e2fb87" - integrity sha512-WuNqLTbMI3tmfef2TKxlQmAiLHKtFhlsCZnPIpuv2Ow0RDVO8lfy1Opf4NUzlMXLjPl+Men7AuVdX6TA+s+uGA== - dependencies: - node-modules-regexp "^1.0.0" - -pkg-dir@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-3.0.0.tgz#2749020f239ed990881b1f71210d51eb6523bea3" - integrity sha512-/E57AYkoeQ25qkxMj5PBOVgF8Kiu/h7cYS30Z5+R7WaiCCBfLq58ZI/dSeaEKb9WVJV5n/03QwrN3IeWIFllvw== - dependencies: - find-up "^3.0.0" - -pkg-dir@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-4.2.0.tgz#f099133df7ede422e81d1d8448270eeb3e4261f3" - integrity sha512-HRDzbaKjC+AOWVXxAU/x54COGeIv9eb+6CkDSQoNTt4XyWoIJvuPsXizxu/Fr23EiekbtZwmh1IcIG/l/a10GQ== - dependencies: - find-up "^4.0.0" - -pn@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/pn/-/pn-1.1.0.tgz#e2f4cef0e219f463c179ab37463e4e1ecdccbafb" - integrity sha512-2qHaIQr2VLRFoxe2nASzsV6ef4yOOH+Fi9FBOVH6cqeSgUnoyySPZkxzLuzd+RYOQTRpROA0ztTMqxROKSb/nA== - -posix-character-classes@^0.1.0: - version "0.1.1" - resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" - integrity sha1-AerA/jta9xoqbAL+q7jB/vfgDqs= - -prelude-ls@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" - integrity sha1-IZMqVJ9eUv/ZqCf1cOBL5iqX2lQ= - -pretty-format@^25.5.0: - version "25.5.0" - resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-25.5.0.tgz#7873c1d774f682c34b8d48b6743a2bf2ac55791a" - integrity sha512-kbo/kq2LQ/A/is0PQwsEHM7Ca6//bGPPvU6UnsdDRSKTWxT/ru/xb88v4BJf6a69H+uTytOEsTusT9ksd/1iWQ== - dependencies: - "@jest/types" "^25.5.0" - ansi-regex "^5.0.0" - ansi-styles "^4.0.0" - react-is "^16.12.0" - -process-nextick-args@~2.0.0: - version "2.0.1" - resolved "https://registry.yarnpkg.com/process-nextick-args/-/process-nextick-args-2.0.1.tgz#7820d9b16120cc55ca9ae7792680ae7dba6d7fe2" - integrity sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag== - -progress@^2.0.0: - version "2.0.3" - resolved "https://registry.yarnpkg.com/progress/-/progress-2.0.3.tgz#7e8cf8d8f5b8f239c1bc68beb4eb78567d572ef8" - integrity sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA== - -prompts@^2.0.1: - version "2.3.2" - resolved "https://registry.yarnpkg.com/prompts/-/prompts-2.3.2.tgz#480572d89ecf39566d2bd3fe2c9fccb7c4c0b068" - integrity sha512-Q06uKs2CkNYVID0VqwfAl9mipo99zkBv/n2JtWY89Yxa3ZabWSrs0e2KTudKVa3peLUvYXMefDqIleLPVUBZMA== - dependencies: - kleur "^3.0.3" - sisteransi "^1.0.4" - -proxy-addr@~2.0.5: - 
version "2.0.6" - resolved "https://registry.yarnpkg.com/proxy-addr/-/proxy-addr-2.0.6.tgz#fdc2336505447d3f2f2c638ed272caf614bbb2bf" - integrity sha512-dh/frvCBVmSsDYzw6n926jv974gddhkFPfiN8hPOi30Wax25QZyZEGveluCgliBnqmuM+UJmBErbAUFIoDbjOw== - dependencies: - forwarded "~0.1.2" - ipaddr.js "1.9.1" - -psl@^1.1.28: - version "1.8.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" - integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== - -pump@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/pump/-/pump-3.0.0.tgz#b4a2116815bde2f4e1ea602354e8c75565107a64" - integrity sha512-LwZy+p3SFs1Pytd/jYct4wpv49HiYCqd9Rlc5ZVdk0V+8Yzv6jR5Blk3TRmPL1ft69TxP0IMZGJ+WPFU2BFhww== - dependencies: - end-of-stream "^1.1.0" - once "^1.3.1" - -punycode@^2.1.0, punycode@^2.1.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/punycode/-/punycode-2.1.1.tgz#b58b010ac40c22c5657616c8d2c2c02c7bf479ec" - integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== - -qs@6.7.0: - version "6.7.0" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.7.0.tgz#41dc1a015e3d581f1621776be31afb2876a9b1bc" - integrity sha512-VCdBRNFTX1fyE7Nb6FYoURo/SPe62QCaAyzJvUjwRaIsc+NePBEniHlvxFmmX56+HZphIGtV0XeCirBtpDrTyQ== - -qs@~6.5.2: - version "6.5.2" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.5.2.tgz#cb3ae806e8740444584ef154ce8ee98d403f3e36" - integrity sha512-N5ZAX4/LxJmF+7wN74pUD6qAh9/wnvdQcjq9TZjevvXzSUo7bfmw91saqMjzGS2xq91/odN2dW/WOl7qQHNDGA== - -range-parser@~1.2.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/range-parser/-/range-parser-1.2.1.tgz#3cf37023d199e1c24d1a55b84800c2f3e6468031" - integrity sha512-Hrgsx+orqoygnmhFbKaHE6c296J+HTAQXoxEF6gNupROmmGJRoyzfG3ccAveqCBrwr/2yxQ5BVd/GTl5agOwSg== - -raw-body@2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/raw-body/-/raw-body-2.4.0.tgz#a1ce6fb9c9bc356ca52e89256ab59059e13d0332" - integrity sha512-4Oz8DUIwdvoa5qMJelxipzi/iJIi40O5cGV1wNYp5hvZP8ZN0T+jiNkL0QepXs+EsQ9XJ8ipEDoiH70ySUJP3Q== - dependencies: - bytes "3.1.0" - http-errors "1.7.2" - iconv-lite "0.4.24" - unpipe "1.0.0" - -react-is@^16.12.0: - version "16.13.1" - resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4" - integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ== - -read-pkg-up@^7.0.1: - version "7.0.1" - resolved "https://registry.yarnpkg.com/read-pkg-up/-/read-pkg-up-7.0.1.tgz#f3a6135758459733ae2b95638056e1854e7ef507" - integrity sha512-zK0TB7Xd6JpCLmlLmufqykGE+/TlOePD6qKClNW7hHDKFh/J7/7gCWGR7joEQEW1bKq3a3yUZSObOoWLFQ4ohg== - dependencies: - find-up "^4.1.0" - read-pkg "^5.2.0" - type-fest "^0.8.1" - -read-pkg@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/read-pkg/-/read-pkg-5.2.0.tgz#7bf295438ca5a33e56cd30e053b34ee7250c93cc" - integrity sha512-Ug69mNOpfvKDAc2Q8DRpMjjzdtrnv9HcSMX+4VsZxD1aZ6ZzrIE7rlzXBtWTyhULSMKg076AW6WR5iZpD0JiOg== - dependencies: - "@types/normalize-package-data" "^2.4.0" - normalize-package-data "^2.5.0" - parse-json "^5.0.0" - type-fest "^0.6.0" - -readable-stream@^2.0.2: - version "2.3.7" - resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57" - integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw== - dependencies: - core-util-is "~1.0.0" - 
inherits "~2.0.3" - isarray "~1.0.0" - process-nextick-args "~2.0.0" - safe-buffer "~5.1.1" - string_decoder "~1.1.1" - util-deprecate "~1.0.1" - -readdirp@^2.2.1: - version "2.2.1" - resolved "https://registry.yarnpkg.com/readdirp/-/readdirp-2.2.1.tgz#0e87622a3325aa33e892285caf8b4e846529a525" - integrity sha512-1JU/8q+VgFZyxwrJ+SVIOsh+KywWGpds3NTqikiKpDMZWScmAYyKIgqkO+ARvNWJfXeXR1zxz7aHF4u4CyH6vQ== - dependencies: - graceful-fs "^4.1.11" - micromatch "^3.1.10" - readable-stream "^2.0.2" - -realpath-native@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/realpath-native/-/realpath-native-2.0.0.tgz#7377ac429b6e1fd599dc38d08ed942d0d7beb866" - integrity sha512-v1SEYUOXXdbBZK8ZuNgO4TBjamPsiSgcFr0aP+tEKpQZK8vooEUqV6nm6Cv502mX4NF2EfsnVqtNAHG+/6Ur1Q== - -redis-commands@^1.5.0: - version "1.6.0" - resolved "https://registry.yarnpkg.com/redis-commands/-/redis-commands-1.6.0.tgz#36d4ca42ae9ed29815cdb30ad9f97982eba1ce23" - integrity sha512-2jnZ0IkjZxvguITjFTrGiLyzQZcTvaw8DAaCXxZq/dsHXz7KfMQ3OUJy7Tz9vnRtZRVz6VRCPDvruvU8Ts44wQ== - -redis-errors@^1.0.0, redis-errors@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/redis-errors/-/redis-errors-1.2.0.tgz#eb62d2adb15e4eaf4610c04afe1529384250abad" - integrity sha1-62LSrbFeTq9GEMBK/hUpOEJQq60= - -redis-parser@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/redis-parser/-/redis-parser-3.0.0.tgz#b66d828cdcafe6b4b8a428a7def4c6bcac31c8b4" - integrity sha1-tm2CjNyv5rS4pCin3vTGvKwxyLQ= - dependencies: - redis-errors "^1.0.0" - -redis@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/redis/-/redis-3.0.2.tgz#bd47067b8a4a3e6a2e556e57f71cc82c7360150a" - integrity sha512-PNhLCrjU6vKVuMOyFu7oSP296mwBkcE6lrAjruBYG5LgdSqtRBoVQIylrMyVZD/lkF24RSNNatzvYag6HRBHjQ== - dependencies: - denque "^1.4.1" - redis-commands "^1.5.0" - redis-errors "^1.2.0" - redis-parser "^3.0.0" - -regenerate-unicode-properties@^8.2.0: - version "8.2.0" - resolved "https://registry.yarnpkg.com/regenerate-unicode-properties/-/regenerate-unicode-properties-8.2.0.tgz#e5de7111d655e7ba60c057dbe9ff37c87e65cdec" - integrity sha512-F9DjY1vKLo/tPePDycuH3dn9H1OTPIkVD9Kz4LODu+F2C75mgjAJ7x/gwy6ZcSNRAAkhNlJSOHRe8k3p+K9WhA== - dependencies: - regenerate "^1.4.0" - -regenerate@^1.4.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.1.tgz#cad92ad8e6b591773485fbe05a485caf4f457e6f" - integrity sha512-j2+C8+NtXQgEKWk49MMP5P/u2GhnahTtVkRIHr5R5lVRlbKvmQ+oS+A5aLKWp2ma5VkT8sh6v+v4hbH0YHR66A== - -regenerator-runtime@^0.13.4: - version "0.13.7" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.7.tgz#cac2dacc8a1ea675feaabaeb8ae833898ae46f55" - integrity sha512-a54FxoJDIr27pgf7IgeQGxmqUNYrcV338lf/6gH456HZ/PhX+5BcwHXG9ajESmwe6WRO0tAzRUrRmNONWgkrew== - -regenerator-transform@^0.14.2: - version "0.14.5" - resolved "https://registry.yarnpkg.com/regenerator-transform/-/regenerator-transform-0.14.5.tgz#c98da154683671c9c4dcb16ece736517e1b7feb4" - integrity sha512-eOf6vka5IO151Jfsw2NO9WpGX58W6wWmefK3I1zEGr0lOD0u8rwPaNqQL1aRxUaxLeKO3ArNh3VYg1KbaD+FFw== - dependencies: - "@babel/runtime" "^7.8.4" - -regex-not@^1.0.0, regex-not@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" - integrity sha512-J6SDjUgDxQj5NusnOtdFxDwN/+HWykR8GELwctJ7mdqhcyy1xEc4SRFHUXvxTp661YaVKAjfRLZ9cCqS6tn32A== - dependencies: - extend-shallow "^3.0.2" - safe-regex "^1.1.0" - -regexpp@^2.0.1: - version "2.0.1" - resolved 
"https://registry.yarnpkg.com/regexpp/-/regexpp-2.0.1.tgz#8d19d31cf632482b589049f8281f93dbcba4d07f" - integrity sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw== - -regexpu-core@^4.7.0: - version "4.7.0" - resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-4.7.0.tgz#fcbf458c50431b0bb7b45d6967b8192d91f3d938" - integrity sha512-TQ4KXRnIn6tz6tjnrXEkD/sshygKH/j5KzK86X8MkeHyZ8qst/LZ89j3X4/8HEIfHANTFIP/AbXakeRhWIl5YQ== - dependencies: - regenerate "^1.4.0" - regenerate-unicode-properties "^8.2.0" - regjsgen "^0.5.1" - regjsparser "^0.6.4" - unicode-match-property-ecmascript "^1.0.4" - unicode-match-property-value-ecmascript "^1.2.0" - -regjsgen@^0.5.1: - version "0.5.2" - resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.2.tgz#92ff295fb1deecbf6ecdab2543d207e91aa33733" - integrity sha512-OFFT3MfrH90xIW8OOSyUrk6QHD5E9JOTeGodiJeBS3J6IwlgzJMNE/1bZklWz5oTg+9dCMyEetclvCVXOPoN3A== - -regjsparser@^0.6.4: - version "0.6.4" - resolved "https://registry.yarnpkg.com/regjsparser/-/regjsparser-0.6.4.tgz#a769f8684308401a66e9b529d2436ff4d0666272" - integrity sha512-64O87/dPDgfk8/RQqC4gkZoGyyWFIEUTTh80CU6CWuK5vkCGyekIx+oKcEIYtP/RAxSQltCZHCNu/mdd7fqlJw== - dependencies: - jsesc "~0.5.0" - -remove-trailing-separator@^1.0.1: - version "1.1.0" - resolved "https://registry.yarnpkg.com/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz#c24bce2a283adad5bc3f58e0d48249b92379d8ef" - integrity sha1-wkvOKig62tW8P1jg1IJJuSN52O8= - -repeat-element@^1.1.2: - version "1.1.3" - resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.3.tgz#782e0d825c0c5a3bb39731f84efee6b742e6b1ce" - integrity sha512-ahGq0ZnV5m5XtZLMb+vP76kcAM5nkLqk0lpqAuojSKGgQtn4eRi4ZZGm2olo2zKFH+sMsWaqOCW1dqAnOru72g== - -repeat-string@^1.6.1: - version "1.6.1" - resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" - integrity sha1-jcrkcOHIirwtYA//Sndihtp15jc= - -request-promise-core@1.1.4: - version "1.1.4" - resolved "https://registry.yarnpkg.com/request-promise-core/-/request-promise-core-1.1.4.tgz#3eedd4223208d419867b78ce815167d10593a22f" - integrity sha512-TTbAfBBRdWD7aNNOoVOBH4pN/KigV6LyapYNNlAPA8JwbovRti1E88m3sYAwsLi5ryhPKsE9APwnjFTgdUjTpw== - dependencies: - lodash "^4.17.19" - -request-promise-native@^1.0.7: - version "1.0.9" - resolved "https://registry.yarnpkg.com/request-promise-native/-/request-promise-native-1.0.9.tgz#e407120526a5efdc9a39b28a5679bf47b9d9dc28" - integrity sha512-wcW+sIUiWnKgNY0dqCpOZkUbF/I+YPi+f09JZIDa39Ec+q82CpSYniDp+ISgTTbKmnpJWASeJBPZmoxH84wt3g== - dependencies: - request-promise-core "1.1.4" - stealthy-require "^1.1.1" - tough-cookie "^2.3.3" - -request@^2.88.0: - version "2.88.2" - resolved "https://registry.yarnpkg.com/request/-/request-2.88.2.tgz#d73c918731cb5a87da047e207234146f664d12b3" - integrity sha512-MsvtOrfG9ZcrOwAW+Qi+F6HbD0CWXEh9ou77uOb7FM2WPhwT7smM833PzanhJLsgXjN89Ir6V2PczXNnMpwKhw== - dependencies: - aws-sign2 "~0.7.0" - aws4 "^1.8.0" - caseless "~0.12.0" - combined-stream "~1.0.6" - extend "~3.0.2" - forever-agent "~0.6.1" - form-data "~2.3.2" - har-validator "~5.1.3" - http-signature "~1.2.0" - is-typedarray "~1.0.0" - isstream "~0.1.2" - json-stringify-safe "~5.0.1" - mime-types "~2.1.19" - oauth-sign "~0.9.0" - performance-now "^2.1.0" - qs "~6.5.2" - safe-buffer "^5.1.2" - tough-cookie "~2.5.0" - tunnel-agent "^0.6.0" - uuid "^3.3.2" - -require-directory@^2.1.1: - version "2.1.1" - resolved 
"https://registry.yarnpkg.com/require-directory/-/require-directory-2.1.1.tgz#8c64ad5fd30dab1c976e2344ffe7f792a6a6df42" - integrity sha1-jGStX9MNqxyXbiNE/+f3kqam30I= - -require-main-filename@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" - integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== - -resolve-cwd@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/resolve-cwd/-/resolve-cwd-3.0.0.tgz#0f0075f1bb2544766cf73ba6a6e2adfebcb13f2d" - integrity sha512-OrZaX2Mb+rJCpH/6CpSqt9xFVpN++x01XnN2ie9g6P5/3xelLAkXWVADpdz1IHD/KFfEXyE6V0U01OQ3UO2rEg== - dependencies: - resolve-from "^5.0.0" - -resolve-from@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-4.0.0.tgz#4abcd852ad32dd7baabfe9b40e00a36db5f392e6" - integrity sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g== - -resolve-from@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-5.0.0.tgz#c35225843df8f776df21c57557bc087e9dfdfc69" - integrity sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw== - -resolve-url@^0.2.1: - version "0.2.1" - resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" - integrity sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo= - -resolve@1.1.7: - version "1.1.7" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.1.7.tgz#203114d82ad2c5ed9e8e0411b3932875e889e97b" - integrity sha1-IDEU2CrSxe2ejgQRs5ModeiJ6Xs= - -resolve@^1.10.0, resolve@^1.13.1, resolve@^1.17.0, resolve@^1.3.2, resolve@^1.8.1: - version "1.17.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.17.0.tgz#b25941b54968231cc2d1bb76a79cb7f2c0bf8444" - integrity sha512-ic+7JYiV8Vi2yzQGFWOkiZD5Z9z7O2Zhm9XMaTxdJExKasieFCr+yXZ/WmXsckHiKl12ar0y6XiXDx3m4RHn1w== - dependencies: - path-parse "^1.0.6" - -restore-cursor@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-3.1.0.tgz#39f67c54b3a7a58cea5236d95cf0034239631f7e" - integrity sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA== - dependencies: - onetime "^5.1.0" - signal-exit "^3.0.2" - -ret@~0.1.10: - version "0.1.15" - resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" - integrity sha512-TTlYpa+OL+vMMNG24xSlQGEJ3B/RzEfUlLct7b5G/ytav+wPrplCpVMFuwzXbkecJrb6IYo1iFb0S9v37754mg== - -rimraf@2.6.3: - version "2.6.3" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.6.3.tgz#b2d104fe0d8fb27cf9e0a1cda8262dd3833c6cab" - integrity sha512-mwqeW5XsA2qAejG46gYdENaxXjx9onRNCfn7L0duuP4hCuTIi/QO7PDK07KJfp1d+izWPrzEJDcSqBa0OZQriA== - dependencies: - glob "^7.1.3" - -rimraf@^3.0.0: - version "3.0.2" - resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-3.0.2.tgz#f1a5402ba6220ad52cc1282bac1ae3aa49fd061a" - integrity sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA== - dependencies: - glob "^7.1.3" - -rsvp@^4.8.4: - version "4.8.5" - resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.8.5.tgz#c8f155311d167f68f21e168df71ec5b083113734" - integrity sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA== - -run-async@^2.4.0: - version "2.4.1" - resolved 
"https://registry.yarnpkg.com/run-async/-/run-async-2.4.1.tgz#8440eccf99ea3e70bd409d49aab88e10c189a455" - integrity sha512-tvVnVv01b8c1RrA6Ep7JkStj85Guv/YrMcwqYQnwjsAS2cTmmPGBBjAjpCW7RrSodNSoE2/qg9O4bceNvUuDgQ== - -rxjs@^6.6.0: - version "6.6.2" - resolved "https://registry.yarnpkg.com/rxjs/-/rxjs-6.6.2.tgz#8096a7ac03f2cc4fe5860ef6e572810d9e01c0d2" - integrity sha512-BHdBMVoWC2sL26w//BCu3YzKT4s2jip/WhwsGEDmeKYBhKDZeYezVUnHatYB7L85v5xs0BAQmg6BEYJEKxBabg== - dependencies: - tslib "^1.9.0" - -safe-buffer@5.1.2, safe-buffer@~5.1.0, safe-buffer@~5.1.1: - version "5.1.2" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.1.2.tgz#991ec69d296e0313747d59bdfd2b745c35f8828d" - integrity sha512-Gd2UZBJDkXlY7GbJxfsE8/nvKkUEU1G38c1siN6QP6a9PT9MmHB8GnpscSmMJSoF8LOIrt8ud/wPtojys4G6+g== - -safe-buffer@^5.0.1, safe-buffer@^5.1.2: - version "5.2.1" - resolved "https://registry.yarnpkg.com/safe-buffer/-/safe-buffer-5.2.1.tgz#1eaf9fa9bdb1fdd4ec75f58f9cdb4e6b7827eec6" - integrity sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ== - -safe-regex@^1.1.0: - version "1.1.0" - resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" - integrity sha1-QKNmnzsHfR6UPURinhV91IAjvy4= - dependencies: - ret "~0.1.10" - -"safer-buffer@>= 2.1.2 < 3", safer-buffer@^2.0.2, safer-buffer@^2.1.0, safer-buffer@~2.1.0: - version "2.1.2" - resolved "https://registry.yarnpkg.com/safer-buffer/-/safer-buffer-2.1.2.tgz#44fa161b0187b9549dd84bb91802f9bd8385cd6a" - integrity sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg== - -sane@^4.0.3: - version "4.1.0" - resolved "https://registry.yarnpkg.com/sane/-/sane-4.1.0.tgz#ed881fd922733a6c461bc189dc2b6c006f3ffded" - integrity sha512-hhbzAgTIX8O7SHfp2c8/kREfEn4qO/9q8C9beyY6+tvZ87EpoZ3i1RIEvp27YBswnNbY9mWd6paKVmKbAgLfZA== - dependencies: - "@cnakazawa/watch" "^1.0.3" - anymatch "^2.0.0" - capture-exit "^2.0.0" - exec-sh "^0.3.2" - execa "^1.0.0" - fb-watchman "^2.0.0" - micromatch "^3.1.4" - minimist "^1.1.1" - walker "~1.0.5" - -saxes@^3.1.9: - version "3.1.11" - resolved "https://registry.yarnpkg.com/saxes/-/saxes-3.1.11.tgz#d59d1fd332ec92ad98a2e0b2ee644702384b1c5b" - integrity sha512-Ydydq3zC+WYDJK1+gRxRapLIED9PWeSuuS41wqyoRmzvhhh9nc+QQrVMKJYzJFULazeGhzSV0QleN2wD3boh2g== - dependencies: - xmlchars "^2.1.1" - -"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0, semver@^5.7.0: - version "5.7.1" - resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" - integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== - -semver@7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.0.0.tgz#5f3ca35761e47e05b206c6daff2cf814f0316b8e" - integrity sha512-+GB6zVA9LWh6zovYQLALHwv5rb2PHGlJi3lfiqIHxR0uuwCgefcOJc59v9fv1w8GbStwxuuqqAjI9NMAOOgq1A== - -semver@^6.0.0, semver@^6.1.2, semver@^6.3.0: - version "6.3.0" - resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" - integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== - -send@0.17.1: - version "0.17.1" - resolved "https://registry.yarnpkg.com/send/-/send-0.17.1.tgz#c1d8b059f7900f7466dd4938bdc44e11ddb376c8" - integrity sha512-BsVKsiGcQMFwT8UxypobUKyv7irCNRHk1T0G680vk88yf6LBByGcZJOTJCrTP2xVN6yI+XjPJcNuE3V4fT9sAg== - dependencies: - 
debug "2.6.9" - depd "~1.1.2" - destroy "~1.0.4" - encodeurl "~1.0.2" - escape-html "~1.0.3" - etag "~1.8.1" - fresh "0.5.2" - http-errors "~1.7.2" - mime "1.6.0" - ms "2.1.1" - on-finished "~2.3.0" - range-parser "~1.2.1" - statuses "~1.5.0" - -serve-static@1.14.1: - version "1.14.1" - resolved "https://registry.yarnpkg.com/serve-static/-/serve-static-1.14.1.tgz#666e636dc4f010f7ef29970a88a674320898b2f9" - integrity sha512-JMrvUwE54emCYWlTI+hGrGv5I8dEwmco/00EvkzIIsR7MqrHonbD9pO2MOfFnpFntl7ecpZs+3mW+XbQZu9QCg== - dependencies: - encodeurl "~1.0.2" - escape-html "~1.0.3" - parseurl "~1.3.3" - send "0.17.1" - -set-blocking@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7" - integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc= - -set-value@^2.0.0, set-value@^2.0.1: - version "2.0.1" - resolved "https://registry.yarnpkg.com/set-value/-/set-value-2.0.1.tgz#a18d40530e6f07de4228c7defe4227af8cad005b" - integrity sha512-JxHc1weCN68wRY0fhCoXpyK55m/XPHafOmK4UWD7m2CI14GMcFypt4w/0+NV5f/ZMby2F6S2wwA7fgynh9gWSw== - dependencies: - extend-shallow "^2.0.1" - is-extendable "^0.1.1" - is-plain-object "^2.0.3" - split-string "^3.0.1" - -setprototypeof@1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.1.1.tgz#7e95acb24aa92f5885e0abef5ba131330d4ae683" - integrity sha512-JvdAWfbXeIGaZ9cILp38HntZSFSo3mWg6xGcJJsd+d4aRMOqauag1C63dJfDw7OaMYwEbHMOxEZ1lqVRYP2OAw== - -shebang-command@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-1.2.0.tgz#44aac65b695b03398968c39f363fee5deafdf1ea" - integrity sha1-RKrGW2lbAzmJaMOfNj/uXer98eo= - dependencies: - shebang-regex "^1.0.0" - -shebang-command@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/shebang-command/-/shebang-command-2.0.0.tgz#ccd0af4f8835fbdc265b82461aaf0c36663f34ea" - integrity sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA== - dependencies: - shebang-regex "^3.0.0" - -shebang-regex@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-1.0.0.tgz#da42f49740c0b42db2ca9728571cb190c98efea3" - integrity sha1-2kL0l0DAtC2yypcoVxyxkMmO/qM= - -shebang-regex@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/shebang-regex/-/shebang-regex-3.0.0.tgz#ae16f1644d873ecad843b0307b143362d4c42172" - integrity sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A== - -shellwords@^0.1.1: - version "0.1.1" - resolved "https://registry.yarnpkg.com/shellwords/-/shellwords-0.1.1.tgz#d6b9181c1a48d397324c84871efbcfc73fc0654b" - integrity sha512-vFwSUfQvqybiICwZY5+DAWIPLKsWO31Q91JSKl3UYv+K5c2QRPzn0qzec6QPu1Qc9eHYItiP3NdJqNVqetYAww== - -signal-exit@^3.0.0, signal-exit@^3.0.2: - version "3.0.3" - resolved "https://registry.yarnpkg.com/signal-exit/-/signal-exit-3.0.3.tgz#a1410c2edd8f077b08b4e253c8eacfcaf057461c" - integrity sha512-VUJ49FC8U1OxwZLxIbTTrDvLnf/6TDgxZcK8wxR8zs13xpx7xbG60ndBlhNrFi2EMuFRoeDoJO7wthSLq42EjA== - -sisteransi@^1.0.4: - version "1.0.5" - resolved "https://registry.yarnpkg.com/sisteransi/-/sisteransi-1.0.5.tgz#134d681297756437cc05ca01370d3a7a571075ed" - integrity sha512-bLGGlR1QxBcynn2d5YmDX4MGjlZvy2MRBDRNHLJ8VI6l6+9FUiyTFNJ0IveOSP0bcXgVDPRcfGqA0pjaqUpfVg== - -slash@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44" - integrity 
sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A== - -slash@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634" - integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q== - -slice-ansi@^2.1.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/slice-ansi/-/slice-ansi-2.1.0.tgz#cacd7693461a637a5788d92a7dd4fba068e81636" - integrity sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ== - dependencies: - ansi-styles "^3.2.0" - astral-regex "^1.0.0" - is-fullwidth-code-point "^2.0.0" - -snapdragon-node@^2.0.1: - version "2.1.1" - resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" - integrity sha512-O27l4xaMYt/RSQ5TR3vpWCAB5Kb/czIcqUFOM/C4fYcLnbZUc1PkjTAMjof2pBWaSTwOUd6qUHcFGVGj7aIwnw== - dependencies: - define-property "^1.0.0" - isobject "^3.0.0" - snapdragon-util "^3.0.1" - -snapdragon-util@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" - integrity sha512-mbKkMdQKsjX4BAL4bRYTj21edOf8cN7XHdYUJEe+Zn99hVEYcMvKPct1IqNe7+AZPirn8BCDOQBHQZknqmKlZQ== - dependencies: - kind-of "^3.2.0" - -snapdragon@^0.8.1: - version "0.8.2" - resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" - integrity sha512-FtyOnWN/wCHTVXOMwvSv26d+ko5vWlIDD6zoUJ7LW8vh+ZBC8QdljveRP+crNrtBwioEUWy/4dMtbBjA4ioNlg== - dependencies: - base "^0.11.1" - debug "^2.2.0" - define-property "^0.2.5" - extend-shallow "^2.0.1" - map-cache "^0.2.2" - source-map "^0.5.6" - source-map-resolve "^0.5.0" - use "^3.1.0" - -source-map-resolve@^0.5.0: - version "0.5.3" - resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.3.tgz#190866bece7553e1f8f267a2ee82c606b5509a1a" - integrity sha512-Htz+RnsXWk5+P2slx5Jh3Q66vhQj1Cllm0zvnaY98+NFx+Dv2CF/f5O/t8x+KaNdrdIAsruNzoh/KpialbqAnw== - dependencies: - atob "^2.1.2" - decode-uri-component "^0.2.0" - resolve-url "^0.2.1" - source-map-url "^0.4.0" - urix "^0.1.0" - -source-map-support@^0.5.16, source-map-support@^0.5.6: - version "0.5.19" - resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.19.tgz#a98b62f86dcaf4f67399648c085291ab9e8fed61" - integrity sha512-Wonm7zOCIJzBGQdB+thsPar0kYuCIzYvxZwlBa87yi/Mdjv7Tip2cyVbLj5o0cFPN4EVkuTwb3GDDyUx2DGnGw== - dependencies: - buffer-from "^1.0.0" - source-map "^0.6.0" - -source-map-url@^0.4.0: - version "0.4.0" - resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" - integrity sha1-PpNdfd1zYxuXZZlW1VEo6HtQhKM= - -source-map@^0.5.0, source-map@^0.5.6: - version "0.5.7" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" - integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= - -source-map@^0.6.0, source-map@^0.6.1, source-map@~0.6.1: - version "0.6.1" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.6.1.tgz#74722af32e9614e9c287a8d0bbde48b5e2f1a263" - integrity sha512-UjgapumWlbMhkBgzT7Ykc5YXUT46F0iKu8SGXq0bcwP5dz/h0Plj6enJqjz1Zbq2l5WaqYnrVbwWOWMyF3F47g== - -source-map@^0.7.3: - version "0.7.3" - resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.7.3.tgz#5302f8169031735226544092e64981f751750383" - 
integrity sha512-CkCj6giN3S+n9qrYiBTX5gystlENnRW5jZeNLHpe6aue+SrHcG5VYwujhW9s4dY31mEGsxBDrHR6oI69fTXsaQ== - -spdx-correct@^3.0.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/spdx-correct/-/spdx-correct-3.1.1.tgz#dece81ac9c1e6713e5f7d1b6f17d468fa53d89a9" - integrity sha512-cOYcUWwhCuHCXi49RhFRCyJEK3iPj1Ziz9DpViV3tbZOwXD49QzIN3MpOLJNxh2qwq2lJJZaKMVw9qNi4jTC0w== - dependencies: - spdx-expression-parse "^3.0.0" - spdx-license-ids "^3.0.0" - -spdx-exceptions@^2.1.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d" - integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A== - -spdx-expression-parse@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/spdx-expression-parse/-/spdx-expression-parse-3.0.1.tgz#cf70f50482eefdc98e3ce0a6833e4a53ceeba679" - integrity sha512-cbqHunsQWnJNE6KhVSMsMeH5H/L9EpymbzqTQ3uLwNCLZ1Q481oWaofqH7nO6V07xlXwY6PhQdQ2IedWx/ZK4Q== - dependencies: - spdx-exceptions "^2.1.0" - spdx-license-ids "^3.0.0" - -spdx-license-ids@^3.0.0: - version "3.0.5" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.5.tgz#3694b5804567a458d3c8045842a6358632f62654" - integrity sha512-J+FWzZoynJEXGphVIS+XEh3kFSjZX/1i9gFBaWQcB+/tmpe2qUsSBABpcxqxnAxFdiUFEgAX1bjYGQvIZmoz9Q== - -split-string@^3.0.1, split-string@^3.0.2: - version "3.1.0" - resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" - integrity sha512-NzNVhJDYpwceVVii8/Hu6DKfD2G+NrQHlS/V/qgv763EYudVwEcMQNxd2lh+0VrUByXN/oJkl5grOhYWvQUYiw== - dependencies: - extend-shallow "^3.0.0" - -sprintf-js@~1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" - integrity sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw= - -sshpk@^1.7.0: - version "1.16.1" - resolved "https://registry.yarnpkg.com/sshpk/-/sshpk-1.16.1.tgz#fb661c0bef29b39db40769ee39fa70093d6f6877" - integrity sha512-HXXqVUq7+pcKeLqqZj6mHFUMvXtOJt1uoUx09pFW6011inTMxqI8BA8PM95myrIyyKwdnzjdFjLiE6KBPVtJIg== - dependencies: - asn1 "~0.2.3" - assert-plus "^1.0.0" - bcrypt-pbkdf "^1.0.0" - dashdash "^1.12.0" - ecc-jsbn "~0.1.1" - getpass "^0.1.1" - jsbn "~0.1.0" - safer-buffer "^2.0.2" - tweetnacl "~0.14.0" - -stack-utils@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-1.0.2.tgz#33eba3897788558bebfc2db059dc158ec36cebb8" - integrity sha512-MTX+MeG5U994cazkjd/9KNAapsHnibjMLnfXodlkXw76JEea0UiNzrqidzo1emMwk7w5Qhc9jd4Bn9TBb1MFwA== - -static-extend@^0.1.1: - version "0.1.2" - resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" - integrity sha1-YICcOcv/VTNyJv1eC1IPNB8ftcY= - dependencies: - define-property "^0.2.5" - object-copy "^0.1.0" - -"statuses@>= 1.5.0 < 2", statuses@~1.5.0: - version "1.5.0" - resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.5.0.tgz#161c7dac177659fd9811f43771fa99381478628c" - integrity sha1-Fhx9rBd2Wf2YEfQ3cfqZOBR4Yow= - -stealthy-require@^1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/stealthy-require/-/stealthy-require-1.1.1.tgz#35b09875b4ff49f26a777e509b3090a3226bf24b" - integrity sha1-NbCYdbT/SfJqd35QmzCQoyJr8ks= - -string-length@^3.1.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/string-length/-/string-length-3.1.0.tgz#107ef8c23456e187a8abd4a61162ff4ac6e25837" - integrity 
sha512-Ttp5YvkGm5v9Ijagtaz1BnN+k9ObpvS0eIBblPMp2YWL8FBmi9qblQ9fexc2k/CXFgrTIteU3jAw3payCnwSTA== - dependencies: - astral-regex "^1.0.0" - strip-ansi "^5.2.0" - -string-width@^3.0.0: - version "3.1.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-3.1.0.tgz#22767be21b62af1081574306f69ac51b62203961" - integrity sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w== - dependencies: - emoji-regex "^7.0.1" - is-fullwidth-code-point "^2.0.0" - strip-ansi "^5.1.0" - -string-width@^4.1.0, string-width@^4.2.0: - version "4.2.0" - resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5" - integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg== - dependencies: - emoji-regex "^8.0.0" - is-fullwidth-code-point "^3.0.0" - strip-ansi "^6.0.0" - -string.prototype.trimend@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.1.tgz#85812a6b847ac002270f5808146064c995fb6913" - integrity sha512-LRPxFUaTtpqYsTeNKaFOw3R4bxIzWOnbQ837QfBylo8jIxtcbK/A/sMV7Q+OAV/vWo+7s25pOE10KYSjaSO06g== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.5" - -string.prototype.trimstart@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.1.tgz#14af6d9f34b053f7cfc89b72f8f2ee14b9039a54" - integrity sha512-XxZn+QpvrBI1FOcg6dIpxUPgWCPuNXvMD72aaRaUQv1eD4e/Qy8i/hFTe0BUmD60p/QA6bh1avmuPTfNjqVWRw== - dependencies: - define-properties "^1.1.3" - es-abstract "^1.17.5" - -string_decoder@~1.1.1: - version "1.1.1" - resolved "https://registry.yarnpkg.com/string_decoder/-/string_decoder-1.1.1.tgz#9cf1611ba62685d7030ae9e4ba34149c3af03fc8" - integrity sha512-n/ShnvDi6FHbbVfviro+WojiFzv+s8MPMHBczVePfUpDJLwoLT0ht1l4YwBCbi8pJAveEEdnkHyPyTP/mzRfwg== - dependencies: - safe-buffer "~5.1.0" - -strip-ansi@^5.1.0, strip-ansi@^5.2.0: - version "5.2.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-5.2.0.tgz#8c9a536feb6afc962bdfa5b104a5091c1ad9c0ae" - integrity sha512-DuRs1gKbBqsMKIZlrffwlug8MHkcnpjs5VPmL1PAh+mA30U0DTotfDZ0d2UUsXpPmPmMMJ6W773MaA3J+lbiWA== - dependencies: - ansi-regex "^4.1.0" - -strip-ansi@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/strip-ansi/-/strip-ansi-6.0.0.tgz#0b1571dd7669ccd4f3e06e14ef1eed26225ae532" - integrity sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w== - dependencies: - ansi-regex "^5.0.0" - -strip-bom@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-4.0.0.tgz#9c3505c1db45bcedca3d9cf7a16f5c5aa3901878" - integrity sha512-3xurFv5tEgii33Zi8Jtp55wEIILR9eh34FAW00PZf+JnSsTmV/ioewSgQl97JHvgjoRGwPShsWm+IdrxB35d0w== - -strip-eof@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" - integrity sha1-u0P/VZim6wXYm1n80SnJgzE2Br8= - -strip-final-newline@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/strip-final-newline/-/strip-final-newline-2.0.0.tgz#89b852fb2fcbe936f6f4b3187afb0a12c1ab58ad" - integrity sha512-BrpvfNAE3dcvq7ll3xVumzjKjZQ5tI1sEUIKr3Uoks0XUl45St3FlatVqef9prk4jRDzhW6WZg+3bk93y6pLjA== - -strip-json-comments@^3.0.1: - version "3.1.1" - resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.1.tgz#31f1281b3832630434831c310c01cccda8cbe006" - 
integrity sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig== - -supports-color@^5.3.0: - version "5.5.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" - integrity sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow== - dependencies: - has-flag "^3.0.0" - -supports-color@^7.0.0, supports-color@^7.1.0: - version "7.2.0" - resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da" - integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw== - dependencies: - has-flag "^4.0.0" - -supports-hyperlinks@^2.0.0: - version "2.1.0" - resolved "https://registry.yarnpkg.com/supports-hyperlinks/-/supports-hyperlinks-2.1.0.tgz#f663df252af5f37c5d49bbd7eeefa9e0b9e59e47" - integrity sha512-zoE5/e+dnEijk6ASB6/qrK+oYdm2do1hjoLWrqUC/8WEIW1gbxFcKuBof7sW8ArN6e+AYvsE8HBGiVRWL/F5CA== - dependencies: - has-flag "^4.0.0" - supports-color "^7.0.0" - -symbol-tree@^3.2.2: - version "3.2.4" - resolved "https://registry.yarnpkg.com/symbol-tree/-/symbol-tree-3.2.4.tgz#430637d248ba77e078883951fb9aa0eed7c63fa2" - integrity sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw== - -table@^5.2.3: - version "5.4.6" - resolved "https://registry.yarnpkg.com/table/-/table-5.4.6.tgz#1292d19500ce3f86053b05f0e8e7e4a3bb21079e" - integrity sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug== - dependencies: - ajv "^6.10.2" - lodash "^4.17.14" - slice-ansi "^2.1.0" - string-width "^3.0.0" - -terminal-link@^2.0.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/terminal-link/-/terminal-link-2.1.1.tgz#14a64a27ab3c0df933ea546fba55f2d078edc994" - integrity sha512-un0FmiRUQNr5PJqy9kP7c40F5BOfpGlYTrxonDChEZB7pzZxRNp/bt+ymiy9/npwXya9KH99nJ/GXFIiUkYGFQ== - dependencies: - ansi-escapes "^4.2.1" - supports-hyperlinks "^2.0.0" - -test-exclude@^6.0.0: - version "6.0.0" - resolved "https://registry.yarnpkg.com/test-exclude/-/test-exclude-6.0.0.tgz#04a8698661d805ea6fa293b6cb9e63ac044ef15e" - integrity sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w== - dependencies: - "@istanbuljs/schema" "^0.1.2" - glob "^7.1.4" - minimatch "^3.0.4" - -text-table@^0.2.0: - version "0.2.0" - resolved "https://registry.yarnpkg.com/text-table/-/text-table-0.2.0.tgz#7f5ee823ae805207c00af2df4a84ec3fcfa570b4" - integrity sha1-f17oI66AUgfACvLfSoTsP8+lcLQ= - -throat@^5.0.0: - version "5.0.0" - resolved "https://registry.yarnpkg.com/throat/-/throat-5.0.0.tgz#c5199235803aad18754a667d659b5e72ce16764b" - integrity sha512-fcwX4mndzpLQKBS1DVYhGAcYaYt7vsHNIvQV+WXMvnow5cgjPphq5CaayLaGsjRdSCKZFNGt7/GYAuXaNOiYCA== - -through@^2.3.6: - version "2.3.8" - resolved "https://registry.yarnpkg.com/through/-/through-2.3.8.tgz#0dd4c9ffaabc357960b1b724115d7e0e86a2e1f5" - integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= - -tmp@^0.0.33: - version "0.0.33" - resolved "https://registry.yarnpkg.com/tmp/-/tmp-0.0.33.tgz#6d34335889768d21b2bcda0aa277ced3b1bfadf9" - integrity sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw== - dependencies: - os-tmpdir "~1.0.2" - -tmpl@1.0.x: - version "1.0.4" - resolved "https://registry.yarnpkg.com/tmpl/-/tmpl-1.0.4.tgz#23640dd7b42d00433911140820e5cf440e521dd1" - integrity sha1-I2QN17QtAEM5ERQIIOXPRA5SHdE= - 
-to-fast-properties@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/to-fast-properties/-/to-fast-properties-2.0.0.tgz#dc5e698cbd079265bc73e0377681a4e4e83f616e" - integrity sha1-3F5pjL0HkmW8c+A3doGk5Og/YW4= - -to-object-path@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" - integrity sha1-KXWIt7Dn4KwI4E5nL4XB9JmeF68= - dependencies: - kind-of "^3.0.2" - -to-regex-range@^2.1.0: - version "2.1.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" - integrity sha1-fIDBe53+vlmeJzZ+DU3VWQFB2zg= - dependencies: - is-number "^3.0.0" - repeat-string "^1.6.1" - -to-regex-range@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4" - integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ== - dependencies: - is-number "^7.0.0" - -to-regex@^3.0.1, to-regex@^3.0.2: - version "3.0.2" - resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" - integrity sha512-FWtleNAtZ/Ki2qtqej2CXTOayOH9bHDQF+Q48VpWyDXjbYxA4Yz8iDB31zXOBUlOHHKidDbqGVrTUvQMPmBGBw== - dependencies: - define-property "^2.0.2" - extend-shallow "^3.0.2" - regex-not "^1.0.2" - safe-regex "^1.1.0" - -toidentifier@1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/toidentifier/-/toidentifier-1.0.0.tgz#7e1be3470f1e77948bc43d94a3c8f4d7752ba553" - integrity sha512-yaOH/Pk/VEhBWWTlhI+qXxDFXlejDGcQipMlyxda9nthulaxLZUNcUqFxokp0vcYnvteJln5FNQDRrxj3YcbVw== - -tough-cookie@^2.3.3, tough-cookie@~2.5.0: - version "2.5.0" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.5.0.tgz#cd9fb2a0aa1d5a12b473bd9fb96fa3dcff65ade2" - integrity sha512-nlLsUzgm1kfLXSXfRZMc1KLAugd4hqJHDTvc2hDIwS3mZAfMEuMbc03SujMF+GEcpaX/qboeycw6iO8JwVv2+g== - dependencies: - psl "^1.1.28" - punycode "^2.1.1" - -tough-cookie@^3.0.1: - version "3.0.1" - resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-3.0.1.tgz#9df4f57e739c26930a018184887f4adb7dca73b2" - integrity sha512-yQyJ0u4pZsv9D4clxO69OEjLWYw+jbgspjTue4lTQZLfV0c5l1VmK2y1JK8E9ahdpltPOaAThPcp5nKPUgSnsg== - dependencies: - ip-regex "^2.1.0" - psl "^1.1.28" - punycode "^2.1.1" - -tr46@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/tr46/-/tr46-1.0.1.tgz#a8b13fd6bfd2489519674ccde55ba3693b706d09" - integrity sha1-qLE/1r/SSJUZZ0zN5VujaTtwbQk= - dependencies: - punycode "^2.1.0" - -tslib@^1.9.0: - version "1.13.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.13.0.tgz#c881e13cc7015894ed914862d276436fa9a47043" - integrity sha512-i/6DQjL8Xf3be4K/E6Wgpekn5Qasl1usyw++dAA35Ue5orEn65VIxOA+YvNNl9HV3qv70T7CNwjODHZrLwvd1Q== - -tunnel-agent@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/tunnel-agent/-/tunnel-agent-0.6.0.tgz#27a5dea06b36b04a0a9966774b290868f0fc40fd" - integrity sha1-J6XeoGs2sEoKmWZ3SykIaPD8QP0= - dependencies: - safe-buffer "^5.0.1" - -tweetnacl@^0.14.3, tweetnacl@~0.14.0: - version "0.14.5" - resolved "https://registry.yarnpkg.com/tweetnacl/-/tweetnacl-0.14.5.tgz#5ae68177f192d4456269d108afa93ff8743f4f64" - integrity sha1-WuaBd/GS1EViadEIr6k/+HQ/T2Q= - -type-check@~0.3.2: - version "0.3.2" - resolved "https://registry.yarnpkg.com/type-check/-/type-check-0.3.2.tgz#5884cab512cf1d355e3fb784f30804b2b520db72" - integrity 
sha1-WITKtRLPHTVeP7eE8wgEsrUg23I= - dependencies: - prelude-ls "~1.1.2" - -type-detect@4.0.8: - version "4.0.8" - resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c" - integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g== - -type-fest@^0.11.0: - version "0.11.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.11.0.tgz#97abf0872310fed88a5c466b25681576145e33f1" - integrity sha512-OdjXJxnCN1AvyLSzeKIgXTXxV+99ZuXl3Hpo9XpJAv9MBcHrrJOQ5kV7ypXOuQie+AmWG25hLbiKdwYTifzcfQ== - -type-fest@^0.6.0: - version "0.6.0" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.6.0.tgz#8d2a2370d3df886eb5c90ada1c5bf6188acf838b" - integrity sha512-q+MB8nYR1KDLrgr4G5yemftpMC7/QLqVndBmEEdqzmNj5dcFOO4Oo8qlwZE3ULT3+Zim1F8Kq4cBnikNhlCMlg== - -type-fest@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.8.1.tgz#09e249ebde851d3b1e48d27c105444667f17b83d" - integrity sha512-4dbzIzqvjtgiM5rw1k5rEHtBANKmdudhGyBEajN01fEyhaAIhsoKNy6y7+IN93IfpFtwY9iqi7kD+xwKhQsNJA== - -type-is@~1.6.17, type-is@~1.6.18: - version "1.6.18" - resolved "https://registry.yarnpkg.com/type-is/-/type-is-1.6.18.tgz#4e552cd05df09467dcbc4ef739de89f2cf37c131" - integrity sha512-TkRKr9sUTxEH8MdfuCSP7VizJyzRNMjj2J2do2Jr3Kym598JVdEksuzPQCnlFPW4ky9Q+iA+ma9BGm06XQBy8g== - dependencies: - media-typer "0.3.0" - mime-types "~2.1.24" - -typedarray-to-buffer@^3.1.5: - version "3.1.5" - resolved "https://registry.yarnpkg.com/typedarray-to-buffer/-/typedarray-to-buffer-3.1.5.tgz#a97ee7a9ff42691b9f783ff1bc5112fe3fca9080" - integrity sha512-zdu8XMNEDepKKR+XYOXAVPtWui0ly0NtohUscw+UmaHiAWT8hrV1rr//H6V+0DvJ3OQ19S979M0laLfX8rm82Q== - dependencies: - is-typedarray "^1.0.0" - -unicode-canonical-property-names-ecmascript@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-1.0.4.tgz#2619800c4c825800efdd8343af7dd9933cbe2818" - integrity sha512-jDrNnXWHd4oHiTZnx/ZG7gtUTVp+gCcTTKr8L0HjlwphROEW3+Him+IpvC+xcJEFegapiMZyZe02CyuOnRmbnQ== - -unicode-match-property-ecmascript@^1.0.4: - version "1.0.4" - resolved "https://registry.yarnpkg.com/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-1.0.4.tgz#8ed2a32569961bce9227d09cd3ffbb8fed5f020c" - integrity sha512-L4Qoh15vTfntsn4P1zqnHulG0LdXgjSO035fEpdtp6YxXhMT51Q6vgM5lYdG/5X3MjS+k/Y9Xw4SFCY9IkR0rg== - dependencies: - unicode-canonical-property-names-ecmascript "^1.0.4" - unicode-property-aliases-ecmascript "^1.0.4" - -unicode-match-property-value-ecmascript@^1.2.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-1.2.0.tgz#0d91f600eeeb3096aa962b1d6fc88876e64ea531" - integrity sha512-wjuQHGQVofmSJv1uVISKLE5zO2rNGzM/KCYZch/QQvez7C1hUhBIuZ701fYXExuufJFMPhv2SyL8CyoIfMLbIQ== - -unicode-property-aliases-ecmascript@^1.0.4: - version "1.1.0" - resolved "https://registry.yarnpkg.com/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-1.1.0.tgz#dd57a99f6207bedff4628abefb94c50db941c8f4" - integrity sha512-PqSoPh/pWetQ2phoj5RLiaqIk4kCNwoV3CI+LfGmWLKI3rE3kl1h59XpX2BjgDrmbxD9ARtQobPGU1SguCYuQg== - -union-value@^1.0.0: - version "1.0.1" - resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.1.tgz#0b6fe7b835aecda61c6ea4d4f02c14221e109847" - integrity 
sha512-tJfXmxMeWYnczCVs7XAEvIV7ieppALdyepWMkHkwciRpZraG/xwT+s2JN8+pr1+8jCRf80FFzvr+MpQeeoF4Xg== - dependencies: - arr-union "^3.1.0" - get-value "^2.0.6" - is-extendable "^0.1.1" - set-value "^2.0.1" - -unpipe@1.0.0, unpipe@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" - integrity sha1-sr9O6FFKrmFltIF4KdIbLvSZBOw= - -unset-value@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" - integrity sha1-g3aHP30jNRef+x5vw6jtDfyKtVk= - dependencies: - has-value "^0.3.1" - isobject "^3.0.0" - -upath@^1.1.1: - version "1.2.0" - resolved "https://registry.yarnpkg.com/upath/-/upath-1.2.0.tgz#8f66dbcd55a883acdae4408af8b035a5044c1894" - integrity sha512-aZwGpamFO61g3OlfT7OQCHqhGnW43ieH9WZeP7QxN/G/jS4jfqUkZxoryvJgVPEcrl5NL/ggHsSmLMHuH64Lhg== - -uri-js@^4.2.2: - version "4.4.0" - resolved "https://registry.yarnpkg.com/uri-js/-/uri-js-4.4.0.tgz#aa714261de793e8a82347a7bcc9ce74e86f28602" - integrity sha512-B0yRTzYdUCCn9n+F4+Gh4yIDtMQcaJsmYBDsTSG8g/OejKBodLQ2IHfN3bM7jUsRXndopT7OIXWdYqc1fjmV6g== - dependencies: - punycode "^2.1.0" - -urix@^0.1.0: - version "0.1.0" - resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" - integrity sha1-2pN/emLiH+wf0Y1Js1wpNQZ6bHI= - -use@^3.1.0: - version "3.1.1" - resolved "https://registry.yarnpkg.com/use/-/use-3.1.1.tgz#d50c8cac79a19fbc20f2911f56eb973f4e10070f" - integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== - -util-deprecate@~1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" - integrity sha1-RQ1Nyfpw3nMnYvvS1KKJgUGaDM8= - -utils-merge@1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/utils-merge/-/utils-merge-1.0.1.tgz#9f95710f50a267947b2ccc124741c1028427e713" - integrity sha1-n5VxD1CiZ5R7LMwSR0HBAoQn5xM= - -uuid@^3.3.2: - version "3.4.0" - resolved "https://registry.yarnpkg.com/uuid/-/uuid-3.4.0.tgz#b23e4358afa8a202fe7a100af1f5f883f02007ee" - integrity sha512-HjSDRw6gZE5JMggctHBcjVak08+KEVhSIiDzFnT9S9aegmp85S/bReBVTb4QTFaRNptJ9kuYaNhnbNEOkbKb/A== - -v8-compile-cache@^2.0.3: - version "2.1.1" - resolved "https://registry.yarnpkg.com/v8-compile-cache/-/v8-compile-cache-2.1.1.tgz#54bc3cdd43317bca91e35dcaf305b1a7237de745" - integrity sha512-8OQ9CL+VWyt3JStj7HX7/ciTL2V3Rl1Wf5OL+SNTm0yK1KvtReVulksyeRnCANHHuUxHlQig+JJDlUhBt1NQDQ== - -v8-to-istanbul@^4.1.3: - version "4.1.4" - resolved "https://registry.yarnpkg.com/v8-to-istanbul/-/v8-to-istanbul-4.1.4.tgz#b97936f21c0e2d9996d4985e5c5156e9d4e49cd6" - integrity sha512-Rw6vJHj1mbdK8edjR7+zuJrpDtKIgNdAvTSAcpYfgMIw+u2dPDntD3dgN4XQFLU2/fvFQdzj+EeSGfd/jnY5fQ== - dependencies: - "@types/istanbul-lib-coverage" "^2.0.1" - convert-source-map "^1.6.0" - source-map "^0.7.3" - -v8flags@^3.1.1: - version "3.2.0" - resolved "https://registry.yarnpkg.com/v8flags/-/v8flags-3.2.0.tgz#b243e3b4dfd731fa774e7492128109a0fe66d656" - integrity sha512-mH8etigqMfiGWdeXpaaqGfs6BndypxusHHcv2qSHyZkGEznCd/qAXCWWRzeowtL54147cktFOC4P5y+kl8d8Jg== - dependencies: - homedir-polyfill "^1.0.1" - -validate-npm-package-license@^3.0.1: - version "3.0.4" - resolved "https://registry.yarnpkg.com/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz#fc91f6b9c7ba15c857f4cb2c5defeec39d4f410a" - integrity 
sha512-DpKm2Ui/xN7/HQKCtpZxoRWBhZ9Z0kqtygG8XCgNQ8ZlDnxuQmWhj566j8fN4Cu3/JmbhsDo7fcAJq4s9h27Ew== - dependencies: - spdx-correct "^3.0.0" - spdx-expression-parse "^3.0.0" - -vary@^1, vary@~1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/vary/-/vary-1.1.2.tgz#2299f02c6ded30d4a5961b0b9f74524a18f634fc" - integrity sha1-IpnwLG3tMNSllhsLn3RSShj2NPw= - -verror@1.10.0: - version "1.10.0" - resolved "https://registry.yarnpkg.com/verror/-/verror-1.10.0.tgz#3a105ca17053af55d6e270c1f8288682e18da400" - integrity sha1-OhBcoXBTr1XW4nDB+CiGguGNpAA= - dependencies: - assert-plus "^1.0.0" - core-util-is "1.0.2" - extsprintf "^1.2.0" - -w3c-hr-time@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/w3c-hr-time/-/w3c-hr-time-1.0.2.tgz#0a89cdf5cc15822df9c360543676963e0cc308cd" - integrity sha512-z8P5DvDNjKDoFIHK7q8r8lackT6l+jo/Ye3HOle7l9nICP9lf1Ci25fy9vHd0JOWewkIFzXIEig3TdKT7JQ5fQ== - dependencies: - browser-process-hrtime "^1.0.0" - -w3c-xmlserializer@^1.1.2: - version "1.1.2" - resolved "https://registry.yarnpkg.com/w3c-xmlserializer/-/w3c-xmlserializer-1.1.2.tgz#30485ca7d70a6fd052420a3d12fd90e6339ce794" - integrity sha512-p10l/ayESzrBMYWRID6xbuCKh2Fp77+sA0doRuGn4tTIMrrZVeqfpKjXHY+oDh3K4nLdPgNwMTVP6Vp4pvqbNg== - dependencies: - domexception "^1.0.1" - webidl-conversions "^4.0.2" - xml-name-validator "^3.0.0" - -walker@^1.0.7, walker@~1.0.5: - version "1.0.7" - resolved "https://registry.yarnpkg.com/walker/-/walker-1.0.7.tgz#2f7f9b8fd10d677262b18a884e28d19618e028fb" - integrity sha1-L3+bj9ENZ3JisYqITijRlhjgKPs= - dependencies: - makeerror "1.0.x" - -webidl-conversions@^4.0.2: - version "4.0.2" - resolved "https://registry.yarnpkg.com/webidl-conversions/-/webidl-conversions-4.0.2.tgz#a855980b1f0b6b359ba1d5d9fb39ae941faa63ad" - integrity sha512-YQ+BmxuTgd6UXZW3+ICGfyqRyHXVlD5GtQr5+qjiNW7bF0cqrzX500HVXPBOvgXb5YnzDd+h0zqyv61KUD7+Sg== - -whatwg-encoding@^1.0.1, whatwg-encoding@^1.0.5: - version "1.0.5" - resolved "https://registry.yarnpkg.com/whatwg-encoding/-/whatwg-encoding-1.0.5.tgz#5abacf777c32166a51d085d6b4f3e7d27113ddb0" - integrity sha512-b5lim54JOPN9HtzvK9HFXvBma/rnfFeqsic0hSpjtDbVxR3dJKLc+KB4V6GgiGOvl7CY/KNh8rxSo9DKQrnUEw== - dependencies: - iconv-lite "0.4.24" - -whatwg-mimetype@^2.2.0, whatwg-mimetype@^2.3.0: - version "2.3.0" - resolved "https://registry.yarnpkg.com/whatwg-mimetype/-/whatwg-mimetype-2.3.0.tgz#3d4b1e0312d2079879f826aff18dbeeca5960fbf" - integrity sha512-M4yMwr6mAnQz76TbJm914+gPpB/nCwvZbJU28cUD6dR004SAxDLOOSUaB1JDRqLtaOV/vi0IC5lEAGFgrjGv/g== - -whatwg-url@^7.0.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-7.1.0.tgz#c2c492f1eca612988efd3d2266be1b9fc6170d06" - integrity sha512-WUu7Rg1DroM7oQvGWfOiAK21n74Gg+T4elXEQYkOhtyLeWiJFoOGLXPKI/9gzIie9CtwVLm8wtw6YJdKyxSjeg== - dependencies: - lodash.sortby "^4.7.0" - tr46 "^1.0.1" - webidl-conversions "^4.0.2" - -which-module@^2.0.0: - version "2.0.0" - resolved "https://registry.yarnpkg.com/which-module/-/which-module-2.0.0.tgz#d9ef07dce77b9902b8a3a8fa4b31c3e3f7e6e87a" - integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= - -which@^1.2.9, which@^1.3.1: - version "1.3.1" - resolved "https://registry.yarnpkg.com/which/-/which-1.3.1.tgz#a45043d54f5805316da8d62f9f50918d3da70b0a" - integrity sha512-HxJdYWq1MTIQbJ3nw0cqssHoTNU267KlrDuGZ1WYlxDStUtKUhOaJmh112/TZmHxxUfuJqPXSOm7tDyas0OSIQ== - dependencies: - isexe "^2.0.0" - -which@^2.0.1, which@^2.0.2: - version "2.0.2" - resolved "https://registry.yarnpkg.com/which/-/which-2.0.2.tgz#7c6a8dd0a636a0327e10b59c9286eee93f3f51b1" - integrity 
sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA== - dependencies: - isexe "^2.0.0" - -word-wrap@~1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c" - integrity sha512-Hz/mrNwitNRh/HUAtM/VT/5VH+ygD6DV7mYKZAtHOrbs8U7lvPS6xf7EJKMF0uW1KJCl0H701g3ZGus+muE5vQ== - -wrap-ansi@^6.2.0: - version "6.2.0" - resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53" - integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA== - dependencies: - ansi-styles "^4.0.0" - string-width "^4.1.0" - strip-ansi "^6.0.0" - -wrappy@1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" - integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= - -write-file-atomic@^3.0.0: - version "3.0.3" - resolved "https://registry.yarnpkg.com/write-file-atomic/-/write-file-atomic-3.0.3.tgz#56bd5c5a5c70481cd19c571bd39ab965a5de56e8" - integrity sha512-AvHcyZ5JnSfq3ioSyjrBkH9yW4m7Ayk8/9My/DD9onKeu/94fwrMocemO2QAJFAlnnDN+ZDS+ZjAR5ua1/PV/Q== - dependencies: - imurmurhash "^0.1.4" - is-typedarray "^1.0.0" - signal-exit "^3.0.2" - typedarray-to-buffer "^3.1.5" - -write@1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/write/-/write-1.0.3.tgz#0800e14523b923a387e415123c865616aae0f5c3" - integrity sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig== - dependencies: - mkdirp "^0.5.1" - -ws@^7.0.0: - version "7.3.1" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.3.1.tgz#d0547bf67f7ce4f12a72dfe31262c68d7dc551c8" - integrity sha512-D3RuNkynyHmEJIpD2qrgVkc9DQ23OrN/moAwZX4L8DfvszsJxpjQuUq3LMx6HoYji9fbIOBY18XWBsAux1ZZUA== - -xml-name-validator@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/xml-name-validator/-/xml-name-validator-3.0.0.tgz#6ae73e06de4d8c6e47f9fb181f78d648ad457c6a" - integrity sha512-A5CUptxDsvxKJEU3yO6DuWBSJz/qizqzJKOMIfUJHETbBw/sFaDxgd6fxm1ewUaM0jZ444Fc5vC5ROYurg/4Pw== - -xmlchars@^2.1.1: - version "2.2.0" - resolved "https://registry.yarnpkg.com/xmlchars/-/xmlchars-2.2.0.tgz#060fe1bcb7f9c76fe2a17db86a9bc3ab894210cb" - integrity sha512-JZnDKK8B0RCDw84FNdDAIpZK+JuJw+s7Lz8nksI7SIuU3UXJJslUthsi+uWBUYOwPFwW7W7PRLRfUKpxjtjFCw== - -y18n@^4.0.0: - version "4.0.0" - resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b" - integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w== - -yargs-parser@^18.1.2: - version "18.1.3" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0" - integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== - dependencies: - camelcase "^5.0.0" - decamelize "^1.2.0" - -yargs@^15.3.1: - version "15.4.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.4.1.tgz#0d87a16de01aee9d8bec2bfbf74f67851730f4f8" - integrity sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== - dependencies: - cliui "^6.0.0" - decamelize "^1.2.0" - find-up "^4.1.0" - get-caller-file "^2.0.1" - require-directory "^2.1.1" - require-main-filename "^2.0.0" - set-blocking "^2.0.0" - string-width "^4.2.0" - which-module "^2.0.0" - y18n "^4.0.0" - yargs-parser "^18.1.2" diff --git 
a/packages/medusa-plugin-algolia/CHANGELOG.md b/packages/medusa-plugin-algolia/CHANGELOG.md index 3d3dfddcbe..51677c32aa 100644 --- a/packages/medusa-plugin-algolia/CHANGELOG.md +++ b/packages/medusa-plugin-algolia/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [0.2.3](https://github.com/medusajs/medusa/compare/medusa-plugin-algolia@0.2.2...medusa-plugin-algolia@0.2.3) (2022-07-05) + +**Note:** Version bump only for package medusa-plugin-algolia + + + + + ## [0.2.2](https://github.com/medusajs/medusa/compare/medusa-plugin-algolia@0.2.0...medusa-plugin-algolia@0.2.2) (2022-06-19) **Note:** Version bump only for package medusa-plugin-algolia diff --git a/packages/medusa-plugin-algolia/package.json b/packages/medusa-plugin-algolia/package.json index 62e95ace9e..e297904dbe 100644 --- a/packages/medusa-plugin-algolia/package.json +++ b/packages/medusa-plugin-algolia/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-algolia", - "version": "0.2.2", + "version": "0.2.3", "description": "Search support for algolia", "main": "index.js", "repository": { @@ -25,7 +25,7 @@ "body-parser": "^1.19.0", "lodash": "^4.17.21", "medusa-core-utils": "^1.1.31", - "medusa-interfaces": "^1.3.0" + "medusa-interfaces": "^1.3.1" }, "devDependencies": { "@babel/cli": "^7.7.5", diff --git a/packages/medusa-plugin-brightpearl/CHANGELOG.md b/packages/medusa-plugin-brightpearl/CHANGELOG.md index 1d8bace5fe..e1c8152fd2 100644 --- a/packages/medusa-plugin-brightpearl/CHANGELOG.md +++ b/packages/medusa-plugin-brightpearl/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.3.1](https://github.com/medusajs/medusa/compare/medusa-plugin-brightpearl@1.3.0...medusa-plugin-brightpearl@1.3.1) (2022-07-05) + +**Note:** Version bump only for package medusa-plugin-brightpearl + + + + + # [1.3.0](https://github.com/medusajs/medusa/compare/medusa-plugin-brightpearl@1.2.1...medusa-plugin-brightpearl@1.3.0) (2022-05-01) **Note:** Version bump only for package medusa-plugin-brightpearl diff --git a/packages/medusa-plugin-brightpearl/package.json b/packages/medusa-plugin-brightpearl/package.json index 4bc2f34901..036bcc91d1 100644 --- a/packages/medusa-plugin-brightpearl/package.json +++ b/packages/medusa-plugin-brightpearl/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-brightpearl", - "version": "1.3.0", + "version": "1.3.1", "description": "Brightpearl plugin for Medusa Commerce", "main": "index.js", "repository": { @@ -27,7 +27,7 @@ "cross-env": "^7.0.2", "eslint": "^6.8.0", "jest": "^25.5.2", - "medusa-interfaces": "^1.3.0", + "medusa-interfaces": "^1.3.1", "medusa-test-utils": "^1.1.37", "prettier": "^2.0.5" }, diff --git a/packages/medusa-plugin-contentful/CHANGELOG.md b/packages/medusa-plugin-contentful/CHANGELOG.md index 19c3ba8dac..b547906cd1 100644 --- a/packages/medusa-plugin-contentful/CHANGELOG.md +++ b/packages/medusa-plugin-contentful/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [1.2.3](https://github.com/medusajs/medusa/compare/medusa-plugin-contentful@1.2.2...medusa-plugin-contentful@1.2.3) (2022-07-05) + +**Note:** Version bump only for package medusa-plugin-contentful + + + + + ## [1.2.2](https://github.com/medusajs/medusa/compare/medusa-plugin-contentful@1.2.0...medusa-plugin-contentful@1.2.2) (2022-06-19) **Note:** Version bump only for package medusa-plugin-contentful diff --git a/packages/medusa-plugin-contentful/package.json b/packages/medusa-plugin-contentful/package.json index 6ac07f21c7..cdc1f625a7 100644 --- a/packages/medusa-plugin-contentful/package.json +++ b/packages/medusa-plugin-contentful/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-contentful", - "version": "1.2.2", + "version": "1.2.3", "description": "Contentful plugin for Medusa Commerce", "main": "index.js", "repository": { diff --git a/packages/medusa-plugin-ip-lookup/CHANGELOG.md b/packages/medusa-plugin-ip-lookup/CHANGELOG.md index e3d1e75ae2..0cfe88ff2a 100644 --- a/packages/medusa-plugin-ip-lookup/CHANGELOG.md +++ b/packages/medusa-plugin-ip-lookup/CHANGELOG.md @@ -3,6 +3,17 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.2.1](https://github.com/medusajs/medusa/compare/medusa-plugin-ip-lookup@1.2.0...medusa-plugin-ip-lookup@1.2.1) (2022-07-05) + + +### Bug Fixes + +* **medusa:** Remove deps `mongoose` + `mongodb` ([#1218](https://github.com/medusajs/medusa/issues/1218)) ([c76e23e](https://github.com/medusajs/medusa/commit/c76e23e84dd8cb08c3c709f9f95c4c17b9685439)) + + + + + # [1.2.0](https://github.com/medusajs/medusa/compare/medusa-plugin-ip-lookup@1.1.19...medusa-plugin-ip-lookup@1.2.0) (2022-05-01) **Note:** Version bump only for package medusa-plugin-ip-lookup diff --git a/packages/medusa-plugin-ip-lookup/package.json b/packages/medusa-plugin-ip-lookup/package.json index dd2e5b600e..4f3166619e 100644 --- a/packages/medusa-plugin-ip-lookup/package.json +++ b/packages/medusa-plugin-ip-lookup/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-ip-lookup", - "version": "1.2.0", + "version": "1.2.1", "description": "IP lookup middleware for Medusa core", "main": "dist/index.js", "repository": { @@ -32,8 +32,7 @@ "medusa-interfaces": "1.x" }, "dependencies": { - "axios": "^0.20.0", - "mongoose": "^5.8.0" + "axios": "^0.20.0" }, "gitHead": "41a5425405aea5045a26def95c0dc00cf4a5a44d" } diff --git a/packages/medusa-plugin-mailchimp/CHANGELOG.md b/packages/medusa-plugin-mailchimp/CHANGELOG.md index f4827577b2..80cff843e5 100644 --- a/packages/medusa-plugin-mailchimp/CHANGELOG.md +++ b/packages/medusa-plugin-mailchimp/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [1.1.42](https://github.com/medusajs/medusa/compare/medusa-plugin-mailchimp@1.1.41...medusa-plugin-mailchimp@1.1.42) (2022-07-05) + +**Note:** Version bump only for package medusa-plugin-mailchimp + + + + + ## [1.1.41](https://github.com/medusajs/medusa/compare/medusa-plugin-mailchimp@1.1.39...medusa-plugin-mailchimp@1.1.41) (2022-06-19) **Note:** Version bump only for package medusa-plugin-mailchimp diff --git a/packages/medusa-plugin-mailchimp/package.json b/packages/medusa-plugin-mailchimp/package.json index e47b27832c..f1e5e3d3e4 100644 --- a/packages/medusa-plugin-mailchimp/package.json +++ b/packages/medusa-plugin-mailchimp/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-mailchimp", - "version": "1.1.41", + "version": "1.1.42", "description": "Mailchimp newsletter subscriptions", "main": "index.js", "repository": { diff --git a/packages/medusa-plugin-meilisearch/CHANGELOG.md b/packages/medusa-plugin-meilisearch/CHANGELOG.md index 0e32b29c57..24a0d25c90 100644 --- a/packages/medusa-plugin-meilisearch/CHANGELOG.md +++ b/packages/medusa-plugin-meilisearch/CHANGELOG.md @@ -3,6 +3,17 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [0.2.3](https://github.com/medusajs/medusa/compare/medusa-plugin-meilisearch@0.2.2...medusa-plugin-meilisearch@0.2.3) (2022-07-05) + + +### Bug Fixes + +* **meilisearch:** remove medusa-interfaces dependency ([#1751](https://github.com/medusajs/medusa/issues/1751)) ([f7e300e](https://github.com/medusajs/medusa/commit/f7e300e8cec082cab23626907333682e9a643238)) + + + + + ## [0.2.2](https://github.com/medusajs/medusa/compare/medusa-plugin-meilisearch@0.2.0...medusa-plugin-meilisearch@0.2.2) (2022-06-19) **Note:** Version bump only for package medusa-plugin-meilisearch diff --git a/packages/medusa-plugin-meilisearch/package.json b/packages/medusa-plugin-meilisearch/package.json index 5e2e95a69f..72a9bb9322 100644 --- a/packages/medusa-plugin-meilisearch/package.json +++ b/packages/medusa-plugin-meilisearch/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-meilisearch", - "version": "0.2.2", + "version": "0.2.3", "description": "A starter for Medusa projects.", "main": "index.js", "repository": { @@ -23,7 +23,6 @@ "body-parser": "^1.19.0", "lodash": "^4.17.21", "medusa-core-utils": "^1.1.31", - "medusa-interfaces": "^1.3.0", "meilisearch": "^0.24.0" }, "devDependencies": { diff --git a/packages/medusa-plugin-permissions/.babelrc b/packages/medusa-plugin-permissions/.babelrc deleted file mode 100644 index 301218e056..0000000000 --- a/packages/medusa-plugin-permissions/.babelrc +++ /dev/null @@ -1,13 +0,0 @@ -{ - "plugins": [ - "@babel/plugin-proposal-class-properties", - "@babel/plugin-transform-classes", - "@babel/plugin-transform-instanceof" - ], - "presets": ["@babel/preset-env"], - "env": { - "test": { - "plugins": ["@babel/plugin-transform-runtime"] - } - } -} diff --git a/packages/medusa-plugin-permissions/.gitignore b/packages/medusa-plugin-permissions/.gitignore deleted file mode 100644 index 2653074ca0..0000000000 --- a/packages/medusa-plugin-permissions/.gitignore +++ /dev/null @@ -1,14 +0,0 @@ -dist/ -node_modules/ -.DS_store -.env* -/*.js -!index.js -yarn.lock -yarn-error.log - -/api -/services -/models -/subscribers - diff --git a/packages/medusa-plugin-permissions/CHANGELOG.md b/packages/medusa-plugin-permissions/CHANGELOG.md deleted file mode 100644 index 1f255096c8..0000000000 --- a/packages/medusa-plugin-permissions/CHANGELOG.md 
+++ /dev/null @@ -1,225 +0,0 @@ -# Change Log - -All notable changes to this project will be documented in this file. -See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. - -## [1.1.37](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.36...medusa-plugin-permissions@1.1.37) (2022-01-11) - -**Note:** Version bump only for package medusa-plugin-permissions - - - - - -## [1.1.36](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.35...medusa-plugin-permissions@1.1.36) (2021-12-29) - -**Note:** Version bump only for package medusa-plugin-permissions - - - - - -## [1.1.35](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.34...medusa-plugin-permissions@1.1.35) (2021-12-17) - -**Note:** Version bump only for package medusa-plugin-permissions - - - - - -## [1.1.34](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.33...medusa-plugin-permissions@1.1.34) (2021-12-08) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.33](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.32...medusa-plugin-permissions@1.1.33) (2021-11-23) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.32](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.31...medusa-plugin-permissions@1.1.32) (2021-11-22) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.31](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.30...medusa-plugin-permissions@1.1.31) (2021-11-19) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.30](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.29...medusa-plugin-permissions@1.1.30) (2021-11-19) - -### Features - -- Typescript for API layer ([#817](https://github.com/medusajs/medusa/issues/817)) ([373532e](https://github.com/medusajs/medusa/commit/373532ecbc8196f47e71af95a8cf82a14a4b1f9e)) - -## [1.1.29](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.28...medusa-plugin-permissions@1.1.29) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.28](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.27...medusa-plugin-permissions@1.1.28) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.27](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.25...medusa-plugin-permissions@1.1.27) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.26](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.25...medusa-plugin-permissions@1.1.26) (2021-10-18) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.25](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.24...medusa-plugin-permissions@1.1.25) (2021-09-15) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.24](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.23...medusa-plugin-permissions@1.1.24) (2021-09-14) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.23](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.22...medusa-plugin-permissions@1.1.23) (2021-08-05) - -**Note:** Version bump only for package medusa-plugin-permissions - -## 
[1.1.22](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.21...medusa-plugin-permissions@1.1.22) (2021-07-26) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.21](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.19...medusa-plugin-permissions@1.1.21) (2021-07-15) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.20](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.19...medusa-plugin-permissions@1.1.20) (2021-07-15) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.19](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.18...medusa-plugin-permissions@1.1.19) (2021-07-02) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.18](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.17...medusa-plugin-permissions@1.1.18) (2021-06-22) - -### Bug Fixes - -- release assist ([668e8a7](https://github.com/medusajs/medusa/commit/668e8a740200847fc2a41c91d2979097f1392532)) - -## [1.1.17](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.16...medusa-plugin-permissions@1.1.17) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.16](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.15...medusa-plugin-permissions@1.1.16) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.15](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.14...medusa-plugin-permissions@1.1.15) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.14](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.13...medusa-plugin-permissions@1.1.14) (2021-06-09) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.13](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.12...medusa-plugin-permissions@1.1.13) (2021-06-08) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.12](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.9...medusa-plugin-permissions@1.1.12) (2021-04-28) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.11](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.10...medusa-plugin-permissions@1.1.11) (2021-04-20) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.10](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.9...medusa-plugin-permissions@1.1.10) (2021-04-20) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.9](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.8...medusa-plugin-permissions@1.1.9) (2021-04-13) - -### Bug Fixes - -- merge develop ([2982a8e](https://github.com/medusajs/medusa/commit/2982a8e682e90beb4549d969d9d3b04d78a46a2d)) -- merge develop ([a468c45](https://github.com/medusajs/medusa/commit/a468c451e82c68f41b5005a2e480057f6124aaa6)) - -## [1.1.8](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.7...medusa-plugin-permissions@1.1.8) (2021-04-13) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.7](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.6...medusa-plugin-permissions@1.1.7) (2021-03-30) - -**Note:** Version bump only for package 
medusa-plugin-permissions - -## [1.1.6](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.5...medusa-plugin-permissions@1.1.6) (2021-03-17) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.5](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.3...medusa-plugin-permissions@1.1.5) (2021-03-17) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.4](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.3...medusa-plugin-permissions@1.1.4) (2021-03-17) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.3](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.2...medusa-plugin-permissions@1.1.3) (2021-02-17) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.2](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.1...medusa-plugin-permissions@1.1.2) (2021-02-03) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.1.1](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.1.0...medusa-plugin-permissions@1.1.1) (2021-01-27) - -**Note:** Version bump only for package medusa-plugin-permissions - -# [1.1.0](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.0.13...medusa-plugin-permissions@1.1.0) (2021-01-26) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.0.13](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.0.12...medusa-plugin-permissions@1.0.13) (2020-12-17) - -**Note:** Version bump only for package medusa-plugin-permissions - -## [1.0.12](https://github.com/medusajs/medusa/compare/medusa-plugin-permissions@1.0.11...medusa-plugin-permissions@1.0.12) (2020-11-24) - -**Note:** Version bump only for package medusa-plugin-permissions - -## 1.0.11 (2020-10-19) - -## 1.0.10 (2020-09-09) - -### Bug Fixes - -- updates license ([db519fb](https://github.com/medusajs/medusa/commit/db519fbaa6f8ad02c19cbecba5d4f28ba1ee81aa)) - -## 1.0.7 (2020-09-07) - -## 1.0.1 (2020-09-05) - -## 1.0.1-beta.0 (2020-09-04) - -# 1.0.0 (2020-09-03) - -# 1.0.0-alpha.30 (2020-08-28) - -# 1.0.0-alpha.27 (2020-08-27) - -# 1.0.0-alpha.26 (2020-08-27) - -# 1.0.0-alpha.24 (2020-08-27) - -# 1.0.0-alpha.3 (2020-08-20) - -# 1.0.0-alpha.2 (2020-08-20) - -# 1.0.0-alpha.1 (2020-08-20) - -# 1.0.0-alpha.0 (2020-08-20) - -### Reverts - -- Revert "[medusa-interfaces] : Adds decorator functionality to BaseService (#39)" (#41) ([2273cc5](https://github.com/medusajs/medusa/commit/2273cc519ad4d6ae16157173aba3955d16745e1d)), closes [#39](https://github.com/medusajs/medusa/issues/39) [#41](https://github.com/medusajs/medusa/issues/41) - -## [1.0.10](https://github.com/medusajs/medusa/compare/v1.0.9...v1.0.10) (2020-09-09) - -### Bug Fixes - -- updates license ([db519fb](https://github.com/medusajs/medusa/commit/db519fbaa6f8ad02c19cbecba5d4f28ba1ee81aa)) diff --git a/packages/medusa-plugin-permissions/package.json b/packages/medusa-plugin-permissions/package.json deleted file mode 100644 index 7897223185..0000000000 --- a/packages/medusa-plugin-permissions/package.json +++ /dev/null @@ -1,40 +0,0 @@ -{ - "name": "medusa-plugin-permissions", - "version": "1.1.37", - "description": "Role permission for Medusa core", - "main": "dist/index.js", - "repository": { - "type": "git", - "url": "https://github.com/medusajs/medusa", - "directory": "packages/medusa-plugin-permissions" - }, - "scripts": { - "build": "babel src 
--out-dir . --ignore **/__tests__", - "prepare": "cross-env NODE_ENV=production npm run build", - "watch": "babel -w src --out-dir . --ignore **/__tests__", - "test": "jest" - }, - "author": "Oliver Juhl", - "license": "MIT", - "devDependencies": { - "@babel/cli": "^7.7.5", - "@babel/core": "^7.7.5", - "@babel/plugin-proposal-class-properties": "^7.7.4", - "@babel/plugin-transform-classes": "^7.9.5", - "@babel/plugin-transform-instanceof": "^7.8.3", - "@babel/plugin-transform-runtime": "^7.7.6", - "@babel/preset-env": "^7.7.5", - "@babel/runtime": "^7.7.6", - "cross-env": "^5.2.1", - "jest": "^25.5.2" - }, - "peerDependencies": { - "medusa-interfaces": "1.x" - }, - "dependencies": { - "medusa-core-utils": "^1.1.31", - "medusa-test-utils": "^1.1.37", - "mongoose": "^5.8.0" - }, - "gitHead": "c46300d58fbfd0b2dc2c02745ae143e6247e885b" -} diff --git a/packages/medusa-plugin-permissions/src/api/medusa-middleware.js b/packages/medusa-plugin-permissions/src/api/medusa-middleware.js deleted file mode 100644 index 56b624b101..0000000000 --- a/packages/medusa-plugin-permissions/src/api/medusa-middleware.js +++ /dev/null @@ -1,16 +0,0 @@ -// This middleware is injected to ensure authorization of requests -// Since this middleware uses the user object on the request, this should be -// injected after authentication in the core middleware, hence we name -// the middleware postAuth. -export default { - postAuthentication: () => { - return (err, req, res, next) => { - const permissionService = req.scope.resolve("permissionService") - if (permissionService.hasPermission(req.user, req.method, req.path)) { - next() - } else { - res.status(422) - } - } - } -} diff --git a/packages/medusa-plugin-permissions/src/models/__mocks__/role.js b/packages/medusa-plugin-permissions/src/models/__mocks__/role.js deleted file mode 100644 index d840f21080..0000000000 --- a/packages/medusa-plugin-permissions/src/models/__mocks__/role.js +++ /dev/null @@ -1,36 +0,0 @@ -import { IdMap } from "medusa-test-utils" - -export const permissions = { - productEditorPermission: { - _id: IdMap.getId("product_editor"), - name: "product_editor", - permissions: [ - { - method: "POST", - endpoint: "/products", - }, - { - method: "GET", - endpoint: "/products", - }, - { - method: "PUT", - endpoint: "/products", - }, - ], - }, -} - -export const RoleModelMock = { - create: jest.fn().mockReturnValue(Promise.resolve()), - deleteOne: jest.fn().mockReturnValue(Promise.resolve()), - findOne: jest.fn().mockImplementation(query => { - if (query.name === "product_editor") { - return Promise.resolve(permissions.productEditorPermission) - } - return Promise.resolve(undefined) - }), - updateOne: jest.fn().mockImplementation((query, update) => { - return Promise.resolve() - }), -} diff --git a/packages/medusa-plugin-permissions/src/models/role.js b/packages/medusa-plugin-permissions/src/models/role.js deleted file mode 100644 index 0ab1148fda..0000000000 --- a/packages/medusa-plugin-permissions/src/models/role.js +++ /dev/null @@ -1,13 +0,0 @@ -import { BaseModel } from "medusa-interfaces" - -import PermissionSchema from "./schemas/permission" - -class RoleModel extends BaseModel { - static modelName = "Role" - static schema = { - name: { type: String, required: true, unique: true }, - permissions: { type: [PermissionSchema], required: true, default: [] }, - } -} - -export default RoleModel diff --git a/packages/medusa-plugin-permissions/src/models/schemas/permission.js b/packages/medusa-plugin-permissions/src/models/schemas/permission.js deleted file 
mode 100644 index 48da1d9ebc..0000000000 --- a/packages/medusa-plugin-permissions/src/models/schemas/permission.js +++ /dev/null @@ -1,6 +0,0 @@ -import mongoose from "mongoose" - -export default new mongoose.Schema({ - method: { type: String }, - endpoint: { type: String }, -}) diff --git a/packages/medusa-plugin-permissions/src/services/permission.js b/packages/medusa-plugin-permissions/src/services/permission.js deleted file mode 100644 index c62469b099..0000000000 --- a/packages/medusa-plugin-permissions/src/services/permission.js +++ /dev/null @@ -1,162 +0,0 @@ -import { BaseService } from "medusa-interfaces" -import { Validator, MedusaError } from "medusa-core-utils" - -class PermissionService extends BaseService { - constructor({ userService, roleModel }) { - super() - - /** @private @const {UserService} */ - this.userService_ = userService - - /** @private @const {RoleModel} */ - this.roleModel_ = roleModel - } - - validatePermission_(permission) { - const schema = Validator.object({ - method: Validator.string().valid( - "POST", - "GET", - "PUT", - "PATCH", - "DELETE", - "CONNECT", - "OPTIONS", - "HEAD", - "TRACE" - ), - endpoint: Validator.string(), - }) - - const { value, error } = schema.validate(permission) - - if (error) { - throw new MedusaError( - MedusaError.Types.INVALID_ARGUMENT, - "Permission is not valid" - ) - } - - return value - } - - async retrieveRole(name) { - const role = await this.roleModel_.findOne({ name }).catch((err) => { - throw new MedusaError(MedusaError.Types.DB_ERROR, err.message) - }) - - if (!role) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `${name} does not exist. Use method createRole to create it.` - ) - } - return role - } - - async hasPermission(user, method, endpoint) { - if (!user) return false - for (let i = 0; i < user.metadata.roles.length; i++) { - const role = user.metadata.roles[i] - const permissions = await this.retrieveRole(role) - return permissions.permissions.some((action) => { - return action.method === method && action.endpoint === endpoint - }) - } - return false - } - - async createRole(roleName, permissions) { - const validatedPermissions = permissions.map((permission) => - this.validatePermission_(permission) - ) - - return this.retrieveRole(roleName) - .then((role) => { - throw new MedusaError( - MedusaError.Types.INVALID_ARGUMENT, - `${role.name} already exists` - ) - }) - .catch((error) => { - if (error.name === MedusaError.Types.NOT_FOUND) { - return this.roleModel_.create({ - name: roleName, - permissions: validatedPermissions, - }) - } else { - throw error - } - }) - } - - async deleteRole(roleName) { - const role = await this.retrieve(roleName) - // Delete is idempotent, but we return a promise to allow then-chaining - if (!role) { - return Promise.resolve() - } - - return this.roleModel_ - .deleteOne({ - _id: role._id, - }) - .catch((err) => { - throw new MedusaError(MedusaError.Types.DB_ERROR, err.message) - }) - } - - async addPermission(roleName, permission) { - const role = await this.retrieveRole(roleName) - const validatedPermission = this.validatePermission_(permission) - - return this.roleModel_.updateOne( - { _id: role._id }, - { $push: { permissions: validatedPermission } } - ) - } - - async removePermission(roleName, permission) { - const role = await this.retrieveRole(roleName) - const validatedPermission = this.validatePermission_(permission) - - return this.roleModel_.updateOne( - { _id: role._id }, - { $pull: { permissions: validatedPermission } } - ) - } - - async grantRole(userId, 
roleName) { - const role = await this.retrieveRole(roleName) - const user = await this.userService_.retrieve(userId) - - if (!user.metadata.roles) { - return this.userService_.setMetadata(userId, "roles", [roleName]) - } - - if (user.metadata.roles.includes(role.name)) { - throw new MedusaError( - MedusaError.Types.DB_ERROR, - `User already has role: ${role.name}` - ) - } - - user.metadata.roles.push(roleName) - return this.userService_.setMetadata(userId, "roles", user.metadata.roles) - } - - async revokeRole(userId, roleName) { - const user = await this.userService_.retrieve(userId) - - if (!user.metadata.roles || !user.metadata.roles.includes(roleName)) { - // revokeRole is idempotent, we return a promise to allow then-chaining - return Promise.resolve() - } - // remove role from metadata.roles - const newRoles = user.metadata.roles.filter((r) => r !== roleName) - - return this.userService_.setMetadata(userId, "roles", newRoles) - } -} - -export default PermissionService diff --git a/packages/medusa-plugin-sendgrid/CHANGELOG.md b/packages/medusa-plugin-sendgrid/CHANGELOG.md index c5478aee8b..3d98b10585 100644 --- a/packages/medusa-plugin-sendgrid/CHANGELOG.md +++ b/packages/medusa-plugin-sendgrid/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.3.3](https://github.com/medusajs/medusa/compare/medusa-plugin-sendgrid@1.3.2...medusa-plugin-sendgrid@1.3.3) (2022-07-05) + +**Note:** Version bump only for package medusa-plugin-sendgrid + + + + + ## [1.3.2](https://github.com/medusajs/medusa/compare/medusa-plugin-sendgrid@1.3.0...medusa-plugin-sendgrid@1.3.2) (2022-06-19) **Note:** Version bump only for package medusa-plugin-sendgrid diff --git a/packages/medusa-plugin-sendgrid/package.json b/packages/medusa-plugin-sendgrid/package.json index 59fb459409..ec4ddc5402 100644 --- a/packages/medusa-plugin-sendgrid/package.json +++ b/packages/medusa-plugin-sendgrid/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-sendgrid", - "version": "1.3.2", + "version": "1.3.3", "description": "SendGrid transactional emails", "main": "index.js", "repository": { diff --git a/packages/medusa-plugin-slack-notification/CHANGELOG.md b/packages/medusa-plugin-slack-notification/CHANGELOG.md index b78ce19eda..10026026d5 100644 --- a/packages/medusa-plugin-slack-notification/CHANGELOG.md +++ b/packages/medusa-plugin-slack-notification/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
+## [1.3.3](https://github.com/medusajs/medusa/compare/medusa-plugin-slack-notification@1.3.2...medusa-plugin-slack-notification@1.3.3) (2022-07-05) + +**Note:** Version bump only for package medusa-plugin-slack-notification + + + + + ## [1.3.2](https://github.com/medusajs/medusa/compare/medusa-plugin-slack-notification@1.3.0...medusa-plugin-slack-notification@1.3.2) (2022-06-19) **Note:** Version bump only for package medusa-plugin-slack-notification diff --git a/packages/medusa-plugin-slack-notification/package-lock.json b/packages/medusa-plugin-slack-notification/package-lock.json index c8c4d67df4..8cc47a39ec 100644 --- a/packages/medusa-plugin-slack-notification/package-lock.json +++ b/packages/medusa-plugin-slack-notification/package-lock.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-economic", - "version": "1.3.2", + "version": "1.3.3", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/packages/medusa-plugin-slack-notification/package.json b/packages/medusa-plugin-slack-notification/package.json index daaf647f13..ee70e707ce 100644 --- a/packages/medusa-plugin-slack-notification/package.json +++ b/packages/medusa-plugin-slack-notification/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-slack-notification", - "version": "1.3.2", + "version": "1.3.3", "description": "Slack notifications", "main": "index.js", "repository": { diff --git a/packages/medusa-plugin-twilio-sms/CHANGELOG.md b/packages/medusa-plugin-twilio-sms/CHANGELOG.md index 77253b896f..aaa10e6cc5 100644 --- a/packages/medusa-plugin-twilio-sms/CHANGELOG.md +++ b/packages/medusa-plugin-twilio-sms/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.2.3](https://github.com/medusajs/medusa/compare/medusa-plugin-twilio-sms@1.2.2...medusa-plugin-twilio-sms@1.2.3) (2022-07-05) + +**Note:** Version bump only for package medusa-plugin-twilio-sms + + + + + ## [1.2.2](https://github.com/medusajs/medusa/compare/medusa-plugin-twilio-sms@1.2.0...medusa-plugin-twilio-sms@1.2.2) (2022-06-19) **Note:** Version bump only for package medusa-plugin-twilio-sms diff --git a/packages/medusa-plugin-twilio-sms/package.json b/packages/medusa-plugin-twilio-sms/package.json index 92f24d4002..8d59982d21 100644 --- a/packages/medusa-plugin-twilio-sms/package.json +++ b/packages/medusa-plugin-twilio-sms/package.json @@ -1,6 +1,6 @@ { "name": "medusa-plugin-twilio-sms", - "version": "1.2.2", + "version": "1.2.3", "main": "index.js", "repository": { "type": "git", diff --git a/packages/medusa-react/CHANGELOG.md b/packages/medusa-react/CHANGELOG.md index 4c9b77a0d1..107334133d 100644 --- a/packages/medusa-react/CHANGELOG.md +++ b/packages/medusa-react/CHANGELOG.md @@ -3,6 +3,22 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. 
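> Editor's note: the medusa-react hunks that follow add BatchJob hooks (`useAdminCreateBatchJob`, `useAdminBatchJobs`, `useAdminBatchJob`, plus cancel/confirm mutations). As a hedged sketch of how they might be consumed — not part of this changeset — the component below assumes the app is already wrapped in medusa-react's provider and a react-query `QueryClientProvider` as described in the package README, and that the hooks are imported from the published `medusa-react` package. The payload and the displayed fields mirror the test fixtures in the diff; everything else (component and handler names) is illustrative.

```tsx
import React from "react"
// Assumed import path; the hooks are exported from packages/medusa-react/src in this changeset.
import { useAdminBatchJobs, useAdminCreateBatchJob } from "medusa-react"

const BatchJobPanel = () => {
  // List existing batch jobs; query params are optional.
  const { batch_jobs, isLoading } = useAdminBatchJobs()

  // Mutation for creating a new batch job.
  const createBatchJob = useAdminCreateBatchJob()

  const handleExport = () => {
    // Payload shape taken from the mutation test fixture below.
    createBatchJob.mutate({ type: "product_export", context: {}, dry_run: false })
  }

  if (isLoading) {
    return <span>Loading…</span>
  }

  return (
    <div>
      <button onClick={handleExport}>Export products</button>
      <ul>
        {batch_jobs?.map((job) => (
          // `id`, `type` and `created_at` are fields present in the batch_job fixture.
          <li key={job.id}>
            {job.type} ({job.created_at})
          </li>
        ))}
      </ul>
    </div>
  )
}

export default BatchJobPanel
```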
+## [0.3.3](https://github.com/medusajs/medusa/compare/medusa-react@0.3.2...medusa-react@0.3.3) (2022-07-05) + + +### Bug Fixes + +* **medusa-react:** Allow to not invalidate any cache ([#1756](https://github.com/medusajs/medusa/issues/1756)) ([9e0f65d](https://github.com/medusajs/medusa/commit/9e0f65dee3b3329eb4eabe093f44a8e3b7d6e820)) + + +### Features + +* **medusa,medusa-js,medusa-react:** Add BatchJob API support in `medusa-js` + `medusa-react` ([#1704](https://github.com/medusajs/medusa/issues/1704)) ([7302d76](https://github.com/medusajs/medusa/commit/7302d76e12683c989f340d2fcfaf4338dca6554a)) + + + + + ## [0.3.2](https://github.com/medusajs/medusa/compare/medusa-react@0.3.0...medusa-react@0.3.2) (2022-06-19) diff --git a/packages/medusa-react/README.md b/packages/medusa-react/README.md index 086593796c..2523690e34 100644 --- a/packages/medusa-react/README.md +++ b/packages/medusa-react/README.md @@ -9,9 +9,9 @@ The library uses [react-query](https://react-query.tanstack.com/overview) as a s In order to install the package, run the following ```bash -npm install medusa-react react-query +npm install medusa-react react-query @medusajs/medusa # or -yarn add medusa-react react-query +yarn add medusa-react react-query @medusajs/medusa ``` ## Quick Start diff --git a/packages/medusa-react/mocks/data/fixtures.json b/packages/medusa-react/mocks/data/fixtures.json index a1afaea812..99fc9da78d 100644 --- a/packages/medusa-react/mocks/data/fixtures.json +++ b/packages/medusa-react/mocks/data/fixtures.json @@ -1247,6 +1247,20 @@ "fulfillment_option": { "provider_id": "test-ful", "options": [] + }, + "batch_job": { + "id": "batch_01F0YES4R67TXXC1QBQ8P54A8Y", + "type": "product_export", + "created_by": "usr_123412341234", + "context": null, + "result": null, + "dry_run": false, + "updated_at": "2021-03-16T21:24:00.389Z", + "created_at": "2021-03-16T21:24:00.389Z", + "deleted_at": null + }, + "upload": { + "url": "test-url" } } } diff --git a/packages/medusa-react/mocks/handlers/admin.ts b/packages/medusa-react/mocks/handlers/admin.ts index fbaf3fb107..49ec7d072a 100644 --- a/packages/medusa-react/mocks/handlers/admin.ts +++ b/packages/medusa-react/mocks/handlers/admin.ts @@ -1,7 +1,57 @@ import { rest } from "msw" +import { body } from "msw/lib/types/context" import { fixtures } from "../data" export const adminHandlers = [ + rest.post("/admin/batch-jobs/", (req, res, ctx) => { + const body = req.body as Record + return res( + ctx.status(200), + ctx.json({ + batch_job: { + ...fixtures.get("batch_job"), + ...body, + }, + }) + ) + }), + + rest.get("/admin/batch-jobs/", (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + batch_jobs: fixtures.list("batch_job"), + }) + ) + }), + + rest.get("/admin/batch-jobs/:id", (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + batch_job: fixtures.get("batch_job"), + }) + ) + }), + + rest.post("/admin/batch-jobs/:id/confirm", (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + batch_job: fixtures.get("batch_job"), + }) + ) + }), + + rest.post("/admin/batch-jobs/:id/cancel", (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + batch_job: fixtures.get("batch_job"), + }) + ) + }), + rest.post("/admin/collections/", (req, res, ctx) => { const body = req.body as Record return res( @@ -253,27 +303,33 @@ export const adminHandlers = [ ) }), - rest.delete("/admin/price-lists/:id/products/:product_id/prices", (req, res, ctx) => { - return res( - ctx.status(200), - ctx.json({ - ids: [], - object: 
"money-amount", - deleted: true, - }) - ) - }), + rest.delete( + "/admin/price-lists/:id/products/:product_id/prices", + (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + ids: [], + object: "money-amount", + deleted: true, + }) + ) + } + ), - rest.delete("/admin/price-lists/:id/variants/:variant_id/prices", (req, res, ctx) => { - return res( - ctx.status(200), - ctx.json({ - ids: [], - object: "money-amount", - deleted: true, - }) - ) - }), + rest.delete( + "/admin/price-lists/:id/variants/:variant_id/prices", + (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + ids: [], + object: "money-amount", + deleted: true, + }) + ) + } + ), rest.post("/admin/return-reasons/", (req, res, ctx) => { const body = req.body as Record @@ -1596,4 +1652,24 @@ export const adminHandlers = [ rest.delete("/admin/auth", (req, res, ctx) => { return res(ctx.status(200)) }), + + rest.delete("/admin/uploads", (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + id: (req.body as any).file_key, + object: "file", + deleted: true, + }) + ) + }), + + rest.post("/admin/uploads/download-url", (req, res, ctx) => { + return res( + ctx.status(200), + ctx.json({ + download_url: fixtures.get("upload").url, + }) + ) + }), ] diff --git a/packages/medusa-react/package.json b/packages/medusa-react/package.json index 7c77bef3ac..80bf0381fc 100644 --- a/packages/medusa-react/package.json +++ b/packages/medusa-react/package.json @@ -1,5 +1,5 @@ { - "version": "0.3.2", + "version": "0.3.3", "license": "MIT", "main": "dist/index.js", "typings": "dist/index.d.ts", @@ -22,6 +22,7 @@ "build-storybook": "build-storybook" }, "peerDependencies": { + "@medusajs/medusa": "^1.3.2", "react": ">=16", "react-query": ">= 3.29.0" }, @@ -80,8 +81,7 @@ "typescript": "^4.5.2" }, "dependencies": { - "@medusajs/medusa": "^1.3.2", - "@medusajs/medusa-js": "^1.2.2", + "@medusajs/medusa-js": "^1.2.3", "lodash": "^4.17.21", "lodash-es": "^4.17.21", "react-query": "^3.31.0" diff --git a/packages/medusa-react/src/hooks/admin/batch-jobs/index.ts b/packages/medusa-react/src/hooks/admin/batch-jobs/index.ts new file mode 100644 index 0000000000..a494946b87 --- /dev/null +++ b/packages/medusa-react/src/hooks/admin/batch-jobs/index.ts @@ -0,0 +1,2 @@ +export * from "./queries" +export * from "./mutations" diff --git a/packages/medusa-react/src/hooks/admin/batch-jobs/mutations.ts b/packages/medusa-react/src/hooks/admin/batch-jobs/mutations.ts new file mode 100644 index 0000000000..c3d6356634 --- /dev/null +++ b/packages/medusa-react/src/hooks/admin/batch-jobs/mutations.ts @@ -0,0 +1,74 @@ +import { AdminBatchJobRes, AdminPostBatchesReq } from "@medusajs/medusa" +import { Response } from "@medusajs/medusa-js" +import { useMutation, UseMutationOptions, useQueryClient } from "react-query" + +import { useMedusa } from "../../../contexts" +import { buildOptions } from "../../utils/buildOptions" +import { adminBatchJobsKeys } from "./queries" + +/** + * Hook returns functions for creating batch jobs. 
+ * + * @param options + */ +export const useAdminCreateBatchJob = ( + options?: UseMutationOptions< + Response, + Error, + AdminPostBatchesReq + > +) => { + const { client } = useMedusa() + const queryClient = useQueryClient() + + return useMutation( + (payload: AdminPostBatchesReq) => client.admin.batchJobs.create(payload), + buildOptions(queryClient, adminBatchJobsKeys.lists(), options) + ) +} + +/** + * Hook return functions for canceling a batch job + * + * @param id - id of the batch job + * @param options + */ +export const useAdminCancelBatchJob = ( + id: string, + options?: UseMutationOptions, Error> +) => { + const { client } = useMedusa() + const queryClient = useQueryClient() + + return useMutation( + () => client.admin.batchJobs.cancel(id), + buildOptions( + queryClient, + [adminBatchJobsKeys.lists(), adminBatchJobsKeys.detail(id)], + options + ) + ) +} + +/** + * Hook return functions for confirming a batch job + * + * @param id - id of the batch job + * @param options + */ +export const useAdminConfirmBatchJob = ( + id: string, + options?: UseMutationOptions, Error> +) => { + const { client } = useMedusa() + const queryClient = useQueryClient() + + return useMutation( + () => client.admin.batchJobs.confirm(id), + buildOptions( + queryClient, + [adminBatchJobsKeys.lists(), adminBatchJobsKeys.detail(id)], + options + ) + ) +} diff --git a/packages/medusa-react/src/hooks/admin/batch-jobs/queries.ts b/packages/medusa-react/src/hooks/admin/batch-jobs/queries.ts new file mode 100644 index 0000000000..f7e27eb9ce --- /dev/null +++ b/packages/medusa-react/src/hooks/admin/batch-jobs/queries.ts @@ -0,0 +1,50 @@ +import { + AdminBatchJobListRes, + AdminBatchJobRes, + AdminGetBatchParams, +} from "@medusajs/medusa" +import { Response } from "@medusajs/medusa-js" +import { useQuery } from "react-query" +import { useMedusa } from "../../../contexts" +import { UseQueryOptionsWrapper } from "../../../types" +import { queryKeysFactory } from "../../utils/index" + +const ADMIN_COLLECTIONS_QUERY_KEY = `admin_batches` as const + +export const adminBatchJobsKeys = queryKeysFactory(ADMIN_COLLECTIONS_QUERY_KEY) + +type BatchJobsQueryKey = typeof adminBatchJobsKeys + +export const useAdminBatchJobs = ( + query?: AdminGetBatchParams, + options?: UseQueryOptionsWrapper< + Response, + Error, + ReturnType + > +) => { + const { client } = useMedusa() + const { data, ...rest } = useQuery( + adminBatchJobsKeys.list(query), + () => client.admin.batchJobs.list(query), + options + ) + return { ...data, ...rest } as const +} + +export const useAdminBatchJob = ( + id: string, + options?: UseQueryOptionsWrapper< + Response, + Error, + ReturnType + > +) => { + const { client } = useMedusa() + const { data, ...rest } = useQuery( + adminBatchJobsKeys.detail(id), + () => client.admin.batchJobs.retrieve(id), + options + ) + return { ...data, ...rest } as const +} diff --git a/packages/medusa-react/src/hooks/admin/index.ts b/packages/medusa-react/src/hooks/admin/index.ts index 01968c7e2d..89873d1ebe 100644 --- a/packages/medusa-react/src/hooks/admin/index.ts +++ b/packages/medusa-react/src/hooks/admin/index.ts @@ -1,26 +1,28 @@ export * from "./auth" -export * from "./collections" +export * from "./batch-jobs" export * from "./claims" -export * from "./customers" +export * from "./collections" export * from "./customer-groups" +export * from "./customers" export * from "./discounts" export * from "./draft-orders" export * from "./gift-cards" +export * from "./invites" +export * from "./notes" +export * from 
"./notifications" export * from "./orders" -export * from "./products" +export * from "./price-lists" export * from "./product-tags" export * from "./product-types" -export * from "./price-lists" -export * from "./return-reasons" +export * from "./products" export * from "./regions" +export * from "./return-reasons" +export * from "./returns" export * from "./shipping-options" export * from "./shipping-profiles" -export * from "./notes" -export * from "./invites" -export * from "./notifications" -export * from "./returns" export * from "./store" export * from "./swaps" +export * from "./tax-rates" export * from "./users" export * from "./variants" -export * from "./tax-rates" +export * from "./uploads" diff --git a/packages/medusa-react/src/hooks/admin/uploads/index.ts b/packages/medusa-react/src/hooks/admin/uploads/index.ts new file mode 100644 index 0000000000..bd086bcaef --- /dev/null +++ b/packages/medusa-react/src/hooks/admin/uploads/index.ts @@ -0,0 +1 @@ +export * from "./mutations" diff --git a/packages/medusa-react/src/hooks/admin/uploads/mutations.ts b/packages/medusa-react/src/hooks/admin/uploads/mutations.ts new file mode 100644 index 0000000000..b5b136684e --- /dev/null +++ b/packages/medusa-react/src/hooks/admin/uploads/mutations.ts @@ -0,0 +1,60 @@ +import { + AdminDeleteUploadsReq, + IAdminPostUploadsFileReq, + AdminDeleteUploadsRes, + AdminPostUploadsDownloadUrlReq, + AdminUploadsDownloadUrlRes, + AdminUploadsRes, +} from "@medusajs/medusa" +import { Response } from "@medusajs/medusa-js" +import { useMutation, UseMutationOptions, useQueryClient } from "react-query" +import { useMedusa } from "../../../contexts" +import { buildOptions } from "../../utils/buildOptions" + +export const useAdminUploadFile = ( + options?: UseMutationOptions< + Response, + Error, + IAdminPostUploadsFileReq + > +) => { + const { client } = useMedusa() + const queryClient = useQueryClient() + + return useMutation((payload: IAdminPostUploadsFileReq) => { + return client.admin.uploads.create(payload) + }, buildOptions(queryClient, undefined, options)) +} + +export const useAdminCreatePresignedDownloadUrl = ( + options?: UseMutationOptions< + Response, + Error, + AdminPostUploadsDownloadUrlReq + > +) => { + const { client } = useMedusa() + const queryClient = useQueryClient() + + return useMutation( + (payload: AdminPostUploadsDownloadUrlReq) => + client.admin.uploads.getPresignedDownloadUrl(payload), + buildOptions(queryClient, undefined, options) + ) +} + +export const useAdminDeleteFile = ( + options?: UseMutationOptions< + Response, + Error, + AdminDeleteUploadsReq + > +) => { + const { client } = useMedusa() + const queryClient = useQueryClient() + + return useMutation( + (payload: AdminDeleteUploadsReq) => client.admin.uploads.delete(payload), + buildOptions(queryClient, undefined, options) + ) +} diff --git a/packages/medusa-react/src/hooks/utils/buildOptions.ts b/packages/medusa-react/src/hooks/utils/buildOptions.ts index 0c8c9435e8..145ff13ff9 100644 --- a/packages/medusa-react/src/hooks/utils/buildOptions.ts +++ b/packages/medusa-react/src/hooks/utils/buildOptions.ts @@ -8,7 +8,7 @@ export const buildOptions = < TKey extends Array >( queryClient: QueryClient, - queryKey: TKey[] | TKey, + queryKey?: TKey[] | TKey, options?: UseMutationOptions ): UseMutationOptions => { return { @@ -18,10 +18,12 @@ export const buildOptions = < return options.onSuccess(...args) } - if (queryKey.filter(Array.isArray).length > 0) { - queryKey.forEach(key => queryClient.invalidateQueries(key)) - } else { - 
queryClient.invalidateQueries(queryKey) + if (queryKey !== undefined) { + if (queryKey.filter(Array.isArray).length > 0) { + queryKey.forEach(key => queryClient.invalidateQueries(key)) + } else { + queryClient.invalidateQueries(queryKey) + } } }, } diff --git a/packages/medusa-react/test/hooks/admin/batch-jobs/mutations.test.ts b/packages/medusa-react/test/hooks/admin/batch-jobs/mutations.test.ts new file mode 100644 index 0000000000..a49b91a37d --- /dev/null +++ b/packages/medusa-react/test/hooks/admin/batch-jobs/mutations.test.ts @@ -0,0 +1,78 @@ +import { renderHook } from "@testing-library/react-hooks" +import { fixtures } from "../../../../mocks/data" +import { + useAdminCancelBatchJob, + useAdminConfirmBatchJob, + useAdminCreateBatchJob, +} from "../../../../src" +import { createWrapper } from "../../../utils" + +describe("useAdminCreateBatchJob hook", () => { + test("creates a batch job and returns it", async () => { + const batch = { + type: "product_export", + dry_run: false, + context: {}, + } + + const { result, waitFor } = renderHook(() => useAdminCreateBatchJob(), { + wrapper: createWrapper(), + }) + + result.current.mutate(batch) + + await waitFor(() => result.current.isSuccess) + + expect(result.current.data?.response.status).toEqual(200) + expect(result.current.data?.batch_job).toEqual( + expect.objectContaining({ + ...fixtures.get("batch_job"), + ...batch, + }) + ) + }) +}) + +describe("useAdminCancelBatchJob hook", () => { + test("cancels a batch job and returns it", async () => { + const { result, waitFor } = renderHook( + () => useAdminCancelBatchJob(fixtures.get("batch_job").id), + { + wrapper: createWrapper(), + } + ) + + result.current.mutate() + + await waitFor(() => result.current.isSuccess) + + expect(result.current.data?.response.status).toEqual(200) + expect(result.current.data?.batch_job).toEqual( + expect.objectContaining({ + ...fixtures.get("batch_job"), + }) + ) + }) +}) + +describe("useAdminConfirmBatchJob hook", () => { + test("confirms a batch job and returns it", async () => { + const { result, waitFor } = renderHook( + () => useAdminConfirmBatchJob(fixtures.get("batch_job").id), + { + wrapper: createWrapper(), + } + ) + + result.current.mutate() + + await waitFor(() => result.current.isSuccess) + + expect(result.current.data?.response.status).toEqual(200) + expect(result.current.data?.batch_job).toEqual( + expect.objectContaining({ + ...fixtures.get("batch_job"), + }) + ) + }) +}) diff --git a/packages/medusa-react/test/hooks/admin/batch-jobs/queries.test.ts b/packages/medusa-react/test/hooks/admin/batch-jobs/queries.test.ts new file mode 100644 index 0000000000..e213146f91 --- /dev/null +++ b/packages/medusa-react/test/hooks/admin/batch-jobs/queries.test.ts @@ -0,0 +1,35 @@ +import { renderHook } from "@testing-library/react-hooks" +import { fixtures } from "../../../../mocks/data" +import { useAdminBatchJob, useAdminBatchJobs } from "../../../../src" +import { createWrapper } from "../../../utils" + +describe("useAdminBatchJobs hook", () => { + test("returns a list of batch job", async () => { + const batchJobs = fixtures.list("batch_job") + const { result, waitFor } = renderHook(() => useAdminBatchJobs(), { + wrapper: createWrapper(), + }) + + await waitFor(() => result.current.isSuccess) + + expect(result.current.response?.status).toEqual(200) + expect(result.current.batch_jobs).toEqual(batchJobs) + }) +}) + +describe("useAdminBatchJob hook", () => { + test("returns a batch job", async () => { + const batchJob = fixtures.get("batch_job") + const { 
result, waitFor } = renderHook( + () => useAdminBatchJob(batchJob.id), + { + wrapper: createWrapper(), + } + ) + + await waitFor(() => result.current.isSuccess) + + expect(result.current.response?.status).toEqual(200) + expect(result.current.batch_job).toEqual(batchJob) + }) +}) diff --git a/packages/medusa-react/test/hooks/admin/uploads/mutations.test.ts b/packages/medusa-react/test/hooks/admin/uploads/mutations.test.ts new file mode 100644 index 0000000000..2393217b02 --- /dev/null +++ b/packages/medusa-react/test/hooks/admin/uploads/mutations.test.ts @@ -0,0 +1,46 @@ +import { renderHook } from "@testing-library/react-hooks" +import { fixtures } from "../../../../mocks/data" +import { + useAdminDeleteFile, + useAdminCreatePresignedDownloadUrl, +} from "../../../../src" +import { createWrapper } from "../../../utils" + +describe("useAdminDeleteFile hook", () => { + test("Removes file with key and returns deleteresult", async () => { + const file_key = "test" + + const { result, waitFor } = renderHook(() => useAdminDeleteFile(), { + wrapper: createWrapper(), + }) + + result.current.mutate({ file_key }) + + await waitFor(() => result.current.isSuccess) + + expect(result.current.data.response.status).toEqual(200) + expect(result.current.data).toEqual( + expect.objectContaining({ id: file_key, object: "file", deleted: true }) + ) + }) +}) + +describe("useAdminCreatePresignedDownloadUrl hook", () => { + test("", async () => { + const file_key = "test" + + const { result, waitFor } = renderHook( + () => useAdminCreatePresignedDownloadUrl(), + { + wrapper: createWrapper(), + } + ) + + result.current.mutate({ file_key }) + + await waitFor(() => result.current.isSuccess) + + expect(result.current.data.response.status).toEqual(200) + expect(result.current.data.download_url).toEqual(fixtures.get("upload").url) + }) +}) diff --git a/packages/medusa-source-shopify/CHANGELOG.md b/packages/medusa-source-shopify/CHANGELOG.md index 559114ce99..86aa1a05d7 100644 --- a/packages/medusa-source-shopify/CHANGELOG.md +++ b/packages/medusa-source-shopify/CHANGELOG.md @@ -3,6 +3,14 @@ All notable changes to this project will be documented in this file. See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.2.1](https://github.com/medusajs/medusa/compare/medusa-source-shopify@1.2.0...medusa-source-shopify@1.2.1) (2022-07-05) + +**Note:** Version bump only for package medusa-source-shopify + + + + + # [1.2.0](https://github.com/medusajs/medusa/compare/medusa-source-shopify@1.1.1...medusa-source-shopify@1.2.0) (2022-05-01) diff --git a/packages/medusa-source-shopify/package.json b/packages/medusa-source-shopify/package.json index 400be37ddd..206494459e 100644 --- a/packages/medusa-source-shopify/package.json +++ b/packages/medusa-source-shopify/package.json @@ -1,6 +1,6 @@ { "name": "medusa-source-shopify", - "version": "1.2.0", + "version": "1.2.1", "description": "Source plugin that allows users to import products from a Shopify store", "main": "index.js", "repository": { @@ -29,7 +29,7 @@ "ioredis": "^4.27.9", "lodash": "^4.17.21", "medusa-core-utils": "^1.1.31", - "medusa-interfaces": "^1.3.0", + "medusa-interfaces": "^1.3.1", "medusa-test-utils": "^1.1.37" }, "devDependencies": { diff --git a/packages/medusa/CHANGELOG.md b/packages/medusa/CHANGELOG.md index 375d9b2cdb..86c66f1866 100644 --- a/packages/medusa/CHANGELOG.md +++ b/packages/medusa/CHANGELOG.md @@ -3,6 +3,49 @@ All notable changes to this project will be documented in this file. 
See [Conventional Commits](https://conventionalcommits.org) for commit guidelines. +## [1.3.3](https://github.com/medusajs/medusa/compare/@medusajs/medusa@1.3.2...@medusajs/medusa@1.3.3) (2022-07-05) + + +### Bug Fixes + +* introduce listAndCount for gift cards to enable pagination ([#1754](https://github.com/medusajs/medusa/issues/1754)) ([9a14b84](https://github.com/medusajs/medusa/commit/9a14b84e58a7db2b38edaf9ce59dcb4416736c30)) +* **medusa:** Remove deps `mongoose` + `mongodb` ([#1218](https://github.com/medusajs/medusa/issues/1218)) ([c76e23e](https://github.com/medusajs/medusa/commit/c76e23e84dd8cb08c3c709f9f95c4c17b9685439)) +* add shipping taxes ([#1759](https://github.com/medusajs/medusa/issues/1759)) ([fee0f88](https://github.com/medusajs/medusa/commit/fee0f88a62d7e00c844fcd021d090f130ab4a532)) +* **medusa:** Add images relation ([#1693](https://github.com/medusajs/medusa/issues/1693)) ([765c794](https://github.com/medusajs/medusa/commit/765c794b9775a12fffbed59a6312beb87442dc1a)) +* **medusa:** Normalizes email before saving customer ([#1719](https://github.com/medusajs/medusa/issues/1719)) ([2a32609](https://github.com/medusajs/medusa/commit/2a32609b7458c12f047d3f9ba45d426fdc784d58)) +* **medusa:** Plugin loader must also check for TransactionBaseService ([#1601](https://github.com/medusajs/medusa/issues/1601)) ([28ddf10](https://github.com/medusajs/medusa/commit/28ddf10446e689a32bf9d4d05dedd4aa090d66a0)) +* **medusa:** Product export strategy ([#1713](https://github.com/medusajs/medusa/issues/1713)) ([89cb717](https://github.com/medusajs/medusa/commit/89cb7174613ffbced8abb3a7e8a4539134bd867c)) +* **medusa:** Remove duplicate DiscountRuleType import ([#1699](https://github.com/medusajs/medusa/issues/1699)) ([46a6e1a](https://github.com/medusajs/medusa/commit/46a6e1a4d368241288d50cdd0aa35ac7dbf14764)) +* **medusa:** update cron schedule to be every 6 hours ([#1658](https://github.com/medusajs/medusa/issues/1658)) ([cffb03d](https://github.com/medusajs/medusa/commit/cffb03d1978b6fe019007f9c7683b67171300255)) +* adds tax calculation to product pricing ([#1354](https://github.com/medusajs/medusa/issues/1354)) ([14366f5](https://github.com/medusajs/medusa/commit/14366f536decc88546658e23521961a82409e842)) +* includes variant prices when listing products using a search query ([#1607](https://github.com/medusajs/medusa/issues/1607)) ([247ad6d](https://github.com/medusajs/medusa/commit/247ad6dc6d7f55bdec5d9d1b59f96c380dbba9c9)) + + +### Features + +* add customer to fetch-draft-order payload ([#1444](https://github.com/medusajs/medusa/issues/1444)) ([3fb74bf](https://github.com/medusajs/medusa/commit/3fb74bf512644c4dc45615a608f27650a95fa791)) +* **medusa:** Add batch strategy for order exports ([#1603](https://github.com/medusajs/medusa/issues/1603)) ([bf47d1a](https://github.com/medusajs/medusa/commit/bf47d1aecd74f4489667609444a8b09393e894d3)) +* **medusa:** Add file size calculation for product export ([#1726](https://github.com/medusajs/medusa/issues/1726)) ([fb7abbf](https://github.com/medusajs/medusa/commit/fb7abbf40784dcc66dbde25d400f3af54141c237)) +* **medusa:** Add line item totals to cart totals decoration ([#1740](https://github.com/medusajs/medusa/issues/1740)) ([c6dc908](https://github.com/medusajs/medusa/commit/c6dc9086cfa272db0c1a7f98f670bd3ed8ccfa78)) +* **medusa:** Allow to filter the batch jobs with nullable date ([#1747](https://github.com/medusajs/medusa/issues/1747)) ([c0f624a](https://github.com/medusajs/medusa/commit/c0f624ad3b8ae507438c0c84d867dc19904f08ae)) +* 
**medusa:** Apply query transformers to Collection and CustomerGroups ([#1667](https://github.com/medusajs/medusa/issues/1667)) ([e53c06e](https://github.com/medusajs/medusa/commit/e53c06eab8a37579f33c547d0373edc082cc308b)) +* **medusa:** BatchJobStrategy and loaders ([#1434](https://github.com/medusajs/medusa/issues/1434)) ([886dcbc](https://github.com/medusajs/medusa/commit/886dcbc82fc5ec784e699ddf7b18f710535fdada)) +* **medusa:** Convert GiftCardService to Typescript ([#1664](https://github.com/medusajs/medusa/issues/1664)) ([1585b7a](https://github.com/medusajs/medusa/commit/1585b7ae2b063adad9c22f6aac9d1e426ccac29f)) +* **medusa:** Delete and download url endpoints ([#1705](https://github.com/medusajs/medusa/issues/1705)) ([cc29b64](https://github.com/medusajs/medusa/commit/cc29b641c9358415b46179371988e7ddc11d2664)) +* **medusa:** Extend file-service interface + move to core ([#1577](https://github.com/medusajs/medusa/issues/1577)) ([8e42d37](https://github.com/medusajs/medusa/commit/8e42d37e84e80c003b9c0311117ab8a8871aa61b)) +* **medusa:** Migrate utils to TS ([#1415](https://github.com/medusajs/medusa/issues/1415)) ([d98cd85](https://github.com/medusajs/medusa/commit/d98cd85d2370f179044ddfec43479dc7cdcc39bd)) +* **medusa:** Parsing CSV files ([#1572](https://github.com/medusajs/medusa/issues/1572)) ([9e686a8](https://github.com/medusajs/medusa/commit/9e686a8e47c567ffdb57bb43af796dd38049294f)) +* **medusa:** Support batch-job API ([#1547](https://github.com/medusajs/medusa/issues/1547)) ([4536886](https://github.com/medusajs/medusa/commit/453688682c79032737ea47197c00ea14e84aab02)), closes [#1434](https://github.com/medusajs/medusa/issues/1434) [#1548](https://github.com/medusajs/medusa/issues/1548) [#1453](https://github.com/medusajs/medusa/issues/1453) +* **medusa:** Support transformQuery/Body middleware, introduction of pipe feature ([#1593](https://github.com/medusajs/medusa/issues/1593)) ([3359e18](https://github.com/medusajs/medusa/commit/3359e189a70533692f85fbbff9b09018872abbf4)) +* **medusa:** Update BuildQuery typings ([#1672](https://github.com/medusajs/medusa/issues/1672)) ([93aaaa7](https://github.com/medusajs/medusa/commit/93aaaa71bd07ba0675be05ea503580f3e9ec6806)) +* **medusa-cli:** Allow to revert migrations from the CLI ([#1353](https://github.com/medusajs/medusa/issues/1353)) ([012513b](https://github.com/medusajs/medusa/commit/012513b6a1e90169e9e0e53f7a59841a34fbaeb3)) +* **medusa,medusa-js,medusa-react:** Add BatchJob API support in `medusa-js` + `medusa-react` ([#1704](https://github.com/medusajs/medusa/issues/1704)) ([7302d76](https://github.com/medusajs/medusa/commit/7302d76e12683c989f340d2fcfaf4338dca6554a)) +* **medusa/product-export-strategy:** Implement the Product export strategy ([#1688](https://github.com/medusajs/medusa/issues/1688)) ([7b09b8c](https://github.com/medusajs/medusa/commit/7b09b8c36cf8777ee874deed795bc98ba6653aa8)) + + + + + ## [1.3.2](https://github.com/medusajs/medusa/compare/@medusajs/medusa@1.3.0...@medusajs/medusa@1.3.2) (2022-06-19) diff --git a/packages/medusa/package.json b/packages/medusa/package.json index cda4f8b21b..73f0e381de 100644 --- a/packages/medusa/package.json +++ b/packages/medusa/package.json @@ -1,6 +1,6 @@ { "name": "@medusajs/medusa", - "version": "1.3.2", + "version": "1.3.3", "description": "E-commerce for JAMstack", "main": "dist/index.js", "bin": { @@ -23,11 +23,12 @@ "@types/express": "^4.17.13", "@types/jest": "^27.5.0", "@types/jsonwebtoken": "^8.5.5", + "@types/multer": "^1.4.7", "babel-preset-medusa-package": 
"^1.1.19", "cross-env": "^5.2.1", "eslint": "^7.32.0", "jest": "^25.5.2", - "medusa-interfaces": "^1.3.0", + "medusa-interfaces": "^1.3.1", "nodemon": "^2.0.1", "prettier": "^1.19.1", "sqlite3": "^5.0.2", @@ -50,7 +51,7 @@ }, "dependencies": { "@hapi/joi": "^16.1.8", - "@medusajs/medusa-cli": "^1.3.0", + "@medusajs/medusa-cli": "^1.3.1", "@types/lodash": "^4.14.168", "awilix": "^4.2.3", "body-parser": "^1.19.0", @@ -78,6 +79,7 @@ "morgan": "^1.9.1", "multer": "^1.4.2", "node-schedule": "^2.1.0", + "papaparse": "^5.3.2", "passport": "^0.4.0", "passport-http-bearer": "^1.0.1", "passport-jwt": "^4.0.0", diff --git a/packages/medusa/src/api/index.js b/packages/medusa/src/api/index.js index 290b631600..63f19acd92 100644 --- a/packages/medusa/src/api/index.js +++ b/packages/medusa/src/api/index.js @@ -16,33 +16,33 @@ export default (container, config) => { } // Admin -export * from "./routes/admin/collections" export * from "./routes/admin/auth" -export * from "./routes/admin/customers" +export * from "./routes/admin/batch" +export * from "./routes/admin/collections" export * from "./routes/admin/customer-groups" +export * from "./routes/admin/customers" export * from "./routes/admin/discounts" export * from "./routes/admin/draft-orders" export * from "./routes/admin/gift-cards" export * from "./routes/admin/invites" export * from "./routes/admin/notes" export * from "./routes/admin/notifications" -export * from "./routes/admin/shipping-profiles" -export * from "./routes/admin/store" -export * from "./routes/admin/products" -export * from "./routes/admin/users" export * from "./routes/admin/orders" -export * from "./routes/admin/variants" -export * from "./routes/admin/return-reasons" -export * from "./routes/admin/swaps" -export * from "./routes/admin/uploads" -export * from "./routes/admin/returns" -export * from "./routes/admin/tax-rates" -export * from "./routes/admin/shipping-options" -export * from "./routes/admin/regions" +export * from "./routes/admin/price-lists" export * from "./routes/admin/product-tags" export * from "./routes/admin/product-types" -export * from "./routes/admin/price-lists" - +export * from "./routes/admin/products" +export * from "./routes/admin/regions" +export * from "./routes/admin/return-reasons" +export * from "./routes/admin/returns" +export * from "./routes/admin/shipping-options" +export * from "./routes/admin/shipping-profiles" +export * from "./routes/admin/store" +export * from "./routes/admin/swaps" +export * from "./routes/admin/tax-rates" +export * from "./routes/admin/uploads" +export * from "./routes/admin/users" +export * from "./routes/admin/variants" // Store export * from "./routes/store/auth" export * from "./routes/store/carts" diff --git a/packages/medusa/src/api/middlewares/authenticate-customer.js b/packages/medusa/src/api/middlewares/authenticate-customer.ts similarity index 54% rename from packages/medusa/src/api/middlewares/authenticate-customer.js rename to packages/medusa/src/api/middlewares/authenticate-customer.ts index 61ca8e1b3a..ae86c0b9b3 100644 --- a/packages/medusa/src/api/middlewares/authenticate-customer.js +++ b/packages/medusa/src/api/middlewares/authenticate-customer.ts @@ -1,12 +1,12 @@ import passport from "passport" +import { Request, Response, NextFunction, RequestHandler } from "express" -export default () => { - // Always go to next - return (req, res, next) => { +export default (): RequestHandler => { + return (req: Request, res: Response, next: NextFunction): void => { passport.authenticate( ["jwt", "bearer"], { 
session: false }, - (err, user, info) => { + (err, user) => { if (err) { return next(err) } diff --git a/packages/medusa/src/api/middlewares/authenticate.js b/packages/medusa/src/api/middlewares/authenticate.js deleted file mode 100644 index d536b8a039..0000000000 --- a/packages/medusa/src/api/middlewares/authenticate.js +++ /dev/null @@ -1,7 +0,0 @@ -import passport from "passport" - -export default () => { - return (req, res, next) => { - passport.authenticate(["jwt", "bearer"], { session: false })(req, res, next) - } -} diff --git a/packages/medusa/src/api/middlewares/authenticate.ts b/packages/medusa/src/api/middlewares/authenticate.ts new file mode 100644 index 0000000000..d5aa4d4ed6 --- /dev/null +++ b/packages/medusa/src/api/middlewares/authenticate.ts @@ -0,0 +1,8 @@ +import passport from "passport" +import { Request, Response, NextFunction, RequestHandler } from "express" + +export default (): RequestHandler => { + return (req: Request, res: Response, next: NextFunction): void => { + passport.authenticate(["jwt", "bearer"], { session: false })(req, res, next) + } +} diff --git a/packages/medusa/src/api/middlewares/await-middleware.js b/packages/medusa/src/api/middlewares/await-middleware.js deleted file mode 100644 index ca4821e6af..0000000000 --- a/packages/medusa/src/api/middlewares/await-middleware.js +++ /dev/null @@ -1,3 +0,0 @@ -export default (fn) => - (...args) => - fn(...args).catch(args[2]) diff --git a/packages/medusa/src/api/middlewares/await-middleware.ts b/packages/medusa/src/api/middlewares/await-middleware.ts new file mode 100644 index 0000000000..b47f9f5246 --- /dev/null +++ b/packages/medusa/src/api/middlewares/await-middleware.ts @@ -0,0 +1,9 @@ +import { NextFunction, Request, RequestHandler, Response } from "express" + +type handler = (req: Request, res: Response) => Promise + +export default (fn: handler): RequestHandler => { + return (req: Request, res: Response, next: NextFunction) => { + return fn(req, res).catch(next) + } +} diff --git a/packages/medusa/src/api/middlewares/error-handler.js b/packages/medusa/src/api/middlewares/error-handler.ts similarity index 86% rename from packages/medusa/src/api/middlewares/error-handler.js rename to packages/medusa/src/api/middlewares/error-handler.ts index ee4ea47932..37496b09ad 100644 --- a/packages/medusa/src/api/middlewares/error-handler.js +++ b/packages/medusa/src/api/middlewares/error-handler.ts @@ -1,4 +1,6 @@ import { MedusaError } from "medusa-core-utils" +import { NextFunction, Request, Response } from "express" +import { Logger } from "../../types/global" const QUERY_RUNNER_RELEASED = "QueryRunnerAlreadyReleasedError" const TRANSACTION_STARTED = "TransactionAlreadyStartedError" @@ -9,8 +11,13 @@ const INVALID_REQUEST_ERROR = "invalid_request_error" const INVALID_STATE_ERROR = "invalid_state_error" export default () => { - return (err, req, res, next) => { - const logger = req.scope.resolve("logger") + return ( + err: MedusaError, + req: Request, + res: Response, + next: NextFunction + ) => { + const logger: Logger = req.scope.resolve("logger") logger.error(err) const errorType = err.type || err.name diff --git a/packages/medusa/src/api/middlewares/feature-flag-enabled.ts b/packages/medusa/src/api/middlewares/feature-flag-enabled.ts new file mode 100644 index 0000000000..c2eeb54452 --- /dev/null +++ b/packages/medusa/src/api/middlewares/feature-flag-enabled.ts @@ -0,0 +1,18 @@ +import { NextFunction, Request, Response } from "express" +import { FlagRouter } from "../../utils/flag-router" + +export function 
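The typed `wrap` helper keeps the old behaviour of funnelling rejected promises into `next`, so route modules can register plain async handlers. A small sketch of the usage pattern that appears throughout this diff; the route path and `exampleService` are hypothetical, and `req.scope` relies on Medusa's request-scoped container augmentation.

```ts
import { Request, Response, Router } from "express"
import middlewares from "../../../middlewares"

const route = Router()

// An async handler can simply throw or reject; `wrap` catches the rejection
// and forwards it to `next`, so the error-handler middleware can format it.
route.get(
  "/example",
  middlewares.wrap(async (req: Request, res: Response): Promise<void> => {
    const exampleService = req.scope.resolve("exampleService") // hypothetical service
    res.json({ data: await exampleService.list() })
  })
)

export default route
```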
isFeatureFlagEnabled( + flagKey: string +): (req: Request, res: Response, next: NextFunction) => Promise { + return async (req: Request, res: Response, next: NextFunction) => { + const featureFlagRouter = req.scope.resolve( + "featureFlagRouter" + ) as FlagRouter + + if (!featureFlagRouter.isFeatureEnabled(flagKey)) { + res.sendStatus(404) + } else { + next() + } + } +} diff --git a/packages/medusa/src/api/middlewares/index.js b/packages/medusa/src/api/middlewares/index.ts similarity index 82% rename from packages/medusa/src/api/middlewares/index.js rename to packages/medusa/src/api/middlewares/index.ts index 5844904f49..08d79c2701 100644 --- a/packages/medusa/src/api/middlewares/index.js +++ b/packages/medusa/src/api/middlewares/index.ts @@ -5,6 +5,8 @@ import { default as wrap } from "./await-middleware" export { getRequestedBatchJob } from "./batch-job/get-requested-batch-job" export { canAccessBatchJob } from "./batch-job/can-access-batch-job" +export { transformQuery } from "./transform-query" +export { transformBody } from "./transform-body" export default { authenticate, diff --git a/packages/medusa/src/api/middlewares/normalized-query.js b/packages/medusa/src/api/middlewares/normalized-query.js deleted file mode 100644 index 82e2233b06..0000000000 --- a/packages/medusa/src/api/middlewares/normalized-query.js +++ /dev/null @@ -1,16 +0,0 @@ -export default () => { - return (req, res, next) => { - const clean = Object.entries(req.query).reduce((acc, [key, val]) => { - if (Array.isArray(val) && val.length === 1) { - acc[key] = val[0].split(",") - } else { - acc[key] = val - } - return acc - }, {}) - - req.query = clean - - next() - } -} diff --git a/packages/medusa/src/api/middlewares/normalized-query.ts b/packages/medusa/src/api/middlewares/normalized-query.ts new file mode 100644 index 0000000000..7cc6e90231 --- /dev/null +++ b/packages/medusa/src/api/middlewares/normalized-query.ts @@ -0,0 +1,24 @@ +import { NextFunction, Request, Response } from "express" + +/** + * Normalize an input query, especially from array like query params to an array type + * e.g: /admin/orders/?fields[]=id,status,cart_id becomes { fields: ["id", "status", "cart_id"] } + */ +export default (): (( + req: Request, + res: Response, + next: NextFunction +) => void) => { + return (req: Request, res: Response, next: NextFunction): void => { + req.query = Object.entries(req.query).reduce((acc, [key, val]) => { + if (Array.isArray(val) && val.length === 1) { + acc[key] = (val as string[])[0].split(",") + } else { + acc[key] = val + } + return acc + }, {}) + + next() + } +} diff --git a/packages/medusa/src/api/middlewares/transform-body.ts b/packages/medusa/src/api/middlewares/transform-body.ts new file mode 100644 index 0000000000..bdbc0d5908 --- /dev/null +++ b/packages/medusa/src/api/middlewares/transform-body.ts @@ -0,0 +1,18 @@ +import { NextFunction, Request, Response } from "express" +import { ClassConstructor } from "../../types/global" +import { ValidatorOptions } from "class-validator" +import { validator } from "../../utils/validator" + +export function transformBody( + plainToClass: ClassConstructor, + config: ValidatorOptions = {} +): (req: Request, res: Response, next: NextFunction) => Promise { + return async (req: Request, res: Response, next: NextFunction) => { + try { + req.validatedBody = await validator(plainToClass, req.body, config) + next() + } catch (e) { + next(e) + } + } +} diff --git a/packages/medusa/src/api/middlewares/transform-query.ts 
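`transformBody` moves body validation out of the handler: the router validates the payload against a class-validator class and the handler reads `req.validatedBody`, exactly as the collections routes further down do. A minimal sketch with a hypothetical `AdminPostWidgetsReq` class:

```ts
import { Router } from "express"
import { IsOptional, IsString } from "class-validator"
import middlewares, { transformBody } from "../../../middlewares"

// Hypothetical request-body class; the decorators drive validation.
export class AdminPostWidgetsReq {
  @IsString()
  title: string

  @IsString()
  @IsOptional()
  description?: string
}

const route = Router()

route.post(
  "/",
  transformBody(AdminPostWidgetsReq),
  middlewares.wrap(async (req, res) => {
    // transformBody has already validated req.body into a class instance.
    const { validatedBody } = req as { validatedBody: AdminPostWidgetsReq }
    const widgetService = req.scope.resolve("widgetService") // hypothetical service
    const widget = await widgetService.create(validatedBody)
    res.status(200).json({ widget })
  })
)

export default route
```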
b/packages/medusa/src/api/middlewares/transform-query.ts new file mode 100644 index 0000000000..142c2f55d7 --- /dev/null +++ b/packages/medusa/src/api/middlewares/transform-query.ts @@ -0,0 +1,57 @@ +import { NextFunction, Request, Response } from "express" +import { ClassConstructor } from "../../types/global" +import { validator } from "../../utils/validator" +import { ValidatorOptions } from "class-validator" +import { default as normalizeQuery } from "./normalized-query" +import { + prepareListQuery, + prepareRetrieveQuery, +} from "../../utils/get-query-config" +import { BaseEntity } from "../../interfaces/models/base-entity" +import { FindConfig, QueryConfig, RequestQueryFields } from "../../types/common" +import { omit } from "lodash" + +export function transformQuery< + T extends RequestQueryFields, + TEntity extends BaseEntity +>( + plainToClass: ClassConstructor, + queryConfig?: QueryConfig, + config: ValidatorOptions = {} +): (req: Request, res: Response, next: NextFunction) => Promise { + return async (req: Request, res: Response, next: NextFunction) => { + try { + normalizeQuery()(req, res, () => void 0) + const validated: T = await validator>( + plainToClass, + req.query, + config + ) + req.validatedQuery = validated + + req.filterableFields = omit(validated, [ + "limit", + "offset", + "expand", + "fields", + "order", + ]) + + if (queryConfig?.isList) { + req.listConfig = prepareListQuery( + validated, + queryConfig + ) as FindConfig + } else { + req.retrieveConfig = prepareRetrieveQuery( + validated, + queryConfig + ) as FindConfig + } + + next() + } catch (e) { + next(e) + } + } +} diff --git a/packages/medusa/src/api/routes/admin/batch/cancel-batch-job.ts b/packages/medusa/src/api/routes/admin/batch/cancel-batch-job.ts new file mode 100644 index 0000000000..f5dcaab4b4 --- /dev/null +++ b/packages/medusa/src/api/routes/admin/batch/cancel-batch-job.ts @@ -0,0 +1,30 @@ +import { BatchJobService } from "../../../../services" + +/** + * @oas [post] /batch-jobs/{id}/cancel + * operationId: "PostBatchJobsBatchJobCancel" + * summary: "Marks a batch job as canceled" + * description: "Marks a batch job as canceled" + * x-authenticated: true + * parameters: + * - (path) id=* {string} The id of the batch job. + * tags: + * - Batch Job + * responses: + * "200": + * description: OK + * content: + * application/json: + * schema: + * properties: + * batch_job: + * $ref: "#/components/schemas/batch_job" + */ +export default async (req, res) => { + let batch_job = req.batch_job + + const batchJobService: BatchJobService = req.scope.resolve("batchJobService") + batch_job = await batchJobService.cancel(batch_job) + + res.json({ batch_job }) +} diff --git a/packages/medusa/src/api/routes/admin/batch/confirm-batch-job.ts b/packages/medusa/src/api/routes/admin/batch/confirm-batch-job.ts new file mode 100644 index 0000000000..b5bb86903e --- /dev/null +++ b/packages/medusa/src/api/routes/admin/batch/confirm-batch-job.ts @@ -0,0 +1,31 @@ +import { BatchJobService } from "../../../../services" + +/** + * @oas [post] /batch-jobs/{id}/confirm + * operationId: "PostBatchJobsBatchJobConfirmProcessing" + * summary: "Confirm a batch job" + * description: "Confirms that a previously requested batch job should be executed." + * x-authenticated: true + * parameters: + * - (path) id=* {string} The id of the batch job. 
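`transformQuery` plays the same role for query strings: it normalizes array-like params, validates them against a params class, and fills `req.filterableFields` plus either `req.listConfig` or `req.retrieveConfig`. A sketch of the wiring modeled on the batch-jobs and collections routes in this diff; the `AdminGetWidgetsParams` class, the default fields, and the `widgetService` are illustrative, and the `req.*` properties assume Medusa's extended `Request` type.

```ts
import { Router } from "express"
import { IsNumber, IsOptional, IsString } from "class-validator"
import { Type } from "class-transformer"
import middlewares, { transformQuery } from "../../../middlewares"

// Illustrative query-params class; limit/offset/expand/fields/order are the
// fields transformQuery strips out of filterableFields and into listConfig.
export class AdminGetWidgetsParams {
  @IsNumber()
  @IsOptional()
  @Type(() => Number)
  limit?: number = 20

  @IsNumber()
  @IsOptional()
  @Type(() => Number)
  offset?: number = 0

  @IsString()
  @IsOptional()
  q?: string
}

const route = Router()

route.get(
  "/",
  transformQuery(AdminGetWidgetsParams, {
    defaultFields: ["id", "title", "created_at"],
    isList: true,
  }),
  middlewares.wrap(async (req, res) => {
    // Pagination and selection live in listConfig; everything else is a filter.
    const widgetService = req.scope.resolve("widgetService") // hypothetical service
    const [widgets, count] = await widgetService.listAndCount(
      req.filterableFields,
      req.listConfig
    )
    const { limit, offset } = req.validatedQuery
    res.json({ widgets, count, limit, offset })
  })
)
```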
+ * tags: + * - Batch Job + * responses: + * "200": + * description: OK + * content: + * application/json: + * schema: + * properties: + * batch_job: + * $ref: "#/components/schemas/batch_job" + */ + +export default async (req, res) => { + let batch_job = req.batch_job + + const batchJobService: BatchJobService = req.scope.resolve("batchJobService") + batch_job = await batchJobService.confirm(batch_job) + + res.json({ batch_job }) +} diff --git a/packages/medusa/src/api/routes/admin/batch/create-batch-job.ts b/packages/medusa/src/api/routes/admin/batch/create-batch-job.ts new file mode 100644 index 0000000000..af7d022dcc --- /dev/null +++ b/packages/medusa/src/api/routes/admin/batch/create-batch-job.ts @@ -0,0 +1,57 @@ +import { IsBoolean, IsObject, IsOptional, IsString } from "class-validator" +import BatchJobService from "../../../../services/batch-job" +import { validator } from "../../../../utils/validator" +import { BatchJob } from "../../../../models" + +/** + * @oas [post] /batch-jobs + * operationId: "PostBatchJobs" + * summary: "Create a Batch Job" + * description: "Creates a Batch Job." + * x-authenticated: true + * parameters: + * - (body) type=* {string} The type of batch job to start. + * - (body) context=* {object} Additional information regarding the batch to be used for processing. + * - (body) dry_run=* {boolean} Set a batch job in dry_run mode to get some information on what will be done without applying any modifications. + * tags: + * - Batch Job + * responses: + * 201: + * description: OK + * content: + * application/json: + * schema: + * properties: + * batch_job: + * $ref: "#/components/schemas/batch_job" + */ +export default async (req, res) => { + const validated = await validator(AdminPostBatchesReq, req.body) + + const batchJobService: BatchJobService = req.scope.resolve("batchJobService") + const toCreate = await batchJobService.prepareBatchJobForProcessing( + validated, + req + ) + + const userId = req.user.id ?? req.user.userId + + const batch_job = await batchJobService.create({ + ...toCreate, + created_by: userId, + }) + + res.status(201).json({ batch_job }) +} + +export class AdminPostBatchesReq { + @IsString() + type: string + + @IsObject() + context: BatchJob["context"] + + @IsBoolean() + @IsOptional() + dry_run = false +} diff --git a/packages/medusa/src/api/routes/admin/batch/get-batch-job.ts b/packages/medusa/src/api/routes/admin/batch/get-batch-job.ts new file mode 100644 index 0000000000..4bef63e026 --- /dev/null +++ b/packages/medusa/src/api/routes/admin/batch/get-batch-job.ts @@ -0,0 +1,24 @@ +/** + * @oas [get] /batch-jobs/{id} + * operationId: "GetBatchJobsBatchJob" + * summary: "Retrieve a Batch Job" + * description: "Retrieves a Batch Job." + * x-authenticated: true + * parameters: + * - (path) id=* {string} The id of the Batch Job + * tags: + * - Batch Job + * responses: + * "200": + * description: OK + * content: + * application/json: + * schema: + * properties: + * batch_job: + * $ref: "#/components/schemas/batch_job" + */ +export default async (req, res) => { + const batch_job = req.batch_job + res.status(200).json({ batch_job: batch_job }) +} diff --git a/packages/medusa/src/api/routes/admin/batch/index.ts b/packages/medusa/src/api/routes/admin/batch/index.ts index af5f81e7b5..49ebc99a53 100644 --- a/packages/medusa/src/api/routes/admin/batch/index.ts +++ b/packages/medusa/src/api/routes/admin/batch/index.ts @@ -1,18 +1,44 @@ import { Router } from "express" import { BatchJob } from "../../../.."
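Together, the batch routes support a create, confirm, poll flow over the admin API. A hedged sketch of that flow from a script, assuming the admin API is mounted under `/admin` on a local server at port 9000, a valid API token, and a runtime with a global `fetch` (for example Node 18+); the `product_export` payload mirrors the tests earlier in this diff.

```ts
// Illustrative script against the new /admin/batch-jobs endpoints.
const BASE_URL = "http://localhost:9000" // assumed local Medusa server
const headers = {
  "Content-Type": "application/json",
  Authorization: "Bearer <api-token>", // placeholder credentials
}

async function runProductExport(): Promise<void> {
  // 1. Create the batch job; dry_run keeps it from processing immediately.
  const createRes = await fetch(`${BASE_URL}/admin/batch-jobs`, {
    method: "POST",
    headers,
    body: JSON.stringify({ type: "product_export", context: {}, dry_run: true }),
  })
  const { batch_job } = await createRes.json()

  // 2. Confirm it so it is actually processed.
  await fetch(`${BASE_URL}/admin/batch-jobs/${batch_job.id}/confirm`, {
    method: "POST",
    headers,
  })

  // 3. Re-fetch the job to inspect its status (a real script would poll).
  const getRes = await fetch(`${BASE_URL}/admin/batch-jobs/${batch_job.id}`, {
    headers,
  })
  console.log((await getRes.json()).batch_job)
}

runProductExport().catch(console.error)
```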
import { DeleteResponse, PaginatedResponse } from "../../../../types/common" -import middlewares from "../../../middlewares" +import middlewares, { + canAccessBatchJob, + getRequestedBatchJob, + transformQuery, +} from "../../../middlewares" +import { AdminGetBatchParams } from "./list-batch-jobs" export default (app) => { const route = Router() - app.use("/batch", route) + app.use("/batch-jobs", route) route.get( "/", - middlewares.normalizeQuery(), + transformQuery(AdminGetBatchParams, { + defaultFields: defaultAdminBatchFields, + isList: true, + }), middlewares.wrap(require("./list-batch-jobs").default) ) + route.post("/", middlewares.wrap(require("./create-batch-job").default)) + + const batchJobRouter = Router({ mergeParams: true }) + + route.use("/:id", getRequestedBatchJob, canAccessBatchJob, batchJobRouter) + + batchJobRouter.get("/", middlewares.wrap(require("./get-batch-job").default)) + + batchJobRouter.post( + "/confirm", + middlewares.wrap(require("./confirm-batch-job").default) + ) + + batchJobRouter.post( + "/cancel", + middlewares.wrap(require("./cancel-batch-job").default) + ) + return app } @@ -28,7 +54,6 @@ export type AdminBatchJobListRes = PaginatedResponse & { export const defaultAdminBatchFields = [ "id", - "status", "type", "context", "result", @@ -36,6 +61,17 @@ export const defaultAdminBatchFields = [ "created_at", "updated_at", "deleted_at", + "confirmed_at", + "pre_processed_at", + "confirmed_at", + "processing_at", + "completed_at", + "canceled_at", + "failed_at", ] +export * from "./cancel-batch-job" +export * from "./confirm-batch-job" +export * from "./create-batch-job" +export * from "./get-batch-job" export * from "./list-batch-jobs" diff --git a/packages/medusa/src/api/routes/admin/batch/list-batch-jobs.ts b/packages/medusa/src/api/routes/admin/batch/list-batch-jobs.ts index 432849e4b2..48a138b8a1 100644 --- a/packages/medusa/src/api/routes/admin/batch/list-batch-jobs.ts +++ b/packages/medusa/src/api/routes/admin/batch/list-batch-jobs.ts @@ -1,26 +1,19 @@ -import { MedusaError } from "medusa-core-utils" -import { Type } from "class-transformer" +import { Transform, Type } from "class-transformer" import { IsArray, - IsEnum, IsNumber, IsOptional, IsString, - ValidateNested, } from "class-validator" -import { pickBy, omit, identity } from "lodash" -import { defaultAdminBatchFields } from "." +import { pickBy } from "lodash" import BatchJobService from "../../../../services/batch-job" -import { BatchJob } from "../../../../models" -import { BatchJobStatus } from "../../../../types/batch-job" import { DateComparisonOperator } from "../../../../types/common" import { IsType } from "../../../../utils/validators/is-type" -import { getListConfig } from "../../../../utils/get-query-config" -import { validator } from "../../../../utils/validator" +import { Request } from "express" /** - * @oas [get] /batch - * operationId: "GetBatch" + * @oas [get] /batch-jobs + * operationId: "GetBatchJobs" * summary: "List Batch Jobs" * description: "Retrieve a list of Batch Jobs." * x-authenticated: true @@ -28,11 +21,15 @@ import { validator } from "../../../../utils/validator" * - (query) limit {string} The number of collections to return. * - (query) offset {string} The offset of collections to return. * - (query) type {string | string[]} Filter by the batch type - * - (query) status {string} Filter by the status of the batch operation + * - (query) confirmed_at {DateComparisonOperator | null} Date comparison for when resulting collections was confirmed, i.e. 
less than, greater than etc. + * - (query) pre_processed_at {DateComparisonOperator | null} Date comparison for when resulting collections was pre processed, i.e. less than, greater than etc. + * - (query) completed_at {DateComparisonOperator | null} Date comparison for when resulting collections was completed, i.e. less than, greater than etc. + * - (query) failed_at {DateComparisonOperator | null} Date comparison for when resulting collections was failed, i.e. less than, greater than etc. + * - (query) canceled_at {DateComparisonOperator | null} Date comparison for when resulting collections was canceled, i.e. less than, greater than etc. * - (query) order {string} Order used when retrieving batch jobs * - (query) expand[] {string} (Comma separated) Which fields should be expanded in each order of the result. * - (query) fields[] {string} (Comma separated) Which fields should be included in each order of the result. - * - (query) deleted_at {DateComparisonOperator} Date comparison for when resulting collections was deleted, i.e. less than, greater than etc. + * - (query) deleted_at {DateComparisonOperator | null} Date comparison for when resulting collections was deleted, i.e. less than, greater than etc. * - (query) created_at {DateComparisonOperator} Date comparison for when resulting collections was created, i.e. less than, greater than etc. * - (query) updated_at {DateComparisonOperator} Date comparison for when resulting collections was updated, i.e. less than, greater than etc. * tags: @@ -47,47 +44,25 @@ import { validator } from "../../../../utils/validator" * batch_job: * $ref: "#/components/schemas/batch_job" */ -export default async (req, res) => { - const { fields, expand, order, limit, offset, ...filterableFields } = - await validator(AdminGetBatchParams, req.query) - +export default async (req: Request, res) => { const batchService: BatchJobService = req.scope.resolve("batchJobService") - let orderBy: { [k: symbol]: "DESC" | "ASC" } | undefined - if (typeof order !== "undefined") { - if (order.startsWith("-")) { - const [, field] = order.split("-") - orderBy = { [field]: "DESC" } - } else { - orderBy = { [order]: "ASC" } - } - } - - const listConfig = getListConfig( - defaultAdminBatchFields as (keyof BatchJob)[], - [], - fields?.split(",") as (keyof BatchJob)[], - expand?.split(","), - limit, - offset, - orderBy - ) - - const created_by: string = req.user.id || req.user.userId + const created_by = req.user?.id || req.user?.userId const [jobs, count] = await batchService.listAndCount( pickBy( - { created_by, ...filterableFields }, + { created_by, ...(req.filterableFields ?? {}) }, (val) => typeof val !== "undefined" ), - listConfig + req.listConfig ) + const { limit, offset } = req.validatedQuery res.status(200).json({ batch_jobs: jobs, count, - offset: offset, - limit: limit, + offset, + limit, }) } @@ -121,22 +96,40 @@ export class AdminGetBatchParams extends AdminGetBatchPaginationParams { @IsType([String, [String]]) id?: string | string[] - @IsOptional() - @IsArray() - @IsEnum(BatchJobStatus, { each: true }) - status?: BatchJobStatus[] - @IsArray() @IsOptional() type?: string[] @IsOptional() - @ValidateNested() + @Transform(({ value }) => (value === "null" ? null : value)) @Type(() => DateComparisonOperator) + confirmed_at?: DateComparisonOperator | null + + @IsOptional() + @Transform(({ value }) => (value === "null" ? 
null : value)) + @Type(() => DateComparisonOperator) + pre_processed_at?: DateComparisonOperator | null + + @IsOptional() + @Transform(({ value }) => (value === "null" ? null : value)) + @Type(() => DateComparisonOperator) + completed_at?: DateComparisonOperator | null + + @IsOptional() + @Transform(({ value }) => (value === "null" ? null : value)) + @Type(() => DateComparisonOperator) + failed_at?: DateComparisonOperator | null + + @IsOptional() + @Transform(({ value }) => (value === "null" ? null : value)) + @Type(() => DateComparisonOperator) + canceled_at?: DateComparisonOperator | null + + @IsType([DateComparisonOperator]) + @IsOptional() created_at?: DateComparisonOperator @IsOptional() - @ValidateNested() @Type(() => DateComparisonOperator) updated_at?: DateComparisonOperator } diff --git a/packages/medusa/src/api/routes/admin/collections/add-products.ts b/packages/medusa/src/api/routes/admin/collections/add-products.ts index 15f8171f5b..65eedaf59c 100644 --- a/packages/medusa/src/api/routes/admin/collections/add-products.ts +++ b/packages/medusa/src/api/routes/admin/collections/add-products.ts @@ -1,6 +1,7 @@ import { ArrayNotEmpty, IsString } from "class-validator" import ProductCollectionService from "../../../../services/product-collection" -import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" + /** * @oas [post] /collections/{id}/products/batch * operationId: "PostProductsToCollection" @@ -28,10 +29,9 @@ import { validator } from "../../../../utils/validator" * "200": * description: OK */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params - - const validated = await validator(AdminPostProductsToCollectionReq, req.body) + const { validatedBody } = req as { validatedBody: AdminPostProductsToCollectionReq } const productCollectionService: ProductCollectionService = req.scope.resolve( "productCollectionService" @@ -39,7 +39,7 @@ export default async (req, res) => { const collection = await productCollectionService.addProducts( id, - validated.product_ids + validatedBody.product_ids ) res.status(200).json({ collection }) diff --git a/packages/medusa/src/api/routes/admin/collections/create-collection.ts b/packages/medusa/src/api/routes/admin/collections/create-collection.ts index 5cd23ed468..509cdd909c 100644 --- a/packages/medusa/src/api/routes/admin/collections/create-collection.ts +++ b/packages/medusa/src/api/routes/admin/collections/create-collection.ts @@ -1,6 +1,7 @@ import { IsNotEmpty, IsObject, IsOptional, IsString } from "class-validator" import ProductCollectionService from "../../../../services/product-collection" -import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" + /** * @oas [post] /collections * operationId: "PostCollections" @@ -35,14 +36,14 @@ import { validator } from "../../../../utils/validator" * collection: * $ref: "#/components/schemas/product_collection" */ -export default async (req, res) => { - const validated = await validator(AdminPostCollectionsReq, req.body) +export default async (req: Request, res: Response) => { + const { validatedBody } = req const productCollectionService: ProductCollectionService = req.scope.resolve( "productCollectionService" ) - const created = await productCollectionService.create(validated) + const created = await productCollectionService.create(validatedBody) const collection = await productCollectionService.retrieve(created.id) res.status(200).json({ 
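With `transformBody(AdminPostProductsToCollectionReq)` registered on the route (see the collections router below), the batch add-products endpoint only needs a `product_ids` array in the body. An illustrative call, with host, token, and ids as placeholders:

```ts
// Adds products to a collection via the batch endpoint; returns the
// updated collection as the handler above does.
async function addProductsToCollection(
  collectionId: string,
  productIds: string[]
) {
  const res = await fetch(
    `http://localhost:9000/admin/collections/${collectionId}/products/batch`,
    {
      method: "POST",
      headers: {
        "Content-Type": "application/json",
        Authorization: "Bearer <api-token>", // placeholder credentials
      },
      body: JSON.stringify({ product_ids: productIds }),
    }
  )
  const { collection } = await res.json()
  return collection
}
```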
collection }) diff --git a/packages/medusa/src/api/routes/admin/collections/delete-collection.ts b/packages/medusa/src/api/routes/admin/collections/delete-collection.ts index fc8fc6de50..60cbd7d412 100644 --- a/packages/medusa/src/api/routes/admin/collections/delete-collection.ts +++ b/packages/medusa/src/api/routes/admin/collections/delete-collection.ts @@ -1,4 +1,5 @@ import ProductCollectionService from "../../../../services/product-collection" +import { Request, Response } from "express" /** * @oas [delete] /collections/{id} @@ -26,7 +27,7 @@ import ProductCollectionService from "../../../../services/product-collection" * deleted: * type: boolean */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params const productCollectionService: ProductCollectionService = req.scope.resolve( diff --git a/packages/medusa/src/api/routes/admin/collections/get-collection.ts b/packages/medusa/src/api/routes/admin/collections/get-collection.ts index cb0a449c35..2b7c298808 100644 --- a/packages/medusa/src/api/routes/admin/collections/get-collection.ts +++ b/packages/medusa/src/api/routes/admin/collections/get-collection.ts @@ -1,5 +1,6 @@ import { defaultAdminCollectionsRelations } from "." import ProductCollectionService from "../../../../services/product-collection" +import { Request, Response } from "express" /** * @oas [get] /collections/{id} * operationId: "GetCollectionsCollection" @@ -20,17 +21,15 @@ import ProductCollectionService from "../../../../services/product-collection" * collection: * $ref: "#/components/schemas/product_collection" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params - const retrieveConfig = { - relations: defaultAdminCollectionsRelations, - } - const productCollectionService: ProductCollectionService = req.scope.resolve( "productCollectionService" ) - const collection = await productCollectionService.retrieve(id, retrieveConfig) + const collection = await productCollectionService.retrieve(id, { + relations: defaultAdminCollectionsRelations, + }) res.status(200).json({ collection }) } diff --git a/packages/medusa/src/api/routes/admin/collections/index.ts b/packages/medusa/src/api/routes/admin/collections/index.ts index c1e33a1089..81bb985ff5 100644 --- a/packages/medusa/src/api/routes/admin/collections/index.ts +++ b/packages/medusa/src/api/routes/admin/collections/index.ts @@ -2,27 +2,58 @@ import { Router } from "express" import "reflect-metadata" import { ProductCollection } from "../../../.." 
import { DeleteResponse, PaginatedResponse } from "../../../../types/common" -import middlewares from "../../../middlewares" - -const route = Router() +import middlewares, { transformBody, transformQuery } from "../../../middlewares" +import { AdminGetCollectionsParams } from "./list-collections" +import { AdminPostCollectionsReq } from "./create-collection" +import { AdminPostCollectionsCollectionReq } from "./update-collection" +import { AdminPostProductsToCollectionReq } from "./add-products" +import { AdminDeleteProductsFromCollectionReq } from "./remove-products" export default (app) => { + const route = Router() app.use("/collections", route) - route.post("/", middlewares.wrap(require("./create-collection").default)) - route.post("/:id", middlewares.wrap(require("./update-collection").default)) - - route.delete("/:id", middlewares.wrap(require("./delete-collection").default)) - - route.get("/:id", middlewares.wrap(require("./get-collection").default)) - route.get("/", middlewares.wrap(require("./list-collections").default)) - route.post( - "/:id/products/batch", + "/", + transformBody(AdminPostCollectionsReq), + middlewares.wrap(require("./create-collection").default) + ) + route.get( + "/", + transformQuery( + AdminGetCollectionsParams, + { + defaultRelations: defaultAdminCollectionsRelations, + defaultFields: defaultAdminCollectionsFields, + isList: true, + } + ), + middlewares.wrap(require("./list-collections").default) + ) + + const collectionRouter = Router({ mergeParams: true }) + route.use("/:id", collectionRouter) + collectionRouter.post( + "/", + transformBody(AdminPostCollectionsCollectionReq), + middlewares.wrap(require("./update-collection").default) + ) + collectionRouter.get( + "/", + middlewares.wrap(require("./get-collection").default) + ) + collectionRouter.delete( + "/", + middlewares.wrap(require("./delete-collection").default) + ) + collectionRouter.post( + "/products/batch", + transformBody(AdminPostProductsToCollectionReq), middlewares.wrap(require("./add-products").default) ) - route.delete( - "/:id/products/batch", + collectionRouter.delete( + "/products/batch", + transformBody(AdminDeleteProductsFromCollectionReq), middlewares.wrap(require("./remove-products").default) ) diff --git a/packages/medusa/src/api/routes/admin/collections/list-collections.ts b/packages/medusa/src/api/routes/admin/collections/list-collections.ts index a9dc443d03..67550b2527 100644 --- a/packages/medusa/src/api/routes/admin/collections/list-collections.ts +++ b/packages/medusa/src/api/routes/admin/collections/list-collections.ts @@ -1,13 +1,10 @@ import { Type } from "class-transformer" import { IsNumber, IsOptional, IsString, ValidateNested } from "class-validator" import _, { identity } from "lodash" -import { - defaultAdminCollectionsFields, - defaultAdminCollectionsRelations, -} from "." 
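The collections router now declares `:id` once and hangs every entity-level route off a nested `Router({ mergeParams: true })`. A generic sketch of that pattern with stub handlers; the `widgets` resource is purely illustrative.

```ts
import { RequestHandler, Router } from "express"

// Stub handlers standing in for the real route modules.
const notImplemented =
  (name: string): RequestHandler =>
  (req, res) => {
    res.json({ route: name, id: req.params.id })
  }

export default (app: Router) => {
  const route = Router()
  app.use("/widgets", route)

  // Collection-level routes.
  route.get("/", notImplemented("list"))
  route.post("/", notImplemented("create"))

  // mergeParams lets the sub-router's handlers read req.params.id even
  // though only the parent declares the ":id" segment.
  const widgetRouter = Router({ mergeParams: true })
  route.use("/:id", widgetRouter)

  widgetRouter.get("/", notImplemented("get"))
  widgetRouter.post("/", notImplemented("update"))
  widgetRouter.delete("/", notImplemented("delete"))

  return app
}
```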
import ProductCollectionService from "../../../../services/product-collection" import { DateComparisonOperator } from "../../../../types/common" -import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" + /** * @oas [get] /collections * operationId: "GetCollections" @@ -34,21 +31,16 @@ import { validator } from "../../../../utils/validator" * collection: * $ref: "#/components/schemas/product_collection" */ -export default async (req, res) => { - const validated = await validator(AdminGetCollectionsParams, req.query) - +export default async (req: Request, res: Response) => { const productCollectionService: ProductCollectionService = req.scope.resolve( "productCollectionService" ) - const listConfig = { - select: defaultAdminCollectionsFields, - relations: defaultAdminCollectionsRelations, - skip: validated.offset, - take: validated.limit, - } - - const filterableFields = _.omit(validated, ["limit", "offset"]) + const { + validatedQuery: { limit, offset }, + filterableFields, + listConfig + } = req const [collections, count] = await productCollectionService.listAndCount( _.pickBy(filterableFields, identity), @@ -58,8 +50,8 @@ export default async (req, res) => { res.status(200).json({ collections, count, - offset: validated.offset, - limit: validated.limit, + offset, + limit, }) } diff --git a/packages/medusa/src/api/routes/admin/collections/remove-products.ts b/packages/medusa/src/api/routes/admin/collections/remove-products.ts index 54b9d7f4af..862b7c77eb 100644 --- a/packages/medusa/src/api/routes/admin/collections/remove-products.ts +++ b/packages/medusa/src/api/routes/admin/collections/remove-products.ts @@ -1,6 +1,7 @@ import { ArrayNotEmpty, IsString } from "class-validator" import ProductCollectionService from "../../../../services/product-collection" -import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" + /** * @oas [delete] /collections/{id}/products/batch * operationId: "DeleteProductsFromCollection" @@ -28,24 +29,20 @@ import { validator } from "../../../../utils/validator" * "200": * description: OK */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params - - const validated = await validator( - AdminDeleteProductsFromCollectionReq, - req.body - ) + const { validatedBody } = req as { validatedBody: AdminDeleteProductsFromCollectionReq } const productCollectionService: ProductCollectionService = req.scope.resolve( "productCollectionService" ) - await productCollectionService.removeProducts(id, validated.product_ids) + await productCollectionService.removeProducts(id, validatedBody.product_ids) res.json({ id, object: "product-collection", - removed_products: validated.product_ids, + removed_products: validatedBody.product_ids, }) } diff --git a/packages/medusa/src/api/routes/admin/collections/update-collection.ts b/packages/medusa/src/api/routes/admin/collections/update-collection.ts index 5adcdd46a3..072d8c22d2 100644 --- a/packages/medusa/src/api/routes/admin/collections/update-collection.ts +++ b/packages/medusa/src/api/routes/admin/collections/update-collection.ts @@ -1,6 +1,6 @@ import { IsObject, IsOptional, IsString } from "class-validator" import ProductCollectionService from "../../../../services/product-collection" -import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" /** * @oas [post] /collections/{id} * operationId: "PostCollectionsCollection" @@ -35,15 +35,15 @@ 
import { validator } from "../../../../utils/validator" * collection: * $ref: "#/components/schemas/product_collection" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params + const { validatedBody } = req - const validated = await validator(AdminPostCollectionsCollectionReq, req.body) const productCollectionService: ProductCollectionService = req.scope.resolve( "productCollectionService" ) - const updated = await productCollectionService.update(id, validated) + const updated = await productCollectionService.update(id, validatedBody) const collection = await productCollectionService.retrieve(updated.id) res.status(200).json({ collection }) diff --git a/packages/medusa/src/api/routes/admin/customer-groups/add-customers-batch.ts b/packages/medusa/src/api/routes/admin/customer-groups/add-customers-batch.ts index 8778f7ac72..96aa4b5ead 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/add-customers-batch.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/add-customers-batch.ts @@ -3,6 +3,7 @@ import { ValidateNested } from "class-validator" import { CustomerGroupService } from "../../../../services" import { CustomerGroupsBatchCustomer } from "../../../../types/customer-groups" import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" /** * @oas [post] /customer-groups/{id}/customers/batch @@ -26,7 +27,7 @@ import { validator } from "../../../../utils/validator" * $ref: "#/components/schemas/customergroup" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params const validated = await validator( AdminPostCustomerGroupsGroupCustomersBatchReq, diff --git a/packages/medusa/src/api/routes/admin/customer-groups/create-customer-group.ts b/packages/medusa/src/api/routes/admin/customer-groups/create-customer-group.ts index 8be4b65270..633bd94e2a 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/create-customer-group.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/create-customer-group.ts @@ -1,6 +1,7 @@ import { IsObject, IsOptional, IsString } from "class-validator" import { CustomerGroupService } from "../../../../services" import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" /** * @oas [post] /customer-groups @@ -24,7 +25,7 @@ import { validator } from "../../../../utils/validator" * $ref: "#/components/schemas/customer_group" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const validated = await validator(AdminPostCustomerGroupsReq, req.body) const customerGroupService: CustomerGroupService = req.scope.resolve( diff --git a/packages/medusa/src/api/routes/admin/customer-groups/delete-customer-group.ts b/packages/medusa/src/api/routes/admin/customer-groups/delete-customer-group.ts index cc6f580ebb..4c2ba7f2ef 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/delete-customer-group.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/delete-customer-group.ts @@ -1,4 +1,5 @@ import { CustomerGroupService } from "../../../../services" +import { Request, Response } from "express" /** * @oas [delete] /customer-groups/{id} @@ -27,7 +28,7 @@ import { CustomerGroupService } from "../../../../services" * type: boolean */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params const 
customerGroupService: CustomerGroupService = req.scope.resolve( diff --git a/packages/medusa/src/api/routes/admin/customer-groups/delete-customers-batch.ts b/packages/medusa/src/api/routes/admin/customer-groups/delete-customers-batch.ts index 1ea51f6ca4..fbca982372 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/delete-customers-batch.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/delete-customers-batch.ts @@ -3,6 +3,7 @@ import { ValidateNested } from "class-validator" import { CustomerGroupService } from "../../../../services" import { CustomerGroupsBatchCustomer } from "../../../../types/customer-groups" import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" /** * @oas [delete] /customer-groups/{id}/customers/batch @@ -26,7 +27,7 @@ import { validator } from "../../../../utils/validator" * $ref: "#/components/schemas/customergroup" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params const validated = await validator( AdminDeleteCustomerGroupsGroupCustomerBatchReq, diff --git a/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group-customers.ts b/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group-customers.ts index b8bbda49bb..265a81f490 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group-customers.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group-customers.ts @@ -1,4 +1,5 @@ import CustomerController from "../../../../controllers/customers" +import { Request, Response } from "express" /** * @oas [get] /customer-groups/{id}/customers @@ -18,7 +19,7 @@ import CustomerController from "../../../../controllers/customers" * customer: * $ref: "#/components/schemas/customer" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params req.query.groups = [id] diff --git a/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group.ts b/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group.ts index dd77de94e0..764b9b752b 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/get-customer-group.ts @@ -1,7 +1,6 @@ import { CustomerGroupService } from "../../../../services" import { FindParams } from "../../../../types/common" -import { validator } from "../../../../utils/validator" -import { defaultAdminCustomerGroupsRelations } from "." +import { Request, Response } from "express" /** * @oas [get] /customer-groups/{id} @@ -23,30 +22,17 @@ import { defaultAdminCustomerGroupsRelations } from "." * customer_group: * $ref: "#/components/schemas/customer_group" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params - const validated = await validator( - AdminGetCustomerGroupsGroupParams, - req.query - ) - const customerGroupService: CustomerGroupService = req.scope.resolve( "customerGroupService" ) - let expandFields: string[] = [] - if (validated.expand) { - expandFields = validated.expand.split(",") - } - - const findConfig = { - relations: expandFields.length - ? 
expandFields - : defaultAdminCustomerGroupsRelations, - } - - const customerGroup = await customerGroupService.retrieve(id, findConfig) + const customerGroup = await customerGroupService.retrieve( + id, + req.retrieveConfig + ) res.json({ customer_group: customerGroup }) } diff --git a/packages/medusa/src/api/routes/admin/customer-groups/index.ts b/packages/medusa/src/api/routes/admin/customer-groups/index.ts index f6418e78f3..a01bfd42b0 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/index.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/index.ts @@ -1,38 +1,53 @@ import { Router } from "express" import { CustomerGroup } from "../../../.." import { DeleteResponse, PaginatedResponse } from "../../../../types/common" -import middlewares from "../../../middlewares" +import middlewares, { transformQuery } from "../../../middlewares" +import { AdminGetCustomerGroupsGroupParams } from "./get-customer-group" +import { AdminGetCustomerGroupsParams } from "./list-customer-groups" const route = Router() export default (app) => { app.use("/customer-groups", route) - route.get("/", middlewares.wrap(require("./list-customer-groups").default)) - route.get("/:id", middlewares.wrap(require("./get-customer-group").default)) route.post("/", middlewares.wrap(require("./create-customer-group").default)) - route.post( - "/:id/customers/batch", - middlewares.wrap(require("./add-customers-batch").default) - ) - route.delete( - "/:id/customers/batch", - middlewares.wrap(require("./delete-customers-batch").default) + route.get( + "/", + transformQuery(AdminGetCustomerGroupsParams, { + defaultRelations: defaultAdminCustomerGroupsRelations, + isList: true, + }), + middlewares.wrap(require("./list-customer-groups").default) ) - route.delete( - "/:id", + const customerGroupRouter = Router({ mergeParams: true }) + route.use("/:id", customerGroupRouter) + customerGroupRouter.get( + "/", + transformQuery(AdminGetCustomerGroupsGroupParams, { + defaultRelations: defaultAdminCustomerGroupsRelations, + }), + middlewares.wrap(require("./get-customer-group").default) + ) + customerGroupRouter.delete( + "/", middlewares.wrap(require("./delete-customer-group").default) ) - - route.get( - "/:id/customers", + customerGroupRouter.post( + "/", + middlewares.wrap(require("./update-customer-group").default) + ) + customerGroupRouter.get( + "/customers", middlewares.wrap(require("./get-customer-group-customers").default) ) - - route.post( - "/:id", - middlewares.wrap(require("./update-customer-group").default) + customerGroupRouter.post( + "/customers/batch", + middlewares.wrap(require("./add-customers-batch").default) + ) + customerGroupRouter.delete( + "/customers/batch", + middlewares.wrap(require("./delete-customers-batch").default) ) return app diff --git a/packages/medusa/src/api/routes/admin/customer-groups/list-customer-groups.ts b/packages/medusa/src/api/routes/admin/customer-groups/list-customer-groups.ts index 1685f3c2df..130460524e 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/list-customer-groups.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/list-customer-groups.ts @@ -1,12 +1,8 @@ import { Type } from "class-transformer" import { IsNumber, IsOptional, IsString } from "class-validator" -import omit from "lodash/omit" -import { defaultAdminCustomerGroupsRelations } from "." 
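With the manual `expand` parsing removed, the relations passed to `customerGroupService.retrieve` come from `transformQuery` via `req.retrieveConfig`, falling back to `defaultAdminCustomerGroupsRelations`. A small illustrative request that expands the group's customers; the host, token, and id are placeholders, and the exact relation name is an assumption.

```ts
// Fetch a customer group and expand its customers; the expand handling is
// done by the transformQuery middleware registered on this route.
async function getGroupWithCustomers(groupId: string) {
  const res = await fetch(
    `http://localhost:9000/admin/customer-groups/${groupId}?expand=customers`,
    { headers: { Authorization: "Bearer <api-token>" } } // placeholder credentials
  )
  const { customer_group } = await res.json()
  return customer_group
}
```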
-import { CustomerGroup } from "../../../../models/customer-group" import { CustomerGroupService } from "../../../../services" -import { FindConfig } from "../../../../types/common" import { FilterableCustomerGroupProps } from "../../../../types/customer-groups" -import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" /** * @oas [get] /customer-groups @@ -37,53 +33,22 @@ import { validator } from "../../../../utils/validator" * customerGroup: * $ref: "#/components/schemas/customer_group" */ -export default async (req, res) => { - const validated = await validator(AdminGetCustomerGroupsParams, req.query) - +export default async (req: Request, res: Response) => { const customerGroupService: CustomerGroupService = req.scope.resolve( "customerGroupService" ) - let expandFields: string[] = [] - if (validated.expand) { - expandFields = validated.expand.split(",") - } - - const listConfig: FindConfig = { - relations: expandFields.length - ? expandFields - : defaultAdminCustomerGroupsRelations, - skip: validated.offset, - take: validated.limit, - order: { created_at: "DESC" } as { [k: string]: "DESC" }, - } - - if (typeof validated.order !== "undefined") { - if (validated.order.startsWith("-")) { - const [, field] = validated.order.split("-") - listConfig.order = { [field]: "DESC" } - } else { - listConfig.order = { [validated.order]: "ASC" } - } - } - - const filterableFields = omit(validated, [ - "limit", - "offset", - "expand", - "order", - ]) - const [data, count] = await customerGroupService.listAndCount( - filterableFields, - listConfig + req.filterableFields, + req.listConfig ) + const { limit, offset } = req.validatedQuery res.json({ count, customer_groups: data, - offset: validated.offset, - limit: validated.limit, + offset, + limit, }) } diff --git a/packages/medusa/src/api/routes/admin/customer-groups/update-customer-group.ts b/packages/medusa/src/api/routes/admin/customer-groups/update-customer-group.ts index c89497f630..1d6a5e5889 100644 --- a/packages/medusa/src/api/routes/admin/customer-groups/update-customer-group.ts +++ b/packages/medusa/src/api/routes/admin/customer-groups/update-customer-group.ts @@ -4,6 +4,7 @@ import { defaultAdminCustomerGroupsRelations } from "." 
import { CustomerGroupService } from "../../../../services" import { FindParams } from "../../../../types/common" import { validator } from "../../../../utils/validator" +import { Request, Response } from "express" /** * @oas [post] /customer-groups/{id} @@ -28,7 +29,7 @@ import { validator } from "../../../../utils/validator" * $ref: "#/components/schemas/customer_group" */ -export default async (req, res) => { +export default async (req: Request, res: Response) => { const { id } = req.params const validatedBody = await validator( diff --git a/packages/medusa/src/api/routes/admin/customers/create-customer.ts b/packages/medusa/src/api/routes/admin/customers/create-customer.ts index a54476b38e..34e9960656 100644 --- a/packages/medusa/src/api/routes/admin/customers/create-customer.ts +++ b/packages/medusa/src/api/routes/admin/customers/create-customer.ts @@ -53,5 +53,5 @@ export class AdminPostCustomersReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record<string, unknown> } diff --git a/packages/medusa/src/api/routes/admin/customers/update-customer.ts b/packages/medusa/src/api/routes/admin/customers/update-customer.ts index 32607ff6cc..42d635875f 100644 --- a/packages/medusa/src/api/routes/admin/customers/update-customer.ts +++ b/packages/medusa/src/api/routes/admin/customers/update-customer.ts @@ -129,7 +129,7 @@ export class AdminPostCustomersCustomerReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record<string, unknown> @IsArray() @IsOptional() diff --git a/packages/medusa/src/api/routes/admin/draft-orders/index.ts b/packages/medusa/src/api/routes/admin/draft-orders/index.ts index 5edffd00b2..8a64dbfbd4 100644 --- a/packages/medusa/src/api/routes/admin/draft-orders/index.ts +++ b/packages/medusa/src/api/routes/admin/draft-orders/index.ts @@ -65,6 +65,7 @@ export const defaultAdminDraftOrdersCartRelations = [ "payment_sessions", "shipping_methods.shipping_option", "discounts", + "customer", "discounts.rule", ] @@ -77,7 +78,7 @@ export const defaultAdminDraftOrdersCartFields: (keyof Cart)[] = [ "total", ] -export const defaultAdminDraftOrdersFields = [ +export const defaultAdminDraftOrdersFields: (keyof DraftOrder)[] = [ "id", "status", "display_id", diff --git a/packages/medusa/src/api/routes/admin/draft-orders/list-draft-orders.ts b/packages/medusa/src/api/routes/admin/draft-orders/list-draft-orders.ts index 808d5ed8d2..0981168086 100644 --- a/packages/medusa/src/api/routes/admin/draft-orders/list-draft-orders.ts +++ b/packages/medusa/src/api/routes/admin/draft-orders/list-draft-orders.ts @@ -7,6 +7,8 @@ import { IsNumber, IsOptional, IsString } from "class-validator" import { validator } from "../../../../utils/validator" import { Type } from "class-transformer" import { DraftOrderListSelector } from "../../../../types/draft-orders" +import { FindConfig } from "../../../../types/common" +import { DraftOrder } from "../../../../models" /** * @oas [get] /draft-orders * operationId: "GetDraftOrders" * summary: "List Draft Orders" * ... @@ -38,11 +40,11 @@ export default async (req, res) => { selector.q = validated.q } - const listConfig = { + const listConfig: FindConfig<DraftOrder> = { select: defaultAdminDraftOrdersFields, relations: defaultAdminDraftOrdersRelations, - skip: validated.offset, - take: validated.limit, + skip: validated.offset ?? 0, + take: validated.limit ?? 50, order: { created_at: "DESC" }, } diff --git a/packages/medusa/src/api/routes/admin/gift-cards/create-gift-card.ts b/packages/medusa/src/api/routes/admin/gift-cards/create-gift-card.ts index 6676b32e27..e8b16f167a 100644 --- a/packages/medusa/src/api/routes/admin/gift-cards/create-gift-card.ts +++ b/packages/medusa/src/api/routes/admin/gift-cards/create-gift-card.ts @@ -77,10 +77,9 @@ export class AdminPostGiftCardsReq { @IsBoolean() is_disabled?: boolean - @IsOptional() @IsString() - region_id?: string + region_id: string @IsOptional() - metadata?: object + metadata?: Record<string, unknown> } diff --git a/packages/medusa/src/api/routes/admin/gift-cards/index.ts b/packages/medusa/src/api/routes/admin/gift-cards/index.ts index 53a4048a14..63619b5725 100644 --- a/packages/medusa/src/api/routes/admin/gift-cards/index.ts +++ b/packages/medusa/src/api/routes/admin/gift-cards/index.ts @@ -2,14 +2,23 @@ import { Router } from "express" import "reflect-metadata" import { GiftCard } from "../../../.." import { DeleteResponse, PaginatedResponse } from "../../../../types/common" -import middlewares from "../../../middlewares" +import middlewares, { transformQuery } from "../../../middlewares" +import { AdminGetGiftCardsParams } from "./list-gift-cards" const route = Router() export default (app) => { app.use("/gift-cards", route) - route.get("/", middlewares.wrap(require("./list-gift-cards").default)) + route.get( + "/", + transformQuery(AdminGetGiftCardsParams, { + defaultFields: defaultAdminGiftCardFields, + defaultRelations: defaultAdminGiftCardRelations, + isList: true, + }), + middlewares.wrap(require("./list-gift-cards").default) + ) route.post("/", middlewares.wrap(require("./create-gift-card").default)) @@ -22,7 +31,7 @@ export default (app) => { return app } -export const defaultAdminGiftCardFields = [ +export const defaultAdminGiftCardFields: (keyof GiftCard)[] = [ "id", "code", "value", diff --git a/packages/medusa/src/api/routes/admin/gift-cards/list-gift-cards.ts b/packages/medusa/src/api/routes/admin/gift-cards/list-gift-cards.ts index 45ba1a5ec2..9aa990cd56 100644 --- a/packages/medusa/src/api/routes/admin/gift-cards/list-gift-cards.ts +++ b/packages/medusa/src/api/routes/admin/gift-cards/list-gift-cards.ts @@ -1,6 +1,6 @@ import { Type } from "class-transformer" import { IsInt, IsOptional, IsString } from "class-validator" -import { defaultAdminGiftCardFields, defaultAdminGiftCardRelations } from "." 
+import { pickBy } from "lodash" import { GiftCardService } from "../../../../services" import { validator } from "../../../../utils/validator" @@ -27,25 +27,16 @@ import { validator } from "../../../../utils/validator" export default async (req, res) => { const validated = await validator(AdminGetGiftCardsParams, req.query) - const selector = {} - - if (validated.q && typeof validated.q !== "undefined") { - selector["q"] = validated.q - } - const giftCardService: GiftCardService = req.scope.resolve("giftCardService") - const giftCards = await giftCardService.list(selector, { - select: defaultAdminGiftCardFields, - relations: defaultAdminGiftCardRelations, - order: { created_at: "DESC" }, - limit: validated.limit, - skip: validated.offset, - }) + const [giftCards, count] = await giftCardService.listAndCount( + pickBy(req.filterableFields, (val) => typeof val !== "undefined"), + req.listConfig + ) res.status(200).json({ gift_cards: giftCards, - count: giftCards.length, + count, offset: validated.offset, limit: validated.limit, }) diff --git a/packages/medusa/src/api/routes/admin/gift-cards/update-gift-card.ts b/packages/medusa/src/api/routes/admin/gift-cards/update-gift-card.ts index f42540576a..7579943934 100644 --- a/packages/medusa/src/api/routes/admin/gift-cards/update-gift-card.ts +++ b/packages/medusa/src/api/routes/admin/gift-cards/update-gift-card.ts @@ -83,5 +83,5 @@ export class AdminPostGiftCardsGiftCardReq { region_id?: string @IsOptional() - metadata?: object + metadata?: Record<string, unknown> } diff --git a/packages/medusa/src/api/routes/admin/orders/create-claim.ts b/packages/medusa/src/api/routes/admin/orders/create-claim.ts index afda9f3730..43a5c0a41a 100644 --- a/packages/medusa/src/api/routes/admin/orders/create-claim.ts +++ b/packages/medusa/src/api/routes/admin/orders/create-claim.ts @@ -14,6 +14,8 @@ import { MedusaError } from "medusa-core-utils" import { defaultAdminOrdersFields, defaultAdminOrdersRelations } from "." 
import { AddressPayload } from "../../../../types/common" import { validator } from "../../../../utils/validator" +import { ClaimTypeValue } from "../../../../types/claim" +import { ClaimType, ClaimReason } from "../../../../models" /** * @oas [post] /order/{id}/claims @@ -332,26 +334,10 @@ export default async (req, res) => { res.status(idempotencyKey.response_code).json(idempotencyKey.response_body) } -enum ClaimTypeEnum { - replace = "replace", - refund = "refund", -} - -type ClaimType = `${ClaimTypeEnum}` - -enum ClaimItemReasonEnum { - missing_item = "missing_item", - wrong_item = "wrong_item", - production_failure = "production_failure", - other = "other", -} - -type ClaimItemReasonType = `${ClaimItemReasonEnum}` - export class AdminPostOrdersOrderClaimsReq { - @IsEnum(ClaimTypeEnum) + @IsEnum(ClaimType) @IsNotEmpty() - type: ClaimType + type: ClaimTypeValue @IsArray() @IsNotEmpty() @@ -432,9 +418,9 @@ class Item { @IsOptional() note?: string - @IsEnum(ClaimItemReasonEnum) + @IsEnum(ClaimReason) @IsOptional() - reason?: ClaimItemReasonType + reason?: ClaimReason @IsArray() @IsOptional() diff --git a/packages/medusa/src/api/routes/admin/orders/fulfill-claim.ts b/packages/medusa/src/api/routes/admin/orders/fulfill-claim.ts index bdad54cb4e..ad413f2f27 100644 --- a/packages/medusa/src/api/routes/admin/orders/fulfill-claim.ts +++ b/packages/medusa/src/api/routes/admin/orders/fulfill-claim.ts @@ -65,7 +65,7 @@ export default async (req, res) => { export class AdminPostOrdersOrderClaimsClaimFulfillmentsReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record<string, unknown> @IsBoolean() @IsOptional() diff --git a/packages/medusa/src/api/routes/admin/orders/update-claim.ts b/packages/medusa/src/api/routes/admin/orders/update-claim.ts index 6c1f671a2f..ff42936289 100644 --- a/packages/medusa/src/api/routes/admin/orders/update-claim.ts +++ b/packages/medusa/src/api/routes/admin/orders/update-claim.ts @@ -133,7 +133,7 @@ export class AdminPostOrdersOrderClaimsClaimReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record<string, unknown> } class ShippingMethod { diff --git a/packages/medusa/src/api/routes/admin/price-lists/create-price-list.ts b/packages/medusa/src/api/routes/admin/price-lists/create-price-list.ts index 5372b3c81c..5dc18c283c 100644 --- a/packages/medusa/src/api/routes/admin/price-lists/create-price-list.ts +++ b/packages/medusa/src/api/routes/admin/price-lists/create-price-list.ts @@ -9,10 +9,11 @@ import { import PriceListService from "../../../../services/price-list" import { AdminPriceListPricesCreateReq, + CreatePriceListInput, PriceListStatus, PriceListType, } from "../../../../types/price-list" -import { validator } from "../../../../utils/validator" +import { Request } from "express" /** * @oas [post] /price_lists @@ -85,13 +86,13 @@ import { validator } from "../../../../utils/validator" * product: * $ref: "#/components/schemas/price_list" */ -export default async (req, res) => { - const validated = await validator(AdminPostPriceListsPriceListReq, req.body) - +export default async (req: Request, res) => { const priceListService: PriceListService = req.scope.resolve("priceListService") - const priceList = await priceListService.create(validated) + const priceList = await priceListService.create( + req.validatedBody as CreatePriceListInput + ) res.json({ price_list: priceList }) } diff --git a/packages/medusa/src/api/routes/admin/price-lists/index.ts b/packages/medusa/src/api/routes/admin/price-lists/index.ts index ef10783591..a77bdc0647 100644 --- 
a/packages/medusa/src/api/routes/admin/price-lists/index.ts +++ b/packages/medusa/src/api/routes/admin/price-lists/index.ts @@ -2,7 +2,18 @@ import { Router } from "express" import "reflect-metadata" import { PriceList } from "../../../.." import { DeleteResponse, PaginatedResponse } from "../../../../types/common" -import middlewares from "../../../middlewares" +import middlewares, { + transformQuery, + transformBody, +} from "../../../middlewares" +import { AdminGetPriceListPaginationParams } from "./list-price-lists" +import { AdminGetPriceListsPriceListProductsParams } from "./list-price-list-products" +import { + allowedAdminProductFields, + defaultAdminProductFields, + defaultAdminProductRelations, +} from "../products" +import { AdminPostPriceListsPriceListReq } from "./create-price-list" const route = Router() @@ -13,12 +24,21 @@ export default (app) => { route.get( "/", - middlewares.normalizeQuery(), + transformQuery(AdminGetPriceListPaginationParams, { isList: true }), middlewares.wrap(require("./list-price-lists").default) ) route.get( "/:id/products", + transformQuery(AdminGetPriceListsPriceListProductsParams, { + allowedFields: allowedAdminProductFields, + defaultFields: defaultAdminProductFields, + defaultRelations: defaultAdminProductRelations.filter( + (r) => r !== "variants.prices" + ), + defaultLimit: 50, + isList: true, + }), middlewares.wrap(require("./list-price-list-products").default) ) @@ -31,7 +51,11 @@ export default (app) => { middlewares.wrap(require("./delete-variant-prices").default) ) - route.post("/", middlewares.wrap(require("./create-price-list").default)) + route.post( + "/", + transformBody(AdminPostPriceListsPriceListReq), + middlewares.wrap(require("./create-price-list").default) + ) route.post("/:id", middlewares.wrap(require("./update-price-list").default)) diff --git a/packages/medusa/src/api/routes/admin/price-lists/list-price-list-products.ts b/packages/medusa/src/api/routes/admin/price-lists/list-price-list-products.ts index 6b4c6de109..85b2dc8807 100644 --- a/packages/medusa/src/api/routes/admin/price-lists/list-price-list-products.ts +++ b/packages/medusa/src/api/routes/admin/price-lists/list-price-list-products.ts @@ -1,5 +1,5 @@ import { Type } from "class-transformer" -import { omit, pickBy } from "lodash" +import { pickBy } from "lodash" import { IsArray, IsBoolean, @@ -8,19 +8,12 @@ import { IsString, ValidateNested, } from "class-validator" -import { Product } from "../../../../models" +import { ProductStatus } from "../../../../models" import { DateComparisonOperator } from "../../../../types/common" -import { validator } from "../../../../utils/validator" import { FilterableProductProps } from "../../../../types/product" -import { - AdminGetProductsPaginationParams, - allowedAdminProductFields, - defaultAdminProductFields, - defaultAdminProductRelations, -} from "../products" -import { MedusaError } from "medusa-core-utils" -import { getListConfig } from "../../../../utils/get-query-config" +import { AdminGetProductsPaginationParams } from "../products" import PriceListService from "../../../../services/price-list" +import { Request } from "express" /** * @oas [get] /price-lists/:id/products @@ -70,81 +63,22 @@ import PriceListService from "../../../../services/price-list" * items: * $ref: "#/components/schemas/product" */ -export default async (req, res) => { +export default async (req: Request, res) => { const { id } = req.params - - const validatedParams = await validator( - AdminGetPriceListsPriceListProductsParams, - req.query - ) 
- - req.query.price_list_id = [id] - - const query: FilterableProductProps = omit(req.query, [ - "limit", - "offset", - "expand", - "fields", - "order", - ]) - - const limit = validatedParams.limit ?? 50 - const offset = validatedParams.offset ?? 0 - const expand = validatedParams.expand - const fields = validatedParams.fields - const order = validatedParams.order - const allowedFields = allowedAdminProductFields - const defaultFields = defaultAdminProductFields as (keyof Product)[] - const defaultRelations = defaultAdminProductRelations.filter( - (r) => r !== "variants.prices" - ) + const { offset, limit } = req.validatedQuery const priceListService: PriceListService = req.scope.resolve("priceListService") - let includeFields: (keyof Product)[] | undefined - if (fields) { - includeFields = fields.split(",") as (keyof Product)[] + const filterableFields: FilterableProductProps = { + ...req.filterableFields, + price_list_id: [id], } - let expandFields: string[] | undefined - if (expand) { - expandFields = expand.split(",") - } - - let orderBy: { [k: symbol]: "DESC" | "ASC" } | undefined - if (typeof order !== "undefined") { - let orderField = order - if (order.startsWith("-")) { - const [, field] = order.split("-") - orderField = field - orderBy = { [field]: "DESC" } - } else { - orderBy = { [order]: "ASC" } - } - - if (!(allowedFields || []).includes(orderField)) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - "Order field must be a valid product field" - ) - } - } - - const listConfig = getListConfig( - defaultFields ?? [], - defaultRelations ?? [], - includeFields, - expandFields, - limit, - offset, - orderBy - ) - const [products, count] = await priceListService.listProducts( id, - pickBy(query, (val) => typeof val !== "undefined"), - listConfig + pickBy(filterableFields, (val) => typeof val !== "undefined"), + req.listConfig ) res.json({ @@ -155,13 +89,6 @@ export default async (req, res) => { }) } -enum ProductStatus { - DRAFT = "draft", - PROPOSED = "proposed", - PUBLISHED = "published", - REJECTED = "rejected", -} - export class AdminGetPriceListsPriceListProductsParams extends AdminGetProductsPaginationParams { @IsString() @IsOptional() diff --git a/packages/medusa/src/api/routes/admin/price-lists/list-price-lists.ts b/packages/medusa/src/api/routes/admin/price-lists/list-price-lists.ts index 61a9ee2aec..437119ffc5 100644 --- a/packages/medusa/src/api/routes/admin/price-lists/list-price-lists.ts +++ b/packages/medusa/src/api/routes/admin/price-lists/list-price-lists.ts @@ -6,6 +6,7 @@ import PriceListService from "../../../../services/price-list" import { FindConfig } from "../../../../types/common" import { FilterablePriceListProps } from "../../../../types/price-list" import { validator } from "../../../../utils/validator" +import { Request } from "express" /** * @oas [get] /price-lists * operationId: "GetPriceLists" @@ -35,46 +36,15 @@ import { validator } from "../../../../utils/validator" * description: The limit of the Price List query. 
* type: integer */ -export default async (req, res) => { - const validated = await validator( - AdminGetPriceListPaginationParams, - req.query - ) +export default async (req: Request, res) => { + const validated = req.validatedQuery const priceListService: PriceListService = req.scope.resolve("priceListService") - let expandFields: string[] = [] - if (validated.expand) { - expandFields = validated.expand.split(",") - } - - const listConfig: FindConfig = { - relations: expandFields, - skip: validated.offset, - take: validated.limit, - order: { created_at: "DESC" } as { [k: string]: "DESC" }, - } - - if (typeof validated.order !== "undefined") { - if (validated.order.startsWith("-")) { - const [, field] = validated.order.split("-") - listConfig.order = { [field]: "DESC" } - } else { - listConfig.order = { [validated.order]: "ASC" } - } - } - - const filterableFields: FilterablePriceListProps = omit(validated, [ - "limit", - "offset", - "expand", - "order", - ]) - const [price_lists, count] = await priceListService.listAndCount( - filterableFields, - listConfig + req.filterableFields, + req.listConfig ) res.json({ diff --git a/packages/medusa/src/api/routes/admin/products/add-option.ts b/packages/medusa/src/api/routes/admin/products/add-option.ts index c91ebc8a91..445dd35292 100644 --- a/packages/medusa/src/api/routes/admin/products/add-option.ts +++ b/packages/medusa/src/api/routes/admin/products/add-option.ts @@ -1,6 +1,6 @@ import { IsString } from "class-validator" import { defaultAdminProductFields, defaultAdminProductRelations } from "." -import { ProductService } from "../../../../services" +import { ProductService, PricingService } from "../../../../services" import { validator } from "../../../../utils/validator" /** @@ -42,13 +42,16 @@ export default async (req, res) => { ) const productService: ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") await productService.addOption(id, validated.title) - const product = await productService.retrieve(id, { + const rawProduct = await productService.retrieve(id, { select: defaultAdminProductFields, relations: defaultAdminProductRelations, }) + const [product] = await pricingService.setProductPrices([rawProduct]) + res.json({ product }) } diff --git a/packages/medusa/src/api/routes/admin/products/create-product.ts b/packages/medusa/src/api/routes/admin/products/create-product.ts index ef82b7a49d..48475ce267 100644 --- a/packages/medusa/src/api/routes/admin/products/create-product.ts +++ b/packages/medusa/src/api/routes/admin/products/create-product.ts @@ -13,10 +13,11 @@ import { EntityManager } from "typeorm" import { defaultAdminProductFields, defaultAdminProductRelations } from "." 
import { ProductService, + PricingService, ProductVariantService, ShippingProfileService, } from "../../../../services" -import { ProductStatus } from "../../../../types/product" +import { ProductStatus } from "../../../../models" import { ProductVariantPricesCreateReq } from "../../../../types/product-variant" import { validator } from "../../../../utils/validator" @@ -211,6 +212,7 @@ export default async (req, res) => { const validated = await validator(AdminPostProductsReq, req.body) const productService: ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") const productVariantService: ProductVariantService = req.scope.resolve( "productVariantService" ) @@ -270,11 +272,13 @@ export default async (req, res) => { } }) - const product = await productService.retrieve(newProduct.id, { + const rawProduct = await productService.retrieve(newProduct.id, { select: defaultAdminProductFields, relations: defaultAdminProductRelations, }) + const [product] = await pricingService.setProductPrices([rawProduct]) + res.json({ product }) } @@ -372,7 +376,7 @@ class ProductVariantReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record<string, unknown> @IsArray() @ValidateNested({ each: true }) @@ -481,5 +485,5 @@ export class AdminPostProductsReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record<string, unknown> } diff --git a/packages/medusa/src/api/routes/admin/products/delete-variant.ts b/packages/medusa/src/api/routes/admin/products/delete-variant.ts index 17740cc435..3a28989f4f 100644 --- a/packages/medusa/src/api/routes/admin/products/delete-variant.ts +++ b/packages/medusa/src/api/routes/admin/products/delete-variant.ts @@ -1,5 +1,9 @@ import { defaultAdminProductFields, defaultAdminProductRelations } from "." -import { ProductService, ProductVariantService } from "../../../../services" +import { + ProductService, + PricingService, + ProductVariantService, +} from "../../../../services" /** * @oas [delete] /products/{id}/variants/{variant_id} @@ -35,6 +39,7 @@ export default async (req, res) => { "productVariantService" ) const productService: ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") await productVariantService.delete(variant_id) @@ -42,11 +47,12 @@ export default async (req, res) => { select: defaultAdminProductFields, relations: defaultAdminProductRelations, }) + const [product] = await pricingService.setProductPrices([data]) res.json({ variant_id, object: "product-variant", deleted: true, - product: data, + product, }) } diff --git a/packages/medusa/src/api/routes/admin/products/get-product.ts b/packages/medusa/src/api/routes/admin/products/get-product.ts index 60d3145ac0..3935385888 100.644 --- a/packages/medusa/src/api/routes/admin/products/get-product.ts +++ b/packages/medusa/src/api/routes/admin/products/get-product.ts @@ -1,5 +1,5 @@ import { defaultAdminProductFields, defaultAdminProductRelations } from "." 
-import { ProductService } from "../../../../services" +import { ProductService, PricingService } from "../../../../services" /** * @oas [get] /products/{id} @@ -25,11 +25,14 @@ export default async (req, res) => { const { id } = req.params const productService: ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") - const product = await productService.retrieve(id, { + const rawProduct = await productService.retrieve(id, { select: defaultAdminProductFields, relations: defaultAdminProductRelations, }) + const [product] = await pricingService.setProductPrices([rawProduct]) + res.json({ product }) } diff --git a/packages/medusa/src/api/routes/admin/products/index.ts b/packages/medusa/src/api/routes/admin/products/index.ts index c4f756856e..2b3931680f 100644 --- a/packages/medusa/src/api/routes/admin/products/index.ts +++ b/packages/medusa/src/api/routes/admin/products/index.ts @@ -1,5 +1,6 @@ import { Router } from "express" import "reflect-metadata" +import { PricedProduct } from "../../../../types/pricing" import { Product, ProductTag, ProductType } from "../../../.." import { PaginatedResponse } from "../../../../types/common" import middlewares from "../../../middlewares" @@ -74,7 +75,7 @@ export const defaultAdminProductRelations = [ "collection", ] -export const defaultAdminProductFields = [ +export const defaultAdminProductFields: (keyof Product)[] = [ "id", "title", "subtitle", @@ -102,10 +103,7 @@ export const defaultAdminProductFields = [ "metadata", ] -export const defaultAdminGetProductsVariantsFields = [ - "id", - "product_id" -] +export const defaultAdminGetProductsVariantsFields = ["id", "product_id"] export const allowedAdminProductFields = [ "id", @@ -166,7 +164,7 @@ export type AdminProductsDeleteRes = { } export type AdminProductsListRes = PaginatedResponse & { - products: Product[] + products: (PricedProduct | Product)[] } export type AdminProductsListTypesRes = { diff --git a/packages/medusa/src/api/routes/admin/products/list-products.ts b/packages/medusa/src/api/routes/admin/products/list-products.ts index c102aae7f9..22468e13b6 100644 --- a/packages/medusa/src/api/routes/admin/products/list-products.ts +++ b/packages/medusa/src/api/routes/admin/products/list-products.ts @@ -9,7 +9,7 @@ import { ValidateNested, } from "class-validator" import { omit } from "lodash" -import { Product } from "../../../../models/product" +import { Product, ProductStatus } from "../../../../models/product" import { DateComparisonOperator } from "../../../../types/common" import { allowedAdminProductFields, @@ -97,13 +97,6 @@ export default async (req, res) => { res.json(result) } -export enum ProductStatus { - DRAFT = "draft", - PROPOSED = "proposed", - PUBLISHED = "published", - REJECTED = "rejected", -} - export class AdminGetProductsPaginationParams { @IsNumber() @IsOptional() diff --git a/packages/medusa/src/api/routes/admin/products/update-product.ts b/packages/medusa/src/api/routes/admin/products/update-product.ts index d80e986e74..d5935c2f3f 100644 --- a/packages/medusa/src/api/routes/admin/products/update-product.ts +++ b/packages/medusa/src/api/routes/admin/products/update-product.ts @@ -12,12 +12,9 @@ import { ValidateIf, ValidateNested, } from "class-validator" -import { - defaultAdminProductFields, - defaultAdminProductRelations, - ProductStatus, -} from "." -import { ProductService } from "../../../../services" +import { defaultAdminProductFields, defaultAdminProductRelations } from "." 
+import { ProductStatus } from "../../../../models" +import { ProductService, PricingService } from "../../../../services" import { ProductVariantPricesUpdateReq } from "../../../../types/product-variant" import { validator } from "../../../../utils/validator" @@ -210,13 +207,15 @@ export default async (req, res) => { const validated = await validator(AdminPostProductsProductReq, req.body) const productService: ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") await productService.update(id, validated) - const product = await productService.retrieve(id, { + const rawProduct = await productService.retrieve(id, { select: defaultAdminProductFields, relations: defaultAdminProductRelations, }) + const [product] = await pricingService.setProductPrices([rawProduct]) res.json({ product }) } @@ -318,7 +317,7 @@ class ProductVariantReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record @IsArray() @IsOptional() @@ -422,5 +421,5 @@ export class AdminPostProductsProductReq { @IsObject() @IsOptional() - metadata?: object + metadata?: Record } diff --git a/packages/medusa/src/api/routes/admin/products/update-variant.ts b/packages/medusa/src/api/routes/admin/products/update-variant.ts index 64489cfbc1..894a6db7fa 100644 --- a/packages/medusa/src/api/routes/admin/products/update-variant.ts +++ b/packages/medusa/src/api/routes/admin/products/update-variant.ts @@ -9,7 +9,11 @@ import { ValidateNested, } from "class-validator" import { defaultAdminProductFields, defaultAdminProductRelations } from "." -import { ProductService, ProductVariantService } from "../../../../services" +import { + ProductService, + PricingService, + ProductVariantService, +} from "../../../../services" import { PriceSelectionParams } from "../../../../types/price-selection" import { ProductVariantPricesUpdateReq } from "../../../../types/product-variant" import { validator } from "../../../../utils/validator" @@ -134,6 +138,7 @@ export default async (req, res) => { const validatedQueryParams = await validator(PriceSelectionParams, req.query) const productService: ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") const productVariantService: ProductVariantService = req.scope.resolve( "productVariantService" ) @@ -143,14 +148,14 @@ export default async (req, res) => { ...validated, }) - await productVariantService.retrieve(variant_id) - - const product = await productService.retrieve(id, { + const rawProduct = await productService.retrieve(id, { select: defaultAdminProductFields, relations: defaultAdminProductRelations, ...validatedQueryParams, }) + const [product] = await pricingService.setProductPrices([rawProduct]) + res.json({ product }) } diff --git a/packages/medusa/src/api/routes/admin/shipping-options/list-shipping-options.ts b/packages/medusa/src/api/routes/admin/shipping-options/list-shipping-options.ts index 1632acf626..f6790d8732 100644 --- a/packages/medusa/src/api/routes/admin/shipping-options/list-shipping-options.ts +++ b/packages/medusa/src/api/routes/admin/shipping-options/list-shipping-options.ts @@ -1,6 +1,7 @@ import { Transform } from "class-transformer" import { IsBoolean, IsOptional, IsString } from "class-validator" import { defaultFields, defaultRelations } from "." 
+import { PricingService } from "../../../../services" import { validator } from "../../../../utils/validator" import { optionalBooleanMapper } from "../../../../utils/validators/is-boolean" @@ -50,12 +51,15 @@ export default async (req, res) => { ) const optionService = req.scope.resolve("shippingOptionService") + const pricingService: PricingService = req.scope.resolve("pricingService") const [data, count] = await optionService.listAndCount(validatedParams, { select: defaultFields, relations: defaultRelations, }) - res.status(200).json({ shipping_options: data, count }) + const options = await pricingService.setShippingOptionPrices(data) + + res.status(200).json({ shipping_options: options, count }) } export class AdminGetShippingOptionsParams { diff --git a/packages/medusa/src/api/routes/admin/store/__tests__/get-store.js b/packages/medusa/src/api/routes/admin/store/__tests__/get-store.js index b2db40b1e7..cfa1257d42 100644 --- a/packages/medusa/src/api/routes/admin/store/__tests__/get-store.js +++ b/packages/medusa/src/api/routes/admin/store/__tests__/get-store.js @@ -22,10 +22,12 @@ describe("GET /admin/store", () => { it("calls service retrieve", () => { expect(StoreServiceMock.retrieve).toHaveBeenCalledTimes(1) - expect(StoreServiceMock.retrieve).toHaveBeenCalledWith([ - "currencies", - "default_currency", - ]) + expect(StoreServiceMock.retrieve).toHaveBeenCalledWith({ + relations: [ + "currencies", + "default_currency", + ] + }) }) }) }) diff --git a/packages/medusa/src/api/routes/admin/store/get-store.ts b/packages/medusa/src/api/routes/admin/store/get-store.ts index c27507fc8b..1498d4b38f 100644 --- a/packages/medusa/src/api/routes/admin/store/get-store.ts +++ b/packages/medusa/src/api/routes/admin/store/get-store.ts @@ -3,6 +3,7 @@ import { PaymentProviderService, StoreService, } from "../../../../services" +import { FulfillmentProvider, PaymentProvider, Store } from "../../../../models" /** * @oas [get] /store @@ -30,7 +31,12 @@ export default async (req, res) => { const fulfillmentProviderService: FulfillmentProviderService = req.scope.resolve("fulfillmentProviderService") - const data = await storeService.retrieve(["currencies", "default_currency"]) + const data = (await storeService.retrieve({ + relations: ["currencies", "default_currency"], + })) as Store & { + payment_providers: PaymentProvider[] + fulfillment_providers: FulfillmentProvider[] + } const paymentProviders = await paymentProviderService.list() const fulfillmentProviders = await fulfillmentProviderService.list() diff --git a/packages/medusa/src/api/routes/admin/store/update-store.ts b/packages/medusa/src/api/routes/admin/store/update-store.ts index 2f6b88789b..9e8de32132 100644 --- a/packages/medusa/src/api/routes/admin/store/update-store.ts +++ b/packages/medusa/src/api/routes/admin/store/update-store.ts @@ -1,4 +1,4 @@ -import { IsArray, IsOptional, IsString } from "class-validator" +import { IsArray, IsOptional, IsString, IsObject } from "class-validator" import { StoreService } from "../../../../services" import { validator } from "../../../../utils/validator" @@ -75,4 +75,8 @@ export class AdminPostStoreReq { @IsString({ each: true }) @IsOptional() currencies?: string[] + + @IsObject() + @IsOptional() + metadata?: Record } diff --git a/packages/medusa/src/api/routes/admin/uploads/create-upload.ts b/packages/medusa/src/api/routes/admin/uploads/create-upload.ts index 86320c33db..5c9f7e1ba1 100644 --- a/packages/medusa/src/api/routes/admin/uploads/create-upload.ts +++ 
b/packages/medusa/src/api/routes/admin/uploads/create-upload.ts @@ -37,7 +37,7 @@ export default async (req, res) => { } } -export class IAdminPostUploadsFile { +export class IAdminPostUploadsFileReq { originalName: string path: string } diff --git a/packages/medusa/src/api/routes/admin/uploads/delete-upload.ts b/packages/medusa/src/api/routes/admin/uploads/delete-upload.ts index 52e65c2ded..25be5f5d1d 100644 --- a/packages/medusa/src/api/routes/admin/uploads/delete-upload.ts +++ b/packages/medusa/src/api/routes/admin/uploads/delete-upload.ts @@ -1,6 +1,8 @@ +import { IsString } from "class-validator" + /** * [delete] /uploads - * operationId: "AdminDeleteUpload" + * operationId: "AdminDeleteUploads" * summary: "Removes an uploaded file" * description: "Removes an uploaded file using the installed fileservice" * x-authenticated: true @@ -11,14 +13,18 @@ * description: OK */ export default async (req, res) => { - try { - const fileService = req.scope.resolve("fileService") + const validated = req.validatedBody as AdminDeleteUploadsReq - await fileService.delete(req.body.file) + const fileService = req.scope.resolve("fileService") - res.status(200).send({ id: "", object: "file", deleted: true }) - } catch (err) { - console.log(err) - throw err - } + await fileService.delete(validated) + + res + .status(200) + .send({ id: validated.file_key, object: "file", deleted: true }) +} + +export class AdminDeleteUploadsReq { + @IsString() + file_key: string } diff --git a/packages/medusa/src/api/routes/admin/uploads/get-download-url.ts b/packages/medusa/src/api/routes/admin/uploads/get-download-url.ts new file mode 100644 index 0000000000..44aa2b2e52 --- /dev/null +++ b/packages/medusa/src/api/routes/admin/uploads/get-download-url.ts @@ -0,0 +1,40 @@ +import { IsString } from "class-validator" +import { AbstractFileService } from "../../../../interfaces" + +/** + * [post] /uploads/download-url + * operationId: "PostUploadsDownloadUrl" + * summary: "Creates a presigned download url for a file" + * description: "Creates a presigned download url for a file" + * x-authenticated: true + * requestBody: + * content: + * application/json: + * schema: + * required: + * - file_key + * properties: + * file_key: + * description: "key of the file to obtain the download link for" + * type: string + * - (path) fileKey=* {string} key of the file to obtain the download link for. 
+ * tags: + * - Uploads + * responses: + * 200: + * description: OK + */ +export default async (req, res) => { + const fileService: AbstractFileService = req.scope.resolve("fileService") + + const url = await fileService.getPresignedDownloadUrl({ + fileKey: (req.validatedBody as AdminPostUploadsDownloadUrlReq).file_key, + }) + + res.status(200).send({ download_url: url }) +} + +export class AdminPostUploadsDownloadUrlReq { + @IsString() + file_key: string +} diff --git a/packages/medusa/src/api/routes/admin/uploads/index.ts b/packages/medusa/src/api/routes/admin/uploads/index.ts index ceeceb50fa..c5d0daa042 100644 --- a/packages/medusa/src/api/routes/admin/uploads/index.ts +++ b/packages/medusa/src/api/routes/admin/uploads/index.ts @@ -2,7 +2,9 @@ import { Router } from "express" import multer from "multer" import { DeleteResponse } from "../../../../types/common" -import middlewares from "../../../middlewares" +import middlewares, { transformBody } from "../../../middlewares" +import { AdminDeleteUploadsReq } from "./delete-upload" +import { AdminPostUploadsDownloadUrlReq } from "./get-download-url" const route = Router() const upload = multer({ dest: "uploads/" }) @@ -16,17 +18,31 @@ export default (app) => { middlewares.wrap(require("./create-upload").default) ) - // removed on purpose - // route.post("/delete", middlewares.wrap(require("./delete-upload").default)) + route.delete( + "/", + transformBody(AdminDeleteUploadsReq), + middlewares.wrap(require("./delete-upload").default) + ) + + route.post( + "/download-url", + transformBody(AdminPostUploadsDownloadUrlReq), + middlewares.wrap(require("./get-download-url").default) + ) return app } -export type AdminUploadRes = { - uploads: unknown[] +export type AdminUploadsRes = { + uploads: { url: string }[] } -export type AdminDeleteUploadRes = DeleteResponse +export type AdminDeleteUploadsRes = DeleteResponse + +export type AdminUploadsDownloadUrlRes = { + download_url: string +} export * from "./create-upload" -// export * from "./delete-upload" +export * from "./delete-upload" +export * from "./get-download-url" diff --git a/packages/medusa/src/api/routes/store/carts/add-shipping-method.ts b/packages/medusa/src/api/routes/store/carts/add-shipping-method.ts index 7a73dd1ab4..f891fcc8e7 100644 --- a/packages/medusa/src/api/routes/store/carts/add-shipping-method.ts +++ b/packages/medusa/src/api/routes/store/carts/add-shipping-method.ts @@ -3,6 +3,7 @@ import { EntityManager } from "typeorm" import { defaultStoreCartFields, defaultStoreCartRelations } from "." 
import { CartService } from "../../../../services" import { validator } from "../../../../utils/validator" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/shipping-methods @@ -59,7 +60,9 @@ export default async (req, res) => { relations: defaultStoreCartRelations, }) - res.status(200).json({ cart: updatedCart }) + const data = await decorateLineItemsWithTotals(updatedCart, req) + + res.status(200).json({ cart: data }) } export class StorePostCartsCartShippingMethodReq { diff --git a/packages/medusa/src/api/routes/store/carts/calculate-taxes.ts b/packages/medusa/src/api/routes/store/carts/calculate-taxes.ts index ed055e1648..d13d5af8da 100644 --- a/packages/medusa/src/api/routes/store/carts/calculate-taxes.ts +++ b/packages/medusa/src/api/routes/store/carts/calculate-taxes.ts @@ -1,6 +1,7 @@ import { EntityManager } from "typeorm" import { IdempotencyKey } from "../../../../models/idempotency-key" import { CartService, IdempotencyKeyService } from "../../../../services" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/taxes @@ -64,6 +65,7 @@ export default async (req, res) => { const cart = await cartService.withTransaction(manager).retrieve( id, { + relations: ["items", "items.adjustments"], select: [ "total", "subtotal", @@ -76,9 +78,13 @@ export default async (req, res) => { { force_taxes: true } ) + const data = await decorateLineItemsWithTotals(cart, req, { + force_taxes: true, + }) + return { response_code: 200, - response_body: { cart }, + response_body: { cart: data }, } } ) diff --git a/packages/medusa/src/api/routes/store/carts/create-cart.ts b/packages/medusa/src/api/routes/store/carts/create-cart.ts index 02f8dde4bf..4fdaa9d9c8 100644 --- a/packages/medusa/src/api/routes/store/carts/create-cart.ts +++ b/packages/medusa/src/api/routes/store/carts/create-cart.ts @@ -15,6 +15,7 @@ import { defaultStoreCartFields, defaultStoreCartRelations } from "." import { CartService, LineItemService } from "../../../../services" import { validator } from "../../../../utils/validator" import { AddressPayload } from "../../../../types/common" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts @@ -144,7 +145,9 @@ export default async (req, res) => { relations: defaultStoreCartRelations, }) - res.status(200).json({ cart }) + const data = await decorateLineItemsWithTotals(cart, req) + + res.status(200).json({ cart: data }) }) } diff --git a/packages/medusa/src/api/routes/store/carts/create-line-item.ts b/packages/medusa/src/api/routes/store/carts/create-line-item.ts index 8153eef1a2..eaa069a431 100644 --- a/packages/medusa/src/api/routes/store/carts/create-line-item.ts +++ b/packages/medusa/src/api/routes/store/carts/create-line-item.ts @@ -3,6 +3,7 @@ import { EntityManager } from "typeorm" import { defaultStoreCartFields, defaultStoreCartRelations } from "." 
import { CartService, LineItemService } from "../../../../services" import { validator } from "../../../../utils/validator" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/line-items @@ -63,7 +64,9 @@ export default async (req, res) => { relations: defaultStoreCartRelations, }) - res.status(200).json({ cart }) + const data = await decorateLineItemsWithTotals(cart, req) + + res.status(200).json({ cart: data }) } export class StorePostCartsCartLineItemsReq { diff --git a/packages/medusa/src/api/routes/store/carts/create-payment-sessions.ts b/packages/medusa/src/api/routes/store/carts/create-payment-sessions.ts index 14e7ef0e66..3c61a72791 100644 --- a/packages/medusa/src/api/routes/store/carts/create-payment-sessions.ts +++ b/packages/medusa/src/api/routes/store/carts/create-payment-sessions.ts @@ -1,5 +1,6 @@ import { defaultStoreCartFields, defaultStoreCartRelations } from "." import { CartService } from "../../../../services" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/payment-sessions @@ -32,5 +33,6 @@ export default async (req, res) => { relations: defaultStoreCartRelations, }) - res.status(200).json({ cart }) + const data = await decorateLineItemsWithTotals(cart, req) + res.status(200).json({ cart: data }) } diff --git a/packages/medusa/src/api/routes/store/carts/decorate-line-items-with-totals.ts b/packages/medusa/src/api/routes/store/carts/decorate-line-items-with-totals.ts new file mode 100644 index 0000000000..167e3ed36e --- /dev/null +++ b/packages/medusa/src/api/routes/store/carts/decorate-line-items-with-totals.ts @@ -0,0 +1,27 @@ +import { Request } from "express" +import { TotalsService } from "../../../../services" +import { Cart, LineItem } from "../../../../models" +export const decorateLineItemsWithTotals = async ( + cart: Cart, + req: Request, + options: { force_taxes: boolean } = { force_taxes: false } +): Promise<Cart> => { + const totalsService: TotalsService = req.scope.resolve("totalsService") + if (cart.items && cart.region) { + const items = await Promise.all( + cart.items.map(async (item: LineItem) => { + const itemTotals = await totalsService.getLineItemTotals(item, cart, { + include_tax: options.force_taxes || cart.region.automatic_taxes, + }) + return Object.assign(item, itemTotals) + }) + ) + return Object.assign(cart, { items }) + } + return cart +} diff --git a/packages/medusa/src/api/routes/store/carts/delete-discount.ts b/packages/medusa/src/api/routes/store/carts/delete-discount.ts index 668d0218c8..cdca0cdcc9 100644 --- a/packages/medusa/src/api/routes/store/carts/delete-discount.ts +++ b/packages/medusa/src/api/routes/store/carts/delete-discount.ts @@ -1,6 +1,7 @@ import { EntityManager } from "typeorm" import { defaultStoreCartFields, defaultStoreCartRelations } from "." 
import { CartService } from "../../../../services" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [delete] /carts/{id}/discounts/{code} @@ -46,6 +47,7 @@ export default async (req, res) => { select: defaultStoreCartFields, relations: defaultStoreCartRelations, }) + const data = await decorateLineItemsWithTotals(cart, req) - res.status(200).json({ cart }) + res.status(200).json({ cart: data }) } diff --git a/packages/medusa/src/api/routes/store/carts/delete-line-item.ts b/packages/medusa/src/api/routes/store/carts/delete-line-item.ts index 51d4959322..e667ad26c7 100644 --- a/packages/medusa/src/api/routes/store/carts/delete-line-item.ts +++ b/packages/medusa/src/api/routes/store/carts/delete-line-item.ts @@ -1,6 +1,7 @@ import { EntityManager } from "typeorm" import { defaultStoreCartFields, defaultStoreCartRelations } from "." import { CartService } from "../../../../services" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [delete] /carts/{id}/line-items/{line_id} @@ -46,6 +47,7 @@ export default async (req, res) => { select: defaultStoreCartFields, relations: defaultStoreCartRelations, }) + const data = await decorateLineItemsWithTotals(cart, req) - res.status(200).json({ cart }) + res.status(200).json({ cart: data }) } diff --git a/packages/medusa/src/api/routes/store/carts/delete-payment-session.ts b/packages/medusa/src/api/routes/store/carts/delete-payment-session.ts index efe488656e..a14b5ce1d5 100644 --- a/packages/medusa/src/api/routes/store/carts/delete-payment-session.ts +++ b/packages/medusa/src/api/routes/store/carts/delete-payment-session.ts @@ -1,5 +1,6 @@ import { defaultStoreCartFields, defaultStoreCartRelations } from "." import { CartService } from "../../../../services" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [delete] /carts/{id}/payment-sessions/{provider_id} @@ -32,5 +33,6 @@ export default async (req, res) => { relations: defaultStoreCartRelations, }) - res.status(200).json({ cart }) + const data = await decorateLineItemsWithTotals(cart, req) + res.status(200).json({ cart: data }) } diff --git a/packages/medusa/src/api/routes/store/carts/get-cart.ts b/packages/medusa/src/api/routes/store/carts/get-cart.ts index 599716c85f..99c043f75b 100644 --- a/packages/medusa/src/api/routes/store/carts/get-cart.ts +++ b/packages/medusa/src/api/routes/store/carts/get-cart.ts @@ -1,5 +1,6 @@ import { defaultStoreCartFields, defaultStoreCartRelations } from "." 
import { CartService } from "../../../../services" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [get] /carts/{id} @@ -47,5 +48,6 @@ export default async (req, res) => { relations: defaultStoreCartRelations, }) - res.json({ cart }) + const data = await decorateLineItemsWithTotals(cart, req) + res.json({ cart: data }) } diff --git a/packages/medusa/src/api/routes/store/carts/refresh-payment-session.ts b/packages/medusa/src/api/routes/store/carts/refresh-payment-session.ts index 80eb42a4e1..d120072f5b 100644 --- a/packages/medusa/src/api/routes/store/carts/refresh-payment-session.ts +++ b/packages/medusa/src/api/routes/store/carts/refresh-payment-session.ts @@ -1,4 +1,5 @@ import { CartService } from "../../../../services" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/payment-sessions/{provider_id}/refresh @@ -44,5 +45,6 @@ export default async (req, res) => { ], }) - res.status(200).json({ cart }) + const data = await decorateLineItemsWithTotals(cart, req) + res.status(200).json({ cart: data }) } diff --git a/packages/medusa/src/api/routes/store/carts/set-payment-session.ts b/packages/medusa/src/api/routes/store/carts/set-payment-session.ts index 98da5677f7..b3c1b1d2ee 100644 --- a/packages/medusa/src/api/routes/store/carts/set-payment-session.ts +++ b/packages/medusa/src/api/routes/store/carts/set-payment-session.ts @@ -2,6 +2,7 @@ import { IsString } from "class-validator" import { defaultStoreCartFields, defaultStoreCartRelations } from "." import { CartService } from "../../../../services" import { validator } from "../../../../utils/validator" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/payment-session @@ -39,7 +40,8 @@ export default async (req, res) => { relations: defaultStoreCartRelations, }) - res.status(200).json({ cart }) + const data = await decorateLineItemsWithTotals(cart, req) + res.status(200).json({ cart: data }) } export class StorePostCartsCartPaymentSessionReq { diff --git a/packages/medusa/src/api/routes/store/carts/update-cart.ts b/packages/medusa/src/api/routes/store/carts/update-cart.ts index 1874c6b398..9459d712da 100644 --- a/packages/medusa/src/api/routes/store/carts/update-cart.ts +++ b/packages/medusa/src/api/routes/store/carts/update-cart.ts @@ -12,6 +12,7 @@ import { CartUpdateProps } from "../../../../types/cart" import { AddressPayload } from "../../../../types/common" import { validator } from "../../../../utils/validator" import { IsType } from "../../../../utils/validators/is-type" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /store/carts/{id} @@ -118,8 +119,9 @@ export default async (req, res) => { select: defaultStoreCartFields, relations: defaultStoreCartRelations, }) + const data = await decorateLineItemsWithTotals(cart, req) - res.json({ cart }) + res.json({ cart: data }) } class GiftCard { diff --git a/packages/medusa/src/api/routes/store/carts/update-line-item.ts b/packages/medusa/src/api/routes/store/carts/update-line-item.ts index cea8c8f3a0..7e107f5aa8 100644 --- a/packages/medusa/src/api/routes/store/carts/update-line-item.ts +++ b/packages/medusa/src/api/routes/store/carts/update-line-item.ts @@ -4,6 +4,7 @@ import { EntityManager } from "typeorm" import { defaultStoreCartFields, defaultStoreCartRelations } from "." 
import { CartService } from "../../../../services" import { validator } from "../../../../utils/validator" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/line-items/{line_id} @@ -78,8 +79,9 @@ export default async (req, res) => { select: defaultStoreCartFields, relations: defaultStoreCartRelations, }) + const data = await decorateLineItemsWithTotals(cart, req) - res.status(200).json({ cart }) + res.status(200).json({ cart: data }) } export class StorePostCartsCartLineItemsItemReq { diff --git a/packages/medusa/src/api/routes/store/carts/update-payment-session.ts b/packages/medusa/src/api/routes/store/carts/update-payment-session.ts index 803f4811b6..378876e87c 100644 --- a/packages/medusa/src/api/routes/store/carts/update-payment-session.ts +++ b/packages/medusa/src/api/routes/store/carts/update-payment-session.ts @@ -2,6 +2,7 @@ import { IsObject } from "class-validator" import { defaultStoreCartFields, defaultStoreCartRelations } from "." import { CartService } from "../../../../services" import { validator } from "../../../../utils/validator" +import { decorateLineItemsWithTotals } from "./decorate-line-items-with-totals" /** * @oas [post] /carts/{id}/payment-sessions/{provider_id} @@ -41,8 +42,9 @@ export default async (req, res) => { select: defaultStoreCartFields, relations: defaultStoreCartRelations, }) + const data = await decorateLineItemsWithTotals(cart, req) - res.status(200).json({ cart }) + res.status(200).json({ cart: data }) } export class StorePostCartsCartPaymentSessionUpdateReq { diff --git a/packages/medusa/src/api/routes/store/customers/get-payment-methods.ts b/packages/medusa/src/api/routes/store/customers/get-payment-methods.ts index da8e2d1174..c964102669 100644 --- a/packages/medusa/src/api/routes/store/customers/get-payment-methods.ts +++ b/packages/medusa/src/api/routes/store/customers/get-payment-methods.ts @@ -2,6 +2,7 @@ import { Customer } from "../../../.." 
import CustomerService from "../../../../services/customer" import PaymentProviderService from "../../../../services/payment-provider" import StoreService from "../../../../services/store" +import { PaymentProvider } from "../../../../models" /** * @oas [get] /customers/me/payment-methods @@ -32,8 +33,6 @@ import StoreService from "../../../../services/store" export default async (req, res) => { const id = req.user.customer_id - const storeService: StoreService = req.scope.resolve("storeService") - const paymentProviderService: PaymentProviderService = req.scope.resolve( "paymentProviderService" ) @@ -42,15 +41,18 @@ export default async (req, res) => { const customer: Customer = await customerService.retrieve(id) - const store = await storeService.retrieve(["payment_providers"]) + const paymentProviders: PaymentProvider[] = + await paymentProviderService.list() const methods = await Promise.all( - store.payment_providers.map(async (next: string) => { - const provider = paymentProviderService.retrieveProvider(next) + paymentProviders.map(async (paymentProvider: PaymentProvider) => { + const provider = paymentProviderService.retrieveProvider( + paymentProvider.id + ) const pMethods = await provider.retrieveSavedMethods(customer) return pMethods.map((m) => ({ - provider_id: next, + provider_id: paymentProvider.id, data: m, })) }) diff --git a/packages/medusa/src/api/routes/store/customers/index.ts b/packages/medusa/src/api/routes/store/customers/index.ts index 2a5d400831..f884948201 100644 --- a/packages/medusa/src/api/routes/store/customers/index.ts +++ b/packages/medusa/src/api/routes/store/customers/index.ts @@ -64,7 +64,7 @@ export const defaultStoreCustomersRelations = [ "billing_address", ] -export const defaultStoreCustomersFields = [ +export const defaultStoreCustomersFields: (keyof Customer)[] = [ "id", "email", "first_name", diff --git a/packages/medusa/src/api/routes/store/customers/update-address.ts b/packages/medusa/src/api/routes/store/customers/update-address.ts index cb4a9dbd52..2e1716b37e 100644 --- a/packages/medusa/src/api/routes/store/customers/update-address.ts +++ b/packages/medusa/src/api/routes/store/customers/update-address.ts @@ -45,9 +45,9 @@ export default async (req, res) => { "customerService" ) as CustomerService - let customer = await customerService.updateAddress(id, address_id, validated) + await customerService.updateAddress(id, address_id, validated) - customer = await customerService.retrieve(id, { + const customer = await customerService.retrieve(id, { relations: defaultStoreCustomersRelations, select: defaultStoreCustomersFields, }) diff --git a/packages/medusa/src/api/routes/store/customers/update-customer.ts b/packages/medusa/src/api/routes/store/customers/update-customer.ts index 498178b232..e223c4982d 100644 --- a/packages/medusa/src/api/routes/store/customers/update-customer.ts +++ b/packages/medusa/src/api/routes/store/customers/update-customer.ts @@ -93,5 +93,5 @@ export class StorePostCustomersCustomerReq { @IsOptional() @IsObject() - metadata?: object + metadata?: Record<string, unknown> } diff --git a/packages/medusa/src/api/routes/store/gift-cards/index.ts b/packages/medusa/src/api/routes/store/gift-cards/index.ts index 7f633e909e..d414565b9b 100644 --- a/packages/medusa/src/api/routes/store/gift-cards/index.ts +++ b/packages/medusa/src/api/routes/store/gift-cards/index.ts @@ -14,7 +14,12 @@ export default (app) => { export const defaultStoreGiftCardRelations = ["region"] -export const defaultStoreGiftCardFields = ["id", "code", "value", "balance"] +export 
const defaultStoreGiftCardFields: (keyof GiftCard)[] = [ + "id", + "code", + "value", + "balance", +] export const allowedStoreGiftCardRelations = ["region"] diff --git a/packages/medusa/src/api/routes/store/products/__tests__/get-product.js b/packages/medusa/src/api/routes/store/products/__tests__/get-product.js index 62057b0812..e440993945 100644 --- a/packages/medusa/src/api/routes/store/products/__tests__/get-product.js +++ b/packages/medusa/src/api/routes/store/products/__tests__/get-product.js @@ -23,10 +23,6 @@ describe("Get product by id", () => { IdMap.getId("product1"), { relations: defaultStoreProductsRelations, - cart_id: undefined, - currency_code: undefined, - region_id: undefined, - include_discount_prices: true, } ) }) @@ -59,10 +55,6 @@ describe("Get product by id", () => { IdMap.getId("variantsWithPrices"), { relations: defaultStoreProductsRelations, - cart_id: undefined, - currency_code: undefined, - region_id: undefined, - include_discount_prices: true, } ) }) diff --git a/packages/medusa/src/api/routes/store/products/__tests__/list-products.js b/packages/medusa/src/api/routes/store/products/__tests__/list-products.js index fa838cb708..d99efbe365 100644 --- a/packages/medusa/src/api/routes/store/products/__tests__/list-products.js +++ b/packages/medusa/src/api/routes/store/products/__tests__/list-products.js @@ -23,10 +23,6 @@ describe("GET /store/products", () => { relations: defaultStoreProductsRelations, skip: 0, take: 100, - cart_id: undefined, - currency_code: undefined, - region_id: undefined, - include_discount_prices: true, } ) }) @@ -54,10 +50,6 @@ describe("GET /store/products", () => { relations: defaultStoreProductsRelations, skip: 0, take: 100, - cart_id: undefined, - currency_code: undefined, - region_id: undefined, - include_discount_prices: true, } ) }) diff --git a/packages/medusa/src/api/routes/store/products/get-product.ts b/packages/medusa/src/api/routes/store/products/get-product.ts index 52d6180a2c..53a3a6ee20 100644 --- a/packages/medusa/src/api/routes/store/products/get-product.ts +++ b/packages/medusa/src/api/routes/store/products/get-product.ts @@ -1,5 +1,10 @@ import { defaultStoreProductsRelations } from "." 
-import { ProductService } from "../../../../services" +import { + ProductService, + PricingService, + CartService, + RegionService, +} from "../../../../services" import { PriceSelectionParams } from "../../../../types/price-selection" import { validator } from "../../../../utils/validator" @@ -30,12 +35,31 @@ export default async (req, res) => { const customer_id = req.user?.customer_id const productService: ProductService = req.scope.resolve("productService") - const product = await productService.retrieve(id, { + const pricingService: PricingService = req.scope.resolve("pricingService") + const cartService: CartService = req.scope.resolve("cartService") + const regionService: RegionService = req.scope.resolve("regionService") + const rawProduct = await productService.retrieve(id, { relations: defaultStoreProductsRelations, + }) + + let regionId = validated.region_id + let currencyCode = validated.currency_code + if (validated.cart_id) { + const cart = await cartService.retrieve(validated.cart_id, { + select: ["id", "region_id"], + }) + const region = await regionService.retrieve(cart.region_id, { + select: ["id", "currency_code"], + }) + regionId = region.id + currencyCode = region.currency_code + } + + const [product] = await pricingService.setProductPrices([rawProduct], { cart_id: validated.cart_id, customer_id: customer_id, - region_id: validated.region_id, - currency_code: validated.currency_code, + region_id: regionId, + currency_code: currencyCode, include_discount_prices: true, }) diff --git a/packages/medusa/src/api/routes/store/products/list-products.ts b/packages/medusa/src/api/routes/store/products/list-products.ts index 944227f163..586ac505f4 100644 --- a/packages/medusa/src/api/routes/store/products/list-products.ts +++ b/packages/medusa/src/api/routes/store/products/list-products.ts @@ -9,12 +9,18 @@ import { } from "class-validator" import { omit, pickBy } from "lodash" import { defaultStoreProductsRelations } from "." 
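For context, a rough sketch (not part of the changeset) of how a storefront exercises the reworked product pricing above and in the list endpoint just below; the host, the ids and the calculated_price/original_price field names are illustrative assumptions about the decorated response:

// prices are now computed by pricingService.setProductPrices, using the cart's region when a cart id is passed
const res = await fetch(
  "http://localhost:9000/store/products/prod_123?cart_id=cart_123" // placeholder product and cart ids
)
const { product } = await res.json()
// each variant is expected to carry the selected prices, e.g.
// product.variants[0].calculated_price and product.variants[0].original_price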
-import { ProductService } from "../../../../services" +import { + ProductService, + RegionService, + CartService, +} from "../../../../services" +import PricingService from "../../../../services/pricing" import { DateComparisonOperator } from "../../../../types/common" import { PriceSelectionParams } from "../../../../types/price-selection" import { validator } from "../../../../utils/validator" import { IsType } from "../../../../utils/validators/is-type" import { optionalBooleanMapper } from "../../../../utils/validators/is-boolean" +import { Product } from "../../../../models" /** * @oas [get] /products @@ -61,6 +67,9 @@ import { optionalBooleanMapper } from "../../../../utils/validators/is-boolean" */ export default async (req, res) => { const productService: ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") + const cartService: CartService = req.scope.resolve("cartService") + const regionService: RegionService = req.scope.resolve("regionService") const validated = await validator(StoreGetProductsParams, req.query) @@ -77,9 +86,9 @@ export default async (req, res) => { // get only published products for store endpoint filterableFields["status"] = ["published"] - let includeFields: string[] = [] + let includeFields: (keyof Product)[] = [] if (validated.fields) { - const set = new Set(validated.fields.split(",")) + const set = new Set(validated.fields.split(",")) as Set<keyof Product> set.add("id") includeFields = [...set] } @@ -96,18 +105,34 @@ export default async (req, res) => { : defaultStoreProductsRelations, skip: validated.offset, take: validated.limit, - cart_id: validated.cart_id, - region_id: validated.region_id, - currency_code: validated.currency_code, - customer_id: req.user?.customer_id, - include_discount_prices: true, } - const [products, count] = await productService.listAndCount( + const [rawProducts, count] = await productService.listAndCount( pickBy(filterableFields, (val) => typeof val !== "undefined"), listConfig ) + let regionId = validated.region_id + let currencyCode = validated.currency_code + if (validated.cart_id) { + const cart = await cartService.retrieve(validated.cart_id, { + select: ["id", "region_id"], + }) + const region = await regionService.retrieve(cart.region_id, { + select: ["id", "currency_code"], + }) + regionId = region.id + currencyCode = region.currency_code + } + + const products = await pricingService.setProductPrices(rawProducts, { + cart_id: validated.cart_id, + region_id: regionId, + currency_code: currencyCode, + customer_id: req.user?.customer_id, + include_discount_prices: true, + }) + res.json({ products, count, diff --git a/packages/medusa/src/api/routes/store/shipping-options/list-options.ts b/packages/medusa/src/api/routes/store/shipping-options/list-options.ts index d18cbc8d1a..1d2f7ae04b 100644 --- a/packages/medusa/src/api/routes/store/shipping-options/list-options.ts +++ b/packages/medusa/src/api/routes/store/shipping-options/list-options.ts @@ -1,5 +1,5 @@ import { IsBooleanString, IsOptional, IsString } from "class-validator" -import ProductService from "../../../../services/product" +import { PricingService, ProductService } from "../../../../services" import ShippingOptionService from "../../../../services/shipping-option" import { validator } from "../../../../utils/validator" @@ -33,6 +33,7 @@ export default async (req, res) => { (validated.product_ids && validated.product_ids.split(",")) || [] const regionId = validated.region_id const productService: 
ProductService = req.scope.resolve("productService") + const pricingService: PricingService = req.scope.resolve("pricingService") const shippingOptionService: ShippingOptionService = req.scope.resolve( "shippingOptionService" ) @@ -59,7 +60,9 @@ export default async (req, res) => { relations: ["requirements"], }) - res.status(200).json({ shipping_options: options }) + const data = await pricingService.setShippingOptionPrices(options) + + res.status(200).json({ shipping_options: data }) } export class StoreGetShippingOptionsParams { diff --git a/packages/medusa/src/api/routes/store/shipping-options/list-shipping-options.ts b/packages/medusa/src/api/routes/store/shipping-options/list-shipping-options.ts index 20c44ff660..7b339be8cf 100644 --- a/packages/medusa/src/api/routes/store/shipping-options/list-shipping-options.ts +++ b/packages/medusa/src/api/routes/store/shipping-options/list-shipping-options.ts @@ -1,4 +1,4 @@ -import CartService from "../../../../services/cart" +import { CartService, PricingService } from "../../../../services" import ShippingProfileService from "../../../../services/shipping-profile" /** @@ -26,6 +26,7 @@ export default async (req, res) => { const { cart_id } = req.params const cartService: CartService = req.scope.resolve("cartService") + const pricingService: PricingService = req.scope.resolve("pricingService") const shippingProfileService: ShippingProfileService = req.scope.resolve( "shippingProfileService" ) @@ -36,6 +37,9 @@ export default async (req, res) => { }) const options = await shippingProfileService.fetchCartOptions(cart) + const data = await pricingService.setShippingOptionPrices(options, { + cart_id, + }) - res.status(200).json({ shipping_options: options }) + res.status(200).json({ shipping_options: data }) } diff --git a/packages/medusa/src/api/routes/store/variants/__tests__/get-variant.js b/packages/medusa/src/api/routes/store/variants/__tests__/get-variant.js index 4f8a13cc2f..5090457c54 100644 --- a/packages/medusa/src/api/routes/store/variants/__tests__/get-variant.js +++ b/packages/medusa/src/api/routes/store/variants/__tests__/get-variant.js @@ -16,10 +16,6 @@ describe("Get variant by id", () => { expect(ProductVariantServiceMock.retrieve).toHaveBeenCalledTimes(1) expect(ProductVariantServiceMock.retrieve).toHaveBeenCalledWith("1", { relations: ["prices", "options"], - cart_id: undefined, - currency_code: undefined, - region_id: undefined, - include_discount_prices: true, }) }) diff --git a/packages/medusa/src/api/routes/store/variants/get-variant.ts b/packages/medusa/src/api/routes/store/variants/get-variant.ts index 424eff61aa..2c3442dc75 100644 --- a/packages/medusa/src/api/routes/store/variants/get-variant.ts +++ b/packages/medusa/src/api/routes/store/variants/get-variant.ts @@ -1,5 +1,10 @@ import { defaultStoreVariantRelations } from "." 
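Shipping options get the same decoration above; a rough sketch of the storefront call, assuming the standard GET /store/shipping-options/{cart_id} route (the host and cart id are placeholders):

// option amounts are now set by pricingService.setShippingOptionPrices with the cart's context
const res = await fetch("http://localhost:9000/store/shipping-options/cart_123") // placeholder cart id
const { shipping_options } = await res.json()
// each option's amount is expected to reflect the cart's region rather than the raw stored amount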
-import ProductVariantService from "../../../../services/product-variant" +import { + CartService, + RegionService, + ProductVariantService, + PricingService, +} from "../../../../services" import { PriceSelectionParams } from "../../../../types/price-selection" import { validator } from "../../../../utils/validator" @@ -30,15 +35,34 @@ export default async (req, res) => { const variantService: ProductVariantService = req.scope.resolve( "productVariantService" ) + const pricingService: PricingService = req.scope.resolve("pricingService") + const cartService: CartService = req.scope.resolve("cartService") + const regionService: RegionService = req.scope.resolve("regionService") const customer_id = req.user?.customer_id - const variant = await variantService.retrieve(id, { + const rawVariant = await variantService.retrieve(id, { relations: defaultStoreVariantRelations, + }) + + let regionId = validated.region_id + let currencyCode = validated.currency_code + if (validated.cart_id) { + const cart = await cartService.retrieve(validated.cart_id, { + select: ["id", "region_id"], + }) + const region = await regionService.retrieve(cart.region_id, { + select: ["id", "currency_code"], + }) + regionId = region.id + currencyCode = region.currency_code + } + + const [variant] = await pricingService.setVariantPrices([rawVariant], { cart_id: validated.cart_id, customer_id: customer_id, - region_id: validated.region_id, - currency_code: validated.currency_code, + region_id: regionId, + currency_code: currencyCode, include_discount_prices: true, }) diff --git a/packages/medusa/src/api/routes/store/variants/list-variants.ts b/packages/medusa/src/api/routes/store/variants/list-variants.ts index 06f9634f2a..f57eb240b7 100644 --- a/packages/medusa/src/api/routes/store/variants/list-variants.ts +++ b/packages/medusa/src/api/routes/store/variants/list-variants.ts @@ -3,7 +3,12 @@ import { omit } from "lodash" import { IsInt, IsOptional, IsString } from "class-validator" import { defaultStoreVariantRelations } from "." 
import { FilterableProductVariantProps } from "../../../../types/product-variant" -import ProductVariantService from "../../../../services/product-variant" +import { + CartService, + RegionService, + ProductVariantService, + PricingService, +} from "../../../../services" import { validator } from "../../../../utils/validator" import { IsType } from "../../../../utils/validators/is-type" import { NumericalComparisonOperator } from "../../../../types/common" @@ -50,11 +55,6 @@ export default async (req, res) => { : defaultStoreVariantRelations, skip: offset, take: limit, - cart_id: validated.cart_id, - region_id: validated.region_id, - currency_code: validated.currency_code, - customer_id: customer_id, - include_discount_prices: true, } const filterableFields: FilterableProductVariantProps = omit(validated, [ @@ -71,10 +71,35 @@ export default async (req, res) => { filterableFields.id = validated.ids.split(",") } + const pricingService: PricingService = req.scope.resolve("pricingService") const variantService: ProductVariantService = req.scope.resolve( "productVariantService" ) - const variants = await variantService.list(filterableFields, listConfig) + const cartService: CartService = req.scope.resolve("cartService") + const regionService: RegionService = req.scope.resolve("regionService") + + const rawVariants = await variantService.list(filterableFields, listConfig) + + let regionId = validated.region_id + let currencyCode = validated.currency_code + if (validated.cart_id) { + const cart = await cartService.retrieve(validated.cart_id, { + select: ["id", "region_id"], + }) + const region = await regionService.retrieve(cart.region_id, { + select: ["id", "currency_code"], + }) + regionId = region.id + currencyCode = region.currency_code + } + + const variants = await pricingService.setVariantPrices(rawVariants, { + cart_id: validated.cart_id, + region_id: regionId, + currency_code: currencyCode, + customer_id: customer_id, + include_discount_prices: true, + }) res.json({ variants }) } diff --git a/packages/medusa/src/commands/migrate.js b/packages/medusa/src/commands/migrate.js index fe95afcf6d..6800571223 100644 --- a/packages/medusa/src/commands/migrate.js +++ b/packages/medusa/src/commands/migrate.js @@ -1,24 +1,27 @@ import { createConnection } from "typeorm" import { getConfigFile } from "medusa-core-utils" - +import featureFlagLoader from "../loaders/feature-flags" import Logger from "../loaders/logger" import getMigrations from "./utils/get-migrations" -const t = async function({ directory }) { +const t = async function ({ directory }) { const args = process.argv args.shift() args.shift() args.shift() const { configModule } = getConfigFile(directory, `medusa-config`) - const migrationDirs = getMigrations(directory) + + const featureFlagRouter = featureFlagLoader(configModule) + + const enabledMigrations = await getMigrations(directory, featureFlagRouter) const connection = await createConnection({ type: configModule.projectConfig.database_type, url: configModule.projectConfig.database_url, extra: configModule.projectConfig.database_extra || {}, - migrations: migrationDirs, + migrations: enabledMigrations, logging: true, }) @@ -27,6 +30,11 @@ const t = async function({ directory }) { await connection.close() Logger.info("Migrations completed.") process.exit() + } else if (args[0] === "revert") { + await connection.undoLastMigration({ transaction: "all" }) + await connection.close() + Logger.info("Migrations reverted.") + process.exit() } else if (args[0] === "show") { const unapplied = 
await connection.showMigrations() await connection.close() diff --git a/packages/medusa/src/commands/seed.js b/packages/medusa/src/commands/seed.js index 2d2c329e83..1a372f0e35 100644 --- a/packages/medusa/src/commands/seed.js +++ b/packages/medusa/src/commands/seed.js @@ -9,9 +9,11 @@ import { track } from "medusa-telemetry" import Logger from "../loaders/logger" import loaders from "../loaders" +import featureFlagLoader from "../loaders/feature-flags" + import getMigrations from "./utils/get-migrations" -const t = async function({ directory, migrate, seedFile }) { +const t = async function ({ directory, migrate, seedFile }) { track("CLI_SEED") let resolvedPath = seedFile @@ -28,9 +30,12 @@ const t = async function({ directory, migrate, seedFile }) { } const { configModule } = getConfigFile(directory, `medusa-config`) + + const featureFlagRouter = featureFlagLoader(configModule) + const dbType = configModule.projectConfig.database_type if (migrate && dbType !== "sqlite") { - const migrationDirs = getMigrations(directory) + const migrationDirs = await getMigrations(directory, featureFlagRouter) const connection = await createConnection({ type: configModule.projectConfig.database_type, database: configModule.projectConfig.database_database, @@ -61,7 +66,7 @@ const t = async function({ directory, migrate, seedFile }) { const shippingOptionService = container.resolve("shippingOptionService") const shippingProfileService = container.resolve("shippingProfileService") - await manager.transaction(async tx => { + await manager.transaction(async (tx) => { const { store, regions, products, shipping_options, users } = JSON.parse( fs.readFileSync(resolvedPath, `utf-8`) ) @@ -74,14 +79,14 @@ const t = async function({ directory, migrate, seedFile }) { } for (const u of users) { - let pass = u.password + const pass = u.password if (pass) { delete u.password } await userService.withTransaction(tx).create(u, pass) } - let regionIds = {} + const regionIds = {} for (const r of regions) { let dummyId if (!r.id || !r.id.startsWith("reg_")) { @@ -126,7 +131,7 @@ const t = async function({ directory, migrate, seedFile }) { if (variants && variants.length) { const optionIds = p.options.map( - o => newProd.options.find(newO => newO.title === o.title).id + (o) => newProd.options.find((newO) => newO.title === o.title).id ) for (const v of variants) { diff --git a/packages/medusa/src/commands/start.js b/packages/medusa/src/commands/start.js index 0b78911cd9..7771a34773 100644 --- a/packages/medusa/src/commands/start.js +++ b/packages/medusa/src/commands/start.js @@ -8,7 +8,7 @@ import { scheduleJob } from "node-schedule" import loaders from "../loaders" import Logger from "../loaders/logger" -const EVERY_SIXTH_HOUR = "* */6 * * *" +const EVERY_SIXTH_HOUR = "0 */6 * * *" const CRON_SCHEDULE = EVERY_SIXTH_HOUR export default async function ({ port, directory }) { diff --git a/packages/medusa/src/commands/utils/get-migrations.js b/packages/medusa/src/commands/utils/get-migrations.js index ba5d6b0a3f..eb4afb91f7 100644 --- a/packages/medusa/src/commands/utils/get-migrations.js +++ b/packages/medusa/src/commands/utils/get-migrations.js @@ -1,3 +1,4 @@ +import glob from "glob" import path from "path" import fs from "fs" import { isString } from "lodash" @@ -33,7 +34,7 @@ function resolvePlugin(pluginName) { fs.readFileSync(`${resolvedPath}/package.json`, `utf-8`) ) const name = packageJSON.name || pluginName - //warnOnIncompatiblePeerDependency(name, packageJSON) + // warnOnIncompatiblePeerDependency(name, packageJSON) 
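To illustrate the getEnabledMigrations helper added to get-migrations.js just below: a migration module may export a featureFlag key, and it is then only included when that flag is enabled. The flag key and table name in this sketch are placeholders:

import { MigrationInterface, QueryRunner } from "typeorm"

// read via require(file).featureFlag by getEnabledMigrations
export const featureFlag = "my_experimental_flag"

export class MyExperimentalTable1650000000000 implements MigrationInterface {
  async up(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(
      `CREATE TABLE "my_experimental_table" ("id" character varying NOT NULL)`
    )
  }

  async down(queryRunner: QueryRunner): Promise<void> {
    await queryRunner.query(`DROP TABLE "my_experimental_table"`)
  }
}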
return { resolve: resolvedPath, @@ -86,11 +87,11 @@ function resolvePlugin(pluginName) { } } -export default directory => { +export default async (directory, featureFlagRouter) => { const { configModule } = getConfigFile(directory, `medusa-config`) const { plugins } = configModule - const resolved = plugins.map(plugin => { + const resolved = plugins.map((plugin) => { if (isString(plugin)) { return resolvePlugin(plugin) } @@ -123,5 +124,26 @@ export default directory => { } } - return migrationDirs + return getEnabledMigrations(migrationDirs, (flag) => + featureFlagRouter.isFeatureEnabled(flag) + ) +} + +export const getEnabledMigrations = (migrationDirs, isFlagEnabled) => { + const allMigrations = migrationDirs.flatMap((dir) => { + return glob.sync(dir) + }) + return allMigrations + .map((file) => { + const loaded = require(file) + if ( + typeof loaded.featureFlag === "undefined" || + isFlagEnabled(loaded.featureFlag) + ) { + return file + } + + return false + }) + .filter(Boolean) } diff --git a/packages/medusa/src/controllers/products/admin-list-products.ts b/packages/medusa/src/controllers/products/admin-list-products.ts index 9c37eadc09..417a1ba2b8 100644 --- a/packages/medusa/src/controllers/products/admin-list-products.ts +++ b/packages/medusa/src/controllers/products/admin-list-products.ts @@ -3,9 +3,10 @@ import { AdminProductsListRes } from "../../api" import { pickBy } from "lodash" import { MedusaError } from "medusa-core-utils" import { Product } from "../../models/product" -import { ProductService } from "../../services" +import { ProductService, PricingService } from "../../services" import { getListConfig } from "../../utils/get-query-config" import { FilterableProductProps } from "../../types/product" +import { PricedProduct } from "../../types/pricing" type ListContext = { limit: number @@ -28,6 +29,7 @@ const listAndCount = async ( context const productService: ProductService = scope.resolve("productService") + const pricingService: PricingService = scope.resolve("pricingService") let includeFields: (keyof Product)[] | undefined if (context.fields) { includeFields = context.fields.split(",") as (keyof Product)[] @@ -67,11 +69,20 @@ const listAndCount = async ( orderBy ) - const [products, count] = await productService.listAndCount( + const [rawProducts, count] = await productService.listAndCount( pickBy(query, (val) => typeof val !== "undefined"), listConfig ) + let products: (Product | PricedProduct)[] = rawProducts + + const includesPricing = ["variants", "variants.prices"].every((relation) => + listConfig?.relations?.includes(relation) + ) + if (includesPricing) { + products = await pricingService.setProductPrices(rawProducts) + } + return { products, count, diff --git a/packages/medusa/src/helpers/test-request.js b/packages/medusa/src/helpers/test-request.js index de28d6eec0..f2ede4eb30 100644 --- a/packages/medusa/src/helpers/test-request.js +++ b/packages/medusa/src/helpers/test-request.js @@ -7,6 +7,7 @@ import supertest from "supertest" import querystring from "querystring" import apiLoader from "../loaders/api" import passportLoader from "../loaders/passport" +import featureFlagLoader from "../loaders/feature-flags" import servicesLoader from "../loaders/services" import strategiesLoader from "../loaders/strategies" @@ -24,17 +25,21 @@ const clientSessionOpts = { const config = { projectConfig: { - jwt_secret: 'supersecret', - cookie_secret: 'superSecret', - admin_cors: '', - store_cors: '' - } + jwt_secret: "supersecret", + cookie_secret: "superSecret", + 
admin_cors: "", + store_cors: "", + }, } const testApp = express() const container = createContainer() -container.register('configModule', asValue(config)) + +const featureFlagRouter = featureFlagLoader(config) + +container.register("featureFlagRouter", asValue(featureFlagRouter)) +container.register("configModule", asValue(config)) container.register({ logger: asValue({ error: () => {}, @@ -69,10 +74,16 @@ apiLoader({ container, app: testApp, configModule: config }) const supertestRequest = supertest(testApp) export async function request(method, url, opts = {}) { - const { payload, query, headers = {} } = opts + const { payload, query, headers = {}, flags = [] } = opts - const queryParams = query && querystring.stringify(query); - const req = supertestRequest[method.toLowerCase()](`${url}${queryParams ? "?" + queryParams : ''}`) + flags.forEach((flag) => { + featureFlagRouter.setFlag(flag, true) + }) + + const queryParams = query && querystring.stringify(query) + const req = supertestRequest[method.toLowerCase()]( + `${url}${queryParams ? "?" + queryParams : ""}` + ) headers.Cookie = headers.Cookie || "" if (opts.adminSession) { if (opts.adminSession.jwt) { diff --git a/packages/medusa/src/interfaces/abstract-parser.ts b/packages/medusa/src/interfaces/abstract-parser.ts new file mode 100644 index 0000000000..6368c29195 --- /dev/null +++ b/packages/medusa/src/interfaces/abstract-parser.ts @@ -0,0 +1,42 @@ +/** + * Generic parsing interface. All different parsing implementations (csv, json, etc.) should implement this interface + */ +export interface IParser { + /** + * + * @param readableStream readable stream to parse + * @param options options used for parsing by underlying parser implementation + */ + parse( + readableStream: NodeJS.ReadableStream, + options?: TParseOptions + ): Promise +} + +/** + * Abstract class implementation of the IParser interface. All different parsing implementations should extend this class + */ +export abstract class AbstractParser< + TSchema, + TParserResult, + TParseOptions, + TOutputResult +> implements IParser +{ + protected readonly $$schema: TSchema + + protected constructor(schema: TSchema) { + this.$$schema = schema + } + + public abstract parse( + readableStream: NodeJS.ReadableStream, + options?: TParseOptions + ): Promise + + /** + * + * @param data data to be built after parsing. Includes validation according to schema, transformation of values, etc. + */ + public abstract buildData(data: TParserResult[]): Promise +} diff --git a/packages/medusa/src/interfaces/batch-job-strategy.ts b/packages/medusa/src/interfaces/batch-job-strategy.ts new file mode 100644 index 0000000000..a7f733e9dd --- /dev/null +++ b/packages/medusa/src/interfaces/batch-job-strategy.ts @@ -0,0 +1,118 @@ +import { TransactionBaseService } from "./transaction-base-service" +import { BatchJobResultError, CreateBatchJobInput } from "../types/batch-job" +import { ProductExportBatchJob } from "../strategies/batch-jobs/product" +import { BatchJobService } from "../services" +import { BatchJob } from "../models" + +export interface IBatchJobStrategy> + extends TransactionBaseService { + /** + * Method for preparing a batch job for processing + */ + prepareBatchJobForProcessing( + batchJobEntity: CreateBatchJobInput, + req: Express.Request + ): Promise + + /** + * Method for pre-processing a batch job + */ + preProcessBatchJob(batchJobId: string): Promise + + /** + * Method does the actual processing of the job. Should report back on the progress of the operation. 
+ */ + processJob(batchJobId: string): Promise + + /** + * Builds and returns a template file that can be downloaded and filled in + */ + buildTemplate(): Promise +} + +export abstract class AbstractBatchJobStrategy< + T extends TransactionBaseService, + TContainer = unknown + > + extends TransactionBaseService + implements IBatchJobStrategy +{ + static identifier: string + static batchType: string + + protected abstract batchJobService_: BatchJobService + + async prepareBatchJobForProcessing( + batchJob: CreateBatchJobInput, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + req: Express.Request + ): Promise { + return batchJob + } + + // eslint-disable-next-line @typescript-eslint/no-unused-vars + public async preProcessBatchJob(batchJobId: string): Promise { + return + } + + public abstract processJob(batchJobId: string): Promise + + public abstract buildTemplate(): Promise + + protected async shouldRetryOnProcessingError( + // eslint-disable-next-line @typescript-eslint/no-unused-vars + batchJob: BatchJob, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + err: unknown + ): Promise { + return false + } + + protected async handleProcessingError( + batchJobId: string, + err: unknown, + result: T + ): Promise { + return await this.atomicPhase_(async (transactionManager) => { + const batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .retrieve(batchJobId)) as ProductExportBatchJob + + const shouldRetry = await this.shouldRetryOnProcessingError(batchJob, err) + + const errMessage = + (err as { message: string }).message ?? + `Something went wrong with the batchJob ${batchJob.id}` + const errCode = (err as { code: string | number }).code ?? "unknown" + const resultError = { message: errMessage, code: errCode, err } + + if (shouldRetry) { + const existingErrors = + batchJob?.result?.errors ?? ([] as BatchJobResultError[]) + const retryCount = batchJob.context.retry_count ?? 0 + + await this.batchJobService_ + .withTransaction(transactionManager) + .update(batchJobId, { + context: { + retry_count: retryCount + 1, + }, + result: { + ...result, + errors: [...existingErrors, resultError], + }, + }) + } else { + await this.batchJobService_ + .withTransaction(transactionManager) + .setFailed(batchJob, resultError) + } + }) + } +} + +export function isBatchJobStrategy( + object: unknown +): object is IBatchJobStrategy { + return object instanceof AbstractBatchJobStrategy +} diff --git a/packages/medusa/src/interfaces/csv-parser.ts b/packages/medusa/src/interfaces/csv-parser.ts new file mode 100644 index 0000000000..25c05a9990 --- /dev/null +++ b/packages/medusa/src/interfaces/csv-parser.ts @@ -0,0 +1,73 @@ +import { AwilixContainer } from "awilix" + +/** + * Generic validation interface used to run validation logic on every line or record. + * All different validation objects should implement this interface + */ +export interface ICsvValidator { + /** + * + * @param value value of column or property + * @param context includes contextual information such as line number, line, etc. + */ + validate: ( + value: TBuiltLine, + context: CsvParserContext + ) => Promise +} + +export type CsvParserContext = LineContext & { + column: string +} + +export type LineContext = { + lineNumber: number + line: TLine +} + +/** + * Abstract class implementation of the IValidator interface. + * All validation objects part of the schema should extend this class. 
+ */ +export abstract class AbstractCsvValidator + implements ICsvValidator +{ + constructor(protected readonly container: AwilixContainer) {} + + abstract validate( + builtLine: TBuiltLine, + context: CsvParserContext + ): Promise +} + +export type CsvSchemaColumn = { + name: string + required?: boolean + validator?: AbstractCsvValidator +} & ( + | { + mapTo?: string + transform?: ColumnTransformer + } + | { + match?: RegExp + reducer?: ColumnReducer + transform?: ColumnTransformer + } +) + +export type ColumnTransformer = ( + value: string, + context: CsvParserContext +) => unknown + +export type ColumnReducer = ( + builtLine: TBuiltLine, + key: string, + value: string, + context: CsvParserContext +) => TBuiltLine + +export type CsvSchema = { + columns: CsvSchemaColumn[] +} diff --git a/packages/medusa/src/interfaces/file-service.ts b/packages/medusa/src/interfaces/file-service.ts new file mode 100644 index 0000000000..e64a765fcb --- /dev/null +++ b/packages/medusa/src/interfaces/file-service.ts @@ -0,0 +1,97 @@ +import stream from "stream" +import { TransactionBaseService } from "./transaction-base-service" + +export type FileServiceUploadResult = { + url: string +} + +export type FileServiceGetUploadStreamResult = { + writeStream: stream.PassThrough + promise: Promise + url: string + fileKey: string + [x: string]: unknown +} + +export type GetUploadedFileType = { + fileKey: string + [x: string]: unknown +} + +export type DeleteFileType = { + fileKey: string + [x: string]: unknown +} + +export type UploadStreamDescriptorType = { + name: string + ext?: string + acl?: string + [x: string]: unknown +} + +export interface IFileService> + extends TransactionBaseService { + /** + * upload file to fileservice + * @param file Multer file from express multipart/form-data + * */ + upload(file: Express.Multer.File): Promise + + /** + * remove file from fileservice + * @param fileData Remove file described by record + * */ + delete(fileData: DeleteFileType): Promise + + /** + * upload file to fileservice from stream + * @param fileData file metadata relevant for fileservice to create and upload the file + * @param fileStream readable stream of the file to upload + * */ + getUploadStreamDescriptor( + fileData: UploadStreamDescriptorType + ): Promise + + /** + * download file from fileservice as stream + * @param fileData file metadata relevant for fileservice to download the file + * @returns readable stream of the file to download + * */ + getDownloadStream( + fileData: GetUploadedFileType + ): Promise + + /** + * Generate a presigned download url to obtain a file + * @param fileData file metadata relevant for fileservice to download the file + * @returns presigned url to download the file + * */ + getPresignedDownloadUrl(fileData: GetUploadedFileType): Promise +} +export abstract class AbstractFileService> + extends TransactionBaseService + implements IFileService +{ + abstract upload( + fileData: Express.Multer.File + ): Promise + + abstract delete(fileData: DeleteFileType): Promise + + abstract getUploadStreamDescriptor( + fileData: UploadStreamDescriptorType + ): Promise + + abstract getDownloadStream( + fileData: GetUploadedFileType + ): Promise + + abstract getPresignedDownloadUrl( + fileData: GetUploadedFileType + ): Promise +} + +export const isFileService = (object: unknown): boolean => { + return object instanceof AbstractFileService +} diff --git a/packages/medusa/src/interfaces/index.ts b/packages/medusa/src/interfaces/index.ts index a3faeabb80..97fc1ffe72 100644 --- 
a/packages/medusa/src/interfaces/index.ts +++ b/packages/medusa/src/interfaces/index.ts @@ -2,5 +2,7 @@ export * from "./tax-calculation-strategy" export * from "./cart-completion-strategy" export * from "./tax-service" export * from "./transaction-base-service" +export * from "./batch-job-strategy" +export * from "./file-service" export * from "./models/base-entity" export * from "./models/soft-deletable-entity" diff --git a/packages/medusa/src/loaders/__tests__/feature-flags.spec.ts b/packages/medusa/src/loaders/__tests__/feature-flags.spec.ts new file mode 100644 index 0000000000..32adee8b6e --- /dev/null +++ b/packages/medusa/src/loaders/__tests__/feature-flags.spec.ts @@ -0,0 +1,130 @@ +import { resolve } from "path" +import { mkdirSync, rmSync, writeFileSync } from "fs" + +import loadFeatureFlags from "../feature-flags" + +const distTestTargetDirectorPath = resolve(__dirname, "__ff-test__") + +const getFolderTestTargetDirectoryPath = (folderName: string): string => { + return resolve(distTestTargetDirectorPath, folderName) +} + +const buildFeatureFlag = ( + key: string, + defaultVal: string | boolean +): string => { + const snakeCaseKey = key.replace(/-/g, "_") + + return ` + export default { + description: "${key} descr", + key: "${snakeCaseKey}", + env_key: "MEDUSA_FF_${snakeCaseKey.toUpperCase()}", + default_val: ${defaultVal}, + } + ` +} + +describe("feature flags", () => { + const OLD_ENV = { ...process.env } + + beforeEach(() => { + jest.resetModules() + jest.clearAllMocks() + + process.env = { ...OLD_ENV } + + rmSync(distTestTargetDirectorPath, { recursive: true, force: true }) + + mkdirSync(getFolderTestTargetDirectoryPath("project"), { + mode: "777", + recursive: true, + }) + + mkdirSync(getFolderTestTargetDirectoryPath("flags"), { + mode: "777", + recursive: true, + }) + }) + + afterAll(() => { + process.env = OLD_ENV + rmSync(distTestTargetDirectorPath, { recursive: true, force: true }) + }) + + it("should load the flag from project", async () => { + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("flags"), "flag-1.js"), + buildFeatureFlag("flag-1", true) + ) + + const flags = await loadFeatureFlags( + { featureFlags: { flag_1: false } }, + undefined, + getFolderTestTargetDirectoryPath("flags") + ) + + expect(flags.isFeatureEnabled("flag_1")).toEqual(false) + }) + + it("should load the default feature flags", async () => { + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("flags"), "flag-1.js"), + buildFeatureFlag("flag-1", true) + ) + + const flags = await loadFeatureFlags( + {}, + undefined, + getFolderTestTargetDirectoryPath("flags") + ) + + expect(flags.isFeatureEnabled("flag_1")).toEqual(true) + }) + + it("should load the flag from env", async () => { + process.env.MEDUSA_FF_FLAG_1 = "false" + + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("flags"), "flag-1.js"), + buildFeatureFlag("flag-1", true) + ) + + const flags = await loadFeatureFlags( + {}, + undefined, + getFolderTestTargetDirectoryPath("flags") + ) + + expect(flags.isFeatureEnabled("flag_1")).toEqual(false) + }) + + it("should load mix of flags", async () => { + process.env.MEDUSA_FF_FLAG_3 = "false" + + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("flags"), "flag-1.js"), + buildFeatureFlag("flag-1", true) + ) + + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("flags"), "flag-2.js"), + buildFeatureFlag("flag-2", true) + ) + + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("flags"), "flag-3.js"), + buildFeatureFlag("flag-3", true) + ) + + 
const flags = await loadFeatureFlags( + { featureFlags: { flag_2: false } }, + undefined, + getFolderTestTargetDirectoryPath("flags") + ) + + expect(flags.isFeatureEnabled("flag_1")).toEqual(true) + expect(flags.isFeatureEnabled("flag_2")).toEqual(false) + expect(flags.isFeatureEnabled("flag_3")).toEqual(false) + }) +}) diff --git a/packages/medusa/src/loaders/__tests__/plugins.spec.ts b/packages/medusa/src/loaders/__tests__/plugins.spec.ts index e193a70d8c..1f35edc23b 100644 --- a/packages/medusa/src/loaders/__tests__/plugins.spec.ts +++ b/packages/medusa/src/loaders/__tests__/plugins.spec.ts @@ -1,21 +1,140 @@ -import { createContainer, asValue } from "awilix" -import { mkdirSync, rmSync, rmdirSync, writeFileSync } from "fs" +import { + createContainer, + asValue, + Resolver, + ClassOrFunctionReturning, + asFunction, + AwilixContainer, +} from "awilix" +import { mkdirSync, rmSync, writeFileSync } from "fs" import { resolve } from "path" import Logger from "../logger" -import { registerServices } from "../plugins" +import { registerServices, registerStrategies } from "../plugins" import { MedusaContainer } from "../../types/global" -const distTestTargetDirectorPath = resolve(__dirname, "__pluginsLoaderTest__") -const servicesTestTargetDirectoryPath = resolve(distTestTargetDirectorPath, "services") -const buildServiceTemplate = (name: string) => { +// ***** TEMPLATES ***** +const buildServiceTemplate = (name: string): string => { return ` import { BaseService } from "medusa-interfaces" export default class ${name}Service extends BaseService {} ` } +const buildTransactionBaseServiceServiceTemplate = (name: string) => { + return ` + import { TransactionBaseService } from "${resolve( + __dirname, + "../../interfaces" + )}" + export default class ${name}Service extends TransactionBaseService {} + ` +} -describe('plugins loader', () => { +const buildBatchJobStrategyTemplate = (name: string, type: string): string => { + return ` + import { AbstractBatchJobStrategy } from "../../../../interfaces/batch-job-strategy" + + class ${name}BatchStrategy extends AbstractBatchJobStrategy{ + static identifier = '${name}-identifier'; + static batchType = '${type}'; + + manager_ + transactionManager_ + + validateContext(context) { + throw new Error("Method not implemented.") + } + processJob(batchJobId) { + throw new Error("Method not implemented.") + } + completeJob(batchJobId) { + throw new Error("Method not implemented.") + } + validateFile(fileLocation) { + throw new Error("Method not implemented.") + } + async buildTemplate() { + throw new Error("Method not implemented.") + } + } + + export default ${name}BatchStrategy + ` +} + +const buildPriceSelectionStrategyTemplate = (name: string): string => { + return ` + import { AbstractPriceSelectionStrategy } from "../../../../interfaces/price-selection-strategy" + + class ${name}PriceSelectionStrategy extends AbstractPriceSelectionStrategy { + withTransaction() { + throw new Error("Method not implemented."); + } + calculateVariantPrice(variant_id, context) { + throw new Error("Method not implemented."); + } + } + + export default ${name}PriceSelectionStrategy + ` +} + +const buildTaxCalcStrategyTemplate = (name: string): string => { + return ` + class ${name}TaxCalculationStrategy { + calculate(items, taxLines, calculationContext) { + throw new Error("Method not implemented.") + } + } + + export default ${name}TaxCalculationStrategy + ` +} + +// ***** UTILS ***** + +const distTestTargetDirectorPath = resolve(__dirname, "__pluginsLoaderTest__") + +const 
getFolderTestTargetDirectoryPath = (folderName: string): string => { + return resolve(distTestTargetDirectorPath, folderName) +} + +function asArray( + resolvers: (ClassOrFunctionReturning | Resolver)[] +): { resolve: (container: AwilixContainer) => unknown[] } { + return { + resolve: (container: AwilixContainer): unknown[] => + resolvers.map((resolver) => container.build(resolver)), + } +} + +// ***** TESTS ***** + +describe("plugins loader", () => { const container = createContainer() as MedusaContainer + container.registerAdd = function ( + this: MedusaContainer, + name: string, + registration: typeof asFunction | typeof asValue + ): MedusaContainer { + const storeKey = name + "_STORE" + + if (this.registrations[storeKey] === undefined) { + this.register(storeKey, asValue([] as Resolver[])) + } + const store = this.resolve(storeKey) as ( + | ClassOrFunctionReturning + | Resolver + )[] + + if (this.registrations[name] === undefined) { + this.register(name, asArray(store)) + } + store.unshift(registration) + + return this + }.bind(container) + + container.register("logger", asValue(Logger)) const pluginsDetails = { resolve: resolve(__dirname, "__pluginsLoaderTest__"), name: `project-plugin`, @@ -23,24 +142,154 @@ describe('plugins loader', () => { options: {}, version: '"fakeVersion', } + let err - describe("registerServices", function() { - beforeAll(() => { - container.register("logger", asValue(Logger)) - mkdirSync(servicesTestTargetDirectoryPath, { mode: "777", recursive: true }) - writeFileSync(resolve(servicesTestTargetDirectoryPath, "test.js"), buildServiceTemplate("test")) - writeFileSync(resolve(servicesTestTargetDirectoryPath, "test2.js"), buildServiceTemplate("test2")) - writeFileSync(resolve(servicesTestTargetDirectoryPath, "test2.js.map"), "map:file") - writeFileSync(resolve(servicesTestTargetDirectoryPath, "test2.d.ts"), "export interface Test {}") + afterAll(() => { + rmSync(distTestTargetDirectorPath, { recursive: true, force: true }) + jest.clearAllMocks() + }) + + describe("registerStrategies", function () { + beforeAll(async () => { + mkdirSync(getFolderTestTargetDirectoryPath("strategies"), { + mode: "777", + recursive: true, + }) + writeFileSync( + resolve( + getFolderTestTargetDirectoryPath("strategies"), + "test-batch-1.js" + ), + buildBatchJobStrategyTemplate("testBatch1", "type-1") + ) + writeFileSync( + resolve( + getFolderTestTargetDirectoryPath("strategies"), + "test-price-selection.js" + ), + buildPriceSelectionStrategyTemplate("test") + ) + writeFileSync( + resolve( + getFolderTestTargetDirectoryPath("strategies"), + "test-batch-2.js" + ), + buildBatchJobStrategyTemplate("testBatch2", "type-1") + ) + writeFileSync( + resolve( + getFolderTestTargetDirectoryPath("strategies"), + "test-batch-3.js" + ), + buildBatchJobStrategyTemplate("testBatch3", "type-2") + ) + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("strategies"), "test-tax.js"), + buildTaxCalcStrategyTemplate("test") + ) + + try { + await registerStrategies(pluginsDetails, container) + } catch (e) { + err = e + } }) afterAll(() => { - rmSync(distTestTargetDirectorPath, { recursive: true, force: true }) jest.clearAllMocks() }) - it('should load the services from the services directory but only js files', async () => { - let err; + it("err should be falsy", () => { + expect(err).toBeFalsy() + }) + + it("registers price selection strategy", () => { + const priceSelectionStrategy = + container.resolve("priceSelectionStrategy") as (...args: unknown[]) => any + + 
expect(priceSelectionStrategy).toBeTruthy() + expect(priceSelectionStrategy.constructor.name).toBe( + "testPriceSelectionStrategy" + ) + }) + + it("registers tax calculation strategy", () => { + const taxCalculationStrategy = + container.resolve("taxCalculationStrategy") as (...args: unknown[]) => any + + expect(taxCalculationStrategy).toBeTruthy() + expect(taxCalculationStrategy.constructor.name).toBe( + "testTaxCalculationStrategy" + ) + }) + + it("registers batch job strategies as single array", () => { + const batchJobStrategies = + container.resolve("batchJobStrategies") as (...args: unknown[]) => any + + expect(batchJobStrategies).toBeTruthy() + expect(Array.isArray(batchJobStrategies)).toBeTruthy() + expect(batchJobStrategies.length).toBe(3) + }) + + it("registers batch job strategies by type and only keep the last", () => { + const batchJobStrategy = + container.resolve("batchType_type-1") as (...args: unknown[]) => any + + expect(batchJobStrategy).toBeTruthy() + expect(batchJobStrategy.constructor.name).toBe("testBatch2BatchStrategy") + expect((batchJobStrategy.constructor as any).batchType).toBe("type-1") + expect((batchJobStrategy.constructor as any).identifier).toBe( + "testBatch2-identifier" + ) + }) + + it("registers batch job strategies by identifier", () => { + const batchJobStrategy = container.resolve( + "batch_testBatch3-identifier" + ) as (...args: unknown[]) => any + + expect(batchJobStrategy).toBeTruthy() + expect(Array.isArray(batchJobStrategy)).toBeFalsy() + expect(batchJobStrategy.constructor.name).toBe("testBatch3BatchStrategy") + }) + }) + + describe("registerServices", function () { + beforeAll(() => { + container.register("logger", asValue(Logger)) + mkdirSync(getFolderTestTargetDirectoryPath("services"), { + mode: "777", + recursive: true, + }) + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("services"), "test.js"), + buildServiceTemplate("test") + ) + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("services"), "test2.js"), + buildServiceTemplate("test2") + ) + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("services"), "test3.js"), + buildTransactionBaseServiceServiceTemplate("test3") + ) + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("services"), "test2.js.map"), + "map:file" + ) + writeFileSync( + resolve(getFolderTestTargetDirectoryPath("services"), "test2.d.ts"), + "export interface Test {}" + ) + }) + + afterAll(() => { + jest.clearAllMocks() + }) + + it("should load the services from the services directory but only js files", async () => { + let err try { await registerServices(pluginsDetails, container) } catch (e) { @@ -49,13 +298,19 @@ describe('plugins loader', () => { expect(err).toBeFalsy() - const testService: (...args: unknown[]) => any = container.resolve("testService") - const test2Service: (...args: unknown[]) => any = container.resolve("test2Service") + const testService: (...args: unknown[]) => any = + container.resolve("testService") + const test2Service: (...args: unknown[]) => any = + container.resolve("test2Service") + const test3Service: (...args: unknown[]) => any = + container.resolve("test3Service") expect(testService).toBeTruthy() expect(testService.constructor.name).toBe("testService") expect(test2Service).toBeTruthy() expect(test2Service.constructor.name).toBe("test2Service") + expect(test3Service).toBeTruthy() + expect(test3Service.constructor.name).toBe("test3Service") }) }) -}) \ No newline at end of file +}) diff --git a/packages/medusa/src/loaders/config.ts 
b/packages/medusa/src/loaders/config.ts index 6302f604fd..13c8be0ada 100644 --- a/packages/medusa/src/loaders/config.ts +++ b/packages/medusa/src/loaders/config.ts @@ -56,6 +56,7 @@ export default (rootDirectory: string): ConfigModule => { cookie_secret: cookie_secret ?? "supersecret", ...configModule?.projectConfig, }, + featureFlags: configModule?.featureFlags ?? {}, plugins: configModule?.plugins ?? [], } } diff --git a/packages/medusa/src/loaders/feature-flags/index.ts b/packages/medusa/src/loaders/feature-flags/index.ts new file mode 100644 index 0000000000..33844084a4 --- /dev/null +++ b/packages/medusa/src/loaders/feature-flags/index.ts @@ -0,0 +1,68 @@ +import path from "path" +import glob from "glob" + +import { FlagSettings } from "../../types/feature-flags" +import { FlagRouter } from "../../utils/flag-router" +import { Logger } from "../../types/global" + +const isTruthy = (val: string | boolean | undefined): boolean => { + if (typeof val === "string") { + return val.toLowerCase() === "true" + } + return !!val +} + +export default ( + configModule: { featureFlags?: Record<string, boolean | string> } = {}, + logger?: Logger, + flagDirectory?: string ): FlagRouter => { + const { featureFlags: projectConfigFlags = {} } = configModule + + const flagDir = path.join(flagDirectory || __dirname, "*.js") + const supportedFlags = glob.sync(flagDir, { + ignore: ["**/index.js"], + }) + + const flagConfig: Record<string, boolean> = {} + for (const flag of supportedFlags) { + // eslint-disable-next-line @typescript-eslint/no-var-requires + const importedModule = require(flag) + if (!importedModule.default) { + continue + } + + const flagSettings: FlagSettings = importedModule.default + + switch (true) { + case typeof process.env[flagSettings.env_key] !== "undefined": + if (logger) { + logger.info( + `Using flag ${flagSettings.env_key} from environment with value ${ + process.env[flagSettings.env_key] + }` + ) + } + flagConfig[flagSettings.key] = isTruthy( + process.env[flagSettings.env_key] + ) + break + case typeof projectConfigFlags[flagSettings.key] !== "undefined": + if (logger) { + logger.info( + `Using flag ${flagSettings.key} from project config with value ${ + projectConfigFlags[flagSettings.key] + }` + ) + } + flagConfig[flagSettings.key] = isTruthy( + projectConfigFlags[flagSettings.key] + ) + break + default: + flagConfig[flagSettings.key] = flagSettings.default_val + } + } + + return new FlagRouter(flagConfig) +} diff --git a/packages/medusa/src/loaders/index.ts b/packages/medusa/src/loaders/index.ts index cdfeaf4b95..7c67e47b1b 100644 --- a/packages/medusa/src/loaders/index.ts +++ b/packages/medusa/src/loaders/index.ts @@ -1,10 +1,11 @@ -import loadConfig from './config' +import loadConfig from "./config" import "reflect-metadata" import Logger from "./logger" import apiLoader from "./api" +import featureFlagsLoader from "./feature-flags" import databaseLoader from "./database" import defaultsLoader from "./defaults" -import expressLoader from "./express" +import expressLoader from "./express" import modelsLoader from "./models" import passportLoader from "./passport" import pluginsLoader, { registerPluginModels } from "./plugins" @@ -18,35 +19,50 @@ import subscribersLoader from "./subscribers" import { ClassOrFunctionReturning } from "awilix/lib/container" import { Connection, getManager } from "typeorm" import { Express, NextFunction, Request, Response } from "express" -import { asFunction, asValue, AwilixContainer, createContainer, Resolver } from "awilix" +import { + asFunction, + asValue, + AwilixContainer, + 
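As a sketch of what feeds the feature-flag loader above: each flag ships as a module whose default export matches the FlagSettings shape the tests construct, and it can then be toggled per project in medusa-config.js or through its env_key. The flag below is a placeholder:

// e.g. packages/medusa/src/loaders/feature-flags/my-flag.ts (placeholder flag)
export default {
  description: "Enables an experimental code path",
  key: "my_flag",
  env_key: "MEDUSA_FF_MY_FLAG",
  default_val: false,
}

// medusa-config.js: module.exports = { projectConfig: { ... }, featureFlags: { my_flag: true }, plugins: [] }
// or via the environment: MEDUSA_FF_MY_FLAG=true medusa start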
createContainer, + Resolver, +} from "awilix" import { track } from "medusa-telemetry" import { MedusaContainer } from "../types/global" type Options = { - directory: string; - expressApp: Express; + directory: string + expressApp: Express isTest: boolean } -export default async ( - { - directory: rootDirectory, - expressApp, - isTest - }: Options -): Promise<{ container: MedusaContainer; dbConnection: Connection; app: Express }> => { +export default async ({ + directory: rootDirectory, + expressApp, + isTest, +}: Options): Promise<{ + container: MedusaContainer + dbConnection: Connection + app: Express +}> => { const configModule = loadConfig(rootDirectory) const container = createContainer() as MedusaContainer - container.register('configModule', asValue(configModule)) + container.register("configModule", asValue(configModule)) - container.registerAdd = function (this: MedusaContainer, name: string, registration: typeof asFunction | typeof asValue) { + container.registerAdd = function ( + this: MedusaContainer, + name: string, + registration: typeof asFunction | typeof asValue + ) { const storeKey = name + "_STORE" if (this.registrations[storeKey] === undefined) { this.register(storeKey, asValue([] as Resolver[])) } - const store = this.resolve(storeKey) as (ClassOrFunctionReturning | Resolver)[] + const store = this.resolve(storeKey) as ( + | ClassOrFunctionReturning + | Resolver + )[] if (this.registrations[name] === undefined) { this.register(name, asArray(store)) @@ -59,15 +75,17 @@ export default async ( // Add additional information to context of request expressApp.use((req: Request, res: Response, next: NextFunction) => { const ipAddress = requestIp.getClientIp(req) as string - - (req as any).request_context = { + ;(req as any).request_context = { ip_address: ipAddress, } next() }) + const featureFlagRouter = featureFlagsLoader(configModule, Logger) + container.register({ - logger: asValue(Logger) + logger: asValue(Logger), + featureFlagRouter: asValue(featureFlagRouter), }) await redisLoader({ container, configModule, logger: Logger }) @@ -83,7 +101,7 @@ export default async ( await registerPluginModels({ rootDirectory, container, - configModule + configModule, }) const pmAct = Logger.success(pmActivity, "Plugin models initialized") || {} track("PLUGIN_MODELS_INIT_COMPLETED", { duration: pmAct.duration }) @@ -100,7 +118,7 @@ export default async ( const dbAct = Logger.success(dbActivity, "Database initialized") || {} track("DATABASE_INIT_COMPLETED", { duration: dbAct.duration }) - container.register({ manager: asValue(dbConnection.manager), }) + container.register({ manager: asValue(dbConnection.manager) }) const stratActivity = Logger.activity("Initializing strategies") track("STRATEGIES_INIT_STARTED") @@ -123,8 +141,8 @@ export default async ( // Add the registered services to the request scope expressApp.use((req: Request, res: Response, next: NextFunction) => { - container.register({ manager: asValue(getManager()) }); - (req as any).scope = container.createScope() + container.register({ manager: asValue(getManager()) }) + ;(req as any).scope = container.createScope() next() }) diff --git a/packages/medusa/src/loaders/plugins.ts b/packages/medusa/src/loaders/plugins.ts index 7c24df9b17..e777fcbbb8 100644 --- a/packages/medusa/src/loaders/plugins.ts +++ b/packages/medusa/src/loaders/plugins.ts @@ -1,8 +1,8 @@ import glob from "glob" -import { Express } from 'express' +import { Express } from "express" import { EntitySchema } from "typeorm" import { - BaseService, + BaseService 
as LegacyBaseService, PaymentService, FulfillmentService, NotificationService, @@ -10,16 +10,29 @@ import { OauthService, SearchService, } from "medusa-interfaces" -import { getConfigFile, createRequireFromPath } from "medusa-core-utils" +import { createRequireFromPath } from "medusa-core-utils" import _ from "lodash" import path from "path" import fs from "fs" import { asValue, asClass, asFunction, aliasTo } from "awilix" import { sync as existsSync } from "fs-exists-cached" -import { AbstractTaxService, isTaxCalculationStrategy } from "../interfaces" +import { + AbstractTaxService, + isFileService, + isTaxCalculationStrategy, + TransactionBaseService as BaseService, +} from "../interfaces" import formatRegistrationName from "../utils/format-registration-name" -import { ClassConstructor, ConfigModule, Logger, MedusaContainer } from "../types/global" +import { + ClassConstructor, + ConfigModule, + Logger, + MedusaContainer, +} from "../types/global" import { MiddlewareService } from "../services" +import { isBatchJobStrategy } from "../interfaces/batch-job-strategy" +import { isPriceSelectionStrategy } from "../interfaces/price-selection-strategy" +import logger from "./logger" type Options = { rootDirectory: string @@ -40,14 +53,20 @@ type PluginDetails = { /** * Registers all services in the services directory */ -export default async ({ rootDirectory, container, app, configModule, activityId }: Options): Promise => { +export default async ({ + rootDirectory, + container, + app, + configModule, + activityId, +}: Options): Promise => { const resolved = getResolvedPlugins(rootDirectory, configModule) || [] await Promise.all( resolved.map(async (pluginDetails) => { registerRepositories(pluginDetails, container) await registerServices(pluginDetails, container) - registerMedusaApi(pluginDetails, container) + await registerMedusaApi(pluginDetails, container) registerApi(pluginDetails, app, rootDirectory, container, activityId) registerCoreRouters(pluginDetails, container) registerSubscribers(pluginDetails, container) @@ -59,7 +78,10 @@ export default async ({ rootDirectory, container, app, configModule, activityId ) } -function getResolvedPlugins(rootDirectory: string, configModule: ConfigModule): undefined | PluginDetails[] { +function getResolvedPlugins( + rootDirectory: string, + configModule: ConfigModule +): undefined | PluginDetails[] { const { plugins } = configModule const resolved = plugins.map((plugin) => { @@ -85,10 +107,14 @@ function getResolvedPlugins(rootDirectory: string, configModule: ConfigModule): } export async function registerPluginModels({ - rootDirectory, - container, - configModule -}: { rootDirectory: string; container: MedusaContainer; configModule: ConfigModule; }): Promise { + rootDirectory, + container, + configModule, +}: { + rootDirectory: string + container: MedusaContainer + configModule: ConfigModule +}): Promise { const resolved = getResolvedPlugins(rootDirectory, configModule) || [] await Promise.all( resolved.map(async (pluginDetails) => { @@ -97,7 +123,10 @@ export async function registerPluginModels({ ) } -async function runLoaders(pluginDetails: PluginDetails, container: MedusaContainer): Promise { +async function runLoaders( + pluginDetails: PluginDetails, + container: MedusaContainer +): Promise { const loaderFiles = glob.sync( `${pluginDetails.resolve}/loaders/[!__]*.js`, {} @@ -118,39 +147,89 @@ async function runLoaders(pluginDetails: PluginDetails, container: MedusaContain ) } -function registerMedusaApi(pluginDetails: PluginDetails, container: 
MedusaContainer): void { +async function registerMedusaApi( + pluginDetails: PluginDetails, + container: MedusaContainer +): Promise { registerMedusaMiddleware(pluginDetails, container) registerStrategies(pluginDetails, container) } -function registerStrategies(pluginDetails: PluginDetails, container: MedusaContainer): void { - let module - try { - const path = `${pluginDetails.resolve}/strategies/tax-calculation` - if (existsSync(path)) { - module = require(path).default - } else { - return - } - } catch (err) { - return - } +export function registerStrategies( + pluginDetails: PluginDetails, + container: MedusaContainer +): void { + const files = glob.sync(`${pluginDetails.resolve}/strategies/[!__]*.js`, { + ignore: ["**/__fixtures__/**", "**/index.js", "**/index.ts"], + }) + const registeredServices = {} - if (isTaxCalculationStrategy(module.prototype)) { - container.register({ - taxCalculationStrategy: asFunction( - (cradle) => new module(cradle, pluginDetails.options) - ).singleton(), - }) - } else { - const logger = container.resolve("logger") - logger.warn( - `${pluginDetails.resolve}/strategies/tax-calculation did not export a class that implements ITaxCalculationStrategy. Your Medusa server will still work, but if you have written custom tax calculation logic it will not be used. Make sure to implement the ITaxCalculationStrategy interface.` - ) - } + files.map((file) => { + const module = require(file).default + + switch (true) { + case isTaxCalculationStrategy(module.prototype): { + if (!("taxCalculationStrategy" in registeredServices)) { + container.register({ + taxCalculationStrategy: asFunction( + (cradle) => new module(cradle, pluginDetails.options) + ).singleton(), + }) + registeredServices["taxCalculationStrategy"] = file + } else { + logger.warn( + `Cannot register ${file}. A tax calculation strategy is already registered` + ) + } + break + } + + case isBatchJobStrategy(module.prototype): { + container.registerAdd( + "batchJobStrategies", + asFunction((cradle) => new module(cradle, pluginDetails.options)) + ) + + const name = formatRegistrationName(file) + container.register({ + [name]: asFunction( + (cradle) => new module(cradle, pluginDetails.options) + ).singleton(), + [`batch_${module.identifier}`]: aliasTo(name), + [`batchType_${module.batchType}`]: aliasTo(name), + }) + break + } + + case isPriceSelectionStrategy(module.prototype): { + if (!("priceSelectionStrategy" in registeredServices)) { + container.register({ + priceSelectionStrategy: asFunction( + (cradle) => new module(cradle, pluginDetails.options) + ).singleton(), + }) + + registeredServices["priceSelectionStrategy"] = file + } else { + logger.warn( + `Cannot register ${file}. A price selection strategy is already registered` + ) + } + break + } + + default: + logger.warn( + `${file} did not export a class that implements a strategy interface. Your Medusa server will still work, but if you have written custom strategy logic it will not be used. 
Make sure to implement the proper interface.` + ) + } + }) } -function registerMedusaMiddleware(pluginDetails: PluginDetails, container: MedusaContainer): void { +function registerMedusaMiddleware( + pluginDetails: PluginDetails, + container: MedusaContainer +): void { let module try { module = require(`${pluginDetails.resolve}/api/medusa-middleware`).default @@ -158,7 +237,8 @@ function registerMedusaMiddleware(pluginDetails: PluginDetails, container: Medus return } - const middlewareService = container.resolve("middlewareService") + const middlewareService = + container.resolve("middlewareService") if (module.postAuthentication) { middlewareService.addPostAuthentication( module.postAuthentication, @@ -178,8 +258,12 @@ function registerMedusaMiddleware(pluginDetails: PluginDetails, container: Medus } } -function registerCoreRouters(pluginDetails: PluginDetails, container: MedusaContainer): void { - const middlewareService = container.resolve("middlewareService") +function registerCoreRouters( + pluginDetails: PluginDetails, + container: MedusaContainer +): void { + const middlewareService = + container.resolve("middlewareService") const { resolve } = pluginDetails const adminFiles = glob.sync(`${resolve}/api/admin/[!__]*.js`, {}) const storeFiles = glob.sync(`${resolve}/api/store/[!__]*.js`, {}) @@ -245,16 +329,22 @@ function registerApi( * registered * @return {void} */ -export async function registerServices(pluginDetails: PluginDetails, container: MedusaContainer): Promise { +export async function registerServices( + pluginDetails: PluginDetails, + container: MedusaContainer +): Promise { const files = glob.sync(`${pluginDetails.resolve}/services/[!__]*.js`, {}) await Promise.all( files.map(async (fn) => { const loaded = require(fn).default const name = formatRegistrationName(fn) - if (!(loaded.prototype instanceof BaseService)) { + if ( + !(loaded.prototype instanceof LegacyBaseService) && + !(loaded.prototype instanceof BaseService) + ) { const logger = container.resolve("logger") - const message = `Services must inherit from BaseService, please check ${fn}` + const message = `The class must be a valid service implementation, please check ${fn}` logger.error(message) throw new Error(message) } @@ -277,7 +367,8 @@ export async function registerServices(pluginDetails: PluginDetails, container: } else if (loaded.prototype instanceof OauthService) { const appDetails = loaded.getAppDetails(pluginDetails.options) - const oauthService = container.resolve("oauthService") + const oauthService = + container.resolve("oauthService") await oauthService.registerOauthApp(appDetails) const name = appDetails.application_name @@ -315,7 +406,10 @@ export async function registerServices(pluginDetails: PluginDetails, container: ).singleton(), [`noti_${loaded.identifier}`]: aliasTo(name), }) - } else if (loaded.prototype instanceof FileService) { + } else if ( + loaded.prototype instanceof FileService || + isFileService(loaded.prototype) + ) { // Add the service directly to the container in order to make simple // resolution if we already know which file storage provider we need to use container.register({ @@ -365,7 +459,10 @@ export async function registerServices(pluginDetails: PluginDetails, container: * registered * @return {void} */ -function registerSubscribers(pluginDetails: PluginDetails, container: MedusaContainer): void { +function registerSubscribers( + pluginDetails: PluginDetails, + container: MedusaContainer +): void { const files = glob.sync(`${pluginDetails.resolve}/subscribers/*.js`, {}) 
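The rewritten registerStrategies above registers each plugin batch job strategy under three container keys: its formatted registration name, batch_<identifier>, and batchType_<batchType>, and it also appends the strategy to the shared "batchJobStrategies" array via registerAdd. A minimal sketch of that aliasing pattern, assuming a hypothetical MyImportStrategy that is not part of this diff:

    import { aliasTo, asFunction, createContainer } from "awilix"

    // Hypothetical strategy; the identifier and batchType values are illustrative.
    class MyImportStrategy {
      static identifier = "my-import"
      static batchType = "my-import"
      constructor(protected readonly cradle: Record<string, unknown>) {}
      async processJob(batchJobId: string): Promise<void> {
        // no-op, for illustration only
      }
    }

    const container = createContainer()
    const name = "myImportStrategyService"

    container.register({
      [name]: asFunction((cradle) => new MyImportStrategy(cradle)).singleton(),
      [`batch_${MyImportStrategy.identifier}`]: aliasTo(name),
      [`batchType_${MyImportStrategy.batchType}`]: aliasTo(name),
    })

    // A caller that only knows the job's type can resolve the matching strategy:
    const strategy = container.resolve<MyImportStrategy>("batchType_my-import")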
files.forEach((fn) => { const loaded = require(fn).default @@ -387,19 +484,24 @@ function registerSubscribers(pluginDetails: PluginDetails, container: MedusaCont * registered * @return {void} */ -function registerRepositories(pluginDetails: PluginDetails, container: MedusaContainer): void { +function registerRepositories( + pluginDetails: PluginDetails, + container: MedusaContainer +): void { const files = glob.sync(`${pluginDetails.resolve}/repositories/*.js`, {}) files.forEach((fn) => { const loaded = require(fn) as ClassConstructor - Object.entries(loaded).map(([, val]: [string, ClassConstructor]) => { - if (typeof val === "function") { - const name = formatRegistrationName(fn) - container.register({ - [name]: asClass(val), - }) + Object.entries(loaded).map( + ([, val]: [string, ClassConstructor]) => { + if (typeof val === "function") { + const name = formatRegistrationName(fn) + container.register({ + [name]: asClass(val), + }) + } } - }) + ) }) } @@ -414,21 +516,26 @@ function registerRepositories(pluginDetails: PluginDetails, container: MedusaCon * registered * @return {void} */ -function registerModels(pluginDetails: PluginDetails, container: MedusaContainer): void { +function registerModels( + pluginDetails: PluginDetails, + container: MedusaContainer +): void { const files = glob.sync(`${pluginDetails.resolve}/models/*.js`, {}) files.forEach((fn) => { const loaded = require(fn) as ClassConstructor | EntitySchema - Object.entries(loaded).map(([, val]: [string, ClassConstructor | EntitySchema]) => { - if (typeof val === "function" || val instanceof EntitySchema) { - const name = formatRegistrationName(fn) - container.register({ - [name]: asValue(val), - }) + Object.entries(loaded).map( + ([, val]: [string, ClassConstructor | EntitySchema]) => { + if (typeof val === "function" || val instanceof EntitySchema) { + const name = formatRegistrationName(fn) + container.register({ + [name]: asValue(val), + }) - container.registerAdd("db_entities", asValue(val)) + container.registerAdd("db_entities", asValue(val)) + } } - }) + ) }) } @@ -446,11 +553,11 @@ function createPluginId(name: string): string { * @return {object} the plugin details */ function resolvePlugin(pluginName: string): { - resolve: string; - id: string; - name: string; + resolve: string + id: string + name: string options: Record - version: string; + version: string } { // Only find plugins when we're not given an absolute path if (!existsSync(pluginName)) { diff --git a/packages/medusa/src/loaders/redis.ts b/packages/medusa/src/loaders/redis.ts index f10a63d9df..7519eb4ce0 100644 --- a/packages/medusa/src/loaders/redis.ts +++ b/packages/medusa/src/loaders/redis.ts @@ -5,12 +5,16 @@ import { ConfigModule, MedusaContainer } from "../types/global" import { Logger } from "../types/global" type Options = { - container: MedusaContainer; - configModule: ConfigModule; - logger: Logger; + container: MedusaContainer + configModule: ConfigModule + logger: Logger } -async function redisLoader({ container, configModule, logger }: Options): Promise { +async function redisLoader({ + container, + configModule, + logger, +}: Options): Promise { if (configModule.projectConfig.redis_url) { // Economical way of dealing with redis clients const client = new RealRedis(configModule.projectConfig.redis_url) diff --git a/packages/medusa/src/loaders/search-index.ts b/packages/medusa/src/loaders/search-index.ts index ea8937cfef..d6d6ba89fe 100644 --- a/packages/medusa/src/loaders/search-index.ts +++ b/packages/medusa/src/loaders/search-index.ts @@ 
-42,6 +42,7 @@ async function loadProductsIntoSearchEngine( "type", "collection", "variants.prices", + "images", "variants.options", "options", ], diff --git a/packages/medusa/src/loaders/strategies.ts b/packages/medusa/src/loaders/strategies.ts index 3aeef61e59..9063fcfef6 100644 --- a/packages/medusa/src/loaders/strategies.ts +++ b/packages/medusa/src/loaders/strategies.ts @@ -1,11 +1,13 @@ import glob from "glob" import path from "path" -import { AwilixContainer, asFunction } from "awilix" +import { asFunction, aliasTo } from "awilix" import formatRegistrationName from "../utils/format-registration-name" +import { isBatchJobStrategy } from "../interfaces" +import { MedusaContainer } from "../types/global" type LoaderOptions = { - container: AwilixContainer + container: MedusaContainer configModule: object isTest?: boolean } @@ -19,19 +21,40 @@ export default ({ container, configModule, isTest }: LoaderOptions): void => { typeof isTest !== "undefined" ? isTest : process.env.NODE_ENV === "test" const corePath = useMock - ? "../strategies/__mocks__/*.js" - : "../strategies/*.js" + ? "../strategies/__mocks__/[!__]*.js" + : "../strategies/**/[!__]*.js" + const coreFull = path.join(__dirname, corePath) - const core = glob.sync(coreFull, { cwd: __dirname }) + const core = glob.sync(coreFull, { + cwd: __dirname, + ignore: ["**/__fixtures__/**", "**/index.js", "**/index.ts"], + }) + core.forEach((fn) => { // eslint-disable-next-line @typescript-eslint/no-var-requires const loaded = require(fn).default const name = formatRegistrationName(fn) - container.register({ - [name]: asFunction( - (cradle) => new loaded(cradle, configModule) - ).singleton(), - }) + + if (isBatchJobStrategy(loaded.prototype)) { + container.registerAdd( + "batchJobStrategies", + asFunction((cradle) => new loaded(cradle, configModule)) + ) + + container.register({ + [name]: asFunction( + (cradle) => new loaded(cradle, configModule) + ).singleton(), + [`batch_${loaded.identifier}`]: aliasTo(name), + [`batchType_${loaded.batchType}`]: aliasTo(name), + }) + } else { + container.register({ + [name]: asFunction( + (cradle) => new loaded(cradle, configModule) + ).singleton(), + }) + } }) } diff --git a/packages/medusa/src/migrations/1649775522087-add_batch_job_model.ts b/packages/medusa/src/migrations/1649775522087-add_batch_job_model.ts index 9ffd8588dd..b6fccc109a 100644 --- a/packages/medusa/src/migrations/1649775522087-add_batch_job_model.ts +++ b/packages/medusa/src/migrations/1649775522087-add_batch_job_model.ts @@ -5,15 +5,38 @@ export class addBatchJobModel1649775522087 implements MigrationInterface { public async up(queryRunner: QueryRunner): Promise { await queryRunner.query( - `CREATE TYPE "batch_job_status_enum" AS ENUM('created', 'processing', 'awaiting_confirmation', 'completed')` + `CREATE TABLE "batch_job" + ( + "id" character varying NOT NULL, + "type" text NOT NULL, + "created_by" character varying, + "context" jsonb, + "result" jsonb, + "dry_run" boolean NOT NULL DEFAULT FALSE, + "created_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), + "pre_processed_at" TIMESTAMP WITH TIME ZONE, + "confirmed_at" TIMESTAMP WITH TIME ZONE, + "processing_at" TIMESTAMP WITH TIME ZONE, + "completed_at" TIMESTAMP WITH TIME ZONE, + "failed_at" TIMESTAMP WITH TIME ZONE, + "canceled_at" TIMESTAMP WITH TIME ZONE, + "updated_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), + "deleted_at" TIMESTAMP WITH TIME ZONE, + CONSTRAINT "PK_e57f84d485145d5be96bc6d871e" PRIMARY KEY ("id") + )` ) + await queryRunner.query( - `CREATE TABLE 
"batch_job" ("id" character varying NOT NULL, "type" text NOT NULL, "status" "public"."batch_job_status_enum" NOT NULL, "created_by" text, "context" jsonb, "result" jsonb, "created_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "updated_at" TIMESTAMP WITH TIME ZONE NOT NULL DEFAULT now(), "deleted_at" TIMESTAMP WITH TIME ZONE, CONSTRAINT "PK_e57f84d485145d5be96bc6d871e" PRIMARY KEY ("id"))` + `ALTER TABLE "batch_job" + ADD CONSTRAINT "FK_fa53ca4f5fd90605b532802a626" FOREIGN KEY ("created_by") REFERENCES "user" ("id") ON DELETE NO ACTION ON UPDATE NO ACTION` ) } public async down(queryRunner: QueryRunner): Promise { + await queryRunner.query( + `ALTER TABLE "batch_job" + DROP CONSTRAINT "FK_fa53ca4f5fd90605b532802a626"` + ) await queryRunner.query(`DROP TABLE "batch_job"`) - await queryRunner.query(`DROP TYPE "batch_job_status_enum"`) } -} +} \ No newline at end of file diff --git a/packages/medusa/src/models/batch-job.ts b/packages/medusa/src/models/batch-job.ts index 944619926e..921e35f91e 100644 --- a/packages/medusa/src/models/batch-job.ts +++ b/packages/medusa/src/models/batch-job.ts @@ -1,30 +1,105 @@ -import { BeforeInsert, Column, Entity, PrimaryColumn } from "typeorm" -import { BatchJobStatus } from "../types/batch-job" -import { DbAwareColumn } from "../utils/db-aware-column" +import { + AfterLoad, + BeforeInsert, + Column, + Entity, + JoinColumn, + ManyToOne, +} from "typeorm" +import { + BatchJobResultError, + BatchJobResultStatDescriptor, + BatchJobStatus, +} from "../types/batch-job" +import { DbAwareColumn, resolveDbType } from "../utils/db-aware-column" import { SoftDeletableEntity } from "../interfaces/models/soft-deletable-entity" import { generateEntityId } from "../utils/generate-entity-id" +import { User } from "./user" +import { RequestQueryFields, Selector } from "../types/common" @Entity() export class BatchJob extends SoftDeletableEntity { @DbAwareColumn({ type: "text" }) type: string - @DbAwareColumn({ type: "enum", enum: BatchJobStatus }) - status: BatchJobStatus - - @Column({ type: "text", nullable: true }) + @Column({ nullable: true }) created_by: string | null + @ManyToOne(() => User) + @JoinColumn({ name: "created_by" }) + created_by_user: User + @DbAwareColumn({ type: "jsonb", nullable: true }) context: Record @DbAwareColumn({ type: "jsonb", nullable: true }) - result: Record + result: { + count?: number + advancement_count?: number + progress?: number + errors?: BatchJobResultError[] + stat_descriptors?: BatchJobResultStatDescriptor[] + file_key?: string + file_size?: number + } & Record + + @Column({ type: "boolean", nullable: false, default: false }) + dry_run = false + + @Column({ type: resolveDbType("timestamptz"), nullable: true }) + pre_processed_at?: Date + + @Column({ type: resolveDbType("timestamptz"), nullable: true }) + processing_at?: Date + + @Column({ type: resolveDbType("timestamptz"), nullable: true }) + confirmed_at?: Date + + @Column({ type: resolveDbType("timestamptz"), nullable: true }) + completed_at?: Date + + @Column({ type: resolveDbType("timestamptz"), nullable: true }) + canceled_at?: Date + + @Column({ type: resolveDbType("timestamptz"), nullable: true }) + failed_at?: Date + + status: BatchJobStatus + + @AfterLoad() + loadStatus(): void { + /* Always keep the status order consistent. 
*/ + if (this.pre_processed_at) { + this.status = BatchJobStatus.PRE_PROCESSED + } + if (this.confirmed_at) { + this.status = BatchJobStatus.CONFIRMED + } + if (this.processing_at) { + this.status = BatchJobStatus.PROCESSING + } + if (this.completed_at) { + this.status = BatchJobStatus.COMPLETED + } + if (this.canceled_at) { + this.status = BatchJobStatus.CANCELED + } + if (this.failed_at) { + this.status = BatchJobStatus.FAILED + } + + this.status = this.status ?? BatchJobStatus.CREATED + } @BeforeInsert() private beforeInsert(): void { this.id = generateEntityId(this.id, "batch") } + + toJSON() { + this.loadStatus() + return this + } } /** @@ -47,18 +122,40 @@ export class BatchJob extends SoftDeletableEntity { * type: string * enum: * - created + * - pre_processed * - processing - * - awaiting_confirmation * - completed + * - canceled + * - failed * created_by: * description: "The unique identifier of the user that created the batch job." * type: string * context: * description: "The context of the batch job, the type of the batch job determines what the context should contain." * type: object + * dry_run: + * description: "Specify whether the job should apply its modifications." + * type: boolean + * default: false * result: * description: "The result of the batch job." * type: object + * pre_processed_at: + * description: "The date at which the job was pre-processed." + * type: string + * format: date-time + * confirmed_at: + * description: "The date at which the job was confirmed." + * type: string + * format: date-time + * completed_at: + * description: "The date at which the job was completed." + * type: string + * format: date-time + * canceled_at: + * description: "The date at which the job was canceled." + * type: string + * format: date-time * created_at: * description: "The date with timezone at which the resource was created."
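With the enum column dropped by the batch_job migration above, status is now derived from the lifecycle timestamps on every load and again in toJSON. A standalone sketch of that precedence, where later checks overwrite earlier ones so the furthest lifecycle step wins; the enum string values here are assumed for illustration, the real ones live in ../types/batch-job:

    // Assumed string values; only the precedence mirrors the loadStatus() hook above.
    enum BatchJobStatus {
      CREATED = "created",
      PRE_PROCESSED = "pre_processed",
      CONFIRMED = "confirmed",
      PROCESSING = "processing",
      COMPLETED = "completed",
      CANCELED = "canceled",
      FAILED = "failed",
    }

    type BatchJobTimestamps = Partial<
      Record<
        | "pre_processed_at"
        | "confirmed_at"
        | "processing_at"
        | "completed_at"
        | "canceled_at"
        | "failed_at",
        Date
      >
    >

    function deriveStatus(job: BatchJobTimestamps): BatchJobStatus {
      let status: BatchJobStatus | undefined
      if (job.pre_processed_at) status = BatchJobStatus.PRE_PROCESSED
      if (job.confirmed_at) status = BatchJobStatus.CONFIRMED
      if (job.processing_at) status = BatchJobStatus.PROCESSING
      if (job.completed_at) status = BatchJobStatus.COMPLETED
      if (job.canceled_at) status = BatchJobStatus.CANCELED
      if (job.failed_at) status = BatchJobStatus.FAILED
      return status ?? BatchJobStatus.CREATED
    }

    // A job that was pre-processed and then confirmed reports CONFIRMED:
    const example = deriveStatus({
      pre_processed_at: new Date(),
      confirmed_at: new Date(),
    })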
* type: string diff --git a/packages/medusa/src/models/customer.ts b/packages/medusa/src/models/customer.ts index 22d4e5dd72..042939fae1 100644 --- a/packages/medusa/src/models/customer.ts +++ b/packages/medusa/src/models/customer.ts @@ -31,7 +31,7 @@ export class Customer extends SoftDeletableEntity { @Index() @Column({ nullable: true }) - billing_address_id: string + billing_address_id: string | null @OneToOne(() => Address) @JoinColumn({ name: "billing_address_id" }) diff --git a/packages/medusa/src/models/draft-order.ts b/packages/medusa/src/models/draft-order.ts index 00eb84df91..54c44f42b9 100644 --- a/packages/medusa/src/models/draft-order.ts +++ b/packages/medusa/src/models/draft-order.ts @@ -18,7 +18,7 @@ import { Cart } from "./cart" import { Order } from "./order" import { generateEntityId } from "../utils/generate-entity-id" -enum DraftOrderStatus { +export enum DraftOrderStatus { OPEN = "open", COMPLETED = "completed", } diff --git a/packages/medusa/src/models/line-item.ts b/packages/medusa/src/models/line-item.ts index 0701961754..2f92981cec 100644 --- a/packages/medusa/src/models/line-item.ts +++ b/packages/medusa/src/models/line-item.ts @@ -72,8 +72,8 @@ export class LineItem extends BaseEntity { @Column({ nullable: true }) description: string - @Column({ nullable: true }) - thumbnail: string + @Column({ type: "text", nullable: true }) + thumbnail: string | null @Column({ default: false }) is_return: boolean @@ -116,7 +116,14 @@ export class LineItem extends BaseEntity { @DbAwareColumn({ type: "jsonb", nullable: true }) metadata: Record - refundable: number | null + refundable?: number | null + subtotal?: number | null + tax_total?: number | null + total?: number | null + original_total?: number | null + original_tax_total?: number | null + discount_total?: number | null + gift_card_total?: number | null @BeforeInsert() private beforeInsert(): void { diff --git a/packages/medusa/src/models/product.ts b/packages/medusa/src/models/product.ts index cfe025eeb2..f3d6f84aa6 100644 --- a/packages/medusa/src/models/product.ts +++ b/packages/medusa/src/models/product.ts @@ -21,7 +21,7 @@ import { ShippingProfile } from "./shipping-profile" import { SoftDeletableEntity } from "../interfaces/models/soft-deletable-entity" import { generateEntityId } from "../utils/generate-entity-id" -export enum Status { +export enum ProductStatus { DRAFT = "draft", PROPOSED = "proposed", PUBLISHED = "published", @@ -33,21 +33,21 @@ export class Product extends SoftDeletableEntity { @Column() title: string - @Column({ nullable: true }) - subtitle: string + @Column({ type: "text", nullable: true }) + subtitle: string | null - @Column({ nullable: true }) - description: string + @Column({ type: "text", nullable: true }) + description: string | null @Index({ unique: true, where: "deleted_at IS NULL" }) - @Column({ nullable: true }) - handle: string + @Column({ type: "text", nullable: true }) + handle: string | null @Column({ default: false }) is_giftcard: boolean - @DbAwareColumn({ type: "enum", enum: Status, default: "draft" }) - status: Status + @DbAwareColumn({ type: "enum", enum: ProductStatus, default: "draft" }) + status: ProductStatus @ManyToMany(() => Image, { cascade: ["insert"] }) @JoinTable({ @@ -63,15 +63,22 @@ export class Product extends SoftDeletableEntity { }) images: Image[] - @Column({ nullable: true }) - thumbnail: string + @Column({ type: "text", nullable: true }) + thumbnail: string | null - @OneToMany(() => ProductOption, (productOption) => productOption.product) + @OneToMany( + () 
=> ProductOption, + (productOption) => productOption.product + ) options: ProductOption[] - @OneToMany(() => ProductVariant, (variant) => variant.product, { - cascade: true, - }) + @OneToMany( + () => ProductVariant, + (variant) => variant.product, + { + cascade: true, + } + ) variants: ProductVariant[] @Index() @@ -83,38 +90,38 @@ export class Product extends SoftDeletableEntity { profile: ShippingProfile @Column({ type: "int", nullable: true }) - weight: number + weight: number | null @Column({ type: "int", nullable: true }) - length: number + length: number | null @Column({ type: "int", nullable: true }) - height: number + height: number | null @Column({ type: "int", nullable: true }) - width: number + width: number | null - @Column({ nullable: true }) - hs_code: string + @Column({ type: "text", nullable: true }) + hs_code: string | null - @Column({ nullable: true }) - origin_country: string + @Column({ type: "text", nullable: true }) + origin_country: string | null - @Column({ nullable: true }) - mid_code: string + @Column({ type: "text", nullable: true }) + mid_code: string | null - @Column({ nullable: true }) - material: string + @Column({ type: "text", nullable: true }) + material: string | null - @Column({ nullable: true }) + @Column({ type: "text", nullable: true }) collection_id: string | null @ManyToOne(() => ProductCollection) @JoinColumn({ name: "collection_id" }) collection: ProductCollection - @Column({ nullable: true }) - type_id: string + @Column({ type: "text", nullable: true }) + type_id: string | null @ManyToOne(() => ProductType) @JoinColumn({ name: "type_id" }) @@ -137,11 +144,11 @@ export class Product extends SoftDeletableEntity { @Column({ default: true }) discountable: boolean - @Column({ nullable: true }) - external_id: string + @Column({ type: "text", nullable: true }) + external_id: string | null @DbAwareColumn({ type: "jsonb", nullable: true }) - metadata: Record + metadata: Record | null @BeforeInsert() private beforeInsert(): void { diff --git a/packages/medusa/src/models/shipping-method.ts b/packages/medusa/src/models/shipping-method.ts index 7ea26c4888..698a0223b9 100644 --- a/packages/medusa/src/models/shipping-method.ts +++ b/packages/medusa/src/models/shipping-method.ts @@ -44,7 +44,7 @@ export class ShippingMethod { @Index() @Column({ nullable: true }) - claim_order_id: string + claim_order_id: string | null @ManyToOne(() => ClaimOrder) @JoinColumn({ name: "claim_order_id" }) diff --git a/packages/medusa/src/models/shipping-option.ts b/packages/medusa/src/models/shipping-option.ts index 989ad22dbd..d42ffd2861 100644 --- a/packages/medusa/src/models/shipping-option.ts +++ b/packages/medusa/src/models/shipping-option.ts @@ -56,7 +56,7 @@ export class ShippingOption extends SoftDeletableEntity { price_type: ShippingOptionPriceType @Column({ type: "int", nullable: true }) - amount: number + amount: number | null @Column({ default: false }) is_return: boolean diff --git a/packages/medusa/src/repositories/customer.ts b/packages/medusa/src/repositories/customer.ts index 9e87b43d4c..3bf79e0262 100644 --- a/packages/medusa/src/repositories/customer.ts +++ b/packages/medusa/src/repositories/customer.ts @@ -1,23 +1,46 @@ -import { EntityRepository, Repository } from "typeorm" +import { Brackets, EntityRepository, ILike, Repository } from "typeorm" import { Customer } from "../models/customer" +import { ExtendedFindConfig, Selector } from "../types/common" @EntityRepository(Customer) export class CustomerRepository extends Repository { - async listAndCount(query, 
groups): Promise<[Customer[], number]> { - let qb = this.createQueryBuilder("customer") - .where(query.where) + async listAndCount( + query: ExtendedFindConfig>, + q: string | undefined = undefined + ): Promise<[Customer[], number]> { + const groups = query.where.groups as { value: string[] } + delete query.where.groups + + const qb = this.createQueryBuilder("customer") .skip(query.skip) .take(query.take) + if (q) { + delete query.where.email + delete query.where.first_name + delete query.where.last_name + + qb.where( + new Brackets((qb) => { + qb.where({ email: ILike(`%${q}%`) }) + .orWhere({ first_name: ILike(`%${q}%`) }) + .orWhere({ last_name: ILike(`%${q}%`) }) + }) + ) + } + + qb.andWhere(query.where) + if (groups) { - qb = qb - .leftJoinAndSelect("customer.groups", "group") - .andWhere(`group.id IN (:...ids)`, { ids: groups.value }) + qb.leftJoinAndSelect("customer.groups", "group").andWhere( + `group.id IN (:...ids)`, + { ids: groups.value } + ) } if (query.relations?.length) { query.relations.forEach((rel) => { - qb = qb.leftJoinAndSelect(`customer.${rel}`, rel) + qb.leftJoinAndSelect(`customer.${rel}`, rel) }) } diff --git a/packages/medusa/src/repositories/gift-card.ts b/packages/medusa/src/repositories/gift-card.ts index f3ebcf9afa..48e2a6cafb 100644 --- a/packages/medusa/src/repositories/gift-card.ts +++ b/packages/medusa/src/repositories/gift-card.ts @@ -1,21 +1,32 @@ -import { flatten, groupBy, map, merge } from "lodash" -import { EntityRepository, FindManyOptions, Repository } from "typeorm" +import { flatten, groupBy, merge } from "lodash" +import { + Brackets, + EntityRepository, + FindManyOptions, + Repository, +} from "typeorm" import { GiftCard } from "../models/gift-card" +import { ExtendedFindConfig, QuerySelector, Writable } from "../types/common" @EntityRepository(GiftCard) export class GiftCardRepository extends Repository { public async findWithRelations( - relations: Array = [], - idsOrOptionsWithoutRelations: Omit< - FindManyOptions, - "relations" - > = {} - ): Promise { - let entities + relations: (keyof GiftCard | string)[] = [], + idsOrOptionsWithoutRelations: + | Omit, "relations"> + | string[] = {} + ): Promise<[GiftCard[], number]> { + let entities: GiftCard[] = [] + let count = 0 if (Array.isArray(idsOrOptionsWithoutRelations)) { entities = await this.findByIds(idsOrOptionsWithoutRelations) + count = idsOrOptionsWithoutRelations.length } else { - entities = await this.find(idsOrOptionsWithoutRelations) + const [results, resultCount] = await this.findAndCount( + idsOrOptionsWithoutRelations + ) + entities = results + count = resultCount } const entitiesIds = entities.map(({ id }) => id) @@ -40,9 +51,79 @@ export class GiftCardRepository extends Repository { const entitiesAndRelations = entitiesIdsWithRelations.concat(entities) const entitiesAndRelationsById = groupBy(entitiesAndRelations, "id") - return map(entitiesAndRelationsById, entityAndRelations => - merge({}, ...entityAndRelations) + return [ + Object.values(entitiesAndRelationsById).map((v) => merge({}, ...v)), + count, + ] + } + + protected async queryGiftCards( + q: string, + where: Partial>>, + rels: (keyof GiftCard | string)[], + shouldCount = false + ): Promise<[GiftCard[], number]> { + const qb = this.createQueryBuilder("gift_card") + .leftJoinAndSelect("gift_card.order", "order") + .select(["gift_card.id"]) + .where(where) + .andWhere( + new Brackets((qb) => { + return qb + .where(`gift_card.code ILIKE :q`, { q: `%${q}%` }) + .orWhere(`display_id::varchar(255) ILIKE :dId`, { dId: `${q}` }) 
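Both CustomerRepository.listAndCount and GiftCardRepository.queryGiftCards wrap their free-text conditions in a Brackets group, so the OR'ed ILIKE matches stay isolated from the rest of the WHERE clause. A minimal sketch of that pattern with illustrative field names:

    import { Brackets, ILike, SelectQueryBuilder } from "typeorm"

    // Appends an OR-grouped, case-insensitive match on a few columns while keeping
    // previously added conditions AND'ed against the whole group.
    function applyFreeTextSearch<T>(
      qb: SelectQueryBuilder<T>,
      q: string
    ): SelectQueryBuilder<T> {
      return qb.andWhere(
        new Brackets((inner) => {
          inner
            .where({ email: ILike(`%${q}%`) })
            .orWhere({ first_name: ILike(`%${q}%`) })
            .orWhere({ last_name: ILike(`%${q}%`) })
        })
      )
    }

    // e.g. applyFreeTextSearch(repo.createQueryBuilder("customer"), "ada")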
+ }) + ) + + let raw: GiftCard[] = [] + let count = 0 + if (shouldCount) { + const [results, resultCount] = await qb.getManyAndCount() + raw = results + count = resultCount + } else { + raw = await qb.getMany() + } + + const [results] = await this.findWithRelations( + rels, + raw.map((i) => i.id) ) + + return [results, count] + } + + public async listGiftCardsAndCount( + inputQuery: ExtendedFindConfig>, + rels: (keyof GiftCard | string)[] = [], + q?: string + ): Promise<[GiftCard[], number]> { + const query = { ...inputQuery } + + if (q) { + const where = query.where + delete where.id + + return await this.queryGiftCards(q, where, rels, true) + } + return await this.findWithRelations(rels, query) + } + + public async listGiftCards( + query: ExtendedFindConfig>, + rels: (keyof GiftCard | string)[] = [], + q?: string + ): Promise { + if (q) { + const where = query.where + delete where.id + + const [result] = await this.queryGiftCards(q, where, rels) + return result + } + + const [results] = await this.findWithRelations(rels, query) + return results } public async findOneWithRelations( @@ -52,7 +133,7 @@ export class GiftCardRepository extends Repository { // Limit 1 optionsWithoutRelations.take = 1 - const result = await this.findWithRelations( + const [result] = await this.findWithRelations( relations, optionsWithoutRelations ) diff --git a/packages/medusa/src/repositories/image.ts b/packages/medusa/src/repositories/image.ts index c8799ca37d..fac2aad568 100644 --- a/packages/medusa/src/repositories/image.ts +++ b/packages/medusa/src/repositories/image.ts @@ -1,5 +1,31 @@ -import { EntityRepository, Repository } from "typeorm" +import { EntityRepository, In, Repository } from "typeorm" import { Image } from "../models/image" @EntityRepository(Image) -export class ImageRepository extends Repository {} +export class ImageRepository extends Repository { + public async upsertImages(imageUrls: string[]) { + const existingImages = await this.find({ + where: { + url: In(imageUrls), + }, + }) + const existingImagesMap = new Map( + existingImages.map<[string, Image]>((img) => [img.url, img]) + ) + + const upsertedImgs: Image[] = [] + + for (const url of imageUrls) { + const aImg = existingImagesMap.get(url) + if (aImg) { + upsertedImgs.push(aImg) + } else { + const newImg = this.create({ url }) + const savedImg = await this.save(newImg) + upsertedImgs.push(savedImg) + } + } + + return upsertedImgs + } +} diff --git a/packages/medusa/src/repositories/price-list.ts b/packages/medusa/src/repositories/price-list.ts index 17badaef2d..38012f9fc2 100644 --- a/packages/medusa/src/repositories/price-list.ts +++ b/packages/medusa/src/repositories/price-list.ts @@ -11,7 +11,10 @@ import { CustomFindOptions, ExtendedFindConfig } from "../types/common" import { CustomerGroup } from "../models" import { FilterablePriceListProps } from "../types/price-list" -export type PriceListFindOptions = CustomFindOptions +export type PriceListFindOptions = CustomFindOptions< + PriceList, + "status" | "type" +> @EntityRepository(PriceList) export class PriceListRepository extends Repository { @@ -108,10 +111,10 @@ export class PriceListRepository extends Repository { .take(query.take) if (groups) { - qb.leftJoinAndSelect("price_list.customer_groups", "group").andWhere( - "group.id IN (:...ids)", - { ids: groups.value } - ) + qb.leftJoinAndSelect( + "price_list.customer_groups", + "group" + ).andWhere("group.id IN (:...ids)", { ids: groups.value }) } if (query.relations?.length) { diff --git 
a/packages/medusa/src/repositories/product-tag.ts b/packages/medusa/src/repositories/product-tag.ts index 0d937cfb5b..f2a5412578 100644 --- a/packages/medusa/src/repositories/product-tag.ts +++ b/packages/medusa/src/repositories/product-tag.ts @@ -1,5 +1,50 @@ -import { EntityRepository, Repository } from "typeorm" +import { EntityRepository, In, Repository } from "typeorm" import { ProductTag } from "../models/product-tag" +type UpsertTagsInput = (Partial & { + value: string +})[] + @EntityRepository(ProductTag) -export class ProductTagRepository extends Repository {} +export class ProductTagRepository extends Repository { + public async listTagsByUsage(count = 10): Promise { + return await this.query( + ` + SELECT id, COUNT(pts.product_tag_id) as usage_count, pt.value + FROM product_tag pt + LEFT JOIN product_tags pts ON pt.id = pts.product_tag_id + GROUP BY id + ORDER BY usage_count DESC + LIMIT $1 + `, + [count] + ) + } + + public async upsertTags(tags: UpsertTagsInput): Promise { + const tagsValues = tags.map((tag) => tag.value) + const existingTags = await this.find({ + where: { + value: In(tagsValues), + }, + }) + const existingTagsMap = new Map( + existingTags.map<[string, ProductTag]>((tag) => [tag.value, tag]) + ) + + const upsertedTags: ProductTag[] = [] + + for (const tag of tags) { + const aTag = existingTagsMap.get(tag.value) + if (aTag) { + upsertedTags.push(aTag) + } else { + const newTag = this.create(tag) + const savedTag = await this.save(newTag) + upsertedTags.push(savedTag) + } + } + + return upsertedTags + } +} diff --git a/packages/medusa/src/repositories/product-type.ts b/packages/medusa/src/repositories/product-type.ts index 1510eef480..eb1e9391ea 100644 --- a/packages/medusa/src/repositories/product-type.ts +++ b/packages/medusa/src/repositories/product-type.ts @@ -1,5 +1,29 @@ import { EntityRepository, Repository } from "typeorm" import { ProductType } from "../models/product-type" +type UpsertTypeInput = Partial & { + value: string +} @EntityRepository(ProductType) -export class ProductTypeRepository extends Repository {} +export class ProductTypeRepository extends Repository { + async upsertType(type?: UpsertTypeInput): Promise { + if (!type) { + return null + } + + const existing = await this.findOne({ + where: { value: type.value }, + }) + + if (existing) { + return existing + } + + const created = this.create({ + value: type.value, + }) + const result = await this.save(created) + + return result + } +} diff --git a/packages/medusa/src/repositories/product-variant.ts b/packages/medusa/src/repositories/product-variant.ts index 117881bd1c..c1109b4109 100644 --- a/packages/medusa/src/repositories/product-variant.ts +++ b/packages/medusa/src/repositories/product-variant.ts @@ -82,8 +82,7 @@ export class ProductVariantRepository extends Repository { ): Promise { const entitiesIdsWithRelations = await Promise.all( Object.entries(groupedRelations).map(([toplevel, rels]) => { - let querybuilder = this.createQueryBuilder("pv") - querybuilder = querybuilder.leftJoinAndSelect( + let querybuilder = this.createQueryBuilder("pv").leftJoinAndSelect( `pv.${toplevel}`, toplevel ) @@ -94,20 +93,20 @@ export class ProductVariantRepository extends Repository { continue } // Regex matches all '.' 
except the rightmost - querybuilder = querybuilder.leftJoinAndSelect( + querybuilder.leftJoinAndSelect( rel.replace(/\.(?=[^.]*\.)/g, "__"), rel.replace(".", "__") ) } if (withDeleted) { - querybuilder = querybuilder + querybuilder .where("pv.id IN (:...entitiesIds)", { entitiesIds: entityIds, }) .withDeleted() } else { - querybuilder = querybuilder.where( + querybuilder.where( "pv.deleted_at IS NULL AND pv.id IN (:...entitiesIds)", { entitiesIds: entityIds, diff --git a/packages/medusa/src/repositories/product.ts b/packages/medusa/src/repositories/product.ts index 348ac0aa5d..6935e01d35 100644 --- a/packages/medusa/src/repositories/product.ts +++ b/packages/medusa/src/repositories/product.ts @@ -1,31 +1,33 @@ import { flatten, groupBy, map, merge } from "lodash" import { + Brackets, EntityRepository, - FindManyOptions, FindOperator, In, - OrderByCondition, Repository, } from "typeorm" -import { ProductTag } from ".." -import { Product } from "../models/product" import { PriceList } from "../models/price-list" +import { Product } from "../models/product" +import { + ExtendedFindConfig, + Selector, + WithRequiredProperty, +} from "../types/common" -type DefaultWithoutRelations = Omit, "relations"> - -type CustomOptions = { - select?: DefaultWithoutRelations["select"] - where?: DefaultWithoutRelations["where"] & { - tags?: FindOperator - price_list_id?: FindOperator - } - order?: OrderByCondition - skip?: number - take?: number - withDeleted?: boolean +export type ProductSelector = Omit, "tags"> & { + tags: FindOperator } -type FindWithRelationsOptions = CustomOptions +export type DefaultWithoutRelations = Omit< + ExtendedFindConfig, + "relations" +> + +export type FindWithoutRelationsOptions = DefaultWithoutRelations & { + where: DefaultWithoutRelations["where"] & { + price_list_id?: FindOperator + } +} @EntityRepository(Product) export class ProductRepository extends Repository { @@ -39,7 +41,7 @@ export class ProductRepository extends Repository { } private async queryProducts( - optionsWithoutRelations: FindWithRelationsOptions, + optionsWithoutRelations: FindWithoutRelationsOptions, shouldCount = false ): Promise<[Product[], number]> { const tags = optionsWithoutRelations?.where?.tags @@ -103,7 +105,9 @@ export class ProductRepository extends Repository { return [entities, count] } - private getGroupedRelations(relations: Array): { + private getGroupedRelations( + relations: string[] + ): { [toplevel: string]: string[] } { const groupedRelations: { [toplevel: string]: string[] } = {} @@ -185,8 +189,8 @@ export class ProductRepository extends Repository { } public async findWithRelationsAndCount( - relations: Array = [], - idsOrOptionsWithoutRelations: FindWithRelationsOptions = { where: {} } + relations: string[] = [], + idsOrOptionsWithoutRelations: FindWithoutRelationsOptions = { where: {} } ): Promise<[Product[], number]> { let count: number let entities: Product[] @@ -227,15 +231,18 @@ export class ProductRepository extends Repository { ) const entitiesAndRelations = entitiesIdsWithRelations.concat(entities) - const entitiesToReturn = - this.mergeEntitiesWithRelations(entitiesAndRelations) + const entitiesToReturn = this.mergeEntitiesWithRelations( + entitiesAndRelations + ) return [entitiesToReturn, count] } public async findWithRelations( - relations: Array = [], - idsOrOptionsWithoutRelations: FindWithRelationsOptions = {}, + relations: string[] = [], + idsOrOptionsWithoutRelations: FindWithoutRelationsOptions | string[] = { + where: {}, + }, withDeleted = false ): Promise { let 
entities: Product[] @@ -257,7 +264,10 @@ export class ProductRepository extends Repository { return [] } - if (relations.length === 0) { + if ( + relations.length === 0 && + !Array.isArray(idsOrOptionsWithoutRelations) + ) { return await this.findByIds(entitiesIds, idsOrOptionsWithoutRelations) } @@ -269,15 +279,16 @@ export class ProductRepository extends Repository { ) const entitiesAndRelations = entitiesIdsWithRelations.concat(entities) - const entitiesToReturn = - this.mergeEntitiesWithRelations(entitiesAndRelations) + const entitiesToReturn = this.mergeEntitiesWithRelations( + entitiesAndRelations + ) return entitiesToReturn } public async findOneWithRelations( - relations: Array = [], - optionsWithoutRelations: FindWithRelationsOptions = { where: {} } + relations: string[] = [], + optionsWithoutRelations: FindWithoutRelationsOptions = { where: {} } ): Promise { // Limit 1 optionsWithoutRelations.take = 1 @@ -314,4 +325,60 @@ export class ProductRepository extends Repository { return this.findByIds(productIds) } + + public async getFreeTextSearchResultsAndCount( + q: string, + options: FindWithoutRelationsOptions = { where: {} }, + relations: string[] = [] + ): Promise<[Product[], number]> { + const cleanedOptions = this._cleanOptions(options) + + let qb = this.createQueryBuilder("product") + .leftJoinAndSelect("product.variants", "variant") + .leftJoinAndSelect("product.collection", "collection") + .select(["product.id"]) + .where(cleanedOptions.where) + .andWhere( + new Brackets((qb) => { + qb.where(`product.description ILIKE :q`, { q: `%${q}%` }) + .orWhere(`product.title ILIKE :q`, { q: `%${q}%` }) + .orWhere(`variant.title ILIKE :q`, { q: `%${q}%` }) + .orWhere(`variant.sku ILIKE :q`, { q: `%${q}%` }) + .orWhere(`collection.title ILIKE :q`, { q: `%${q}%` }) + }) + ) + .skip(cleanedOptions.skip) + .take(cleanedOptions.take) + + if (cleanedOptions.withDeleted) { + qb = qb.withDeleted() + } + + const [results, count] = await qb.getManyAndCount() + + const products = await this.findWithRelations( + relations, + results.map((r) => r.id), + cleanedOptions.withDeleted + ) + + return [products, count] + } + + private _cleanOptions( + options: FindWithoutRelationsOptions + ): WithRequiredProperty { + const where = options.where ?? 
{} + if ("description" in where) { + delete where.description + } + if ("title" in where) { + delete where.title + } + + return { + ...options, + where, + } + } } diff --git a/packages/medusa/src/repositories/tax-rate.ts b/packages/medusa/src/repositories/tax-rate.ts index 72a897d759..4536a93661 100644 --- a/packages/medusa/src/repositories/tax-rate.ts +++ b/packages/medusa/src/repositories/tax-rate.ts @@ -254,21 +254,11 @@ export class TaxRateRepository extends Repository { return unionBy(...results, (txr) => txr.id) } - async listByShippingOption(optionId: string, config: TaxRateListByConfig) { + async listByShippingOption(optionId: string) { let rates = this.createQueryBuilder("txr") .leftJoin(ShippingTaxRate, "ptr", "ptr.rate_id = txr.id") - .leftJoin( - ShippingMethod, - "sm", - "sm.shipping_option_id = ptr.shipping_option_id" - ) - .where("sm.shipping_option_id = :optionId", { optionId }) + .where("ptr.shipping_option_id = :optionId", { optionId }) - if (typeof config.region_id !== "undefined") { - rates.andWhere("txr.region_id = :regionId", { - regionId: config.region_id, - }) - } return await rates.getMany() } } diff --git a/packages/medusa/src/scripts/mongo-sql-migration.js b/packages/medusa/src/scripts/mongo-sql-migration.js deleted file mode 100644 index 9f7d6f90d0..0000000000 --- a/packages/medusa/src/scripts/mongo-sql-migration.js +++ /dev/null @@ -1,1098 +0,0 @@ -#!/usr/bin/env node - -import path from "path" -import glob from "glob" -import mongo from "mongodb" -import chalk from "chalk" -import { QueryRunner, In, createConnection } from "typeorm" - -import { getConfigFile, createRequireFromPath } from "medusa-core-utils" - -import { MoneyAmount } from "../models/money-amount" -import { Country } from "../models/country" -import { Currency } from "../models/currency" -import { Discount } from "../models/discount" -import { Customer } from "../models/customer" -import { Order } from "../models/order" -import { LineItem } from "../models/line-item" -import { Fulfillment } from "../models/fulfillment" -import { FulfillmentItem } from "../models/fulfillment-item" -import { ReturnItem } from "../models/return-item" -import { FulfillmentProvider } from "../models/fulfillment-provider" -import { PaymentProvider } from "../models/payment-provider" -import { Payment } from "../models/payment" -import { Swap } from "../models/swap" -import { GiftCard } from "../models/gift-card" -import { Region } from "../models/region" -import { Refund } from "../models/refund" -import { Return } from "../models/return" -import { Address } from "../models/address" -import { ProductVariant } from "../models/product-variant" -import { ShippingMethod } from "../models/shipping-method" -import { ShippingOption } from "../models/shipping-option" -import { ShippingProfile } from "../models/shipping-profile" -import { DiscountRule } from "../models/discount-rule" -import { Store } from "../models/store" -import { ProductOption } from "../models/product-option" -import { ProductOptionValue } from "../models/product-option-value" -import { ShippingOptionRequirement } from "../models/shipping-option-requirement" - -import { RegionRepository } from "../repositories/region" -import { DiscountRepository } from "../repositories/discount" -import { GiftCardRepository } from "../repositories/gift-card" -import { ShippingProfileRepository } from "../repositories/shipping-profile" -import { ShippingOptionRepository } from "../repositories/shipping-option" -import { ProductRepository } from "../repositories/product" 
-import { ProductVariantRepository } from "../repositories/product-variant" - -/** - * Migrate store - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateStore = async (mongodb, queryRunner) => { - const dcol = mongodb.collection("stores") - - const dcur = dcol.find({}) - const stores = await dcur.toArray() - - const storeRepo = queryRunner.manager.getRepository(Store) - const currencyRepo = queryRunner.manager.getRepository(Currency) - - for (const d of stores) { - const newly = storeRepo.create({ - name: d.name, - default_currency_code: d.default_currency.toLowerCase(), - currencies: await Promise.all( - d.currencies.map(c => currencyRepo.findOne({ code: c.toLowerCase() })) - ), - swap_link_template: d.swap_link_template, - }) - await storeRepo.save(newly) - } -} - -/** - * Migrates Regions - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateRegions = async (mongodb, queryRunner) => { - const rcol = mongodb.collection("regions") - const regCursor = rcol.find({}) - const regions = await regCursor.toArray() - - const countryRepository = queryRunner.manager.getRepository(Country) - const payRepository = queryRunner.manager.getRepository(PaymentProvider) - const fulRepository = queryRunner.manager.getRepository(FulfillmentProvider) - - const regionRepository = queryRunner.manager.getCustomRepository( - RegionRepository - ) - - for (const reg of regions) { - const countries = await countryRepository.find({ - iso_2: In(reg.countries.map(c => c.toLowerCase())), - }) - - const newRegion = regionRepository.create({ - id: `${reg._id}`, - name: reg.name, - currency_code: reg.currency_code.toLowerCase(), - tax_rate: reg.tax_rate * 100, - tax_code: reg.tax_code, - countries, - }) - - newRegion.payment_providers = [] - for (const pp of reg.payment_providers) { - let exists = await payRepository.findOne({ id: pp }) - if (!exists) { - let newly = payRepository.create({ - id: pp, - is_installed: false, - }) - exists = await payRepository.save(newly) - } - - newRegion.payment_providers.push(exists) - } - - newRegion.fulfillment_providers = [] - for (const pp of reg.fulfillment_providers) { - let exists = await fulRepository.findOne({ id: pp }) - if (!exists) { - let newly = fulRepository.create({ - id: pp, - is_installed: false, - }) - exists = await fulRepository.save(newly) - } - - newRegion.fulfillment_providers.push(exists) - } - - await regionRepository.save(newRegion) - } -} - -/** - * Migrates Shipping Options - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateShippingOptions = async (mongodb, queryRunner) => { - const col = mongodb.collection("shippingoptions") - const cursor = col.find({}) - const options = await cursor.toArray() - - // const rCol = mongodb.collection("regions") - // const rCursor = rCol.find({}) - // const regions = await rCursor.toArray() - - const pCol = mongodb.collection("shippingprofiles") - const pCursor = pCol.find({}) - const profiles = await pCursor.toArray() - - const reqRepo = queryRunner.manager.getRepository(ShippingOptionRequirement) - //const regionRepository = queryRunner.manager.getCustomRepository( - // RegionRepository - //) - const optionRepository = queryRunner.manager.getCustomRepository( - ShippingOptionRepository - ) - const profileRepo = queryRunner.manager.getCustomRepository( - ShippingProfileRepository - ) - - for (const option of options) { - // const mongoReg = regions.find(r => r._id.equals(option.region_id)) - // const region = await 
regionRepository.findOne({ name: mongoReg.name }) - - const mongoProfile = profiles.find(p => p._id.equals(option.profile_id)) - let profile - if (mongoProfile.name === "default_shipping_profile") { - profile = await profileRepo.findOne({ type: "default" }) - } else if ((mongoProfile.name = "default_gift_card_profile")) { - profile = await profileRepo.findOne({ type: "gift_card" }) - } - - const newOption = optionRepository.create({ - id: `${option._id}`, - name: option.name, - region_id: option.region_id, - profile, - provider_id: option.provider_id, - price_type: option.price.type, - amount: Math.round(option.price.amount * 100), - is_return: !!option.is_return, - data: option.data, - requirements: option.requirements.map(r => - reqRepo.create({ - type: r.type, - amount: Math.round(r.value * 100), - }) - ), - }) - await optionRepository.save(newOption) - } -} - -/** - * Migrates products and product variants - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateProducts = async (mongodb, queryRunner) => { - const col = mongodb.collection("products") - const cursor = col.find({}) - const products = await cursor.toArray() - - const variantCol = mongodb.collection("productvariants") - - const maRepo = queryRunner.manager.getRepository(MoneyAmount) - const optValRepo = queryRunner.manager.getRepository(ProductOptionValue) - const optRepo = queryRunner.manager.getRepository(ProductOption) - const varRepo = queryRunner.manager.getCustomRepository( - ProductVariantRepository - ) - const prodRepo = queryRunner.manager.getCustomRepository(ProductRepository) - const profileRepo = queryRunner.manager.getCustomRepository( - ShippingProfileRepository - ) - - const defProf = await profileRepo.findOne({ type: "default" }) - const gcProf = await profileRepo.findOne({ type: "gift_card" }) - - for (const p of products) { - const newOptions = await Promise.all( - p.options.map(o => { - const newO = optRepo.create({ - id: `${o._id}`, - title: o.title, - }) - return optRepo.save(newO) - }) - ) - - const varCur = variantCol.find({ - _id: { $in: p.variants.map(id => new mongo.ObjectID(id)) }, - }) - const mongoVariants = await varCur.toArray() - - const newVariants = await Promise.all( - mongoVariants.map(v => { - const newV = varRepo.create({ - id: `${v._id}`, - title: v.title, - barcode: v.ean, - ean: v.ean, - sku: v.sku, - manage_inventory: v.manage_inventory, - allow_backorder: v.allow_backorder, - inventory_quantity: v.inventory_quantity, - options: v.options.map((o, idx) => { - const opt = newOptions[idx] - return optValRepo.create({ - value: o.value, - option: opt, - }) - }), - prices: v.prices.map(p => - maRepo.create({ - currency_code: p.currency_code.toLowerCase(), - amount: Math.round(p.amount * 100), - }) - ), - hs_code: v.metadata && v.metadata.hs_code, - origin_country: v.metadata && v.metadata.origin_country, - metadata: v.metadata && { - alternative_size: v.metadata.alternative_size, - color: v.metadata.color, - }, - }) - return newV - }) - ) - - const newProd = prodRepo.create({ - id: `${p._id}`, - title: p.title, - tags: p.tags || null, - description: p.description, - handle: p.handle, - is_giftcard: p.is_giftcard, - thumbnail: p.thumbnail, - profile: p.is_giftcard ? 
gcProf : defProf, - options: newOptions, - variants: newVariants, - }) - await prodRepo.save(newProd) - } -} - -const createDiscount = async (mongodb, queryRunner, d) => { - const rcol = mongodb.collection("regions") - - const ruleRepo = queryRunner.manager.getRepository(DiscountRule) - const gcRepo = queryRunner.manager.getCustomRepository(GiftCardRepository) - const discountRepo = queryRunner.manager.getCustomRepository( - DiscountRepository - ) - const regRepo = queryRunner.manager.getCustomRepository(RegionRepository) - - if (d.is_giftcard) { - const rcur = rcol.find({ - _id: mongo.ObjectID(d.regions[0]), - }) - const mongoRegs = await rcur.toArray() - const region = await regRepo.findOne({ name: mongoRegs[0].name }) - - const newD = gcRepo.create({ - id: `${d._id}`, - code: d.code, - is_disabled: d.disabled, - value: !!d.original_amount ? Math.round(d.original_amount * 100) : 0, - balance: Math.round(d.discount_rule.value * 100), - region, - }) - - return gcRepo.save(newD) - } else { - const rcur = rcol.find({ - _id: { $in: d.regions.map(id => mongo.ObjectID(id)) }, - }) - const mongoRegs = await rcur.toArray() - const regions = await regRepo.find({ - id: In(mongoRegs.map(r => `${r._id}`)), - }) - const newD = discountRepo.create({ - id: `${d._id}`, - code: d.code, - is_dynamic: !!d.is_dynamic, - rule: ruleRepo.create({ - description: d.discount_rule.description, - type: d.discount_rule.type, - allocation: d.discount_rule.allocation, - value: - d.discount_rule.type === "percentage" - ? d.discount_rule.value - : Math.round(d.discount_rule.value * 100), - usage_limit: d.discount_rule.usage_limit, - }), - is_disabled: d.disabled, - regions, - }) - - return discountRepo.save(newD) - } -} - -/** - * Migrates discounts - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateDiscounts = async (mongodb, queryRunner) => { - const dcol = mongodb.collection("discounts") - - const dcur = dcol.find({}) - const discounts = await dcur.toArray() - for (const d of discounts) { - await createDiscount(mongodb, queryRunner, d) - } - - await migrateDynamicDiscounts(mongodb, queryRunner) -} - -/** - * Migrates dynamic discounts - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateDynamicDiscounts = async (mongodb, queryRunner) => { - const dcol = mongodb.collection("discounts") - const dyncol = mongodb.collection("dynamicdiscountcodes") - - const dcur = dyncol.find({}) - const dynamicCodes = await dcur.toArray() - - const discountRepo = queryRunner.manager.getCustomRepository( - DiscountRepository - ) - - const mongoV = {} - const visited = {} - - const toSave = [] - for (const d of dynamicCodes) { - let disc - if (mongoV[d.discount_id]) { - disc = mongoV[d.discount_id] - } else { - disc = await dcol.findOne({ _id: mongo.ObjectID(d.discount_id) }) - mongoV[d.discount_id] = disc - } - - let discount - if (visited[disc.code]) { - discount = visited[disc.code] - } else { - const pare = await discountRepo.findOne({ code: disc.code }) - discount = pare - visited[disc.code] = pare - } - - const newD = discountRepo.create({ - code: d.code, - is_dynamic: true, - is_disabled: d.disabled, - parent_discount: discount, - rule_id: discount.rule_id, - }) - - toSave.push(newD) - } - - return discountRepo.save(toSave) -} - -/** - * Migrates customers - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateCustomers = async (mongodb, queryRunner) => { - const col = mongodb.collection("customers") - - const cur = col.find({}) - const 
customers = await cur.toArray() - - const customerRepo = queryRunner.manager.getRepository(Customer) - - const toSave = [] - for (const c of customers) { - if (toSave.find(s => s.email === c.email.toLowerCase())) { - continue - } - - const newC = customerRepo.create({ - id: `${c._id}`, - email: c.email.toLowerCase(), - first_name: c.first_name, - last_name: c.last_name, - phone: c.phone, - has_account: c.has_account, - password_hash: c.password_hash, - metadata: c.metadata, - }) - toSave.push(newC) - } - return customerRepo.save(toSave, { chunk: 1000 }) -} - -/** - * Migrates orders - * @param {MongoDb} mongodb - * @param {QueryRunner} queryRunner - */ -const migrateOrders = async (mongodb, queryRunner) => { - const swapCol = mongodb.collection("swaps") - - const col = mongodb.collection("orders") - - const cur = col.find({}) - const orders = await cur.toArray() - - const customerRepo = queryRunner.manager.getRepository(Customer) - const orderRepo = queryRunner.manager.getRepository(Order) - const lineItemRepo = queryRunner.manager.getRepository(LineItem) - const fulItemRepo = queryRunner.manager.getRepository(FulfillmentItem) - const retItemRepo = queryRunner.manager.getRepository(ReturnItem) - const fulfillmentRepo = queryRunner.manager.getRepository(Fulfillment) - const paymentRepo = queryRunner.manager.getRepository(Payment) - const refundRepo = queryRunner.manager.getRepository(Refund) - const returnRepo = queryRunner.manager.getRepository(Return) - const gcRepo = queryRunner.manager.getRepository(GiftCard) - const swapRepo = queryRunner.manager.getRepository(Swap) - const discountRepo = queryRunner.manager.getRepository(Discount) - const methodRepo = queryRunner.manager.getRepository(ShippingMethod) - const optionRepo = queryRunner.manager.getRepository(ShippingOption) - const addressRepo = queryRunner.manager.getRepository(Address) - const profileRepo = queryRunner.manager.getRepository(ShippingProfile) - const fulProvRepo = queryRunner.manager.getRepository(FulfillmentProvider) - - const paymentsToSave = [] - const refundsToSave = [] - const returnsToSave = [] - const swapsToSave = [] - const shippingMethodsToSave = [] - const lineItemsToSave = [] - const ordersToSave = [] - const giftCardsToSave = [] - const discountsToSave = [] - const fulfillToSave = [] - - for (const o of orders) { - // const mongoreg = regions.find(r => r._id.equals(o.region_id)) - // const region = await regionRepo.findOne({ name: mongoreg.name }) - - /************************************************************************* - * SHIPPING METHODS - *************************************************************************/ - const createShippingMethod = async m => { - let shippingOption = await optionRepo.findOne({ - id: `${m._id}`, - }) - if (!shippingOption) { - const profile = await profileRepo.findOne({ type: "default" }) - let provider = await fulProvRepo.findOne({ id: m.provider_id }) - if (!provider) { - const newly = fulProvRepo.create({ - id: m.provider_id, - is_installed: false, - }) - provider = await fulProvRepo.save(newly) - } - const newly = optionRepo.create({ - name: m.name, - region_id: o.region_id, - profile_id: profile.id, - price_type: "flat_rate", - amount: Math.round(m.price * 100), - data: m.data, - is_return: false, - deleted_at: new Date(), - provider, - }) - shippingOption = await optionRepo.save(newly) - } - - return methodRepo.create({ - order_id: `${o._id}`, - shipping_option_id: shippingOption.id, - price: Math.round(m.price * 100), - data: m.data, - }) - } - - for (const m of 
o.shipping_methods) { - const method = await createShippingMethod(m) - shippingMethodsToSave.push(method) - } - - /************************************************************************* - * CUSTOMER - *************************************************************************/ - let customer = await customerRepo.findOne({ email: o.email.toLowerCase() }) - if (!customer) { - const n = customerRepo.create({ - email: o.email.toLowerCase(), - }) - customer = await customerRepo.save(n) - } - - /************************************************************************* - * LINE ITEMS - *************************************************************************/ - const createLineItem = (li, custom = {}) => { - let fulfilled_quantity = Math.min(li.fulfilled_quantity || 0, li.quantity) - let shipped_quantity = Math.min( - li.fulfilled_quantity || 0, - li.shipped_quantity || 0 - ) - let returned_quantity = Math.min( - li.shipped_quantity || 0, - li.returned_quantity || 0 - ) - - return lineItemRepo.create({ - ...custom, - id: `${li._id}`, - title: li.title, - description: li.description, - quantity: li.quantity, - is_giftcard: !!li.is_giftcard, - should_merge: !!li.should_merge, - allow_discounts: !li.no_discount, - thumbnail: li.thumbnail, - unit_price: Math.round(li.content.unit_price * 100), - variant_id: li.content.variant._id ? `${li.content.variant._id}` : null, - fulfilled_quantity, - shipped_quantity, - returned_quantity, - metadata: li.metadata, - }) - } - - for (const li of o.items) { - const lineitem = createLineItem(li, { - order_id: `${o._id}`, - }) - lineItemsToSave.push(lineitem) - } - - /************************************************************************* - * DISCOUNT - *************************************************************************/ - const giftCards = [] - const discounts = [] - for (const d of o.discounts) { - if (d.is_giftcard) { - let gc = await gcRepo.findOne({ code: d.code }) - if (!gc) { - gc = await createDiscount(mongodb, queryRunner, d) - } - giftCards.push(gc) - } else { - let disc = await discountRepo.findOne({ code: d.code }) - if (!disc) { - disc = await createDiscount(mongodb, queryRunner, d) - } - discounts.push(disc) - } - } - - /************************************************************************* - * ADDREESS - *************************************************************************/ - const address = addressRepo.create({ - customer, - first_name: o.shipping_address.first_name, - last_name: o.shipping_address.last_name, - address_1: o.shipping_address.address_1, - address_2: o.shipping_address.address_2, - city: o.shipping_address.city, - country_code: o.shipping_address.country_code.toLowerCase(), - province: o.shipping_address.province, - postal_code: o.shipping_address.postal_code, - phone: o.shipping_address.phone, - }) - - /************************************************************************* - * CREATE ORDER - *************************************************************************/ - const nOrder = orderRepo.create({ - id: `${o._id}`, - display_id: o.display_id, - tax_rate: o.tax_rate * 100, - currency_code: o.currency_code.toLowerCase(), - email: o.email.toLowerCase(), - status: o.status, - fulfillment_status: o.fulfillment_status, - payment_status: o.payment_status, - shipping_address: address, - billing_address: address, - // shipping_methods: shippingMethods, - // items: lineItems, - gift_cards: giftCards, - region_id: `${o.region_id}`, - customer, - discounts, - created_at: new Date(parseInt(o.created)), - canceled_at: o.status 
=== "canceled" ? new Date() : null, - }) - - ordersToSave.push(nOrder) - //let or = await orderRepo.save(nOrder) - //or.display_id = o.display_id - - /************************************************************************* - * FULFILLMENTS - *************************************************************************/ - const createFulfillment = (f, custom = {}) => { - if (!f || !f._id) { - console.log("found empty") - } - - const items = f.items.map(fi => { - return fulItemRepo.create({ - item_id: `${fi._id}`, - quantity: fi.quantity, - }) - }) - - const toCreate = { - id: `${f._id}`, - ...custom, - items, - provider_id: f.provider_id, - tracking_numbers: f.tracking_numbers, - data: {}, - metadata: f.metadata, - canceled_at: f.is_canceled ? new Date() : null, - shipped_at: f.shipped_at ? new Date(parseInt(f.shipped_at)) : null, - } - - if (!!f.created) { - toCreate.created_at = new Date(parseInt(f.created)) - } - - return fulfillmentRepo.create(toCreate) - } - - for (const f of o.fulfillments) { - if (!f || !f._id) { - continue - } - const ful = createFulfillment(f, { order_id: `${o._id}` }) - fulfillToSave.push(ful) - } - - /************************************************************************* - * REFUNDS - *************************************************************************/ - const refunds = [] - const totalRefund = 0 - for (const r of o.refunds) { - const reason = r.reason || "return" - totalRefund += r.amount - refundsToSave.push( - refundRepo.create({ - order_id: `${o._id}`, - currency_code: o.currency_code.toLowerCase(), - amount: Math.round(r.amount * 100), - reason, - note: r.note, - created_at: new Date(parseInt(r.created)), - }) - ) - } - // or.refunds = refunds - - const createReturn = async (r, custom = {}) => { - const m = r.shipping_method - let method - if (m && m.name) { - let shippingOption = await optionRepo.findOne({ - name: m.name, - region_id: o.region_id, - }) - if (!shippingOption) { - const profile = await profileRepo.findOne({ type: "default" }) - let provider = await fulProvRepo.findOne({ id: m.provider_id }) - if (!provider) { - const newly = fulProvRepo.create({ - id: m.provider_id, - is_installed: false, - }) - provider = await fulProvRepo.save(newly) - } - const newly = optionRepo.create({ - name: m.name, - region_id: o.region_id, - profile_id: profile.id, - price_type: "flat_rate", - amount: Math.round(m.price * 100), - data: m.data, - is_return: true, - deleted_at: new Date(), - provider, - }) - shippingOption = await optionRepo.save(newly) - } - - method = methodRepo.create({ - shipping_option_id: shippingOption.id, - price: Math.round(m.price * 100), - data: m.data, - }) - } - - const items = r.items.map(raw => { - //const ri = o.items.find(i => i._id.equals(raw.item_id)) - //const original = or.items.find( - // li => li.title === ri.title && li.description === ri.description - //) - - return retItemRepo.create({ - item_id: raw.item_id, - quantity: raw.quantity, - requested_quantity: raw.is_requested ? raw.quantity : null, - received_quantity: raw.is_registered ? raw.quantity : null, - }) - }) - - return returnRepo.create({ - id: `${r._id}`, - status: r.status || "received", - ...custom, - refund_amount: Math.round(r.refund_amount * 100), - shipping_method: method, - shipping_data: r.shipping_data, - items, - received_at: r.status === "received" ? 
new Date() : null, - created_at: new Date(parseInt(r.created)), - metadata: r.metadata, - }) - } - - /************************************************************************* - * RETURNS - *************************************************************************/ - for (const r of o.returns) { - if (r.items.length === 0) { - continue - } - - const ret = await createReturn(r, { order_id: `${o._id}` }) - returnsToSave.push(ret) - } - - // or.returns = returns - - /************************************************************************* - * SWAPS - *************************************************************************/ - if (o.swaps) { - const swapCur = swapCol.find({ - _id: { $in: o.swaps.map(i => mongo.ObjectID(i)) }, - }) - const oSwaps = await swapCur.toArray() - if (oSwaps.length) { - // let swaps = [] - for (const s of oSwaps) { - if (!s.return) continue - - for (const li of s.additional_items) { - lineItemsToSave.push(createLineItem(li, { swap_id: `${s._id}` })) - } - - const toCreate = { - id: `${s._id}`, - order_id: `${o._id}`, - fulfillment_status: - s.fulfillment_status === "shipped" ? "shipped" : "not_fulfilled", - payment_status: s.payment_status, - shipping_methods: await Promise.all( - s.shipping_methods.map(createShippingMethod) - ), - created_at: new Date(parseInt(s.created)), - } - - if (s.shipping_address) { - const address = addressRepo.create({ - customer, - first_name: s.shipping_address.first_name, - last_name: s.shipping_address.last_name, - address_1: s.shipping_address.address_1, - address_2: s.shipping_address.address_2, - city: s.shipping_address.city, - country_code: s.shipping_address.country_code.toLowerCase(), - province: s.shipping_address.province, - postal_code: s.shipping_address.postal_code, - phone: s.shipping_address.phone, - }) - toCreate.shipping_address = address - } - - if (s.return) { - returnsToSave.push( - await createReturn(s.return, { - swap_id: `${s._id}`, - }) - ) - } - - if (s.payment_method) { - toCreate.payment = paymentRepo.create({ - amount: - (s.payment_method.data && s.payment_method.data.amount) || 0, - currency_code: s.currency_code.toLowerCase(), - amount_refunded: 0, - provider_id: o.payment_method.provider_id, - data: o.payment_method.data, - canceled_at: o.payment_status === "canceled" ? new Date() : null, - captured_at: - o.payment_status === "captured" || - o.payment_status === "refunded" || - o.payment_status === "partially" - ? new Date() - : null, - }) - } - - if ((s.fulfillments && s.fulfillments.length) > 0) { - for (const f of s.fulfillments) { - if (!f || !f._id) { - continue - } - fulfillToSave.push(createFulfillment(f, { swap_id: `${s._id}` })) - } - } - - const newly = swapRepo.create(toCreate) - swapsToSave.push(newly) - } - - // or.swaps = swaps - } - } - - /************************************************************************* - * PAYMENTS - *************************************************************************/ - const amount = - o.payment_method.provider_id === "stripe" - ? o.payment_method.data.amount - : o.payment_method.data.order_amount || - (o.payment_method.data.amount && - o.payment_method.data.amount.value) || - 0 - paymentsToSave.push( - paymentRepo.create({ - order_id: `${o._id}`, - amount, - currency_code: o.currency_code.toLowerCase(), - amount_refunded: Math.round(totalRefund * 100), - provider_id: o.payment_method.provider_id, - data: o.payment_method.data, - canceled_at: o.payment_status === "canceled" ? 
new Date() : null, - captured_at: - o.payment_status === "captured" || - o.payment_status === "refunded" || - o.payment_status === "partially_refunded" - ? new Date() - : null, - }) - ) - - // await orderRepo.save(or) - - if (o.display_id % 100 === 0) { - console.log(o.display_id) - } - } - - const newOs = await orderRepo.save(ordersToSave, { chunk: 1000 }) - await swapRepo.save(swapsToSave, { chunk: 1000 }) - await lineItemRepo.save(lineItemsToSave, { chunk: 1000 }) - await methodRepo.save(shippingMethodsToSave, { chunk: 1000 }) - await refundRepo.save(refundsToSave, { chunk: 1000 }) - await returnRepo.save(returnsToSave, { chunk: 1000 }) - await gcRepo.save(giftCardsToSave, { chunk: 1000 }) - console.log("done with gcs") - await discountRepo.save(discountsToSave, { chunk: 1000 }) - console.log("done with discounts") - await fulfillmentRepo.save(fulfillToSave, { chunk: 1000 }) - - for (const o of orders) { - await queryRunner.query(`UPDATE "order" SET display_id=$1 WHERE id=$2`, [ - o.display_id, - `${o._id}`, - ]) - } - - const last = orders[orders.length - 1] - await queryRunner.query( - `ALTER SEQUENCE order_display_id_seq RESTART WITH ${parseInt( - last.display_id - ) + 1}` - ) -} - -const migrate = async () => { - const root = path.resolve(".") - const { configModule } = getConfigFile(root, "medusa-config") - const { - mongo_url, - database_type, - database_url, - database_extra, - } = configModule.projectConfig - - if (!mongo_url) { - throw new Error( - "Cannot run migration script without a mongo_url in medusa-config" - ) - } - - if (!database_type || !database_url) { - throw new Error( - "Cannot run migration script without a database_type and database_url in medusa-config" - ) - } - - const mPath = path.resolve(__dirname, "../models") - - console.log(chalk.blue("MONGO:"), "Connecting to ", mongo_url) - const client = await mongo.MongoClient.connect(mongo_url, { - useNewUrlParser: true, - useUnifiedTopology: true, - }) - - const db = client.db(client.dbName) - console.log(chalk.green("MONGO:"), "Connecting created") - - console.log(chalk.blue("SQL:"), "Connecting to ", database_url) - const sqlConnection = await createConnection({ - type: database_type, - url: database_url, - extra: database_extra || {}, - entities: [`${mPath}/*.js`], - // logging: true, - }) - const queryRunner = sqlConnection.createQueryRunner() - await queryRunner.connect() - await queryRunner.startTransaction() - console.log(chalk.green("SQL:"), "Connecting created") - - let error - try { - await queryRunner.query( - "UPDATE country SET region_id=NULL WHERE iso_2 IS NOT NULL" - ) - await queryRunner.query(`DELETE FROM store WHERE id IS NOT NULL`) - await queryRunner.query( - `DELETE FROM return_item WHERE return_id IS NOT NULL` - ) - await queryRunner.query( - `DELETE FROM fulfillment_item WHERE fulfillment_id IS NOT NULL` - ) - await queryRunner.query(`DELETE FROM line_item WHERE id IS NOT NULL`) - await queryRunner.query("DELETE FROM gift_card WHERE code IS NOT NULL") - await queryRunner.query("DELETE FROM discount WHERE code IS NOT NULL") - await queryRunner.query("DELETE FROM discount_rule WHERE type IS NOT NULL") - await queryRunner.query( - "DELETE FROM money_amount WHERE currency_code IS NOT NULL" - ) - await queryRunner.query( - `DELETE FROM product_option_value WHERE value IS NOT NULL` - ) - await queryRunner.query( - `DELETE FROM product_option WHERE title IS NOT NULL` - ) - await queryRunner.query( - `DELETE FROM product_variant WHERE title IS NOT NULL` - ) - await queryRunner.query(`DELETE FROM 
product WHERE title is NOT NULL`) - await queryRunner.query( - `DELETE FROM shipping_option_requirement WHERE id IS NOT NULL` - ) - await queryRunner.query(`DELETE FROM shipping_method WHERE id IS NOT NULL`) - await queryRunner.query( - `DELETE FROM shipping_option WHERE name IS NOT NULL` - ) - await queryRunner.query( - `DELETE FROM order_discounts WHERE order_id IS NOT NULL` - ) - await queryRunner.query(`DELETE FROM payment WHERE id IS NOT NULL`) - await queryRunner.query(`DELETE FROM fulfillment WHERE id IS NOT NULL`) - await queryRunner.query(`DELETE FROM return WHERE id IS NOT NULL`) - await queryRunner.query(`DELETE FROM swap WHERE id IS NOT NULL`) - await queryRunner.query(`DELETE FROM refund WHERE id IS NOT NULL`) - await queryRunner.query(`DELETE FROM "order" WHERE id IS NOT NULL`) - await queryRunner.query(`DELETE FROM address WHERE id IS NOT NULL`) - await queryRunner.query(`DELETE FROM region WHERE name IS NOT NULL`) - await queryRunner.query(`DELETE FROM customer WHERE email IS NOT NULL`) - - await migrateStore(db, queryRunner).then(() => { - console.log(chalk.green("SUCCESS: "), "Store migrated") - }) - - await migrateRegions(db, queryRunner).then(() => { - console.log(chalk.green("SUCCESS: "), "Regions migrated") - }) - - await migrateShippingOptions(db, queryRunner).then(() => { - console.log(chalk.green("SUCCESS: "), "Shipping Options Migrated") - }) - - await migrateProducts(db, queryRunner).then(() => { - console.log(chalk.green("SUCCESS: "), "Products migrated") - }) - - await migrateDiscounts(db, queryRunner).then(() => { - console.log(chalk.green("SUCCESS: "), "Discounts migrated") - }) - - await migrateCustomers(db, queryRunner).then(() => { - console.log(chalk.green("SUCCESS: "), "Customers migrated") - }) - - await migrateOrders(db, queryRunner).then(() => { - console.log(chalk.green("SUCCESS: "), "Orders migrated") - }) - - await queryRunner.commitTransaction() - } catch (err) { - await queryRunner.rollbackTransaction() - error = err - } finally { - await queryRunner.release() - } - - if (error) { - throw error - } -} - -migrate() - .then(() => { - console.log("Migration complete") - process.exit() - }) - .catch(err => { - console.log(err) - process.exit(1) - }) diff --git a/packages/medusa/src/services/__mocks__/customer.js b/packages/medusa/src/services/__mocks__/customer.js index d324cb8968..067c0d97dc 100644 --- a/packages/medusa/src/services/__mocks__/customer.js +++ b/packages/medusa/src/services/__mocks__/customer.js @@ -2,6 +2,9 @@ import { IdMap } from "medusa-test-utils" import Scrypt from "scrypt-kdf" export const CustomerServiceMock = { + withTransaction: function () { + return this + }, create: jest.fn().mockImplementation((data) => { return Promise.resolve({ ...data, id: IdMap.getId("lebron") }) }), diff --git a/packages/medusa/src/services/__mocks__/pricing.js b/packages/medusa/src/services/__mocks__/pricing.js new file mode 100644 index 0000000000..b16e0e3812 --- /dev/null +++ b/packages/medusa/src/services/__mocks__/pricing.js @@ -0,0 +1,20 @@ +export const PricingServiceMock = { + withTransaction: function () { + return this + }, + setProductPrices: jest.fn().mockImplementation((prod) => { + return Promise.resolve(prod) + }), + setVariantPrices: jest.fn().mockImplementation((variant) => { + return Promise.resolve(variant) + }), + setShippingOptionPrices: jest.fn().mockImplementation((opts) => { + return Promise.resolve(opts) + }), +} + +const mock = jest.fn().mockImplementation(() => { + return PricingServiceMock +}) + +export default mock diff 
--git a/packages/medusa/src/services/__mocks__/user.js b/packages/medusa/src/services/__mocks__/user.js index 968e714551..192fb3d9fb 100644 --- a/packages/medusa/src/services/__mocks__/user.js +++ b/packages/medusa/src/services/__mocks__/user.js @@ -26,6 +26,9 @@ export const users = { } export const UserServiceMock = { + withTransaction: function () { + return this + }, create: jest.fn().mockImplementation(data => { if (data.email === "oliver@test.dk") { return Promise.resolve(users.testUser) diff --git a/packages/medusa/src/services/__tests__/auth.js b/packages/medusa/src/services/__tests__/auth.js index a6d1b6f212..5b39e33a37 100644 --- a/packages/medusa/src/services/__tests__/auth.js +++ b/packages/medusa/src/services/__tests__/auth.js @@ -1,11 +1,18 @@ import AuthService from "../auth" +import { MockManager } from "medusa-test-utils" import { users, UserServiceMock } from "../__mocks__/user" -import { customers, CustomerServiceMock } from "../__mocks__/customer" +import { CustomerServiceMock } from "../__mocks__/customer" + +const managerMock = MockManager describe("AuthService", () => { + const authService = new AuthService({ + manager: managerMock, + userService: UserServiceMock, + customerService: CustomerServiceMock + }) + describe("authenticate", () => { - let authService - authService = new AuthService({ userService: UserServiceMock }) beforeEach(() => { jest.clearAllMocks() }) @@ -33,8 +40,6 @@ describe("AuthService", () => { }) describe("authenticateCustomer", () => { - let authService - authService = new AuthService({ customerService: CustomerServiceMock }) beforeEach(() => { jest.clearAllMocks() }) @@ -62,8 +67,6 @@ describe("AuthService", () => { }) describe("authenticateAPIToken", () => { - let authService - authService = new AuthService({ userService: UserServiceMock }) beforeEach(() => { jest.clearAllMocks() }) diff --git a/packages/medusa/src/services/__tests__/batch-job.ts b/packages/medusa/src/services/__tests__/batch-job.ts new file mode 100644 index 0000000000..3c7b41a3f6 --- /dev/null +++ b/packages/medusa/src/services/__tests__/batch-job.ts @@ -0,0 +1,199 @@ +import { IdMap, MockManager, MockRepository } from "medusa-test-utils" +import BatchJobService from "../batch-job" +import { EventBusService } from "../index" +import { BatchJobStatus } from "../../types/batch-job" +import { BatchJob } from "../../models" + +const eventBusServiceMock = { + emit: jest.fn(), + withTransaction: function() { + return this + }, +} as unknown as EventBusService +const batchJobRepositoryMock = MockRepository({ + create: jest.fn().mockImplementation((data) => { + return Object.assign(new BatchJob(), data) + }) +}) + +describe('BatchJobService', () => { + const batchJobId_1 = IdMap.getId("batchJob_1") + const batchJobService = new BatchJobService({ + manager: MockManager, + eventBusService: eventBusServiceMock, + batchJobRepository: batchJobRepositoryMock + }) + + afterEach(() => { + jest.clearAllMocks() + }) + + describe('update status', () => { + describe("confirm", () => { + it('should be able to confirm_processing a batch job to emit the processing event', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: true, + status: BatchJobStatus.PRE_PROCESSED + }) + + const updatedBatchJob = await batchJobService.confirm(batchJob) + expect(updatedBatchJob.processing_at).not.toBeTruthy() + expect(eventBusServiceMock.emit) + .toHaveBeenCalledWith(BatchJobService.Events.CONFIRMED, { id: batchJobId_1 }) + }) + + it('should not be able to confirm a 
batch job with the wrong status', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: true, + status: BatchJobStatus.CREATED + }) + + const err = await batchJobService.confirm(batchJob) + .catch(e => e) + expect(err).toBeTruthy() + expect(err.message).toBe("Cannot confirm processing for a batch job that is not pre processed") + expect(eventBusServiceMock.emit).toHaveBeenCalledTimes(0) + }) + }) + + describe("complete", () => { + it('should be able to complete a batch job', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: true, + status: BatchJobStatus.PROCESSING + }) + + const updatedBatchJob = await batchJobService.complete(batchJob) + expect(updatedBatchJob.completed_at).toBeTruthy() + expect(eventBusServiceMock.emit) + .toHaveBeenCalledWith(BatchJobService.Events.COMPLETED, { id: batchJobId_1 }) + + const batchJob2 = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: false, + status: BatchJobStatus.PROCESSING + }) + + const updatedBatchJob2 = await batchJobService.complete(batchJob2) + expect(updatedBatchJob2.completed_at).toBeTruthy() + expect(eventBusServiceMock.emit) + .toHaveBeenCalledWith(BatchJobService.Events.COMPLETED, { id: batchJobId_1 }) + }) + + it('should not be able to complete a batch job with the wrong status', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: true, + status: BatchJobStatus.CREATED + }) + + const err = await batchJobService.complete(batchJob) + .catch(e => e) + expect(err).toBeTruthy() + expect(err.message).toBe( `Cannot complete a batch job with status "${batchJob.status}". The batch job must be processing`) + expect(eventBusServiceMock.emit).toHaveBeenCalledTimes(0) + + const batchJob2 = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: false, + status: BatchJobStatus.PRE_PROCESSED + }) + + const err2 = await batchJobService.complete(batchJob2) + .catch(e => e) + expect(err2).toBeTruthy() + expect(err2.message).toBe( `Cannot complete a batch job with status "${batchJob2.status}". 
The batch job must be processing`) + expect(eventBusServiceMock.emit).toHaveBeenCalledTimes(0) + }) + }) + + describe("pre processed", () => { + it('should be able to mark as pre processed a batch job in dry_run', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: true, + status: BatchJobStatus.CREATED + }) + + const updatedBatchJob = await batchJobService.setPreProcessingDone(batchJob) + expect(updatedBatchJob.pre_processed_at).toBeTruthy() + expect(eventBusServiceMock.emit) + .toHaveBeenCalledWith(BatchJobService.Events.PRE_PROCESSED, { id: batchJobId_1 }) + }) + + it('should be able to mark as completed a batch job that has been pre processed but not in dry_run', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + dry_run: false, + status: BatchJobStatus.CREATED + }) + + const updatedBatchJob = await batchJobService.setPreProcessingDone(batchJob) + expect(updatedBatchJob.pre_processed_at).toBeTruthy() + expect(updatedBatchJob.confirmed_at).toBeTruthy() + expect(eventBusServiceMock.emit).toHaveBeenCalledTimes(2) + expect(eventBusServiceMock.emit) + .toHaveBeenCalledWith(BatchJobService.Events.PRE_PROCESSED, { id: batchJobId_1 }) + expect(eventBusServiceMock.emit) + .toHaveBeenLastCalledWith(BatchJobService.Events.CONFIRMED, { id: batchJobId_1 }) + }) + }) + + describe("cancel", () => { + it('should be able to cancel a batch job', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + status: BatchJobStatus.CREATED + }) + + const updatedBatchJob = await batchJobService.cancel(batchJob) + expect(updatedBatchJob.canceled_at).toBeTruthy() + expect(eventBusServiceMock.emit) + .toHaveBeenCalledWith(BatchJobService.Events.CANCELED, { id: batchJobId_1 }) + }) + + it('should not be able to cancel a batch job with the wrong status', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + status: BatchJobStatus.COMPLETED + }) + + const err = await batchJobService.cancel(batchJob) + .catch(e => e) + expect(err).toBeTruthy() + expect(err.message).toBe("Cannot cancel completed batch job") + expect(eventBusServiceMock.emit).toHaveBeenCalledTimes(0) + }) + }) + + describe("processing", () => { + it('should be able to mark as processing a batch job', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + status: BatchJobStatus.CONFIRMED + }) + + const updatedBatchJob = await batchJobService.setProcessing(batchJob) + expect(updatedBatchJob.processing_at).toBeTruthy() + expect(eventBusServiceMock.emit) + .toHaveBeenCalledWith(BatchJobService.Events.PROCESSING, { id: batchJobId_1 }) + }) + + it('should not be able to mark as processing a batch job with the wrong status', async () => { + const batchJob = batchJobRepositoryMock.create({ + id: batchJobId_1, + status: BatchJobStatus.COMPLETED + }) + + const err = await batchJobService.setProcessing(batchJob) + .catch(e => e) + expect(err).toBeTruthy() + expect(err.message).toBe("Cannot mark a batch job as processing if the status is different that confirmed") + expect(eventBusServiceMock.emit).toHaveBeenCalledTimes(0) + }) + }) + }) +}) \ No newline at end of file diff --git a/packages/medusa/src/services/__tests__/claim.js b/packages/medusa/src/services/__tests__/claim.js index d008037bbe..19fb11bc0e 100644 --- a/packages/medusa/src/services/__tests__/claim.js +++ b/packages/medusa/src/services/__tests__/claim.js @@ -58,6 +58,10 @@ describe("ClaimService", () => { create: (d) => ({ id: 
"claim_134", ...d }), }) + const lineItemRepository = MockRepository({ + create: (d) => ({ id: "claim_item_134", ...d }), + }) + const taxProviderService = { createTaxLines: jest.fn(), withTransaction: function () { @@ -103,6 +107,7 @@ describe("ClaimService", () => { const claimService = new ClaimService({ manager: MockManager, claimRepository: claimRepo, + lineItemRepository: lineItemRepository, taxProviderService, totalsService, returnService, diff --git a/packages/medusa/src/services/__tests__/csv-parser.js b/packages/medusa/src/services/__tests__/csv-parser.js new file mode 100644 index 0000000000..6c72ea26b4 --- /dev/null +++ b/packages/medusa/src/services/__tests__/csv-parser.js @@ -0,0 +1,403 @@ +import { createContainer } from "awilix" +import { Readable } from "stream" +import { AbstractCsvValidator } from "../../interfaces/csv-parser" +import CsvParser from "../csv-parser" +import { currencies } from "../../utils/currencies" + +describe("CsvParser", () => { + describe("parse", () => { + const csvParser = new CsvParser(createContainer(), { + columns: [], + }) + + let csvContent = + 'title,subtitle\n"T-shirt","summer tee"\n"Sunglasses","Red sunglasses"' + + let expectedProducts = [ + { + title: "T-shirt", + subtitle: "summer tee", + }, + { + title: "Sunglasses", + subtitle: "Red sunglasses", + }, + ] + + afterEach(() => { + jest.clearAllMocks() + }) + + it("given a readable stream, can parse the stream content", async () => { + const stream = Readable.from(csvContent) + const content = await csvParser.parse(stream) + + expect(content).toEqual(expectedProducts) + }) + }) + + describe("buildData", () => { + describe("schema validation", () => { + class TitleValidator extends AbstractCsvValidator { + async validate(builtLine) { + if (/\d/.test(builtLine["title"])) { + throw new Error("title should not contain a number") + } + return true + } + } + + const schema = { + columns: [ + { + name: "title", + validator: new TitleValidator(createContainer()), + }, + { + name: "size", + }, + { + name: "height", + }, + ], + } + + const csvParser = new CsvParser(createContainer(), schema) + + it("given a line containing a column which is not defined in the schema, then validation should fail", async () => { + try { + await csvParser.buildData([ + { + title: "sunglasses", + size: "M", + height: "100", + first_name: "lebron", + }, + ]) + } catch (err) { + expect(err.message).toEqual( + "Unable to treat column first_name from the csv file. 
No target column found in the provided schema" + ) + } + }) + + it("given a line containing a column which does not pass a validation constraint, then validation should fail", async () => { + try { + await csvParser.buildData([ + { title: "contains a number 1", size: "M", height: "100" }, + ]) + } catch (err) { + expect(err.message).toEqual("title should not contain a number") + } + }) + + it("given a line which passes all validation constraints, then should returned validated content", async () => { + const content = await csvParser.buildData([ + { title: "great product", size: "M", height: "100" }, + ]) + + expect(content).toEqual([ + { + title: "great product", + size: "M", + height: "100", + }, + ]) + }) + + it("given a line which does not provide a value for a required column, then should throw an error", async () => { + try { + await csvParser.buildData([{ size: "S", height: "100" }]) + } catch (err) { + expect(err.message).toEqual( + `Missing column(s) "title" from the given csv file` + ) + } + }) + + it("given a line which does not provide a value for multiple required columns, then should throw an error", async () => { + try { + await csvParser.buildData([{ size: "S" }]) + } catch (err) { + expect(err.message).toEqual( + `Missing column(s) "title", "height" from the given csv file` + ) + } + }) + + it("given a line which does not provide a value for a required column, then should throw an error", async () => { + try { + await csvParser.buildData([ + { title: "t-shirt", height: "100", size: "" }, + ]) + } catch (err) { + expect(err.message).toEqual( + `No value found for target column "size" in line 1 of the given csv file` + ) + } + }) + }) + + describe("mapTo", () => { + const csvParser = new CsvParser(createContainer(), { + columns: [ + { + name: "title", + mapTo: "product_title", + }, + ], + }) + + it("given a mapTo field for a column, when building data including that column, should rename the column name to what mapTo refers to", async () => { + const content = await csvParser.buildData([{ title: "a product" }]) + + expect(content).toEqual([ + { + product_title: "a product", + }, + ]) + }) + }) + + describe("transformer", () => { + const csvParser = new CsvParser(createContainer(), { + columns: [ + { + name: "title", + }, + { + name: "price usd", + transform: (value) => Math.round(Number(value) * 100), + }, + ], + }) + + it("given a transformer function for a column, when building data, should transform that column's value according to the transformation function", async () => { + const content = await csvParser.buildData([ + { title: "medusa t-shirt", "price usd": "19.99" }, + ]) + + expect(content).toEqual([ + { + title: "medusa t-shirt", + "price usd": 1999, + }, + ]) + }) + }) + + describe("match", () => { + describe("regex", () => { + const csvParser = new CsvParser(createContainer(), { + columns: [ + { + name: "title", + }, + { + name: "prices", + match: /.*Variant Price.*/i, + transform: (value) => Math.round(Number(value) * 100), + }, + ], + }) + + it("given a column with the match property as regex and a transformer, when building data, should resolve that column for all entries in the line that match the regex", async () => { + const content = await csvParser.buildData([ + { + title: "medusa t-shirt", + "variant price usd": "19.99", + "variant price cad": "26.79", + "variant price dkk": "1389", + }, + { + title: "medusa sunglasses", + "variant price usd": "9.99", + "variant price cad": "16.79", + "variant price dkk": "389", + }, + ]) + + expect(content).toEqual([ + 
{ + title: "medusa t-shirt", + "variant price usd": 1999, + "variant price cad": 2679, + "variant price dkk": 138900, + }, + { + title: "medusa sunglasses", + "variant price usd": 999, + "variant price cad": 1679, + "variant price dkk": 38900, + }, + ]) + }) + }) + + describe("reducer", () => { + const schema = { + columns: [ + { + name: "title", + }, + { + name: "prices", + match: /.*Variant Price ([a-z]+).*/i, + reducer: (builtLine, key, value) => { + const [, currency_code] = key.match( + /.*Variant Price ([a-z]+).*/i + ) + const existingPrices = builtLine.prices ?? [] + const price = { + amount: Math.round(Number(value) * 100), + currency_code, + } + return { + ...builtLine, + prices: [...existingPrices, price], + } + }, + validator: { + validate: (builtLine) => { + const unexistingCurrency = builtLine.prices?.find( + (price) => !currencies[price.currency_code.toUpperCase()] + ) + if (unexistingCurrency) { + throw new Error( + `wrong currency: ${unexistingCurrency.currency_code}` + ) + } + return true + }, + }, + }, + ], + } + const csvParser = new CsvParser(createContainer(), schema) + + it("given a column with match and reducer properties, when building data, should return the result of the reducer function", async () => { + const content = await csvParser.buildData([ + { + title: "medusa t-shirt", + "variant price usd": "19.99", + "variant price cad": "26.79", + "variant price dkk": "1389", + }, + { + title: "medusa sunglasses", + "variant price usd": "9.99", + "variant price cad": "16.79", + "variant price dkk": "389", + }, + ]) + + expect(content).toEqual([ + { + title: "medusa t-shirt", + prices: [ + { + currency_code: "usd", + amount: 1999, + }, + { + currency_code: "cad", + amount: 2679, + }, + { + currency_code: "dkk", + amount: 138900, + }, + ], + }, + { + title: "medusa sunglasses", + prices: [ + { + currency_code: "usd", + amount: 999, + }, + { + currency_code: "cad", + amount: 1679, + }, + { + currency_code: "dkk", + amount: 38900, + }, + ], + }, + ]) + }) + + it("given a column with match and reducer properties, when building data, should run validation on the built data", async () => { + try { + await csvParser.buildData([ + { + title: "medusa t-shirt", + "variant price usd": "19.99", + "variant price cad": "26.79", + "variant price grp": "1389", + }, + { + title: "medusa sunglasses", + "variant price usd": "9.99", + "variant price cad": "16.79", + "variant price grp": "389", + }, + ]) + } catch (err) { + expect(err.message).toEqual("wrong currency: grp") + } + }) + + describe("invalid column properties", () => { + const schema = { + columns: [ + { + name: "title", + }, + { + name: "variants", + match: /.*Variant Price ([a-z]+).*/i, + mapTo: "prices", + }, + ], + } + const csvParser = new CsvParser(createContainer(), schema) + + it("given a column with match and mapTo property, when building data, then the mapTo property should be ignored", async () => { + const content = await csvParser.buildData([ + { + title: "medusa t-shirt", + "variant price usd": "19.99", + "variant price cad": "26.79", + "variant price dkk": "1389", + }, + { + title: "medusa sunglasses", + "variant price usd": "9.99", + "variant price cad": "16.79", + "variant price dkk": "389", + }, + ]) + + expect(content).toEqual([ + { + title: "medusa t-shirt", + "variant price usd": "19.99", + "variant price cad": "26.79", + "variant price dkk": "1389", + }, + { + title: "medusa sunglasses", + "variant price usd": "9.99", + "variant price cad": "16.79", + "variant price dkk": "389", + }, + ]) + }) + }) + }) + 
}) + }) +}) diff --git a/packages/medusa/src/services/__tests__/custom-shipping-option.js b/packages/medusa/src/services/__tests__/custom-shipping-option.js index a4a8d31cfb..6536495501 100644 --- a/packages/medusa/src/services/__tests__/custom-shipping-option.js +++ b/packages/medusa/src/services/__tests__/custom-shipping-option.js @@ -4,7 +4,7 @@ import { MockManager, MockRepository, IdMap } from "medusa-test-utils" describe("CustomShippingOptionService", () => { describe("list", () => { const customShippingOptionRepository = MockRepository({ - find: q => { + find: (q) => { return Promise.resolve([ { id: "cso-test", @@ -44,7 +44,7 @@ describe("CustomShippingOptionService", () => { describe("retrieve", () => { const customShippingOptionRepository = MockRepository({ - findOne: q => { + findOne: (q) => { if (q.where.id === "cso-test") { return Promise.resolve({ id: "cso-test", @@ -88,8 +88,8 @@ describe("CustomShippingOptionService", () => { const customShippingOptionRepository = MockRepository({ create: jest .fn() - .mockImplementation(f => Promise.resolve({ id: "test-cso", ...f })), - save: jest.fn().mockImplementation(f => Promise.resolve(f)), + .mockImplementation((f) => Promise.resolve({ id: "test-cso", ...f })), + save: jest.fn().mockImplementation((f) => Promise.resolve(f)), }) const customShippingOptionService = new CustomShippingOptionService({ @@ -114,7 +114,7 @@ describe("CustomShippingOptionService", () => { cart_id: "test-cso-cart", shipping_option_id: "test-so", price: 30, - metadata: {}, + metadata: undefined, }) expect(customShippingOptionRepository.save).toHaveBeenCalledTimes(1) @@ -123,7 +123,7 @@ describe("CustomShippingOptionService", () => { cart_id: "test-cso-cart", shipping_option_id: "test-so", price: 30, - metadata: {}, + metadata: undefined, }) }) }) diff --git a/packages/medusa/src/services/__tests__/discount.js b/packages/medusa/src/services/__tests__/discount.js index 733439d430..e598c5d5ad 100644 --- a/packages/medusa/src/services/__tests__/discount.js +++ b/packages/medusa/src/services/__tests__/discount.js @@ -761,7 +761,9 @@ describe("DiscountService", () => { let discountService beforeEach(async () => { - discountService = new DiscountService({}) + discountService = new DiscountService({ + manager: MockManager + }) const hasReachedLimitMock = jest.fn().mockImplementation(() => false) const isDisabledMock = jest.fn().mockImplementation(() => false) const isValidForRegionMock = jest @@ -1064,7 +1066,9 @@ describe("DiscountService", () => { } }) - const discountService = new DiscountService({}) + const discountService = new DiscountService({ + manager: MockManager + }) discountService.retrieve = retrieveMock beforeEach(() => { diff --git a/packages/medusa/src/services/__tests__/draft-order.js b/packages/medusa/src/services/__tests__/draft-order.js index 0b0bb5471d..1847f5379b 100644 --- a/packages/medusa/src/services/__tests__/draft-order.js +++ b/packages/medusa/src/services/__tests__/draft-order.js @@ -1,4 +1,3 @@ -import _ from "lodash" import { MockRepository, MockManager } from "medusa-test-utils" import { EventBusServiceMock } from "../__mocks__/event-bus" import DraftOrderService from "../draft-order" diff --git a/packages/medusa/src/services/__tests__/event-bus.js b/packages/medusa/src/services/__tests__/event-bus.js index 9dfc2b5f81..83e59c77ce 100644 --- a/packages/medusa/src/services/__tests__/event-bus.js +++ b/packages/medusa/src/services/__tests__/event-bus.js @@ -32,7 +32,7 @@ describe("EventBusService", () => { }) afterAll(async () => { - await 
await eventBus.stopEnqueuer() + await eventBus.stopEnqueuer() }) it("creates bull queue", () => { @@ -64,7 +64,7 @@ describe("EventBusService", () => { }) it("added the subscriber to the queue", () => { - expect(eventBus.observers_["eventName"].length).toEqual(1) + expect(eventBus.observers_.get("eventName").length).toEqual(1) }) }) @@ -138,7 +138,7 @@ describe("EventBusService", () => { manager: MockManager, stagedJobRepository, logger: loggerMock, - }) + }, {}) eventBus.subscribe("eventName", () => Promise.resolve("hi")) result = await eventBus.worker_({ data: { eventName: "eventName", data: {} }, @@ -191,13 +191,13 @@ describe("EventBusService", () => { it("calls logger warn on rejections", () => { expect(loggerMock.warn).toHaveBeenCalledTimes(3) expect(loggerMock.warn).toHaveBeenCalledWith( - "An error occured while processing eventName: fail1" + "An error occurred while processing eventName: fail1" ) expect(loggerMock.warn).toHaveBeenCalledWith( - "An error occured while processing eventName: fail2" + "An error occurred while processing eventName: fail2" ) expect(loggerMock.warn).toHaveBeenCalledWith( - "An error occured while processing eventName: fail3" + "An error occurred while processing eventName: fail3" ) }) diff --git a/packages/medusa/src/services/__tests__/fulfillment.js b/packages/medusa/src/services/__tests__/fulfillment.js index 2a4f465a37..960426e38b 100644 --- a/packages/medusa/src/services/__tests__/fulfillment.js +++ b/packages/medusa/src/services/__tests__/fulfillment.js @@ -6,13 +6,13 @@ describe("FulfillmentService", () => { const fulfillmentRepository = MockRepository({}) const fulfillmentProviderService = { - createFulfillment: jest.fn().mockImplementation(data => { + createFulfillment: jest.fn().mockImplementation((data) => { return Promise.resolve(data) }), } const shippingProfileService = { - retrieve: jest.fn().mockImplementation(data => { + retrieve: jest.fn().mockImplementation((data) => { return Promise.resolve({ id: IdMap.getId("default"), name: "default_profile", @@ -22,11 +22,18 @@ describe("FulfillmentService", () => { }), } + const lineItemRepository = { + create: jest.fn().mockImplementation((data) => { + return data + }), + } + const fulfillmentService = new FulfillmentService({ manager: MockManager, fulfillmentProviderService, fulfillmentRepository, shippingProfileService, + lineItemRepository, }) beforeEach(async () => { @@ -101,7 +108,7 @@ describe("FulfillmentService", () => { canceled_at: new Date(), items: [{ item_id: 1, quantity: 2 }], }), - save: f => f, + save: (f) => f, }) const lineItemService = { @@ -111,13 +118,13 @@ describe("FulfillmentService", () => { Promise.resolve({ id: 1, fulfilled_quantity: 2 }) ), update: jest.fn(), - withTransaction: function() { + withTransaction: function () { return this }, } const fulfillmentProviderService = { - cancelFulfillment: f => f, + cancelFulfillment: (f) => f, } const fulfillmentService = new FulfillmentService({ @@ -150,9 +157,9 @@ describe("FulfillmentService", () => { }) describe("createShipment", () => { - const trackingLinkRepository = MockRepository({ create: c => c }) + const trackingLinkRepository = MockRepository({ create: (c) => c }) const fulfillmentRepository = MockRepository({ - findOne: q => { + findOne: (q) => { switch (q.where.id) { case IdMap.getId("canceled"): return Promise.resolve({ canceled_at: new Date() }) diff --git a/packages/medusa/src/services/__tests__/gift-card.js b/packages/medusa/src/services/__tests__/gift-card.js index 792edec5e3..e144fd4bac 100644 --- 
a/packages/medusa/src/services/__tests__/gift-card.js +++ b/packages/medusa/src/services/__tests__/gift-card.js @@ -5,23 +5,23 @@ import GiftCardService from "../gift-card" describe("GiftCardService", () => { const eventBusService = { emit: jest.fn(), - withTransaction: function() { + withTransaction: function () { return this }, } describe("create", () => { const giftCardRepo = MockRepository({ - create: s => { + create: (s) => { return Promise.resolve(s) }, - save: s => { + save: (s) => { return Promise.resolve(s) }, }) const regionService = { - withTransaction: function() { + withTransaction: function () { return this }, retrieve: () => { @@ -59,18 +59,6 @@ describe("GiftCardService", () => { code: expect.any(String), }) }) - - it("fails to create giftcard if no region is provided", async () => { - const card = { - ...giftCard, - } - - card.region_id = undefined - - await expect(giftCardService.create(card)).rejects.toThrow( - "Gift card is missing region_id" - ) - }) }) describe("retrieve", () => { @@ -152,16 +140,16 @@ describe("GiftCardService", () => { } const giftCardRepo = MockRepository({ - findOneWithRelations: s => { + findOneWithRelations: (s) => { return Promise.resolve(giftCard) }, - save: s => { + save: (s) => { return Promise.resolve(s) }, }) const regionService = { - withTransaction: function() { + withTransaction: function () { return this }, retrieve: () => { @@ -198,7 +186,7 @@ describe("GiftCardService", () => { it.each([[-100], [6000]])( "fails to update balance with illegal input '%s'", - async input => { + async (input) => { await expect( giftCardService.update(IdMap.getId("giftcard-id"), { balance: input, @@ -215,7 +203,7 @@ describe("GiftCardService", () => { } const giftCardRepo = MockRepository({ - findOne: s => { + findOne: (s) => { switch (s.where.id) { case IdMap.getId("gift-card"): return Promise.resolve(giftCard) @@ -223,7 +211,7 @@ describe("GiftCardService", () => { return Promise.resolve() } }, - softRemove: s => { + softRemove: (s) => { return Promise.resolve() }, }) diff --git a/packages/medusa/src/services/__tests__/line-item.js b/packages/medusa/src/services/__tests__/line-item.js index d36bd8363a..e9cd5d6b9a 100644 --- a/packages/medusa/src/services/__tests__/line-item.js +++ b/packages/medusa/src/services/__tests__/line-item.js @@ -4,7 +4,7 @@ import LineItemService from "../line-item" describe("LineItemService", () => { describe("create", () => { const lineItemRepository = MockRepository({ - create: (data) => data + create: (data) => data, }) const cartRepository = MockRepository({ @@ -15,7 +15,7 @@ describe("LineItemService", () => { }) const regionService = { - withTransaction: function() { + withTransaction: function () { return this }, retrieve: () => { @@ -26,10 +26,10 @@ describe("LineItemService", () => { } const productVariantService = { - withTransaction: function() { + withTransaction: function () { return this }, - retrieve: query => { + retrieve: (query) => { if (query === IdMap.getId("test-giftcard")) { return { id: IdMap.getId("test-giftcard"), @@ -54,8 +54,25 @@ describe("LineItemService", () => { getRegionPrice: () => 100, } + const pricingService = { + withTransaction: function () { + return this + }, + getProductVariantPricingById: () => { + return { + calculated_price: 100, + } + }, + getProductVariantPricing: () => { + return { + calculated_price: 100, + } + }, + } + const lineItemService = new LineItemService({ manager: MockManager, + pricingService, lineItemRepository, productVariantService, regionService, @@ -119,19 +136,22 
@@ describe("LineItemService", () => { }) expect(lineItemRepository.create).toHaveBeenCalledTimes(2) - expect(lineItemRepository.create).toHaveBeenNthCalledWith(2, expect.objectContaining({ - allow_discounts: false, - variant_id: IdMap.getId("test-giftcard"), - cart_id: IdMap.getId("test-cart"), - title: "Test product", - description: "Test variant", - thumbnail: "", - unit_price: 100, - quantity: 1, - is_giftcard: true, - should_merge: true, - metadata: {}, - })) + expect(lineItemRepository.create).toHaveBeenNthCalledWith( + 2, + expect.objectContaining({ + allow_discounts: false, + variant_id: IdMap.getId("test-giftcard"), + cart_id: IdMap.getId("test-cart"), + title: "Test product", + description: "Test variant", + thumbnail: "", + unit_price: 100, + quantity: 1, + is_giftcard: true, + should_merge: true, + metadata: {}, + }) + ) }) }) diff --git a/packages/medusa/src/services/__tests__/middleware.js b/packages/medusa/src/services/__tests__/middleware.js index 40fc740f4b..ca9fb56ca4 100644 --- a/packages/medusa/src/services/__tests__/middleware.js +++ b/packages/medusa/src/services/__tests__/middleware.js @@ -1,5 +1,3 @@ -import mongoose from "mongoose" -import { IdMap } from "medusa-test-utils" import MiddlewareService from "../middleware" describe("MiddlewareService", () => { diff --git a/packages/medusa/src/services/__tests__/price-list.js b/packages/medusa/src/services/__tests__/price-list.js index 1fe9bc8495..d794530f4f 100644 --- a/packages/medusa/src/services/__tests__/price-list.js +++ b/packages/medusa/src/services/__tests__/price-list.js @@ -1,42 +1,41 @@ import { MedusaError } from "medusa-core-utils" import { IdMap, MockManager, MockRepository } from "medusa-test-utils" import PriceListService from "../price-list" +import { MoneyAmountRepository } from "../../repositories/money-amount" + +const priceListRepository = MockRepository({ + findOne: (q) => { + if (q === IdMap.getId("batman")) { + return Promise.resolve(undefined) + } + return Promise.resolve({ id: IdMap.getId("ironman") }) + }, + create: (data) => { + return Promise.resolve({ id: IdMap.getId("ironman"), ...data }) + }, + save: (data) => Promise.resolve(data), +}) + +const customerGroupService = { + retrieve: jest.fn((id) => { + if (id === IdMap.getId("group")) { + return Promise.resolve({ id: IdMap.getId("group") }) + } + + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `CustomerGroup with id ${id} was not found` + ) + }), +} describe("PriceListService", () => { - const priceListRepository = MockRepository({ - findOne: (q) => { - if (q === IdMap.getId("batman")) { - return Promise.resolve(undefined) - } - return Promise.resolve({ id: IdMap.getId("ironman") }) - }, - create: (data) => { - return Promise.resolve({ id: IdMap.getId("ironman"), ...data }) - }, - save: (data) => Promise.resolve(data), - }) - - const customerGroupService = { - retrieve: jest.fn((id) => { - if (id === IdMap.getId("group")) { - return Promise.resolve({ id: IdMap.getId("group") }) - } - - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `CustomerGroup with id ${id} was not found` - ) - }), - } - const moneyAmountRepository = MockRepository() moneyAmountRepository.addPriceListPrices = jest.fn(() => Promise.resolve()) moneyAmountRepository.removePriceListPrices = jest.fn(() => Promise.resolve()) moneyAmountRepository.updatePriceListPrices = jest.fn(() => Promise.resolve()) - const defaultRelations = ["prices", "customer_groups"] - const priceListService = new PriceListService({ manager: MockManager, customerGroupService, @@ 
-73,7 +72,7 @@ describe("PriceListService", () => { describe("create", () => { it("creates a new Price List", async () => { - const result = await priceListService.create({ + await priceListService.create({ name: "VIP winter sale", description: "Winter sale for VIP customers. 25% off selected items.", type: "sale", @@ -118,4 +117,62 @@ describe("PriceListService", () => { ) }) }) + + describe("update", () => { + const updateRelatedMoneyAmountRepository = MockRepository() + updateRelatedMoneyAmountRepository.create = jest.fn().mockImplementation((rawEntity) => Promise.resolve(rawEntity)) + updateRelatedMoneyAmountRepository.save = jest.fn().mockImplementation(() => Promise.resolve()) + updateRelatedMoneyAmountRepository.updatePriceListPrices = (new MoneyAmountRepository()).updatePriceListPrices + + const updateRelatedPriceListService = new PriceListService({ + manager: MockManager, + customerGroupService, + priceListRepository, + moneyAmountRepository: updateRelatedMoneyAmountRepository, + }) + + it("update only existing price lists and related money amount", async () => { + await updateRelatedPriceListService.update(IdMap.getId("ironman"), { + description: "Updated description", + name: "Updated name", + prices: [ + { + id: "pl_dakjn", + amount: 100, + currency_code: "usd", + min_quantity: 1, + max_quantity: 100, + }, + ], + }) + + expect(updateRelatedMoneyAmountRepository.create).not.toHaveBeenCalled() + expect(updateRelatedMoneyAmountRepository.save).toHaveBeenCalled() + }) + + it("update only existing and create new price lists and related money amount", async () => { + await updateRelatedPriceListService.update(IdMap.getId("ironman"), { + description: "Updated description", + name: "Updated name", + prices: [ + { + id: "pl_dakjn", + amount: 100, + currency_code: "usd", + min_quantity: 1, + max_quantity: 100, + }, + { + amount: 100, + currency_code: "usd", + min_quantity: 1, + max_quantity: 100, + }, + ], + }) + + expect(updateRelatedMoneyAmountRepository.create).toHaveBeenCalledTimes(1) + expect(updateRelatedMoneyAmountRepository.save).toHaveBeenCalled() + }) + }) }) diff --git a/packages/medusa/src/services/__tests__/product.js b/packages/medusa/src/services/__tests__/product.js index bbfc3bb278..18049d646a 100644 --- a/packages/medusa/src/services/__tests__/product.js +++ b/packages/medusa/src/services/__tests__/product.js @@ -3,11 +3,28 @@ import ProductService from "../product" const eventBusService = { emit: jest.fn(), - withTransaction: function () { + withTransaction: function() { return this }, } +const mockUpsertTags = jest.fn().mockImplementation((data) => + Promise.resolve( + data.map(({ value, id }) => ({ + value, + id: id || (value === "title" ? 
"tag-1" : "tag-2"), + })) + ) +) + +const mockUpsertType = jest.fn().mockImplementation((value) => { + const productType = { + id: "type", + value: value, + } + return Promise.resolve(productType) +}) + describe("ProductService", () => { describe("retrieve", () => { const productRepo = MockRepository({ @@ -81,15 +98,17 @@ describe("ProductService", () => { } }, }) + productTagRepository.upsertTags = mockUpsertTags const productTypeRepository = MockRepository({ findOne: () => Promise.resolve(undefined), create: (data) => { return { id: "type", value: "type1" } }, }) + productTypeRepository.upsertType = mockUpsertType const productCollectionService = { - withTransaction: function () { + withTransaction: function() { return this }, retrieve: (id) => @@ -148,13 +167,9 @@ describe("ProductService", () => { ], }) - expect(productTagRepository.findOne).toHaveBeenCalledTimes(2) - // We add two tags, that does not exist therefore we make sure - // that create is also called - expect(productTagRepository.create).toHaveBeenCalledTimes(2) + expect(productTagRepository.upsertTags).toHaveBeenCalledTimes(1) - expect(productTypeRepository.findOne).toHaveBeenCalledTimes(1) - expect(productTypeRepository.create).toHaveBeenCalledTimes(1) + expect(productTypeRepository.upsertType).toHaveBeenCalledTimes(1) expect(productRepository.save).toHaveBeenCalledTimes(1) expect(productRepository.save).toHaveBeenCalledWith({ @@ -227,11 +242,12 @@ describe("ProductService", () => { return { id: "type", value: "type1" } }, }) + productTypeRepository.upsertType = mockUpsertType const productVariantRepository = MockRepository() const productVariantService = { - withTransaction: function () { + withTransaction: function() { return this }, update: (variant, update) => { @@ -252,6 +268,7 @@ describe("ProductService", () => { } }, }) + productTagRepository.upsertTags = mockUpsertTags const cartRepository = MockRepository({ findOne: (data) => { @@ -470,7 +487,7 @@ describe("ProductService", () => { }) const productVariantService = { - withTransaction: function () { + withTransaction: function() { return this }, addOptionValue: jest.fn(), @@ -590,72 +607,6 @@ describe("ProductService", () => { }) }) - describe("reorderOptions", () => { - const productRepository = MockRepository({ - findOneWithRelations: (query) => - Promise.resolve({ - id: IdMap.getId("ironman"), - options: [ - { id: IdMap.getId("material") }, - { id: IdMap.getId("color") }, - ], - }), - }) - - const productService = new ProductService({ - manager: MockManager, - productRepository, - eventBusService, - }) - - beforeEach(() => { - jest.clearAllMocks() - }) - - it("reorders options", async () => { - await productService.reorderOptions(IdMap.getId("ironman"), [ - IdMap.getId("color"), - IdMap.getId("material"), - ]) - - expect(productRepository.save).toBeCalledTimes(1) - expect(productRepository.save).toBeCalledWith({ - id: IdMap.getId("ironman"), - options: [ - { id: IdMap.getId("color") }, - { id: IdMap.getId("material") }, - ], - }) - }) - - it("throws if one option id is not in the product options", async () => { - try { - await productService.reorderOptions(IdMap.getId("ironman"), [ - IdMap.getId("packaging"), - IdMap.getId("material"), - ]) - } catch (err) { - expect(err.message).toEqual( - `Product has no option with id: ${IdMap.getId("packaging")}` - ) - } - }) - - it("throws if order length and product option lengths differ", async () => { - try { - await productService.reorderOptions(IdMap.getId("ironman"), [ - IdMap.getId("size"), - IdMap.getId("color"), - 
IdMap.getId("material"), - ]) - } catch (err) { - expect(err.message).toEqual( - `Product options and new options order differ in length.` - ) - } - }) - }) - describe("updateOption", () => { const productRepository = MockRepository({ findOneWithRelations: (query) => diff --git a/packages/medusa/src/services/__tests__/shipping-option.js b/packages/medusa/src/services/__tests__/shipping-option.js index 45dae50bca..2a30f9aa2b 100644 --- a/packages/medusa/src/services/__tests__/shipping-option.js +++ b/packages/medusa/src/services/__tests__/shipping-option.js @@ -156,8 +156,8 @@ describe("ShippingOptionService", () => { expect(fulfillmentProviderService.canCalculate).toHaveBeenCalledWith({ amount: null, data: { provider_data: "true" }, - price_type: "calculated", provider_id: "provider", + price_type: "calculated", }) expect(shippingOptionRepository.save).toHaveBeenCalledTimes(1) diff --git a/packages/medusa/src/services/__tests__/store.js b/packages/medusa/src/services/__tests__/store.js index 1045fc14c0..ab79b70f4f 100644 --- a/packages/medusa/src/services/__tests__/store.js +++ b/packages/medusa/src/services/__tests__/store.js @@ -44,7 +44,7 @@ describe("StoreService", () => { }) it("successfully retrieve store", async () => { - await storeService.retrieve() + await storeService.retrieve().catch(() => void 0) expect(storeRepository.findOne).toHaveBeenCalledTimes(1) }) @@ -92,7 +92,7 @@ describe("StoreService", () => { storeService.update({ currencies: ["1cd", "usd"], }) - ).rejects.toThrow("Invalid currency 1cd") + ).rejects.toThrow("Currency with code 1cd does not exist") expect(storeRepository.findOne).toHaveBeenCalledTimes(1) }) @@ -148,8 +148,6 @@ describe("StoreService", () => { await expect(storeService.addCurrency("1cd")).rejects.toThrow( "Currency 1cd not found" ) - - expect(storeRepository.findOne).toHaveBeenCalledTimes(1) }) it("fails if currency already existis", async () => { diff --git a/packages/medusa/src/services/__tests__/swap.js b/packages/medusa/src/services/__tests__/swap.js index 413714b37c..aaf1a4330e 100644 --- a/packages/medusa/src/services/__tests__/swap.js +++ b/packages/medusa/src/services/__tests__/swap.js @@ -974,7 +974,6 @@ describe("SwapService", () => { const swapService = new SwapService({ manager: MockManager, - eventBusService, swapRepository: swapRepo, paymentProviderService, eventBusService, diff --git a/packages/medusa/src/services/auth.ts b/packages/medusa/src/services/auth.ts index c573edb378..492ef5d18a 100644 --- a/packages/medusa/src/services/auth.ts +++ b/packages/medusa/src/services/auth.ts @@ -1,21 +1,32 @@ import Scrypt from "scrypt-kdf" -import { BaseService } from "medusa-interfaces" import { AuthenticateResult } from "../types/auth" -import { User } from "../models/user" -import { Customer } from "../models/customer" +import { User, Customer } from "../models" +import { TransactionBaseService } from "../interfaces" +import UserService from "./user" +import CustomerService from "./customer" +import { EntityManager } from "typeorm" + +type InjectedDependencies = { + manager: EntityManager + userService: UserService + customerService: CustomerService +} /** * Can authenticate a user based on email password combination * @extends BaseService */ -class AuthService extends BaseService { - constructor({ userService, customerService }) { - super() +class AuthService extends TransactionBaseService { + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + protected readonly userService_: UserService + 
protected readonly customerService_: CustomerService - /** @private @const {UserService} */ + constructor({ manager, userService, customerService }: InjectedDependencies) { + super({ manager, userService, customerService }) + + this.manager_ = manager this.userService_ = userService - - /** @private @const {CustomerService} */ this.customerService_ = customerService } @@ -25,7 +36,10 @@ class AuthService extends BaseService { * @param {string} hash - the hash to compare against * @return {bool} the result of the comparison */ - async comparePassword_(password: string, hash: string): Promise { + protected async comparePassword_( + password: string, + hash: string + ): Promise { const buf = Buffer.from(hash, "base64") return Scrypt.verify(buf, password) } @@ -39,30 +53,36 @@ class AuthService extends BaseService { * error: a string with the error message */ async authenticateAPIToken(token: string): Promise { - if (process.env.NODE_ENV === "development") { + return await this.atomicPhase_(async (transactionManager) => { + if (process.env.NODE_ENV?.startsWith("dev")) { + try { + const user: User = await this.userService_ + .withTransaction(transactionManager) + .retrieve(token) + return { + success: true, + user, + } + } catch (error) { + // ignore + } + } + try { - const user: User = await this.userService_.retrieve(token) + const user: User = await this.userService_ + .withTransaction(transactionManager) + .retrieveByApiToken(token) return { success: true, user, } } catch (error) { - // ignore + return { + success: false, + error: "Invalid API Token", + } } - } - - try { - const user: User = await this.userService_.retrieveByApiToken(token) - return { - success: true, - user, - } - } catch (error) { - return { - success: false, - error: "Invalid API Token", - } - } + }) } /** @@ -79,35 +99,39 @@ class AuthService extends BaseService { email: string, password: string ): Promise { - try { - const userPasswordHash: User = await this.userService_.retrieveByEmail( - email, - { - select: ["password_hash"], - } - ) - - const passwordsMatch = await this.comparePassword_( - password, - userPasswordHash.password_hash - ) - - if (passwordsMatch) { - const user = await this.userService_.retrieveByEmail(email) - - return { - success: true, - user: user, + return await this.atomicPhase_(async (transactionManager) => { + try { + const userPasswordHash: User = await this.userService_ + .withTransaction(transactionManager) + .retrieveByEmail(email, { + select: ["password_hash"], + }) + + const passwordsMatch = await this.comparePassword_( + password, + userPasswordHash.password_hash + ) + + if (passwordsMatch) { + const user = await this.userService_ + .withTransaction(transactionManager) + .retrieveByEmail(email) + + return { + success: true, + user: user, + } } + } catch (error) { + console.log("error ->", error) + // ignore } - } catch (error) { - // ignore - } - return { - success: false, - error: "Invalid email or password", - } + return { + success: false, + error: "Invalid email or password", + } + }) } /** @@ -124,32 +148,39 @@ class AuthService extends BaseService { email: string, password: string ): Promise { - try { - const customerPasswordHash: Customer = - await this.customerService_.retrieveByEmail(email, { - select: ["password_hash"], - }) - if (customerPasswordHash.password_hash) { - const passwordsMatch = await this.comparePassword_( - password, - customerPasswordHash.password_hash - ) + return await this.atomicPhase_(async (transactionManager) => { + try { + const customerPasswordHash: 
Customer = await this.customerService_ + .withTransaction(transactionManager) + .retrieveByEmail(email, { + select: ["password_hash"], + }) + if (customerPasswordHash.password_hash) { + const passwordsMatch = await this.comparePassword_( + password, + customerPasswordHash.password_hash + ) - if (passwordsMatch) { - const customer = await this.customerService_.retrieveByEmail(email) - return { - success: true, - customer, + if (passwordsMatch) { + const customer = await this.customerService_ + .withTransaction(transactionManager) + .retrieveByEmail(email) + + return { + success: true, + customer, + } } } + } catch (error) { + // ignore } - } catch (error) { - // ignore - } - return { - success: false, - error: "Invalid email or password", - } + + return { + success: false, + error: "Invalid email or password", + } + }) } } diff --git a/packages/medusa/src/services/batch-job.ts b/packages/medusa/src/services/batch-job.ts index 37f27a2b98..77ea2ca514 100644 --- a/packages/medusa/src/services/batch-job.ts +++ b/packages/medusa/src/services/batch-job.ts @@ -1,34 +1,112 @@ import { EntityManager } from "typeorm" - import { BatchJob } from "../models" import { BatchJobRepository } from "../repositories/batch-job" -import { FilterableBatchJobProps } from "../types/batch-job" +import { + BatchJobCreateProps, + BatchJobResultError, + BatchJobStatus, + BatchJobUpdateProps, + CreateBatchJobInput, + FilterableBatchJobProps, +} from "../types/batch-job" import { FindConfig } from "../types/common" -import { TransactionBaseService } from "../interfaces" -import { buildQuery, validateId } from "../utils" +import { AbstractBatchJobStrategy, TransactionBaseService } from "../interfaces" +import { buildQuery } from "../utils" import { MedusaError } from "medusa-core-utils" +import { EventBusService, StrategyResolverService } from "./index" +import { Request } from "express" type InjectedDependencies = { manager: EntityManager + eventBusService: EventBusService batchJobRepository: typeof BatchJobRepository + strategyResolverService: StrategyResolverService } class BatchJobService extends TransactionBaseService { - protected manager_: EntityManager - protected transactionManager_: EntityManager | undefined - protected readonly batchJobRepository_: typeof BatchJobRepository - static readonly Events = { CREATED: "batch.created", UPDATED: "batch.updated", + PRE_PROCESSED: "batch.pre_processed", + CONFIRMED: "batch.confirmed", + PROCESSING: "batch.processing", + COMPLETED: "batch.completed", CANCELED: "batch.canceled", + FAILED: "batch.failed", } - constructor({ manager, batchJobRepository }: InjectedDependencies) { - super({ manager, batchJobRepository }) + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + protected readonly batchJobRepository_: typeof BatchJobRepository + protected readonly eventBus_: EventBusService + protected readonly strategyResolver_: StrategyResolverService + + protected batchJobStatusMapToProps = new Map< + BatchJobStatus, + { entityColumnName: string; eventType: string } + >([ + [ + BatchJobStatus.PRE_PROCESSED, + { + entityColumnName: "pre_processed_at", + eventType: BatchJobService.Events.PRE_PROCESSED, + }, + ], + [ + BatchJobStatus.CONFIRMED, + { + entityColumnName: "confirmed_at", + eventType: BatchJobService.Events.CONFIRMED, + }, + ], + [ + BatchJobStatus.PROCESSING, + { + entityColumnName: "processing_at", + eventType: BatchJobService.Events.PROCESSING, + }, + ], + [ + BatchJobStatus.COMPLETED, + { + entityColumnName: "completed_at", 
+ eventType: BatchJobService.Events.COMPLETED, + }, + ], + [ + BatchJobStatus.CANCELED, + { + entityColumnName: "canceled_at", + eventType: BatchJobService.Events.CANCELED, + }, + ], + [ + BatchJobStatus.FAILED, + { + entityColumnName: "failed_at", + eventType: BatchJobService.Events.FAILED, + }, + ], + ]) + + constructor({ + manager, + batchJobRepository, + eventBusService, + strategyResolverService, + }: InjectedDependencies) { + super({ + manager, + batchJobRepository, + eventBusService, + strategyResolverService, + }) this.manager_ = manager this.batchJobRepository_ = batchJobRepository + this.eventBus_ = eventBusService + this.strategyResolver_ = strategyResolverService } async retrieve( @@ -41,7 +119,7 @@ class BatchJobService extends TransactionBaseService { this.batchJobRepository_ ) - const query = buildQuery({ id: batchJobId }, config) + const query = buildQuery({ id: batchJobId }, config) const batchJob = await batchJobRepo.findOne(query) if (!batchJob) { @@ -71,6 +149,245 @@ class BatchJobService extends TransactionBaseService { } ) } + + async create(data: BatchJobCreateProps): Promise { + return await this.atomicPhase_(async (manager) => { + const batchJobRepo: BatchJobRepository = manager.getCustomRepository( + this.batchJobRepository_ + ) + + const batchJob = batchJobRepo.create(data) + const result = await batchJobRepo.save(batchJob) + + await this.eventBus_ + .withTransaction(manager) + .emit(BatchJobService.Events.CREATED, { + id: result.id, + }) + + return result + }) + } + + async update( + batchJobOrId: BatchJob | string, + data: BatchJobUpdateProps + ): Promise { + return await this.atomicPhase_(async (manager) => { + const batchJobRepo: BatchJobRepository = manager.getCustomRepository( + this.batchJobRepository_ + ) + + let batchJob = batchJobOrId as BatchJob + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + const { context, result, ...rest } = data + if (context) { + batchJob.context = { ...batchJob.context, ...context } + } + + if (result) { + batchJob.result = { ...batchJob.result, ...result } + } + + Object.keys(rest) + .filter((key) => typeof rest[key] !== `undefined`) + .forEach((key) => { + batchJob[key] = rest[key] + }) + + batchJob = await batchJobRepo.save(batchJob) + + await this.eventBus_ + .withTransaction(manager) + .emit(BatchJobService.Events.UPDATED, { + id: batchJob.id, + }) + + return batchJob + }) + } + + protected async updateStatus( + batchJobOrId: BatchJob | string, + status: BatchJobStatus + ): Promise { + const transactionManager = this.transactionManager_ ?? this.manager_ + let batchJob: BatchJob = batchJobOrId as BatchJob + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + const { entityColumnName, eventType } = + this.batchJobStatusMapToProps.get(status) || {} + + if (!entityColumnName || !eventType) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Unable to update the batch job status from ${batchJob.status} to ${status}. 
The status doesn't exist` + ) + } + + batchJob[entityColumnName] = new Date() + + const batchJobRepo = transactionManager.getCustomRepository( + this.batchJobRepository_ + ) + batchJob = await batchJobRepo.save(batchJob) + batchJob.loadStatus() + + await this.eventBus_.withTransaction(transactionManager).emit(eventType, { + id: batchJob.id, + }) + + return batchJob + } + + async confirm(batchJobOrId: string | BatchJob): Promise { + return await this.atomicPhase_(async () => { + let batchJob: BatchJob = batchJobOrId as BatchJob + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + if (batchJob.status !== BatchJobStatus.PRE_PROCESSED) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Cannot confirm processing for a batch job that is not pre processed" + ) + } + + return await this.updateStatus(batchJob, BatchJobStatus.CONFIRMED) + }) + } + + async complete(batchJobOrId: string | BatchJob): Promise { + return await this.atomicPhase_(async () => { + let batchJob: BatchJob = batchJobOrId as BatchJob + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + if (batchJob.status !== BatchJobStatus.PROCESSING) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Cannot complete a batch job with status "${batchJob.status}". The batch job must be processing` + ) + } + + return await this.updateStatus(batchJob, BatchJobStatus.COMPLETED) + }) + } + + async cancel(batchJobOrId: string | BatchJob): Promise { + return await this.atomicPhase_(async () => { + let batchJob: BatchJob = batchJobOrId as BatchJob + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + if (batchJob.status === BatchJobStatus.COMPLETED) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Cannot cancel completed batch job" + ) + } + + return await this.updateStatus(batchJob, BatchJobStatus.CANCELED) + }) + } + + async setPreProcessingDone( + batchJobOrId: string | BatchJob + ): Promise { + return await this.atomicPhase_(async () => { + let batchJob: BatchJob = batchJobOrId as BatchJob + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + if (batchJob.status === BatchJobStatus.PRE_PROCESSED) { + return batchJob + } + + if (batchJob.status !== BatchJobStatus.CREATED) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Cannot mark a batch job as pre processed if it is not in created status" + ) + } + + batchJob = await this.updateStatus( + batchJobOrId, + BatchJobStatus.PRE_PROCESSED + ) + if (batchJob.dry_run) { + return batchJob + } + + return await this.confirm(batchJob) + }) + } + + async setProcessing( + batchJobOrId: string | BatchJob + ): Promise { + return await this.atomicPhase_(async () => { + let batchJob: BatchJob = batchJobOrId as BatchJob + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + if (batchJob.status !== BatchJobStatus.CONFIRMED) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Cannot mark a batch job as processing if the status is different that confirmed" + ) + } + + return await this.updateStatus(batchJob, BatchJobStatus.PROCESSING) + }) + } + + async setFailed( + batchJobOrId: string | BatchJob, + error?: BatchJobResultError + ): Promise { + return await this.atomicPhase_(async () => { + let batchJob = batchJobOrId as BatchJob + + if (error) { + if (typeof batchJobOrId === "string") { + batchJob = await this.retrieve(batchJobOrId) + } + + 
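        // The error, when given, is appended to the job's accumulated result.errors before the
        // status is flipped to FAILED. A minimal sketch of how a batch strategy might report a
        // failure, assuming BatchJobResultError carries a message and a code:
        //
        //   await batchJobService.setFailed(batchJob, {
        //     message: "row 42 could not be parsed",
        //     code: "unknown",
        //   })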
const result = batchJob.result ?? {} + + await this.update(batchJob, { + result: { + ...result, + errors: [...(result?.errors ?? []), error], + }, + }) + } + + return await this.updateStatus(batchJob, BatchJobStatus.FAILED) + }) + } + + async prepareBatchJobForProcessing( + data: CreateBatchJobInput, + req: Request + ): Promise { + return await this.atomicPhase_(async () => { + const batchStrategy = this.strategyResolver_.resolveBatchJobByType( + data.type + ) + return await batchStrategy.prepareBatchJobForProcessing(data, req) + }) + } } export default BatchJobService diff --git a/packages/medusa/src/services/cart.ts b/packages/medusa/src/services/cart.ts index 55b9fd9261..a9129dc2d7 100644 --- a/packages/medusa/src/services/cart.ts +++ b/packages/medusa/src/services/cart.ts @@ -1,9 +1,8 @@ -import _ from "lodash" +import { isEmpty, isEqual } from "lodash" import { MedusaError, Validator } from "medusa-core-utils" import { DeepPartial, EntityManager, In } from "typeorm" import { TransactionBaseService } from "../interfaces" import { IPriceSelectionStrategy } from "../interfaces/price-selection-strategy" -import { DiscountRuleType } from "../models" import { Address } from "../models/address" import { Cart } from "../models/cart" import { CustomShippingOption } from "../models/custom-shipping-option" @@ -39,6 +38,7 @@ import RegionService from "./region" import ShippingOptionService from "./shipping-option" import TaxProviderService from "./tax-provider" import TotalsService from "./totals" +import { DiscountRuleType } from "../models" type InjectedDependencies = { manager: EntityManager @@ -262,7 +262,7 @@ class CartService extends TransactionBaseService { this.cartRepository_ ) - const query = buildQuery(selector, config) + const query = buildQuery(selector, config) return await cartRepo.find(query) } ) @@ -290,7 +290,7 @@ class CartService extends TransactionBaseService { const { select, relations, totalsToSelect } = this.transformQueryForTotals_(options) - const query = buildQuery( + const query = buildQuery( { id: validatedId }, { ...options, select, relations } ) @@ -546,7 +546,7 @@ class CartService extends TransactionBaseService { if (lineItem.should_merge) { currentItem = cart.items.find((item) => { if (item.should_merge && item.variant_id === lineItem.variant_id) { - return _.isEqual(item.metadata, lineItem.metadata) + return isEqual(item.metadata, lineItem.metadata) } return false }) @@ -706,7 +706,14 @@ class CartService extends TransactionBaseService { cart.shipping_methods.map(async (shippingMethod) => { // if free shipping discount is removed, we adjust the shipping // back to its original amount - shippingMethod.price = shippingMethod.shipping_option.amount + // if shipping option amount is null, we assume the option is calculated + shippingMethod.price = + shippingMethod.shipping_option.amount ?? 
+ (await this.shippingOptionService_.getPrice_( + shippingMethod.shipping_option, + shippingMethod.data, + cart + )) return shippingMethodRepository.save(shippingMethod) }) ) @@ -1065,7 +1072,7 @@ class CartService extends TransactionBaseService { let sawNotShipping = false const newDiscounts = toParse.map((discountToParse) => { switch (discountToParse.rule?.type) { - case "free_shipping": + case DiscountRuleType.FREE_SHIPPING: if (discountToParse.rule.type === rule.type) { return discount } @@ -1193,7 +1200,12 @@ class CartService extends TransactionBaseService { const cart = await this.retrieve(cartId, { select: ["total"], - relations: ["region", "payment_sessions"], + relations: [ + "items", + "items.adjustments", + "region", + "payment_sessions", + ], }) if (typeof cart.total === "undefined") { @@ -1779,7 +1791,7 @@ class CartService extends TransactionBaseService { let updated = { ...shippingAddress } // If the country code of a shipping address is set we need to clear it - if (!_.isEmpty(shippingAddress) && shippingAddress.country_code) { + if (!isEmpty(shippingAddress) && shippingAddress.country_code) { updated = { ...updated, country_code: null, @@ -1931,6 +1943,7 @@ class CartService extends TransactionBaseService { "region.tax_rates", ], }) + const calculationContext = this.totalsService_ .withTransaction(transactionManager) .getCalculationContext(cart) diff --git a/packages/medusa/src/services/claim-item.js b/packages/medusa/src/services/claim-item.ts similarity index 71% rename from packages/medusa/src/services/claim-item.js rename to packages/medusa/src/services/claim-item.ts index 9111b6617a..2129368c91 100644 --- a/packages/medusa/src/services/claim-item.js +++ b/packages/medusa/src/services/claim-item.ts @@ -1,13 +1,32 @@ import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" +import { EntityManager } from "typeorm" +import { TransactionBaseService as BaseService } from "../interfaces" +import { ClaimImage, ClaimItem, ClaimTag } from "../models" +import { ClaimImageRepository } from "../repositories/claim-image" +import { ClaimItemRepository } from "../repositories/claim-item" +import { ClaimTagRepository } from "../repositories/claim-tag" +import { CreateClaimItemInput } from "../types/claim" +import { FindConfig, Selector } from "../types/common" +import { buildQuery, setMetadata } from "../utils" +import EventBusService from "./event-bus" +import LineItemService from "./line-item" -class ClaimItemService extends BaseService { +class ClaimItemService extends BaseService { static Events = { CREATED: "claim_item.created", UPDATED: "claim_item.updated", CANCELED: "claim_item.canceled", } + protected readonly lineItemService_: LineItemService + protected readonly eventBus_: EventBusService + protected readonly claimItemRepository_: typeof ClaimItemRepository + protected readonly claimTagRepository_: typeof ClaimTagRepository + protected readonly claimImageRepository_: typeof ClaimImageRepository + + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + constructor({ manager, claimItemRepository, @@ -16,45 +35,22 @@ class ClaimItemService extends BaseService { lineItemService, eventBusService, }) { - super() + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) - /** @private @constant {EntityManager} */ this.manager_ = manager - - /** @private @constant {ClaimRepository} */ this.claimItemRepository_ = claimItemRepository this.claimTagRepository_ = claimTagRepository 
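    // ClaimItemService now inherits withTransaction from TransactionBaseService instead of
    // re-implementing it, so callers keep the same pattern. A minimal sketch, assuming an
    // outer EntityManager and placeholder ids:
    //
    //   await claimItemService.withTransaction(manager).create({
    //     claim_order_id: "claim_123", // placeholder
    //     item_id: "item_123",         // placeholder
    //     reason: "production_failure",
    //     quantity: 1,
    //   })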
this.claimImageRepository_ = claimImageRepository - - /** @private @constant {LineItemService} */ this.lineItemService_ = lineItemService - - /** @private @constant {EventBus} */ this.eventBus_ = eventBusService } - withTransaction(manager) { - if (!manager) { - return this - } - - const cloned = new ClaimItemService({ - manager, - claimItemRepository: this.claimItemRepository_, - claimTagRepository: this.claimTagRepository_, - claimImageRepository: this.claimImageRepository_, - lineItemService: this.lineItemService_, - eventBusService: this.eventBus_, - }) - - cloned.transactionManager_ = manager - - return cloned - } - - create(data) { - return this.atomicPhase_(async (manager) => { - const ciRepo = manager.getCustomRepository(this.claimItemRepository_) + async create(data: CreateClaimItemInput): Promise { + return await this.atomicPhase_(async (manager) => { + const ciRepo: ClaimItemRepository = manager.getCustomRepository( + this.claimItemRepository_ + ) const { item_id, reason, quantity, tags, images, ...rest } = data @@ -81,7 +77,7 @@ class ClaimItemService extends BaseService { ) } - let tagsToAdd = [] + let tagsToAdd: ClaimTag[] = [] if (tags && tags.length) { const claimTagRepo = manager.getCustomRepository( this.claimTagRepository_ @@ -100,7 +96,7 @@ class ClaimItemService extends BaseService { ) } - let imagesToAdd = [] + let imagesToAdd: ClaimImage[] = [] if (images && images.length) { const claimImgRepo = manager.getCustomRepository( this.claimImageRepository_ @@ -110,7 +106,7 @@ class ClaimItemService extends BaseService { }) } - const created = ciRepo.create({ + const toCreate: Partial = { ...rest, variant_id: lineItem.variant_id, tags: tagsToAdd, @@ -118,7 +114,8 @@ class ClaimItemService extends BaseService { item_id, reason, quantity, - }) + } + const created = ciRepo.create(toCreate) const result = await ciRepo.save(created) @@ -132,7 +129,7 @@ class ClaimItemService extends BaseService { }) } - update(id, data) { + async update(id, data): Promise { return this.atomicPhase_(async (manager) => { const ciRepo = manager.getCustomRepository(this.claimItemRepository_) const item = await this.retrieve(id, { relations: ["images", "tags"] }) @@ -148,7 +145,7 @@ class ClaimItemService extends BaseService { } if (metadata) { - item.metadata = this.setMetadata_(item, metadata) + item.metadata = setMetadata(item, metadata) } if (tags) { @@ -209,19 +206,21 @@ class ClaimItemService extends BaseService { }) } - async cancel(id) {} - /** * @param {Object} selector - the query object for find * @param {Object} config - the config object for find * @return {Promise} the result of the find operation */ async list( - selector, - config = { skip: 0, take: 50, order: { created_at: "DESC" } } - ) { + selector: Selector, + config: FindConfig = { + skip: 0, + take: 50, + order: { created_at: "DESC" }, + } + ): Promise { const ciRepo = this.manager_.getCustomRepository(this.claimItemRepository_) - const query = this.buildQuery_(selector, config) + const query = buildQuery(selector, config) return ciRepo.find(query) } @@ -231,13 +230,14 @@ class ClaimItemService extends BaseService { * @param {Object} config - configuration for the find operation * @return {Promise} the ClaimItem */ - async retrieve(id, config = {}) { + async retrieve( + id: string, + config: FindConfig = {} + ): Promise { const claimItemRepo = this.manager_.getCustomRepository( this.claimItemRepository_ ) - const validatedId = this.validateId_(id) - - const query = this.buildQuery_({ id: validatedId }, config) + const query = 
buildQuery({ id }, config) const item = await claimItemRepo.findOne(query) if (!item) { @@ -249,30 +249,6 @@ class ClaimItemService extends BaseService { return item } - - /** - * Dedicated method to delete metadata for an order. - * @param {string} orderId - the order to delete metadata from. - * @param {string} key - key for metadata field - * @return {Promise} resolves to the updated result. - */ - async deleteMetadata(orderId, key) { - const validatedId = this.validateId_(orderId) - - if (typeof key !== "string") { - throw new MedusaError( - MedusaError.Types.INVALID_ARGUMENT, - "Key type is invalid. Metadata keys must be strings" - ) - } - - const keyPath = `metadata.${key}` - return this.orderModel_ - .updateOne({ _id: validatedId }, { $unset: { [keyPath]: "" } }) - .catch((err) => { - throw new MedusaError(MedusaError.Types.DB_ERROR, err.message) - }) - } } export default ClaimItemService diff --git a/packages/medusa/src/services/claim.js b/packages/medusa/src/services/claim.js deleted file mode 100644 index 232d3f0e04..0000000000 --- a/packages/medusa/src/services/claim.js +++ /dev/null @@ -1,753 +0,0 @@ -import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" - -class ClaimService extends BaseService { - static Events = { - CREATED: "claim.created", - UPDATED: "claim.updated", - CANCELED: "claim.canceled", - FULFILLMENT_CREATED: "claim.fulfillment_created", - SHIPMENT_CREATED: "claim.shipment_created", - REFUND_PROCESSED: "claim.refund_processed", - } - - constructor({ - manager, - addressRepository, - claimItemService, - claimRepository, - eventBusService, - fulfillmentProviderService, - fulfillmentService, - inventoryService, - lineItemService, - paymentProviderService, - regionService, - returnService, - shippingOptionService, - taxProviderService, - totalsService, - }) { - super() - - /** @private @constant {EntityManager} */ - this.manager_ = manager - - this.addressRepo_ = addressRepository - this.claimItemService_ = claimItemService - this.claimRepository_ = claimRepository - this.eventBus_ = eventBusService - this.fulfillmentProviderService_ = fulfillmentProviderService - this.fulfillmentService_ = fulfillmentService - this.inventoryService_ = inventoryService - this.lineItemService_ = lineItemService - this.paymentProviderService_ = paymentProviderService - this.regionService_ = regionService - this.returnService_ = returnService - this.shippingOptionService_ = shippingOptionService - this.taxProviderService_ = taxProviderService - this.totalsService_ = totalsService - } - - withTransaction(manager) { - if (!manager) { - return this - } - - const cloned = new ClaimService({ - manager, - addressRepository: this.addressRepo_, - claimItemService: this.claimItemService_, - claimRepository: this.claimRepository_, - eventBusService: this.eventBus_, - fulfillmentProviderService: this.fulfillmentProviderService_, - fulfillmentService: this.fulfillmentService_, - inventoryService: this.inventoryService_, - lineItemService: this.lineItemService_, - paymentProviderService: this.paymentProviderService_, - regionService: this.regionService_, - returnService: this.returnService_, - shippingOptionService: this.shippingOptionService_, - totalsService: this.totalsService_, - taxProviderService: this.taxProviderService_, - }) - - cloned.transactionManager_ = manager - - return cloned - } - - update(id, data) { - return this.atomicPhase_(async (manager) => { - const claimRepo = manager.getCustomRepository(this.claimRepository_) - const claim = await 
this.retrieve(id, { relations: ["shipping_methods"] }) - - if (claim.canceled_at) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Canceled claim cannot be updated" - ) - } - - const { claim_items, shipping_methods, metadata, no_notification } = data - - if (metadata) { - claim.metadata = this.setMetadata_(claim, metadata) - await claimRepo.save(claim) - } - - if (shipping_methods) { - for (const m of claim.shipping_methods) { - await this.shippingOptionService_ - .withTransaction(manager) - .updateShippingMethod(m.id, { - claim_order_id: null, - }) - } - - for (const method of shipping_methods) { - if (method.id) { - await this.shippingOptionService_ - .withTransaction(manager) - .updateShippingMethod(method.id, { - claim_order_id: claim.id, - }) - } else { - await this.shippingOptionService_ - .withTransaction(manager) - .createShippingMethod(method.option_id, method.data, { - claim_order_id: claim.id, - price: method.price, - }) - } - } - } - - if (no_notification !== undefined) { - claim.no_notification = no_notification - await claimRepo.save(claim) - } - - if (claim_items) { - for (const i of claim_items) { - if (i.id) { - await this.claimItemService_ - .withTransaction(manager) - .update(i.id, i) - } - } - } - - await this.eventBus_ - .withTransaction(manager) - .emit(ClaimService.Events.UPDATED, { - id: claim.id, - no_notification: claim.no_notification, - }) - - return claim - }) - } - - /** - * Creates a Claim on an Order. Claims consists of items that are claimed and - * optionally items to be sent as replacement for the claimed items. The - * shipping address that the new items will be shipped to - * @param {Object} data - the object containing all data required to create a claim - * @return {Object} created claim - */ - create(data) { - return this.atomicPhase_(async (manager) => { - const claimRepo = manager.getCustomRepository(this.claimRepository_) - - const { - type, - claim_items, - order, - return_shipping, - additional_items, - shipping_methods, - refund_amount, - shipping_address, - shipping_address_id, - no_notification, - ...rest - } = data - - for (const item of claim_items) { - const line = await this.lineItemService_.retrieve(item.item_id, { - relations: ["order", "swap", "claim_order", "tax_lines"], - }) - - if ( - line.order?.canceled_at || - line.swap?.canceled_at || - line.claim_order?.canceled_at - ) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Cannot create a claim on a canceled item.` - ) - } - } - - let addressId = shipping_address_id || order.shipping_address_id - if (shipping_address) { - const addressRepo = manager.getCustomRepository(this.addressRepo_) - const created = addressRepo.create(shipping_address) - const saved = await addressRepo.save(created) - addressId = saved.id - } - - if (type !== "refund" && type !== "replace") { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Claim type must be one of "refund" or "replace".` - ) - } - - if (type === "replace" && !additional_items?.length) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Claims with type "replace" must have at least one additional item.` - ) - } - - if (!claim_items?.length) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Claims must have at least one claim item.` - ) - } - - if (refund_amount && type !== "refund") { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Claim has type "${type}" but must be type "refund" to have a refund_amount.` - ) - } - - let toRefund = refund_amount - if (type === 
"refund" && typeof refund_amount === "undefined") { - const lines = claim_items.map((ci) => { - const allOrderItems = order.items - - if (order.swaps?.length) { - for (const swap of order.swaps) { - swap.additional_items.forEach((it) => { - if ( - it.shipped_quantity || - it.shipped_quantity === it.fulfilled_quantity - ) { - allOrderItems.push(it) - } - }) - } - } - - if (order.claims?.length) { - for (const claim of order.claims) { - claim.additional_items.forEach((it) => { - if ( - it.shipped_quantity || - it.shipped_quantity === it.fulfilled_quantity - ) { - allOrderItems.push(it) - } - }) - } - } - - const orderItem = allOrderItems.find((oi) => oi.id === ci.item_id) - return { - ...orderItem, - quantity: ci.quantity, - } - }) - toRefund = await this.totalsService_.getRefundTotal(order, lines) - } - - let newItems = [] - if (typeof additional_items !== "undefined") { - for (const item of additional_items) { - await this.inventoryService_ - .withTransaction(manager) - .confirmInventory(item.variant_id, item.quantity) - } - - newItems = await Promise.all( - additional_items.map((i) => - this.lineItemService_ - .withTransaction(manager) - .generate(i.variant_id, order.region_id, i.quantity) - ) - ) - - for (const newItem of newItems) { - await this.inventoryService_ - .withTransaction(manager) - .adjustInventory(newItem.variant_id, -newItem.quantity) - } - } - - const evaluatedNoNotification = - no_notification !== undefined ? no_notification : order.no_notification - - const created = claimRepo.create({ - shipping_address_id: addressId, - payment_status: type === "refund" ? "not_refunded" : "na", - ...rest, - refund_amount: toRefund, - type, - additional_items: newItems, - order_id: order.id, - no_notification: evaluatedNoNotification, - }) - - const result = await claimRepo.save(created) - - if (result.additional_items && result.additional_items.length) { - const calcContext = this.totalsService_.getCalculationContext(order) - const lineItems = await this.lineItemService_ - .withTransaction(manager) - .list({ - id: result.additional_items.map((i) => i.id), - }) - await this.taxProviderService_ - .withTransaction(manager) - .createTaxLines(lineItems, calcContext) - } - - if (shipping_methods) { - for (const method of shipping_methods) { - if (method.id) { - await this.shippingOptionService_ - .withTransaction(manager) - .updateShippingMethod(method.id, { - claim_order_id: result.id, - }) - } else { - await this.shippingOptionService_ - .withTransaction(manager) - .createShippingMethod(method.option_id, method.data, { - claim_order_id: result.id, - price: method.price, - }) - } - } - } - - for (const ci of claim_items) { - await this.claimItemService_.withTransaction(manager).create({ - ...ci, - claim_order_id: result.id, - }) - } - - if (return_shipping) { - await this.returnService_.withTransaction(manager).create({ - order_id: order.id, - claim_order_id: result.id, - items: claim_items.map((ci) => ({ - item_id: ci.item_id, - quantity: ci.quantity, - metadata: ci.metadata, - })), - shipping_method: return_shipping, - no_notification: evaluatedNoNotification, - }) - } - - await this.eventBus_ - .withTransaction(manager) - .emit(ClaimService.Events.CREATED, { - id: result.id, - no_notification: result.no_notification, - }) - - return result - }) - } - /** - * @param {string} id - the object containing all data required to create a claim - * @param {Object} config - config object - * @param {Object | undefined} config.metadata - config metadata - * @param {boolean|undefined} 
config.no_notification - config no notification - * @return {Claim} created claim - */ - createFulfillment( - id, - config = { - metadata: {}, - no_notification: undefined, - } - ) { - const { metadata, no_notification } = config - - return this.atomicPhase_(async (manager) => { - const claim = await this.retrieve(id, { - relations: [ - "additional_items", - "additional_items.tax_lines", - "shipping_methods", - "shipping_methods.tax_lines", - "shipping_address", - "order", - "order.billing_address", - "order.discounts", - "order.discounts.rule", - "order.payments", - ], - }) - - if (claim.canceled_at) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Canceled claim cannot be fulfilled" - ) - } - - const order = claim.order - - if ( - claim.fulfillment_status !== "not_fulfilled" && - claim.fulfillment_status !== "canceled" - ) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "The claim has already been fulfilled." - ) - } - - if (claim.type !== "replace") { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - `Claims with the type "${claim.type}" can not be fulfilled.` - ) - } - - if (!claim.shipping_methods?.length) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Cannot fulfill a claim without a shipping method." - ) - } - - const evaluatedNoNotification = - no_notification !== undefined ? no_notification : claim.no_notification - - const fulfillments = await this.fulfillmentService_ - .withTransaction(manager) - .createFulfillment( - { - ...claim, - email: order.email, - payments: order.payments, - discounts: order.discounts, - currency_code: order.currency_code, - tax_rate: order.tax_rate, - region_id: order.region_id, - display_id: order.display_id, - billing_address: order.billing_address, - items: claim.additional_items, - shipping_methods: claim.shipping_methods, - is_claim: true, - no_notification: evaluatedNoNotification, - }, - claim.additional_items.map((i) => ({ - item_id: i.id, - quantity: i.quantity, - })), - { claim_order_id: id, metadata } - ) - - let successfullyFulfilled = [] - for (const f of fulfillments) { - successfullyFulfilled = successfullyFulfilled.concat(f.items) - } - - claim.fulfillment_status = "fulfilled" - - for (const item of claim.additional_items) { - const fulfillmentItem = successfullyFulfilled.find( - (f) => item.id === f.item_id - ) - - if (fulfillmentItem) { - const fulfilledQuantity = - (item.fulfilled_quantity || 0) + fulfillmentItem.quantity - - // Update the fulfilled quantity - await this.lineItemService_.withTransaction(manager).update(item.id, { - fulfilled_quantity: fulfilledQuantity, - }) - - if (item.quantity !== fulfilledQuantity) { - claim.fulfillment_status = "requires_action" - } - } else { - if (item.quantity !== item.fulfilled_quantity) { - claim.fulfillment_status = "requires_action" - } - } - } - - const claimRepo = manager.getCustomRepository(this.claimRepository_) - const result = await claimRepo.save(claim) - - for (const fulfillment of fulfillments) { - await this.eventBus_ - .withTransaction(manager) - .emit(ClaimService.Events.FULFILLMENT_CREATED, { - id: id, - fulfillment_id: fulfillment.id, - no_notification: claim.no_notification, - }) - } - - return result - }) - } - - async cancelFulfillment(fulfillmentId) { - return this.atomicPhase_(async (manager) => { - const canceled = await this.fulfillmentService_ - .withTransaction(manager) - .cancelFulfillment(fulfillmentId) - - if (!canceled.claim_order_id) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - `Fufillment 
not related to a claim` - ) - } - - const claim = await this.retrieve(canceled.claim_order_id) - - claim.fulfillment_status = "canceled" - - const claimRepo = manager.getCustomRepository(this.claimRepository_) - const updated = await claimRepo.save(claim) - return updated - }) - } - - async processRefund(id) { - return this.atomicPhase_(async (manager) => { - const claim = await this.retrieve(id, { - relations: ["order", "order.payments"], - }) - - if (claim.canceled_at) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Canceled claim cannot be processed" - ) - } - - if (claim.type !== "refund") { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - `Claim must have type "refund" to create a refund.` - ) - } - - if (claim.refund_amount) { - await this.paymentProviderService_ - .withTransaction(manager) - .refundPayment(claim.order.payments, claim.refund_amount, "claim") - } - - claim.payment_status = "refunded" - - const claimRepo = manager.getCustomRepository(this.claimRepository_) - const result = await claimRepo.save(claim) - - await this.eventBus_ - .withTransaction(manager) - .emit(ClaimService.Events.REFUND_PROCESSED, { - id, - no_notification: result.no_notification, - }) - - return result - }) - } - - async createShipment( - id, - fulfillmentId, - trackingLinks, - config = { - metadata: {}, - no_notification: undefined, - } - ) { - const { metadata, no_notification } = config - - return this.atomicPhase_(async (manager) => { - const claim = await this.retrieve(id, { - relations: ["additional_items"], - }) - - if (claim.canceled_at) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Canceled claim cannot be fulfilled as shipped" - ) - } - const evaluatedNoNotification = - no_notification !== undefined ? no_notification : claim.no_notification - - const shipment = await this.fulfillmentService_ - .withTransaction(manager) - .createShipment(fulfillmentId, trackingLinks, { - metadata, - no_notification: evaluatedNoNotification, - }) - - claim.fulfillment_status = "shipped" - - for (const i of claim.additional_items) { - const shipped = shipment.items.find((si) => si.item_id === i.id) - if (shipped) { - const shippedQty = (i.shipped_quantity || 0) + shipped.quantity - await this.lineItemService_.withTransaction(manager).update(i.id, { - shipped_quantity: shippedQty, - }) - - if (shippedQty !== i.quantity) { - claim.fulfillment_status = "partially_shipped" - } - } else { - if (i.shipped_quantity !== i.quantity) { - claim.fulfillment_status = "partially_shipped" - } - } - } - - const claimRepo = manager.getCustomRepository(this.claimRepository_) - const result = await claimRepo.save(claim) - - await this.eventBus_ - .withTransaction(manager) - .emit(ClaimService.Events.SHIPMENT_CREATED, { - id, - fulfillment_id: shipment.id, - no_notification: evaluatedNoNotification, - }) - - return result - }) - } - - async cancel(id) { - return this.atomicPhase_(async (manager) => { - const claim = await this.retrieve(id, { - relations: ["return_order", "fulfillments", "order", "order.refunds"], - }) - if (claim.refund_amount) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Claim with a refund cannot be canceled" - ) - } - - if (claim.fulfillments) { - for (const f of claim.fulfillments) { - if (!f.canceled_at) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "All fulfillments must be canceled before the claim can be canceled" - ) - } - } - } - - if (claim.return_order && claim.return_order.status !== "canceled") { - throw new MedusaError( - 
MedusaError.Types.NOT_ALLOWED, - "Return must be canceled before the claim can be canceled" - ) - } - - claim.fulfillment_status = "canceled" - claim.canceled_at = new Date() - - const claimRepo = manager.getCustomRepository(this.claimRepository_) - const result = await claimRepo.save(claim) - - await this.eventBus_ - .withTransaction(manager) - .emit(ClaimService.Events.CANCELED, { - id: result.id, - no_notification: result.no_notification, - }) - - return result - }) - } - - /** - * @param {Object} selector - the query object for find - * @param {Object} config - the config object containing query settings - * @return {Promise} the result of the find operation - */ - async list( - selector, - config = { skip: 0, take: 50, order: { created_at: "DESC" } } - ) { - const claimRepo = this.manager_.getCustomRepository(this.claimRepository_) - const query = this.buildQuery_(selector, config) - return claimRepo.find(query) - } - - /** - * Gets an order by id. - * @param {string} claimId - id of order to retrieve - * @param {Object} config - the config object containing query settings - * @return {Promise} the order document - */ - async retrieve(claimId, config = {}) { - const claimRepo = this.manager_.getCustomRepository(this.claimRepository_) - const validatedId = this.validateId_(claimId) - - const query = this.buildQuery_({ id: validatedId }, config) - const claim = await claimRepo.findOne(query) - - if (!claim) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Claim with ${claimId} was not found` - ) - } - - return claim - } - - /** - * Dedicated method to delete metadata for an order. - * @param {string} orderId - the order to delete metadata from. - * @param {string} key - key for metadata field - * @return {Promise} resolves to the updated result. - */ - async deleteMetadata(orderId, key) { - const validatedId = this.validateId_(orderId) - - if (typeof key !== "string") { - throw new MedusaError( - MedusaError.Types.INVALID_ARGUMENT, - "Key type is invalid. 
Metadata keys must be strings" - ) - } - - const keyPath = `metadata.${key}` - return this.orderModel_ - .updateOne({ _id: validatedId }, { $unset: { [keyPath]: "" } }) - .catch((err) => { - throw new MedusaError(MedusaError.Types.DB_ERROR, err.message) - }) - } -} - -export default ClaimService diff --git a/packages/medusa/src/services/claim.ts b/packages/medusa/src/services/claim.ts new file mode 100644 index 0000000000..a241a3421a --- /dev/null +++ b/packages/medusa/src/services/claim.ts @@ -0,0 +1,858 @@ +import ClaimItemService from "./claim-item" +import EventBusService from "./event-bus" +import FulfillmentProviderService from "./fulfillment-provider" +import FulfillmentService from "./fulfillment" +import InventoryService from "./inventory" +import LineItemService from "./line-item" +import PaymentProviderService from "./payment-provider" +import RegionService from "./region" +import ReturnService from "./return" +import ShippingOptionService from "./shipping-option" +import TaxProviderService from "./tax-provider" +import TotalsService from "./totals" +import { AddressRepository } from "../repositories/address" +import { + ClaimFulfillmentStatus, + ClaimOrder, + ClaimPaymentStatus, + ClaimType, + FulfillmentItem, + LineItem, +} from "../models" +import { ClaimRepository } from "../repositories/claim" +import { DeepPartial, EntityManager } from "typeorm" +import { LineItemRepository } from "../repositories/line-item" +import { MedusaError } from "medusa-core-utils" +import { ShippingMethodRepository } from "../repositories/shipping-method" +import { TransactionBaseService } from "../interfaces" +import { buildQuery, setMetadata } from "../utils" +import { FindConfig } from "../types/common" +import { CreateClaimInput, UpdateClaimInput } from "../types/claim" + +type InjectedDependencies = { + manager: EntityManager + addressRepository: typeof AddressRepository + shippingMethodRepository: typeof ShippingMethodRepository + lineItemRepository: typeof LineItemRepository + claimRepository: typeof ClaimRepository + claimItemService: ClaimItemService + eventBusService: EventBusService + fulfillmentProviderService: FulfillmentProviderService + fulfillmentService: FulfillmentService + inventoryService: InventoryService + lineItemService: LineItemService + paymentProviderService: PaymentProviderService + regionService: RegionService + returnService: ReturnService + shippingOptionService: ShippingOptionService + taxProviderService: TaxProviderService + totalsService: TotalsService +} + +export default class ClaimService extends TransactionBaseService< + ClaimService, + InjectedDependencies +> { + static readonly Events = { + CREATED: "claim.created", + UPDATED: "claim.updated", + CANCELED: "claim.canceled", + FULFILLMENT_CREATED: "claim.fulfillment_created", + SHIPMENT_CREATED: "claim.shipment_created", + REFUND_PROCESSED: "claim.refund_processed", + } + + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + protected readonly addressRepository_: typeof AddressRepository + protected readonly claimRepository_: typeof ClaimRepository + protected readonly shippingMethodRepository_: typeof ShippingMethodRepository + protected readonly lineItemRepository_: typeof LineItemRepository + protected readonly claimItemService_: ClaimItemService + protected readonly eventBus_: EventBusService + protected readonly fulfillmentProviderService_: FulfillmentProviderService + protected readonly fulfillmentService_: FulfillmentService + protected readonly 
inventoryService_: InventoryService + protected readonly lineItemService_: LineItemService + protected readonly paymentProviderService_: PaymentProviderService + protected readonly regionService_: RegionService + protected readonly returnService_: ReturnService + protected readonly shippingOptionService_: ShippingOptionService + protected readonly taxProviderService_: TaxProviderService + protected readonly totalsService_: TotalsService + + constructor({ + manager, + addressRepository, + claimRepository, + shippingMethodRepository, + lineItemRepository, + claimItemService, + eventBusService, + fulfillmentProviderService, + fulfillmentService, + inventoryService, + lineItemService, + paymentProviderService, + regionService, + returnService, + shippingOptionService, + taxProviderService, + totalsService, + }: InjectedDependencies) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) + + this.manager_ = manager + + this.addressRepository_ = addressRepository + this.claimRepository_ = claimRepository + this.shippingMethodRepository_ = shippingMethodRepository + this.lineItemRepository_ = lineItemRepository + this.claimItemService_ = claimItemService + this.eventBus_ = eventBusService + this.fulfillmentProviderService_ = fulfillmentProviderService + this.fulfillmentService_ = fulfillmentService + this.inventoryService_ = inventoryService + this.lineItemService_ = lineItemService + this.paymentProviderService_ = paymentProviderService + this.regionService_ = regionService + this.returnService_ = returnService + this.shippingOptionService_ = shippingOptionService + this.taxProviderService_ = taxProviderService + this.totalsService_ = totalsService + } + + async update(id: string, data: UpdateClaimInput): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + const claim = await this.retrieve(id, { + relations: ["shipping_methods"], + }) + + if (claim.canceled_at) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Canceled claim cannot be updated" + ) + } + + const { claim_items, shipping_methods, metadata, no_notification } = + data + + if (metadata) { + claim.metadata = setMetadata(claim, metadata) + await claimRepo.save(claim) + } + + if (shipping_methods) { + for (const m of claim.shipping_methods) { + await this.shippingOptionService_ + .withTransaction(transactionManager) + .updateShippingMethod(m.id, { + claim_order_id: null, + }) + } + + for (const method of shipping_methods) { + if (method.id) { + await this.shippingOptionService_ + .withTransaction(transactionManager) + .updateShippingMethod(method.id, { + claim_order_id: claim.id, + }) + } else { + await this.shippingOptionService_ + .withTransaction(transactionManager) + .createShippingMethod( + method.option_id as string, + (method as any).data, + { + claim_order_id: claim.id, + price: method.price, + } + ) + } + } + } + + if (no_notification !== undefined) { + claim.no_notification = no_notification + await claimRepo.save(claim) + } + + if (claim_items) { + for (const i of claim_items) { + if (i.id) { + await this.claimItemService_ + .withTransaction(transactionManager) + .update(i.id, i) + } + } + } + + await this.eventBus_ + .withTransaction(transactionManager) + .emit(ClaimService.Events.UPDATED, { + id: claim.id, + no_notification: claim.no_notification, + }) + + return claim + } + ) + } + + /** + * Creates a Claim on an Order. 
Claims consists of items that are claimed and + * optionally items to be sent as replacement for the claimed items. The + * shipping address that the new items will be shipped to + * @param data - the object containing all data required to create a claim + * @return created claim + */ + async create(data: CreateClaimInput): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + + const { + type, + claim_items, + order, + return_shipping, + additional_items, + shipping_methods, + refund_amount, + shipping_address, + shipping_address_id, + no_notification, + ...rest + } = data + + for (const item of claim_items) { + const line = await this.lineItemService_ + .withTransaction(transactionManager) + .retrieve(item.item_id, { + relations: ["order", "swap", "claim_order", "tax_lines"], + }) + + if ( + line.order?.canceled_at || + line.swap?.canceled_at || + line.claim_order?.canceled_at + ) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Cannot create a claim on a canceled item.` + ) + } + } + + let addressId = shipping_address_id || order.shipping_address_id + if (shipping_address) { + const addressRepo = transactionManager.getCustomRepository( + this.addressRepository_ + ) + const created = addressRepo.create(shipping_address) + const saved = await addressRepo.save(created) + addressId = saved.id + } + + if (type !== ClaimType.REFUND && type !== ClaimType.REPLACE) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Claim type must be one of "refund" or "replace".` + ) + } + + if (type === ClaimType.REPLACE && !additional_items?.length) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Claims with type "replace" must have at least one additional item.` + ) + } + + if (!claim_items?.length) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Claims must have at least one claim item.` + ) + } + + if (refund_amount && type !== ClaimType.REFUND) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Claim has type "${type}" but must be type "refund" to have a refund_amount.` + ) + } + + let toRefund = refund_amount + if (type === ClaimType.REFUND && typeof refund_amount === "undefined") { + const lines = claim_items.map((ci) => { + const allOrderItems = order.items + + if (order.swaps?.length) { + for (const swap of order.swaps) { + swap.additional_items.forEach((it) => { + if ( + it.shipped_quantity || + it.shipped_quantity === it.fulfilled_quantity + ) { + allOrderItems.push(it) + } + }) + } + } + + if (order.claims?.length) { + for (const claim of order.claims) { + claim.additional_items.forEach((it) => { + if ( + it.shipped_quantity || + it.shipped_quantity === it.fulfilled_quantity + ) { + allOrderItems.push(it) + } + }) + } + } + + const orderItem = allOrderItems.find((oi) => oi.id === ci.item_id) + return { + ...orderItem, + quantity: ci.quantity, + } + }) + toRefund = await this.totalsService_.getRefundTotal( + order, + lines as LineItem[] + ) + } + + let newItems: LineItem[] = [] + if (typeof additional_items !== "undefined") { + for (const item of additional_items) { + await this.inventoryService_ + .withTransaction(transactionManager) + .confirmInventory(item.variant_id, item.quantity) + } + + newItems = await Promise.all( + additional_items.map((i) => + this.lineItemService_ + .withTransaction(transactionManager) + .generate(i.variant_id, order.region_id, i.quantity) + ) + ) + + for (const newItem of 
newItems) { + await this.inventoryService_ + .withTransaction(transactionManager) + .adjustInventory(newItem.variant_id, -newItem.quantity) + } + } + + const evaluatedNoNotification = + no_notification !== undefined + ? no_notification + : order.no_notification + + const created = claimRepo.create({ + shipping_address_id: addressId, + payment_status: type === ClaimType.REFUND ? "not_refunded" : "na", + refund_amount: toRefund, + type, + additional_items: newItems, + order_id: order.id, + no_notification: evaluatedNoNotification, + ...rest, + } as DeepPartial) + + const result: ClaimOrder = await claimRepo.save(created) + + if (result.additional_items && result.additional_items.length) { + const calcContext = this.totalsService_.getCalculationContext(order) + const lineItems = await this.lineItemService_ + .withTransaction(transactionManager) + .list({ + id: result.additional_items.map((i) => i.id), + }) + await this.taxProviderService_ + .withTransaction(transactionManager) + .createTaxLines(lineItems, calcContext) + } + + if (shipping_methods) { + for (const method of shipping_methods) { + if (method.id) { + await this.shippingOptionService_ + .withTransaction(transactionManager) + .updateShippingMethod(method.id, { + claim_order_id: result.id, + }) + } else { + await this.shippingOptionService_ + .withTransaction(transactionManager) + .createShippingMethod( + method.option_id as string, + (method as any).data, + { + claim_order_id: result.id, + price: method.price, + } + ) + } + } + } + + for (const ci of claim_items) { + await this.claimItemService_ + .withTransaction(transactionManager) + .create({ + ...ci, + claim_order_id: result.id, + }) + } + + if (return_shipping) { + await this.returnService_.withTransaction(transactionManager).create({ + order_id: order.id, + claim_order_id: result.id, + items: claim_items.map((ci) => ({ + item_id: ci.item_id, + quantity: ci.quantity, + metadata: (ci as any).metadata, + })), + shipping_method: return_shipping, + no_notification: evaluatedNoNotification, + }) + } + + await this.eventBus_ + .withTransaction(transactionManager) + .emit(ClaimService.Events.CREATED, { + id: result.id, + no_notification: result.no_notification, + }) + + return result + } + ) + } + + /** + * @param id - the object containing all data required to create a claim + * @param config - config object + * @param config.metadata - config metadata + * @param config.no_notification - config no notification + * @return created claim + */ + async createFulfillment( + id: string, + config: { + metadata?: Record + no_notification?: boolean + } = { + metadata: {}, + } + ): Promise { + const { metadata, no_notification } = config + + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claim = await this.retrieve(id, { + relations: [ + "additional_items", + "additional_items.tax_lines", + "shipping_methods", + "shipping_methods.tax_lines", + "shipping_address", + "order", + "order.billing_address", + "order.discounts", + "order.discounts.rule", + "order.payments", + ], + }) + + if (claim.canceled_at) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Canceled claim cannot be fulfilled" + ) + } + + const order = claim.order + + if ( + claim.fulfillment_status !== "not_fulfilled" && + claim.fulfillment_status !== "canceled" + ) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "The claim has already been fulfilled." 
+ ) + } + + if (claim.type !== "replace") { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + `Claims with the type "${claim.type}" cannot be fulfilled.` + ) + } + + if (!claim.shipping_methods?.length) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Cannot fulfill a claim without a shipping method." + ) + } + + const evaluatedNoNotification = + no_notification !== undefined + ? no_notification + : claim.no_notification + + const fulfillments = await this.fulfillmentService_ + .withTransaction(transactionManager) + .createFulfillment( + { + ...claim, + email: order.email, + payments: order.payments, + discounts: order.discounts, + currency_code: order.currency_code, + tax_rate: order.tax_rate, + region_id: order.region_id, + display_id: order.display_id, + billing_address: order.billing_address, + items: claim.additional_items, + shipping_methods: claim.shipping_methods, + is_claim: true, + no_notification: evaluatedNoNotification, + }, + claim.additional_items.map((i) => ({ + item_id: i.id, + quantity: i.quantity, + })), + { claim_order_id: id, metadata } + ) + + let successfullyFulfilledItems: FulfillmentItem[] = [] + for (const fulfillment of fulfillments) { + successfullyFulfilledItems = successfullyFulfilledItems.concat( + fulfillment.items + ) + } + + claim.fulfillment_status = ClaimFulfillmentStatus.FULFILLED + + for (const item of claim.additional_items) { + const fulfillmentItem = successfullyFulfilledItems.find( + (successfullyFulfilledItem) => { + return successfullyFulfilledItem.item_id === item.id + } + ) + + if (fulfillmentItem) { + const fulfilledQuantity = + (item.fulfilled_quantity || 0) + fulfillmentItem.quantity + + // Update the fulfilled quantity + await this.lineItemService_ + .withTransaction(transactionManager) + .update(item.id, { + fulfilled_quantity: fulfilledQuantity, + }) + + if (item.quantity !== fulfilledQuantity) { + claim.fulfillment_status = ClaimFulfillmentStatus.REQUIRES_ACTION + } + } else if (item.quantity !== item.fulfilled_quantity) { + claim.fulfillment_status = ClaimFulfillmentStatus.REQUIRES_ACTION + } + } + + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + const claimOrder = await claimRepo.save(claim) + + for (const fulfillment of fulfillments) { + await this.eventBus_ + .withTransaction(transactionManager) + .emit(ClaimService.Events.FULFILLMENT_CREATED, { + id: id, + fulfillment_id: fulfillment.id, + no_notification: claim.no_notification, + }) + } + + return claimOrder + } + ) + } + + async cancelFulfillment(fulfillmentId: string): Promise<ClaimOrder> { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const canceled = await this.fulfillmentService_ + .withTransaction(transactionManager) + .cancelFulfillment(fulfillmentId) + + if (!canceled.claim_order_id) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + `Fulfillment not related to a claim` + ) + } + + const claim = await this.retrieve(canceled.claim_order_id) + + claim.fulfillment_status = ClaimFulfillmentStatus.CANCELED + + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + return claimRepo.save(claim) + } + ) + } + + async processRefund(id: string): Promise<ClaimOrder> { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claim = await this.retrieve(id, { + relations: ["order", "order.payments"], + }) + + if (claim.canceled_at) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Canceled claim cannot be processed" +
) + } + + if (claim.type !== "refund") { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + `Claim must have type "refund" to create a refund.` + ) + } + + if (claim.refund_amount) { + await this.paymentProviderService_ + .withTransaction(transactionManager) + .refundPayment(claim.order.payments, claim.refund_amount, "claim") + } + + claim.payment_status = ClaimPaymentStatus.REFUNDED + + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + const claimOrder = await claimRepo.save(claim) + + await this.eventBus_ + .withTransaction(transactionManager) + .emit(ClaimService.Events.REFUND_PROCESSED, { + id, + no_notification: claimOrder.no_notification, + }) + + return claimOrder + } + ) + } + + async createShipment( + id: string, + fulfillmentId: string, + trackingLinks: { tracking_number: string }[] = [], + config = { + metadata: {}, + no_notification: undefined, + } + ): Promise { + const { metadata, no_notification } = config + + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claim = await this.retrieve(id, { + relations: ["additional_items"], + }) + + if (claim.canceled_at) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Canceled claim cannot be fulfilled as shipped" + ) + } + const evaluatedNoNotification = + no_notification !== undefined + ? no_notification + : claim.no_notification + + const shipment = await this.fulfillmentService_ + .withTransaction(transactionManager) + .createShipment(fulfillmentId, trackingLinks, { + metadata, + no_notification: evaluatedNoNotification, + }) + + claim.fulfillment_status = ClaimFulfillmentStatus.SHIPPED + + for (const additionalItem of claim.additional_items) { + const shipped = shipment.items.find( + (si) => si.item_id === additionalItem.id + ) + if (shipped) { + const shippedQty = + (additionalItem.shipped_quantity || 0) + shipped.quantity + await this.lineItemService_ + .withTransaction(transactionManager) + .update(additionalItem.id, { + shipped_quantity: shippedQty, + }) + + if (shippedQty !== additionalItem.quantity) { + claim.fulfillment_status = + ClaimFulfillmentStatus.PARTIALLY_SHIPPED + } + } else if ( + additionalItem.shipped_quantity !== additionalItem.quantity + ) { + claim.fulfillment_status = ClaimFulfillmentStatus.PARTIALLY_SHIPPED + } + } + + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + const claimOrder = await claimRepo.save(claim) + + await this.eventBus_ + .withTransaction(transactionManager) + .emit(ClaimService.Events.SHIPMENT_CREATED, { + id, + fulfillment_id: shipment.id, + no_notification: evaluatedNoNotification, + }) + + return claimOrder + } + ) + } + + async cancel(id: string): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claim = await this.retrieve(id, { + relations: ["return_order", "fulfillments", "order", "order.refunds"], + }) + if (claim.refund_amount) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Claim with a refund cannot be canceled" + ) + } + + if (claim.fulfillments) { + for (const f of claim.fulfillments) { + if (!f.canceled_at) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "All fulfillments must be canceled before the claim can be canceled" + ) + } + } + } + + if (claim.return_order && claim.return_order.status !== "canceled") { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Return must be canceled before the claim can be canceled" + ) + } + + 
claim.fulfillment_status = ClaimFulfillmentStatus.CANCELED + claim.canceled_at = new Date() + + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + const claimOrder = await claimRepo.save(claim) + + await this.eventBus_ + .withTransaction(transactionManager) + .emit(ClaimService.Events.CANCELED, { + id: claimOrder.id, + no_notification: claimOrder.no_notification, + }) + + return claimOrder + } + ) + } + + /** + * @param selector - the query object for find + * @param config - the config object containing query settings + * @return the result of the find operation + */ + async list( + selector, + config: FindConfig = { + skip: 0, + take: 50, + order: { created_at: "DESC" }, + } + ): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + const query = buildQuery(selector, config) + return await claimRepo.find(query) + } + ) + } + + /** + * Gets an order by id. + * @param id - id of the claim order to retrieve + * @param config - the config object containing query settings + * @return the order document + */ + async retrieve( + id: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const claimRepo = transactionManager.getCustomRepository( + this.claimRepository_ + ) + + const query = buildQuery({ id }, config) + const claim = await claimRepo.findOne(query) + + if (!claim) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Claim with ${id} was not found` + ) + } + + return claim + } + ) + } +} diff --git a/packages/medusa/src/services/csv-parser.ts b/packages/medusa/src/services/csv-parser.ts new file mode 100644 index 0000000000..239390b7ba --- /dev/null +++ b/packages/medusa/src/services/csv-parser.ts @@ -0,0 +1,202 @@ +import { AwilixContainer } from "awilix" +import { difference } from "lodash" +import Papa, { ParseConfig } from "papaparse" +import { AbstractParser } from "../interfaces/abstract-parser" +import { CsvParserContext, CsvSchema } from "../interfaces/csv-parser" + +const DEFAULT_PARSE_OPTIONS = { + dynamicTyping: true, + header: true, +} + +class CsvParser< + TSchema extends CsvSchema = CsvSchema, + TParserResult = unknown, + TOutputResult = unknown +> extends AbstractParser { + protected readonly $$delimiter: string = ";" + + constructor( + protected readonly container: AwilixContainer, + schema: TSchema, + delimiter?: string + ) { + super(schema) + if (delimiter) { + this.$$delimiter = delimiter + } + } + + public async parse( + readableStream: NodeJS.ReadableStream, + options: ParseConfig = DEFAULT_PARSE_OPTIONS + ): Promise { + const csvStream = Papa.parse(Papa.NODE_STREAM_INPUT, options) + + const parsedContent: TParserResult[] = [] + readableStream.pipe(csvStream) + for await (const chunk of csvStream) { + parsedContent.push(chunk) + } + + return parsedContent + } + + async buildData(data: TParserResult[]): Promise { + const validatedData = [] as TOutputResult[] + for (let i = 0; i < data.length; i++) { + const builtLine = await this._buildLine(data[i], i + 1) + validatedData.push(builtLine) + } + return validatedData + } + + private async _buildLine( + line: TParserResult, + lineNumber: number + ): Promise { + let outputTuple = {} as TOutputResult + const columnMap = this.buildColumnMap_(this.$$schema.columns) + + const tupleKeys = Object.keys(line) + + /** + * map which keeps track of the columns processed + * used 
to detect any missing columns which are present in the schema but not in the line + */ + const processedColumns = {} + for (const tupleKey of tupleKeys) { + const column = this.resolveColumn_(tupleKey, columnMap) + + /** + * if the tupleKey does not correspond to any column defined in the schema + */ + if (!column) { + throw new Error( + `Unable to treat column ${tupleKey} from the csv file. No target column found in the provided schema` + ) + } + + processedColumns[column.name] = true + + /** + * if the value corresponding to the tupleKey is empty and the column is required in the schema + */ + if (!line[tupleKey] && column.required) { + throw new Error( + `No value found for target column "${column.name}" in line ${lineNumber} of the given csv file` + ) + } + + const context = { + line, + lineNumber, + column: column.name, + tupleKey, + } + + outputTuple = this.resolveTuple_(outputTuple, column, context) + } + + /** + * missing columns = columns defined in the schema - columns present in the line + */ + const missingColumns = difference( + Object.keys(columnMap), + Object.keys(processedColumns) + ) + + if (missingColumns.length > 0) { + throw new Error( + `Missing column(s) ${formatMissingColumns( + missingColumns + )} from the given csv file` + ) + } + + /** + * Runs the validation defined in the schema columns + */ + for (const column of this.$$schema.columns) { + const context = { + line, + lineNumber, + column: column.name, + } + + if (column.validator) { + await column.validator.validate(outputTuple, context) + } + } + + return outputTuple + } + + private buildColumnMap_( + columns: TSchema["columns"] + ): Record { + return columns.reduce((map, column) => { + if (typeof column.name === "string") { + map[column.name] = column + } + return map + }, {}) + } + + private resolveColumn_( + tupleKey: string, + columnMap: Record + ): TSchema["columns"][number] | undefined { + if (columnMap[tupleKey]) { + return columnMap[tupleKey] + } + + const matchedColumn = this.$$schema.columns.find((column) => + "match" in column && + typeof column.match === "object" && + column.match instanceof RegExp + ? column.match.test(tupleKey) + : false + ) + + return matchedColumn + } + + private resolveTuple_( + tuple: TOutputResult, + column: TSchema["columns"][number], + context: CsvParserContext & { tupleKey: string } + ): TOutputResult { + const outputTuple = { ...tuple } + const { tupleKey, ...csvContext } = context + const { line } = csvContext + + let resolvedKey = tupleKey + /** + * if match is provided, then we should call the reducer if it's defined + * otherwise, before using the mapTo property, we should make sure match was not provided + */ + if ("match" in column && column.reducer) { + return column.reducer(outputTuple, tupleKey, line[tupleKey], csvContext) + } else if (!("match" in column) && "mapTo" in column && column.mapTo) { + resolvedKey = column.mapTo + } + + const resolvedValue = column.transform + ? column.transform(line[tupleKey], csvContext) + : line[tupleKey] + + outputTuple[resolvedKey] = resolvedValue + + return outputTuple + } +} + +const formatMissingColumns = (list: string[]): string => + list.reduce( + (text, curr, i, array) => + text + (i < array.length - 1 ? 
`"${curr}", ` : `"${curr}"`), + "" + ) + +export default CsvParser diff --git a/packages/medusa/src/services/custom-shipping-option.js b/packages/medusa/src/services/custom-shipping-option.js deleted file mode 100644 index e45c86331e..0000000000 --- a/packages/medusa/src/services/custom-shipping-option.js +++ /dev/null @@ -1,111 +0,0 @@ -import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" - -class CustomShippingOptionService extends BaseService { - constructor({ manager, customShippingOptionRepository }) { - super() - - /** @private @const {EntityManager} */ - this.manager_ = manager - - /** @private @const {CustomShippingOptionRepository} */ - this.customShippingOptionRepository_ = customShippingOptionRepository - } - - /** - * Sets the service's manager to a given transaction manager - * @param {EntityManager} manager - the manager to use - * @return {CustomShippingOptionService} a cloned CustomShippingOption service - */ - withTransaction(manager) { - if (!manager) { - return this - } - - const cloned = new CustomShippingOptionService({ - manager, - customShippingOptionRepository: this.customShippingOptionRepository_, - }) - - cloned.transactionManager_ = manager - return cloned - } - - /** - * Retrieves a specific shipping option. - * @param {string} id - the id of the custom shipping option to retrieve. - * @param {*} config - any options needed to query for the result. - * @return {Promise} which resolves to the requested custom shipping option. - */ - async retrieve(id, config = {}) { - const customShippingOptionRepo = this.manager_.getCustomRepository( - this.customShippingOptionRepository_ - ) - - const validatedId = this.validateId_(id) - const query = this.buildQuery_({ id: validatedId }, config) - - const customShippingOption = await customShippingOptionRepo.findOne(query) - - if (!customShippingOption) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Custom shipping option with id: ${id} was not found.` - ) - } - - return customShippingOption - } - - /** Fetches all custom shipping options related to the given selector - * @param {Object} selector - the query object for find - * @param {Object} config - the configuration used to find the objects. contains relations, skip, and take. 
- * @return {Promise} custom shipping options matching the query - */ - async list( - selector, - config = { - skip: 0, - take: 50, - relations: [], - } - ) { - const customShippingOptionRepo = this.manager_.getCustomRepository( - this.customShippingOptionRepository_ - ) - - const query = this.buildQuery_(selector, config) - - return customShippingOptionRepo.find(query) - } - - /** - * Creates a custom shipping option associated with a given author - * @param {object} data - the custom shipping option to create - * @param {*} config - any configurations if needed, including meta data - * @return {Promise} resolves to the creation result - */ - async create(data, config = { metadata: {} }) { - const { metadata } = config - - const { cart_id, shipping_option_id, price } = data - - return this.atomicPhase_(async (manager) => { - const customShippingOptionRepo = manager.getCustomRepository( - this.customShippingOptionRepository_ - ) - - const customShippingOption = await customShippingOptionRepo.create({ - cart_id, - shipping_option_id, - price, - metadata, - }) - const result = await customShippingOptionRepo.save(customShippingOption) - - return result - }) - } -} - -export default CustomShippingOptionService diff --git a/packages/medusa/src/services/custom-shipping-option.ts b/packages/medusa/src/services/custom-shipping-option.ts new file mode 100644 index 0000000000..3c437b62e3 --- /dev/null +++ b/packages/medusa/src/services/custom-shipping-option.ts @@ -0,0 +1,111 @@ +import { MedusaError } from "medusa-core-utils" +import { EntityManager } from "typeorm" +import { TransactionBaseService } from "../interfaces" +import { CustomShippingOption } from "../models" +import { CustomShippingOptionRepository } from "../repositories/custom-shipping-option" +import { FindConfig, Selector } from "../types/common" +import { CreateCustomShippingOptionInput } from "../types/shipping-options" +import { buildQuery } from "../utils" + +type InjectedDependencies = { + manager: EntityManager + customShippingOptionRepository: typeof CustomShippingOptionRepository +} +class CustomShippingOptionService extends TransactionBaseService { + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + protected customShippingOptionRepository_: typeof CustomShippingOptionRepository + + constructor({ + manager, + customShippingOptionRepository, + }: InjectedDependencies) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) + + this.manager_ = manager + this.customShippingOptionRepository_ = customShippingOptionRepository + } + + /** + * Retrieves a specific shipping option. + * @param id - the id of the custom shipping option to retrieve. + * @param config - any options needed to query for the result. + * @return the requested custom shipping option. 
+ */ + async retrieve( + id: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_(async (manager) => { + const customShippingOptionRepo = manager.getCustomRepository( + this.customShippingOptionRepository_ + ) + + const query = buildQuery({ id }, config) + + const customShippingOption = await customShippingOptionRepo.findOne(query) + + if (!customShippingOption) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Custom shipping option with id: ${id} was not found.` + ) + } + + return customShippingOption + }) + } + + /** Fetches all custom shipping options based on the given selector + * @param selector - the query object for find + * @param config - the configuration used to find the objects. contains relations, skip, and take. + * @return custom shipping options matching the query + */ + async list( + selector: Selector, + config: FindConfig = { + skip: 0, + take: 50, + relations: [], + } + ): Promise { + return await this.atomicPhase_(async (manager) => { + const customShippingOptionRepo = manager.getCustomRepository( + this.customShippingOptionRepository_ + ) + + const query = buildQuery(selector, config) + + return await customShippingOptionRepo.find(query) + }) + } + + /** + * Creates a custom shipping option + * @param data - the custom shipping option to create + * @param config - any configurations if needed, including meta data + * @return resolves to the creation result + */ + async create( + data: CreateCustomShippingOptionInput + ): Promise { + const { cart_id, shipping_option_id, price, metadata } = data + + return await this.atomicPhase_(async (manager) => { + const customShippingOptionRepo = manager.getCustomRepository( + this.customShippingOptionRepository_ + ) + + const customShippingOption = await customShippingOptionRepo.create({ + cart_id, + shipping_option_id, + price, + metadata, + }) + return await customShippingOptionRepo.save(customShippingOption) + }) + } +} + +export default CustomShippingOptionService diff --git a/packages/medusa/src/services/customer.js b/packages/medusa/src/services/customer.ts similarity index 50% rename from packages/medusa/src/services/customer.js rename to packages/medusa/src/services/customer.ts index c8531d762e..aa5991c15d 100644 --- a/packages/medusa/src/services/customer.js +++ b/packages/medusa/src/services/customer.ts @@ -1,16 +1,36 @@ import jwt from "jsonwebtoken" import _ from "lodash" import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" import Scrypt from "scrypt-kdf" -import { Brackets, ILike } from "typeorm" +import { DeepPartial, EntityManager } from "typeorm" +import { StorePostCustomersCustomerAddressesAddressReq } from "../api" +import { TransactionBaseService } from "../interfaces" +import { Address, Customer, CustomerGroup } from "../models" +import { AddressRepository } from "../repositories/address" +import { CustomerRepository } from "../repositories/customer" +import { AddressCreatePayload, FindConfig, Selector } from "../types/common" +import { CreateCustomerInput, UpdateCustomerInput } from "../types/customers" +import { buildQuery, setMetadata } from "../utils" import { formatException } from "../utils/exception-formatter" +import EventBusService from "./event-bus" +type InjectedDependencies = { + manager: EntityManager + eventBusService: EventBusService + customerRepository: typeof CustomerRepository + addressRepository: typeof AddressRepository +} /** * Provides layer to manipulate customers. 
- * @implements {BaseService} */ -class CustomerService extends BaseService { +class CustomerService extends TransactionBaseService { + protected readonly customerRepository_: typeof CustomerRepository + protected readonly addressRepository_: typeof AddressRepository + protected readonly eventBusService_: EventBusService + + protected readonly manager_: EntityManager + protected readonly transactionManager_: EntityManager | undefined + static Events = { PASSWORD_RESET: "customer.password_reset", CREATED: "customer.created", @@ -22,39 +42,17 @@ class CustomerService extends BaseService { customerRepository, eventBusService, addressRepository, - }) { - super() + }: InjectedDependencies) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) - /** @private @const {EntityManager} */ this.manager_ = manager - /** @private @const {CustomerRepository} */ this.customerRepository_ = customerRepository - - /** @private @const {EventBus} */ - this.eventBus_ = eventBusService - - /** @private @const {AddressRepository} */ + this.eventBusService_ = eventBusService this.addressRepository_ = addressRepository } - withTransaction(transactionManager) { - if (!transactionManager) { - return this - } - - const cloned = new CustomerService({ - manager: transactionManager, - customerRepository: this.customerRepository_, - eventBusService: this.eventBus_, - addressRepository: this.addressRepository_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - /** * Generate a JSON Web token, that will be sent to a customer, that wishes to * reset password. @@ -64,38 +62,42 @@ class CustomerService extends BaseService { * @param {string} customerId - the customer to reset the password for * @return {string} the generated JSON web token */ - async generateResetPasswordToken(customerId) { - const customer = await this.retrieve(customerId, { - select: [ - "id", - "has_account", - "password_hash", - "email", - "first_name", - "last_name", - ], - }) + async generateResetPasswordToken(customerId: string): Promise { + return await this.atomicPhase_(async (manager) => { + const customer = await this.retrieve(customerId, { + select: [ + "id", + "has_account", + "password_hash", + "email", + "first_name", + "last_name", + ], + }) - if (!customer.has_account) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "You must have an account to reset the password. Create an account first" - ) - } + if (!customer.has_account) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "You must have an account to reset the password. 
Create an account first" + ) + } - const secret = customer.password_hash - const expiry = Math.floor(Date.now() / 1000) + 60 * 15 // 15 minutes ahead - const payload = { customer_id: customer.id, exp: expiry } - const token = jwt.sign(payload, secret) - // Notify subscribers - this.eventBus_.emit(CustomerService.Events.PASSWORD_RESET, { - id: customerId, - email: customer.email, - first_name: customer.first_name, - last_name: customer.last_name, - token, + const secret = customer.password_hash + const expiry = Math.floor(Date.now() / 1000) + 60 * 15 // 15 minutes ahead + const payload = { customer_id: customer.id, exp: expiry } + const token = jwt.sign(payload, secret) + // Notify subscribers + this.eventBusService_ + .withTransaction(manager) + .emit(CustomerService.Events.PASSWORD_RESET, { + id: customerId, + email: customer.email, + first_name: customer.first_name, + last_name: customer.last_name, + token, + }) + return token }) - return token } /** @@ -103,40 +105,24 @@ class CustomerService extends BaseService { * @param {Object} config - the config object containing query settings * @return {Promise} the result of the find operation */ - async list(selector = {}, config = { relations: [], skip: 0, take: 50 }) { - const customerRepo = this.manager_.getCustomRepository( - this.customerRepository_ - ) + async list( + selector: Selector & { q?: string } = {}, + config: FindConfig = { relations: [], skip: 0, take: 50 } + ): Promise { + return await this.atomicPhase_(async (manager) => { + const customerRepo = manager.getCustomRepository(this.customerRepository_) - let q - if ("q" in selector) { - q = selector.q - delete selector.q - } - - const query = this.buildQuery_(selector, config) - - if (q) { - const where = query.where - - delete where.email - delete where.first_name - delete where.last_name - - query.where = (qb) => { - qb.where(where) - - qb.andWhere( - new Brackets((qb) => { - qb.where({ email: ILike(`%${q}%`) }) - .orWhere({ first_name: ILike(`%${q}%`) }) - .orWhere({ last_name: ILike(`%${q}%`) }) - }) - ) + let q + if ("q" in selector) { + q = selector.q + delete selector.q } - } - return customerRepo.find(query) + const query = buildQuery, Customer>(selector, config) + + const [customers] = await customerRepo.listAndCount(query, q) + return customers + }) } /** @@ -145,76 +131,58 @@ class CustomerService extends BaseService { * @return {Promise} the result of the find operation */ async listAndCount( - selector, - config = { relations: [], skip: 0, take: 50, order: { created_at: "DESC" } } - ) { - const customerRepo = this.manager_.getCustomRepository( - this.customerRepository_ - ) - - let q - if ("q" in selector) { - q = selector.q - delete selector.q + selector: Selector & { q?: string }, + config: FindConfig = { + relations: [], + skip: 0, + take: 50, + order: { created_at: "DESC" }, } + ): Promise<[Customer[], number]> { + return await this.atomicPhase_(async (manager) => { + const customerRepo = manager.getCustomRepository(this.customerRepository_) - const query = this.buildQuery_(selector, config) - - const groups = query.where.groups - delete query.where.groups - - if (q) { - const where = query.where - - delete where.email - delete where.first_name - delete where.last_name - - query.where = (qb) => { - qb.where(where) - qb.andWhere( - new Brackets((qb) => { - qb.where({ email: ILike(`%${q}%`) }) - .orWhere({ first_name: ILike(`%${q}%`) }) - .orWhere({ last_name: ILike(`%${q}%`) }) - }) - ) + let q + if ("q" in selector) { + q = selector.q + delete selector.q } - } - 
return await customerRepo.listAndCount(query, groups) + const query = buildQuery, Customer>(selector, config) + + return await customerRepo.listAndCount(query, q) + }) } /** * Return the total number of documents in database * @return {Promise} the result of the count operation */ - count() { - const customerRepo = this.manager_.getCustomRepository( - this.customerRepository_ - ) - return customerRepo.count({}) + async count(): Promise { + return await this.atomicPhase_(async (manager) => { + const customerRepo = manager.getCustomRepository(this.customerRepository_) + return await customerRepo.count({}) + }) } - /** - * Gets a customer by id. - * @param {string} customerId - the id of the customer to get. - * @param {Object} config - the config object containing query settings - * @return {Promise} the customer document. - */ - async retrieve(customerId, config = {}) { - const customerRepo = this.manager_.getCustomRepository( - this.customerRepository_ - ) + private async retrieve_( + selector: Selector, + config: FindConfig = {} + ): Promise { + const manager = this.transactionManager_ ?? this.manager_ - const validatedId = this.validateId_(customerId) - const query = this.buildQuery_({ id: validatedId }, config) + const customerRepo = manager.getCustomRepository(this.customerRepository_) + const query = buildQuery(selector, config) const customer = await customerRepo.findOne(query) + if (!customer) { + const selectorConstraints = Object.entries(selector) + .map((key, value) => `${key}: ${value}`) + .join(", ") throw new MedusaError( MedusaError.Types.NOT_FOUND, - `Customer with ${customerId} was not found` + `Customer with ${selectorConstraints} was not found` ) } @@ -227,22 +195,13 @@ class CustomerService extends BaseService { * @param {Object} config - the config object containing query settings * @return {Promise} the customer document. */ - async retrieveByEmail(email, config = {}) { - const customerRepo = this.manager_.getCustomRepository( - this.customerRepository_ - ) - - const query = this.buildQuery_({ email: email.toLowerCase() }, config) - const customer = await customerRepo.findOne(query) - - if (!customer) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Customer with email ${email} was not found` - ) - } - - return customer + async retrieveByEmail( + email: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_(async () => { + return await this.retrieve_({ email: email.toLowerCase() }, config) + }) } /** @@ -251,22 +210,28 @@ class CustomerService extends BaseService { * @param {Object} config - the config object containing query settings * @return {Promise} the customer document. */ - async retrieveByPhone(phone, config = {}) { - const customerRepo = this.manager_.getCustomRepository( - this.customerRepository_ - ) + async retrieveByPhone( + phone: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_(async () => { + return await this.retrieve_({ phone }, config) + }) + } - const query = this.buildQuery_({ phone }, config) - const customer = await customerRepo.findOne(query) - - if (!customer) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Customer with phone ${phone} was not found` - ) - } - - return customer + /** + * Gets a customer by id. + * @param {string} customerId - the id of the customer to get. + * @param {Object} config - the config object containing query settings + * @return {Promise} the customer document. 
+ */ + async retrieve( + customerId: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_(async () => { + return this.retrieve_({ id: customerId }, config) + }) } /** @@ -274,7 +239,7 @@ class CustomerService extends BaseService { * @param {string} password - the value to hash * @return {Promise} hashed password */ - async hashPassword_(password) { + async hashPassword_(password: string): Promise { const buf = await Scrypt.kdf(password, { logN: 1, r: 1, p: 1 }) return buf.toString("base64") } @@ -287,17 +252,16 @@ class CustomerService extends BaseService { * @param {object} customer - the customer to create * @return {Promise} the result of create */ - async create(customer) { - return this.atomicPhase_(async (manager) => { + async create(customer: CreateCustomerInput): Promise { + return await this.atomicPhase_(async (manager) => { const customerRepository = manager.getCustomRepository( this.customerRepository_ ) + customer.email = customer.email.toLowerCase() const { email, password } = customer - const existing = await this.retrieveByEmail(email).catch( - (err) => undefined - ) + const existing = await this.retrieveByEmail(email).catch(() => undefined) if (existing && existing.has_account) { throw new MedusaError( @@ -314,7 +278,7 @@ class CustomerService extends BaseService { const toUpdate = { ...existing, ...customer } const updated = await customerRepository.save(toUpdate) - await this.eventBus_ + await this.eventBusService_ .withTransaction(manager) .emit(CustomerService.Events.UPDATED, updated) return updated @@ -326,9 +290,9 @@ class CustomerService extends BaseService { delete customer.password } - const created = await customerRepository.create(customer) + const created = customerRepository.create(customer) const result = await customerRepository.save(created) - await this.eventBus_ + await this.eventBusService_ .withTransaction(manager) .emit(CustomerService.Events.CREATED, result) return result @@ -343,13 +307,15 @@ class CustomerService extends BaseService { * @param {object} update - an object with the update values. * @return {Promise} resolves to the update result. 
*/ - async update(customerId, update) { - return this.atomicPhase_( + async update( + customerId: string, + update: UpdateCustomerInput + ): Promise { + return await this.atomicPhase_( async (manager) => { const customerRepository = manager.getCustomRepository( this.customerRepository_ ) - const addrRepo = manager.getCustomRepository(this.addressRepository_) const customer = await this.retrieve(customerId) @@ -363,13 +329,13 @@ class CustomerService extends BaseService { } = update if (metadata) { - customer.metadata = this.setMetadata_(customer, metadata) + customer.metadata = setMetadata(customer, metadata) } if ("billing_address_id" in update || "billing_address" in update) { const address = billing_address_id || billing_address if (typeof address !== "undefined") { - await this.updateBillingAddress_(customer, address, addrRepo) + await this.updateBillingAddress_(customer, address) } } @@ -382,12 +348,12 @@ class CustomerService extends BaseService { } if (groups) { - customer.groups = groups + customer.groups = groups as CustomerGroup[] } const updated = await customerRepository.save(customer) - await this.eventBus_ + await this.eventBusService_ .withTransaction(manager) .emit(CustomerService.Events.UPDATED, updated) return updated @@ -405,60 +371,88 @@ class CustomerService extends BaseService { * @param {Object} addrRepo - address repository * @return {Promise} the result of the update operation */ - async updateBillingAddress_(customer, addressOrId, addrRepo) { - if (addressOrId === null) { - customer.billing_address_id = null - return - } + async updateBillingAddress_( + customer: Customer, + addressOrId: string | DeepPartial
| undefined + ): Promise { + return await this.atomicPhase_(async (manager) => { + const addrRepo: AddressRepository = manager.getCustomRepository( + this.addressRepository_ + ) - if (typeof addressOrId === `string`) { - addressOrId = await addrRepo.findOne({ - where: { id: addressOrId }, - }) - } - - addressOrId.country_code = addressOrId.country_code.toLowerCase() - - if (addressOrId.id) { - customer.billing_address_id = addressOrId.id - } else { - if (customer.billing_address_id) { - const addr = await addrRepo.findOne({ - where: { id: customer.billing_address_id }, - }) - - await addrRepo.save({ ...addr, ...addressOrId }) - } else { - const created = addrRepo.create({ - ...addressOrId, - }) - const saved = await addrRepo.save(created) - customer.billing_address = saved + if (addressOrId === null || addressOrId === undefined) { + customer.billing_address_id = null + return } - } + + let address: DeepPartial
+ if (typeof addressOrId === `string`) { + const fetchedAddress = await addrRepo.findOne({ + where: { id: addressOrId }, + }) + + if (!fetchedAddress) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Address with id ${addressOrId} was not found` + ) + } + + address = fetchedAddress + } else { + address = addressOrId + } + + address.country_code = address.country_code?.toLowerCase() + + if (typeof address?.id !== "undefined") { + customer.billing_address_id = address.id + } else { + if (customer.billing_address_id) { + const addr = await addrRepo.findOne({ + where: { id: customer.billing_address_id }, + }) + + await addrRepo.save({ ...addr, ...address }) + } else { + const created = addrRepo.create(address) + const saved: Address = await addrRepo.save(created) + customer.billing_address = saved + } + } + }) } - async updateAddress(customerId, addressId, address) { - return this.atomicPhase_(async (manager) => { + async updateAddress( + customerId: string, + addressId: string, + address: StorePostCustomersCustomerAddressesAddressReq + ): Promise
{ + return await this.atomicPhase_(async (manager) => { const addressRepo = manager.getCustomRepository(this.addressRepository_) - address.country_code = address.country_code.toLowerCase() + address.country_code = address.country_code?.toLowerCase() const toUpdate = await addressRepo.findOne({ where: { id: addressId, customer_id: customerId }, }) + if (!toUpdate) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "Could not find address for customer" + ) + } for (const [key, value] of Object.entries(address)) { toUpdate[key] = value } - const result = addressRepo.save(toUpdate) - return result + return addressRepo.save(toUpdate) }) } - async removeAddress(customerId, addressId) { - return this.atomicPhase_(async (manager) => { + async removeAddress(customerId: string, addressId: string): Promise { + return await this.atomicPhase_(async (manager) => { const addressRepo = manager.getCustomRepository(this.addressRepository_) // Should not fail, if user does not exist, since delete is idempotent @@ -467,17 +461,18 @@ class CustomerService extends BaseService { }) if (!address) { - return Promise.resolve() + return } await addressRepo.softRemove(address) - - return Promise.resolve() }) } - async addAddress(customerId, address) { - return this.atomicPhase_(async (manager) => { + async addAddress( + customerId: string, + address: AddressCreatePayload + ): Promise { + return await this.atomicPhase_(async (manager) => { const addressRepository = manager.getCustomRepository( this.addressRepository_ ) @@ -490,7 +485,8 @@ class CustomerService extends BaseService { const shouldAdd = !customer.shipping_addresses.find( (a) => - a.country_code.toLowerCase() === address.country_code.toLowerCase() && + a.country_code?.toLowerCase() === + address.country_code.toLowerCase() && a.address_1 === address.address_1 && a.address_2 === address.address_2 && a.city === address.city && @@ -503,8 +499,8 @@ class CustomerService extends BaseService { if (shouldAdd) { const created = await addressRepository.create({ - customer_id: customerId, ...address, + customer_id: customerId, }) const result = await addressRepository.save(created) return result @@ -520,37 +516,20 @@ class CustomerService extends BaseService { * castable as an ObjectId * @return {Promise} the result of the delete operation. */ - async delete(customerId) { - return this.atomicPhase_(async (manager) => { + async delete(customerId: string): Promise { + return await this.atomicPhase_(async (manager) => { const customerRepo = manager.getCustomRepository(this.customerRepository_) // Should not fail, if user does not exist, since delete is idempotent const customer = await customerRepo.findOne({ where: { id: customerId } }) if (!customer) { - return Promise.resolve() + return } - await customerRepo.softRemove(customer) - - return Promise.resolve() + return await customerRepo.softRemove(customer) }) } - - /** - * Decorates a customer. - * @param {Customer} customer - the cart to decorate. - * @param {string[]} fields - the fields to include. - * @param {string[]} expandFields - fields to expand. - * @return {Customer} return the decorated customer. 
- */ - async decorate(customer, fields = [], expandFields = []) { - const requiredFields = ["_id", "metadata"] - const decorated = _.pick(customer, fields.concat(requiredFields)) - - const final = await this.runDecorators_(decorated) - return final - } } export default CustomerService diff --git a/packages/medusa/src/services/discount-condition.ts b/packages/medusa/src/services/discount-condition.ts index 49d2eab603..b6217e892b 100644 --- a/packages/medusa/src/services/discount-condition.ts +++ b/packages/medusa/src/services/discount-condition.ts @@ -1,47 +1,51 @@ import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" import { EntityManager } from "typeorm" import { EventBusService } from "." -import { DiscountCondition, DiscountConditionType } from "../models" +import { + DiscountCondition, + DiscountConditionCustomerGroup, + DiscountConditionProduct, + DiscountConditionProductCollection, + DiscountConditionProductTag, + DiscountConditionProductType, + DiscountConditionType, +} from "../models" import { DiscountConditionRepository } from "../repositories/discount-condition" import { FindConfig } from "../types/common" import { UpsertDiscountConditionInput } from "../types/discount" import { PostgresError } from "../utils/exception-formatter" +import { TransactionBaseService } from "../interfaces" +import { buildQuery } from "../utils" + +type InjectedDependencies = { + manager: EntityManager + discountConditionRepository: typeof DiscountConditionRepository + eventBusService: EventBusService +} /** * Provides layer to manipulate discount conditions. * @implements {BaseService} */ -class DiscountConditionService extends BaseService { - protected readonly manager_: EntityManager +class DiscountConditionService extends TransactionBaseService { protected readonly discountConditionRepository_: typeof DiscountConditionRepository protected readonly eventBus_: EventBusService - protected transactionManager_?: EntityManager - constructor({ manager, discountConditionRepository, eventBusService }) { - super() + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + constructor({ + manager, + discountConditionRepository, + eventBusService, + }: InjectedDependencies) { + super({ manager, discountConditionRepository, eventBusService }) this.manager_ = manager this.discountConditionRepository_ = discountConditionRepository this.eventBus_ = eventBusService } - withTransaction(transactionManager: EntityManager): DiscountConditionService { - if (!transactionManager) { - return this - } - - const cloned = new DiscountConditionService({ - manager: transactionManager, - discountConditionRepository: this.discountConditionRepository_, - eventBusService: this.eventBus_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - async retrieve( conditionId: string, config?: FindConfig @@ -51,7 +55,7 @@ class DiscountConditionService extends BaseService { this.discountConditionRepository_ ) - const query = this.buildQuery_({ id: conditionId }, config) + const query = buildQuery({ id: conditionId }, config) const condition = await conditionRepo.findOne(query) @@ -103,7 +107,17 @@ class DiscountConditionService extends BaseService { } } - async upsertCondition(data: UpsertDiscountConditionInput): Promise { + async upsertCondition( + data: UpsertDiscountConditionInput + ): Promise< + ( + | DiscountConditionProduct + | DiscountConditionProductType + | DiscountConditionProductCollection + | 
DiscountConditionProductTag + | DiscountConditionCustomerGroup + )[] + > { let resolvedConditionType return await this.atomicPhase_( @@ -164,7 +178,7 @@ class DiscountConditionService extends BaseService { ) } - async delete(discountConditionId: string): Promise { + async delete(discountConditionId: string): Promise { return await this.atomicPhase_(async (manager: EntityManager) => { const conditionRepo = manager.getCustomRepository( this.discountConditionRepository_ diff --git a/packages/medusa/src/services/discount.ts b/packages/medusa/src/services/discount.ts index 61906f66ee..0fde9be4fe 100644 --- a/packages/medusa/src/services/discount.ts +++ b/packages/medusa/src/services/discount.ts @@ -1,7 +1,6 @@ import { parse, toSeconds } from "iso8601-duration" import { isEmpty, omit } from "lodash" import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" import { Brackets, DeepPartial, @@ -15,19 +14,17 @@ import { RegionService, TotalsService, } from "." -import { Cart } from "../models/cart" -import { Discount } from "../models/discount" +import { Cart, Discount, LineItem, Region } from "../models" import { AllocationType as DiscountAllocation, DiscountRule, DiscountRuleType, } from "../models/discount-rule" -import { LineItem } from "../models/line-item" import { DiscountRepository } from "../repositories/discount" import { DiscountConditionRepository } from "../repositories/discount-condition" import { DiscountRuleRepository } from "../repositories/discount-rule" import { GiftCardRepository } from "../repositories/gift-card" -import { FindConfig } from "../types/common" +import { FindConfig, Selector } from "../types/common" import { CreateDiscountInput, CreateDiscountRuleInput, @@ -39,22 +36,28 @@ import { import { isFuture, isPast } from "../utils/date-helpers" import { formatException } from "../utils/exception-formatter" import DiscountConditionService from "./discount-condition" +import CustomerService from "./customer" +import { TransactionBaseService } from "../interfaces" +import { buildQuery, setMetadata } from "../utils" /** * Provides layer to manipulate discounts. 
* @implements {BaseService} */ -class DiscountService extends BaseService { - private manager_: EntityManager - private discountRepository_: typeof DiscountRepository - private discountRuleRepository_: typeof DiscountRuleRepository - private giftCardRepository_: typeof GiftCardRepository - private discountConditionRepository_: typeof DiscountConditionRepository - private discountConditionService_: DiscountConditionService - private totalsService_: TotalsService - private productService_: ProductService - private regionService_: RegionService - private eventBus_: EventBusService +class DiscountService extends TransactionBaseService { + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + protected readonly discountRepository_: typeof DiscountRepository + protected readonly customerService_: CustomerService + protected readonly discountRuleRepository_: typeof DiscountRuleRepository + protected readonly giftCardRepository_: typeof GiftCardRepository + protected readonly discountConditionRepository_: typeof DiscountConditionRepository + protected readonly discountConditionService_: DiscountConditionService + protected readonly totalsService_: TotalsService + protected readonly productService_: ProductService + protected readonly regionService_: RegionService + protected readonly eventBus_: EventBusService constructor({ manager, @@ -69,67 +72,22 @@ class DiscountService extends BaseService { customerService, eventBusService, }) { - super() + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) - /** @private @const {EntityManager} */ this.manager_ = manager - - /** @private @const {DiscountRepository} */ this.discountRepository_ = discountRepository - - /** @private @const {DiscountRuleRepository} */ this.discountRuleRepository_ = discountRuleRepository - - /** @private @const {GiftCardRepository} */ this.giftCardRepository_ = giftCardRepository - - /** @private @const {DiscountConditionRepository} */ this.discountConditionRepository_ = discountConditionRepository - - /** @private @const {DiscountConditionRepository} */ this.discountConditionService_ = discountConditionService - - /** @private @const {TotalsService} */ this.totalsService_ = totalsService - - /** @private @const {ProductService} */ this.productService_ = productService - - /** @private @const {RegionService} */ this.regionService_ = regionService - - /** @private @const {CustomerService} */ this.customerService_ = customerService - - /** @private @const {EventBus} */ this.eventBus_ = eventBusService } - withTransaction(transactionManager: EntityManager): DiscountService { - if (!transactionManager) { - return this - } - - const cloned = new DiscountService({ - manager: transactionManager, - discountRepository: this.discountRepository_, - discountRuleRepository: this.discountRuleRepository_, - giftCardRepository: this.giftCardRepository_, - discountConditionRepository: this.discountConditionRepository_, - discountConditionService: this.discountConditionService_, - totalsService: this.totalsService_, - productService: this.productService_, - regionService: this.regionService_, - customerService: this.customerService_, - eventBusService: this.eventBus_, - }) - - cloned.transactionManager_ = transactionManager - cloned.manager_ = transactionManager - - return cloned - } - /** * Creates a discount rule with provided data given that the data is validated. 
* @param {DiscountRule} discountRule - the discount rule to create @@ -157,12 +115,14 @@ class DiscountService extends BaseService { selector: FilterableDiscountProps = {}, config: FindConfig = { relations: [], skip: 0, take: 10 } ): Promise { - const discountRepo = this.manager_.getCustomRepository( - this.discountRepository_ - ) + return await this.atomicPhase_(async (transactionManager) => { + const discountRepo = transactionManager.getCustomRepository( + this.discountRepository_ + ) - const query = this.buildQuery_(selector, config) - return discountRepo.find(query) + const query = buildQuery(selector as Selector, config) + return await discountRepo.find(query) + }) } /** @@ -178,37 +138,39 @@ class DiscountService extends BaseService { order: { created_at: "DESC" }, } ): Promise<[Discount[], number]> { - const discountRepo = this.manager_.getCustomRepository( - this.discountRepository_ - ) + return await this.atomicPhase_(async (transactionManager) => { + const discountRepo = transactionManager.getCustomRepository( + this.discountRepository_ + ) - let q - if ("q" in selector) { - q = selector.q - delete selector.q - } - - const query = this.buildQuery_(selector, config) - - if (q) { - const where = query.where - - delete where.code - - query.where = (qb: SelectQueryBuilder): void => { - qb.where(where) - - qb.andWhere( - new Brackets((qb) => { - qb.where({ code: ILike(`%${q}%`) }) - }) - ) + let q + if ("q" in selector) { + q = selector.q + delete selector.q } - } - const [discounts, count] = await discountRepo.findAndCount(query) + const query = buildQuery(selector as Selector, config) - return [discounts, count] + if (q) { + const where = query.where + + delete where.code + + query.where = (qb: SelectQueryBuilder): void => { + qb.where(where) + + qb.andWhere( + new Brackets((qb) => { + qb.where({ code: ILike(`%${q}%`) }) + }) + ) + } + } + + const [discounts, count] = await discountRepo.findAndCount(query) + + return [discounts, count] + }) } /** @@ -218,7 +180,7 @@ class DiscountService extends BaseService { * @return {Promise} the result of the create operation */ async create(discount: CreateDiscountInput): Promise { - return this.atomicPhase_(async (manager: EntityManager) => { + return await this.atomicPhase_(async (manager: EntityManager) => { const discountRepo = manager.getCustomRepository(this.discountRepository_) const ruleRepo = manager.getCustomRepository(this.discountRuleRepository_) @@ -240,11 +202,11 @@ class DiscountService extends BaseService { } try { if (discount.regions) { - discount.regions = await Promise.all( + discount.regions = (await Promise.all( discount.regions.map((regionId) => this.regionService_.withTransaction(manager).retrieve(regionId) ) - ) + )) as Region[] } const discountRule = ruleRepo.create(validatedRule) @@ -286,22 +248,23 @@ class DiscountService extends BaseService { discountId: string, config: FindConfig = {} ): Promise { - const discountRepo = this.manager_.getCustomRepository( - this.discountRepository_ - ) - - const validatedId = this.validateId_(discountId) - const query = this.buildQuery_({ id: validatedId }, config) - const discount = await discountRepo.findOne(query) - - if (!discount) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Discount with ${discountId} was not found` + return await this.atomicPhase_(async (transactionManager) => { + const discountRepo = transactionManager.getCustomRepository( + this.discountRepository_ ) - } - return discount + const query = buildQuery({ id: discountId }, config) + const 
discount = await discountRepo.findOne(query) + + if (!discount) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Discount with ${discountId} was not found` + ) + } + + return discount + }) } /** @@ -314,29 +277,28 @@ class DiscountService extends BaseService { discountCode: string, config: FindConfig = {} ): Promise { - const discountRepo = this.manager_.getCustomRepository( - this.discountRepository_ - ) + return await this.atomicPhase_(async (transactionManager) => { + const discountRepo = transactionManager.getCustomRepository( + this.discountRepository_ + ) - let query = this.buildQuery_( - { code: discountCode, is_dynamic: false }, - config - ) - let discount = await discountRepo.findOne(query) - - if (!discount) { - query = this.buildQuery_({ code: discountCode, is_dynamic: true }, config) - discount = await discountRepo.findOne(query) + let query = buildQuery({ code: discountCode, is_dynamic: false }, config) + let discount = await discountRepo.findOne(query) if (!discount) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Discount with code ${discountCode} was not found` - ) - } - } + query = buildQuery({ code: discountCode, is_dynamic: true }, config) + discount = await discountRepo.findOne(query) - return discount + if (!discount) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Discount with code ${discountCode} was not found` + ) + } + } + + return discount + }) } /** @@ -349,7 +311,7 @@ class DiscountService extends BaseService { discountId: string, update: UpdateDiscountInput ): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountRepo: DiscountRepository = manager.getCustomRepository( this.discountRepository_ ) @@ -365,7 +327,7 @@ class DiscountService extends BaseService { const ruleToUpdate = omit(update.rule, "conditions") if (!isEmpty(ruleToUpdate)) { - update.rule = ruleToUpdate + update.rule = ruleToUpdate as UpdateDiscountRuleInput } const { rule, metadata, regions, ...rest } = update @@ -403,7 +365,7 @@ class DiscountService extends BaseService { } if (metadata) { - discount.metadata = await this.setMetadata_(discount.id, metadata) + discount.metadata = await setMetadata(discount, metadata) } if (rule) { @@ -416,12 +378,10 @@ class DiscountService extends BaseService { }) } - const updatedRule = ruleRepo.create({ + discount.rule = ruleRepo.create({ ...discount.rule, ...ruleUpdate, - }) - - discount.rule = updatedRule + } as DiscountRule) } for (const key of Object.keys(rest).filter( @@ -432,8 +392,7 @@ class DiscountService extends BaseService { discount.code = discount.code.toUpperCase() - const updated = await discountRepo.save(discount) - return updated + return await discountRepo.save(discount) }) } @@ -447,7 +406,7 @@ class DiscountService extends BaseService { discountId: string, data: CreateDynamicDiscountInput ): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountRepo = manager.getCustomRepository(this.discountRepository_) const discount = await this.retrieve(discountId) @@ -483,9 +442,8 @@ class DiscountService extends BaseService { ) toCreate.ends_at = lastValidDate } - const created = await discountRepo.create(toCreate) - const result = await discountRepo.save(created) - return result + const created: Discount = discountRepo.create(toCreate) + return await discountRepo.save(created) }) } @@ -496,19 +454,17 @@ class DiscountService extends BaseService { * @return {Promise} the 
newly created dynamic code */ async deleteDynamicCode(discountId: string, code: string): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountRepo = manager.getCustomRepository(this.discountRepository_) const discount = await discountRepo.findOne({ where: { parent_discount_id: discountId, code }, }) if (!discount) { - return Promise.resolve() + return } await discountRepo.softRemove(discount) - - return Promise.resolve() }) } @@ -519,7 +475,7 @@ class DiscountService extends BaseService { * @return {Promise} the result of the update operation */ async addRegion(discountId: string, regionId: string): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountRepo = manager.getCustomRepository(this.discountRepository_) const discount = await this.retrieve(discountId, { @@ -543,8 +499,7 @@ class DiscountService extends BaseService { discount.regions = [...discount.regions, region] - const updated = await discountRepo.save(discount) - return updated + return await discountRepo.save(discount) }) } @@ -555,7 +510,7 @@ class DiscountService extends BaseService { * @return {Promise} the result of the update operation */ async removeRegion(discountId: string, regionId: string): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountRepo = manager.getCustomRepository(this.discountRepository_) const discount = await this.retrieve(discountId, { @@ -570,8 +525,7 @@ class DiscountService extends BaseService { discount.regions = discount.regions.filter((r) => r.id !== regionId) - const updated = await discountRepo.save(discount) - return updated + return await discountRepo.save(discount) }) } @@ -581,18 +535,16 @@ class DiscountService extends BaseService { * @return {Promise} the result of the delete operation */ async delete(discountId: string): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountRepo = manager.getCustomRepository(this.discountRepository_) const discount = await discountRepo.findOne({ where: { id: discountId } }) if (!discount) { - return Promise.resolve() + return } await discountRepo.softRemove(discount) - - return Promise.resolve() }) } @@ -600,7 +552,7 @@ class DiscountService extends BaseService { discountRuleId: string, productId: string | undefined ): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountConditionRepo: DiscountConditionRepository = manager.getCustomRepository(this.discountConditionRepository_) @@ -626,98 +578,102 @@ class DiscountService extends BaseService { lineItem: LineItem, cart: Cart ): Promise { - let adjustment = 0 + return await this.atomicPhase_(async () => { + let adjustment = 0 - if (!lineItem.allow_discounts) { - return adjustment - } + if (!lineItem.allow_discounts) { + return adjustment + } - const discount = await this.retrieve(discountId, { relations: ["rule"] }) + const discount = await this.retrieve(discountId, { relations: ["rule"] }) - const { type, value, allocation } = discount.rule + const { type, value, allocation } = discount.rule - const fullItemPrice = lineItem.unit_price * lineItem.quantity + const fullItemPrice = lineItem.unit_price * lineItem.quantity - if (type === DiscountRuleType.PERCENTAGE) { - adjustment = Math.round((fullItemPrice / 100) * value) - } else if ( - type === 
DiscountRuleType.FIXED && - allocation === DiscountAllocation.TOTAL - ) { - // when a fixed discount should be applied to the total, - // we create line adjustments for each item with an amount - // relative to the subtotal - const subtotal = this.totalsService_.getSubtotal(cart, { - excludeNonDiscounts: true, - }) - const nominator = Math.min(value, subtotal) - const itemRelativeToSubtotal = lineItem.unit_price / subtotal - const totalItemPercentage = itemRelativeToSubtotal * lineItem.quantity - adjustment = Math.round(nominator * totalItemPercentage) - } else { - adjustment = value * lineItem.quantity - } - // if the amount of the discount exceeds the total price of the item, - // we return the total item price, else the fixed amount - return adjustment >= fullItemPrice ? fullItemPrice : adjustment + if (type === DiscountRuleType.PERCENTAGE) { + adjustment = Math.round((fullItemPrice / 100) * value) + } else if ( + type === DiscountRuleType.FIXED && + allocation === DiscountAllocation.TOTAL + ) { + // when a fixed discount should be applied to the total, + // we create line adjustments for each item with an amount + // relative to the subtotal + const subtotal = this.totalsService_.getSubtotal(cart, { + excludeNonDiscounts: true, + }) + const nominator = Math.min(value, subtotal) + const itemRelativeToSubtotal = lineItem.unit_price / subtotal + const totalItemPercentage = itemRelativeToSubtotal * lineItem.quantity + adjustment = Math.round(nominator * totalItemPercentage) + } else { + adjustment = value * lineItem.quantity + } + // if the amount of the discount exceeds the total price of the item, + // we return the total item price, else the fixed amount + return adjustment >= fullItemPrice ? fullItemPrice : adjustment + }) } async validateDiscountForCartOrThrow( cart: Cart, discount: Discount ): Promise { - if (this.hasReachedLimit(discount)) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Discount has been used maximum allowed times" - ) - } - - if (this.hasNotStarted(discount)) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Discount is not valid yet" - ) - } - - if (this.hasExpired(discount)) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Discount is expired" - ) - } - - if (this.isDisabled(discount)) { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "The discount code is disabled" - ) - } - - const isValidForRegion = await this.isValidForRegion( - discount, - cart.region_id - ) - if (!isValidForRegion) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - "The discount is not available in current region" - ) - } - - if (cart.customer_id) { - const canApplyForCustomer = await this.canApplyForCustomer( - discount.rule.id, - cart.customer_id - ) - - if (!canApplyForCustomer) { + return await this.atomicPhase_(async () => { + if (this.hasReachedLimit(discount)) { throw new MedusaError( MedusaError.Types.NOT_ALLOWED, - "Discount is not valid for customer" + "Discount has been used maximum allowed times" ) } - } + + if (this.hasNotStarted(discount)) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Discount is not valid yet" + ) + } + + if (this.hasExpired(discount)) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Discount is expired" + ) + } + + if (this.isDisabled(discount)) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "The discount code is disabled" + ) + } + + const isValidForRegion = await this.isValidForRegion( + discount, + cart.region_id + ) + if (!isValidForRegion) { + 
throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "The discount is not available in current region" + ) + } + + if (cart.customer_id) { + const canApplyForCustomer = await this.canApplyForCustomer( + discount.rule.id, + cart.customer_id + ) + + if (!canApplyForCustomer) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Discount is not valid for customer" + ) + } + } + }) } hasReachedLimit(discount: Discount): boolean { @@ -746,24 +702,26 @@ class DiscountService extends BaseService { discount: Discount, region_id: string ): Promise { - let regions = discount.regions + return await this.atomicPhase_(async () => { + let regions = discount.regions - if (discount.parent_discount_id) { - const parent = await this.retrieve(discount.parent_discount_id, { - relations: ["rule", "regions"], - }) + if (discount.parent_discount_id) { + const parent = await this.retrieve(discount.parent_discount_id, { + relations: ["rule", "regions"], + }) - regions = parent.regions - } + regions = parent.regions + } - return regions.find(({ id }) => id === region_id) !== undefined + return regions.find(({ id }) => id === region_id) !== undefined + }) } async canApplyForCustomer( discountRuleId: string, customerId: string | undefined ): Promise { - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const discountConditionRepo: DiscountConditionRepository = manager.getCustomRepository(this.discountConditionRepository_) diff --git a/packages/medusa/src/services/draft-order.js b/packages/medusa/src/services/draft-order.js deleted file mode 100644 index 4da9958b58..0000000000 --- a/packages/medusa/src/services/draft-order.js +++ /dev/null @@ -1,390 +0,0 @@ -import { BaseService } from "medusa-interfaces" -import { MedusaError } from "medusa-core-utils" -import { Brackets } from "typeorm" - -/** - * Handles draft orders - * @implements {BaseService} - */ -class DraftOrderService extends BaseService { - static Events = { - CREATED: "draft_order.created", - UPDATED: "draft_order.updated", - } - - constructor({ - manager, - draftOrderRepository, - paymentRepository, - orderRepository, - eventBusService, - cartService, - lineItemService, - productVariantService, - shippingOptionService, - }) { - super() - - /** @private @const {EntityManager} */ - this.manager_ = manager - - /** @private @const {DraftOrderRepository} */ - this.draftOrderRepository_ = draftOrderRepository - - /** @private @const {PaymentRepository} */ - this.paymentRepository_ = paymentRepository - - /** @private @const {OrderRepository} */ - this.orderRepository_ = orderRepository - - /** @private @const {LineItemService} */ - this.lineItemService_ = lineItemService - - /** @private @const {CartService} */ - this.cartService_ = cartService - - /** @private @const {ProductVariantService} */ - this.productVariantService_ = productVariantService - - /** @private @const {ShippingOptionService} */ - this.shippingOptionService_ = shippingOptionService - - /** @private @const {EventBusService} */ - this.eventBus_ = eventBusService - } - - withTransaction(transactionManager) { - if (!transactionManager) { - return this - } - - const cloned = new DraftOrderService({ - manager: transactionManager, - draftOrderRepository: this.draftOrderRepository_, - paymentRepository: this.paymentRepository_, - orderRepository: this.orderRepository_, - lineItemService: this.lineItemService_, - cartService: this.cartService_, - productVariantService: this.productVariantService_, - shippingOptionService: 
this.shippingOptionService_, - eventBusService: this.eventBus_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - - /** - * Retrieves a draft order with the given id. - * @param {string} id - id of the draft order to retrieve - * @param {object} config - query object for findOne - * @return {Promise} the draft order - */ - async retrieve(id, config = {}) { - const draftOrderRepo = this.manager_.getCustomRepository( - this.draftOrderRepository_ - ) - - const validatedId = this.validateId_(id) - - const query = this.buildQuery_({ id: validatedId }, config) - - const draftOrder = await draftOrderRepo.findOne(query) - - if (!draftOrder) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Draft order with ${id} was not found` - ) - } - - return draftOrder - } - - /** - * Retrieves a draft order based on its associated cart id - * @param {string} cartId - cart id that the draft orders's cart has - * @param {object} config - query object for findOne - * @return {Promise} the draft order - */ - async retrieveByCartId(cartId, config = {}) { - const draftOrderRepo = this.manager_.getCustomRepository( - this.draftOrderRepository_ - ) - - const query = this.buildQuery_({ cart_id: cartId }, config) - - const draftOrder = await draftOrderRepo.findOne(query) - - if (!draftOrder) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Draft order was not found` - ) - } - - return draftOrder - } - - /** - * Deletes draft order idempotently. - * @param {string} draftOrderId - id of draft order to delete - * @return {Promise} empty promise - */ - async delete(draftOrderId) { - return this.atomicPhase_(async (manager) => { - const draftOrderRepo = manager.getCustomRepository( - this.draftOrderRepository_ - ) - - const draftOrder = await draftOrderRepo.findOne({ - where: { id: draftOrderId }, - }) - - if (!draftOrder) { - return Promise.resolve() - } - - await draftOrderRepo.remove(draftOrder) - - return Promise.resolve() - }) - } - - /** - * Lists draft orders alongside the count - * @param {object} selector - query selector to filter draft orders - * @param {object} config - query config - * @return {Promise} draft orders - */ - async listAndCount( - selector, - config = { skip: 0, take: 50, order: { created_at: "DESC" } } - ) { - const draftOrderRepository = this.manager_.getCustomRepository( - this.draftOrderRepository_ - ) - - let q - if ("q" in selector) { - q = selector.q - delete selector.q - } - - const query = this.buildQuery_(selector, config) - - if (q) { - const where = query.where - - delete where.display_id - - query.join = { - alias: "draft_order", - innerJoin: { - cart: "draft_order.cart", - }, - } - - query.where = (qb) => { - qb.where(where) - - qb.andWhere( - new Brackets((qb) => { - qb.where(`cart.email ILIKE :q`, { - q: `%${q}%`, - }).orWhere(`draft_order.display_id::varchar(255) ILIKE :dId`, { - dId: `${q}`, - }) - }) - ) - } - } - - const [draftOrders, count] = await draftOrderRepository.findAndCount(query) - - return [draftOrders, count] - } - - /** - * Lists draft orders - * @param {Object} selector - query object for find - * @param {Object} config - configurable attributes for find - * @return {Promise} list of draft orders - */ - async list( - selector, - config = { skip: 0, take: 50, order: { created_at: "DESC" } } - ) { - const draftOrderRepo = this.manager_.getCustomRepository( - this.draftOrderRepository_ - ) - - const query = this.buildQuery_(selector, config) - - return draftOrderRepo.find(query) - } - - /** - * Creates a draft 
order. - * @param {object} data - data to create draft order from - * @return {Promise} the created draft order - */ - async create(data) { - return this.atomicPhase_(async (manager) => { - const draftOrderRepo = manager.getCustomRepository( - this.draftOrderRepository_ - ) - - if (!data.region_id) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `region_id is required to create a draft order` - ) - } - - if (!data.items || !data.items.length) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Items are required to create a draft order` - ) - } - - const { - shipping_methods, - discounts, - no_notification_order, - items, - ...rest - } = data - - if (discounts) { - for (const { code } of discounts) { - rest.discounts = [] - await this.cartService_ - .withTransaction(manager) - .applyDiscount(rest, code) - } - } - - const createdCart = await this.cartService_ - .withTransaction(manager) - .create({ type: "draft_order", ...rest }) - - const draftOrder = draftOrderRepo.create({ - cart_id: createdCart.id, - no_notification_order, - }) - const result = await draftOrderRepo.save(draftOrder) - - await this.eventBus_ - .withTransaction(manager) - .emit(DraftOrderService.Events.CREATED, { - id: result.id, - }) - - for (const item of items) { - if (item.variant_id) { - const line = await this.lineItemService_ - .withTransaction(manager) - .generate(item.variant_id, data.region_id, item.quantity, { - metadata: item?.metadata || {}, - unit_price: item.unit_price, - cart: createdCart, - }) - - await this.lineItemService_.withTransaction(manager).create({ - cart_id: createdCart.id, - ...line, - }) - } else { - let price - if (typeof item.unit_price === `undefined` || item.unit_price < 0) { - price = 0 - } else { - price = item.unit_price - } - - // custom line items can be added to a draft order - await this.lineItemService_.withTransaction(manager).create({ - cart_id: createdCart.id, - has_shipping: true, - title: item.title || "Custom item", - allow_discounts: false, - unit_price: price, - quantity: item.quantity, - }) - } - } - - for (const method of shipping_methods) { - await this.cartService_ - .withTransaction(manager) - .addShippingMethod(createdCart.id, method.option_id, method.data) - } - - return result - }) - } - - /** - * Registers a draft order as completed, when an order has been completed. 
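Stepping back to the `calculateDiscountForLineItem` change earlier in this diff: for a fixed discount with `TOTAL` allocation, each line item absorbs a share of the discount proportional to its share of the discountable subtotal, capped at the full line price. A minimal, self-contained sketch of that arithmetic (all values are illustrative):

```ts
// Standalone sketch of the FIXED + TOTAL allocation: a line absorbs a share of the
// fixed discount proportional to its share of the discountable subtotal, capped at
// the full line price.
function fixedTotalAdjustment(
  unitPrice: number,
  quantity: number,
  subtotal: number,
  discountValue: number
): number {
  const fullItemPrice = unitPrice * quantity
  const nominator = Math.min(discountValue, subtotal)
  const itemShareOfSubtotal = (unitPrice / subtotal) * quantity
  const adjustment = Math.round(nominator * itemShareOfSubtotal)
  // never discount more than the line is worth
  return Math.min(adjustment, fullItemPrice)
}

// A 2000 fixed discount on a 10000 subtotal: a line of 2 x 2500 (half the subtotal)
// absorbs 1000 of it.
console.log(fixedTotalAdjustment(2500, 2, 10000, 2000)) // 1000
```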
- * @param {string} doId - id of draft order to complete - * @param {string} orderId - id of order completed from draft order cart - * @return {Promise} the created order - */ - async registerCartCompletion(doId, orderId) { - return this.atomicPhase_(async (manager) => { - const draftOrderRepo = manager.getCustomRepository( - this.draftOrderRepository_ - ) - const draftOrder = await this.retrieve(doId) - - draftOrder.status = "completed" - draftOrder.completed_at = new Date() - draftOrder.order_id = orderId - - await draftOrderRepo.save(draftOrder) - }) - } - - /** - * Updates a draft order with the given data - * @param {String} doId - id of the draft order - * @param {DraftOrder} data - values to update the order with - * @return {Promise} the updated draft order - */ - async update(doId, data) { - return this.atomicPhase_(async (manager) => { - const doRepo = manager.getCustomRepository(this.draftOrderRepository_) - const draftOrder = await this.retrieve(doId) - let touched = false - - if (draftOrder.status === "completed") { - throw new MedusaError( - MedusaError.Types.NOT_ALLOWED, - "Can't update a draft order which is complete" - ) - } - - if (data.no_notification_order !== undefined) { - touched = true - draftOrder.no_notification_order = data.no_notification_order - } - - if (touched) { - doRepo.save(draftOrder) - - await this.eventBus_ - .withTransaction(manager) - .emit(DraftOrderService.Events.UPDATED, { - id: draftOrder.id, - }) - } - - return draftOrder - }) - } -} - -export default DraftOrderService diff --git a/packages/medusa/src/services/draft-order.ts b/packages/medusa/src/services/draft-order.ts new file mode 100644 index 0000000000..b787135e62 --- /dev/null +++ b/packages/medusa/src/services/draft-order.ts @@ -0,0 +1,432 @@ +import { MedusaError } from "medusa-core-utils" +import { Brackets, EntityManager, FindManyOptions, UpdateResult } from "typeorm" +import { DraftOrderRepository } from "../repositories/draft-order" +import { PaymentRepository } from "../repositories/payment" +import EventBusService from "./event-bus" +import CartService from "./cart" +import LineItemService from "./line-item" +import { OrderRepository } from "../repositories/order" +import ProductVariantService from "./product-variant" +import ShippingOptionService from "./shipping-option" +import { DraftOrder, DraftOrderStatus, Cart, CartType } from "../models" +import { AdminPostDraftOrdersReq } from "../api/routes/admin/draft-orders" +import { TransactionBaseService } from "../interfaces" +import { ExtendedFindConfig, FindConfig } from "../types/common" +import { buildQuery } from "../utils" + +type InjectedDependencies = { + manager: EntityManager + draftOrderRepository: typeof DraftOrderRepository + paymentRepository: typeof PaymentRepository + orderRepository: typeof OrderRepository + eventBusService: EventBusService + cartService: CartService + lineItemService: LineItemService + productVariantService: ProductVariantService + shippingOptionService: ShippingOptionService +} + +/** + * Handles draft orders + * @implements {BaseService} + */ +class DraftOrderService extends TransactionBaseService { + static readonly Events = { + CREATED: "draft_order.created", + UPDATED: "draft_order.updated", + } + + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + protected readonly draftOrderRepository_: typeof DraftOrderRepository + protected readonly paymentRepository_: typeof PaymentRepository + protected readonly orderRepository_: typeof OrderRepository + 
protected readonly eventBus_: EventBusService + protected readonly cartService_: CartService + protected readonly lineItemService_: LineItemService + protected readonly productVariantService_: ProductVariantService + protected readonly shippingOptionService_: ShippingOptionService + + constructor({ + manager, + draftOrderRepository, + paymentRepository, + orderRepository, + eventBusService, + cartService, + lineItemService, + productVariantService, + shippingOptionService, + }: InjectedDependencies) { + super({ + manager, + draftOrderRepository, + paymentRepository, + orderRepository, + eventBusService, + cartService, + lineItemService, + productVariantService, + shippingOptionService, + }) + + this.manager_ = manager + this.draftOrderRepository_ = draftOrderRepository + this.paymentRepository_ = paymentRepository + this.orderRepository_ = orderRepository + this.lineItemService_ = lineItemService + this.cartService_ = cartService + this.productVariantService_ = productVariantService + this.shippingOptionService_ = shippingOptionService + this.eventBus_ = eventBusService + } + + /** + * Retrieves a draft order with the given id. + * @param id - id of the draft order to retrieve + * @param config - query object for findOne + * @return the draft order + */ + async retrieve( + id: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepo = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + + const query = buildQuery({ id }, config) + const draftOrder = await draftOrderRepo.findOne(query) + if (!draftOrder) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Draft order with ${id} was not found` + ) + } + + return draftOrder + } + ) + } + + /** + * Retrieves a draft order based on its associated cart id + * @param cartId - cart id that the draft orders's cart has + * @param config - query object for findOne + * @return the draft order + */ + async retrieveByCartId( + cartId: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepo = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + + const query = buildQuery({ cart_id: cartId }, config) + const draftOrder = await draftOrderRepo.findOne(query) + if (!draftOrder) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Draft order was not found` + ) + } + + return draftOrder + } + ) + } + + /** + * Deletes draft order idempotently. 
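The rewritten `retrieve` and `retrieveByCartId` now go through `buildQuery` with a `FindConfig`, so callers can narrow the selection. A hedged usage sketch, where the id, selected fields, and relation name are illustrative:

```ts
// Illustrative call: the id is a placeholder and the select/relations values
// depend on what is defined on the DraftOrder entity.
const draftOrder = await draftOrderService.retrieve("dorder_01ABC", {
  select: ["id", "status", "cart_id"],
  relations: ["cart"],
})
```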
+ * @param {string} draftOrderId - id of draft order to delete + * @return {Promise} empty promise + */ + async delete(draftOrderId: string): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepo = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + const draftOrder = await draftOrderRepo.findOne({ + where: { id: draftOrderId }, + }) + + if (!draftOrder) { + return + } + return await draftOrderRepo.remove(draftOrder) + } + ) + } + + /** + * Lists draft orders alongside the count + * @param selector - query selector to filter draft orders + * @param config - query config + * @return draft orders + */ + async listAndCount( + selector, + config: FindConfig = { + skip: 0, + take: 50, + order: { created_at: "DESC" }, + } + ): Promise<[DraftOrder[], number]> { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepository = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + + const { q, ...restSelector } = selector + const query = buildQuery( + restSelector, + config + ) as FindManyOptions & ExtendedFindConfig + + if (q) { + const where = query.where + delete where?.display_id + + query.join = { + alias: "draft_order", + innerJoin: { + cart: "draft_order.cart", + }, + } + + query.where = (qb): void => { + qb.where(where) + + qb.andWhere( + new Brackets((qb) => { + qb.where(`cart.email ILIKE :q`, { + q: `%${q}%`, + }).orWhere(`draft_order.display_id::TEXT ILIKE :displayId`, { + displayId: `${q}`, + }) + }) + ) + } + } + + return await draftOrderRepository.findAndCount(query) + } + ) + } + + /** + * Lists draft orders + * @param selector - query object for find + * @param config - configurable attributes for find + * @return list of draft orders + */ + async list( + selector, + config: FindConfig = { + skip: 0, + take: 50, + order: { created_at: "DESC" }, + } + ): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepo = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + + const query = buildQuery(selector, config) + + return await draftOrderRepo.find(query) + } + ) + } + + /** + * Creates a draft order. 
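`listAndCount` keeps the free-text behaviour of the old implementation: a `q` value is matched with ILIKE against the cart email and the draft order display id, while the rest of the selector and config behave like a normal find. A hedged usage sketch with placeholder values:

```ts
// Hypothetical call: free-text search plus pagination.
const [draftOrders, count] = await draftOrderService.listAndCount(
  { q: "customer@example.com" },
  { skip: 0, take: 20, order: { created_at: "DESC" } }
)
console.log(`${count} draft orders match, showing ${draftOrders.length}`)
```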
+ * @param data - data to create draft order from + * @return the created draft order + */ + async create(data: AdminPostDraftOrdersReq): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepo = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + + if (!data.region_id) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `region_id is required to create a draft order` + ) + } + + if (!data.items || !data.items.length) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Items are required to create a draft order` + ) + } + + const { shipping_methods, no_notification_order, items, ...rawCart } = + data + + if (rawCart.discounts) { + const { discounts } = rawCart + rawCart.discounts = [] + + for (const { code } of discounts) { + await this.cartService_ + .withTransaction(transactionManager) + .applyDiscount(rawCart as Cart, code) + } + } + + const createdCart = await this.cartService_ + .withTransaction(transactionManager) + .create({ type: CartType.DRAFT_ORDER, ...rawCart }) + + const draftOrder = draftOrderRepo.create({ + cart_id: createdCart.id, + no_notification_order, + }) + const result = await draftOrderRepo.save(draftOrder) + + await this.eventBus_ + .withTransaction(transactionManager) + .emit(DraftOrderService.Events.CREATED, { + id: result.id, + }) + + for (const item of items) { + if (item.variant_id) { + const line = await this.lineItemService_ + .withTransaction(transactionManager) + .generate(item.variant_id, data.region_id, item.quantity, { + metadata: item?.metadata || {}, + unit_price: item.unit_price, + cart: createdCart, + }) + + await this.lineItemService_ + .withTransaction(transactionManager) + .create({ + ...line, + cart_id: createdCart.id, + }) + } else { + let price + if (typeof item.unit_price === `undefined` || item.unit_price < 0) { + price = 0 + } else { + price = item.unit_price + } + + // custom line items can be added to a draft order + await this.lineItemService_ + .withTransaction(transactionManager) + .create({ + cart_id: createdCart.id, + has_shipping: true, + title: item.title || "Custom item", + allow_discounts: false, + unit_price: price, + quantity: item.quantity, + }) + } + } + + for (const method of shipping_methods) { + await this.cartService_ + .withTransaction(transactionManager) + .addShippingMethod(createdCart.id, method.option_id, method.data) + } + + return result + } + ) + } + + /** + * Registers a draft order as completed, when an order has been completed. 
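For reference, a sketch of the payload `create` expects, following the validation above: `region_id` and at least one item are required, variant items are priced through the line item service, and title/unit_price items become non-discountable custom lines. All ids and amounts are placeholders:

```ts
// Placeholder ids and amounts; the shape follows AdminPostDraftOrdersReq as used above.
const payload = {
  region_id: "reg_01ABC",
  items: [
    { variant_id: "variant_01ABC", quantity: 2 },              // priced from the region
    { title: "Gift wrapping", unit_price: 500, quantity: 1 },  // custom line, discounts disabled
  ],
  shipping_methods: [{ option_id: "so_01ABC", data: {} }],
  discounts: [{ code: "WELCOME10" }],
}

const draftOrder = await draftOrderService.create(payload)
```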
+ * @param draftOrderId - id of draft order to complete + * @param orderId - id of order completed from draft order cart + * @return the created order + */ + async registerCartCompletion( + draftOrderId: string, + orderId: string + ): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepo = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + return await draftOrderRepo.update( + { + id: draftOrderId, + }, + { + status: DraftOrderStatus.COMPLETED, + completed_at: new Date(), + order_id: orderId, + } + ) + } + ) + } + + /** + * Updates a draft order with the given data + * @param id - id of the draft order + * @param data - values to update the order with + * @return the updated draft order + */ + async update( + id: string, + data: { no_notification_order: boolean } + ): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const draftOrderRepo = transactionManager.getCustomRepository( + this.draftOrderRepository_ + ) + const draftOrder = await this.retrieve(id) + + if (draftOrder.status === DraftOrderStatus.COMPLETED) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Can't update a draft order which is complete" + ) + } + + let touched = false + if (data.no_notification_order !== undefined) { + touched = true + draftOrder.no_notification_order = data.no_notification_order + } + + if (touched) { + await draftOrderRepo.save(draftOrder) + + await this.eventBus_ + .withTransaction(transactionManager) + .emit(DraftOrderService.Events.UPDATED, { + id: draftOrder.id, + }) + } + + return draftOrder + } + ) + } +} + +export default DraftOrderService diff --git a/packages/medusa/src/services/event-bus.js b/packages/medusa/src/services/event-bus.js deleted file mode 100644 index 10b4e306df..0000000000 --- a/packages/medusa/src/services/event-bus.js +++ /dev/null @@ -1,293 +0,0 @@ -import Bull from "bull" -import Redis from "ioredis" - -/** - * Can keep track of multiple subscribers to different events and run the - * subscribers when events happen. Events will run asynchronously. 
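Both `create` and `update` emit events through the event bus, so other parts of the system can react to draft order changes. A hedged subscriber sketch using the event names defined on the service (the handler bodies are illustrative):

```ts
// The (data, eventName) subscriber signature matches the event bus service below.
eventBusService.subscribe("draft_order.created", async (data: { id: string }) => {
  console.log(`draft order ${data.id} was created`)
})

eventBusService.subscribe("draft_order.updated", async (data: { id: string }) => {
  console.log(`draft order ${data.id} was updated`)
})
```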
- * @class - */ -class EventBusService { - constructor( - { manager, logger, stagedJobRepository, redisClient, redisSubscriber }, - config, - singleton = true - ) { - const opts = { - createClient: (type) => { - switch (type) { - case "client": - return redisClient - case "subscriber": - return redisSubscriber - default: - if (config.projectConfig.redis_url) { - return new Redis(config.projectConfig.redis_url) - } - return redisClient - } - }, - } - - this.config_ = config - - /** @private {EntityManager} */ - this.manager_ = manager - - /** @private {logger} */ - this.logger_ = logger - - this.stagedJobRepository_ = stagedJobRepository - - if (singleton) { - /** @private {object} */ - this.observers_ = {} - - /** @private {BullQueue} */ - this.queue_ = new Bull(`${this.constructor.name}:queue`, opts) - - /** @private {object} to handle cron jobs */ - this.cronHandlers_ = {} - - this.redisClient_ = redisClient - this.redisSubscriber_ = redisSubscriber - - /** @private {BullQueue} used for cron jobs */ - this.cronQueue_ = new Bull(`cron-jobs:queue`, opts) - - // Register our worker to handle emit calls - this.queue_.process(this.worker_) - - // Register cron worker - this.cronQueue_.process(this.cronWorker_) - - if (process.env.NODE_ENV !== "test") { - this.startEnqueuer() - } - } - } - - withTransaction(transactionManager) { - if (!transactionManager) { - return this - } - - const cloned = new EventBusService( - { - manager: transactionManager, - stagedJobRepository: this.stagedJobRepository_, - logger: this.logger_, - redisClient: this.redisClient_, - redisSubscriber: this.redisSubscriber_, - }, - this.config_, - false - ) - - cloned.transactionManager_ = transactionManager - cloned.queue_ = this.queue_ - - return cloned - } - - /** - * Adds a function to a list of event subscribers. - * @param {string} event - the event that the subscriber will listen for. - * @param {func} subscriber - the function to be called when a certain event - * happens. Subscribers must return a Promise. - */ - subscribe(event, subscriber) { - if (typeof subscriber !== "function") { - throw new Error("Subscriber must be a function") - } - - if (this.observers_[event]) { - this.observers_[event].push(subscriber) - } else { - this.observers_[event] = [subscriber] - } - } - - /** - * Adds a function to a list of event subscribers. - * @param {string} event - the event that the subscriber will listen for. - * @param {func} subscriber - the function to be called when a certain event - * happens. Subscribers must return a Promise. - */ - unsubscribe(event, subscriber) { - if (typeof subscriber !== "function") { - throw new Error("Subscriber must be a function") - } - - if (this.observers_[event]) { - const index = this.observers_[event].indexOf(subscriber) - if (index !== -1) { - this.observers_[event].splice(index, 1) - } - } - } - - /** - * Adds a function to a list of event subscribers. - * @param {string} event - the event that the subscriber will listen for. - * @param {func} subscriber - the function to be called when a certain event - * happens. Subscribers must return a Promise. - */ - registerCronHandler_(event, subscriber) { - if (typeof subscriber !== "function") { - throw new Error("Handler must be a function") - } - - if (this.observers_[event]) { - this.cronHandlers_[event].push(subscriber) - } else { - this.cronHandlers_[event] = [subscriber] - } - } - - /** - * Calls all subscribers when an event occurs. - * @param {string} eventName - the name of the event to be process. 
- * @param {?any} data - the data to send to the subscriber. - * @param {?any} options - options to add the job with - * @return {BullJob} - the job from our queue - */ - async emit(eventName, data, options = {}) { - if (this.transactionManager_) { - const stagedJobRepository = this.transactionManager_.getCustomRepository( - this.stagedJobRepository_ - ) - - const created = await stagedJobRepository.create({ - event_name: eventName, - data, - }) - - return stagedJobRepository.save(created) - } else { - const opts = { removeOnComplete: true } - if (typeof options.delay === "number") { - opts.delay = options.delay - } - this.queue_.add({ eventName, data }, opts) - } - } - - async sleep(ms) { - return new Promise((resolve) => { - setTimeout(resolve, ms) - }) - } - - async startEnqueuer() { - this.enRun_ = true - this.enqueue_ = this.enqueuer_() - } - - async stopEnqueuer() { - this.enRun_ = false - await this.enqueue_ - } - - async enqueuer_() { - while (this.enRun_) { - const listConfig = { - relations: [], - skip: 0, - take: 1000, - } - - const sjRepo = this.manager_.getCustomRepository( - this.stagedJobRepository_ - ) - const jobs = await sjRepo.find({}, listConfig) - - await Promise.all( - jobs.map((job) => { - this.queue_ - .add( - { eventName: job.event_name, data: job.data }, - { removeOnComplete: true } - ) - .then(async () => { - await sjRepo.remove(job) - }) - }) - ) - - await this.sleep(3000) - } - } - - /** - * Handles incoming jobs. - * @param {Object} job The job object - * @return {Promise} resolves to the results of the subscriber calls. - */ - worker_ = (job) => { - const { eventName, data } = job.data - const eventObservers = this.observers_[eventName] || [] - const wildcardObservers = this.observers_["*"] || [] - - const observers = eventObservers.concat(wildcardObservers) - - this.logger_.info( - `Processing ${eventName} which has ${eventObservers.length} subscribers` - ) - - return Promise.all( - observers.map((subscriber) => { - return subscriber(data, eventName).catch((err) => { - this.logger_.warn( - `An error occured while processing ${eventName}: ${err}` - ) - console.log(err) - return err - }) - }) - ) - } - - /** - * Handles incoming jobs. - * @param {Object} job The job object - * @return {Promise} resolves to the results of the subscriber calls. - */ - cronWorker_ = (job) => { - const { eventName, data } = job.data - const observers = this.cronHandlers_[eventName] || [] - this.logger_.info(`Processing cron job: ${eventName}`) - - return Promise.all( - observers.map((subscriber) => { - return subscriber(data, eventName).catch((err) => { - this.logger_.warn( - `An error occured while processing ${eventName}: ${err}` - ) - return err - }) - }) - ) - } - - /** - * Registers a cron job. 
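The `emit`/enqueuer pair acts as a small outbox: inside a transaction the event is persisted as a staged job rather than pushed to Redis, and the enqueuer loop moves committed staged jobs onto the Bull queue roughly every three seconds. A hedged sketch of what that means for callers:

```ts
// Sketch under the above assumptions: events emitted through a transaction-scoped
// service only reach the queue after the surrounding transaction commits.
await manager.transaction(async (transactionManager) => {
  // ...domain writes...
  await eventBusService
    .withTransaction(transactionManager)
    .emit("draft_order.updated", { id: "dorder_01ABC" }) // written as a staged job
})
// once committed, the enqueuer picks the staged job up and adds it to the Bull queue
```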
- * @param {string} eventName - the name of the event - * @param {object} data - the data to be sent with the event - * @param {string} cron - the cron pattern - * @param {function} handler - the handler to call on each cron job - * @return {void} - */ - createCronJob(eventName, data, cron, handler) { - this.logger_.info(`Registering ${eventName}`) - this.registerCronHandler_(eventName, handler) - return this.cronQueue_.add( - { - eventName, - data, - }, - { repeat: { cron } } - ) - } -} - -export default EventBusService diff --git a/packages/medusa/src/services/event-bus.ts b/packages/medusa/src/services/event-bus.ts new file mode 100644 index 0000000000..4929949e7f --- /dev/null +++ b/packages/medusa/src/services/event-bus.ts @@ -0,0 +1,324 @@ +import Bull from "bull" +import Redis from "ioredis" +import { EntityManager } from "typeorm" +import { ConfigModule, Logger } from "../types/global" +import { StagedJobRepository } from "../repositories/staged-job" +import { StagedJob } from "../models" +import { sleep } from "../utils/sleep" + +type InjectedDependencies = { + manager: EntityManager + logger: Logger + stagedJobRepository: typeof StagedJobRepository + redisClient: Redis + redisSubscriber: Redis +} + +type Subscriber = (data: T, eventName: string) => Promise + +/** + * Can keep track of multiple subscribers to different events and run the + * subscribers when events happen. Events will run asynchronously. + */ +export default class EventBusService { + protected readonly config_: ConfigModule + protected readonly manager_: EntityManager + protected readonly logger_: Logger + protected readonly stagedJobRepository_: typeof StagedJobRepository + protected readonly observers_: Map + protected readonly cronHandlers_: Map + protected readonly redisClient_: Redis + protected readonly redisSubscriber_: Redis + protected readonly cronQueue_: Bull + protected queue_: Bull + protected shouldEnqueuerRun: boolean + protected transactionManager_: EntityManager | undefined + protected enqueue_: Promise + + constructor( + { + manager, + logger, + stagedJobRepository, + redisClient, + redisSubscriber, + }: InjectedDependencies, + config: ConfigModule, + singleton = true + ) { + const opts = { + createClient: (type: string): Redis => { + switch (type) { + case "client": + return redisClient + case "subscriber": + return redisSubscriber + default: + if (config.projectConfig.redis_url) { + return new Redis(config.projectConfig.redis_url) + } + return redisClient + } + }, + } + + this.config_ = config + this.manager_ = manager + this.logger_ = logger + this.stagedJobRepository_ = stagedJobRepository + + if (singleton) { + this.observers_ = new Map() + this.queue_ = new Bull(`${this.constructor.name}:queue`, opts) + this.cronHandlers_ = new Map() + this.redisClient_ = redisClient + this.redisSubscriber_ = redisSubscriber + this.cronQueue_ = new Bull(`cron-jobs:queue`, opts) + // Register our worker to handle emit calls + this.queue_.process(this.worker_) + // Register cron worker + this.cronQueue_.process(this.cronWorker_) + + if (process.env.NODE_ENV !== "test") { + this.startEnqueuer() + } + } + } + + withTransaction(transactionManager): this | EventBusService { + if (!transactionManager) { + return this + } + + const cloned = new EventBusService( + { + manager: transactionManager, + stagedJobRepository: this.stagedJobRepository_, + logger: this.logger_, + redisClient: this.redisClient_, + redisSubscriber: this.redisSubscriber_, + }, + this.config_, + false + ) + + cloned.transactionManager_ = 
transactionManager + cloned.queue_ = this.queue_ + + return cloned + } + + /** + * Adds a function to a list of event subscribers. + * @param event - the event that the subscriber will listen for. + * @param subscriber - the function to be called when a certain event + * happens. Subscribers must return a Promise. + * @return this + */ + subscribe(event: string | symbol, subscriber: Subscriber): this { + if (typeof subscriber !== "function") { + throw new Error("Subscriber must be a function") + } + + const observers = this.observers_.get(event) ?? [] + this.observers_.set(event, [...observers, subscriber]) + + return this + } + + /** + * Adds a function to a list of event subscribers. + * @param event - the event that the subscriber will listen for. + * @param subscriber - the function to be called when a certain event + * happens. Subscribers must return a Promise. + * @return this + */ + unsubscribe(event: string | symbol, subscriber: Subscriber): this { + if (typeof subscriber !== "function") { + throw new Error("Subscriber must be a function") + } + + if (this.observers_.get(event)?.length) { + const index = this.observers_.get(event)?.indexOf(subscriber) + if (index !== -1) { + this.observers_.get(event)?.splice(index as number, 1) + } + } + + return this + } + + /** + * Adds a function to a list of event subscribers. + * @param event - the event that the subscriber will listen for. + * @param subscriber - the function to be called when a certain event + * happens. Subscribers must return a Promise. + * @return this + */ + protected registerCronHandler_( + event: string | symbol, + subscriber: Subscriber + ): this { + if (typeof subscriber !== "function") { + throw new Error("Handler must be a function") + } + + const cronHandlers = this.cronHandlers_.get(event) ?? [] + this.cronHandlers_.set(event, [...cronHandlers, subscriber]) + + return this + } + + /** + * Calls all subscribers when an event occurs. + * @param {string} eventName - the name of the event to be process. + * @param data - the data to send to the subscriber. + * @param options - options to add the job with + * @return the job from our queue + */ + async emit( + eventName: string, + data: T, + options: { delay?: number } = {} + ): Promise { + if (this.transactionManager_) { + const stagedJobRepository = this.transactionManager_.getCustomRepository( + this.stagedJobRepository_ + ) + + const stagedJobInstance = stagedJobRepository.create({ + event_name: eventName, + data, + }) + return await stagedJobRepository.save(stagedJobInstance) + } else { + const opts: { removeOnComplete: boolean; delay?: number } = { + removeOnComplete: true, + } + if (typeof options.delay === "number") { + opts.delay = options.delay + } + this.queue_.add({ eventName, data }, opts) + } + } + + startEnqueuer(): void { + this.shouldEnqueuerRun = true + this.enqueue_ = this.enqueuer_() + } + + async stopEnqueuer(): Promise { + this.shouldEnqueuerRun = false + await this.enqueue_ + } + + async enqueuer_(): Promise { + while (this.shouldEnqueuerRun) { + const listConfig = { + relations: [], + skip: 0, + take: 1000, + } + + const stagedJobRepo = this.manager_.getCustomRepository( + this.stagedJobRepository_ + ) + const jobs = await stagedJobRepo.find(listConfig) + + await Promise.all( + jobs.map((job) => { + this.queue_ + .add( + { eventName: job.event_name, data: job.data }, + { removeOnComplete: true } + ) + .then(async () => { + await stagedJobRepo.remove(job) + }) + }) + ) + + await sleep(3000) + } + } + + /** + * Handles incoming jobs. 
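The TypeScript `emit` is generic over the payload and accepts an optional delay (in milliseconds) that is forwarded to Bull. A hedged usage sketch with an illustrative payload:

```ts
// Illustrative payload; the generic parameter types `data`.
type DraftOrderCreated = { id: string }

await eventBusService.emit<DraftOrderCreated>(
  "draft_order.created",
  { id: "dorder_01ABC" },
  { delay: 5_000 } // process ~5 seconds after enqueueing
)
```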
+ * @param job The job object + * @return resolves to the results of the subscriber calls. + */ + worker_ = async (job: { + data: { eventName: string; data: T } + }): Promise => { + const { eventName, data } = job.data + const eventObservers = this.observers_.get(eventName) || [] + const wildcardObservers = this.observers_.get("*") || [] + + const observers = eventObservers.concat(wildcardObservers) + + this.logger_.info( + `Processing ${eventName} which has ${eventObservers.length} subscribers` + ) + + return await Promise.all( + observers.map((subscriber) => { + return subscriber(data, eventName).catch((err) => { + this.logger_.warn( + `An error occurred while processing ${eventName}: ${err}` + ) + console.error(err) + return err + }) + }) + ) + } + + /** + * Handles incoming jobs. + * @param job The job object + * @return resolves to the results of the subscriber calls. + */ + cronWorker_ = async (job: { + data: { eventName: string; data: T } + }): Promise => { + const { eventName, data } = job.data + const observers = this.cronHandlers_.get(eventName) || [] + this.logger_.info(`Processing cron job: ${eventName}`) + + return await Promise.all( + observers.map((subscriber) => { + return subscriber(data, eventName).catch((err) => { + this.logger_.warn( + `An error occured while processing ${eventName}: ${err}` + ) + return err + }) + }) + ) + } + + /** + * Registers a cron job. + * @param eventName - the name of the event + * @param data - the data to be sent with the event + * @param cron - the cron pattern + * @param handler - the handler to call on each cron job + * @return void + */ + createCronJob( + eventName: string, + data: T, + cron: string, + handler: Subscriber + ): void { + this.logger_.info(`Registering ${eventName}`) + this.registerCronHandler_(eventName, handler) + return this.cronQueue_.add( + { + eventName, + data, + }, + { repeat: { cron } } + ) + } +} diff --git a/packages/medusa/src/services/file.ts b/packages/medusa/src/services/file.ts new file mode 100644 index 0000000000..c8618d4cae --- /dev/null +++ b/packages/medusa/src/services/file.ts @@ -0,0 +1,50 @@ +import { MedusaError } from "medusa-core-utils" +import { EntityManager } from "typeorm" +import { + AbstractFileService, + FileServiceGetUploadStreamResult, + FileServiceUploadResult, + GetUploadedFileType, + UploadStreamDescriptorType, +} from "../interfaces" + +class DefaultFileService extends AbstractFileService { + upload(fileData: Express.Multer.File): Promise { + throw new MedusaError( + MedusaError.Types.UNEXPECTED_STATE, + "Please add a file service plugin in order to manipulate files in Medusa" + ) + } + delete(fileData: Record): Promise { + throw new MedusaError( + MedusaError.Types.UNEXPECTED_STATE, + "Please add a file service plugin in order to manipulate files in Medusa" + ) + } + getUploadStreamDescriptor( + fileData: UploadStreamDescriptorType + ): Promise { + throw new MedusaError( + MedusaError.Types.UNEXPECTED_STATE, + "Please add a file service plugin in order to manipulate files in Medusa" + ) + } + getDownloadStream( + fileData: GetUploadedFileType + ): Promise { + throw new MedusaError( + MedusaError.Types.UNEXPECTED_STATE, + "Please add a file service plugin in order to manipulate files in Medusa" + ) + } + getPresignedDownloadUrl(fileData: GetUploadedFileType): Promise { + throw new MedusaError( + MedusaError.Types.UNEXPECTED_STATE, + "Please add a file service plugin in order to manipulate files in Medusa" + ) + } + protected manager_: EntityManager + protected transactionManager_: 
EntityManager | undefined +} + +export default DefaultFileService diff --git a/packages/medusa/src/services/fulfillment.js b/packages/medusa/src/services/fulfillment.ts similarity index 53% rename from packages/medusa/src/services/fulfillment.js rename to packages/medusa/src/services/fulfillment.ts index a3b6ab1451..654b03987c 100644 --- a/packages/medusa/src/services/fulfillment.js +++ b/packages/medusa/src/services/fulfillment.ts @@ -1,11 +1,49 @@ -import { BaseService } from "medusa-interfaces" import { MedusaError } from "medusa-core-utils" +import { EntityManager } from "typeorm" +import { ShippingProfileService } from "." +import { TransactionBaseService } from "../interfaces" +import { Fulfillment, LineItem, ShippingMethod } from "../models" +import { FulfillmentRepository } from "../repositories/fulfillment" +import { LineItemRepository } from "../repositories/line-item" +import { TrackingLinkRepository } from "../repositories/tracking-link" +import { FindConfig } from "../types/common" +import { + CreateFulfillmentOrder, + CreateShipmentConfig, + FulfillmentItemPartition, + FulFillmentItemType, +} from "../types/fulfillment" +import { buildQuery } from "../utils" +import FulfillmentProviderService from "./fulfillment-provider" +import LineItemService from "./line-item" +import TotalsService from "./totals" + +type InjectedDependencies = { + manager: EntityManager + totalsService: TotalsService + shippingProfileService: ShippingProfileService + lineItemService: LineItemService + fulfillmentProviderService: FulfillmentProviderService + fulfillmentRepository: typeof FulfillmentRepository + trackingLinkRepository: typeof TrackingLinkRepository + lineItemRepository: typeof LineItemRepository +} /** * Handles Fulfillments - * @extends BaseService */ -class FulfillmentService extends BaseService { +class FulfillmentService extends TransactionBaseService { + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + protected readonly totalsService_: TotalsService + protected readonly lineItemService_: LineItemService + protected readonly shippingProfileService_: ShippingProfileService + protected readonly fulfillmentProviderService_: FulfillmentProviderService + protected readonly fulfillmentRepository_: typeof FulfillmentRepository + protected readonly trackingLinkRepository_: typeof TrackingLinkRepository + protected readonly lineItemRepository_: typeof LineItemRepository + constructor({ manager, totalsService, @@ -14,56 +52,33 @@ class FulfillmentService extends BaseService { shippingProfileService, lineItemService, fulfillmentProviderService, - }) { - super() + lineItemRepository, + }: InjectedDependencies) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) - /** @private @const {EntityManager} */ this.manager_ = manager - /** @private @const {TotalsService} */ + this.lineItemRepository_ = lineItemRepository this.totalsService_ = totalsService - - /** @private @const {FulfillmentRepository} */ this.fulfillmentRepository_ = fulfillmentRepository - - /** @private @const {TrackingLinkRepository} */ this.trackingLinkRepository_ = trackingLinkRepository - - /** @private @const {ShippingProfileService} */ this.shippingProfileService_ = shippingProfileService - - /** @private @const {LineItemService} */ this.lineItemService_ = lineItemService - - /** @private @const {FulfillmentProviderService} */ this.fulfillmentProviderService_ = fulfillmentProviderService } - withTransaction(transactionManager) { - if (!transactionManager) { - 
return this - } - - const cloned = new FulfillmentService({ - manager: transactionManager, - totalsService: this.totalsService_, - trackingLinkRepository: this.trackingLinkRepository_, - fulfillmentRepository: this.fulfillmentRepository_, - shippingProfileService: this.shippingProfileService_, - lineItemService: this.lineItemService_, - fulfillmentProviderService: this.fulfillmentProviderService_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - - partitionItems_(shippingMethods, items) { - const partitioned = [] + partitionItems_( + shippingMethods: ShippingMethod[], + items: LineItem[] + ): FulfillmentItemPartition[] { + const partitioned: FulfillmentItemPartition[] = [] // partition order items to their dedicated shipping method for (const method of shippingMethods) { - const temp = { shipping_method: method } + const temp: FulfillmentItemPartition = { + shipping_method: method, + items: [], + } // for each method find the items in the order, that are associated // with the profile on the current shipping method @@ -83,19 +98,22 @@ class FulfillmentService extends BaseService { /** * Retrieves the order line items, given an array of items. - * @param {Order} order - the order to get line items from - * @param {{ item_id: string, quantity: number }} items - the items to get - * @param {function} transformer - a function to apply to each of the items + * @param order - the order to get line items from + * @param items - the items to get + * @param transformer - a function to apply to each of the items * retrieved from the order, should return a line item. If the transformer * returns an undefined value the line item will be filtered from the * returned array. - * @return {Promise>} the line items generated by the transformer. + * @return the line items generated by the transformer. */ - async getFulfillmentItems_(order, items, transformer) { + async getFulfillmentItems_( + order: CreateFulfillmentOrder, + items: FulFillmentItemType[] + ): Promise<(LineItem | null)[]> { const toReturn = await Promise.all( items.map(async ({ item_id, quantity }) => { const item = order.items.find((i) => i.id === item_id) - return transformer(item, quantity) + return this.validateFulfillmentLineItem_(item, quantity) }) ) @@ -107,13 +125,19 @@ class FulfillmentService extends BaseService { * fulfillable quantity is lower than the requested fulfillment quantity. * Fulfillable quantity is calculated by subtracting the already fulfilled * quantity from the quantity that was originally purchased. - * @param {LineItem} item - the line item to check has sufficient fulfillable + * @param item - the line item to check has sufficient fulfillable * quantity. - * @param {number} quantity - the quantity that is requested to be fulfilled. - * @return {LineItem} a line item that has the requested fulfillment quantity + * @param quantity - the quantity that is requested to be fulfilled. + * @return a line item that has the requested fulfillment quantity * set. */ - validateFulfillmentLineItem_(item, quantity) { + validateFulfillmentLineItem_( + item: LineItem | undefined, + quantity: number + ): LineItem | null { + const manager = this.transactionManager_ ?? 
this.manager_ + const lineItemRepo = manager.getCustomRepository(this.lineItemRepository_) + if (!item) { // This will in most cases be called by a webhook so to ensure that // things go through smoothly in instances where extra items outside @@ -127,35 +151,39 @@ class FulfillmentService extends BaseService { "Cannot fulfill more items than have been purchased" ) } - return { + return lineItemRepo.create({ ...item, quantity, - } + }) } /** * Retrieves a fulfillment by its id. - * @param {string} id - the id of the fulfillment to retrieve - * @param {object} config - optional values to include with fulfillmentRepository query - * @return {Fulfillment} the fulfillment + * @param id - the id of the fulfillment to retrieve + * @param config - optional values to include with fulfillmentRepository query + * @return the fulfillment */ - async retrieve(id, config = {}) { - const fulfillmentRepository = this.manager_.getCustomRepository( - this.fulfillmentRepository_ - ) - - const validatedId = this.validateId_(id) - const query = this.buildQuery_({ id: validatedId }, config) - - const fulfillment = await fulfillmentRepository.findOne(query) - - if (!fulfillment) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Fulfillment with id: ${id} was not found` + async retrieve( + id: string, + config: FindConfig = {} + ): Promise { + return await this.atomicPhase_(async (manager) => { + const fulfillmentRepository = manager.getCustomRepository( + this.fulfillmentRepository_ ) - } - return fulfillment + + const query = buildQuery({ id }, config) + + const fulfillment = await fulfillmentRepository.findOne(query) + + if (!fulfillment) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Fulfillment with id: ${id} was not found` + ) + } + return fulfillment + }) } /** @@ -163,27 +191,30 @@ class FulfillmentService extends BaseService { * If items needs to be fulfilled by different provider, we make * sure to partition those items, and create fulfillment for * those partitions. 
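`createFulfillment` takes the order plus a list of `{ item_id, quantity }` entries and splits them per shipping method before handing each partition to the fulfillment provider. A hedged usage sketch, assuming `order` is a `CreateFulfillmentOrder` that has already been loaded with its items and shipping methods:

```ts
// Placeholder ids; itemsToFulfill follows the FulFillmentItemType shape used above.
const fulfillments = await fulfillmentService.createFulfillment(
  order,
  [{ item_id: "item_01ABC", quantity: 1 }],
  { metadata: { reason: "partial shipment" } } // optional custom values merged onto the fulfillment
)
```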
- * @param {Order} order - order to create fulfillment for - * @param {{ item_id: string, quantity: number}[]} itemsToFulfill - the items in the order to fulfill - * @param {object} custom - potential custom values to add - * @return {Fulfillment[]} the created fulfillments + * @param order - order to create fulfillment for + * @param itemsToFulfill - the items in the order to fulfill + * @param custom - potential custom values to add + * @return the created fulfillments */ - async createFulfillment(order, itemsToFulfill, custom = {}) { - return this.atomicPhase_(async (manager) => { + async createFulfillment( + order: CreateFulfillmentOrder, + itemsToFulfill: FulFillmentItemType[], + custom: Partial = {} + ): Promise { + return await this.atomicPhase_(async (manager) => { const fulfillmentRepository = manager.getCustomRepository( this.fulfillmentRepository_ ) - const lineItems = await this.getFulfillmentItems_( - order, - itemsToFulfill, - this.validateFulfillmentLineItem_ - ) + const lineItems = await this.getFulfillmentItems_(order, itemsToFulfill) const { shipping_methods } = order // partition order items to their dedicated shipping method - const fulfillments = this.partitionItems_(shipping_methods, lineItems) + const fulfillments = this.partitionItems_( + shipping_methods, + lineItems as LineItem[] + ) const created = await Promise.all( fulfillments.map(async ({ shipping_method, items }) => { @@ -216,16 +247,19 @@ class FulfillmentService extends BaseService { * Cancels a fulfillment with the fulfillment provider. Will decrement the * fulfillment_quantity on the line items associated with the fulfillment. * Throws if the fulfillment has already been shipped. - * @param {Fulfillment|string} fulfillmentOrId - the fulfillment object or id. - * @return {Promise} the result of the save operation + * @param fulfillmentOrId - the fulfillment object or id. + * @return the result of the save operation * */ - cancelFulfillment(fulfillmentOrId) { - return this.atomicPhase_(async (manager) => { - let id = fulfillmentOrId - if (typeof fulfillmentOrId === "object") { - id = fulfillmentOrId.id - } + async cancelFulfillment( + fulfillmentOrId: Fulfillment | string + ): Promise { + return await this.atomicPhase_(async (manager) => { + const id = + typeof fulfillmentOrId === "string" + ? fulfillmentOrId + : fulfillmentOrId.id + const fulfillment = await this.retrieve(id, { relations: ["items", "claim_order", "swap"], }) @@ -262,22 +296,22 @@ class FulfillmentService extends BaseService { /** * Creates a shipment by marking a fulfillment as shipped. Adds * tracking links and potentially more metadata. 
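`createShipment` marks a fulfillment as shipped, attaches tracking links, and optionally merges metadata and a notification preference. A hedged usage sketch with placeholder values:

```ts
// Placeholder id and tracking number; config follows CreateShipmentConfig above.
const shipped = await fulfillmentService.createShipment(
  "ful_01ABC",
  [{ tracking_number: "1Z999AA10123456784" }],
  { metadata: { carrier: "ups" }, no_notification: false }
)
```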
- * @param {Order} fulfillmentId - the fulfillment to ship - * @param {TrackingLink[]} trackingLinks - tracking links for the shipment - * @param {object} config - potential configuration settings, such as no_notification and metadata - * @return {Fulfillment} the shipped fulfillment + * @param fulfillmentId - the fulfillment to ship + * @param trackingLinks - tracking links for the shipment + * @param config - potential configuration settings, such as no_notification and metadata + * @return the shipped fulfillment */ async createShipment( - fulfillmentId, - trackingLinks, - config = { + fulfillmentId: string, + trackingLinks: { tracking_number: string }[], + config: CreateShipmentConfig = { metadata: {}, no_notification: undefined, } - ) { + ): Promise { const { metadata, no_notification } = config - return this.atomicPhase_(async (manager) => { + return await this.atomicPhase_(async (manager) => { const fulfillmentRepository = manager.getCustomRepository( this.fulfillmentRepository_ ) @@ -303,7 +337,7 @@ class FulfillmentService extends BaseService { trackingLinkRepo.create(tl) ) - if (no_notification) { + if (typeof no_notification !== "undefined") { fulfillment.no_notification = no_notification } @@ -312,8 +346,7 @@ class FulfillmentService extends BaseService { ...metadata, } - const updated = fulfillmentRepository.save(fulfillment) - return updated + return await fulfillmentRepository.save(fulfillment) }) } } diff --git a/packages/medusa/src/services/gift-card.js b/packages/medusa/src/services/gift-card.js deleted file mode 100644 index ab2da18541..0000000000 --- a/packages/medusa/src/services/gift-card.js +++ /dev/null @@ -1,305 +0,0 @@ -import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" -import randomize from "randomatic" -import { Brackets } from "typeorm" - -/** - * Provides layer to manipulate gift cards. - * @extends BaseService - */ -class GiftCardService extends BaseService { - static Events = { - CREATED: "gift_card.created", - } - - constructor({ - manager, - giftCardRepository, - giftCardTransactionRepository, - regionService, - eventBusService, - }) { - super() - - /** @private @const {EntityManager} */ - this.manager_ = manager - - /** @private @const {GiftCardRepository} */ - this.giftCardRepository_ = giftCardRepository - - /** @private @const {GiftCardRepository} */ - this.giftCardTransactionRepo_ = giftCardTransactionRepository - - /** @private @const {RegionService} */ - this.regionService_ = regionService - - /** @private @const {EventBus} */ - this.eventBus_ = eventBusService - } - - withTransaction(transactionManager) { - if (!transactionManager) { - return this - } - - const cloned = new GiftCardService({ - manager: transactionManager, - giftCardRepository: this.giftCardRepository_, - giftCardTransactionRepository: this.giftCardTransactionRepo_, - regionService: this.regionService_, - eventBusService: this.eventBus_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - - /** - * Generates a 16 character gift card code - * @return {string} the generated gift card code - */ - generateCode_() { - const code = [ - randomize("A0", 4), - randomize("A0", 4), - randomize("A0", 4), - randomize("A0", 4), - ].join("-") - - return code - } - - /** - * @param {Object} selector - the query object for find - * @param {Object} config - the configuration used to find the objects. contains relations, skip, and take. 
- * @return {Promise} the result of the find operation - */ - async list(selector = {}, config = { relations: [], skip: 0, take: 10 }) { - const giftCardRepo = this.manager_.getCustomRepository( - this.giftCardRepository_ - ) - - let q - if ("q" in selector) { - q = selector.q - delete selector.q - } - - const query = this.buildQuery_(selector, config) - - const rels = query.relations - delete query.relations - - if (q) { - const where = query.where - delete where.id - - const raw = await giftCardRepo - .createQueryBuilder("gift_card") - .leftJoinAndSelect("gift_card.order", "order") - .select(["gift_card.id"]) - .where(where) - .andWhere( - new Brackets((qb) => { - return qb - .where(`gift_card.code ILIKE :q`, { q: `%${q}%` }) - .orWhere(`display_id::varchar(255) ILIKE :dId`, { dId: `${q}` }) - }) - ) - .getMany() - - return giftCardRepo.findWithRelations( - rels, - raw.map((i) => i.id) - ) - } - return giftCardRepo.findWithRelations(rels, query) - } - - async createTransaction(data) { - return this.atomicPhase_(async (manager) => { - const gctRepo = manager.getCustomRepository(this.giftCardTransactionRepo_) - const created = gctRepo.create(data) - const saved = await gctRepo.save(created) - return saved.id - }) - } - - /** - * Creates a gift card with provided data given that the data is validated. - * @param {GiftCard} giftCard - the gift card data to create - * @return {Promise} the result of the create operation - */ - async create(giftCard) { - return this.atomicPhase_(async (manager) => { - const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) - - if (!giftCard.region_id) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Gift card is missing region_id` - ) - } - - // Will throw if region does not exist - const region = await this.regionService_.retrieve(giftCard.region_id) - - const code = this.generateCode_() - - const toCreate = { - code, - region_id: region.id, - ...giftCard, - } - - const created = await giftCardRepo.create(toCreate) - const result = await giftCardRepo.save(created) - - await this.eventBus_ - .withTransaction(manager) - .emit(GiftCardService.Events.CREATED, { - id: result.id, - }) - - return result - }) - } - - /** - * Gets a gift card by id. 
- * @param {string} giftCardId - id of gift card to retrieve - * @param {object} config - optional values to include with gift card query - * @return {Promise} the gift card - */ - async retrieve(giftCardId, config = {}) { - const giftCardRepo = this.manager_.getCustomRepository( - this.giftCardRepository_ - ) - - const validatedId = this.validateId_(giftCardId) - - const query = { - where: { id: validatedId }, - } - - if (config.select) { - query.select = config.select - } - - if (config.relations) { - query.relations = config.relations - } - - const rels = query.relations - delete query.relations - - const giftCard = await giftCardRepo.findOneWithRelations(rels, query) - - if (!giftCard) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Gift card with ${giftCardId} was not found` - ) - } - - return giftCard - } - - async retrieveByCode(code, config = {}) { - const giftCardRepo = this.manager_.getCustomRepository( - this.giftCardRepository_ - ) - - const query = { - where: { code }, - } - - if (config.select) { - query.select = config.select - } - - if (config.relations) { - query.relations = config.relations - } - - const rels = query.relations - delete query.relations - - const giftCard = await giftCardRepo.findOneWithRelations(rels, query) - - if (!giftCard) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Gift card with ${code} was not found` - ) - } - - return giftCard - } - - /** - * Updates a giftCard. - * @param {string} giftCardId - giftCard id of giftCard to update - * @param {GiftCard} update - the data to update the giftCard with - * @return {Promise} the result of the update operation - */ - async update(giftCardId, update) { - return this.atomicPhase_(async (manager) => { - const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) - - const giftCard = await this.retrieve(giftCardId) - - const { region_id, metadata, balance, ...rest } = update - - if (region_id && region_id !== giftCard.region_id) { - const region = await this.regionService_.retrieve(region_id) - giftCard.region_id = region.id - } - - if (metadata) { - giftCard.metadata = await this.setMetadata_(giftCard.id, metadata) - } - - if (typeof balance !== "undefined") { - if (balance < 0 || giftCard.value < balance) { - throw new MedusaError( - MedusaError.Types.INVALID_ARGUMENT, - "new balance is invalid" - ) - } - giftCard.balance = balance - } - - for (const [key, value] of Object.entries(rest)) { - giftCard[key] = value - } - - const updated = await giftCardRepo.save(giftCard) - return updated - }) - } - - /** - * Deletes a gift card idempotently - * @param {string} giftCardId - id of gift card to delete - * @return {Promise} the result of the delete operation - */ - async delete(giftCardId) { - return this.atomicPhase_(async (manager) => { - const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) - - const giftCard = await giftCardRepo.findOne({ where: { id: giftCardId } }) - - if (!giftCard) { - return Promise.resolve() - } - - await giftCardRepo.softRemove(giftCard) - - return Promise.resolve() - }) - } -} - -export default GiftCardService diff --git a/packages/medusa/src/services/gift-card.ts b/packages/medusa/src/services/gift-card.ts new file mode 100644 index 0000000000..70fd249b0e --- /dev/null +++ b/packages/medusa/src/services/gift-card.ts @@ -0,0 +1,303 @@ +import { MedusaError } from "medusa-core-utils" +import randomize from "randomatic" +import { EntityManager } from "typeorm" +import { EventBusService } from "." 
+import { TransactionBaseService } from "../interfaces" +import { GiftCard } from "../models" +import { GiftCardRepository } from "../repositories/gift-card" +import { GiftCardTransactionRepository } from "../repositories/gift-card-transaction" +import { + ExtendedFindConfig, + FindConfig, + QuerySelector, + Selector, +} from "../types/common" +import { + CreateGiftCardInput, + CreateGiftCardTransactionInput, + UpdateGiftCardInput, +} from "../types/gift-card" +import { buildQuery, setMetadata } from "../utils" +import RegionService from "./region" + +type InjectedDependencies = { + manager: EntityManager + giftCardRepository: typeof GiftCardRepository + giftCardTransactionRepository: typeof GiftCardTransactionRepository + regionService: RegionService + eventBusService: EventBusService +} +/** + * Provides layer to manipulate gift cards. + */ +class GiftCardService extends TransactionBaseService { + protected readonly giftCardRepository_: typeof GiftCardRepository + protected readonly giftCardTransactionRepo_: typeof GiftCardTransactionRepository + protected readonly regionService_: RegionService + protected readonly eventBus_: EventBusService + + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + static Events = { + CREATED: "gift_card.created", + } + + constructor({ + manager, + giftCardRepository, + giftCardTransactionRepository, + regionService, + eventBusService, + }: InjectedDependencies) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) + + this.manager_ = manager + + this.giftCardRepository_ = giftCardRepository + this.giftCardTransactionRepo_ = giftCardTransactionRepository + this.regionService_ = regionService + this.eventBus_ = eventBusService + } + + /** + * Generates a 16 character gift card code + * @return the generated gift card code + */ + static generateCode(): string { + const code = [ + randomize("A0", 4), + randomize("A0", 4), + randomize("A0", 4), + randomize("A0", 4), + ].join("-") + + return code + } + + /** + * @param selector - the query object for find + * @param config - the configuration used to find the objects. contains relations, skip, and take. + * @return the result of the find operation + */ + async listAndCount( + selector: QuerySelector<GiftCard> = {}, + config: FindConfig<GiftCard> = { relations: [], skip: 0, take: 10 } + ): Promise<[GiftCard[], number]> { + return await this.atomicPhase_(async (manager) => { + const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) + + let q: string | undefined + if (typeof selector.q !== "undefined") { + q = selector.q + delete selector.q + } + + const query: ExtendedFindConfig< + GiftCard, + QuerySelector<GiftCard> + > = buildQuery<QuerySelector<GiftCard>, GiftCard>(selector, config) + + const rels = query.relations + delete query.relations + + return await giftCardRepo.listGiftCardsAndCount(query, rels, q) + }) + } + + /** + * @param selector - the query object for find + * @param config - the configuration used to find the objects. contains relations, skip, and take.
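(A minimal usage sketch of the listAndCount API above; illustrative only, not part of the patch. The giftCardService handle and the search term are assumed, and the free-text q term is handled by the repository's listGiftCardsAndCount helper, which in the previous JS implementation matched the gift card code and the order display id.)

// Illustrative only: paginated gift card listing with an optional free-text term.
const [giftCards, count] = await giftCardService.listAndCount(
  { q: "ABCD" }, // QuerySelector<GiftCard>; q is stripped off before the query is built
  { relations: ["order"], skip: 0, take: 10 } // FindConfig<GiftCard>
)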
+ * @return the result of the find operation + */ + async list( + selector: QuerySelector<GiftCard> = {}, + config: FindConfig<GiftCard> = { relations: [], skip: 0, take: 10 } + ): Promise<GiftCard[]> { + return await this.atomicPhase_(async (manager) => { + const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) + + let q: string | undefined + if (typeof selector.q !== "undefined") { + q = selector.q + delete selector.q + } + + const query: ExtendedFindConfig< + GiftCard, + QuerySelector<GiftCard> + > = buildQuery<QuerySelector<GiftCard>, GiftCard>(selector, config) + + const rels = query.relations + delete query.relations + + return await giftCardRepo.listGiftCards(query, rels, q) + }) + } + + async createTransaction( + data: CreateGiftCardTransactionInput + ): Promise<string> { + return await this.atomicPhase_(async (manager) => { + const gctRepo = manager.getCustomRepository(this.giftCardTransactionRepo_) + const created = gctRepo.create(data) + const saved = await gctRepo.save(created) + return saved.id + }) + } + + /** + * Creates a gift card with provided data given that the data is validated. + * @param giftCard - the gift card data to create + * @return the result of the create operation + */ + async create(giftCard: CreateGiftCardInput): Promise<GiftCard> { + return await this.atomicPhase_(async (manager) => { + const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) + + // Will throw if region does not exist + const region = await this.regionService_ + .withTransaction(manager) + .retrieve(giftCard.region_id) + + const code = GiftCardService.generateCode() + + const toCreate = { + code, + ...giftCard, + region_id: region.id, + } + + const created = giftCardRepo.create(toCreate) + const result = await giftCardRepo.save(created) + + await this.eventBus_ + .withTransaction(manager) + .emit(GiftCardService.Events.CREATED, { + id: result.id, + }) + + return result + }) + } + + protected async retrieve_( + selector: Selector<GiftCard>, + config: FindConfig<GiftCard> = {} + ): Promise<GiftCard> { + return await this.atomicPhase_(async (manager) => { + const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) + + const { relations, ...query } = buildQuery(selector, config) + + const giftCard = await giftCardRepo.findOneWithRelations( + relations as (keyof GiftCard)[], + query + ) + + if (!giftCard) { + const selectorConstraints = Object.entries(selector) + .map(([key, value]) => `${key}: ${value}`) + .join(", ") + + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Gift card with ${selectorConstraints} was not found` + ) + } + + return giftCard + }) + } + + /** + * Gets a gift card by id. + * @param giftCardId - id of gift card to retrieve + * @param config - optional values to include with gift card query + * @return the gift card + */ + async retrieve( + giftCardId: string, + config: FindConfig<GiftCard> = {} + ): Promise<GiftCard> { + return await this.atomicPhase_(async () => { + return await this.retrieve_({ id: giftCardId }, config) + }) + } + + async retrieveByCode( + code: string, + config: FindConfig<GiftCard> = {} + ): Promise<GiftCard> { + return await this.atomicPhase_(async () => { + return await this.retrieve_({ code }, config) + }) + } + + /** + * Updates a giftCard.
+ * @param giftCardId - giftCard id of giftCard to update + * @param update - the data to update the giftCard with + * @return the result of the update operation + */ + async update( + giftCardId: string, + update: UpdateGiftCardInput + ): Promise<GiftCard> { + return await this.atomicPhase_(async (manager) => { + const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) + + const giftCard = await this.retrieve(giftCardId) + + const { region_id, metadata, balance, ...rest } = update + + if (region_id && region_id !== giftCard.region_id) { + const region = await this.regionService_.retrieve(region_id) + giftCard.region_id = region.id + } + + if (metadata) { + giftCard.metadata = setMetadata(giftCard, metadata) + } + + if (typeof balance !== "undefined") { + if (balance < 0 || giftCard.value < balance) { + throw new MedusaError( + MedusaError.Types.INVALID_ARGUMENT, + "new balance is invalid" + ) + } + + giftCard.balance = balance + } + + for (const [key, value] of Object.entries(rest)) { + giftCard[key] = value + } + + return await giftCardRepo.save(giftCard) + }) + } + + /** + * Deletes a gift card idempotently + * @param giftCardId - id of gift card to delete + * @return the result of the delete operation + */ + async delete(giftCardId: string): Promise<GiftCard | void> { + return await this.atomicPhase_(async (manager) => { + const giftCardRepo = manager.getCustomRepository(this.giftCardRepository_) + + const giftCard = await giftCardRepo.findOne({ where: { id: giftCardId } }) + + if (!giftCard) { + return + } + + return await giftCardRepo.softRemove(giftCard) + }) + } +} + +export default GiftCardService diff --git a/packages/medusa/src/services/idempotency-key.js b/packages/medusa/src/services/idempotency-key.js index 12c89424c3..3bdfecd774 100644 --- a/packages/medusa/src/services/idempotency-key.js +++ b/packages/medusa/src/services/idempotency-key.js @@ -1,11 +1,11 @@ -import { BaseService } from "medusa-interfaces" import { MedusaError } from "medusa-core-utils" +import { BaseService } from "medusa-interfaces" import { v4 } from "uuid" const KEY_LOCKED_TIMEOUT = 1000 class IdempotencyKeyService extends BaseService { - constructor({ manager, idempotencyKeyRepository, transactionService }) { + constructor({ manager, idempotencyKeyRepository }) { super() /** @private @constant {EntityManager} */ @@ -13,9 +13,6 @@ class IdempotencyKeyService extends BaseService { /** @private @constant {IdempotencyKeyRepository} */ this.idempotencyKeyRepository_ = idempotencyKeyRepository - - /** @private @constant {TransactionService} */ - this.transactionService_ = transactionService } /** @@ -88,7 +85,6 @@ class IdempotencyKeyService extends BaseService { /** * Locks an idempotency.
* @param {string} idempotencyKey - key to lock - * @param {object} session - mongoose transaction session * @return {Promise} result of the update operation */ async lock(idempotencyKey) { diff --git a/packages/medusa/src/services/index.ts b/packages/medusa/src/services/index.ts index 1567701c18..03e68e8334 100644 --- a/packages/medusa/src/services/index.ts +++ b/packages/medusa/src/services/index.ts @@ -1,15 +1,16 @@ export { default as AuthService } from "./auth" +export { default as BatchJobService } from "./batch-job" export { default as CartService } from "./cart" -export { default as ClaimService } from "./claim" export { default as ClaimItemService } from "./claim-item" +export { default as ClaimService } from "./claim" export { default as CustomShippingOptionService } from "./custom-shipping-option" -export { default as CustomerService } from "./customer" export { default as CustomerGroupService } from "./customer-group" +export { default as CustomerService } from "./customer" export { default as DiscountService } from "./discount" export { default as DraftOrderService } from "./draft-order" export { default as EventBusService } from "./event-bus" -export { default as FulfillmentService } from "./fulfillment" export { default as FulfillmentProviderService } from "./fulfillment-provider" +export { default as FulfillmentService } from "./fulfillment" export { default as GiftCardService } from "./gift-card" export { default as IdempotencyKeyService } from "./idempotency-key" export { default as InventoryService } from "./inventory" @@ -20,22 +21,23 @@ export { default as NotificationService } from "./notification" export { default as OauthService } from "./oauth" export { default as OrderService } from "./order" export { default as PaymentProviderService } from "./payment-provider" -export { default as ProductService } from "./product" +export { default as PricingService } from "./pricing" export { default as ProductCollectionService } from "./product-collection" +export { default as ProductService } from "./product" +export { default as ProductTypeService } from "./product-type" export { default as ProductVariantService } from "./product-variant" export { default as QueryBuilderService } from "./query-builder" export { default as RegionService } from "./region" -export { default as ReturnService } from "./return" export { default as ReturnReasonService } from "./return-reason" +export { default as ReturnService } from "./return" export { default as SearchService } from "./search" export { default as ShippingOptionService } from "./shipping-option" export { default as ShippingProfileService } from "./shipping-profile" export { default as StoreService } from "./store" +export { default as StrategyResolverService } from "./strategy-resolver" export { default as SwapService } from "./swap" export { default as SystemPaymentProviderService } from "./system-payment-provider" -export { default as TotalsService } from "./totals" -export { default as TransactionService } from "./transaction" -export { default as UserService } from "./user" -export { default as TaxRateService } from "./tax-rate" export { default as TaxProviderService } from "./tax-provider" -export { default as ProductTypeService } from "./product-type" +export { default as TaxRateService } from "./tax-rate" +export { default as TotalsService } from "./totals" +export { default as UserService } from "./user" diff --git a/packages/medusa/src/services/line-item.ts b/packages/medusa/src/services/line-item.ts index 
55e55a802c..f11e1ca0b1 100644 --- a/packages/medusa/src/services/line-item.ts +++ b/packages/medusa/src/services/line-item.ts @@ -3,7 +3,12 @@ import { BaseService } from "medusa-interfaces" import { EntityManager } from "typeorm" import { LineItemRepository } from "../repositories/line-item" import { LineItemTaxLineRepository } from "../repositories/line-item-tax-line" -import { ProductService, RegionService, ProductVariantService } from "./index" +import { + PricingService, + ProductService, + RegionService, + ProductVariantService, +} from "./index" import { CartRepository } from "../repositories/cart" import { LineItem } from "../models/line-item" import LineItemAdjustmentService from "./line-item-adjustment" @@ -17,6 +22,7 @@ type InjectedDependencies = { cartRepository: typeof CartRepository productVariantService: ProductVariantService productService: ProductService + pricingService: PricingService regionService: RegionService lineItemAdjustmentService: LineItemAdjustmentService } @@ -41,6 +47,7 @@ class LineItemService extends BaseService { lineItemTaxLineRepository, productVariantService, productService, + pricingService, regionService, cartRepository, lineItemAdjustmentService, @@ -52,6 +59,7 @@ class LineItemService extends BaseService { this.itemTaxLineRepo_ = lineItemTaxLineRepository this.productVariantService_ = productVariantService this.productService_ = productService + this.pricingService_ = pricingService this.regionService_ = regionService this.cartRepository_ = cartRepository this.lineItemAdjustmentService_ = lineItemAdjustmentService @@ -68,6 +76,7 @@ class LineItemService extends BaseService { lineItemTaxLineRepository: this.itemTaxLineRepo_, productVariantService: this.productVariantService_, productService: this.productService_, + pricingService: this.pricingService_, regionService: this.regionService_, cartRepository: this.cartRepository_, lineItemAdjustmentService: this.lineItemAdjustmentService_, @@ -201,7 +210,6 @@ class LineItemService extends BaseService { .withTransaction(transactionManager) .retrieve(variantId, { relations: ["product"], - include_discount_prices: true, }), this.regionService_ .withTransaction(transactionManager) @@ -213,18 +221,19 @@ class LineItemService extends BaseService { if (context.unit_price === undefined || context.unit_price === null) { shouldMerge = true - unit_price = await this.productVariantService_ + const variantPricing = await this.pricingService_ .withTransaction(transactionManager) - .getRegionPrice(variant.id, { - regionId: region.id, + .getProductVariantPricingById(variant.id, { + region_id: region.id, quantity: quantity, customer_id: context?.customer_id, include_discount_prices: true, }) + unit_price = variantPricing.calculated_price } const rawLineItem: Partial = { - unit_price: unit_price as number, + unit_price: unit_price, title: variant.product.title, description: variant.title, thumbnail: variant.product.thumbnail, diff --git a/packages/medusa/src/services/price-list.ts b/packages/medusa/src/services/price-list.ts index 286df6fbc0..97b3afd650 100644 --- a/packages/medusa/src/services/price-list.ts +++ b/packages/medusa/src/services/price-list.ts @@ -23,6 +23,7 @@ import { buildQuery } from "../utils" import { FilterableProductProps } from "../types/product" import ProductVariantService from "./product-variant" import { FilterableProductVariantProps } from "../types/product-variant" +import { ProductVariantRepository } from "../repositories/product-variant" type PriceListConstructorProps = { manager: 
EntityManager @@ -32,6 +33,7 @@ type PriceListConstructorProps = { productVariantService: ProductVariantService priceListRepository: typeof PriceListRepository moneyAmountRepository: typeof MoneyAmountRepository + productVariantRepository: typeof ProductVariantRepository } /** @@ -48,6 +50,7 @@ class PriceListService extends TransactionBaseService { protected readonly variantService_: ProductVariantService protected readonly priceListRepo_: typeof PriceListRepository protected readonly moneyAmountRepo_: typeof MoneyAmountRepository + protected readonly productVariantRepo_: typeof ProductVariantRepository constructor({ manager, @@ -57,6 +60,7 @@ class PriceListService extends TransactionBaseService { productVariantService, priceListRepository, moneyAmountRepository, + productVariantRepository, }: PriceListConstructorProps) { // eslint-disable-next-line prefer-rest-params super(arguments[0]) @@ -68,6 +72,7 @@ class PriceListService extends TransactionBaseService { this.regionService_ = regionService this.priceListRepo_ = priceListRepository this.moneyAmountRepo_ = moneyAmountRepository + this.productVariantRepo_ = productVariantRepository } /** @@ -247,10 +252,7 @@ class PriceListService extends TransactionBaseService { const priceListRepo = manager.getCustomRepository(this.priceListRepo_) const { q, ...priceListSelector } = selector - const query = buildQuery( - priceListSelector, - config - ) + const query = buildQuery(priceListSelector, config) const groups = query.where.customer_groups as FindOperator query.where.customer_groups = undefined @@ -277,10 +279,10 @@ class PriceListService extends TransactionBaseService { return await this.atomicPhase_(async (manager: EntityManager) => { const priceListRepo = manager.getCustomRepository(this.priceListRepo_) const { q, ...priceListSelector } = selector - const { relations, ...query } = buildQuery( - priceListSelector, - config - ) + const { relations, ...query } = buildQuery< + FilterablePriceListProps, + FilterablePriceListProps + >(priceListSelector, config) const groups = query.where.customer_groups as FindOperator delete query.where.customer_groups @@ -327,6 +329,9 @@ class PriceListService extends TransactionBaseService { requiresPriceList = false ): Promise<[Product[], number]> { return await this.atomicPhase_(async (manager: EntityManager) => { + const productVariantRepo = manager.getCustomRepository( + this.productVariantRepo_ + ) const [products, count] = await this.productService_.listAndCount( selector, config @@ -346,10 +351,10 @@ class PriceListService extends TransactionBaseService { requiresPriceList ) - return { + return productVariantRepo.create({ ...v, prices, - } + }) }) ) } diff --git a/packages/medusa/src/services/pricing.ts b/packages/medusa/src/services/pricing.ts new file mode 100644 index 0000000000..8c93527b87 --- /dev/null +++ b/packages/medusa/src/services/pricing.ts @@ -0,0 +1,493 @@ +import { EntityManager } from "typeorm" +import { MedusaError } from "medusa-core-utils" +import { ProductVariantService, RegionService, TaxProviderService } from "." 
+import { Product, ProductVariant, ShippingOption } from "../models" +import { TaxServiceRate } from "../types/tax-service" +import { + ProductVariantPricing, + TaxedPricing, + PricingContext, + PricedProduct, + PricedShippingOption, + PricedVariant, +} from "../types/pricing" +import { TransactionBaseService } from "../interfaces" +import { + IPriceSelectionStrategy, + PriceSelectionContext, +} from "../interfaces/price-selection-strategy" + +type InjectedDependencies = { + manager: EntityManager + productVariantService: ProductVariantService + taxProviderService: TaxProviderService + regionService: RegionService + priceSelectionStrategy: IPriceSelectionStrategy +} + +/** + * Allows retrieval of prices. + * @extends TransactionBaseService + */ +class PricingService extends TransactionBaseService { + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + protected readonly regionService: RegionService + protected readonly taxProviderService: TaxProviderService + protected readonly priceSelectionStrategy: IPriceSelectionStrategy + protected readonly productVariantService: ProductVariantService + + constructor({ + manager, + productVariantService, + taxProviderService, + regionService, + priceSelectionStrategy, + }: InjectedDependencies) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) + + this.manager_ = manager + this.regionService = regionService + this.taxProviderService = taxProviderService + this.priceSelectionStrategy = priceSelectionStrategy + this.productVariantService = productVariantService + } + + /** + * Collects additional information necessary for completing the price + * selection. + * @param context - the price selection context to use + * @return The pricing context + */ + async collectPricingContext( + context: PriceSelectionContext + ): Promise<PricingContext> { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + let automaticTaxes = false + let taxRate = null + let currencyCode = context.currency_code + + if (context.region_id) { + const region = await this.regionService + .withTransaction(transactionManager) + .retrieve(context.region_id, { + select: ["id", "currency_code", "automatic_taxes", "tax_rate"], + }) + + currencyCode = region.currency_code + automaticTaxes = region.automatic_taxes + taxRate = region.tax_rate + } + + return { + price_selection: { + ...context, + currency_code: currencyCode, + }, + automatic_taxes: automaticTaxes, + tax_rate: taxRate, + } + } + ) + } + + /** + * Calculates the tax amounts for a product variant's prices + * @param variantPricing - the prices retrieved from a variant + * @param productRates - the tax rates that the product has applied + * @return The tax related variant prices.
+ */ + async calculateTaxes( + variantPricing: ProductVariantPricing, + productRates: TaxServiceRate[] + ): Promise { + const rate = productRates.reduce( + (accRate: number, nextTaxRate: TaxServiceRate) => { + return accRate + (nextTaxRate.rate || 0) / 100 + }, + 0 + ) + + const taxedPricing: TaxedPricing = { + original_tax: null, + calculated_tax: null, + original_price_incl_tax: null, + calculated_price_incl_tax: null, + tax_rates: productRates, + } + + if (variantPricing.calculated_price !== null) { + const taxAmount = Math.round(variantPricing.calculated_price * rate) + taxedPricing.calculated_tax = taxAmount + taxedPricing.calculated_price_incl_tax = + variantPricing.calculated_price + taxAmount + } + + if (variantPricing.original_price !== null) { + const taxAmount = Math.round(variantPricing.original_price * rate) + taxedPricing.original_tax = taxAmount + taxedPricing.original_price_incl_tax = + variantPricing.original_price + taxAmount + } + + return taxedPricing + } + + private async getProductVariantPricing_( + variantId: string, + taxRates: TaxServiceRate[], + context: PricingContext + ): Promise { + const transactionManager = this.transactionManager_ ?? this.manager_ + const pricing = await this.priceSelectionStrategy + .withTransaction(transactionManager) + .calculateVariantPrice(variantId, context.price_selection) + + const pricingResult: ProductVariantPricing = { + prices: pricing.prices, + original_price: pricing.originalPrice, + calculated_price: pricing.calculatedPrice, + calculated_price_type: pricing.calculatedPriceType, + original_price_incl_tax: null, + calculated_price_incl_tax: null, + original_tax: null, + calculated_tax: null, + tax_rates: null, + } + + if (context.automatic_taxes && context.price_selection.region_id) { + const taxResults = await this.calculateTaxes(pricingResult, taxRates) + + pricingResult.original_price_incl_tax = taxResults.original_price_incl_tax + pricingResult.calculated_price_incl_tax = + taxResults.calculated_price_incl_tax + pricingResult.original_tax = taxResults.original_tax + pricingResult.calculated_tax = taxResults.calculated_tax + pricingResult.tax_rates = taxResults.tax_rates + } + + return pricingResult + } + + /** + * Gets the prices for a product variant. + * @param variant - the id of the variant to get prices for + * @param context - the price selection context to use + * @return The product variant prices + */ + async getProductVariantPricing( + variant: Pick, + context: PriceSelectionContext | PricingContext + ): Promise { + let pricingContext: PricingContext + if ("automatic_taxes" in context) { + pricingContext = context + } else { + pricingContext = await this.collectPricingContext(context) + } + + let productRates: TaxServiceRate[] = [] + if ( + pricingContext.automatic_taxes && + pricingContext.price_selection.region_id + ) { + productRates = await this.taxProviderService.getRegionRatesForProduct( + variant.product_id, + { + id: pricingContext.price_selection.region_id, + tax_rate: pricingContext.tax_rate, + } + ) + } + + return await this.getProductVariantPricing_( + variant.id, + productRates, + pricingContext + ) + } + + /** + * Gets the prices for a product variant by a variant id. 
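(A worked example of the calculateTaxes arithmetic above, using assumed numbers; illustrative only, not part of the patch.)

// With product rates of 19% and 6%, the reducer yields a combined rate of 0.25.
const pricing = { calculated_price: 1000, original_price: 1200 } as ProductVariantPricing
const rates = [{ rate: 19 }, { rate: 6 }] as TaxServiceRate[]
const taxed = await pricingService.calculateTaxes(pricing, rates)
// taxed.calculated_tax === Math.round(1000 * 0.25) === 250
// taxed.calculated_price_incl_tax === 1250
// taxed.original_tax === 300 and taxed.original_price_incl_tax === 1500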
+ * @param variantId - the id of the variant to get prices for + * @param context - the price selection context to use + * @return The product variant prices + */ + async getProductVariantPricingById( + variantId: string, + context: PriceSelectionContext | PricingContext + ): Promise { + let pricingContext: PricingContext + if ("automatic_taxes" in context) { + pricingContext = context + } else { + pricingContext = await this.collectPricingContext(context) + } + + let productRates: TaxServiceRate[] = [] + if ( + pricingContext.automatic_taxes && + pricingContext.price_selection.region_id + ) { + const { product_id } = await this.productVariantService.retrieve( + variantId, + { select: ["id", "product_id"] } + ) + productRates = await this.taxProviderService.getRegionRatesForProduct( + product_id, + { + id: pricingContext.price_selection.region_id, + tax_rate: pricingContext.tax_rate, + } + ) + } + + return await this.getProductVariantPricing_( + variantId, + productRates, + pricingContext + ) + } + + private async getProductPricing_( + productId: string, + variants: ProductVariant[], + context: PricingContext + ): Promise> { + const transactionManager = this.transactionManager_ ?? this.manager_ + let taxRates: TaxServiceRate[] = [] + if (context.automatic_taxes && context.price_selection.region_id) { + taxRates = await this.taxProviderService + .withTransaction(transactionManager) + .getRegionRatesForProduct(productId, { + id: context.price_selection.region_id, + tax_rate: context.tax_rate, + }) + } + + const pricings = {} + await Promise.all( + variants.map(async ({ id }) => { + const variantPricing = await this.getProductVariantPricing_( + id, + taxRates, + context + ) + pricings[id] = variantPricing + }) + ) + + return pricings + } + + /** + * Gets all the variant prices for a product. All the product's variants will + * be fetched. + * @param product - the product to get pricing for. + * @param context - the price selection context to use + * @return A map of variant ids to their corresponding prices + */ + async getProductPricing( + product: Pick, + context: PriceSelectionContext + ): Promise> { + const pricingContext = await this.collectPricingContext(context) + return await this.getProductPricing_( + product.id, + product.variants, + pricingContext + ) + } + + /** + * Gets all the variant prices for a product by the product id + * @param productId - the id of the product to get prices for + * @param context - the price selection context to use + * @return A map of variant ids to their corresponding prices + */ + async getProductPricingById( + productId: string, + context: PriceSelectionContext + ): Promise> { + const pricingContext = await this.collectPricingContext(context) + const variants = await this.productVariantService.list( + { product_id: productId }, + { select: ["id"] } + ) + return await this.getProductPricing_(productId, variants, pricingContext) + } + + /** + * Set additional prices on a list of product variants. 
+ * @param variants - list of variants on which to set additional prices + * @param context - the price selection context to use + * @return A list of products with variants decorated with prices + */ + async setVariantPrices( + variants: ProductVariant[], + context: PriceSelectionContext + ): Promise { + const pricingContext = await this.collectPricingContext(context) + return await Promise.all( + variants.map(async (variant) => { + const variantPricing = await this.getProductVariantPricing( + variant, + pricingContext + ) + return { + ...variant, + ...variantPricing, + } + }) + ) + } + + /** + * Set additional prices on a list of products. + * @param products - list of products on which to set additional prices + * @param context - the price selection context to use + * @return A list of products with variants decorated with prices + */ + async setProductPrices( + products: Product[], + context: PriceSelectionContext = {} + ): Promise<(Product | PricedProduct)[]> { + const pricingContext = await this.collectPricingContext(context) + return await Promise.all( + products.map(async (product) => { + if (!product?.variants?.length) { + return product + } + + const variantPricing = await this.getProductPricing_( + product.id, + product.variants, + pricingContext + ) + + const pricedVariants = product.variants.map( + (productVariant): PricedVariant => { + const pricing = variantPricing[productVariant.id] + return { + ...productVariant, + ...pricing, + } + } + ) + + const pricedProduct = { + ...product, + variants: pricedVariants, + } + + return pricedProduct + }) + ) + } + + /** + * Gets the prices for a shipping option. + * @param shippingOption - the shipping option to get prices for + * @param context - the price selection context to use + * @return The shipping option prices + */ + async getShippingOptionPricing( + shippingOption: ShippingOption, + context: PriceSelectionContext | PricingContext + ): Promise { + let pricingContext: PricingContext + if ("automatic_taxes" in context) { + pricingContext = context + } else { + pricingContext = await this.collectPricingContext(context) + } + + let shippingOptionRates: TaxServiceRate[] = [] + if ( + pricingContext.automatic_taxes && + pricingContext.price_selection.region_id + ) { + shippingOptionRates = + await this.taxProviderService.getRegionRatesForShipping( + shippingOption.id, + { + id: pricingContext.price_selection.region_id, + tax_rate: pricingContext.tax_rate, + } + ) + } + + const price = shippingOption.amount || 0 + const rate = shippingOptionRates.reduce( + (accRate: number, nextTaxRate: TaxServiceRate) => { + return accRate + (nextTaxRate.rate || 0) / 100 + }, + 0 + ) + const tax = Math.round(price * rate) + const total = price + tax + + return { + ...shippingOption, + price_incl_tax: total, + tax_rates: shippingOptionRates, + } + } + + /** + * Set additional prices on a list of shipping options. 
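(For orientation, a sketch of how the price decoration APIs above might be consumed; the pricingService handle, the products array and the id variables are assumed, and this is illustrative rather than part of the patch.)

// Illustrative only: decorate a list of products with region-specific, tax-aware prices.
const pricedProducts = await pricingService.setProductPrices(products, {
  region_id: regionId, // drives currency, tax_rate and automatic_taxes via collectPricingContext
  customer_id: customerId, // optional: enables customer-group specific prices
  include_discount_prices: true,
})
// Each variant on the returned products carries calculated_price and original_price,
// plus the *_incl_tax and tax amount fields when automatic taxes apply for the region.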
+ * @param shippingOptions - list of shipping options on which to set additional prices + * @param context - the price selection context to use + * @return A list of shipping options with prices + */ + async setShippingOptionPrices( + shippingOptions: ShippingOption[], + context: Omit = {} + ): Promise { + const regions = new Set() + + for (const shippingOption of shippingOptions) { + regions.add(shippingOption.region_id) + } + + const contexts = await Promise.all( + [...regions].map(async (regionId) => { + return { + context: await this.collectPricingContext({ + ...context, + region_id: regionId, + }), + region_id: regionId, + } + }) + ) + + return await Promise.all( + shippingOptions.map(async (shippingOption) => { + const pricingContext = contexts.find( + (c) => c.region_id === shippingOption.region_id + ) + + if (!pricingContext) { + throw new MedusaError( + MedusaError.Types.UNEXPECTED_STATE, + "Could not find pricing context for shipping option" + ) + } + + const shippingOptionPricing = await this.getShippingOptionPricing( + shippingOption, + pricingContext.context + ) + return { + ...shippingOption, + ...shippingOptionPricing, + } + }) + ) + } +} + +export default PricingService diff --git a/packages/medusa/src/services/product-variant.ts b/packages/medusa/src/services/product-variant.ts index 2d8f6cf4af..9715c15c45 100644 --- a/packages/medusa/src/services/product-variant.ts +++ b/packages/medusa/src/services/product-variant.ts @@ -17,8 +17,8 @@ import { FindWithRelationsOptions, ProductVariantRepository, } from "../repositories/product-variant" -import EventBusService from "../services/event-bus" -import RegionService from "../services/region" +import EventBusService from "./event-bus" +import RegionService from "./region" import { FindConfig } from "../types/common" import { CreateProductVariantInput, @@ -125,12 +125,6 @@ class ProductVariantService extends BaseService { ) const validatedId = this.validateId_(variantId) - const priceIndex = config.relations?.indexOf("prices") ?? -1 - if (priceIndex >= 0 && config.relations) { - config.relations = [...config.relations] - config.relations.splice(priceIndex, 1) - } - const query = this.buildQuery_({ id: validatedId }, config) const variant = await variantRepo.findOne(query) @@ -141,16 +135,7 @@ class ProductVariantService extends BaseService { ) } - return priceIndex >= 0 - ? ((await this.setAdditionalPrices( - variant, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - )) as ProductVariant) - : variant + return variant } /** @@ -185,16 +170,7 @@ class ProductVariantService extends BaseService { ) } - return priceIndex >= 0 - ? ((await this.setAdditionalPrices( - variant, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - )) as ProductVariant) - : variant + return variant } /** @@ -617,12 +593,6 @@ class ProductVariantService extends BaseService { this.productVariantRepository_ ) - const priceIndex = config.relations?.indexOf("prices") ?? -1 - if (priceIndex >= 0 && config.relations) { - config.relations = [...config.relations] - config.relations.splice(priceIndex, 1) - } - const { q, query, relations } = this.prepareListQuery_(selector, config) if (q) { @@ -634,17 +604,6 @@ class ProductVariantService extends BaseService { raw.map((i) => i.id), query.withDeleted ?? 
false ) - if (priceIndex >= 0) { - const res = await this.setAdditionalPrices( - variants, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - return [res as ProductVariant[], count] - } return [variants, count] } @@ -654,18 +613,6 @@ class ProductVariantService extends BaseService { query ) - if (priceIndex >= 0) { - const res = await this.setAdditionalPrices( - variants, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - return [res as ProductVariant[], count] - } - return [variants, count] } @@ -722,18 +669,7 @@ class ProductVariantService extends BaseService { } } - const variants = await productVariantRepo.find(query) - - return priceIndex >= 0 - ? ((await this.setAdditionalPrices( - variants, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - )) as ProductVariant[]) - : variants + return await productVariantRepo.find(query) } /** @@ -883,72 +819,6 @@ class ProductVariantService extends BaseService { return qb } - - /** - * Set additional prices on a list of variants. - * @param {ProductVariant | ProductVariant[] } variant variant on which to set additional prices - * @param {string} currency_code currency code to fetch prices for - * @param {string} region_id region to fetch prices for - * @param {string} cart_id string of cart to use as a basis for getting currency and region - * @param {string} customer_id id of potentially logged in customer, used to get prices valid for their customer groups - * @param {boolean} include_discount_prices should result include discount pricing - * @return {Promise} A list of variants with variants decorated with "additional_prices" - */ - async setAdditionalPrices( - variant, - currency_code, - region_id, - cart_id, - customer_id, - include_discount_prices = false - ): Promise { - return this.atomicPhase_(async (manager) => { - const cartRepo = manager.getCustomRepository(this.cartRepository_) - - let regionId = region_id - let currencyCode = currency_code - - if (cart_id) { - const cart = await cartRepo.findOne({ - where: { id: cart_id }, - relations: ["region"], - }) - - regionId = cart.region.id - currencyCode = cart.region.currency_code - } - - const variantArray = Array.isArray(variant) ? variant : [variant] - - const priceSelectionStrategy = - this.priceSelectionStrategy_.withTransaction(manager) - - const variantsWithPrices = await Promise.all( - variantArray.map(async (v) => { - const prices = await priceSelectionStrategy.calculateVariantPrice( - v.id, - { - region_id: regionId, - currency_code: currencyCode, - cart_id: cart_id, - customer_id: customer_id, - include_discount_prices: include_discount_prices, - } - ) - - return { - ...v, - prices: prices.prices, - original_price: prices.originalPrice, - calculated_price: prices.calculatedPrice, - calculated_price_type: prices.calculatedPriceType, - } - }) - ) - - return Array.isArray(variant) ? 
variantsWithPrices : variantsWithPrices[0] - }) - } } export default ProductVariantService diff --git a/packages/medusa/src/services/product.js b/packages/medusa/src/services/product.js deleted file mode 100644 index a041e9c7c5..0000000000 --- a/packages/medusa/src/services/product.js +++ /dev/null @@ -1,1123 +0,0 @@ -import { MedusaError } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" -import { Brackets } from "typeorm" -import { formatException } from "../utils/exception-formatter" -import { defaultAdminProductsVariantsRelations } from "../api/routes/admin/products" - -/** - * Provides layer to manipulate products. - * @extends BaseService - */ -class ProductService extends BaseService { - static IndexName = `products` - static Events = { - UPDATED: "product.updated", - CREATED: "product.created", - DELETED: "product.deleted", - } - - constructor({ - manager, - productRepository, - productVariantRepository, - productOptionRepository, - eventBusService, - productVariantService, - productCollectionService, - productTypeRepository, - productTagRepository, - imageRepository, - searchService, - cartRepository, - priceSelectionStrategy, - }) { - super() - - /** @private @const {EntityManager} */ - this.manager_ = manager - - /** @private @const {ProductOption} */ - this.productOptionRepository_ = productOptionRepository - - /** @private @const {Product} */ - this.productRepository_ = productRepository - - /** @private @const {ProductVariant} */ - this.productVariantRepository_ = productVariantRepository - - /** @private @const {EventBus} */ - this.eventBus_ = eventBusService - - /** @private @const {ProductVariantService} */ - this.productVariantService_ = productVariantService - - /** @private @const {ProductCollectionService} */ - this.productCollectionService_ = productCollectionService - - /** @private @const {ProductCollectionService} */ - this.productTypeRepository_ = productTypeRepository - - /** @private @const {ProductCollectionService} */ - this.productTagRepository_ = productTagRepository - - /** @private @const {ImageRepository} */ - this.imageRepository_ = imageRepository - - /** @private @const {SearchService} */ - this.searchService_ = searchService - - /** @private @const {CartRepository} */ - this.cartRepository_ = cartRepository - - /** @private @const {IPriceSelectionStrategy} */ - this.priceSelectionStrategy_ = priceSelectionStrategy - } - - withTransaction(transactionManager) { - if (!transactionManager) { - return this - } - - const cloned = new ProductService({ - manager: transactionManager, - productRepository: this.productRepository_, - productVariantRepository: this.productVariantRepository_, - productOptionRepository: this.productOptionRepository_, - eventBusService: this.eventBus_, - productVariantService: this.productVariantService_, - productCollectionService: this.productCollectionService_, - productTagRepository: this.productTagRepository_, - productTypeRepository: this.productTypeRepository_, - imageRepository: this.imageRepository_, - cartRepository: this.cartRepository_, - priceSelectionStrategy: this.priceSelectionStrategy_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - - /** - * Lists products based on the provided parameters. 
- * @param {object} selector - an object that defines rules to filter products - * by - * @param {object} config - object that defines the scope for what should be - * returned - * @return {Promise} the result of the find operation - */ - async list( - selector = {}, - config = { - relations: [], - skip: 0, - take: 20, - include_discount_prices: false, - } - ) { - const productRepo = this.manager_.getCustomRepository( - this.productRepository_ - ) - const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 - if (priceIndex >= 0 && config.relations) { - config.relations = [...config.relations] - config.relations.splice(priceIndex, 1) - } - - const { q, query, relations } = this.prepareListQuery_(selector, config) - - if (q) { - const qb = this.getFreeTextQueryBuilder_(productRepo, query, q) - const raw = await qb.getMany() - return productRepo.findWithRelations( - relations, - raw.map((i) => i.id), - query.withDeleted ?? false - ) - } - - const products = productRepo.findWithRelations(relations, query) - - return priceIndex > -1 - ? await this.setAdditionalPrices( - products, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - : products - } - - /** - * Lists products based on the provided parameters and includes the count of - * products that match the query. - * @param {object} selector - an object that defines rules to filter products - * by - * @param {object} config - object that defines the scope for what should be - * returned - * @return {Promise<[Product[], number]>} an array containing the products as - * the first element and the total count of products that matches the query - * as the second element. - */ - async listAndCount( - selector = {}, - config = { - relations: [], - skip: 0, - take: 20, - include_discount_prices: false, - } - ) { - const productRepo = this.manager_.getCustomRepository( - this.productRepository_ - ) - - const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 - if (priceIndex >= 0 && config.relations) { - config.relations = [...config.relations] - config.relations.splice(priceIndex, 1) - } - - const { q, query, relations } = this.prepareListQuery_(selector, config) - - if (q) { - const qb = this.getFreeTextQueryBuilder_(productRepo, query, q) - const [raw, count] = await qb.getManyAndCount() - - const products = await productRepo.findWithRelations( - relations, - raw.map((i) => i.id), - query.withDeleted ?? false - ) - return [products, count] - } - - const [products, count] = await productRepo.findWithRelationsAndCount( - relations, - query - ) - - if (priceIndex > -1) { - const productsWithAdditionalPrices = await this.setAdditionalPrices( - products, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - - return [productsWithAdditionalPrices, count] - } else { - return [products, count] - } - } - - /** - * Return the total number of documents in database - * @param {object} selector - the selector to choose products by - * @return {Promise} the result of the count operation - */ - count(selector = {}) { - const productRepo = this.manager_.getCustomRepository( - this.productRepository_ - ) - const query = this.buildQuery_(selector) - return productRepo.count(query) - } - - /** - * Gets a product by id. - * Throws in case of DB Error and if product was not found. - * @param {string} productId - id of the product to get. 
- * @param {object} config - object that defines what should be included in the - * query response - * @return {Promise} the result of the find one operation. - */ - async retrieve(productId, config = { include_discount_prices: false }) { - const productRepo = this.manager_.getCustomRepository( - this.productRepository_ - ) - const validatedId = this.validateId_(productId) - - const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 - if (priceIndex >= 0 && config.relations) { - config.relations = [...config.relations] - config.relations.splice(priceIndex, 1) - } - - const query = { where: { id: validatedId } } - - if (config.relations && config.relations.length > 0) { - query.relations = config.relations - } - - if (config.select && config.select.length > 0) { - query.select = config.select - } - - const rels = query.relations - delete query.relations - const product = await productRepo.findOneWithRelations(rels, query) - - if (!product) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Product with id: ${productId} was not found` - ) - } - - return priceIndex > -1 - ? await this.setAdditionalPrices( - product, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - : product - } - - /** - * Gets a product by handle. - * Throws in case of DB Error and if product was not found. - * @param {string} productHandle - handle of the product to get. - * @param {object} config - details about what to get from the product - * @return {Promise} the result of the find one operation. - */ - async retrieveByHandle(productHandle, config = {}) { - const productRepo = this.manager_.getCustomRepository( - this.productRepository_ - ) - - const priceIndex = config.relations?.indexOf("variants.prices") ?? -1 - if (priceIndex >= 0 && config.relations) { - config.relations = [...config.relations] - config.relations.splice(priceIndex, 1) - } - - const query = { where: { handle: productHandle } } - - if (config.relations && config.relations.length > 0) { - query.relations = config.relations - } - - if (config.select && config.select.length > 0) { - query.select = config.select - } - - const rels = query.relations - delete query.relations - const product = await productRepo.findOneWithRelations(rels, query) - - if (!product) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Product with handle: ${productHandle} was not found` - ) - } - - return priceIndex > -1 - ? await this.setAdditionalPrices( - product, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - : product - } - - /** - * Gets a product by external id. - * Throws in case of DB Error and if product was not found. - * @param {string} externalId - handle of the product to get. - * @param {object} config - details about what to get from the product - * @return {Promise} the result of the find one operation. - */ - async retrieveByExternalId(externalId, config = {}) { - const productRepo = this.manager_.getCustomRepository( - this.productRepository_ - ) - - const priceIndex = config.relations?.indexOf("variants.prices") ?? 
-1 - if (priceIndex >= 0 && config.relations) { - config.relations = [...config.relations] - config.relations.splice(priceIndex, 1) - } - - const query = { where: { external_id: externalId } } - - if (config.relations && config.relations.length > 0) { - query.relations = config.relations - } - - if (config.select && config.select.length > 0) { - query.select = config.select - } - - const rels = query.relations - delete query.relations - const product = await productRepo.findOneWithRelations(rels, query) - - if (!product) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Product with exteral_id: ${externalId} was not found` - ) - } - - return priceIndex > -1 - ? await this.setAdditionalPrices( - product, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - : product - } - - /** - * Gets all variants belonging to a product. - * @param {string} productId - the id of the product to get variants from. - * @param {FindConfig} config - The config to select and configure relations etc... - * @return {Promise} an array of variants - */ - async retrieveVariants( - productId, - config = { - skip: 0, - take: 50, - relations: defaultAdminProductsVariantsRelations, - } - ) { - const product = await this.retrieve(productId, config) - return product.variants - } - - async listTypes() { - const productTypeRepository = this.manager_.getCustomRepository( - this.productTypeRepository_ - ) - - return await productTypeRepository.find({}) - } - - async listTagsByUsage(count = 10) { - const tags = await this.manager_.query( - ` - SELECT ID, O.USAGE_COUNT, PT.VALUE - FROM PRODUCT_TAG PT - LEFT JOIN - (SELECT COUNT(*) AS USAGE_COUNT, - PRODUCT_TAG_ID - FROM PRODUCT_TAGS - GROUP BY PRODUCT_TAG_ID) O ON O.PRODUCT_TAG_ID = PT.ID - ORDER BY O.USAGE_COUNT DESC - LIMIT $1`, - [count] - ) - - return tags - } - - async upsertProductType_(type) { - const productTypeRepository = this.manager_.getCustomRepository( - this.productTypeRepository_ - ) - - if (type === null) { - return null - } - - const existing = await productTypeRepository.findOne({ - where: { value: type.value }, - }) - - if (existing) { - return existing.id - } - - const created = productTypeRepository.create({ - value: type.value, - }) - const result = await productTypeRepository.save(created) - - return result.id - } - - async upsertProductTags_(tags) { - const productTagRepository = this.manager_.getCustomRepository( - this.productTagRepository_ - ) - - const newTags = [] - for (const tag of tags) { - const existing = await productTagRepository.findOne({ - where: { value: tag.value }, - }) - - if (existing) { - newTags.push(existing) - } else { - const created = productTagRepository.create(tag) - const result = await productTagRepository.save(created) - newTags.push(result) - } - } - - return newTags - } - - /** - * Creates a product. - * @param {object} productObject - the product to create - * @return {Promise} resolves to the creation result. 
- */ - async create(productObject) { - return this.atomicPhase_(async (manager) => { - const productRepo = manager.getCustomRepository(this.productRepository_) - const optionRepo = manager.getCustomRepository( - this.productOptionRepository_ - ) - - const { options, tags, type, images, ...rest } = productObject - - if (!rest.thumbnail && images && images.length) { - rest.thumbnail = images[0] - } - - // if product is a giftcard, we should disallow discounts - if (rest.is_giftcard) { - rest.discountable = false - } - - try { - let product = productRepo.create(rest) - - if (images) { - product.images = await this.upsertImages_(images) - } - - if (tags) { - product.tags = await this.upsertProductTags_(tags) - } - - if (typeof type !== `undefined`) { - product.type_id = await this.upsertProductType_(type) - } - - product = await productRepo.save(product) - - product.options = await Promise.all( - options.map(async (o) => { - const res = optionRepo.create({ ...o, product_id: product.id }) - await optionRepo.save(res) - return res - }) - ) - - const result = await this.retrieve(product.id, { - relations: ["options"], - }) - - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.CREATED, { - id: result.id, - }) - return result - } catch (error) { - throw formatException(error) - } - }) - } - - async upsertImages_(images) { - const imageRepository = this.manager_.getCustomRepository( - this.imageRepository_ - ) - - const productImages = [] - for (const img of images) { - const existing = await imageRepository.findOne({ - where: { url: img }, - }) - - if (existing) { - productImages.push(existing) - } else { - const created = imageRepository.create({ url: img }) - productImages.push(created) - } - } - - return productImages - } - - /** - * Updates a product. Product variant updates should use dedicated methods, - * e.g. `addVariant`, etc. The function will throw errors if metadata or - * product variant updates are attempted. - * @param {string} productId - the id of the product. Must be a string that - * can be casted to an ObjectId - * @param {object} update - an object with the update values. - * @return {Promise} resolves to the update result. 
- */ - async update(productId, update) { - return this.atomicPhase_(async (manager) => { - const productRepo = manager.getCustomRepository(this.productRepository_) - const productVariantRepo = manager.getCustomRepository( - this.productVariantRepository_ - ) - - const product = await this.retrieve(productId, { - relations: ["variants", "tags", "images"], - }) - - const { variants, metadata, images, tags, type, ...rest } = update - - if (!product.thumbnail && !update.thumbnail && images?.length) { - product.thumbnail = images[0] - } - - if (images) { - product.images = await this.upsertImages_(images) - } - - if (metadata) { - product.metadata = this.setMetadata_(product, metadata) - } - - if (typeof type !== `undefined`) { - product.type_id = await this.upsertProductType_(type) - } - - if (tags) { - product.tags = await this.upsertProductTags_(tags) - } - - if (variants) { - // Iterate product variants and update their properties accordingly - for (const variant of product.variants) { - const exists = variants.find((v) => v.id && variant.id === v.id) - if (!exists) { - await productVariantRepo.remove(variant) - } - } - - const newVariants = [] - for (const [i, newVariant] of variants.entries()) { - newVariant.variant_rank = i - - if (newVariant.id) { - const variant = product.variants.find((v) => v.id === newVariant.id) - - if (!variant) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Variant with id: ${newVariant.id} is not associated with this product` - ) - } - - const saved = await this.productVariantService_ - .withTransaction(manager) - .update(variant, newVariant) - - newVariants.push(saved) - } else { - // If the provided variant does not have an id, we assume that it - // should be created - const created = await this.productVariantService_ - .withTransaction(manager) - .create(product.id, newVariant) - - newVariants.push(created) - } - } - - product.variants = newVariants - } - - for (const [key, value] of Object.entries(rest)) { - if (typeof value !== `undefined`) { - product[key] = value - } - } - - const result = await productRepo.save(product) - - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.UPDATED, { - id: result.id, - fields: Object.keys(update), - }) - return result - }) - } - - /** - * Deletes a product from a given product id. The product's associated - * variants will also be deleted. - * @param {string} productId - the id of the product to delete. Must be - * castable as an ObjectId - * @return {Promise} empty promise - */ - async delete(productId) { - return this.atomicPhase_(async (manager) => { - const productRepo = manager.getCustomRepository(this.productRepository_) - - // Should not fail, if product does not exist, since delete is idempotent - const product = await productRepo.findOne( - { id: productId }, - { relations: ["variants", "variants.prices", "variants.options"] } - ) - - if (!product) { - return - } - - await productRepo.softRemove(product) - - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.DELETED, { - id: productId, - }) - - return Promise.resolve() - }) - } - - /** - * Adds an option to a product. Options can, for example, be "Size", "Color", - * etc. Will update all the products variants with a dummy value for the newly - * created option. The same option cannot be added more than once. - * @param {string} productId - the product to apply the new option to - * @param {string} optionTitle - the display title of the option, e.g. 
"Size" - * @return {Promise} the result of the model update operation - */ - async addOption(productId, optionTitle) { - return this.atomicPhase_(async (manager) => { - const productOptionRepo = manager.getCustomRepository( - this.productOptionRepository_ - ) - - const product = await this.retrieve(productId, { - relations: ["options", "variants"], - }) - - if (product.options.find((o) => o.title === optionTitle)) { - throw new MedusaError( - MedusaError.Types.DUPLICATE_ERROR, - `An option with the title: ${optionTitle} already exists` - ) - } - - const option = await productOptionRepo.create({ - title: optionTitle, - product_id: productId, - }) - - await productOptionRepo.save(option) - - for (const variant of product.variants) { - this.productVariantService_ - .withTransaction(manager) - .addOptionValue(variant.id, option.id, "Default Value") - } - - const result = await this.retrieve(productId) - - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.UPDATED, result) - return result - }) - } - - async reorderVariants(productId, variantOrder) { - return this.atomicPhase_(async (manager) => { - const productRepo = manager.getCustomRepository(this.productRepository_) - - const product = await this.retrieve(productId, { - relations: ["variants"], - }) - - if (product.variants.length !== variantOrder.length) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Product variants and new variant order differ in length.` - ) - } - - product.variants = variantOrder.map((vId) => { - const variant = product.variants.find((v) => v.id === vId) - if (!variant) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Product has no variant with id: ${vId}` - ) - } - - return variant - }) - - const result = productRepo.save(product) - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.UPDATED, result) - return result - }) - } - - /** - * Changes the order of a product's options. Will throw if the length of - * optionOrder and the length of the product's options are different. Will - * throw optionOrder contains an id not associated with the product. - * @param {string} productId - the product whose options we are reordering - * @param {string[]} optionOrder - the ids of the product's options in the - * new order - * @return {Promise} the result of the update operation - */ - async reorderOptions(productId, optionOrder) { - return this.atomicPhase_(async (manager) => { - const productRepo = manager.getCustomRepository(this.productRepository_) - - const product = await this.retrieve(productId, { relations: ["options"] }) - - if (product.options.length !== optionOrder.length) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Product options and new options order differ in length.` - ) - } - - product.options = optionOrder.map((oId) => { - const option = product.options.find((o) => o.id === oId) - if (!option) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Product has no option with id: ${oId}` - ) - } - - return option - }) - - const result = productRepo.save(product) - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.UPDATED, result) - return result - }) - } - - /** - * Updates a product's option. Throws if the call tries to update an option - * not associated with the product. Throws if the updated title already exists. 
- * @param {string} productId - the product whose option we are updating - * @param {string} optionId - the id of the option we are updating - * @param {object} data - the data to update the option with - * @return {Promise} the updated product - */ - async updateOption(productId, optionId, data) { - return this.atomicPhase_(async (manager) => { - const productOptionRepo = manager.getCustomRepository( - this.productOptionRepository_ - ) - - const product = await this.retrieve(productId, { relations: ["options"] }) - - const { title, values } = data - - const optionExists = product.options.some( - (o) => - o.title.toUpperCase() === title.toUpperCase() && o.id !== optionId - ) - if (optionExists) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `An option with title ${title} already exists` - ) - } - - const productOption = await productOptionRepo.findOne({ - where: { id: optionId }, - }) - - if (!productOption) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `Option with id: ${optionId} does not exist` - ) - } - - productOption.title = title - productOption.values = values - - await productOptionRepo.save(productOption) - - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.UPDATED, product) - return product - }) - } - - /** - * Delete an option from a product. - * @param {string} productId - the product to delete an option from - * @param {string} optionId - the option to delete - * @return {Promise} the updated product - */ - async deleteOption(productId, optionId) { - return this.atomicPhase_(async (manager) => { - const productOptionRepo = manager.getCustomRepository( - this.productOptionRepository_ - ) - - const product = await this.retrieve(productId, { - relations: ["variants", "variants.options"], - }) - - const productOption = await productOptionRepo.findOne({ - where: { id: optionId, product_id: productId }, - }) - - if (!productOption) { - return Promise.resolve() - } - - // For the option we want to delete, make sure that all variants have the - // same option values. The reason for doing is, that we want to avoid - // duplicate variants. For example, if we have a product with size and - // color options, that has four variants: (black, 1), (black, 2), - // (blue, 1), (blue, 2) and we delete the size option from the product, - // we would end up with four variants: (black), (black), (blue), (blue). - // We now have two duplicate variants. To ensure that this does not - // happen, we will force the user to select which variants to keep. - const firstVariant = product.variants[0] - - const valueToMatch = firstVariant.options.find( - (o) => o.option_id === optionId - ).value - - const equalsFirst = await Promise.all( - product.variants.map(async (v) => { - const option = v.options.find((o) => o.option_id === optionId) - return option.value === valueToMatch - }) - ) - - if (!equalsFirst.every((v) => v)) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `To delete an option, first delete all variants, such that when an option is deleted, no duplicate variants will exist.` - ) - } - - // If we reach this point, we can safely delete the product option - await productOptionRepo.softRemove(productOption) - - await this.eventBus_ - .withTransaction(manager) - .emit(ProductService.Events.UPDATED, product) - return product - }) - } - - /** - * Decorates a product with product variants. - * @param {string} productId - the productId to decorate. - * @param {string[]} fields - the fields to include. 
- * @param {string[]} expandFields - fields to expand. - * @param {object} config - retrieve config for price calculation. - * @return {Product} return the decorated product. - */ - async decorate(productId, fields = [], expandFields = [], config = {}) { - const requiredFields = ["id", "metadata"] - - const priceIndex = expandFields.indexOf("variants.prices") ?? -1 - if (priceIndex >= 0 && expandFields.length) { - expandFields = [...expandFields] - expandFields.splice(priceIndex, 1) - } - - fields = fields.concat(requiredFields) - - const product = await this.retrieve(productId, { - select: fields, - relations: expandFields, - }) - - return priceIndex > -1 - ? await this.setAdditionalPrices( - product, - config.currency_code, - config.region_id, - config.cart_id, - config.customer_id, - config.include_discount_prices - ) - : product - } - - /** - * Creates a query object to be used for list queries. - * @param {object} selector - the selector to create the query from - * @param {object} config - the config to use for the query - * @return {object} an object containing the query, relations and free-text - * search param. - */ - prepareListQuery_(selector, config) { - let q - if ("q" in selector) { - q = selector.q - delete selector.q - } - - const query = this.buildQuery_(selector, config) - - if (config.relations && config.relations.length > 0) { - query.relations = config.relations - } - - if (config.select && config.select.length > 0) { - query.select = config.select - } - - const rels = query.relations - delete query.relations - - return { - query, - relations: rels, - q, - } - } - - /** - * Creates a QueryBuilder that can fetch products based on free text. - * @param {ProductRepository} productRepo - an instance of a ProductRepositry - * @param {FindOptions} query - the query to get products by - * @param {string} q - the text to perform free text search from - * @return {QueryBuilder} a query builder that can fetch products - */ - getFreeTextQueryBuilder_(productRepo, query, q) { - const where = query.where - - delete where.description - delete where.title - - let qb = productRepo - .createQueryBuilder("product") - .leftJoinAndSelect("product.variants", "variant") - .leftJoinAndSelect("product.collection", "collection") - .select(["product.id"]) - .where(where) - .andWhere( - new Brackets((qb) => { - qb.where(`product.description ILIKE :q`, { q: `%${q}%` }) - .orWhere(`product.title ILIKE :q`, { q: `%${q}%` }) - .orWhere(`variant.title ILIKE :q`, { q: `%${q}%` }) - .orWhere(`variant.sku ILIKE :q`, { q: `%${q}%` }) - .orWhere(`collection.title ILIKE :q`, { q: `%${q}%` }) - }) - ) - .skip(query.skip) - .take(query.take) - - if (query.withDeleted) { - qb = qb.withDeleted() - } - - return qb - } - - /** - * Set additional prices on a list of products. 
- * @param {Product[] | Product} products list of products on which to set additional prices - * @param {string} currency_code currency code to fetch prices for - * @param {string} region_id region to fetch prices for - * @param {string} cart_id string of cart to use as a basis for getting currency and region - * @param {string} customer_id id of potentially logged in customer, used to get prices valid for their customer groups - * @param {boolean} include_discount_prices indication wether or not to include sales prices in result - * @return {Promise} A list of products with variants decorated with "additional_prices" - */ - async setAdditionalPrices( - products, - currency_code, - region_id, - cart_id, - customer_id, - include_discount_prices = false - ) { - return this.atomicPhase_(async (manager) => { - const cartRepo = this.manager_.getCustomRepository(this.cartRepository_) - - let regionId = region_id - let currencyCode = currency_code - - if (cart_id) { - const cart = await cartRepo.findOne({ - where: { id: cart_id }, - relations: ["region"], - }) - - regionId = cart.region.id - currencyCode = cart.region.currency_code - } - - const productArray = Array.isArray(products) ? products : [products] - - const priceSelectionStrategy = this.priceSelectionStrategy_.withTransaction( - manager - ) - - const productsWithPrices = await Promise.all( - productArray.map(async (p) => { - if (p.variants?.length) { - p.variants = await Promise.all( - p.variants.map(async (v) => { - const prices = await priceSelectionStrategy.calculateVariantPrice( - v.id, - { - region_id: regionId, - currency_code: currencyCode, - cart_id: cart_id, - customer_id: customer_id, - include_discount_prices, - } - ) - - return { - ...v, - prices: prices.prices, - original_price: prices.originalPrice, - calculated_price: prices.calculatedPrice, - calculated_price_type: prices.calculatedPriceType, - } - }) - ) - } - - return p - }) - ) - - return Array.isArray(products) - ? productsWithPrices - : productsWithPrices[0] - }) - } -} - -export default ProductService diff --git a/packages/medusa/src/services/product.ts b/packages/medusa/src/services/product.ts new file mode 100644 index 0000000000..5b424047e1 --- /dev/null +++ b/packages/medusa/src/services/product.ts @@ -0,0 +1,794 @@ +import { MedusaError } from "medusa-core-utils" +import { EntityManager } from "typeorm" +import { SearchService } from "." 
+import { TransactionBaseService } from "../interfaces" +import { Product, ProductTag, ProductType, ProductVariant } from "../models" +import { ImageRepository } from "../repositories/image" +import { + FindWithoutRelationsOptions, + ProductRepository, +} from "../repositories/product" +import { ProductOptionRepository } from "../repositories/product-option" +import { ProductTagRepository } from "../repositories/product-tag" +import { ProductTypeRepository } from "../repositories/product-type" +import { ProductVariantRepository } from "../repositories/product-variant" +import { Selector } from "../types/common" +import { + CreateProductInput, + FilterableProductProps, + FindProductConfig, + ProductOptionInput, + UpdateProductInput, +} from "../types/product" +import { buildQuery, setMetadata } from "../utils" +import { formatException } from "../utils/exception-formatter" +import EventBusService from "./event-bus" +import ProductVariantService from "./product-variant" + +type InjectedDependencies = { + manager: EntityManager + productOptionRepository: typeof ProductOptionRepository + productRepository: typeof ProductRepository + productVariantRepository: typeof ProductVariantRepository + productTypeRepository: typeof ProductTypeRepository + productTagRepository: typeof ProductTagRepository + imageRepository: typeof ImageRepository + productVariantService: ProductVariantService + searchService: SearchService + eventBusService: EventBusService +} + +class ProductService extends TransactionBaseService { + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + protected readonly productOptionRepository_: typeof ProductOptionRepository + protected readonly productRepository_: typeof ProductRepository + protected readonly productVariantRepository_: typeof ProductVariantRepository + protected readonly productTypeRepository_: typeof ProductTypeRepository + protected readonly productTagRepository_: typeof ProductTagRepository + protected readonly imageRepository_: typeof ImageRepository + protected readonly productVariantService_: ProductVariantService + protected readonly searchService_: SearchService + protected readonly eventBus_: EventBusService + + static readonly IndexName = `products` + static readonly Events = { + UPDATED: "product.updated", + CREATED: "product.created", + DELETED: "product.deleted", + } + + constructor({ + manager, + productRepository, + productVariantRepository, + productOptionRepository, + eventBusService, + productVariantService, + productTypeRepository, + productTagRepository, + imageRepository, + searchService, + }: InjectedDependencies) { + super({ + manager, + productRepository, + productVariantRepository, + productOptionRepository, + eventBusService, + productVariantService, + productTypeRepository, + productTagRepository, + imageRepository, + searchService, + }) + + this.manager_ = manager + this.productOptionRepository_ = productOptionRepository + this.productRepository_ = productRepository + this.productVariantRepository_ = productVariantRepository + this.eventBus_ = eventBusService + this.productVariantService_ = productVariantService + this.productTypeRepository_ = productTypeRepository + this.productTagRepository_ = productTagRepository + this.imageRepository_ = imageRepository + this.searchService_ = searchService + } + + /** + * Lists products based on the provided parameters. 
+ * @param selector - an object that defines rules to filter products + * by + * @param config - object that defines the scope for what should be + * returned + * @return the result of the find operation + */ + async list( + selector: FilterableProductProps | Selector = {}, + config: FindProductConfig = { + relations: [], + skip: 0, + take: 20, + include_discount_prices: false, + } + ): Promise { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + + const { q, query, relations } = this.prepareListQuery_(selector, config) + if (q) { + const [products] = await productRepo.getFreeTextSearchResultsAndCount( + q, + query, + relations + ) + return products + } + + return await productRepo.findWithRelations(relations, query) + }) + } + + /** + * Lists products based on the provided parameters and includes the count of + * products that match the query. + * @param selector - an object that defines rules to filter products + * by + * @param config - object that defines the scope for what should be + * returned + * @return an array containing the products as + * the first element and the total count of products that matches the query + * as the second element. + */ + async listAndCount( + selector: FilterableProductProps | Selector, + config: FindProductConfig = { + relations: [], + skip: 0, + take: 20, + include_discount_prices: false, + } + ): Promise<[Product[], number]> { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + + const { q, query, relations } = this.prepareListQuery_(selector, config) + + if (q) { + return await productRepo.getFreeTextSearchResultsAndCount( + q, + query, + relations + ) + } + + return await productRepo.findWithRelationsAndCount(relations, query) + }) + } + + /** + * Return the total number of documents in database + * @param {object} selector - the selector to choose products by + * @return {Promise} the result of the count operation + */ + async count(selector: Selector = {}): Promise { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + const query = buildQuery(selector) + return await productRepo.count(query) + }) + } + + /** + * Gets a product by id. + * Throws in case of DB Error and if product was not found. + * @param productId - id of the product to get. + * @param config - object that defines what should be included in the + * query response + * @return the result of the find one operation. + */ + async retrieve( + productId: string, + config: FindProductConfig = { + include_discount_prices: false, + } + ): Promise { + return await this.atomicPhase_(async () => { + return await this.retrieve_({ id: productId }, config) + }) + } + + /** + * Gets a product by handle. + * Throws in case of DB Error and if product was not found. + * @param productHandle - handle of the product to get. + * @param config - details about what to get from the product + * @return the result of the find one operation. + */ + async retrieveByHandle( + productHandle: string, + config: FindProductConfig = {} + ): Promise { + return await this.atomicPhase_(async () => { + return await this.retrieve_({ handle: productHandle }, config) + }) + } + + /** + * Gets a product by external id. + * Throws in case of DB Error and if product was not found. + * @param externalId - handle of the product to get. 
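// Illustrative usage sketch of list() above, not part of the changeset: assumes a
// resolved `productService` instance; filter values are placeholders. A `q` value in
// the selector routes the query through getFreeTextSearchResultsAndCount, otherwise
// findWithRelations(relations, query) is used.
const products = await productService.list(
  { q: "shirt" },
  { relations: ["variants", "tags"], skip: 0, take: 20, include_discount_prices: false }
)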
+ * @param config - details about what to get from the product + * @return the result of the find one operation. + */ + async retrieveByExternalId( + externalId: string, + config: FindProductConfig = {} + ): Promise { + return await this.atomicPhase_(async () => { + return await this.retrieve_({ external_id: externalId }, config) + }) + } + + /** + * Gets a product by selector. + * Throws in case of DB Error and if product was not found. + * @param selector - selector object + * @param config - object that defines what should be included in the + * query response + * @return the result of the find one operation. + */ + async retrieve_( + selector: Selector, + config: FindProductConfig = { + include_discount_prices: false, + } + ): Promise { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + + const { relations, ...query } = buildQuery(selector, config) + + const product = await productRepo.findOneWithRelations( + relations, + query as FindWithoutRelationsOptions + ) + + if (!product) { + const selectorConstraints = Object.entries(selector) + .map(([key, value]) => `${key}: ${value}`) + .join(", ") + + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Product with ${selectorConstraints} was not found` + ) + } + + return product + }) + } + + /** + * Gets all variants belonging to a product. + * @param productId - the id of the product to get variants from. + * @param config - The config to select and configure relations etc... + * @return an array of variants + */ + async retrieveVariants( + productId: string, + config: FindProductConfig = { + skip: 0, + take: 50, + } + ): Promise { + return await this.atomicPhase_(async () => { + const givenRelations = config.relations ?? [] + const requiredRelations = ["variants"] + const relationsSet = new Set([...givenRelations, ...requiredRelations]) + + const product = await this.retrieve(productId, { + ...config, + relations: [...relationsSet], + }) + return product.variants + }) + } + + async listTypes(): Promise { + return await this.atomicPhase_(async (manager) => { + const productTypeRepository = manager.getCustomRepository( + this.productTypeRepository_ + ) + + return await productTypeRepository.find({}) + }) + } + + async listTagsByUsage(count = 10): Promise { + return await this.atomicPhase_(async (manager) => { + const productTagRepo = manager.getCustomRepository( + this.productTagRepository_ + ) + + return await productTagRepo.listTagsByUsage(count) + }) + } + + /** + * Creates a product. + * @param productObject - the product to create + * @return resolves to the creation result. 
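// Sketch of retrieveVariants() above (assumed `productService` instance, placeholder
// id): the required "variants" relation is merged into whatever relations the caller
// asks for, so only the extra relations need to be listed.
const variants = await productService.retrieveVariants("prod_123", {
  relations: ["variants.prices"],
  skip: 0,
  take: 50,
})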
+ */ + async create(productObject: CreateProductInput): Promise { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + const productTagRepo = manager.getCustomRepository( + this.productTagRepository_ + ) + const productTypeRepo = manager.getCustomRepository( + this.productTypeRepository_ + ) + const imageRepo = manager.getCustomRepository(this.imageRepository_) + const optionRepo = manager.getCustomRepository( + this.productOptionRepository_ + ) + + const { options, tags, type, images, ...rest } = productObject + + if (!rest.thumbnail && images?.length) { + rest.thumbnail = images[0] + } + + // if product is a giftcard, we should disallow discounts + if (rest.is_giftcard) { + rest.discountable = false + } + + try { + let product = productRepo.create(rest) + + if (images?.length) { + product.images = await imageRepo.upsertImages(images) + } + + if (tags?.length) { + product.tags = await productTagRepo.upsertTags(tags) + } + + if (typeof type !== `undefined`) { + product.type_id = (await productTypeRepo.upsertType(type))?.id || null + } + + product = await productRepo.save(product) + + product.options = await Promise.all( + (options ?? []).map(async (option) => { + const res = optionRepo.create({ ...option, product_id: product.id }) + await optionRepo.save(res) + return res + }) + ) + + const result = await this.retrieve(product.id, { + relations: ["options"], + }) + + await this.eventBus_ + .withTransaction(manager) + .emit(ProductService.Events.CREATED, { + id: result.id, + }) + return result + } catch (error) { + throw formatException(error) + } + }) + } + + /** + * Updates a product. Product variant updates should use dedicated methods, + * e.g. `addVariant`, etc. The function will throw errors if metadata or + * product variant updates are attempted. + * @param {string} productId - the id of the product. Must be a string that + * can be casted to an ObjectId + * @param {object} update - an object with the update values. + * @return {Promise} resolves to the update result. 
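// Sketch of a create() call based on the behaviour above (assumed `productService`
// instance; all field values are placeholders): the first image becomes the thumbnail
// when none is set, and a gift card would be forced to be non-discountable.
const product = await productService.create({
  title: "Medusa Tee",
  is_giftcard: false,
  images: ["https://example.com/front.png"], // used as thumbnail since none is given
  tags: [{ value: "summer" }],
  type: { value: "apparel" },
  options: [{ title: "Size" }],
})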
+ */ + async update( + productId: string, + update: UpdateProductInput + ): Promise { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + const productVariantRepo = manager.getCustomRepository( + this.productVariantRepository_ + ) + const productTagRepo = manager.getCustomRepository( + this.productTagRepository_ + ) + const productTypeRepo = manager.getCustomRepository( + this.productTypeRepository_ + ) + const imageRepo = manager.getCustomRepository(this.imageRepository_) + + const product = await this.retrieve(productId, { + relations: ["variants", "tags", "images"], + }) + + const { variants, metadata, images, tags, type, ...rest } = update + + if (!product.thumbnail && !update.thumbnail && images?.length) { + product.thumbnail = images[0] + } + + if (images) { + product.images = await imageRepo.upsertImages(images) + } + + if (metadata) { + product.metadata = setMetadata(product, metadata) + } + + if (typeof type !== `undefined`) { + product.type_id = (await productTypeRepo.upsertType(type))?.id || null + } + + if (tags) { + product.tags = await productTagRepo.upsertTags(tags) + } + + if (variants) { + // Iterate product variants and update their properties accordingly + for (const variant of product.variants) { + const exists = variants.find((v) => v.id && variant.id === v.id) + if (!exists) { + await productVariantRepo.remove(variant) + } + } + + const newVariants: ProductVariant[] = [] + for (const [i, newVariant] of variants.entries()) { + const variant_rank = i + + if (newVariant.id) { + const variant = product.variants.find((v) => v.id === newVariant.id) + + if (!variant) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Variant with id: ${newVariant.id} is not associated with this product` + ) + } + + const saved = await this.productVariantService_ + .withTransaction(manager) + .update(variant, { + ...newVariant, + variant_rank, + product_id: variant.product_id, + }) + + newVariants.push(saved) + } else { + // If the provided variant does not have an id, we assume that it + // should be created + const created = await this.productVariantService_ + .withTransaction(manager) + .create(product.id, { + ...newVariant, + variant_rank, + options: newVariant.options || [], + prices: newVariant.prices || [], + }) + + newVariants.push(created) + } + } + + product.variants = newVariants + } + + for (const [key, value] of Object.entries(rest)) { + if (typeof value !== `undefined`) { + product[key] = value + } + } + + const result = await productRepo.save(product) + + await this.eventBus_ + .withTransaction(manager) + .emit(ProductService.Events.UPDATED, { + id: result.id, + fields: Object.keys(update), + }) + return result + }) + } + + /** + * Deletes a product from a given product id. The product's associated + * variants will also be deleted. + * @param productId - the id of the product to delete. 
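// Sketch of the variant reconciliation in update() above (assumed `productService`
// instance; ids are placeholders): entries with an id are updated in place, entries
// without an id are created, and existing variants missing from the list are removed.
// variant_rank follows the array order.
await productService.update("prod_123", {
  title: "Medusa Tee (v2)",
  variants: [
    { id: "variant_123", title: "Small" }, // kept and updated, variant_rank 0
    { title: "Large" },                    // no id, so it is created, variant_rank 1
  ],
})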
Must be + * castable as an ObjectId + * @return empty promise + */ + async delete(productId: string): Promise { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + + // Should not fail, if product does not exist, since delete is idempotent + const product = await productRepo.findOne( + { id: productId }, + { relations: ["variants", "variants.prices", "variants.options"] } + ) + + if (!product) { + return + } + + await productRepo.softRemove(product) + + await this.eventBus_ + .withTransaction(manager) + .emit(ProductService.Events.DELETED, { + id: productId, + }) + + return Promise.resolve() + }) + } + + /** + * Adds an option to a product. Options can, for example, be "Size", "Color", + * etc. Will update all the products variants with a dummy value for the newly + * created option. The same option cannot be added more than once. + * @param productId - the product to apply the new option to + * @param optionTitle - the display title of the option, e.g. "Size" + * @return the result of the model update operation + */ + async addOption(productId: string, optionTitle: string): Promise { + return await this.atomicPhase_(async (manager) => { + const productOptionRepo = manager.getCustomRepository( + this.productOptionRepository_ + ) + + const product = await this.retrieve(productId, { + relations: ["options", "variants"], + }) + + if (product.options.find((o) => o.title === optionTitle)) { + throw new MedusaError( + MedusaError.Types.DUPLICATE_ERROR, + `An option with the title: ${optionTitle} already exists` + ) + } + + const option = await productOptionRepo.create({ + title: optionTitle, + product_id: productId, + }) + + await productOptionRepo.save(option) + + for (const variant of product.variants) { + this.productVariantService_ + .withTransaction(manager) + .addOptionValue(variant.id, option.id, "Default Value") + } + + const result = await this.retrieve(productId) + + await this.eventBus_ + .withTransaction(manager) + .emit(ProductService.Events.UPDATED, result) + return result + }) + } + + async reorderVariants( + productId: string, + variantOrder: string[] + ): Promise { + return await this.atomicPhase_(async (manager) => { + const productRepo = manager.getCustomRepository(this.productRepository_) + + const product = await this.retrieve(productId, { + relations: ["variants"], + }) + + if (product.variants.length !== variantOrder.length) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Product variants and new variant order differ in length.` + ) + } + + product.variants = variantOrder.map((vId) => { + const variant = product.variants.find((v) => v.id === vId) + if (!variant) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Product has no variant with id: ${vId}` + ) + } + + return variant + }) + + const result = productRepo.save(product) + await this.eventBus_ + .withTransaction(manager) + .emit(ProductService.Events.UPDATED, result) + return result + }) + } + + /** + * Updates a product's option. Throws if the call tries to update an option + * not associated with the product. Throws if the updated title already exists. 
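// Sketch of addOption() above (assumed `productService`, placeholder id): the title
// must not already exist on the product, and every existing variant is stamped with
// the option value "Default Value" for the new option.
await productService.addOption("prod_123", "Size")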
+ * @param productId - the product whose option we are updating + * @param optionId - the id of the option we are updating + * @param data - the data to update the option with + * @return the updated product + */ + async updateOption( + productId: string, + optionId: string, + data: ProductOptionInput + ): Promise { + return await this.atomicPhase_(async (manager) => { + const productOptionRepo = manager.getCustomRepository( + this.productOptionRepository_ + ) + + const product = await this.retrieve(productId, { relations: ["options"] }) + + const { title, values } = data + + const optionExists = product.options.some( + (o) => + o.title.toUpperCase() === title.toUpperCase() && o.id !== optionId + ) + if (optionExists) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `An option with title ${title} already exists` + ) + } + + const productOption = await productOptionRepo.findOne({ + where: { id: optionId }, + }) + + if (!productOption) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Option with id: ${optionId} does not exist` + ) + } + + productOption.title = title + if (values) { + productOption.values = values + } + + await productOptionRepo.save(productOption) + + await this.eventBus_ + .withTransaction(manager) + .emit(ProductService.Events.UPDATED, product) + return product + }) + } + + /** + * Delete an option from a product. + * @param productId - the product to delete an option from + * @param optionId - the option to delete + * @return the updated product + */ + async deleteOption( + productId: string, + optionId: string + ): Promise { + return await this.atomicPhase_(async (manager) => { + const productOptionRepo = manager.getCustomRepository( + this.productOptionRepository_ + ) + + const product = await this.retrieve(productId, { + relations: ["variants", "variants.options"], + }) + + const productOption = await productOptionRepo.findOne({ + where: { id: optionId, product_id: productId }, + }) + + if (!productOption) { + return Promise.resolve() + } + + // For the option we want to delete, make sure that all variants have the + // same option values. The reason for doing is, that we want to avoid + // duplicate variants. For example, if we have a product with size and + // color options, that has four variants: (black, 1), (black, 2), + // (blue, 1), (blue, 2) and we delete the size option from the product, + // we would end up with four variants: (black), (black), (blue), (blue). + // We now have two duplicate variants. To ensure that this does not + // happen, we will force the user to select which variants to keep. + const firstVariant = product.variants[0] + + const valueToMatch = firstVariant.options.find( + (o) => o.option_id === optionId + )?.value + + const equalsFirst = await Promise.all( + product.variants.map(async (v) => { + const option = v.options.find((o) => o.option_id === optionId) + return option?.value === valueToMatch + }) + ) + + if (!equalsFirst.every((v) => v)) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `To delete an option, first delete all variants, such that when an option is deleted, no duplicate variants will exist.` + ) + } + + // If we reach this point, we can safely delete the product option + await productOptionRepo.softRemove(productOption) + + await this.eventBus_ + .withTransaction(manager) + .emit(ProductService.Events.UPDATED, product) + return product + }) + } + + /** + * Creates a query object to be used for list queries. 
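// Sketch of the guard in deleteOption() above (assumed `productService`, placeholder
// ids): the option is only removed when all remaining variants share the same value
// for it; otherwise the service throws INVALID_DATA to avoid duplicate variants.
try {
  await productService.deleteOption("prod_123", "opt_size")
} catch (err) {
  // Reached while variants still differ on their "Size" value.
}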
+ * @param selector - the selector to create the query from + * @param config - the config to use for the query + * @return an object containing the query, relations and free-text + * search param. + */ + protected prepareListQuery_( + selector: FilterableProductProps | Selector, + config: FindProductConfig + ): { + q: string + relations: (keyof Product)[] + query: FindWithoutRelationsOptions + } { + let q + if ("q" in selector) { + q = selector.q + delete selector.q + } + + const query = buildQuery(selector, config) + + if (config.relations && config.relations.length > 0) { + query.relations = config.relations + } + + if (config.select && config.select.length > 0) { + query.select = config.select + } + + const rels = query.relations + delete query.relations + + return { + query: query as FindWithoutRelationsOptions, + relations: rels as (keyof Product)[], + q, + } + } +} + +export default ProductService diff --git a/packages/medusa/src/services/region.js b/packages/medusa/src/services/region.js index 34c54e5d23..d31d3af67c 100644 --- a/packages/medusa/src/services/region.js +++ b/packages/medusa/src/services/region.js @@ -302,7 +302,7 @@ class RegionService extends BaseService { async validateCurrency_(currencyCode) { const store = await this.storeService_ .withTransaction(this.transactionManager_) - .retrieve(["currencies"]) + .retrieve({ relations: ["currencies"] }) const storeCurrencies = store.currencies.map((curr) => curr.code) @@ -326,8 +326,10 @@ class RegionService extends BaseService { ) const countryCode = code.toUpperCase() - const validCountry = countries.find((c) => c.alpha2 === countryCode) - if (!validCountry) { + const isCountryExists = countries.some( + (country) => country.alpha2 === countryCode + ) + if (!isCountryExists) { throw new MedusaError( MedusaError.Types.INVALID_DATA, "Invalid country code" diff --git a/packages/medusa/src/services/shipping-option.ts b/packages/medusa/src/services/shipping-option.ts new file mode 100644 index 0000000000..4668f0ede6 --- /dev/null +++ b/packages/medusa/src/services/shipping-option.ts @@ -0,0 +1,698 @@ +import { MedusaError } from "medusa-core-utils" +import { DeepPartial, EntityManager } from "typeorm" +import { TransactionBaseService } from "../interfaces" +import { + Cart, + Order, + ShippingMethod, + ShippingOption, + ShippingOptionPriceType, + ShippingOptionRequirement, +} from "../models" +import { ShippingMethodRepository } from "../repositories/shipping-method" +import { ShippingOptionRepository } from "../repositories/shipping-option" +import { ShippingOptionRequirementRepository } from "../repositories/shipping-option-requirement" +import { ExtendedFindConfig, FindConfig, Selector } from "../types/common" +import { + CreateShippingMethodDto, + ShippingMethodUpdate, + UpdateShippingOptionInput, + CreateShippingOptionInput, +} from "../types/shipping-options" +import { buildQuery, setMetadata } from "../utils" +import FulfillmentProviderService from "./fulfillment-provider" +import RegionService from "./region" + +/** + * Provides layer to manipulate profiles. 
+ */ +class ShippingOptionService extends TransactionBaseService { + protected readonly providerService_: FulfillmentProviderService + protected readonly regionService_: RegionService + protected readonly requirementRepository_: typeof ShippingOptionRequirementRepository + protected readonly optionRepository_: typeof ShippingOptionRepository + protected readonly methodRepository_: typeof ShippingMethodRepository + + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + constructor({ + manager, + shippingOptionRepository, + shippingOptionRequirementRepository, + shippingMethodRepository, + fulfillmentProviderService, + regionService, + }) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) + + this.manager_ = manager + this.optionRepository_ = shippingOptionRepository + this.methodRepository_ = shippingMethodRepository + this.requirementRepository_ = shippingOptionRequirementRepository + this.providerService_ = fulfillmentProviderService + this.regionService_ = regionService + } + + /** + * Validates a requirement + * @param {ShippingOptionRequirement} requirement - the requirement to validate + * @param {string} optionId - the id to validate the requirement + * @return {ShippingOptionRequirement} a validated shipping requirement + */ + async validateRequirement_( + requirement: ShippingOptionRequirement, + optionId: string | undefined = undefined + ): Promise { + return await this.atomicPhase_(async (manager) => { + if (!requirement.type) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "A Shipping Requirement must have a type field" + ) + } + + if ( + requirement.type !== "min_subtotal" && + requirement.type !== "max_subtotal" + ) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "Requirement type must be one of min_subtotal, max_subtotal" + ) + } + + const reqRepo = manager.getCustomRepository(this.requirementRepository_) + + const existingReq = await reqRepo.findOne({ + where: { id: requirement.id }, + }) + + if (!existingReq && requirement.id) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "ID does not exist" + ) + } + + // If no option id is provided, we are currently in the process of creating + // a new shipping option. Therefore, simply return the requirement, such + // that the cascading will take care of the creation of the requirement. 
+ if (!optionId) { + return requirement + } + + let req + if (existingReq) { + req = await reqRepo.save({ + ...existingReq, + ...requirement, + }) + } else { + const created = reqRepo.create({ + ...requirement, + shipping_option_id: optionId, + }) + + req = await reqRepo.save(created) + } + + return req + }) + } + + /** + * @param {Object} selector - the query object for find + * @param {object} config - config object + * @return {Promise} the result of the find operation + */ + async list( + selector: Selector, + config: FindConfig = { skip: 0, take: 50 } + ): Promise { + return await this.atomicPhase_(async (transactionManager) => { + const optRepo = transactionManager.getCustomRepository( + this.optionRepository_ + ) + + const query = buildQuery(selector, config) + return optRepo.find(query) + }) + } + + /** + * @param {Object} selector - the query object for find + * @param {object} config - config object + * @return {Promise} the result of the find operation + */ + async listAndCount( + selector: Selector, + config: FindConfig = { skip: 0, take: 50 } + ): Promise<[ShippingOption[], number]> { + return await this.atomicPhase_(async (transactionManager) => { + const optRepo = transactionManager.getCustomRepository( + this.optionRepository_ + ) + + const query = buildQuery(selector, config) + return await optRepo.findAndCount(query) + }) + } + + /** + * Gets a profile by id. + * Throws in case of DB Error and if profile was not found. + * @param {string} optionId - the id of the profile to get. + * @param {object} options - the options to get a profile + * @return {Promise} the profile document. + */ + async retrieve( + optionId, + options: { select?: (keyof ShippingOption)[]; relations?: string[] } = {} + ): Promise { + return await this.atomicPhase_(async (transactionManager) => { + const soRepo: ShippingOptionRepository = + transactionManager.getCustomRepository(this.optionRepository_) + + const query: ExtendedFindConfig = { + where: { id: optionId }, + } + + if (options.select) { + query.select = options.select + } + + if (options.relations) { + query.relations = options.relations + } + + const option = await soRepo.findOne(query) + + if (!option) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Shipping Option with ${optionId} was not found` + ) + } + + return option + }) + } + + /** + * Updates a shipping method's associations. Useful when a cart is completed + * and its methods should be copied to an order/swap entity. + * @param {string} id - the id of the shipping method to update + * @param {object} update - the values to update the method with + * @return {Promise} the resulting shipping method + */ + async updateShippingMethod( + id: string, + update: ShippingMethodUpdate + ): Promise { + return await this.atomicPhase_(async (manager) => { + const methodRepo: ShippingMethodRepository = manager.getCustomRepository( + this.methodRepository_ + ) + const method = await methodRepo.findOne({ where: { id } }) + + if (!method) { + return undefined + } + + for (const key of Object.keys(update).filter( + (k) => typeof update[k] !== `undefined` + )) { + method[key] = update[key] + } + + return methodRepo.save(method) + }) + } + + /** + * Removes a given shipping method + * @param {ShippingMethod | Array} shippingMethods - the shipping method to remove + * @returns removed shipping methods + */ + async deleteShippingMethods( + shippingMethods: ShippingMethod | ShippingMethod[] + ): Promise { + const removeEntities: ShippingMethod[] = Array.isArray(shippingMethods) + ? 
shippingMethods + : [shippingMethods] + + return await this.atomicPhase_(async (manager) => { + const methodRepo = manager.getCustomRepository(this.methodRepository_) + return await methodRepo.remove(removeEntities) + }) + } + + /** + * Creates a shipping method for a given cart. + * @param {string} optionId - the id of the option to use for the method. + * @param {object} data - the optional provider data to use. + * @param {object} config - the cart to create the shipping method for. + * @return {ShippingMethod} the resulting shipping method. + */ + async createShippingMethod( + optionId: string, + data: object, + config: CreateShippingMethodDto + ): Promise { + return await this.atomicPhase_(async (manager) => { + const option = await this.retrieve(optionId, { + relations: ["requirements"], + }) + + const methodRepo = manager.getCustomRepository(this.methodRepository_) + + if (typeof config.cart !== "undefined") { + this.validateCartOption(option, config.cart) + } + + const validatedData = await this.providerService_.validateFulfillmentData( + option, + data, + config.cart || {} + ) + + let methodPrice + if (typeof config.price === "number") { + methodPrice = config.price + } else { + methodPrice = await this.getPrice_(option, validatedData, config.cart) + } + + const toCreate: Partial = { + shipping_option_id: option.id, + data: validatedData, + price: methodPrice, + } + + if (config.order) { + toCreate.order_id = config.order.id + } + + if (config.cart) { + toCreate.cart_id = config.cart.id + } + + if (config.cart_id) { + toCreate.cart_id = config.cart_id + } + + if (config.return_id) { + toCreate.return_id = config.return_id + } + + if (config.order_id) { + toCreate.order_id = config.order_id + } + + if (config.claim_order_id) { + toCreate.claim_order_id = config.claim_order_id + } + + const method = await methodRepo.create(toCreate) + + const created = await methodRepo.save(method) + + return methodRepo.findOne({ + where: { id: created.id }, + relations: ["shipping_option"], + }) as unknown as ShippingMethod + }) + } + + /** + * Checks if a given option id is a valid option for a cart. If it is the + * option is returned with the correct price. Throws when region_ids do not + * match, or when the shipping option requirements are not satisfied. + * @param {object} option - the option object to check + * @param {Cart} cart - the cart object to check against + * @return {ShippingOption} the validated shipping option + */ + validateCartOption( + option: ShippingOption, + cart: Cart + ): ShippingOption | null { + if (option.is_return) { + return null + } + + if (cart.region_id !== option.region_id) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "The shipping option is not available in the cart's region" + ) + } + + const subtotal = cart.subtotal as number + + const requirementResults: boolean[] = option.requirements.map( + (requirement) => { + switch (requirement.type) { + case "max_subtotal": + return requirement.amount > subtotal + case "min_subtotal": + return requirement.amount <= subtotal + default: + return true + } + } + ) + + if (!requirementResults.every(Boolean)) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "The Cart does not satisfy the shipping option's requirements" + ) + } + + return option + } + + /** + * Creates a new shipping option. Used both for outbound and inbound shipping + * options. The difference is registered by the `is_return` field which + * defaults to false. 
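// Standalone sketch of the requirement check in validateCartOption() above:
// min_subtotal passes when amount <= subtotal, max_subtotal when amount > subtotal.
const meetsRequirements = (
  requirements: { type: "min_subtotal" | "max_subtotal"; amount: number }[],
  subtotal: number
): boolean =>
  requirements.every((r) =>
    r.type === "min_subtotal" ? r.amount <= subtotal : r.amount > subtotal
  )

meetsRequirements([{ type: "min_subtotal", amount: 500 }], 1000) // true
meetsRequirements([{ type: "max_subtotal", amount: 800 }], 1000) // false -> NOT_ALLOWED in the service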
+ * @param {ShippingOption} data - the data to create shipping options + * @return {Promise} the result of the create operation + */ + async create(data: CreateShippingOptionInput): Promise { + return this.atomicPhase_(async (manager) => { + const optionRepo = manager.getCustomRepository(this.optionRepository_) + const option = await optionRepo.create( + data as DeepPartial + ) + + const region = await this.regionService_ + .withTransaction(manager) + .retrieve(option.region_id, { + relations: ["fulfillment_providers"], + }) + + if ( + !region.fulfillment_providers.find( + ({ id }) => id === option.provider_id + ) + ) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "The fulfillment provider is not available in the provided region" + ) + } + + option.price_type = await this.validatePriceType_(data.price_type, option) + option.amount = + data.price_type === "calculated" ? null : data.amount ?? null + + const isValid = await this.providerService_.validateOption(option) + + if (!isValid) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "The fulfillment provider cannot validate the shipping option" + ) + } + + if (typeof data.requirements !== "undefined") { + const acc: ShippingOptionRequirement[] = [] + for (const r of data.requirements) { + const validated = await this.validateRequirement_(r) + + if (acc.find((raw) => raw.type === validated.type)) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "Only one requirement of each type is allowed" + ) + } + + if ( + acc.find( + (raw) => + (raw.type === "max_subtotal" && + validated.amount > raw.amount) || + (raw.type === "min_subtotal" && validated.amount < raw.amount) + ) + ) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "Max. subtotal must be greater than Min. subtotal" + ) + } + + acc.push(validated) + } + } + + const result = await optionRepo.save(option) + return result + }) + } + + /** + * Validates a shipping option price + * @param {ShippingOptionPriceType} priceType - the price to validate + * @param {ShippingOption} option - the option to validate against + * @return {Promise} the validated price + */ + async validatePriceType_( + priceType: ShippingOptionPriceType, + option: ShippingOption + ): Promise { + if ( + !priceType || + (priceType !== "flat_rate" && priceType !== "calculated") + ) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "The price must be of type flat_rate or calculated" + ) + } + + if (priceType === "calculated") { + const canCalculate = await this.providerService_.canCalculate(option) + if (!canCalculate) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "The fulfillment provider cannot calculate prices for this option" + ) + } + } + + return priceType + } + + /** + * Updates a profile. Metadata updates and product updates should use + * dedicated methods, e.g. `setMetadata`, etc. The function + * will throw errors if metadata or product updates are attempted. + * @param {string} optionId - the id of the option. Must be a string that + * can be casted to an ObjectId + * @param {object} update - an object with the update values. + * @return {Promise} resolves to the update result. 
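// Sketch of create() above (assumed `shippingOptionService` instance; ids and values
// are placeholders, and the exact CreateShippingOptionInput shape is assumed): a
// flat_rate option keeps its amount, whereas "calculated" would null it and require
// the fulfillment provider to support price calculation.
await shippingOptionService.create({
  name: "Express",
  region_id: "reg_123",
  provider_id: "manual",
  price_type: "flat_rate",
  amount: 1500,
  requirements: [{ type: "min_subtotal", amount: 0 }],
} as CreateShippingOptionInput)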
+ */ + async update( + optionId: string, + update: UpdateShippingOptionInput + ): Promise { + return this.atomicPhase_(async (manager) => { + const option = await this.retrieve(optionId, { + relations: ["requirements"], + }) + + if (typeof update.metadata !== "undefined") { + option.metadata = await setMetadata(option, update.metadata) + } + + if (update.region_id || update.provider_id || update.data) { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "Region and Provider cannot be updated after creation" + ) + } + + if (typeof update.is_return !== "undefined") { + throw new MedusaError( + MedusaError.Types.NOT_ALLOWED, + "is_return cannot be changed after creation" + ) + } + + if (typeof update.requirements !== "undefined") { + const acc: ShippingOptionRequirement[] = [] + for (const r of update.requirements) { + const validated = await this.validateRequirement_(r, optionId) + + if (acc.find((raw) => raw.type === validated.type)) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "Only one requirement of each type is allowed" + ) + } + + if ( + acc.find( + (raw) => + (raw.type === "max_subtotal" && + validated.amount > raw.amount) || + (raw.type === "min_subtotal" && validated.amount < raw.amount) + ) + ) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + "Max. subtotal must be greater than Min. subtotal" + ) + } + + acc.push(validated) + } + + if (option.requirements) { + const accReqs = acc.map((a) => a.id) + const toRemove = option.requirements.filter( + (r) => !accReqs.includes(r.id) + ) + await Promise.all( + toRemove.map(async (req) => { + await this.removeRequirement(req.id) + }) + ) + } + + option.requirements = acc + } + + if (typeof update.price_type !== "undefined") { + option.price_type = await this.validatePriceType_( + update.price_type, + option + ) + if (update.price_type === "calculated") { + option.amount = null + } + } + + if ( + typeof update.amount !== "undefined" && + option.price_type !== "calculated" + ) { + option.amount = update.amount + } + + if (typeof update.name !== "undefined") { + option.name = update.name + } + + if (typeof update.admin_only !== "undefined") { + option.admin_only = update.admin_only + } + + const optionRepo = manager.getCustomRepository(this.optionRepository_) + return await optionRepo.save(option) + }) + } + + /** + * Deletes a profile with a given profile id. + * @param {string} optionId - the id of the profile to delete. Must be + * castable as an ObjectId + * @return {Promise} the result of the delete operation. + */ + async delete(optionId: string): Promise { + return await this.atomicPhase_(async (manager) => { + try { + const option = await this.retrieve(optionId) + + const optionRepo = manager.getCustomRepository(this.optionRepository_) + + return optionRepo.softRemove(option) + } catch (error) { + // Delete is idempotent, but we return a promise to allow then-chaining + return + } + }) + } + + /** + * Adds a requirement to a shipping option. Only 1 requirement of each type + * is allowed. + * @param {string} optionId - the option to add the requirement to. + * @param {ShippingOptionRequirement} requirement - the requirement for the option. 
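// Sketch of update() above (assumed `shippingOptionService`, placeholder id):
// switching to calculated pricing clears the fixed amount, while region_id,
// provider_id, data and is_return are rejected after creation.
await shippingOptionService.update("so_123", { price_type: "calculated" })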
+   * @return {Promise<ShippingOption>} the result of update
+   */
+  async addRequirement(
+    optionId: string,
+    requirement: ShippingOptionRequirement
+  ): Promise<ShippingOption> {
+    return this.atomicPhase_(async (manager) => {
+      const option = await this.retrieve(optionId, {
+        relations: ["requirements"],
+      })
+      const validatedReq = await this.validateRequirement_(requirement)
+
+      if (option.requirements.find((r) => r.type === validatedReq.type)) {
+        throw new MedusaError(
+          MedusaError.Types.DUPLICATE_ERROR,
+          `A requirement with type: ${validatedReq.type} already exists`
+        )
+      }
+
+      option.requirements.push(validatedReq)
+
+      const optionRepo = manager.getCustomRepository(this.optionRepository_)
+      return optionRepo.save(option)
+    })
+  }
+
+  /**
+   * Removes a requirement from a shipping option
+   * @param {string} requirementId - the id of the requirement to remove
+   * @return {Promise} the result of update
+   */
+  async removeRequirement(
+    requirementId: string
+  ): Promise<ShippingOptionRequirement | void> {
+    return await this.atomicPhase_(async (manager) => {
+      const reqRepo: ShippingOptionRequirementRepository =
+        manager.getCustomRepository(this.requirementRepository_)
+
+      const requirement = await reqRepo.findOne({
+        where: { id: requirementId },
+      })
+      // Delete is idempotent, but we return a promise to allow then-chaining
+      if (typeof requirement === "undefined") {
+        return Promise.resolve()
+      }
+
+      return await reqRepo.softRemove(requirement)
+    })
+  }
+
+  /**
+   * Returns the amount to be paid for a shipping method. Will ask the
+   * fulfillment provider to calculate the price if the shipping option has the
+   * price type "calculated".
+   * @param {ShippingOption} option - the shipping option to retrieve the price
+   * for.
+   * @param {object} data - the shipping data to retrieve the price for.
+   * @param {Cart | Order} cart - the context in which the price should be
+   * retrieved.
+   * @return {Promise<number>} the price of the shipping option.
+   */
+  async getPrice_(
+    option: ShippingOption,
+    data: object,
+    cart: Cart | Order | undefined
+  ): Promise<number> {
+    if (option.price_type === "calculated") {
+      return this.providerService_.calculatePrice(option, data, cart)
+    }
+    return option.amount as number
+  }
+}
+
+export default ShippingOptionService
diff --git a/packages/medusa/src/services/shipping-profile.js b/packages/medusa/src/services/shipping-profile.js
index 1161d7aafb..ca84aa63df 100644
--- a/packages/medusa/src/services/shipping-profile.js
+++ b/packages/medusa/src/services/shipping-profile.js
@@ -420,7 +420,7 @@ class ShippingProfileService extends BaseService {
    * Finds all the shipping profiles that cover the products in a cart, and
    * validates all options that are available for the cart.
    * @param {Cart} cart - the cart object to find shipping options for
-   * @return {[ShippingOption]} a list of the available shipping options
+   * @return {Promise<[ShippingOption]>} a list of the available shipping options
    */
   async fetchCartOptions(cart) {
     const profileIds = this.getProfilesInCart_(cart)
diff --git a/packages/medusa/src/services/store.js b/packages/medusa/src/services/store.js
deleted file mode 100644
index 0f10024092..0000000000
--- a/packages/medusa/src/services/store.js
+++ /dev/null
@@ -1,271 +0,0 @@
-import { MedusaError } from "medusa-core-utils"
-import { BaseService } from "medusa-interfaces"
-import { currencies } from "../utils/currencies"
-
-/**
- * Provides layer to manipulate store settings.
- * @extends BaseService - */ -class StoreService extends BaseService { - constructor({ - manager, - storeRepository, - currencyRepository, - eventBusService, - }) { - super() - - /** @private @const {EntityManager} */ - this.manager_ = manager - - /** @private @const {StoreRepository} */ - this.storeRepository_ = storeRepository - - /** @private @const {CurrencyRepository} */ - this.currencyRepository_ = currencyRepository - - /** @private @const {EventBus} */ - this.eventBus_ = eventBusService - } - - withTransaction(transactionManager) { - if (!transactionManager) { - return this - } - - const cloned = new StoreService({ - manager: transactionManager, - storeRepository: this.storeRepository_, - currencyRepository: this.currencyRepository_, - eventBusService: this.eventBus_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - - /** - * Creates a store if it doesn't already exist. - * @return {Promise} the store. - */ - async create() { - return this.atomicPhase_(async (manager) => { - const storeRepository = manager.getCustomRepository(this.storeRepository_) - const currencyRepository = manager.getCustomRepository( - this.currencyRepository_ - ) - - let store = await this.retrieve() - - if (!store) { - const s = await storeRepository.create() - // Add default currency (USD) to store currencies - const usd = await currencyRepository.findOne({ - code: "usd", - }) - - if (usd) { - s.currencies = [usd] - } - - store = await storeRepository.save(s) - } - - return store - }) - } - - /** - * Retrieve the store settings. There is always a maximum of one store. - * @param {string[]} relations - relations to fetch with store - * @return {Promise} the store - */ - async retrieve(relations = []) { - const storeRepo = this.manager_.getCustomRepository(this.storeRepository_) - - const store = await storeRepo.findOne({ relations }) - - return store - } - - getDefaultCurrency_(code) { - const currencyObject = currencies[code.toUpperCase()] - - return { - code: currencyObject.code.toLowerCase(), - symbol: currencyObject.symbol, - symbol_native: currencyObject.symbol_native, - name: currencyObject.name, - } - } - - /** - * Updates a store - * @param {object} update - an object with the update values. - * @return {Promise} resolves to the update result. - */ - async update(update) { - return this.atomicPhase_(async (manager) => { - const storeRepository = manager.getCustomRepository(this.storeRepository_) - const currencyRepository = manager.getCustomRepository( - this.currencyRepository_ - ) - - const store = await this.retrieve(["currencies"]) - - const { - metadata, - default_currency_code, - currencies: storeCurrencies, - ...rest - } = update - - if (metadata) { - store.metadata = this.setMetadata_(store.id, metadata) - } - - if (storeCurrencies) { - const defaultCurr = default_currency_code ?? 
store.default_currency_code - const hasDefCurrency = storeCurrencies.find( - (c) => c.toLowerCase() === defaultCurr.toLowerCase() - ) - - // throw if we are trying to remove a currency from store currently used as default - if (!hasDefCurrency) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `You are not allowed to remove default currency from store currencies without replacing it as well` - ) - } - - store.currencies = await Promise.all( - storeCurrencies.map(async (curr) => { - const currency = await currencyRepository.findOne({ - where: { code: curr.toLowerCase() }, - }) - - if (!currency) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Invalid currency ${curr}` - ) - } - - return currency - }) - ) - } - - if (default_currency_code) { - const storeCurrCodes = store.currencies.map((c) => c.code) - const hasDefCurrency = storeCurrCodes.find( - (c) => c === default_currency_code.toLowerCase() - ) - - // throw if store currencies does not have default currency - if (!hasDefCurrency) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Store does not have currency: ${default_currency_code}` - ) - } - - const curr = await currencyRepository.findOne({ - code: default_currency_code.toLowerCase(), - }) - - if (!curr) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Currency ${default_currency_code} not found` - ) - } - - store.default_currency = curr - store.default_currency_code = curr.code - } - - for (const [key, value] of Object.entries(rest)) { - store[key] = value - } - - const result = await storeRepository.save(store) - return result - }) - } - - /** - * Add a currency to the store - * @param {string} code - 3 character ISO currency code - * @return {Promise} result after update - */ - async addCurrency(code) { - return this.atomicPhase_(async (manager) => { - const storeRepo = manager.getCustomRepository(this.storeRepository_) - const currencyRepository = manager.getCustomRepository( - this.currencyRepository_ - ) - const store = await this.retrieve(["currencies"]) - - const curr = await currencyRepository.findOne({ - where: { code: code.toLowerCase() }, - }) - - if (!curr) { - throw new MedusaError( - MedusaError.Types.INVALID_DATA, - `Currency ${code} not found` - ) - } - - if ( - store.currencies.map((c) => c.code).includes(curr.code.toLowerCase()) - ) { - throw new MedusaError( - MedusaError.Types.DUPLICATE_ERROR, - `Currency already added` - ) - } - - store.currencies = [...store.currencies, curr] - const updated = await storeRepo.save(store) - return updated - }) - } - - /** - * Removes a currency from the store - * @param {string} code - 3 character ISO currency code - * @return {Promise} result after update - */ - async removeCurrency(code) { - return this.atomicPhase_(async (manager) => { - const storeRepo = manager.getCustomRepository(this.storeRepository_) - const store = await this.retrieve(["currencies"]) - - const exists = store.currencies.find((c) => c.code === code.toLowerCase()) - // If currency does not exist, return early - if (!exists) { - return store - } - - store.currencies = store.currencies.filter((c) => c.code !== code) - const updated = await storeRepo.save(store) - return updated - }) - } - - /** - * Decorates a store object. - * @param {Store} store - the store to decorate. - * @param {string[]} fields - the fields to include. - * @param {string[]} expandFields - fields to expand. - * @return {Store} return the decorated Store. 
- */
-  async decorate(store, fields, expandFields = []) {
-    return store
-  }
-}
-
-export default StoreService
diff --git a/packages/medusa/src/services/store.ts b/packages/medusa/src/services/store.ts
new file mode 100644
index 0000000000..b8eb4d118c
--- /dev/null
+++ b/packages/medusa/src/services/store.ts
@@ -0,0 +1,284 @@
+import { MedusaError } from "medusa-core-utils"
+import { currencies, Currency } from "../utils/currencies"
+import { EntityManager } from "typeorm"
+import { StoreRepository } from "../repositories/store"
+import { CurrencyRepository } from "../repositories/currency"
+import EventBusService from "./event-bus"
+import { Store } from "../models"
+import { AdminPostStoreReq } from "../api/routes/admin/store"
+import { FindConfig } from "../types/common"
+import { TransactionBaseService } from "../interfaces"
+import { buildQuery, setMetadata } from "../utils"
+import { UpdateStoreInput } from "../types/store"
+
+type InjectedDependencies = {
+  manager: EntityManager
+  storeRepository: typeof StoreRepository
+  currencyRepository: typeof CurrencyRepository
+  eventBusService: EventBusService
+}
+
+/**
+ * Provides a layer to manipulate store settings.
+ * @extends TransactionBaseService
+ */
+class StoreService extends TransactionBaseService {
+  protected manager_: EntityManager
+  protected transactionManager_: EntityManager
+
+  protected readonly storeRepository_: typeof StoreRepository
+  protected readonly currencyRepository_: typeof CurrencyRepository
+  protected readonly eventBus_: EventBusService
+
+  constructor({
+    manager,
+    storeRepository,
+    currencyRepository,
+    eventBusService,
+  }: InjectedDependencies) {
+    super({
+      manager,
+      storeRepository,
+      currencyRepository,
+      eventBusService,
+    })
+
+    this.manager_ = manager
+    this.storeRepository_ = storeRepository
+    this.currencyRepository_ = currencyRepository
+    this.eventBus_ = eventBusService
+  }
+
+  /**
+   * Creates a store if it doesn't already exist.
+   * @return The store.
+   */
+  async create(): Promise<Store> {
+    return await this.atomicPhase_(
+      async (transactionManager: EntityManager) => {
+        const storeRepository = transactionManager.getCustomRepository(
+          this.storeRepository_
+        )
+        const currencyRepository = transactionManager.getCustomRepository(
+          this.currencyRepository_
+        )
+
+        let store = await this.retrieve().catch(() => void 0)
+        if (store) {
+          return store
+        }
+
+        const newStore = await storeRepository.create()
+        // Add default currency (USD) to store currencies
+        const usd = await currencyRepository.findOne({
+          code: "usd",
+        })
+
+        if (usd) {
+          newStore.currencies = [usd]
+        }
+
+        store = await storeRepository.save(newStore)
+        return store
+      }
+    )
+  }
+
+  /**
+   * Retrieve the store settings. There is always a maximum of one store.
+ * @param config The config object from which the query will be built + * @return the store + */ + async retrieve(config: FindConfig = {}): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const storeRepo = transactionManager.getCustomRepository( + this.storeRepository_ + ) + const query = buildQuery({}, config) + const store = await storeRepo.findOne(query) + + if (!store) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + "Store does not exist" + ) + } + + return store + } + ) + } + + protected getDefaultCurrency_(code: string): Partial { + const currencyObject = currencies[code.toUpperCase()] + + return { + code: currencyObject.code.toLowerCase(), + symbol: currencyObject.symbol, + symbol_native: currencyObject.symbol_native, + name: currencyObject.name, + } + } + + /** + * Updates a store + * @param data - an object with the update values. + * @return resolves to the update result. + */ + async update(data: UpdateStoreInput): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const storeRepository = transactionManager.getCustomRepository( + this.storeRepository_ + ) + const currencyRepository = transactionManager.getCustomRepository( + this.currencyRepository_ + ) + + const { + metadata, + default_currency_code, + currencies: storeCurrencies, + ...rest + } = data + + const store = await this.retrieve({ relations: ["currencies"] }) + + if (metadata) { + store.metadata = setMetadata(store, metadata) + } + + if (storeCurrencies) { + const defaultCurr = + default_currency_code ?? store.default_currency_code + const hasDefCurrency = storeCurrencies.find( + (c) => c.toLowerCase() === defaultCurr.toLowerCase() + ) + + // throw if we are trying to remove a currency from store currently used as default + if (!hasDefCurrency) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `You are not allowed to remove default currency from store currencies without replacing it as well` + ) + } + + store.currencies = await Promise.all( + storeCurrencies.map(async (curr) => { + const currency = await currencyRepository.findOne({ + where: { code: curr.toLowerCase() }, + }) + + if (!currency) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Currency with code ${curr} does not exist` + ) + } + + return currency + }) + ) + } + + if (default_currency_code) { + const hasDefCurrency = store.currencies.find( + (c) => c.code.toLowerCase() === default_currency_code.toLowerCase() + ) + + if (!hasDefCurrency) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Store does not have currency: ${default_currency_code}` + ) + } + + const curr = (await currencyRepository.findOne({ + code: default_currency_code.toLowerCase(), + })) as Currency + + store.default_currency = curr + store.default_currency_code = curr.code + } + + for (const [key, value] of Object.entries(rest)) { + store[key] = value + } + + return await storeRepository.save(store) + } + ) + } + + /** + * Add a currency to the store + * @param code - 3 character ISO currency code + * @return result after update + */ + async addCurrency(code: string): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const storeRepo = transactionManager.getCustomRepository( + this.storeRepository_ + ) + const currencyRepository = transactionManager.getCustomRepository( + this.currencyRepository_ + ) + + const curr = await currencyRepository.findOne({ + where: { code: code.toLowerCase() }, + }) + + if 
(!curr) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Currency ${code} not found` + ) + } + + const store = await this.retrieve({ relations: ["currencies"] }) + + const doesStoreInclCurrency = store.currencies + .map((c) => c.code.toLowerCase()) + .includes(curr.code.toLowerCase()) + if (doesStoreInclCurrency) { + throw new MedusaError( + MedusaError.Types.DUPLICATE_ERROR, + `Currency already added` + ) + } + + store.currencies = [...store.currencies, curr] + return await storeRepo.save(store) + } + ) + } + + /** + * Removes a currency from the store + * @param code - 3 character ISO currency code + * @return result after update + */ + async removeCurrency(code: string): Promise { + return await this.atomicPhase_( + async (transactionManager: EntityManager) => { + const storeRepo = transactionManager.getCustomRepository( + this.storeRepository_ + ) + const store = await this.retrieve({ relations: ["currencies"] }) + const doesCurrencyExists = store.currencies.some( + (c) => c.code === code.toLowerCase() + ) + if (!doesCurrencyExists) { + return store + } + + store.currencies = store.currencies.filter((c) => c.code !== code) + return await storeRepo.save(store) + } + ) + } +} + +export default StoreService diff --git a/packages/medusa/src/services/strategy-resolver.ts b/packages/medusa/src/services/strategy-resolver.ts new file mode 100644 index 0000000000..8983a3246e --- /dev/null +++ b/packages/medusa/src/services/strategy-resolver.ts @@ -0,0 +1,38 @@ +import { AbstractBatchJobStrategy, TransactionBaseService } from "../interfaces" +import { EntityManager } from "typeorm" +import { MedusaError } from "medusa-core-utils" + +type InjectedDependencies = { + manager: EntityManager + [key: string]: unknown +} + +export default class StrategyResolver extends TransactionBaseService< + StrategyResolver, + InjectedDependencies +> { + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + + constructor(container: InjectedDependencies) { + super(container) + this.manager_ = container.manager + } + + resolveBatchJobByType>( + type: string + ): AbstractBatchJobStrategy { + let resolved: AbstractBatchJobStrategy + try { + resolved = this.container[ + `batchType_${type}` + ] as AbstractBatchJobStrategy + } catch (e) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `Unable to find a BatchJob strategy with the type ${type}` + ) + } + return resolved + } +} diff --git a/packages/medusa/src/services/tax-provider.ts b/packages/medusa/src/services/tax-provider.ts index 0f96d2c916..c8a50d7fc0 100644 --- a/packages/medusa/src/services/tax-provider.ts +++ b/packages/medusa/src/services/tax-provider.ts @@ -28,6 +28,11 @@ import TaxRateService from "./tax-rate" const CACHE_TIME = 30 // seconds +type RegionDetails = { + id: string + tax_rate: number | null +} + /** * Finds tax providers and assists in tax related operations. */ @@ -321,22 +326,21 @@ class TaxProviderService extends BaseService { * Gets the tax rates configured for a shipping option. The rates are cached * between calls. * @param optionId - the option id of the shipping method. - * @param region - the region to get configured rates for. + * @param regionDetails - the region to get configured rates for. * @return the tax rates configured for the shipping option. 
*/ async getRegionRatesForShipping( optionId: string, - region: Region + regionDetails: RegionDetails ): Promise { - const cacheHit = await this.getCacheEntry(optionId, region.id) + const cacheHit = await this.getCacheEntry(optionId, regionDetails.id) if (cacheHit) { return cacheHit } let toReturn: TaxServiceRate[] = [] const optionRates = await this.taxRateService_.listByShippingOption( - optionId, - { region_id: region.id } + optionId ) if (optionRates.length > 0) { @@ -352,14 +356,14 @@ class TaxProviderService extends BaseService { if (toReturn.length === 0) { toReturn = [ { - rate: region.tax_rate, + rate: regionDetails.tax_rate, name: "default", code: "default", }, ] } - await this.setCache(optionId, region.id, toReturn) + await this.setCache(optionId, regionDetails.id, toReturn) return toReturn } @@ -373,7 +377,7 @@ class TaxProviderService extends BaseService { */ async getRegionRatesForProduct( productId: string, - region: Region + region: RegionDetails ): Promise { const cacheHit = await this.getCacheEntry(productId, region.id) if (cacheHit) { diff --git a/packages/medusa/src/services/tax-rate.ts b/packages/medusa/src/services/tax-rate.ts index 70a593ac58..c6bc2e8d97 100644 --- a/packages/medusa/src/services/tax-rate.ts +++ b/packages/medusa/src/services/tax-rate.ts @@ -329,13 +329,10 @@ class TaxRateService extends BaseService { }) } - async listByShippingOption( - shippingOptionId: string, - config: TaxRateListByConfig - ): Promise { + async listByShippingOption(shippingOptionId: string): Promise { return await this.atomicPhase_(async (manager: EntityManager) => { const taxRateRepo = manager.getCustomRepository(this.taxRateRepository_) - return await taxRateRepo.listByShippingOption(shippingOptionId, config) + return await taxRateRepo.listByShippingOption(shippingOptionId) }) } } diff --git a/packages/medusa/src/services/totals.ts b/packages/medusa/src/services/totals.ts index 53322f14bc..ad2a327bd0 100644 --- a/packages/medusa/src/services/totals.ts +++ b/packages/medusa/src/services/totals.ts @@ -689,7 +689,6 @@ class TotalsService extends BaseService { const calculationContext = this.getCalculationContext(cartOrOrder, { exclude_shipping: true, }) - const lineItemAllocation = calculationContext.allocation_map[lineItem.id] || {} diff --git a/packages/medusa/src/services/transaction.js b/packages/medusa/src/services/transaction.js deleted file mode 100644 index 7f57599ec6..0000000000 --- a/packages/medusa/src/services/transaction.js +++ /dev/null @@ -1,14 +0,0 @@ -import { BaseService } from "medusa-interfaces" -import mongoose from "mongoose" - -class TransactionService extends BaseService { - constructor() { - super() - } - - async createSession() { - return mongoose.startSession() - } -} - -export default TransactionService diff --git a/packages/medusa/src/services/user.ts b/packages/medusa/src/services/user.ts index 733934eca1..c306e5f7d3 100644 --- a/packages/medusa/src/services/user.ts +++ b/packages/medusa/src/services/user.ts @@ -1,9 +1,8 @@ import jwt from "jsonwebtoken" -import { MedusaError, Validator } from "medusa-core-utils" -import { BaseService } from "medusa-interfaces" import Scrypt from "scrypt-kdf" +import { MedusaError, Validator } from "medusa-core-utils" import { EntityManager } from "typeorm" -import { User } from "../models/user" +import { User } from "../models" import { UserRepository } from "../repositories/user" import { FindConfig } from "../types/common" import { @@ -12,6 +11,8 @@ import { UpdateUserInput, } from "../types/user" import 
EventBusService from "./event-bus" +import { TransactionBaseService } from "../interfaces" +import { buildQuery, setMetadata } from "../utils" type UserServiceProps = { userRepository: typeof UserRepository @@ -23,45 +24,24 @@ type UserServiceProps = { * Provides layer to manipulate users. * @extends BaseService */ -class UserService extends BaseService { +class UserService extends TransactionBaseService { static Events = { PASSWORD_RESET: "user.password_reset", } - private userRepository_: typeof UserRepository - private eventBus_: EventBusService - private manager_: EntityManager - private transactionManager_: EntityManager + protected manager_: EntityManager + protected transactionManager_: EntityManager + protected readonly userRepository_: typeof UserRepository + protected readonly eventBus_: EventBusService constructor({ userRepository, eventBusService, manager }: UserServiceProps) { - super() + super({ userRepository, eventBusService, manager }) - /** @private @const {UserRepository} */ this.userRepository_ = userRepository - - /** @private @const {EventBus} */ this.eventBus_ = eventBusService - - /** @private @const {EntityManager} */ this.manager_ = manager } - withTransaction(transactionManager: EntityManager): UserService { - if (!transactionManager) { - return this - } - - const cloned = new UserService({ - manager: transactionManager, - userRepository: this.userRepository_, - eventBusService: this.eventBus_, - }) - - cloned.transactionManager_ = transactionManager - - return cloned - } - /** * Used to validate user email. * @param {string} email - email to validate @@ -86,8 +66,12 @@ class UserService extends BaseService { * @return {Promise} the result of the find operation */ async list(selector: FilterableUserProps, config = {}): Promise { - const userRepo = this.manager_.getCustomRepository(this.userRepository_) - return userRepo.find(this.buildQuery_(selector, config)) + return await this.atomicPhase_(async (transactionManager) => { + const userRepo = transactionManager.getCustomRepository( + this.userRepository_ + ) + return await userRepo.find(buildQuery(selector, config)) + }) } /** @@ -98,20 +82,23 @@ class UserService extends BaseService { * @return {Promise} the user document. 
*/ async retrieve(userId: string, config: FindConfig = {}): Promise { - const userRepo = this.manager_.getCustomRepository(this.userRepository_) - const validatedId = this.validateId_(userId) - const query = this.buildQuery_({ id: validatedId }, config) - - const user = await userRepo.findOne(query) - - if (!user) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `User with id: ${userId} was not found` + return await this.atomicPhase_(async (transactionManager) => { + const userRepo = transactionManager.getCustomRepository( + this.userRepository_ ) - } + const query = buildQuery({ id: userId }, config) - return user + const user = await userRepo.findOne(query) + + if (!user) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `User with id: ${userId} was not found` + ) + } + + return user + }) } /** @@ -125,21 +112,25 @@ class UserService extends BaseService { apiToken: string, relations: string[] = [] ): Promise { - const userRepo = this.manager_.getCustomRepository(this.userRepository_) - - const user = await userRepo.findOne({ - where: { api_token: apiToken }, - relations, - }) - - if (!user) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `User with api token: ${apiToken} was not found` + return await this.atomicPhase_(async (transactionManager) => { + const userRepo = transactionManager.getCustomRepository( + this.userRepository_ ) - } - return user + const user = await userRepo.findOne({ + where: { api_token: apiToken }, + relations, + }) + + if (!user) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `User with api token: ${apiToken} was not found` + ) + } + + return user + }) } /** @@ -153,19 +144,23 @@ class UserService extends BaseService { email: string, config: FindConfig = {} ): Promise { - const userRepo = this.manager_.getCustomRepository(this.userRepository_) - - const query = this.buildQuery_({ email: email.toLowerCase() }, config) - const user = await userRepo.findOne(query) - - if (!user) { - throw new MedusaError( - MedusaError.Types.NOT_FOUND, - `User with email: ${email} was not found` + return await this.atomicPhase_(async (transactionManager) => { + const userRepo = transactionManager.getCustomRepository( + this.userRepository_ ) - } - return user + const query = buildQuery({ email: email.toLowerCase() }, config) + const user = await userRepo.findOne(query) + + if (!user) { + throw new MedusaError( + MedusaError.Types.NOT_FOUND, + `User with email: ${email} was not found` + ) + } + + return user + }) } /** @@ -186,7 +181,7 @@ class UserService extends BaseService { * @return {Promise} the result of create */ async create(user: CreateUserInput, password: string): Promise { - return this.atomicPhase_(async (manager: EntityManager) => { + return await this.atomicPhase_(async (manager: EntityManager) => { const userRepo = manager.getCustomRepository(this.userRepository_) const createData = { ...user } as CreateUserInput & { @@ -203,7 +198,7 @@ class UserService extends BaseService { const created = userRepo.create(createData) - return userRepo.save(created) + return await userRepo.save(created) }) } @@ -214,11 +209,10 @@ class UserService extends BaseService { * @return {Promise} the result of create */ async update(userId: string, update: UpdateUserInput): Promise { - return this.atomicPhase_(async (manager: EntityManager) => { + return await this.atomicPhase_(async (manager: EntityManager) => { const userRepo = manager.getCustomRepository(this.userRepository_) - const validatedId = this.validateId_(userId) - const user = await 
this.retrieve(validatedId) + const user = await this.retrieve(userId) const { email, password_hash, metadata, ...rest } = update @@ -237,14 +231,14 @@ class UserService extends BaseService { } if (metadata) { - user.metadata = this.setMetadata_(user, metadata) + user.metadata = setMetadata(user, metadata) } for (const [key, value] of Object.entries(rest)) { user[key as keyof User] = value } - return userRepo.save(user) + return await userRepo.save(user) }) } @@ -254,8 +248,8 @@ class UserService extends BaseService { * castable as an ObjectId * @return {Promise} the result of the delete operation. */ - async delete(userId: string): Promise { - return this.atomicPhase_(async (manager: EntityManager) => { + async delete(userId: string): Promise { + return await this.atomicPhase_(async (manager: EntityManager) => { const userRepo = manager.getCustomRepository(this.userRepository_) // Should not fail, if user does not exist, since delete is idempotent @@ -280,7 +274,7 @@ class UserService extends BaseService { * @return {Promise} the result of the update operation */ async setPassword_(userId: string, password: string): Promise { - return this.atomicPhase_(async (manager: EntityManager) => { + return await this.atomicPhase_(async (manager: EntityManager) => { const userRepo = manager.getCustomRepository(this.userRepository_) const user = await this.retrieve(userId) @@ -295,7 +289,7 @@ class UserService extends BaseService { user.password_hash = hashedPassword - return userRepo.save(user) + return await userRepo.save(user) }) } @@ -309,20 +303,25 @@ class UserService extends BaseService { * @return {string} the generated JSON web token */ async generateResetPasswordToken(userId: string): Promise { - const user = await this.retrieve(userId, { - select: ["id", "email", "password_hash"], - }) - const secret = user.password_hash - const expiry = Math.floor(Date.now() / 1000) + 60 * 15 - const payload = { user_id: user.id, email: user.email, exp: expiry } - const token = jwt.sign(payload, secret) + return await this.atomicPhase_(async (transactionManager) => { + const user = await this.retrieve(userId, { + select: ["id", "email", "password_hash"], + }) + const secret = user.password_hash + const expiry = Math.floor(Date.now() / 1000) + 60 * 15 + const payload = { user_id: user.id, email: user.email, exp: expiry } + const token = jwt.sign(payload, secret) - // Notify subscribers - this.eventBus_.emit(UserService.Events.PASSWORD_RESET, { - email: user.email, - token, + // Notify subscribers + await this.eventBus_ + .withTransaction(transactionManager) + .emit(UserService.Events.PASSWORD_RESET, { + email: user.email, + token, + }) + + return token }) - return token } } diff --git a/packages/medusa/src/strategies/__fixtures__/order-export-data.ts b/packages/medusa/src/strategies/__fixtures__/order-export-data.ts new file mode 100644 index 0000000000..c993150f1b --- /dev/null +++ b/packages/medusa/src/strategies/__fixtures__/order-export-data.ts @@ -0,0 +1,74 @@ +import { DeepPartial } from "typeorm" +import { + FulfillmentStatus, + Order, + OrderStatus, + PaymentStatus, +} from "../../models" + +const createdAtDate = new Date("2019-01-01T00:00:00.000Z") + +export const ordersToExport: DeepPartial[] = [ + { + id: "order_1", + created_at: createdAtDate, + display_id: 123, + status: OrderStatus.PENDING, + fulfillment_status: FulfillmentStatus.PARTIALLY_FULFILLED, + payment_status: PaymentStatus.CAPTURED, + subtotal: 10, + shipping_total: 10, + discount_total: 0, + gift_card_total: 0, + refunded_total: 0, + 
tax_total: 5, + total: 25, + currency_code: "usd", + region_id: "region_1", + shipping_address: { + id: "address_1", + address_1: "123 Main St", + address_2: "", + city: "New York", + country_code: "US", + postal_code: "10001", + }, + customer: { + id: "customer_1", + first_name: "John", + last_name: "Doe", + email: "John@Doe.com", + }, + }, + { + id: "order_2", + created_at: createdAtDate, + display_id: 124, + status: OrderStatus.COMPLETED, + fulfillment_status: FulfillmentStatus.FULFILLED, + payment_status: PaymentStatus.CAPTURED, + subtotal: 125, + shipping_total: 10, + discount_total: 0, + gift_card_total: 0, + refunded_total: 0, + tax_total: 0, + total: 135, + currency_code: "eur", + region_id: "region_2", + shipping_address: { + id: "address_2", + address_1: "Hovedgaden 1", + address_2: "", + city: "Copenhagen", + country_code: "DK", + postal_code: "1150", + }, + customer: { + id: "customer_2", + first_name: "Jane", + last_name: "Doe", + email: "Jane@Doe.com", + }, + }, +] diff --git a/packages/medusa/src/strategies/__fixtures__/product-export-data.ts b/packages/medusa/src/strategies/__fixtures__/product-export-data.ts new file mode 100644 index 0000000000..1403170edf --- /dev/null +++ b/packages/medusa/src/strategies/__fixtures__/product-export-data.ts @@ -0,0 +1,398 @@ +import { IdMap } from "medusa-test-utils" + +const productIds = [ + "product-export-strategy-product-1", + "product-export-strategy-product-2", +] +const variantIds = [ + "product-export-strategy-variant-1", + "product-export-strategy-variant-2", + "product-export-strategy-variant-3", +] +export const productsToExport = [ + { + collection: { + created_at: "randomString", + deleted_at: null, + handle: "test-collection1", + id: IdMap.getId("product-export-collection_1"), + metadata: null, + title: "Test collection 1", + updated_at: "randomString", + }, + collection_id: IdMap.getId("product-export-collection_1"), + created_at: "randomString", + deleted_at: null, + description: "test-product-description-1", + discountable: true, + external_id: null, + handle: "test-product-product-1", + height: null, + hs_code: null, + id: productIds[0], + images: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-image_1"), + metadata: null, + updated_at: "randomString", + url: "test-image.png", + }, + ], + is_giftcard: false, + length: null, + material: null, + metadata: null, + mid_code: null, + options: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-option_1"), + metadata: null, + product_id: productIds[0], + title: "test-option-1", + updated_at: "randomString", + }, + { + created_at: "randomString2", + deleted_at: null, + id: IdMap.getId("product-export-option_2"), + metadata: null, + product_id: productIds[0], + title: "test-option-2", + updated_at: "randomString2", + }, + ], + origin_country: null, + profile_id: IdMap.getId("product-export-profile_1"), + profile: { + id: IdMap.getId("product-export-profile_1"), + name: "profile_1", + type: "profile_type_1", + }, + status: "draft", + subtitle: null, + tags: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-tag_1"), + metadata: null, + updated_at: "randomString", + value: "123_1", + }, + ], + thumbnail: null, + title: "Test product", + type: { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-type_1"), + metadata: null, + updated_at: "randomString", + value: "test-type-1", + }, + type_id: IdMap.getId("product-export-type_1"), + 
updated_at: "randomString", + variants: [ + { + allow_backorder: false, + barcode: "test-barcode", + calculated_price: null, + created_at: "randomString", + deleted_at: null, + ean: "test-ean", + height: null, + hs_code: null, + id: variantIds[0], + inventory_quantity: 10, + length: null, + manage_inventory: true, + material: null, + metadata: null, + mid_code: null, + options: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-variant_option_1"), + metadata: null, + option_id: IdMap.getId("product-export-option_1"), + updated_at: "randomString", + value: "option 1 value 1", + variant_id: variantIds[0], + }, + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-variant_option_2"), + metadata: null, + option_id: IdMap.getId("product-export-option_2"), + updated_at: "randomString", + value: "option 2 value 1", + variant_id: variantIds[0], + }, + ], + origin_country: null, + original_price: null, + prices: [ + { + amount: 100, + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-price_1"), + region_id: IdMap.getId("product-export-region_1"), + max_quantity: null, + min_quantity: null, + price_list: null, + price_list_id: null, + region: { + id: IdMap.getId("product-export-region_1"), + currency_code: "usd", + name: "france", + }, + updated_at: "randomString", + variant_id: variantIds[0], + }, + { + amount: 110, + created_at: "randomString", + currency_code: "usd", + deleted_at: null, + id: IdMap.getId("product-export-price_1"), + region_id: null, + max_quantity: null, + min_quantity: null, + price_list: null, + price_list_id: null, + updated_at: "randomString", + variant_id: variantIds[0], + }, + { + amount: 130, + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-price_1"), + region_id: IdMap.getId("product-export-region_1"), + max_quantity: null, + min_quantity: null, + price_list: null, + price_list_id: null, + region: { + id: IdMap.getId("product-export-region_3"), + name: "denmark", + currency_code: "dkk", + }, + updated_at: "randomString", + variant_id: variantIds[0], + }, + ], + product_id: IdMap.getId("product-export-product_1"), + sku: "test-sku", + title: "Test variant", + upc: "test-upc", + updated_at: "randomString", + weight: null, + width: null, + }, + ], + weight: null, + width: null, + }, + { + collection: { + created_at: "randomString", + deleted_at: null, + handle: "test-collection2", + id: IdMap.getId("product-export-collection_2"), + metadata: null, + title: "Test collection", + updated_at: "randomString", + }, + collection_id: "test-collection", + created_at: "randomString", + deleted_at: null, + description: "test-product-description", + discountable: true, + external_id: null, + handle: "test-product-product-2", + height: null, + hs_code: null, + id: productIds[1], + images: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-image_2"), + metadata: null, + updated_at: "randomString", + url: "test-image.png", + }, + ], + is_giftcard: false, + length: null, + material: null, + metadata: null, + mid_code: null, + options: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-option_2"), + metadata: null, + product_id: productIds[1], + title: "test-option", + updated_at: "randomString", + }, + ], + origin_country: null, + profile_id: IdMap.getId("product-export-profile_2"), + profile: { + id: IdMap.getId("product-export-profile_2"), + name: "profile_2", + type: 
"profile_type_2", + }, + status: "draft", + subtitle: null, + tags: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-tag_2"), + metadata: null, + updated_at: "randomString", + value: "123", + }, + ], + thumbnail: null, + title: "Test product", + type: { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-type_2"), + metadata: null, + updated_at: "randomString", + value: "test-type", + }, + type_id: "test-type", + updated_at: "randomString", + variants: [ + { + allow_backorder: false, + barcode: "test-barcode", + calculated_price: null, + created_at: "randomString", + deleted_at: null, + ean: "test-ean", + height: null, + hs_code: null, + id: variantIds[1], + inventory_quantity: 10, + length: null, + manage_inventory: true, + material: null, + metadata: null, + mid_code: null, + options: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-variant_option_2"), + metadata: null, + option_id: IdMap.getId("product-export-option_2"), + updated_at: "randomString", + value: "Option 1 value 1", + variant_id: variantIds[1], + }, + ], + origin_country: null, + original_price: null, + prices: [ + { + amount: 110, + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-price_2"), + max_quantity: null, + min_quantity: null, + price_list: null, + price_list_id: null, + region_id: IdMap.getId("product-export-region_2"), + region: { + id: IdMap.getId("product-export-region_2"), + name: "Denmark", + currency_code: "dkk", + }, + updated_at: "randomString", + variant_id: variantIds[1], + }, + ], + product_id: IdMap.getId("product-export-product_2"), + sku: "test-sku", + title: "Test variant", + upc: "test-upc", + updated_at: "randomString", + weight: null, + width: null, + }, + { + allow_backorder: false, + barcode: "test-barcode", + calculated_price: null, + created_at: "randomString", + deleted_at: null, + ean: "test-ean", + height: null, + hs_code: null, + id: variantIds[2], + inventory_quantity: 10, + length: null, + manage_inventory: true, + material: null, + metadata: null, + mid_code: null, + options: [ + { + created_at: "randomString", + deleted_at: null, + id: IdMap.getId("product-export-variant_option_2"), + metadata: null, + option_id: IdMap.getId("product-export-option_2"), + updated_at: "randomString", + value: "Option 1 Value 1", + variant_id: variantIds[2], + }, + ], + origin_country: null, + original_price: null, + prices: [ + { + amount: 120, + created_at: "randomString", + currency_code: "usd", + deleted_at: null, + id: IdMap.getId("product-export-price_2"), + max_quantity: null, + min_quantity: null, + price_list: null, + price_list_id: null, + region_id: IdMap.getId("product-export-region_1"), + updated_at: "randomString", + variant_id: variantIds[2], + }, + ], + product_id: productIds[1], + sku: "test-sku", + title: "Test variant", + upc: "test-upc", + updated_at: "randomString", + weight: null, + width: null, + }, + ], + weight: null, + width: null, + }, +] diff --git a/packages/medusa/src/strategies/__tests__/batch-jobs/order/__snapshots__/order-export.ts.snap b/packages/medusa/src/strategies/__tests__/batch-jobs/order/__snapshots__/order-export.ts.snap new file mode 100644 index 0000000000..150d2d471e --- /dev/null +++ b/packages/medusa/src/strategies/__tests__/batch-jobs/order/__snapshots__/order-export.ts.snap @@ -0,0 +1,12 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Order export strategy should process the batch job and generate the 
appropriate output 1`] = ` +Array [ + "Order_ID;Display_ID;Order status;Date;Customer First name;Customer Last name;Customer Email;Customer ID;Shipping Address 1;Shipping Address 2;Shipping Country Code;Shipping City;Shipping Postal Code;Shipping Region ID;Fulfillment Status;Payment Status;Subtotal;Shipping Total;Discount Total;Gift Card Total;Refunded Total;Tax Total;Total;Currency Code +", + "order_1;123;pending;Tue, 01 Jan 2019 00:00:00 GMT;John;Doe;John@Doe.com;customer_1;123 Main St;;US;New York;10001;region_1;partially_fulfilled;captured;10;10;0;0;0;5;25;usd +", + "order_2;124;completed;Tue, 01 Jan 2019 00:00:00 GMT;Jane;Doe;Jane@Doe.com;customer_2;Hovedgaden 1;;DK;Copenhagen;1150;region_2;fulfilled;captured;125;10;0;0;0;0;135;eur +", +] +`; diff --git a/packages/medusa/src/strategies/__tests__/batch-jobs/order/order-export.ts b/packages/medusa/src/strategies/__tests__/batch-jobs/order/order-export.ts new file mode 100644 index 0000000000..b0f747aa1a --- /dev/null +++ b/packages/medusa/src/strategies/__tests__/batch-jobs/order/order-export.ts @@ -0,0 +1,138 @@ +import OrderExportStrategy from "../../../batch-jobs/order/export" +import { IdMap, MockManager } from "medusa-test-utils" +import { User } from "../../../../models" +import { BatchJobStatus } from "../../../../types/batch-job" +import { ordersToExport } from "../../../__fixtures__/order-export-data" + +const outputDataStorage: string[] = [] + +let fakeJob = { + id: IdMap.getId("order-export-job"), + type: "order-export", + context: { + params: {}, + list_config: { + select: [ + "id", + "display_id", + "status", + "created_at", + "fulfillment_status", + "payment_status", + "subtotal", + "shipping_total", + "discount_total", + "gift_card_total", + "refunded_total", + "tax_total", + "total", + "currency_code", + "region_id", + ], + relations: ["customer", "shipping_address"], + }, + }, + created_by: IdMap.getId("order-export-job-creator"), + created_by_user: {} as User, + result: {}, + dry_run: false, + status: BatchJobStatus.PROCESSING, +} + +const fileServiceMock = { + delete: jest.fn(), + withTransaction: function () { + return this + }, + getUploadStreamDescriptor: jest.fn().mockImplementation(() => { + return Promise.resolve({ + writeStream: { + write: (data: string) => { + outputDataStorage.push(data) + }, + end: () => void 0, + }, + promise: Promise.resolve(), + fileKey: "product-export.csv", + }) + }), +} +const batchJobServiceMock = { + withTransaction: function (): any { + return this + }, + update: jest.fn().mockImplementation(async (data) => { + fakeJob = { + ...fakeJob, + ...data, + } + return fakeJob + }), + complete: jest.fn().mockImplementation(async () => { + fakeJob.status = BatchJobStatus.COMPLETED + return fakeJob + }), + ready: jest.fn().mockImplementation(async () => { + fakeJob.status = BatchJobStatus.READY + return fakeJob + }), + retrieve: jest.fn().mockImplementation(async () => { + return fakeJob + }), +} +const orderServiceMock = { + withTransaction: function (): any { + return this + }, + listAndCount: jest + .fn() + .mockImplementation(() => + Promise.resolve([ordersToExport, ordersToExport.length]) + ), + list: jest.fn().mockImplementation(() => Promise.resolve(ordersToExport)), +} + +describe("Order export strategy", () => { + const orderExportStrategy = new OrderExportStrategy({ + batchJobService: batchJobServiceMock as any, + fileService: fileServiceMock as any, + orderService: orderServiceMock as any, + manager: MockManager, + }) + + it("Should generate header as template", async () => { + 
const template = await orderExportStrategy.buildTemplate() + expect(template.split(";")).toEqual([ + "Order_ID", + "Display_ID", + "Order status", + "Date", + "Customer First name", + "Customer Last name", + "Customer Email", + "Customer ID", + "Shipping Address 1", + "Shipping Address 2", + "Shipping Country Code", + "Shipping City", + "Shipping Postal Code", + "Shipping Region ID", + "Fulfillment Status", + "Payment Status", + "Subtotal", + "Shipping Total", + "Discount Total", + "Gift Card Total", + "Refunded Total", + "Tax Total", + "Total", + "Currency Code\r\n", + ]) + }) + + it("should process the batch job and generate the appropriate output", async () => { + await orderExportStrategy.processJob(fakeJob.id) + + expect(outputDataStorage).toMatchSnapshot() + }) +}) diff --git a/packages/medusa/src/strategies/__tests__/batch-jobs/product/__snapshots__/export.ts.snap b/packages/medusa/src/strategies/__tests__/batch-jobs/product/__snapshots__/export.ts.snap new file mode 100644 index 0000000000..537bc939a9 --- /dev/null +++ b/packages/medusa/src/strategies/__tests__/batch-jobs/product/__snapshots__/export.ts.snap @@ -0,0 +1,14 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`Product export strategy should process the batch job and generate the appropriate output 1`] = ` +Array [ + "Product ID;Product Handle;Product Title;Product Subtitle;Product Description;Product Status;Product Thumbnail;Product Weight;Product Length;Product Width;Product Height;Product HS Code;Product Origin Country;Product MID Code;Product Material;Product Collection Title;Product Collection Handle;Product Type;Product Tags;Product Discountable;Product External ID;Product Profile Name;Product Profile Type;Variant ID;Variant Title;Variant SKU;Variant Barcode;Variant Inventory Quantity;Variant Allow backorder;Variant Manage inventory;Variant Weight;Variant Length;Variant Width;Variant Height;Variant HS Code;Variant Origin Country;Variant MID Code;Variant Material;Price france [USD];Price USD;Price denmark [DKK];Price Denmark [DKK];Option 1 Name;Option 1 Value;Option 2 Name;Option 2 Value;Image 1 Url +", + "product-export-strategy-product-1;test-product-product-1;Test product;;test-product-description-1;draft;;;;;;;;;;Test collection 1;test-collection1;test-type-1;123_1;true;;profile_1;profile_type_1;product-export-strategy-variant-1;Test variant;test-sku;test-barcode;10;false;true;;;;;;;;;100;110;130;;test-option-1;option 1 value 1;test-option-2;option 2 value 1;test-image.png +", + "product-export-strategy-product-2;test-product-product-2;Test product;;test-product-description;draft;;;;;;;;;;Test collection;test-collection2;test-type;123;true;;profile_2;profile_type_2;product-export-strategy-variant-2;Test variant;test-sku;test-barcode;10;false;true;;;;;;;;;;;;110;test-option;Option 1 value 1;;;test-image.png +", + "product-export-strategy-product-2;test-product-product-2;Test product;;test-product-description;draft;;;;;;;;;;Test collection;test-collection2;test-type;123;true;;profile_2;profile_type_2;product-export-strategy-variant-3;Test variant;test-sku;test-barcode;10;false;true;;;;;;;;;;120;;;test-option;Option 1 Value 1;;;test-image.png +", +] +`; diff --git a/packages/medusa/src/strategies/__tests__/batch-jobs/product/export.ts b/packages/medusa/src/strategies/__tests__/batch-jobs/product/export.ts new file mode 100644 index 0000000000..9dce14bfa0 --- /dev/null +++ b/packages/medusa/src/strategies/__tests__/batch-jobs/product/export.ts @@ -0,0 +1,209 @@ +import ProductExportStrategy from 
"../../../batch-jobs/product/export" +import { IdMap, MockManager } from "medusa-test-utils" +import { User } from "../../../../models" +import { BatchJobStatus } from "../../../../types/batch-job" +import { productsToExport } from "../../../__fixtures__/product-export-data" +import { AdminPostBatchesReq } from "../../../../api/routes/admin/batch/create-batch-job" +import { defaultAdminProductRelations } from "../../../../api/routes/admin/products" +import { ProductExportBatchJob } from "../../../batch-jobs/product" +import { Request } from "express" + +const outputDataStorage: string[] = [] + +let fakeJob = { + id: IdMap.getId("product-export-job"), + type: 'product-export', + created_by: IdMap.getId("product-export-job-creator"), + created_by_user: {} as User, + context: {}, + result: {}, + dry_run: false, + status: BatchJobStatus.PROCESSING as BatchJobStatus +} as ProductExportBatchJob + +const fileServiceMock = { + delete: jest.fn(), + getUploadStreamDescriptor: jest.fn().mockImplementation(() => { + return Promise.resolve({ + writeStream: { + write: (data: string) => { + outputDataStorage.push(data) + }, + end: () => void 0 + }, + promise: Promise.resolve(), + url: 'product-export.csv' + }) + }), + withTransaction: function () { + return this + } +} +const batchJobServiceMock = { + withTransaction: function () { + return this + }, + update: jest.fn().mockImplementation((job, data) => { + fakeJob = { + ...fakeJob, + ...data, + context: { ...fakeJob?.context, ...data?.context }, + result: { ...fakeJob?.result, ...data?.result } + } + return Promise.resolve(fakeJob) + }), + updateStatus: jest.fn().mockImplementation((status) => { + fakeJob.status = status + return Promise.resolve(fakeJob) + }), + complete: jest.fn().mockImplementation(() => { + fakeJob.status = BatchJobStatus.COMPLETED + return Promise.resolve(fakeJob) + }), + retrieve: jest.fn().mockImplementation(() => { + return Promise.resolve(fakeJob) + }), + setFailed: jest.fn().mockImplementation((...args) => { + console.error(...args) + }) +} +const productServiceMock = { + withTransaction: function () { + return this + }, + list: jest.fn().mockImplementation(() => Promise.resolve(productsToExport)), + count: jest.fn().mockImplementation(() => Promise.resolve(productsToExport.length)), + listAndCount: jest.fn().mockImplementation(() => { + return Promise.resolve([productsToExport, productsToExport.length]) + }), +} +const managerMock = MockManager + +describe("Product export strategy", () => { + const productExportStrategy = new ProductExportStrategy({ + manager: managerMock, + fileService: fileServiceMock as any, + batchJobService: batchJobServiceMock as any, + productService: productServiceMock as any, + }) + + it('should generate the appropriate template', async () => { + await productExportStrategy.prepareBatchJobForProcessing(fakeJob, {} as Request) + await productExportStrategy.preProcessBatchJob(fakeJob.id) + const template = await productExportStrategy.buildHeader(fakeJob) + expect(template).toMatch(/.*Product ID.*/) + expect(template).toMatch(/.*Product Handle.*/) + expect(template).toMatch(/.*Product Title.*/) + expect(template).toMatch(/.*Product Subtitle.*/) + expect(template).toMatch(/.*Product Description.*/) + expect(template).toMatch(/.*Product Status.*/) + expect(template).toMatch(/.*Product Thumbnail.*/) + expect(template).toMatch(/.*Product Weight.*/) + expect(template).toMatch(/.*Product Length.*/) + expect(template).toMatch(/.*Product Width.*/) + expect(template).toMatch(/.*Product Height.*/) + 
expect(template).toMatch(/.*Product HS Code.*/) + expect(template).toMatch(/.*Product Origin Country.*/) + expect(template).toMatch(/.*Product MID Code.*/) + expect(template).toMatch(/.*Product Material.*/) + expect(template).toMatch(/.*Product Collection Title.*/) + expect(template).toMatch(/.*Product Collection Handle.*/) + expect(template).toMatch(/.*Product Type.*/) + expect(template).toMatch(/.*Product Tags.*/) + expect(template).toMatch(/.*Product Discountable.*/) + expect(template).toMatch(/.*Product External ID.*/) + expect(template).toMatch(/.*Product Profile Name.*/) + expect(template).toMatch(/.*Product Profile Type.*/) + expect(template).toMatch(/.*Product Profile Type.*/) + + expect(template).toMatch(/.*Variant ID.*/) + expect(template).toMatch(/.*Variant Title.*/) + expect(template).toMatch(/.*Variant SKU.*/) + expect(template).toMatch(/.*Variant Barcode.*/) + expect(template).toMatch(/.*Variant Allow backorder.*/) + expect(template).toMatch(/.*Variant Manage inventory.*/) + expect(template).toMatch(/.*Variant Weight.*/) + expect(template).toMatch(/.*Variant Length.*/) + expect(template).toMatch(/.*Variant Width.*/) + expect(template).toMatch(/.*Variant Height.*/) + expect(template).toMatch(/.*Variant HS Code.*/) + expect(template).toMatch(/.*Variant Origin Country.*/) + expect(template).toMatch(/.*Variant MID Code.*/) + expect(template).toMatch(/.*Variant Material.*/) + + expect(template).toMatch(/.*Option 1 Name.*/) + expect(template).toMatch(/.*Option 1 Value.*/) + expect(template).toMatch(/.*Option 2 Name.*/) + expect(template).toMatch(/.*Option 2 Value.*/) + + expect(template).toMatch(/.*Price USD.*/) + expect(template).toMatch(/.*Price france \[USD\].*/) + expect(template).toMatch(/.*Price denmark \[DKK\].*/) + expect(template).toMatch(/.*Price Denmark \[DKK\].*/) + + expect(template).toMatch(/.*Image 1 Url.*/) + }) + + it('should process the batch job and generate the appropriate output', async () => { + await productExportStrategy.prepareBatchJobForProcessing(fakeJob, {} as Request) + await productExportStrategy.preProcessBatchJob(fakeJob.id) + await productExportStrategy.processJob(fakeJob.id) + expect(outputDataStorage).toMatchSnapshot() + }) + + it('should prepare the job to be pre proccessed', async () => { + const fakeJob1: AdminPostBatchesReq = { + type: 'product-export', + context: { + limit: 10, + offset: 10, + expand: "variants", + fields: "title", + order: "-title", + filterable_fields: { title: "test" } + }, + dry_run: false + } + + const output1 = await productExportStrategy.prepareBatchJobForProcessing( + fakeJob1, + {} as Express.Request + ) + + expect(output1.context).toEqual(expect.objectContaining({ + list_config: { + select: ["title", "created_at", "id"], + order: { title: "DESC" }, + relations: ["variants"], + skip: 10, + take: 10, + }, + filterable_fields: { title: "test" } + })) + + const fakeJob2: AdminPostBatchesReq = { + type: 'product-export', + context: {}, + dry_run: false + } + + const output2 = await productExportStrategy.prepareBatchJobForProcessing( + fakeJob2, + {} as Express.Request + ) + + expect(output2.context).toEqual(expect.objectContaining({ + list_config: { + select: undefined, + order: { created_at: "DESC" }, + relations: [ + ...defaultAdminProductRelations, + "variants.prices.region" + ], + skip: 0, + take: 50, + }, + filterable_fields: undefined + })) + }) +}) diff --git a/packages/medusa/src/strategies/batch-jobs/order/export.ts b/packages/medusa/src/strategies/batch-jobs/order/export.ts new file mode 100644 index 
0000000000..a14a7d8a1c --- /dev/null +++ b/packages/medusa/src/strategies/batch-jobs/order/export.ts @@ -0,0 +1,295 @@ +import { EntityManager } from "typeorm" +import { + OrderDescriptor, + OrderExportBatchJob, + OrderExportBatchJobContext, + orderExportPropertiesDescriptors, +} from "." +import { AdminPostBatchesReq } from "../../../api/routes/admin/batch/create-batch-job" +import { IFileService } from "../../../interfaces" +import { AbstractBatchJobStrategy } from "../../../interfaces/batch-job-strategy" +import { Order } from "../../../models" +import { OrderService } from "../../../services" +import BatchJobService from "../../../services/batch-job" +import { BatchJobStatus } from "../../../types/batch-job" +import { prepareListQuery } from "../../../utils/get-query-config" + +type InjectedDependencies = { + fileService: IFileService + orderService: OrderService + batchJobService: BatchJobService + manager: EntityManager +} + +class OrderExportStrategy extends AbstractBatchJobStrategy { + public static identifier = "order-export-strategy" + public static batchType = "order-export" + + public defaultMaxRetry = 3 + + protected readonly DEFAULT_LIMIT = 100 + protected readonly NEWLINE = "\r\n" + protected readonly DELIMITER = ";" + + protected manager_: EntityManager + protected transactionManager_: EntityManager | undefined + protected readonly fileService_: IFileService + protected readonly batchJobService_: BatchJobService + protected readonly orderService_: OrderService + + protected readonly defaultRelations_ = ["customer", "shipping_address"] + protected readonly defaultFields_ = [ + "id", + "display_id", + "status", + "created_at", + "fulfillment_status", + "payment_status", + "subtotal", + "shipping_total", + "discount_total", + "gift_card_total", + "refunded_total", + "tax_total", + "total", + "currency_code", + "region_id", + ] + + constructor({ + fileService, + batchJobService, + orderService, + manager, + }: InjectedDependencies) { + // eslint-disable-next-line prefer-rest-params + super(arguments[0]) + + this.manager_ = manager + this.fileService_ = fileService + this.batchJobService_ = batchJobService + this.orderService_ = orderService + } + + async prepareBatchJobForProcessing( + batchJob: AdminPostBatchesReq, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + req: Express.Request + ): Promise { + const { + limit, + offset, + order, + fields, + expand, + filterable_fields, + ...context + } = batchJob.context as OrderExportBatchJobContext + + const listConfig = prepareListQuery( + { + limit, + offset, + order, + fields, + expand, + }, + { + isList: true, + defaultRelations: this.defaultRelations_, + defaultFields: this.defaultFields_, + } + ) + + batchJob.context = { + ...(context ?? {}), + list_config: listConfig, + filterable_fields, + } + + return batchJob + } + + async preProcessBatchJob(batchJobId: string): Promise { + return await this.atomicPhase_(async (transactionManager) => { + const batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .retrieve(batchJobId)) as OrderExportBatchJob + + const offset = batchJob.context?.list_config?.skip ?? 0 + const limit = batchJob.context?.list_config?.take ?? this.DEFAULT_LIMIT + + const { list_config = {}, filterable_fields = {} } = batchJob.context + + let count = batchJob.context?.batch_size + + if (!count) { + const [, orderCount] = await this.orderService_ + .withTransaction(transactionManager) + .listAndCount(filterable_fields, { + ...(list_config ?? 
{}), + skip: offset as number, + order: { created_at: "DESC" }, + take: Math.min(batchJob.context.batch_size ?? Infinity, limit), + }) + count = orderCount + } + + await this.batchJobService_ + .withTransaction(transactionManager) + .update(batchJob, { + result: { + stat_descriptors: [ + { + key: "order-export-count", + name: "Order count to export", + message: `There will be ${count} orders exported by this action`, + }, + ], + }, + }) + }) + } + + async processJob(batchJobId: string): Promise { + let offset = 0 + let limit = this.DEFAULT_LIMIT + let advancementCount = 0 + let orderCount = 0 + let approximateFileSize = 0 + + return await this.atomicPhase_( + async (transactionManager) => { + let batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .retrieve(batchJobId)) as OrderExportBatchJob + + const { writeStream, fileKey, promise } = await this.fileService_ + .withTransaction(transactionManager) + .getUploadStreamDescriptor({ + name: `exports/orders/order-export-${Date.now()}`, + ext: "csv", + }) + + advancementCount = + batchJob.result?.advancement_count ?? advancementCount + offset = (batchJob.context?.list_config?.skip ?? 0) + advancementCount + limit = batchJob.context?.list_config?.take ?? limit + + const { list_config = {}, filterable_fields = {} } = batchJob.context + const [, count] = await this.orderService_.listAndCount( + filterable_fields, + { + ...list_config, + order: { created_at: "DESC" }, + skip: offset, + take: Math.min(batchJob.context.batch_size ?? Infinity, limit), + } + ) + + const lineDescriptor = this.getLineDescriptor( + list_config.select as string[], + list_config.relations as string[] + ) + + const header = this.buildHeader(lineDescriptor) + approximateFileSize += Buffer.from(header).byteLength + writeStream.write(header) + + orderCount = batchJob.context?.batch_size ?? 
count + let orders = [] + + while (offset < orderCount) { + orders = await this.orderService_ + .withTransaction(transactionManager) + .list(filterable_fields, { + ...list_config, + skip: offset, + take: Math.min(orderCount - offset, limit), + }) + + orders.forEach((order) => { + const line = this.buildCSVLine(order, lineDescriptor) + approximateFileSize += Buffer.from(line).byteLength + writeStream.write(line) + }) + + advancementCount += orders.length + offset += orders.length + + batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .update(batchJobId, { + result: { + file_key: fileKey, + file_size: approximateFileSize, + count: orderCount, + advancement_count: advancementCount, + progress: advancementCount / orderCount, + }, + })) as OrderExportBatchJob + + if (batchJob.status === BatchJobStatus.CANCELED) { + writeStream.end() + + await this.fileService_ + .withTransaction(transactionManager) + .delete({ key: fileKey }) + + return + } + } + + writeStream.end() + + await promise + }, + "REPEATABLE READ", + async (err: Error) => { + this.handleProcessingError(batchJobId, err, { + offset, + count: orderCount, + progress: offset / orderCount, + }) + } + ) + } + + public async buildTemplate(): Promise { + return this.buildHeader( + this.getLineDescriptor(this.defaultFields_, this.defaultRelations_) + ) + } + + private buildHeader( + lineDescriptor: OrderDescriptor[] = orderExportPropertiesDescriptors + ): string { + return ( + [...lineDescriptor.map(({ title }) => title)].join(this.DELIMITER) + + this.NEWLINE + ) + } + + private buildCSVLine( + order: Order, + lineDescriptor: OrderDescriptor[] + ): string { + return ( + [...lineDescriptor.map(({ accessor }) => accessor(order))].join( + this.DELIMITER + ) + this.NEWLINE + ) + } + + private getLineDescriptor( + fields: string[], + relations: string[] + ): OrderDescriptor[] { + return orderExportPropertiesDescriptors.filter( + ({ fieldName }) => + fields.indexOf(fieldName) !== -1 || relations.indexOf(fieldName) !== -1 + ) + } +} + +export default OrderExportStrategy diff --git a/packages/medusa/src/strategies/batch-jobs/order/index.ts b/packages/medusa/src/strategies/batch-jobs/order/index.ts new file mode 100644 index 0000000000..f0cba7d1ff --- /dev/null +++ b/packages/medusa/src/strategies/batch-jobs/order/index.ts @@ -0,0 +1,153 @@ +import { BatchJob, Order } from "../../../models" +import { Selector } from "../../../types/common" + +export type OrderExportBatchJobContext = { + offset?: number + limit?: number + order?: string + fields?: string + expand?: string + + list_config?: { + select?: string[] + relations?: string[] + skip?: number + take?: number + order?: Record + } + filterable_fields?: Selector + + retry_count?: number + max_retry?: number + batch_size?: number +} + +export type OrderExportBatchJob = BatchJob & { + context: OrderExportBatchJobContext +} + +export type OrderDescriptor = { + fieldName: string + title: string + accessor: (entity: Order) => string +} + +export const orderExportPropertiesDescriptors: OrderDescriptor[] = [ + { + fieldName: "id", + title: "Order_ID", + accessor: (order: Order): string => order.id, + }, + { + fieldName: "display_id", + title: "Display_ID", + accessor: (order: Order): string => order.display_id.toString(), + }, + { + fieldName: "status", + title: "Order status", + accessor: (order: Order): string => order.status.toString(), + }, + + { + fieldName: "created_at", + title: "Date", + accessor: (order: Order): string => order.created_at.toUTCString(), + }, + + { + 
fieldName: "customer", + title: [ + "Customer First name", + "Customer Last name", + "Customer Email", + "Customer ID", + ].join(";"), + accessor: (order: Order): string => + [ + order.customer.first_name, + order.customer.last_name, + order.customer.email, + order.customer.id, + ].join(";"), + }, + + { + fieldName: "shipping_address", + title: [ + "Shipping Address 1", + "Shipping Address 2", + "Shipping Country Code", + "Shipping City", + "Shipping Postal Code", + "Shipping Region ID", + ].join(";"), + accessor: (order: Order): string => + [ + order.shipping_address?.address_1, + order.shipping_address?.address_2, + order.shipping_address?.country_code, + order.shipping_address?.city, + order.shipping_address?.postal_code, + order.region_id, + ].join(";"), + }, + + { + fieldName: "fulfillment_status", + title: "Fulfillment Status", + accessor: (order: Order): string => order.fulfillment_status, + }, + + { + fieldName: "payment_status", + title: "Payment Status", + accessor: (order: Order): string => order.payment_status, + }, + + { + fieldName: "subtotal", + title: "Subtotal", + accessor: (order: Order): string => order.subtotal.toString(), + }, + + { + fieldName: "shipping_total", + title: "Shipping Total", + accessor: (order: Order): string => order.shipping_total.toString(), + }, + + { + fieldName: "discount_total", + title: "Discount Total", + accessor: (order: Order): string => order.discount_total.toString(), + }, + + { + fieldName: "gift_card_total", + title: "Gift Card Total", + accessor: (order: Order): string => order.gift_card_total.toString(), + }, + + { + fieldName: "refunded_total", + title: "Refunded Total", + accessor: (order: Order): string => order.refunded_total.toString(), + }, + { + fieldName: "tax_total", + title: "Tax Total", + accessor: (order: Order): string => order.tax_total.toString(), + }, + { + fieldName: "total", + title: "Total", + accessor: (order: Order): string => order.total.toString(), + }, + + { + fieldName: "currency_code", + title: "Currency Code", + accessor: (order: Order): string => order.currency_code, + }, +] diff --git a/packages/medusa/src/strategies/batch-jobs/product/export.ts b/packages/medusa/src/strategies/batch-jobs/product/export.ts new file mode 100644 index 0000000000..8fe854f8d0 --- /dev/null +++ b/packages/medusa/src/strategies/batch-jobs/product/export.ts @@ -0,0 +1,430 @@ +import { EntityManager } from "typeorm" +import { AbstractBatchJobStrategy, IFileService } from "../../../interfaces" +import { Product, ProductVariant } from "../../../models" +import { BatchJobService, ProductService } from "../../../services" +import { BatchJobStatus, CreateBatchJobInput } from "../../../types/batch-job" +import { defaultAdminProductRelations } from "../../../api/routes/admin/products" +import { prepareListQuery } from "../../../utils/get-query-config" +import { + ProductExportBatchJob, + ProductExportBatchJobContext, + ProductExportColumnSchemaDescriptor, + ProductExportPriceData, + productExportSchemaDescriptors, +} from "./index" +import { FindProductConfig } from "../../../types/product" + +type InjectedDependencies = { + manager: EntityManager + batchJobService: BatchJobService + productService: ProductService + fileService: IFileService +} + +export default class ProductExportStrategy extends AbstractBatchJobStrategy< + ProductExportStrategy, + InjectedDependencies +> { + public static identifier = "product-export-strategy" + public static batchType = "product-export" + + protected manager_: EntityManager + protected 
transactionManager_: EntityManager | undefined + + protected readonly batchJobService_: BatchJobService + protected readonly productService_: ProductService + protected readonly fileService_: IFileService + + protected readonly defaultRelations_ = [ + ...defaultAdminProductRelations, + "variants.prices.region", + ] + /* + * + * The dynamic columns corresponding to the lowest level of relations are built later on. + * You can have a look at the buildHeader method that take care of appending the other + * column descriptors to this map. + * + */ + protected readonly columnDescriptors: Map< + string, + ProductExportColumnSchemaDescriptor + > = productExportSchemaDescriptors + + private readonly NEWLINE_ = "\r\n" + private readonly DELIMITER_ = ";" + private readonly DEFAULT_LIMIT = 50 + + constructor({ + manager, + batchJobService, + productService, + fileService, + }: InjectedDependencies) { + super({ + manager, + batchJobService, + productService, + fileService, + }) + + this.manager_ = manager + this.batchJobService_ = batchJobService + this.productService_ = productService + this.fileService_ = fileService + } + + async buildTemplate(): Promise { + return "" + } + + async prepareBatchJobForProcessing( + batchJob: CreateBatchJobInput, + // eslint-disable-next-line @typescript-eslint/no-unused-vars + req: Express.Request + ): Promise { + const { + limit, + offset, + order, + fields, + expand, + filterable_fields, + ...context + } = (batchJob?.context ?? {}) as ProductExportBatchJobContext + + const listConfig = prepareListQuery( + { + limit, + offset, + order, + fields, + expand, + }, + { + isList: true, + defaultRelations: this.defaultRelations_, + } + ) + + batchJob.context = { + ...(context ?? {}), + list_config: listConfig, + filterable_fields, + } + + return batchJob + } + + async preProcessBatchJob(batchJobId: string): Promise { + return await this.atomicPhase_(async (transactionManager) => { + const batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .retrieve(batchJobId)) as ProductExportBatchJob + + let offset = batchJob.context?.list_config?.skip ?? 0 + const limit = batchJob.context?.list_config?.take ?? this.DEFAULT_LIMIT + + const { list_config = {}, filterable_fields = {} } = batchJob.context + const [productList, count] = await this.productService_ + .withTransaction(transactionManager) + .listAndCount(filterable_fields, { + ...(list_config ?? {}), + take: Math.min(batchJob.context.batch_size ?? Infinity, limit), + } as FindProductConfig) + + const productCount = batchJob.context?.batch_size ?? count + let products: Product[] = productList + + let dynamicOptionColumnCount = 0 + let dynamicImageColumnCount = 0 + + const pricesData = new Set() + + while (offset < productCount) { + if (!products?.length) { + products = await this.productService_ + .withTransaction(transactionManager) + .list(filterable_fields, { + ...list_config, + skip: offset, + take: Math.min(productCount - offset, limit), + } as FindProductConfig) + } + + // Retrieve the highest count of each object to build the dynamic columns later + for (const product of products) { + const optionsCount = product?.options?.length ?? 0 + dynamicOptionColumnCount = Math.max( + dynamicOptionColumnCount, + optionsCount + ) + + const imageCount = product?.images?.length ?? 0 + dynamicImageColumnCount = Math.max( + dynamicImageColumnCount, + imageCount + ) + + for (const variant of product?.variants ?? 
[]) { + if (variant.prices?.length) { + variant.prices.forEach((price) => { + pricesData.add( + JSON.stringify({ + currency_code: price.currency_code, + region: price.region + ? { + currency_code: price.region.currency_code, + name: price.region.name, + id: price.region.id, + } + : null, + }) + ) + }) + } + } + } + + offset += products.length + products = [] + } + + await this.batchJobService_ + .withTransaction(transactionManager) + .update(batchJob, { + context: { + shape: { + dynamicImageColumnCount, + dynamicOptionColumnCount, + prices: [...pricesData].map((stringifyData) => + JSON.parse(stringifyData) + ), + }, + }, + result: { + stat_descriptors: [ + { + key: "product-export-count", + name: "Product count to export", + message: `There will be ${productCount} products exported by this action`, + }, + ], + }, + }) + }) + } + + async processJob(batchJobId: string): Promise { + let offset = 0 + let limit = this.DEFAULT_LIMIT + let advancementCount = 0 + let productCount = 0 + let approximateFileSize = 0 + + return await this.atomicPhase_( + async (transactionManager) => { + let batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .retrieve(batchJobId)) as ProductExportBatchJob + + const { writeStream, fileKey, promise } = await this.fileService_ + .withTransaction(transactionManager) + .getUploadStreamDescriptor({ + name: `exports/products/product-export-${Date.now()}`, + ext: "csv", + }) + + const header = await this.buildHeader(batchJob) + + approximateFileSize += Buffer.from(header).byteLength + writeStream.write(header) + + advancementCount = + batchJob.result?.advancement_count ?? advancementCount + offset = (batchJob.context?.list_config?.skip ?? 0) + advancementCount + limit = batchJob.context?.list_config?.take ?? limit + + const { list_config = {}, filterable_fields = {} } = batchJob.context + const [productList, count] = await this.productService_ + .withTransaction(transactionManager) + .listAndCount(filterable_fields, { + ...list_config, + skip: offset, + take: Math.min(batchJob.context.batch_size ?? Infinity, limit), + } as FindProductConfig) + + productCount = batchJob.context?.batch_size ?? 
count + let products: Product[] = productList + + while (offset < productCount) { + if (!products?.length) { + products = await this.productService_ + .withTransaction(transactionManager) + .list(filterable_fields, { + ...list_config, + skip: offset, + take: Math.min(productCount - offset, limit), + } as FindProductConfig) + } + + products.forEach((product: Product) => { + const lines = this.buildProductVariantLines(product) + lines.forEach((line) => { + approximateFileSize += Buffer.from(line).byteLength + writeStream.write(line) + }) + }) + + advancementCount += products.length + offset += products.length + products = [] + + batchJob = (await this.batchJobService_ + .withTransaction(transactionManager) + .update(batchJobId, { + result: { + file_key: fileKey, + file_size: approximateFileSize, + count: productCount, + advancement_count: advancementCount, + progress: advancementCount / productCount, + }, + })) as ProductExportBatchJob + + if (batchJob.status === BatchJobStatus.CANCELED) { + writeStream.end() + + await this.fileService_ + .withTransaction(transactionManager) + .delete({ fileKey }) + return + } + } + + writeStream.end() + + return await promise + }, + "REPEATABLE READ", + async (err) => + this.handleProcessingError(batchJobId, err, { + count: productCount, + advancement_count: advancementCount, + progress: advancementCount / productCount, + }) + ) + } + + public async buildHeader(batchJob: ProductExportBatchJob): Promise { + const { + prices = [], + dynamicImageColumnCount, + dynamicOptionColumnCount, + } = batchJob?.context?.shape ?? {} + + this.appendMoneyAmountDescriptors(prices) + this.appendOptionsDescriptors(dynamicOptionColumnCount) + this.appendImagesDescriptors(dynamicImageColumnCount) + + return ( + [...this.columnDescriptors.keys()].join(this.DELIMITER_) + this.NEWLINE_ + ) + } + + private appendImagesDescriptors(maxImagesCount: number): void { + for (let i = 0; i < maxImagesCount; ++i) { + this.columnDescriptors.set(`Image ${i + 1} Url`, { + accessor: (product: Product) => product?.images[i]?.url ?? "", + entityName: "product", + }) + } + } + + private appendOptionsDescriptors(maxOptionsCount: number): void { + for (let i = 0; i < maxOptionsCount; ++i) { + this.columnDescriptors + .set(`Option ${i + 1} Name`, { + accessor: (productOption: Product) => + productOption?.options[i]?.title ?? "", + entityName: "product", + }) + .set(`Option ${i + 1} Value`, { + accessor: (variant: ProductVariant) => + variant?.options[i]?.value ?? "", + entityName: "variant", + }) + } + } + + private appendMoneyAmountDescriptors( + pricesData: ProductExportPriceData[] + ): void { + for (const priceData of pricesData) { + if (priceData.currency_code) { + this.columnDescriptors.set( + `Price ${priceData.currency_code?.toUpperCase()}`, + { + accessor: (variant: ProductVariant) => { + const price = variant.prices.find((variantPrice) => { + return ( + variantPrice.currency_code && + priceData.currency_code && + variantPrice.currency_code.toLowerCase() === + priceData.currency_code.toLowerCase() + ) + }) + return price?.amount?.toString() ?? "" + }, + entityName: "variant", + } + ) + } + + if (priceData.region) { + this.columnDescriptors.set( + `Price ${priceData.region.name} ${ + priceData.region?.currency_code + ? 
"[" + priceData.region?.currency_code.toUpperCase() + "]" + : "" + }`, + { + accessor: (variant: ProductVariant) => { + const price = variant.prices.find((variantPrice) => { + return ( + variantPrice.region && + priceData.region && + variantPrice.region?.name?.toLowerCase() === + priceData.region?.name?.toLowerCase() && + variantPrice.region?.id?.toLowerCase() === + priceData.region?.id?.toLowerCase() + ) + }) + return price?.amount?.toString() ?? "" + }, + entityName: "variant", + } + ) + } + } + } + + private buildProductVariantLines(product: Product): string[] { + const outputLineData: string[] = [] + + for (const variant of product.variants) { + const variantLineData: string[] = [] + for (const [, columnSchema] of this.columnDescriptors.entries()) { + if (columnSchema.entityName === "product") { + variantLineData.push(columnSchema.accessor(product)) + } + if (columnSchema.entityName === "variant") { + variantLineData.push(columnSchema.accessor(variant)) + } + } + outputLineData.push(variantLineData.join(this.DELIMITER_) + this.NEWLINE_) + } + + return outputLineData + } +} diff --git a/packages/medusa/src/strategies/batch-jobs/product/index.ts b/packages/medusa/src/strategies/batch-jobs/product/index.ts new file mode 100644 index 0000000000..47743c0485 --- /dev/null +++ b/packages/medusa/src/strategies/batch-jobs/product/index.ts @@ -0,0 +1,336 @@ +import { BatchJob, Product, ProductVariant } from "../../../models" +import { Selector } from "../../../types/common" + +export type ProductExportBatchJobContext = { + retry_count?: number + max_retry?: number + offset?: number + limit?: number + batch_size?: number + order?: string + fields?: string + expand?: string + shape: { + prices: ProductExportPriceData[] + dynamicOptionColumnCount: number + dynamicImageColumnCount: number + } + list_config?: { + select?: string[] + relations?: string[] + skip?: number + take?: number + order?: Record + } + filterable_fields?: Selector +} + +export type ProductExportPriceData = { + currency_code?: string + region?: { name: string; currency_code: string; id: string } +} + +export type ProductExportBatchJob = BatchJob & { + context: ProductExportBatchJobContext +} + +export type ProductExportColumnSchemaEntity = "product" | "variant" + +export type ProductExportColumnSchemaDescriptor = + | { + accessor: (product: Product) => string + entityName: Extract + } + | { + accessor: (variant: ProductVariant) => string + entityName: Extract + } + +export const productExportSchemaDescriptors = new Map< + string, + ProductExportColumnSchemaDescriptor +>([ + [ + "Product ID", + { + accessor: (product: Product): string => product?.id ?? "", + entityName: "product", + }, + ], + [ + "Product Handle", + { + accessor: (product: Product): string => product?.handle ?? "", + entityName: "product", + }, + ], + [ + "Product Title", + { + accessor: (product: Product): string => product?.title ?? "", + entityName: "product", + }, + ], + [ + "Product Subtitle", + { + accessor: (product: Product): string => product?.subtitle ?? "", + entityName: "product", + }, + ], + [ + "Product Description", + { + accessor: (product: Product): string => product?.description ?? "", + entityName: "product", + }, + ], + [ + "Product Status", + { + accessor: (product: Product): string => product?.status ?? "", + entityName: "product", + }, + ], + [ + "Product Thumbnail", + { + accessor: (product: Product): string => product?.thumbnail ?? 
"", + entityName: "product", + }, + ], + [ + "Product Weight", + { + accessor: (product: Product): string => product?.weight?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product Length", + { + accessor: (product: Product): string => product?.length?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product Width", + { + accessor: (product: Product): string => product?.width?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product Height", + { + accessor: (product: Product): string => product?.height?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product HS Code", + { + accessor: (product: Product): string => + product?.hs_code?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product Origin Country", + { + accessor: (product: Product): string => + product?.origin_country?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product MID Code", + { + accessor: (product: Product): string => + product?.mid_code?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product Material", + { + accessor: (product: Product): string => + product?.material?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product Collection Title", + { + accessor: (product: Product): string => product?.collection?.title ?? "", + entityName: "product", + }, + ], + [ + "Product Collection Handle", + { + accessor: (product: Product): string => product?.collection?.handle ?? "", + entityName: "product", + }, + ], + [ + "Product Type", + { + accessor: (product: Product): string => product?.type?.value ?? "", + entityName: "product", + }, + ], + [ + "Product Tags", + { + accessor: (product: Product): string => + (product.tags.map((t) => t.value) ?? []).join(","), + entityName: "product", + }, + ], + [ + "Product Discountable", + { + accessor: (product: Product): string => + product?.discountable?.toString() ?? "", + entityName: "product", + }, + ], + [ + "Product External ID", + { + accessor: (product: Product): string => product?.external_id ?? "", + entityName: "product", + }, + ], + [ + "Product Profile Name", + { + accessor: (product: Product): string => product?.profile?.name ?? "", + entityName: "product", + }, + ], + [ + "Product Profile Type", + { + accessor: (product: Product): string => product?.profile?.type ?? "", + entityName: "product", + }, + ], + [ + "Variant ID", + { + accessor: (variant: ProductVariant): string => variant?.id ?? "", + entityName: "variant", + }, + ], + [ + "Variant Title", + { + accessor: (variant: ProductVariant): string => variant?.title ?? "", + entityName: "variant", + }, + ], + [ + "Variant SKU", + { + accessor: (variant: ProductVariant): string => variant?.sku ?? "", + entityName: "variant", + }, + ], + [ + "Variant Barcode", + { + accessor: (variant: ProductVariant): string => variant?.barcode ?? "", + entityName: "variant", + }, + ], + [ + "Variant Inventory Quantity", + { + accessor: (variant: ProductVariant): string => + variant?.inventory_quantity?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant Allow backorder", + { + accessor: (variant: ProductVariant): string => + variant?.allow_backorder?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant Manage inventory", + { + accessor: (variant: ProductVariant): string => + variant?.manage_inventory?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant Weight", + { + accessor: (variant: ProductVariant): string => + variant?.weight?.toString() ?? 
"", + entityName: "variant", + }, + ], + [ + "Variant Length", + { + accessor: (variant: ProductVariant): string => + variant?.length?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant Width", + { + accessor: (variant: ProductVariant): string => + variant?.width?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant Height", + { + accessor: (variant: ProductVariant): string => + variant?.height?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant HS Code", + { + accessor: (variant: ProductVariant): string => + variant?.hs_code?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant Origin Country", + { + accessor: (variant: ProductVariant): string => + variant?.origin_country?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant MID Code", + { + accessor: (variant: ProductVariant): string => + variant?.mid_code?.toString() ?? "", + entityName: "variant", + }, + ], + [ + "Variant Material", + { + accessor: (variant: ProductVariant): string => + variant?.material?.toString() ?? "", + entityName: "variant", + }, + ], +]) diff --git a/packages/medusa/src/subscribers/batch-job.ts b/packages/medusa/src/subscribers/batch-job.ts new file mode 100644 index 0000000000..9cd7a22114 --- /dev/null +++ b/packages/medusa/src/subscribers/batch-job.ts @@ -0,0 +1,57 @@ +import BatchJobService from "../services/batch-job" +import EventBusService from "../services/event-bus" +import { StrategyResolverService } from "../services" + +type InjectedDependencies = { + eventBusService: EventBusService + batchJobService: BatchJobService + strategyResolverService: StrategyResolverService +} + +class BatchJobSubscriber { + private readonly eventBusService_: EventBusService + private readonly batchJobService_: BatchJobService + private readonly strategyResolver_: StrategyResolverService + + constructor({ + eventBusService, + batchJobService, + strategyResolverService, + }: InjectedDependencies) { + this.eventBusService_ = eventBusService + this.batchJobService_ = batchJobService + this.strategyResolver_ = strategyResolverService + + this.eventBusService_ + .subscribe(BatchJobService.Events.CREATED, this.preProcessBatchJob) + .subscribe(BatchJobService.Events.CONFIRMED, this.processBatchJob) + } + + preProcessBatchJob = async (data): Promise => { + const batchJob = await this.batchJobService_.retrieve(data.id) + + const batchJobStrategy = this.strategyResolver_.resolveBatchJobByType( + batchJob.type + ) + + await batchJobStrategy.preProcessBatchJob(batchJob.id) + + await this.batchJobService_.setPreProcessingDone(batchJob.id) + } + + processBatchJob = async (data): Promise => { + const batchJob = await this.batchJobService_.retrieve(data.id) + + const batchJobStrategy = this.strategyResolver_.resolveBatchJobByType( + batchJob.type + ) + + await this.batchJobService_.setProcessing(batchJob.id) + + await batchJobStrategy.processJob(batchJob.id) + + await this.batchJobService_.complete(batchJob.id) + } +} + +export default BatchJobSubscriber diff --git a/packages/medusa/src/types/batch-job.ts b/packages/medusa/src/types/batch-job.ts index 4b64738ffa..66584a046e 100644 --- a/packages/medusa/src/types/batch-job.ts +++ b/packages/medusa/src/types/batch-job.ts @@ -8,12 +8,36 @@ import { } from "class-validator" import { IsType } from "../utils/validators/is-type" import { DateComparisonOperator } from "./common" +import { BatchJob } from "../models" export enum BatchJobStatus { CREATED = "created", + PRE_PROCESSED = "pre_processed", + CONFIRMED = 
"confirmed", PROCESSING = "processing", - AWAITING_CONFIRMATION = "awaiting_confirmation", COMPLETED = "completed", + CANCELED = "canceled", + FAILED = "failed", +} + +export type BatchJobUpdateProps = Partial> + +export type CreateBatchJobInput = { + type: string + context: BatchJob["context"] + dry_run: boolean +} + +export type BatchJobResultError = { + message: string + code: string | number + [key: string]: unknown +} + +export type BatchJobResultStatDescriptor = { + key: string + name: string + message: string } export class FilterableBatchJobProps { @@ -44,3 +68,8 @@ export class FilterableBatchJobProps { @Type(() => DateComparisonOperator) updated_at?: DateComparisonOperator } + +export type BatchJobCreateProps = Pick< + BatchJob, + "context" | "type" | "created_by" | "dry_run" +> diff --git a/packages/medusa/src/types/claim.ts b/packages/medusa/src/types/claim.ts new file mode 100644 index 0000000000..adab858cc9 --- /dev/null +++ b/packages/medusa/src/types/claim.ts @@ -0,0 +1,81 @@ +import { ClaimReason, ClaimType, Order } from "../models" +import { AddressPayload } from "./common" + +export type ClaimTypeValue = `${ClaimType}` + +/* CREATE INPUT */ + +export type CreateClaimInput = { + type: ClaimTypeValue + claim_items: CreateClaimItemInput[] + return_shipping?: CreateClaimReturnShippingInput + additional_items?: CreateClaimItemAdditionalItemInput[] + shipping_methods?: CreateClaimShippingMethodInput[] + refund_amount?: number + shipping_address?: AddressPayload + no_notification?: boolean + metadata?: object + order: Order + claim_order_id?: string + shipping_address_id?: string +} + +type CreateClaimReturnShippingInput = { + option_id?: string + price?: number +} + +type CreateClaimShippingMethodInput = { + id?: string + option_id?: string + price?: number +} + +export type CreateClaimItemInput = { + item_id: string + quantity: number + claim_order_id?: string + reason: ClaimReason + note?: string + tags?: string[] + images?: string[] +} + +type CreateClaimItemAdditionalItemInput = { + variant_id: string + quantity: number +} + +/* UPDATE INPUT */ + +export type UpdateClaimInput = { + claim_items?: UpdateClaimItemInput[] + shipping_methods?: UpdateClaimShippingMethodInput[] + no_notification?: boolean + metadata?: Record +} + +type UpdateClaimShippingMethodInput = { + id?: string + option_id?: string + price?: number +} + +type UpdateClaimItemInput = { + id: string + note?: string + reason?: string + images: UpdateClaimItemImageInput[] + tags: UpdateClaimItemTagInput[] + metadata?: object +} + +type UpdateClaimItemImageInput = { + id?: string + url?: string +} + +type UpdateClaimItemTagInput = { + id?: string + value?: string +} diff --git a/packages/medusa/src/types/common.ts b/packages/medusa/src/types/common.ts index c75d4de338..040105e02e 100644 --- a/packages/medusa/src/types/common.ts +++ b/packages/medusa/src/types/common.ts @@ -7,8 +7,23 @@ import { IsString, } from "class-validator" import "reflect-metadata" -import { FindManyOptions, FindOperator, OrderByCondition } from "typeorm" +import { + FindManyOptions, + FindOneOptions, + FindOperator, + OrderByCondition, +} from "typeorm" import { transformDate } from "../utils/validators/date-transform" +import { BaseEntity } from "../interfaces/models/base-entity" + +/** + * Utility type used to remove some optional attributes (coming from K) from a type T + */ +export type WithRequiredProperty = T & + { + // -? 
removes 'optional' from a property + [Property in K]-?: T[Property] + } export type PartialPick = { [P in K]?: T[P] @@ -21,11 +36,17 @@ export type Writable = { | FindOperator } -export type ExtendedFindConfig = FindConfig & { - where: Partial> - withDeleted?: boolean - relations?: string[] -} +export type ExtendedFindConfig< + TEntity, + TWhereKeys = TEntity +> = FindConfig & + (FindOneOptions | FindManyOptions) & { + where: Partial> + withDeleted?: boolean + relations?: string[] + } + +export type QuerySelector = Selector & { q?: string } export type Selector = { [key in keyof TEntity]?: @@ -66,6 +87,22 @@ export interface CustomFindOptions { take?: number } +export type QueryConfig = { + defaultFields?: (keyof TEntity | string)[] + defaultRelations?: string[] + allowedFields?: string[] + defaultLimit?: number + isList?: boolean +} + +export type RequestQueryFields = { + expand?: string + fields?: string + offset?: number + limit?: number + order?: string +} + export type PaginatedResponse = { limit: number; offset: number; count: number } export type DeleteResponse = { diff --git a/packages/medusa/src/types/customers.ts b/packages/medusa/src/types/customers.ts index 9bbfd4619e..873c12a237 100644 --- a/packages/medusa/src/types/customers.ts +++ b/packages/medusa/src/types/customers.ts @@ -1,4 +1,5 @@ import { IsOptional, IsString } from "class-validator" +import { AddressPayload } from "./common" export class AdminListCustomerSelector { @IsString() @@ -9,3 +10,28 @@ export class AdminListCustomerSelector { @IsString({ each: true }) groups?: string[] } + +export type CreateCustomerInput = { + email: string + password?: string + password_hash?: string + has_account?: boolean + + first_name?: string + last_name?: string + phone?: string + metadata?: Record +} + +export type UpdateCustomerInput = { + password?: string + metadata?: Record + billing_address?: AddressPayload | string + billing_address_id?: string + groups?: { id: string }[] + + email?: string + first_name?: string + last_name?: string + phone?: string +} diff --git a/packages/medusa/src/types/discount.ts b/packages/medusa/src/types/discount.ts index f835e0ff1d..7883fd6273 100644 --- a/packages/medusa/src/types/discount.ts +++ b/packages/medusa/src/types/discount.ts @@ -11,6 +11,7 @@ import { import { DiscountConditionOperator } from "../models/discount-condition" import { AllocationType, DiscountRuleType } from "../models/discount-rule" import { ExactlyOne } from "./validators/exactly-one" +import { Region } from "../models" export type QuerySelector = { q?: string @@ -132,7 +133,7 @@ export type CreateDiscountInput = { ends_at?: Date valid_duration?: string usage_limit?: number - regions?: string[] + regions?: string[] | Region[] metadata?: Record } diff --git a/packages/medusa/src/types/feature-flags.ts b/packages/medusa/src/types/feature-flags.ts new file mode 100644 index 0000000000..e29acaffb2 --- /dev/null +++ b/packages/medusa/src/types/feature-flags.ts @@ -0,0 +1,10 @@ +export interface IFlagRouter { + isFeatureEnabled: (key: string) => boolean +} + +export type FlagSettings = { + key: string + description: string + env_key: string + default_val: boolean +} diff --git a/packages/medusa/src/types/fulfillment.ts b/packages/medusa/src/types/fulfillment.ts new file mode 100644 index 0000000000..cb7f396b0f --- /dev/null +++ b/packages/medusa/src/types/fulfillment.ts @@ -0,0 +1,39 @@ +import { + Address, + ClaimOrder, + Discount, + LineItem, + Order, + Payment, + ShippingMethod, +} from "../models" + +export type 
FulFillmentItemType = { + item_id: string + quantity: number +} + +export type FulfillmentItemPartition = { + shipping_method: ShippingMethod + items: LineItem[] +} + +export type CreateShipmentConfig = { + metadata: Record + no_notification?: boolean +} + +export type CreateFulfillmentOrder = Omit & { + is_claim?: boolean + email?: string + payments: Payment[] + discounts: Discount[] + currency_code: string + tax_rate: number | null + region_id: string + display_id: number + billing_address: Address + items: LineItem[] + shipping_methods: ShippingMethod[] + no_notification: boolean +} diff --git a/packages/medusa/src/types/gift-card.ts b/packages/medusa/src/types/gift-card.ts new file mode 100644 index 0000000000..017cb78b78 --- /dev/null +++ b/packages/medusa/src/types/gift-card.ts @@ -0,0 +1,23 @@ +export type CreateGiftCardInput = { + value?: number + balance?: number + ends_at?: Date + is_disabled?: boolean + region_id: string + metadata?: Record +} + +export type UpdateGiftCardInput = { + balance?: number + ends_at?: Date + is_disabled?: boolean + region_id?: string + metadata?: Record +} + +export type CreateGiftCardTransactionInput = { + gift_card_id: string + order_id: string + amount: number + created_at: Date +} diff --git a/packages/medusa/src/types/global.ts b/packages/medusa/src/types/global.ts index 048df9f459..5b55dde52d 100644 --- a/packages/medusa/src/types/global.ts +++ b/packages/medusa/src/types/global.ts @@ -2,17 +2,24 @@ import { AwilixContainer } from "awilix" import { Logger as _Logger } from "winston" import { LoggerOptions } from "typeorm" import { Customer, User } from "../models" +import { FindConfig, RequestQueryFields } from "./common" declare global { // eslint-disable-next-line @typescript-eslint/no-namespace namespace Express { interface Request { - user?: User | Customer + user?: (User | Customer) & { userId?: string } scope: MedusaContainer + validatedQuery: RequestQueryFields & Record + validatedBody: unknown + listConfig: FindConfig + retrieveConfig: FindConfig + filterableFields: Record } } } + export type ClassConstructor = { new (...args: unknown[]): T } @@ -23,6 +30,8 @@ export type MedusaContainer = AwilixContainer & { export type Logger = _Logger & { progress: (activityId: string, msg: string) => void + info: (msg: string) => void + warn: (msg: string) => void } export type ConfigModule = { @@ -43,6 +52,7 @@ export type ConfigModule = { store_cors?: string admin_cors?: string } + featureFlags: Record plugins: ( | { resolve: string diff --git a/packages/medusa/src/types/price-list.ts b/packages/medusa/src/types/price-list.ts index bee6195c88..0a009d831f 100644 --- a/packages/medusa/src/types/price-list.ts +++ b/packages/medusa/src/types/price-list.ts @@ -124,6 +124,8 @@ export type CreatePriceListInput = { status?: PriceListStatus prices: AdminPriceListPricesCreateReq[] customer_groups?: { id: string }[] + starts_at?: Date + ends_at?: Date } export type UpdatePriceListInput = Partial< @@ -153,3 +155,11 @@ export type PriceListPriceCreateInput = { min_quantity?: number max_quantity?: number } + +export type PriceListLoadConfig = { + include_discount_prices?: boolean + customer_id?: string + cart_id?: string + region_id?: string + currency_code?: string +} diff --git a/packages/medusa/src/types/pricing.ts b/packages/medusa/src/types/pricing.ts new file mode 100644 index 0000000000..77118975e6 --- /dev/null +++ b/packages/medusa/src/types/pricing.ts @@ -0,0 +1,38 @@ +import { MoneyAmount, ProductVariant, Product, ShippingOption } from "../models" 
+import { TaxServiceRate } from "./tax-service" +import { PriceSelectionContext } from "../interfaces/price-selection-strategy" + +export type ProductVariantPricing = { + prices: MoneyAmount[] + original_price: number | null + calculated_price: number | null + calculated_price_type?: string | null +} & TaxedPricing + +export type TaxedPricing = { + original_price_incl_tax: number | null + calculated_price_incl_tax: number | null + original_tax: number | null + calculated_tax: number | null + tax_rates: TaxServiceRate[] | null +} + +export type PricingContext = { + price_selection: PriceSelectionContext + automatic_taxes: boolean + tax_rate: number | null +} + +export type ShippingOptionPricing = { + price_incl_tax: number | null + tax_rates: TaxServiceRate[] | null +} + +export type PricedShippingOption = Partial & + ShippingOptionPricing + +export type PricedVariant = Partial & ProductVariantPricing + +export type PricedProduct = Omit, "variants"> & { + variants: PricedVariant[] +} diff --git a/packages/medusa/src/types/product-variant.ts b/packages/medusa/src/types/product-variant.ts index 73d2ff85cd..14699edaac 100644 --- a/packages/medusa/src/types/product-variant.ts +++ b/packages/medusa/src/types/product-variant.ts @@ -62,7 +62,7 @@ export type CreateProductVariantInput = { export type UpdateProductVariantInput = { title?: string - product_id: string + product_id?: string sku?: string barcode?: string ean?: string @@ -72,14 +72,15 @@ export type UpdateProductVariantInput = { manage_inventory?: boolean hs_code?: string origin_country?: string + variant_rank?: number mid_code?: string material?: string weight?: number length?: number height?: number width?: number - options: ProductVariantOption[] - prices: ProductVariantPrice[] + options?: ProductVariantOption[] + prices?: ProductVariantPrice[] metadata?: object } diff --git a/packages/medusa/src/types/product.ts b/packages/medusa/src/types/product.ts index 5b27d0e2e5..c9eab96276 100644 --- a/packages/medusa/src/types/product.ts +++ b/packages/medusa/src/types/product.ts @@ -7,21 +7,25 @@ import { IsString, ValidateNested, } from "class-validator" +import { FindOperator } from "typeorm" +import { Product, ProductOptionValue, ProductStatus } from "../models" import { optionalBooleanMapper } from "../utils/validators/is-boolean" import { IsType } from "../utils/validators/is-type" -import { DateComparisonOperator, StringComparisonOperator } from "./common" - -export enum ProductStatus { - DRAFT = "draft", - PROPOSED = "proposed", - PUBLISHED = "published", - REJECTED = "rejected", -} +import { + DateComparisonOperator, + FindConfig, + Selector, + StringComparisonOperator, +} from "./common" +import { PriceListLoadConfig } from "./price-list" +/** + * API Level DTOs + Validation rules + */ export class FilterableProductProps { - @IsString() @IsOptional() - id?: string + @IsType([String, [String]]) + id?: string | string[] @IsString() @IsOptional() @@ -123,3 +127,115 @@ export class FilterableProductTypeProps { @IsOptional() q?: string } + +/** + * Service Level DTOs + */ + +export type CreateProductInput = { + title: string + subtitle?: string + profile_id?: string + description?: string + is_giftcard?: boolean + discountable?: boolean + images?: string[] + thumbnail?: string + handle?: string + status?: ProductStatus + type?: CreateProductProductTypeInput + collection_id?: string + tags?: CreateProductProductTagInput[] + options?: CreateProductProductOption[] + variants?: CreateProductProductVariantInput[] + weight?: number + length?: 
number + height?: number + width?: number + hs_code?: string + origin_country?: string + mid_code?: string + material?: string + metadata?: Record +} + +export type CreateProductProductTagInput = { + id?: string + value: string +} + +export type CreateProductProductTypeInput = { + id?: string + value: string +} + +export type CreateProductProductVariantInput = { + title: string + sku?: string + ean?: string + upc?: string + barcode?: string + hs_code?: string + inventory_quantity?: number + allow_backorder?: boolean + manage_inventory?: boolean + weight?: number + length?: number + height?: number + width?: number + origin_country?: string + mid_code?: string + material?: string + metadata?: object + prices?: CreateProductProductVariantPriceInput[] + options?: { value: string }[] +} + +export type UpdateProductProductVariantDTO = { + id?: string + title?: string + sku?: string + ean?: string + upc?: string + barcode?: string + hs_code?: string + inventory_quantity?: number + allow_backorder?: boolean + manage_inventory?: boolean + weight?: number + length?: number + height?: number + width?: number + origin_country?: string + mid_code?: string + material?: string + metadata?: object + prices?: CreateProductProductVariantPriceInput[] + options?: { value: string; option_id: string }[] +} + +export type CreateProductProductOption = { + title: string +} + +export type CreateProductProductVariantPriceInput = { + region_id?: string + currency_code?: string + amount: number + min_quantity?: number + max_quantity?: number +} + +export type UpdateProductInput = Omit< + Partial, + "variants" +> & { + variants?: UpdateProductProductVariantDTO[] +} + +export type ProductOptionInput = { + title: string + values?: ProductOptionValue[] +} + +export type FindProductConfig = FindConfig & PriceListLoadConfig diff --git a/packages/medusa/src/types/shipping-options.ts b/packages/medusa/src/types/shipping-options.ts new file mode 100644 index 0000000000..4b03a84168 --- /dev/null +++ b/packages/medusa/src/types/shipping-options.ts @@ -0,0 +1,73 @@ +import { Cart, Order } from ".." 
+import { ShippingOptionPriceType } from "../models/shipping-option" +import { + RequirementType, + ShippingOptionRequirement, +} from "../models/shipping-option-requirement" + +export type ShippingRequirement = { + type: RequirementType + amount: number + id: string +} + +export type ShippingMethodUpdate = { + data?: any + price?: number + return_id?: string + swap_id?: string + order_id?: string + claim_order_id?: string | null +} + +export type CreateShippingMethod = { + data?: any + shipping_option_id?: string + price?: number + return_id?: string + swap_id?: string + cart_id?: string + order_id?: string + draft_order_id?: string + claim_order_id?: string +} + +export type CreateShippingMethodDto = CreateShippingMethod & { + cart?: Cart + order?: Order +} + +export type CreateShippingOptionInput = { + price_type: ShippingOptionPriceType + name: string + region_id: string + profile_id: string + provider_id: string + data: Record + + amount?: number + is_return?: boolean + admin_only?: boolean + metadata?: Record + requirements?: ShippingOptionRequirement[] +} + +export type CreateCustomShippingOptionInput = { + price: number + shipping_option_id: string + cart_id?: string + metadata?: Record +} + +export type UpdateShippingOptionInput = { + metadata?: Record + price_type?: ShippingOptionPriceType + amount?: number + name?: string + admin_only?: boolean + is_return?: boolean + requirements?: ShippingOptionRequirement[] + region_id?: string + provider_id?: string + data?: string +} diff --git a/packages/medusa/src/types/store.ts b/packages/medusa/src/types/store.ts new file mode 100644 index 0000000000..0103a66fa8 --- /dev/null +++ b/packages/medusa/src/types/store.ts @@ -0,0 +1,9 @@ +export type UpdateStoreInput = { + name?: string + swap_link_template?: string + payment_link_template?: string + invite_link_template?: string + default_currency_code?: string + currencies?: string[] + metadata?: Record +} diff --git a/packages/medusa/src/utils/build-query.ts b/packages/medusa/src/utils/build-query.ts index f61123741d..0859b641f3 100644 --- a/packages/medusa/src/utils/build-query.ts +++ b/packages/medusa/src/utils/build-query.ts @@ -4,21 +4,19 @@ import { Selector, Writable, } from "../types/common" -import { FindOperator, In, Raw } from "typeorm" +import { FindOperator, In, IsNull, Raw } from "typeorm" /** -* Used to build TypeORM queries. -* @param selector The selector -* @param config The config -* @return The QueryBuilderConfig -*/ -export function buildQuery( - selector: Selector, + * Used to build TypeORM queries. + * @param selector The selector + * @param config The config + * @return The QueryBuilderConfig + */ +export function buildQuery( + selector: TWhereKeys, config: FindConfig = {} -): ExtendedFindConfig { - const build = ( - obj: Selector - ): Partial> => { +): ExtendedFindConfig { + const build = (obj: Selector): Partial> => { return Object.entries(obj).reduce((acc, [key, value]: any) => { // Undefined values indicate that they have no significance to the query. 
// If the query is looking for rows where a column is not set it should use null instead of undefined @@ -26,6 +24,11 @@ export function buildQuery( return acc } + if (value === null) { + acc[key] = IsNull() + return acc + } + const subquery: { operator: "<" | ">" | "<=" | ">=" value: unknown @@ -75,13 +78,10 @@ export function buildQuery( } return acc - }, {} as Partial>) + }, {} as Partial>) } - const query: FindConfig & { - where: Partial> - withDeleted?: boolean - } = { + const query: ExtendedFindConfig = { where: build(selector), } @@ -110,4 +110,4 @@ export function buildQuery( } return query -} \ No newline at end of file +} diff --git a/packages/medusa/src/utils/countries.js b/packages/medusa/src/utils/countries.ts similarity index 99% rename from packages/medusa/src/utils/countries.js rename to packages/medusa/src/utils/countries.ts index 20f91a1a04..3ca7aed34f 100644 --- a/packages/medusa/src/utils/countries.js +++ b/packages/medusa/src/utils/countries.ts @@ -1,4 +1,11 @@ -export const countries = [ +export type Country = { + alpha2: string + name: string + alpha3: string + numeric: string +} + +export const countries: Country[] = [ { alpha2: "AF", name: "Afghanistan", alpha3: "AFG", numeric: "004" }, { alpha2: "AL", name: "Albania", alpha3: "ALB", numeric: "008" }, { alpha2: "DZ", name: "Algeria", alpha3: "DZA", numeric: "012" }, diff --git a/packages/medusa/src/utils/currencies.js b/packages/medusa/src/utils/currencies.ts similarity index 98% rename from packages/medusa/src/utils/currencies.js rename to packages/medusa/src/utils/currencies.ts index bcaa487593..81d4e75205 100644 --- a/packages/medusa/src/utils/currencies.js +++ b/packages/medusa/src/utils/currencies.ts @@ -1,4 +1,14 @@ -export const currencies = { +export type Currency = { + symbol: string + name: string + symbol_native: string + decimal_digits: number + rounding: number + code: string + name_plural: string +} + +export const currencies: Record = { USD: { symbol: "$", name: "US Dollar", diff --git a/packages/medusa/src/utils/db-aware-column.ts b/packages/medusa/src/utils/db-aware-column.ts index 6f46899d18..993e359061 100644 --- a/packages/medusa/src/utils/db-aware-column.ts +++ b/packages/medusa/src/utils/db-aware-column.ts @@ -1,6 +1,6 @@ -import { getConfigFile } from "medusa-core-utils" -import path from "path" import { Column, ColumnOptions, ColumnType } from "typeorm" +import path from "path" +import { getConfigFile } from "medusa-core-utils" const pgSqliteTypeMapping: { [key: string]: ColumnType } = { increment: "rowid", diff --git a/packages/medusa/src/utils/feature-flag-decorators.ts b/packages/medusa/src/utils/feature-flag-decorators.ts new file mode 100644 index 0000000000..0e634634cc --- /dev/null +++ b/packages/medusa/src/utils/feature-flag-decorators.ts @@ -0,0 +1,66 @@ +import { getConfigFile } from "medusa-core-utils" +import { Column, ColumnOptions, Entity, EntityOptions } from "typeorm" +import featureFlagsLoader from "../loaders/feature-flags" +import path from "path" +import { ConfigModule } from "../types/global" +import { FlagRouter } from "./flag-router" + +export function FeatureFlagColumn( + featureFlag: string, + columnOptions: ColumnOptions +): PropertyDecorator { + const featureFlagRouter = getFeatureFlagRouter() + + if (!featureFlagRouter.isFeatureEnabled(featureFlag)) { + return (): void => { + // noop + } + } + + return Column(columnOptions) +} + +export function FeatureFlagDecorators( + featureFlag: string, + decorators: PropertyDecorator[] +): PropertyDecorator { + const 
featureFlagRouter = getFeatureFlagRouter() + + if (!featureFlagRouter.isFeatureEnabled(featureFlag)) { + return (): void => { + // noop + } + } + // eslint-disable-next-line @typescript-eslint/ban-types + return (target: Object, propertyKey: string | symbol): void => { + decorators.forEach((decorator) => { + decorator(target, propertyKey) + }) + } +} + +export function FeatureFlagEntity( + featureFlag: string, + name?: string, + options?: EntityOptions +): ClassDecorator { + // eslint-disable-next-line @typescript-eslint/ban-types + return function (target: Function): void { + target["isFeatureEnabled"] = function (): boolean { + const featureFlagRouter = getFeatureFlagRouter() + + // const featureFlagRouter = featureFlagsLoader(configModule) + return featureFlagRouter.isFeatureEnabled(featureFlag) + } + Entity(name, options)(target) + } +} + +function getFeatureFlagRouter(): FlagRouter { + const { configModule } = getConfigFile( + path.resolve("."), + `medusa-config` + ) as { configModule: ConfigModule } + + return featureFlagsLoader(configModule) +} diff --git a/packages/medusa/src/utils/flag-router.ts b/packages/medusa/src/utils/flag-router.ts new file mode 100644 index 0000000000..8ef8d8ea62 --- /dev/null +++ b/packages/medusa/src/utils/flag-router.ts @@ -0,0 +1,17 @@ +import { IFlagRouter } from "../types/feature-flags" + +export class FlagRouter implements IFlagRouter { + private flags: Record = {} + + constructor(flags: Record) { + this.flags = flags + } + + public isFeatureEnabled(key: string): boolean { + return !!this.flags[key] + } + + public setFlag(key: string, value = true): void { + this.flags[key] = value + } +} diff --git a/packages/medusa/src/utils/format-registration-name.js b/packages/medusa/src/utils/format-registration-name.ts similarity index 77% rename from packages/medusa/src/utils/format-registration-name.js rename to packages/medusa/src/utils/format-registration-name.ts index 85ef6f6158..3c65202cfa 100644 --- a/packages/medusa/src/utils/format-registration-name.js +++ b/packages/medusa/src/utils/format-registration-name.ts @@ -1,20 +1,20 @@ -import path from "path" +import { parse } from "path" /** * Formats a filename into the correct container resolution name. 
* Names are camelCase formatted and namespaced by the folder i.e: * models/example-person -> examplePersonModel - * @param {string} fn - the full path of the file - * @return {string} the formatted name + * @param path - the full path of the file + * @return the formatted name */ -function formatRegistrationName(fn) { - const parsed = path.parse(fn) - const parsedDir = path.parse(parsed.dir) +function formatRegistrationName(path: string): string { + const parsed = parse(path) + const parsedDir = parse(parsed.dir) const rawname = parsed.name let namespace = parsedDir.name if (namespace.startsWith("__")) { - const parsedCoreDir = path.parse(parsedDir.dir) + const parsedCoreDir = parse(parsedDir.dir) namespace = parsedCoreDir.name } diff --git a/packages/medusa/src/utils/get-query-config.ts b/packages/medusa/src/utils/get-query-config.ts index b7c5e72008..c876a05245 100644 --- a/packages/medusa/src/utils/get-query-config.ts +++ b/packages/medusa/src/utils/get-query-config.ts @@ -1,10 +1,7 @@ import { pick } from "lodash" -import { FindConfig } from "../types/common" - -type BaseEntity = { - id: string - created_at: Date -} +import { FindConfig, QueryConfig, RequestQueryFields } from "../types/common" +import { MedusaError } from "medusa-core-utils/dist" +import { BaseEntity } from "../interfaces/models/base-entity" export function pickByConfig( obj: TModel | TModel[], @@ -81,3 +78,73 @@ export function getListConfig( order: orderBy, } } + +export function prepareListQuery< + T extends RequestQueryFields, + TEntity extends BaseEntity +>(validated: T, queryConfig?: QueryConfig) { + const { order, fields, expand, limit, offset } = validated + + let expandRelations: string[] | undefined = undefined + if (expand) { + expandRelations = expand.split(",") + } + + let expandFields: (keyof TEntity)[] | undefined = undefined + if (fields) { + expandFields = fields.split(",") as (keyof TEntity)[] + } + + let orderBy: { [k: symbol]: "DESC" | "ASC" } | undefined + if (typeof order !== "undefined") { + let orderField = order + if (order.startsWith("-")) { + const [, field] = order.split("-") + orderField = field + orderBy = { [field]: "DESC" } + } else { + orderBy = { [order]: "ASC" } + } + + if (queryConfig?.allowedFields?.length && !queryConfig?.allowedFields.includes(orderField)) { + throw new MedusaError( + MedusaError.Types.INVALID_DATA, + `Order field ${orderField} is not valid` + ) + } + } + + return getListConfig( + queryConfig?.defaultFields as (keyof TEntity)[], + (queryConfig?.defaultRelations ?? []) as string[], + expandFields, + expandRelations, + limit ?? queryConfig?.defaultLimit, + offset ?? 0, + orderBy + ) +} + +export function prepareRetrieveQuery< + T extends RequestQueryFields, + TEntity extends BaseEntity +>(validated: T, queryConfig?: QueryConfig) { + const { fields, expand } = validated + + let expandRelations: string[] = [] + if (expand) { + expandRelations = expand.split(",") + } + + let expandFields: (keyof TEntity)[] | undefined = undefined + if (fields) { + expandFields = fields.split(",") as (keyof TEntity)[] + } + + return getRetrieveConfig( + queryConfig?.defaultFields as (keyof TEntity)[], + (queryConfig?.defaultRelations ?? 
[]) as string[], + expandFields, + expandRelations + ) +} diff --git a/packages/medusa/src/utils/omit-relation-if-exists.ts b/packages/medusa/src/utils/omit-relation-if-exists.ts new file mode 100644 index 0000000000..938b43fdbc --- /dev/null +++ b/packages/medusa/src/utils/omit-relation-if-exists.ts @@ -0,0 +1,15 @@ +/** + * Removes a relation from a list of relations, if it is present. + * @param relations relations from which a relation should be removed + * @param relation relation to be removed + * @returns tuple containing the new relations and a boolean indicating whether the relation was found in the relations array + */ +export const omitRelationIfExists = ( + relations: string[], + relation: string +): [string[], boolean] => { + const filteredRelations = relations.filter((rel) => rel !== relation) + const includesRelation = relations.length !== filteredRelations.length + + return [filteredRelations, includesRelation] +} diff --git a/packages/medusa/src/utils/set-metadata.ts b/packages/medusa/src/utils/set-metadata.ts index d27cddc5ed..e56ca95050 100644 --- a/packages/medusa/src/utils/set-metadata.ts +++ b/packages/medusa/src/utils/set-metadata.ts @@ -1,13 +1,13 @@ import { MedusaError } from "medusa-core-utils/dist" /** -* Dedicated method to set metadata. -* @param obj - the entity to apply metadata to. -* @param metadata - the metadata to set -* @return resolves to the updated result. -*/ + * Dedicated method to set metadata. + * @param obj - the entity to apply metadata to. + * @param metadata - the metadata to set + * @return resolves to the updated result. + */ export function setMetadata( - obj: { metadata: Record<string, unknown> }, + obj: { metadata: Record<string, unknown> | null }, metadata: Record<string, unknown> ): Record<string, unknown> { const existing = obj.metadata || {} @@ -26,4 +26,4 @@ export function setMetadata( ...existing, ...newData, } -} \ No newline at end of file +} diff --git a/packages/medusa/src/utils/sleep.ts b/packages/medusa/src/utils/sleep.ts new file mode 100644 index 0000000000..f294976b85 --- /dev/null +++ b/packages/medusa/src/utils/sleep.ts @@ -0,0 +1,5 @@ +export async function sleep(ms: number) { + return new Promise((resolve) => { + setTimeout(resolve, ms) + }) +} diff --git a/packages/medusa/tsconfig.json b/packages/medusa/tsconfig.json index d81ce7302b..6d92a61c7a 100644 --- a/packages/medusa/tsconfig.json +++ b/packages/medusa/tsconfig.json @@ -1,6 +1,9 @@ { "compilerOptions": { - "lib": ["es5", "es6"], + "lib": [ + "es5", + "es6" + ], "target": "es5", "outDir": "./dist", "esModuleInterop": true, @@ -18,7 +21,10 @@ "skipLibCheck": true, "downlevelIteration": true // to use ES5 specific tooling }, - "include": ["./src/**/*", "index.d.ts"], + "include": [ + "./src/**/*", + "index.d.ts" + ], "exclude": [ "./dist/**/*", "./src/**/__tests__", diff --git a/yarn.lock b/yarn.lock index 6086513da2..d8eae4c09a 100644 --- a/yarn.lock +++ b/yarn.lock @@ -3516,6 +3516,13 @@ dependencies: regenerator-runtime "^0.13.4" +"@babel/runtime@^7.10.4": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.6.tgz#6a1ef59f838debd670421f8c7f2cbb8da9751580" + integrity sha512-t9wi7/AW6XtKahAe20Yw0/mMljKq0B1r2fPdvaAdV/KPDZewFXdaaa6K7lxmZBZ8FBNpCiAT6iHPmd6QO9bKfQ== + dependencies: + regenerator-runtime "^0.13.4" + "@babel/runtime@^7.11.2": version "7.15.3" resolved "https://registry.npmjs.org/@babel/runtime/-/runtime-7.15.3.tgz" @@ -3681,6 +3688,222 @@ resolved "https://registry.npmjs.org/@bcoe/v8-coverage/-/v8-coverage-0.2.3.tgz" integrity sha512-0hYQ8SB4Db5zvZB4axdMHGwEaQjkZzFjQiN9LVYvIFB2nSUHW9tYpxWriPrWDASIxiaXax83REcLxuSdnGPZtw==
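A minimal sketch of how the new FlagRouter can be exercised follows; the deep import path and the "sales_channels" key are assumptions for illustration, and in the server the flag map is normally built by featureFlagsLoader from medusa-config rather than hard-coded:

// Hypothetical usage of the FlagRouter introduced in this change.
// The import path is an assumption; inside the package the class lives at src/utils/flag-router.
import { FlagRouter } from "@medusajs/medusa/dist/utils/flag-router"

// Flags are a plain string -> boolean map.
const flagRouter = new FlagRouter({ sales_channels: false })

console.log(flagRouter.isFeatureEnabled("sales_channels")) // false

// setFlag defaults its second argument to true, so this enables the flag.
flagRouter.setFlag("sales_channels")
console.log(flagRouter.isFeatureEnabled("sales_channels")) // true

// The FeatureFlagEntity decorator above wraps TypeORM's Entity() and attaches a
// static isFeatureEnabled() that consults a router built from medusa-config, e.g.:
//   @FeatureFlagEntity("sales_channels")
//   export class SalesChannel { /* ... */ }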
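A similar minimal sketch for the smaller helpers touched here (omitRelationIfExists, setMetadata and sleep), with the deep import paths again being assumptions; prepareListQuery and prepareRetrieveQuery follow the same spirit, turning validated query params plus a QueryConfig (defaultFields, defaultRelations, allowedFields, defaultLimit) into a find config:

// Hypothetical usage of the utility helpers added or updated in this change.
// Import paths are assumptions; inside the package they live under src/utils.
import { omitRelationIfExists } from "@medusajs/medusa/dist/utils/omit-relation-if-exists"
import { setMetadata } from "@medusajs/medusa/dist/utils/set-metadata"
import { sleep } from "@medusajs/medusa/dist/utils/sleep"

async function demo(): Promise<void> {
  // Drop the "customer" relation if it was requested, and remember whether it was there.
  const [relations, hadCustomer] = omitRelationIfExists(
    ["items", "customer", "region"],
    "customer"
  )
  // relations -> ["items", "region"], hadCustomer -> true

  // setMetadata merges new keys into an entity's existing metadata and,
  // after this change, tolerates a null metadata column.
  const order = { metadata: { source: "import" } }
  const metadata = setMetadata(order, { priority: "high" })
  // metadata -> { source: "import", priority: "high" }

  // sleep is a promisified setTimeout.
  await sleep(100)

  console.log(relations, hadCustomer, metadata)
}

void demo()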
+"@changesets/apply-release-plan@^6.0.0": + version "6.0.0" + resolved "https://registry.yarnpkg.com/@changesets/apply-release-plan/-/apply-release-plan-6.0.0.tgz#6c663ff99d919bba3902343d76c35cbbbb046520" + integrity sha512-gp6nIdVdfYdwKww2+f8whckKmvfE4JEm4jJgBhTmooi0uzHWhnxvk6JIzQi89qEAMINN0SeVNnXiAtbFY0Mj3w== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/config" "^2.0.0" + "@changesets/get-version-range-type" "^0.3.2" + "@changesets/git" "^1.3.2" + "@changesets/types" "^5.0.0" + "@manypkg/get-packages" "^1.1.3" + detect-indent "^6.0.0" + fs-extra "^7.0.1" + lodash.startcase "^4.4.0" + outdent "^0.5.0" + prettier "^1.19.1" + resolve-from "^5.0.0" + semver "^5.4.1" + +"@changesets/assemble-release-plan@^5.1.3": + version "5.1.3" + resolved "https://registry.yarnpkg.com/@changesets/assemble-release-plan/-/assemble-release-plan-5.1.3.tgz#b415c5db64e5a30c53aed8c1adc5ab4c4aaad283" + integrity sha512-I+TTkUoqvxBEuDLoJfJYKDXIJ+nyiTbVJ8KGhpXEsLq4N/ms/AStSbouJwF2d/p3cB+RCPr5+gXh31GSN4kA7w== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/errors" "^0.1.4" + "@changesets/get-dependents-graph" "^1.3.2" + "@changesets/types" "^5.0.0" + "@manypkg/get-packages" "^1.1.3" + semver "^5.4.1" + +"@changesets/changelog-git@^0.1.11": + version "0.1.11" + resolved "https://registry.yarnpkg.com/@changesets/changelog-git/-/changelog-git-0.1.11.tgz#80eb45d3562aba2164f25ccc31ac97b9dcd1ded3" + integrity sha512-sWJvAm+raRPeES9usNpZRkooeEB93lOpUN0Lmjz5vhVAb7XGIZrHEJ93155bpE1S0c4oJ5Di9ZWgzIwqhWP/Wg== + dependencies: + "@changesets/types" "^5.0.0" + +"@changesets/changelog-github@^0.4.5": + version "0.4.5" + resolved "https://registry.yarnpkg.com/@changesets/changelog-github/-/changelog-github-0.4.5.tgz#cbdebcf4bb2fa94635db6fba4a474b4f36e6ce79" + integrity sha512-J36QJml3mXYm88PLY2qGepmb7j6LA3NM/wuUy6XBwh14qzVTRek+3Xww5oqeZhpK5lK8ELxGahdhSdYQzMv0kA== + dependencies: + "@changesets/get-github-info" "^0.5.1" + "@changesets/types" "^5.0.0" + dotenv "^8.1.0" + +"@changesets/cli@^2.23.0": + version "2.23.0" + resolved "https://registry.yarnpkg.com/@changesets/cli/-/cli-2.23.0.tgz#e325b2d1b0484188671f684773b8cd5d42d068f1" + integrity sha512-Gi3tMi0Vr6eNd8GX6q73tbOm9XOzGfuLEm4PYVeWG2neg5DlRGNOjYwrFULJ/An3N9MHtHn4r5h1Qvnju9Ijug== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/apply-release-plan" "^6.0.0" + "@changesets/assemble-release-plan" "^5.1.3" + "@changesets/changelog-git" "^0.1.11" + "@changesets/config" "^2.0.0" + "@changesets/errors" "^0.1.4" + "@changesets/get-dependents-graph" "^1.3.2" + "@changesets/get-release-plan" "^3.0.9" + "@changesets/git" "^1.3.2" + "@changesets/logger" "^0.0.5" + "@changesets/pre" "^1.0.11" + "@changesets/read" "^0.5.5" + "@changesets/types" "^5.0.0" + "@changesets/write" "^0.1.8" + "@manypkg/get-packages" "^1.1.3" + "@types/is-ci" "^3.0.0" + "@types/semver" "^6.0.0" + ansi-colors "^4.1.3" + chalk "^2.1.0" + enquirer "^2.3.0" + external-editor "^3.1.0" + fs-extra "^7.0.1" + human-id "^1.0.2" + is-ci "^3.0.1" + meow "^6.0.0" + outdent "^0.5.0" + p-limit "^2.2.0" + preferred-pm "^3.0.0" + resolve-from "^5.0.0" + semver "^5.4.1" + spawndamnit "^2.0.0" + term-size "^2.1.0" + tty-table "^4.1.5" + +"@changesets/config@^2.0.0": + version "2.0.0" + resolved "https://registry.yarnpkg.com/@changesets/config/-/config-2.0.0.tgz#1770fdfeba2155cf07154c37e96b55cbd27969f0" + integrity sha512-r5bIFY6CN3K6SQ+HZbjyE3HXrBIopONR47mmX7zUbORlybQXtympq9rVAOzc0Oflbap8QeIexc+hikfZoREXDg== + dependencies: + "@changesets/errors" "^0.1.4" + "@changesets/get-dependents-graph" 
"^1.3.2" + "@changesets/logger" "^0.0.5" + "@changesets/types" "^5.0.0" + "@manypkg/get-packages" "^1.1.3" + fs-extra "^7.0.1" + micromatch "^4.0.2" + +"@changesets/errors@^0.1.4": + version "0.1.4" + resolved "https://registry.yarnpkg.com/@changesets/errors/-/errors-0.1.4.tgz#f79851746c43679a66b383fdff4c012f480f480d" + integrity sha512-HAcqPF7snsUJ/QzkWoKfRfXushHTu+K5KZLJWPb34s4eCZShIf8BFO3fwq6KU8+G7L5KdtN2BzQAXOSXEyiY9Q== + dependencies: + extendable-error "^0.1.5" + +"@changesets/get-dependents-graph@^1.3.2": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@changesets/get-dependents-graph/-/get-dependents-graph-1.3.2.tgz#f3ec7ce75f4afb6e3e4b6a87fde065f552c85998" + integrity sha512-tsqA6qZRB86SQuApSoDvI8yEWdyIlo/WLI4NUEdhhxLMJ0dapdeT6rUZRgSZzK1X2nv5YwR0MxQBbDAiDibKrg== + dependencies: + "@changesets/types" "^5.0.0" + "@manypkg/get-packages" "^1.1.3" + chalk "^2.1.0" + fs-extra "^7.0.1" + semver "^5.4.1" + +"@changesets/get-github-info@^0.5.1": + version "0.5.1" + resolved "https://registry.yarnpkg.com/@changesets/get-github-info/-/get-github-info-0.5.1.tgz#5a20328b26f301b2193717abb32e73651e8811b7" + integrity sha512-w2yl3AuG+hFuEEmT6j1zDlg7GQLM/J2UxTmk0uJBMdRqHni4zXGe/vUlPfLom5KfX3cRfHc0hzGvloDPjWFNZw== + dependencies: + dataloader "^1.4.0" + node-fetch "^2.5.0" + +"@changesets/get-release-plan@^3.0.9": + version "3.0.9" + resolved "https://registry.yarnpkg.com/@changesets/get-release-plan/-/get-release-plan-3.0.9.tgz#d445660f3679cb65e05e02adfbca037a25b45943" + integrity sha512-5C1r4DcOjVxcCvPmXpymeyT6mdSTLCNiB2L+5uf19BRkDKndJdIQorH5Fe2XBR2nHUcZQFT+2TXDzCepat969w== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/assemble-release-plan" "^5.1.3" + "@changesets/config" "^2.0.0" + "@changesets/pre" "^1.0.11" + "@changesets/read" "^0.5.5" + "@changesets/types" "^5.0.0" + "@manypkg/get-packages" "^1.1.3" + +"@changesets/get-version-range-type@^0.3.2": + version "0.3.2" + resolved "https://registry.yarnpkg.com/@changesets/get-version-range-type/-/get-version-range-type-0.3.2.tgz#8131a99035edd11aa7a44c341cbb05e668618c67" + integrity sha512-SVqwYs5pULYjYT4op21F2pVbcrca4qA/bAA3FmFXKMN7Y+HcO8sbZUTx3TAy2VXulP2FACd1aC7f2nTuqSPbqg== + +"@changesets/git@^1.3.2": + version "1.3.2" + resolved "https://registry.yarnpkg.com/@changesets/git/-/git-1.3.2.tgz#336051d9a6d965806b1bc473559a9a2cc70773a6" + integrity sha512-p5UL+urAg0Nnpt70DLiBe2iSsMcDubTo9fTOD/61krmcJ466MGh71OHwdAwu1xG5+NKzeysdy1joRTg8CXcEXA== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/errors" "^0.1.4" + "@changesets/types" "^5.0.0" + "@manypkg/get-packages" "^1.1.3" + is-subdir "^1.1.1" + spawndamnit "^2.0.0" + +"@changesets/logger@^0.0.5": + version "0.0.5" + resolved "https://registry.yarnpkg.com/@changesets/logger/-/logger-0.0.5.tgz#68305dd5a643e336be16a2369cb17cdd8ed37d4c" + integrity sha512-gJyZHomu8nASHpaANzc6bkQMO9gU/ib20lqew1rVx753FOxffnCrJlGIeQVxNWCqM+o6OOleCo/ivL8UAO5iFw== + dependencies: + chalk "^2.1.0" + +"@changesets/parse@^0.3.13": + version "0.3.13" + resolved "https://registry.yarnpkg.com/@changesets/parse/-/parse-0.3.13.tgz#82788c1fc18da4750b07357a7a06142d0d975aa1" + integrity sha512-wh9Ifa0dungY6d2nMz6XxF6FZ/1I7j+mEgPAqrIyKS64nifTh1Ua82qKKMMK05CL7i4wiB2NYc3SfnnCX3RVeA== + dependencies: + "@changesets/types" "^5.0.0" + js-yaml "^3.13.1" + +"@changesets/pre@^1.0.11": + version "1.0.11" + resolved "https://registry.yarnpkg.com/@changesets/pre/-/pre-1.0.11.tgz#46a56790fdceabd03407559bbf91340c8e83fb6a" + integrity 
sha512-CXZnt4SV9waaC9cPLm7818+SxvLKIDHUxaiTXnJYDp1c56xIexx1BNfC1yMuOdzO2a3rAIcZua5Odxr3dwSKfg== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/errors" "^0.1.4" + "@changesets/types" "^5.0.0" + "@manypkg/get-packages" "^1.1.3" + fs-extra "^7.0.1" + +"@changesets/read@^0.5.5": + version "0.5.5" + resolved "https://registry.yarnpkg.com/@changesets/read/-/read-0.5.5.tgz#9ed90ef3e9f1ba3436ba5580201854a3f4163058" + integrity sha512-bzonrPWc29Tsjvgh+8CqJ0apQOwWim0zheeD4ZK44ApSa/GudnZJTODtA3yNOOuQzeZmL0NUebVoHIurtIkA7w== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/git" "^1.3.2" + "@changesets/logger" "^0.0.5" + "@changesets/parse" "^0.3.13" + "@changesets/types" "^5.0.0" + chalk "^2.1.0" + fs-extra "^7.0.1" + p-filter "^2.1.0" + +"@changesets/types@^4.0.1": + version "4.1.0" + resolved "https://registry.yarnpkg.com/@changesets/types/-/types-4.1.0.tgz#fb8f7ca2324fd54954824e864f9a61a82cb78fe0" + integrity sha512-LDQvVDv5Kb50ny2s25Fhm3d9QSZimsoUGBsUioj6MC3qbMUCuC8GPIvk/M6IvXx3lYhAs0lwWUQLb+VIEUCECw== + +"@changesets/types@^5.0.0": + version "5.0.0" + resolved "https://registry.yarnpkg.com/@changesets/types/-/types-5.0.0.tgz#d5eb52d074bc0358ce47d54bca54370b907812a0" + integrity sha512-IT1kBLSbAgTS4WtpU6P5ko054hq12vk4tgeIFRVE7Vnm4a/wgbNvBalgiKP0MjEXbCkZbItiGQHkCGxYWR55sA== + +"@changesets/write@^0.1.8": + version "0.1.8" + resolved "https://registry.yarnpkg.com/@changesets/write/-/write-0.1.8.tgz#feed408f644c496bc52afc4dd1353670b4152ecb" + integrity sha512-oIHeFVMuP6jf0TPnKPpaFpvvAf3JBc+s2pmVChbeEgQTBTALoF51Z9kqxQfG4XONZPHZnqkmy564c7qohhhhTQ== + dependencies: + "@babel/runtime" "^7.10.4" + "@changesets/types" "^5.0.0" + fs-extra "^7.0.1" + human-id "^1.0.2" + prettier "^1.19.1" + "@cnakazawa/watch@^1.0.3": version "1.0.4" resolved "https://registry.npmjs.org/@cnakazawa/watch/-/watch-1.0.4.tgz" @@ -5450,6 +5673,28 @@ npmlog "^4.1.2" write-file-atomic "^2.3.0" +"@manypkg/find-root@^1.1.0": + version "1.1.0" + resolved "https://registry.yarnpkg.com/@manypkg/find-root/-/find-root-1.1.0.tgz#a62d8ed1cd7e7d4c11d9d52a8397460b5d4ad29f" + integrity sha512-mki5uBvhHzO8kYYix/WRy2WX8S3B5wdVSc9D6KcU5lQNglP2yt58/VfLuAK49glRXChosY8ap2oJ1qgma3GUVA== + dependencies: + "@babel/runtime" "^7.5.5" + "@types/node" "^12.7.1" + find-up "^4.1.0" + fs-extra "^8.1.0" + +"@manypkg/get-packages@^1.1.3": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@manypkg/get-packages/-/get-packages-1.1.3.tgz#e184db9bba792fa4693de4658cfb1463ac2c9c47" + integrity sha512-fo+QhuU3qE/2TQMQmbVMqaQ6EWbMhi4ABWP+O4AM1NqPBuy0OrApV5LO6BrrgnhtAHS2NH6RrVk9OL181tTi8A== + dependencies: + "@babel/runtime" "^7.5.5" + "@changesets/types" "^4.0.1" + "@manypkg/find-root" "^1.1.0" + fs-extra "^8.1.0" + globby "^11.0.0" + read-yaml-file "^1.1.0" + "@mapbox/node-pre-gyp@^1.0.0": version "1.0.9" resolved "https://registry.yarnpkg.com/@mapbox/node-pre-gyp/-/node-pre-gyp-1.0.9.tgz#09a8781a3a036151cdebbe8719d6f8b25d4058bc" @@ -7379,13 +7624,6 @@ "@types/connect" "*" "@types/node" "*" -"@types/bson@*", "@types/bson@1.x || 4.0.x": - version "4.0.5" - resolved "https://registry.npmjs.org/@types/bson/-/bson-4.0.5.tgz" - integrity sha512-vVLwMUqhYJSQ/WKcE60eFqcyuWse5fGH+NMAXHuKrUAPoryq3ATxk5o4bgYNtg5aOM4APVg7Hnb3ASqUYG0PKg== - dependencies: - "@types/node" "*" - "@types/cacheable-request@^6.0.1": version "6.0.2" resolved "https://registry.npmjs.org/@types/cacheable-request/-/cacheable-request-6.0.2.tgz" @@ -7517,7 +7755,7 @@ "@types/qs" "*" "@types/range-parser" "*" -"@types/express@^4.17.13": +"@types/express@*", 
"@types/express@^4.17.13": version "4.17.13" resolved "https://registry.npmjs.org/@types/express/-/express-4.17.13.tgz" integrity sha512-6bSZTPaTIACxn48l50SR+axgrqm6qXFIxrdAKaG6PaJk3+zuUr35hBlgT7vOmJcum+OEaIBLtHV/qloEAFITeA== @@ -7602,6 +7840,13 @@ "@types/through" "*" rxjs "^6.4.0" +"@types/is-ci@^3.0.0": + version "3.0.0" + resolved "https://registry.yarnpkg.com/@types/is-ci/-/is-ci-3.0.0.tgz#7e8910af6857601315592436f030aaa3ed9783c3" + integrity sha512-Q0Op0hdWbYd1iahB+IFNQcWXFq4O0Q5MwQP7uN0souuQ4rPg1vEYcnIOfr1gY+M+6rc8FGoRaBO1mOOvL29sEQ== + dependencies: + ci-info "^3.1.0" + "@types/is-function@^1.0.0": version "1.0.1" resolved "https://registry.npmjs.org/@types/is-function/-/is-function-1.0.1.tgz" @@ -7746,13 +7991,12 @@ dependencies: "@types/node" "*" -"@types/mongodb@^3.5.27": - version "3.6.20" - resolved "https://registry.npmjs.org/@types/mongodb/-/mongodb-3.6.20.tgz" - integrity sha512-WcdpPJCakFzcWWD9juKoZbRtQxKIMYF/JIAM4JrNHrMcnJL6/a2NWjXxW7fo9hxboxxkg+icff8d7+WIEvKgYQ== +"@types/multer@^1.4.7": + version "1.4.7" + resolved "https://registry.yarnpkg.com/@types/multer/-/multer-1.4.7.tgz#89cf03547c28c7bbcc726f029e2a76a7232cc79e" + integrity sha512-/SNsDidUFCvqqcWDwxv2feww/yqhNeTRL5CVoL3jU4Goc4kKEL10T7Eye65ZqPNi4HRx8sAEX59pV1aEH7drNA== dependencies: - "@types/bson" "*" - "@types/node" "*" + "@types/express" "*" "@types/node-fetch@2", "@types/node-fetch@^2.5.7": version "2.5.12" @@ -7782,6 +8026,11 @@ resolved "https://registry.npmjs.org/@types/node/-/node-12.20.41.tgz" integrity sha512-f6xOqucbDirG7LOzedpvzjP3UTmHttRou3Mosx3vL9wr9AIQGhcPgVnqa8ihpZYnxyM1rxeNCvTyukPKZtq10Q== +"@types/node@^12.7.1": + version "12.20.55" + resolved "https://registry.yarnpkg.com/@types/node/-/node-12.20.55.tgz#c329cbd434c42164f846b909bd6f85b5537f6240" + integrity sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ== + "@types/node@^14.0.10", "@types/node@^14.17.14": version "14.18.5" resolved "https://registry.npmjs.org/@types/node/-/node-14.18.5.tgz" @@ -7930,6 +8179,11 @@ resolved "https://registry.npmjs.org/@types/scheduler/-/scheduler-0.16.2.tgz" integrity sha512-hppQEBDmlwhFAXKJX2KnWLYu5yMfi91yazPb2l+lbJiwW+wdo1gNeRA+3RgNSO39WYX2euey41KEwnqesU2Jew== +"@types/semver@^6.0.0": + version "6.2.3" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-6.2.3.tgz#5798ecf1bec94eaa64db39ee52808ec0693315aa" + integrity sha512-KQf+QAMWKMrtBMsB8/24w53tEsxllMj6TuA80TT/5igJalLI/zm0L3oXRbIAl4Ohfc85gyHX/jhMwsVkmhLU4A== + "@types/semver@^7.3.6": version "7.3.9" resolved "https://registry.npmjs.org/@types/semver/-/semver-7.3.9.tgz" @@ -8929,6 +9183,11 @@ ansi-colors@^4.1.1: resolved "https://registry.npmjs.org/ansi-colors/-/ansi-colors-4.1.1.tgz" integrity sha512-JoX0apGbHaUJBNl6yF+p6JAFYZ666/hhCGKN5t9QFjbJQKUU/g8MNbFDbvfrgKXvI1QpZplPOnwIo99lX/AAmA== +ansi-colors@^4.1.3: + version "4.1.3" + resolved "https://registry.yarnpkg.com/ansi-colors/-/ansi-colors-4.1.3.tgz#37611340eb2243e70cc604cad35d63270d48781b" + integrity sha512-/6w/C21Pm1A7aZitlI5Ni/2J6FFQN8i1Cvz3kHABAAbw93v/NlvKdVOqz7CCWz/3iv/JplRSEEZ83XION15ovw== + ansi-escapes@^3.0.0, ansi-escapes@^3.1.0, ansi-escapes@^3.2.0: version "3.2.0" resolved "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-3.2.0.tgz" @@ -9219,6 +9478,16 @@ array.prototype.flat@^1.2.1, array.prototype.flat@^1.2.5: define-properties "^1.1.3" es-abstract "^1.19.0" +array.prototype.flat@^1.2.3: + version "1.3.0" + resolved 
"https://registry.yarnpkg.com/array.prototype.flat/-/array.prototype.flat-1.3.0.tgz#0b0c1567bf57b38b56b4c97b8aa72ab45e4adc7b" + integrity sha512-12IUEkHsAhA4DY5s0FPgNXIdc8VRSqD9Zp78a5au9abH/SOBrsp082JOWFNTjkMozh8mqcdiKuaLGhPeYztxSw== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + es-abstract "^1.19.2" + es-shim-unscopables "^1.0.0" + array.prototype.flatmap@^1.2.1, array.prototype.flatmap@^1.2.5: version "1.2.5" resolved "https://registry.npmjs.org/array.prototype.flatmap/-/array.prototype.flatmap-1.2.5.tgz" @@ -10045,6 +10314,13 @@ better-opn@^2.1.1: dependencies: open "^7.0.3" +better-path-resolve@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/better-path-resolve/-/better-path-resolve-1.0.0.tgz#13a35a1104cdd48a7b74bf8758f96a1ee613f99d" + integrity sha512-pbnl5XzGBdrFU/wT4jqmJVPn2B6UHPBOhzMQkY/SPUPB6QtUXtmBHBIwCbXJol93mOpGMnQyP/+BB19q04xj7g== + dependencies: + is-windows "^1.0.0" + better-queue-memory@^1.0.1: version "1.0.4" resolved "https://registry.npmjs.org/better-queue-memory/-/better-queue-memory-1.0.4.tgz" @@ -10086,14 +10362,6 @@ bindings@^1.5.0: dependencies: file-uri-to-path "1.0.0" -bl@^2.2.1: - version "2.2.1" - resolved "https://registry.npmjs.org/bl/-/bl-2.2.1.tgz" - integrity sha512-6Pesp1w0DEX1N550i/uGV/TqucVL4AM/pgThFSN/Qq9si1/DF9aIHs1BxD8V/QU0HoeHO6cQRTAuYnLPKq1e4g== - dependencies: - readable-stream "^2.3.5" - safe-buffer "^5.1.1" - bl@^4.0.0, bl@^4.1.0: version "4.1.0" resolved "https://registry.npmjs.org/bl/-/bl-4.1.0.tgz" @@ -10103,11 +10371,6 @@ bl@^4.0.0, bl@^4.1.0: inherits "^2.0.4" readable-stream "^3.4.0" -bluebird@3.5.1: - version "3.5.1" - resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.5.1.tgz" - integrity sha512-MKiLiV+I1AA596t9w1sQJ8jkiSr5+ZKi0WKrYGUn6d1Fx+Ij4tIj+m2WMQSGczs5jZVxV339chE8iwk6F64wjA== - bluebird@^3.3.5, bluebird@^3.4.0, bluebird@^3.4.1, bluebird@^3.5.1, bluebird@^3.5.3, bluebird@^3.5.5, bluebird@^3.7.2: version "3.7.2" resolved "https://registry.npmjs.org/bluebird/-/bluebird-3.7.2.tgz" @@ -10226,6 +10489,13 @@ braces@^3.0.1, braces@~3.0.2: dependencies: fill-range "^7.0.1" +breakword@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/breakword/-/breakword-1.0.5.tgz#fd420a417f55016736b5b615161cae1c8f819810" + integrity sha512-ex5W9DoOQ/LUEU3PMdLs9ua/CYZl1678NUkKOdUSi8Aw5F1idieaiRURCBFJCwVcrD1J8Iy3vfWSloaMwO2qFg== + dependencies: + wcwidth "^1.0.1" + broadcast-channel@^3.4.1: version "3.7.0" resolved "https://registry.npmjs.org/broadcast-channel/-/broadcast-channel-3.7.0.tgz" @@ -10403,11 +10673,6 @@ bser@2.1.1: dependencies: node-int64 "^0.4.0" -bson@^1.1.4: - version "1.1.6" - resolved "https://registry.npmjs.org/bson/-/bson-1.1.6.tgz" - integrity sha512-EvVNVeGo4tHxwi8L6bPj3y3itEvStdwvvlojVxxbyYfoaxJ6keLgrTuKdyfEAszFK+H3olzBuafE0yoh0D1gdg== - btoa-lite@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/btoa-lite/-/btoa-lite-1.0.0.tgz" @@ -10951,6 +11216,11 @@ ci-info@2.0.0, ci-info@^2.0.0: resolved "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz" integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== +ci-info@^3.1.0: + version "3.3.2" + resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.2.tgz#6d2967ffa407466481c6c90b6e16b3098f080128" + integrity sha512-xmDt/QIAdeZ9+nfdPsaBCpMvHNLFiLdjj59qjqn+6iPe6YmHGQ35sBnQ8uslRBXFmXkiZQOJRjvQeoGppoTjjg== + ci-info@^3.2.0: version "3.3.0" resolved "https://registry.npmjs.org/ci-info/-/ci-info-3.3.0.tgz" @@ -12007,6 +12277,15 @@ cross-spawn@7.0.3, 
cross-spawn@^7.0.0, cross-spawn@^7.0.1, cross-spawn@^7.0.2, c shebang-command "^2.0.0" which "^2.0.1" +cross-spawn@^5.1.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" + integrity sha512-pTgQJ5KC0d2hcY8eyL1IzlBPYjTkyH72XRZPnLyKus2mBfNjQs3klqbJU2VILqZryAZUt9JOb3h/mWMy23/f5A== + dependencies: + lru-cache "^4.0.1" + shebang-command "^1.2.0" + which "^1.2.9" + cross-spawn@^6.0.0, cross-spawn@^6.0.5: version "6.0.5" resolved "https://registry.npmjs.org/cross-spawn/-/cross-spawn-6.0.5.tgz" @@ -12310,6 +12589,31 @@ csstype@^3.0.2: resolved "https://registry.npmjs.org/csstype/-/csstype-3.0.10.tgz" integrity sha512-2u44ZG2OcNUO9HDp/Jl8C07x6pU/eTR3ncV91SiK3dhG9TWvRVsCoJw14Ckx5DgWkzGA3waZWO3d7pgqpUI/XA== +csv-generate@^3.4.3: + version "3.4.3" + resolved "https://registry.yarnpkg.com/csv-generate/-/csv-generate-3.4.3.tgz#bc42d943b45aea52afa896874291da4b9108ffff" + integrity sha512-w/T+rqR0vwvHqWs/1ZyMDWtHHSJaN06klRqJXBEpDJaM/+dZkso0OKh1VcuuYvK3XM53KysVNq8Ko/epCK8wOw== + +csv-parse@^4.16.3: + version "4.16.3" + resolved "https://registry.yarnpkg.com/csv-parse/-/csv-parse-4.16.3.tgz#7ca624d517212ebc520a36873c3478fa66efbaf7" + integrity sha512-cO1I/zmz4w2dcKHVvpCr7JVRu8/FymG5OEpmvsZYlccYolPBLoVGKUHgNoc4ZGkFeFlWGEDmMyBM+TTqRdW/wg== + +csv-stringify@^5.6.5: + version "5.6.5" + resolved "https://registry.yarnpkg.com/csv-stringify/-/csv-stringify-5.6.5.tgz#c6d74badda4b49a79bf4e72f91cce1e33b94de00" + integrity sha512-PjiQ659aQ+fUTQqSrd1XEDnOr52jh30RBurfzkscaE2tPaFsDH5wOAHJiw8XAHphRknCwMUE9KRayc4K/NbO8A== + +csv@^5.5.0: + version "5.5.3" + resolved "https://registry.yarnpkg.com/csv/-/csv-5.5.3.tgz#cd26c1e45eae00ce6a9b7b27dcb94955ec95207d" + integrity sha512-QTaY0XjjhTQOdguARF0lGKm5/mEq9PD9/VhZZegHDIBq2tQwgNpHc3dneD4mGo2iJs+fTKv5Bp0fZ+BRuY3Z0g== + dependencies: + csv-generate "^3.4.3" + csv-parse "^4.16.3" + csv-stringify "^5.6.5" + stream-transform "^2.1.3" + currently-unhandled@^0.4.1: version "0.4.1" resolved "https://registry.npmjs.org/currently-unhandled/-/currently-unhandled-0.4.1.tgz" @@ -12372,6 +12676,11 @@ dataloader@2.0.0: resolved "https://registry.npmjs.org/dataloader/-/dataloader-2.0.0.tgz" integrity sha512-YzhyDAwA4TaQIhM5go+vCLmU0UikghC/t9DTQYZR2M/UvZ1MdOhPezSDZcjj9uqQJOMqjLcpWtyW2iNINdlatQ== +dataloader@^1.4.0: + version "1.4.0" + resolved "https://registry.yarnpkg.com/dataloader/-/dataloader-1.4.0.tgz#bca11d867f5d3f1b9ed9f737bd15970c65dff5c8" + integrity sha512-68s5jYdlvasItOJnCuI2Q9s4q98g0pCyL3HrcKJu8KNugUl8ahgmZYg38ysLTgQjjXX3H8CJLkAvWrclWfcalw== + date-fns@^2.25.0: version "2.28.0" resolved "https://registry.npmjs.org/date-fns/-/date-fns-2.28.0.tgz" @@ -12555,6 +12864,14 @@ define-properties@^1.1.2, define-properties@^1.1.3: dependencies: object-keys "^1.0.12" +define-properties@^1.1.4: + version "1.1.4" + resolved "https://registry.yarnpkg.com/define-properties/-/define-properties-1.1.4.tgz#0b14d7bd7fbeb2f3572c3a7eda80ea5d57fb05b1" + integrity sha512-uckOqKcfaVvtBdsVkdPv3XjveQJsNQqmhXgRi8uhvWWuPYZCNlzT8qAyblUgNoXdHdjMTzAqeGjAoli8f+bzPA== + dependencies: + has-property-descriptors "^1.0.0" + object-keys "^1.1.1" + define-property@^0.2.5: version "0.2.5" resolved "https://registry.npmjs.org/define-property/-/define-property-0.2.5.tgz" @@ -12625,11 +12942,6 @@ denque@^1.1.0, denque@^1.5.0: resolved "https://registry.npmjs.org/denque/-/denque-1.5.1.tgz" integrity sha512-XwE+iZ4D6ZUB7mfYRMb5wByE8L74HCn30FBN7sWnXksWc1LO1bPDl67pBR9o/kC4z/xSNAwkMYcGgqDV3BE3Hw== -denque@^1.4.1: - version "1.5.0" - resolved 
"https://registry.npmjs.org/denque/-/denque-1.5.0.tgz" - integrity sha512-CYiCSgIF1p6EUByQPlGkKnP1M9g0ZV3qMIrqMqZqdwazygIA/YP2vrbcyl1h/WppKJTdl1F85cXIle+394iDAQ== - depd@^1.1.2, depd@~1.1.2: version "1.1.2" resolved "https://registry.npmjs.org/depd/-/depd-1.1.2.tgz" @@ -12670,6 +12982,11 @@ detect-indent@^5.0.0: resolved "https://registry.npmjs.org/detect-indent/-/detect-indent-5.0.0.tgz" integrity sha1-OHHMCmoALow+Wzz38zYmRnXwa50= +detect-indent@^6.0.0: + version "6.1.0" + resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.1.0.tgz#592485ebbbf6b3b1ab2be175c8393d04ca0d57e6" + integrity sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA== + detect-libc@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-2.0.1.tgz#e1897aa88fa6ad197862937fbc0441ef352ee0cd" @@ -12941,7 +13258,7 @@ dotenv-expand@^5.1.0: resolved "https://registry.npmjs.org/dotenv-expand/-/dotenv-expand-5.1.0.tgz" integrity sha512-YXQl1DSa4/PQyRfgrv6aoNjhasp/p4qs9FjJ4q4cQk+8m4r6k4ZSiEyytKG8f8W9gi8WsQtIObNmKd+tMzNTmA== -dotenv@^8.0.0, dotenv@^8.2.0, dotenv@^8.6.0: +dotenv@^8.0.0, dotenv@^8.1.0, dotenv@^8.2.0, dotenv@^8.6.0: version "8.6.0" resolved "https://registry.npmjs.org/dotenv/-/dotenv-8.6.0.tgz" integrity sha512-IrPdXQsk2BbzvCBGBOTmmSH5SodmqZNt4ERAZDmW4CT+tL8VtvinqywuANaFu4bOMWki16nqf0e4oC0QIaDr/g== @@ -13179,7 +13496,7 @@ enhanced-resolve@^5.8.3: graceful-fs "^4.2.4" tapable "^2.2.0" -enquirer@^2.3.4, enquirer@^2.3.5, enquirer@^2.3.6: +enquirer@^2.3.0, enquirer@^2.3.4, enquirer@^2.3.5, enquirer@^2.3.6: version "2.3.6" resolved "https://registry.npmjs.org/enquirer/-/enquirer-2.3.6.tgz" integrity sha512-yjNnPr315/FjS4zIsUxYguYUPP2e1NK4d7E7ZOLiyYCcbFBiTMyID+2wvm2w6+pZ/odMA7cRkjhsPbltwBOrLg== @@ -13290,6 +13607,35 @@ es-abstract@^1.19.0, es-abstract@^1.19.1: string.prototype.trimstart "^1.0.4" unbox-primitive "^1.0.1" +es-abstract@^1.19.2, es-abstract@^1.19.5: + version "1.20.1" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814" + integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA== + dependencies: + call-bind "^1.0.2" + es-to-primitive "^1.2.1" + function-bind "^1.1.1" + function.prototype.name "^1.1.5" + get-intrinsic "^1.1.1" + get-symbol-description "^1.0.0" + has "^1.0.3" + has-property-descriptors "^1.0.0" + has-symbols "^1.0.3" + internal-slot "^1.0.3" + is-callable "^1.2.4" + is-negative-zero "^2.0.2" + is-regex "^1.1.4" + is-shared-array-buffer "^1.0.2" + is-string "^1.0.7" + is-weakref "^1.0.2" + object-inspect "^1.12.0" + object-keys "^1.1.1" + object.assign "^4.1.2" + regexp.prototype.flags "^1.4.3" + string.prototype.trimend "^1.0.5" + string.prototype.trimstart "^1.0.5" + unbox-primitive "^1.0.2" + es-array-method-boxes-properly@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/es-array-method-boxes-properly/-/es-array-method-boxes-properly-1.0.0.tgz" @@ -13314,6 +13660,13 @@ es-module-lexer@^0.9.0: resolved "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz" integrity sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ== +es-shim-unscopables@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/es-shim-unscopables/-/es-shim-unscopables-1.0.0.tgz#702e632193201e3edf8713635d083d378e510241" + integrity sha512-Jm6GPcCdC30eMLbZ2x8z2WuRwAws3zTBBKuusffYVUrNj/GVSUAZ+xKMaUpfNDR5IbyNA5LJbaecoUVbmUcB1w== + dependencies: 
+ has "^1.0.3" + es-to-primitive@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/es-to-primitive/-/es-to-primitive-1.2.1.tgz" @@ -14168,7 +14521,12 @@ extend@^3.0.0, extend@~3.0.2: resolved "https://registry.npmjs.org/extend/-/extend-3.0.2.tgz" integrity sha512-fjquC59cD7CyW6urNXK0FBufkZcoiGG80wTuPujX590cB5Ttln20E2UB4S/WARVqhXffZl2LNgS+gQdPIIim/g== -external-editor@^3.0.3: +extendable-error@^0.1.5: + version "0.1.7" + resolved "https://registry.yarnpkg.com/extendable-error/-/extendable-error-0.1.7.tgz#60b9adf206264ac920058a7395685ae4670c2b96" + integrity sha512-UOiS2in6/Q0FK0R0q6UY9vYpQ21mr/Qn1KOnte7vsACuNJf514WvCCUHSRCPcgjPT2bAhNIJdlE6bVap1GKmeg== + +external-editor@^3.0.3, external-editor@^3.1.0: version "3.1.0" resolved "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz" integrity sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew== @@ -14572,6 +14930,14 @@ find-up@^5.0.0: locate-path "^6.0.0" path-exists "^4.0.0" +find-yarn-workspace-root2@1.2.16: + version "1.2.16" + resolved "https://registry.yarnpkg.com/find-yarn-workspace-root2/-/find-yarn-workspace-root2-1.2.16.tgz#60287009dd2f324f59646bdb4b7610a6b301c2a9" + integrity sha512-hr6hb1w8ePMpPVUK39S4RlwJzi+xPLuVuG8XlwXU3KD5Yn3qgBWVfy3AzNlDhWvE1EORCE65/Qm26rFQt3VLVA== + dependencies: + micromatch "^4.0.2" + pkg-dir "^4.2.0" + find-yarn-workspace-root@^2.0.0: version "2.0.0" resolved "https://registry.npmjs.org/find-yarn-workspace-root/-/find-yarn-workspace-root-2.0.0.tgz" @@ -14828,6 +15194,15 @@ fs-extra@^10.0.0: jsonfile "^6.0.1" universalify "^2.0.0" +fs-extra@^7.0.1: + version "7.0.1" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9" + integrity sha512-YJDaCJZEnBmcbw13fvdAM9AwNOJwOzrE4pqMqBq5nFiEqXUqHwlK4B+3pUw6JNvfSPtX05xFHtYy/1ni01eGCw== + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" + universalify "^0.1.0" + fs-extra@^9.0.0, fs-extra@^9.0.1: version "9.1.0" resolved "https://registry.npmjs.org/fs-extra/-/fs-extra-9.1.0.tgz" @@ -14895,7 +15270,7 @@ function-bind@^1.1.1: resolved "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz" integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A== -function.prototype.name@^1.1.0: +function.prototype.name@^1.1.0, function.prototype.name@^1.1.5: version "1.1.5" resolved "https://registry.npmjs.org/function.prototype.name/-/function.prototype.name-1.1.5.tgz" integrity sha512-uN7m/BzVKQnCUF/iW8jYea67v++2u7m5UgENbHRtdDVclOUP+FMPlCNdmk0h/ysGyo2tavMJEDqJAkJdRa1vMA== @@ -15742,6 +16117,18 @@ globby@^10.0.1: merge2 "^1.2.3" slash "^3.0.0" +globby@^11.0.0, globby@^11.0.2, globby@^11.0.4: + version "11.1.0" + resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" + integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== + dependencies: + array-union "^2.1.0" + dir-glob "^3.0.1" + fast-glob "^3.2.9" + ignore "^5.2.0" + merge2 "^1.4.1" + slash "^3.0.0" + globby@^11.0.1, globby@^11.0.3: version "11.0.4" resolved "https://registry.npmjs.org/globby/-/globby-11.0.4.tgz" @@ -15754,18 +16141,6 @@ globby@^11.0.1, globby@^11.0.3: merge2 "^1.3.0" slash "^3.0.0" -globby@^11.0.2, globby@^11.0.4: - version "11.1.0" - resolved "https://registry.npmjs.org/globby/-/globby-11.1.0.tgz" - integrity sha512-jhIXaOzy1sb8IyocaruWSn1TjmnBVs8Ayhcy83rmxNJ8q2uWKCAj3CnJY+KpGSXCueAPc0i05kVvVKtP1t9S3g== - dependencies: - array-union "^2.1.0" - dir-glob "^3.0.1" 
- fast-glob "^3.2.9" - ignore "^5.2.0" - merge2 "^1.4.1" - slash "^3.0.0" - globby@^9.2.0: version "9.2.0" resolved "https://registry.npmjs.org/globby/-/globby-9.2.0.tgz" @@ -15831,6 +16206,11 @@ graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6 resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.3.tgz" integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ== +graceful-fs@^4.1.5, graceful-fs@^4.2.9: + version "4.2.10" + resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" + integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== + graceful-fs@^4.1.9, graceful-fs@^4.2.3, graceful-fs@^4.2.6: version "4.2.9" resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.9.tgz" @@ -15841,11 +16221,6 @@ graceful-fs@^4.2.4: resolved "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.4.tgz" integrity sha512-WjKPNJF79dtJAVniUlGGWHYGz2jWxT6VhN/4m1NdkbZ2nOsEF+cI1Edgql5zCRhs/VsQYRvrXctxktVXZUkixw== -graceful-fs@^4.2.9: - version "4.2.10" - resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.10.tgz#147d3a006da4ca3ce14728c7aefc287c367d7a6c" - integrity sha512-9ByhssR2fPVsNZj478qUUbKfmL0+t5BDVyjShtyZZLiK7ZDAArFFfopyOTj0M05wE2tJPisA4iTnnXl2YoPvOA== - grapheme-splitter@^1.0.4: version "1.0.4" resolved "https://registry.npmjs.org/grapheme-splitter/-/grapheme-splitter-1.0.4.tgz" @@ -15986,6 +16361,11 @@ has-bigints@^1.0.1: resolved "https://registry.npmjs.org/has-bigints/-/has-bigints-1.0.1.tgz" integrity sha512-LSBS2LjbNBTf6287JEbEzvJgftkF5qFkmCo9hDRpAzKhUOlJ+hx8dd4USs00SgsUNwc4617J9ki5YtEClM2ffA== +has-bigints@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/has-bigints/-/has-bigints-1.0.2.tgz#0871bd3e3d51626f6ca0966668ba35d5602d6eaa" + integrity sha512-tSvCKtBr9lkF0Ex0aQiP9N+OpV4zi2r/Nee5VkRDbaqv35RLYMzbwQfFSZZH0kR+Rd6302UJZ2p/bJCEoR3VoQ== + has-cors@1.1.0: version "1.1.0" resolved "https://registry.npmjs.org/has-cors/-/has-cors-1.1.0.tgz" @@ -16008,11 +16388,23 @@ has-glob@^1.0.0: dependencies: is-glob "^3.0.0" +has-property-descriptors@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-property-descriptors/-/has-property-descriptors-1.0.0.tgz#610708600606d36961ed04c196193b6a607fa861" + integrity sha512-62DVLZGoiEBDHQyqG4w9xCuZ7eJEwNmJRWw2VY84Oedb7WFcA27fiEVe8oUQx9hAUJ4ekurquucTGwsyO1XGdQ== + dependencies: + get-intrinsic "^1.1.1" + has-symbols@^1.0.1, has-symbols@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.2.tgz" integrity sha512-chXa79rL/UC2KlX17jo3vRGz0azaWEx5tGqZg5pO3NUyEJVB17dMruQlzCCOfUvElghKcm5194+BCRvi2Rv/Gw== +has-symbols@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" + integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== + has-tostringtag@^1.0.0: version "1.0.0" resolved "https://registry.npmjs.org/has-tostringtag/-/has-tostringtag-1.0.0.tgz" @@ -16477,6 +16869,11 @@ https-proxy-agent@^2.2.3: agent-base "^4.3.0" debug "^3.1.0" +human-id@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/human-id/-/human-id-1.0.2.tgz#e654d4b2b0d8b07e45da9f6020d8af17ec0a5df3" + integrity sha512-UNopramDEhHJD+VR+ehk8rOslwSfByxPIZyJRfV739NDhN5LF1fa1MqnzKm2lGTQRjNrjK19Q5fhkgIfjlVUKw== + human-signals@^1.1.1: version "1.1.1" resolved 
"https://registry.npmjs.org/human-signals/-/human-signals-1.1.1.tgz" @@ -16979,6 +17376,13 @@ is-ci@^2.0.0: dependencies: ci-info "^2.0.0" +is-ci@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/is-ci/-/is-ci-3.0.1.tgz#db6ecbed1bd659c43dac0f45661e7674103d1867" + integrity sha512-ZYvCgrefwqoQ6yTyYUbQu64HsITZ3NfKX1lzaEYdkTDcfKzzCI/wthRRYKkdjHKFVgNiXKAKm65Zo1pk2as/QQ== + dependencies: + ci-info "^3.2.0" + is-core-module@^2.2.0: version "2.8.0" resolved "https://registry.npmjs.org/is-core-module/-/is-core-module-2.8.0.tgz" @@ -17190,6 +17594,11 @@ is-negative-zero@^2.0.1: resolved "https://registry.npmjs.org/is-negative-zero/-/is-negative-zero-2.0.1.tgz" integrity sha512-2z6JzQvZRa9A2Y7xC6dQQm4FSTSTNWjKIYYTt4246eMTJmIo0Q+ZyOsU66X8lxK1AbB92dFeglPLrhwpeRKO6w== +is-negative-zero@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/is-negative-zero/-/is-negative-zero-2.0.2.tgz#7bf6f03a28003b8b3965de3ac26f664d765f3150" + integrity sha512-dqJvarLawXsFbNDeJW7zAz8ItJ9cd28YufuuFzh0G8pNHjJMnY08Dv7sYX2uF5UpQOwieAeOExEYAWWfu7ZZUA== + is-node-process@^1.0.1: version "1.0.1" resolved "https://registry.npmjs.org/is-node-process/-/is-node-process-1.0.1.tgz" @@ -17351,6 +17760,13 @@ is-shared-array-buffer@^1.0.1: resolved "https://registry.npmjs.org/is-shared-array-buffer/-/is-shared-array-buffer-1.0.1.tgz" integrity sha512-IU0NmyknYZN0rChcKhRO1X8LYz5Isj/Fsqh8NJOSf+N/hCOTwy29F32Ik7a+QszE63IdvmwdTPDd6cZ5pg4cwA== +is-shared-array-buffer@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" + integrity sha512-sqN2UDu1/0y6uvXyStCOzyhAjCSlHceFoMKJW8W9EU9cvic/QdsZ0kEU93HEy3IUEFZIiH/3w+AH/UQbPHNdhA== + dependencies: + call-bind "^1.0.2" + is-ssh@^1.3.0: version "1.3.3" resolved "https://registry.npmjs.org/is-ssh/-/is-ssh-1.3.3.tgz" @@ -17380,6 +17796,13 @@ is-string@^1.0.7: dependencies: has-tostringtag "^1.0.0" +is-subdir@^1.1.1: + version "1.2.0" + resolved "https://registry.yarnpkg.com/is-subdir/-/is-subdir-1.2.0.tgz#b791cd28fab5202e91a08280d51d9d7254fd20d4" + integrity sha512-2AT6j+gXe/1ueqbW6fLZJiIw3F8iXGJtt0yDrZaBhAZEG1raiTxKWU+IPqMCzQAXOUCKdA4UDMgacKH25XG2Cw== + dependencies: + better-path-resolve "1.0.0" + is-symbol@^1.0.2, is-symbol@^1.0.3: version "1.0.4" resolved "https://registry.npmjs.org/is-symbol/-/is-symbol-1.0.4.tgz" @@ -17428,7 +17851,7 @@ is-valid-path@^0.1.1: dependencies: is-invalid-path "^0.1.0" -is-weakref@^1.0.1: +is-weakref@^1.0.1, is-weakref@^1.0.2: version "1.0.2" resolved "https://registry.npmjs.org/is-weakref/-/is-weakref-1.0.2.tgz" integrity sha512-qctsuLZmIQ0+vSSMfoVvyFe2+GSEvnmZ2ezTup1SBse9+twCCeial6EEi3Nc2KFcf6+qz2FBPnjXsk8xhKSaPQ== @@ -18865,7 +19288,7 @@ js-string-escape@^1.0.1: resolved "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz" integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ== -js-yaml@3.14.1, js-yaml@^3.13.1: +js-yaml@3.14.1, js-yaml@^3.13.0, js-yaml@^3.13.1, js-yaml@^3.6.1: version "3.14.1" resolved "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz" integrity sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g== @@ -19126,11 +19549,6 @@ jws@^3.2.2: jwa "^1.4.1" safe-buffer "^5.0.1" -kareem@2.3.2: - version "2.3.2" - resolved "https://registry.npmjs.org/kareem/-/kareem-2.3.2.tgz" - integrity sha512-STHz9P7X2L4Kwn72fA4rGyqyXdmrMSdxqHx9IXon/FXluXieaFA6KJ2upcHAHxQPQ0LeM/OjLrhFxifHewOALQ== - keygrip@~1.0.3: version 
"1.0.3" resolved "https://registry.npmjs.org/keygrip/-/keygrip-1.0.3.tgz" @@ -19198,6 +19616,11 @@ kleur@^3.0.3: resolved "https://registry.npmjs.org/kleur/-/kleur-3.0.3.tgz" integrity sha512-eTIzlVOSUR+JxdDFepEYcBMtZ9Qqdef+rnzWdRZuMbOywu5tO2w2N7rqjoANZ5k9vywhL6Br1VRjUIgTQx4E8w== +kleur@^4.1.4: + version "4.1.5" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" + integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== + klona@^2.0.4: version "2.0.5" resolved "https://registry.npmjs.org/klona/-/klona-2.0.5.tgz" @@ -19470,6 +19893,16 @@ load-json-file@^5.3.0: strip-bom "^3.0.0" type-fest "^0.3.0" +load-yaml-file@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/load-yaml-file/-/load-yaml-file-0.2.0.tgz#af854edaf2bea89346c07549122753c07372f64d" + integrity sha512-OfCBkGEw4nN6JLtgRidPX6QxjBQGQf72q3si2uvqyFEMbycSFFHwAZeXx6cJgFM9wmLrf9zBwCP3Ivqa+LLZPw== + dependencies: + graceful-fs "^4.1.5" + js-yaml "^3.13.0" + pify "^4.0.1" + strip-bom "^3.0.0" + loader-runner@^2.4.0: version "2.4.0" resolved "https://registry.npmjs.org/loader-runner/-/loader-runner-2.4.0.tgz" @@ -19694,6 +20127,11 @@ lodash.sortby@^4.7.0: resolved "https://registry.npmjs.org/lodash.sortby/-/lodash.sortby-4.7.0.tgz" integrity sha1-7dFMgk4sycHgsKG0K7UhBRakJDg= +lodash.startcase@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/lodash.startcase/-/lodash.startcase-4.4.0.tgz#9436e34ed26093ed7ffae1936144350915d9add8" + integrity sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg== + lodash.template@^4.0.2, lodash.template@^4.4.0, lodash.template@^4.5.0: version "4.5.0" resolved "https://registry.npmjs.org/lodash.template/-/lodash.template-4.5.0.tgz" @@ -19862,7 +20300,7 @@ lru-cache@4.0.0: pseudomap "^1.0.1" yallist "^2.0.0" -lru-cache@^4.0.0: +lru-cache@^4.0.0, lru-cache@^4.0.1: version "4.1.5" resolved "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz" integrity sha512-sWZlbEP2OsHNkXrMl5GYk/jKk70MBng6UU4YI/qGDYbgf6YbP4EvmqISbXCoJiRKs+1bSpFHVgQxvJ17F2li5g== @@ -20284,11 +20722,6 @@ memory-fs@^0.5.0: errno "^0.1.3" readable-stream "^2.0.1" -memory-pager@^1.0.2: - version "1.5.0" - resolved "https://registry.npmjs.org/memory-pager/-/memory-pager-1.5.0.tgz" - integrity sha512-ZS4Bp4r/Zoeq6+NLJpP+0Zzm0pR8whtGPf1XExKLJBAczGMnSi3It14OiNCStjQjM6NU1okjQGSxgEZN8eBYKg== - meow@^3.3.0: version "3.7.0" resolved "https://registry.npmjs.org/meow/-/meow-3.7.0.tgz" @@ -20320,6 +20753,23 @@ meow@^4.0.0: redent "^2.0.0" trim-newlines "^2.0.0" +meow@^6.0.0: + version "6.1.1" + resolved "https://registry.yarnpkg.com/meow/-/meow-6.1.1.tgz#1ad64c4b76b2a24dfb2f635fddcadf320d251467" + integrity sha512-3YffViIt2QWgTy6Pale5QpopX/IvU3LPL03jOTqp6pGj3VjesdO/U8CuHMKpnQr4shCNCM5fd5XFFvIIl6JBHg== + dependencies: + "@types/minimist" "^1.2.0" + camelcase-keys "^6.2.2" + decamelize-keys "^1.1.0" + hard-rejection "^2.1.0" + minimist-options "^4.0.2" + normalize-package-data "^2.5.0" + read-pkg-up "^7.0.1" + redent "^3.0.0" + trim-newlines "^3.0.0" + type-fest "^0.13.1" + yargs-parser "^18.1.3" + meow@^8.0.0: version "8.1.2" resolved "https://registry.npmjs.org/meow/-/meow-8.1.2.tgz" @@ -20574,7 +21024,7 @@ minimatch@^5.0.1: dependencies: brace-expansion "^2.0.1" -minimist-options@4.1.0: +minimist-options@4.1.0, minimist-options@^4.0.2: version "4.1.0" resolved "https://registry.npmjs.org/minimist-options/-/minimist-options-4.1.0.tgz" integrity 
sha512-Q4r8ghd80yhO/0j1O3B2BjweX3fiHg9cdOwjJd2J76Q135c+NDxGCqdYKQ1SKBuFfgWbAUzBfvYjPUEeNgqN1A== @@ -20699,6 +21149,11 @@ mixin-deep@^1.2.0: for-in "^1.0.2" is-extendable "^1.0.1" +mixme@^0.5.1: + version "0.5.4" + resolved "https://registry.yarnpkg.com/mixme/-/mixme-0.5.4.tgz#8cb3bd0cd32a513c161bf1ca99d143f0bcf2eff3" + integrity sha512-3KYa4m4Vlqx98GPdOHghxSdNtTvcP8E0kkaJ5Dlh+h2DRzF7zpuVVcA8B0QpKd11YJeP9QQ7ASkKzOeu195Wzw== + mkdirp-promise@^5.0.1: version "5.0.1" resolved "https://registry.npmjs.org/mkdirp-promise/-/mkdirp-promise-5.0.1.tgz" @@ -20742,76 +21197,6 @@ moment-timezone@^0.5.31: resolved "https://registry.npmjs.org/moment/-/moment-2.29.1.tgz" integrity sha512-kHmoybcPV8Sqy59DwNDY3Jefr64lK/by/da0ViFcuA4DH0vQg5Q6Ze5VimxkfQNSC+Mls/Kx53s7TjP1RhFEDQ== -mongodb@3.6.11: - version "3.6.11" - resolved "https://registry.npmjs.org/mongodb/-/mongodb-3.6.11.tgz" - integrity sha512-4Y4lTFHDHZZdgMaHmojtNAlqkvddX2QQBEN0K//GzxhGwlI9tZ9R0vhbjr1Decw+TF7qK0ZLjQT292XgHRRQgw== - dependencies: - bl "^2.2.1" - bson "^1.1.4" - denque "^1.4.1" - optional-require "^1.0.3" - safe-buffer "^5.1.2" - optionalDependencies: - saslprep "^1.0.0" - -mongodb@3.7.3: - version "3.7.3" - resolved "https://registry.npmjs.org/mongodb/-/mongodb-3.7.3.tgz" - integrity sha512-Psm+g3/wHXhjBEktkxXsFMZvd3nemI0r3IPsE0bU+4//PnvNWKkzhZcEsbPcYiWqe8XqXJJEg4Tgtr7Raw67Yw== - dependencies: - bl "^2.2.1" - bson "^1.1.4" - denque "^1.4.1" - optional-require "^1.1.8" - safe-buffer "^5.1.2" - optionalDependencies: - saslprep "^1.0.0" - -mongoose-legacy-pluralize@1.0.2: - version "1.0.2" - resolved "https://registry.npmjs.org/mongoose-legacy-pluralize/-/mongoose-legacy-pluralize-1.0.2.tgz" - integrity sha512-Yo/7qQU4/EyIS8YDFSeenIvXxZN+ld7YdV9LqFVQJzTLye8unujAWPZ4NWKfFA+RNjh+wvTWKY9Z3E5XM6ZZiQ== - -mongoose@^5.10.15: - version "5.13.7" - resolved "https://registry.npmjs.org/mongoose/-/mongoose-5.13.7.tgz" - integrity sha512-ADIvftZ+KfoTALMZ0n8HvBlezFhcUd73hQaHQDwQ+3X+JZlqE47fUy9yhFZ2SjT+qzmuaCcIXCfhewIc38t2fQ== - dependencies: - "@types/mongodb" "^3.5.27" - bson "^1.1.4" - kareem "2.3.2" - mongodb "3.6.11" - mongoose-legacy-pluralize "1.0.2" - mpath "0.8.3" - mquery "3.2.5" - ms "2.1.2" - optional-require "1.0.x" - regexp-clone "1.0.0" - safe-buffer "5.2.1" - sift "13.5.2" - sliced "1.0.1" - -mongoose@^5.8.0: - version "5.13.14" - resolved "https://registry.npmjs.org/mongoose/-/mongoose-5.13.14.tgz" - integrity sha512-j+BlQjjxgZg0iWn42kLeZTB91OejcxWpY2Z50bsZTiKJ7HHcEtcY21Godw496GMkBqJMTzmW7G/kZ04mW+Cb7Q== - dependencies: - "@types/bson" "1.x || 4.0.x" - "@types/mongodb" "^3.5.27" - bson "^1.1.4" - kareem "2.3.2" - mongodb "3.7.3" - mongoose-legacy-pluralize "1.0.2" - mpath "0.8.4" - mquery "3.2.5" - ms "2.1.2" - optional-require "1.0.x" - regexp-clone "1.0.0" - safe-buffer "5.2.1" - sift "13.5.2" - sliced "1.0.1" - morgan@^1.9.1: version "1.10.0" resolved "https://registry.npmjs.org/morgan/-/morgan-1.10.0.tgz" @@ -20835,27 +21220,6 @@ move-concurrently@^1.0.1: rimraf "^2.5.4" run-queue "^1.0.3" -mpath@0.8.3: - version "0.8.3" - resolved "https://registry.npmjs.org/mpath/-/mpath-0.8.3.tgz" - integrity sha512-eb9rRvhDltXVNL6Fxd2zM9D4vKBxjVVQNLNijlj7uoXUy19zNDsIif5zR+pWmPCWNKwAtqyo4JveQm4nfD5+eA== - -mpath@0.8.4: - version "0.8.4" - resolved "https://registry.npmjs.org/mpath/-/mpath-0.8.4.tgz" - integrity sha512-DTxNZomBcTWlrMW76jy1wvV37X/cNNxPW1y2Jzd4DZkAaC5ZGsm8bfGfNOthcDuRJujXLqiuS6o3Tpy0JEoh7g== - -mquery@3.2.5: - version "3.2.5" - resolved "https://registry.npmjs.org/mquery/-/mquery-3.2.5.tgz" - integrity 
sha512-VjOKHHgU84wij7IUoZzFRU07IAxd5kWJaDmyUzQlbjHjyoeK5TNeeo8ZsFDtTYnSgpW6n/nMNIHvE3u8Lbrf4A== - dependencies: - bluebird "3.5.1" - debug "3.1.0" - regexp-clone "^1.0.0" - safe-buffer "5.1.2" - sliced "1.0.1" - mri@^1.1.0: version "1.1.6" resolved "https://registry.npmjs.org/mri/-/mri-1.1.6.tgz" @@ -21638,6 +22002,11 @@ object-inspect@^1.11.0: resolved "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.0.tgz" integrity sha512-Ho2z80bVIvJloH+YzRmpZVQe87+qASmBUKZDWgx9cu+KDrX2ZDH/3tMy+gXbZETVGs2M8YdxObOh7XAtim9Y0g== +object-inspect@^1.12.0: + version "1.12.2" + resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" + integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== + object-is@^1.0.1: version "1.1.5" resolved "https://registry.npmjs.org/object-is/-/object-is-1.1.5.tgz" @@ -21825,25 +22194,6 @@ opentracing@^0.14.5: resolved "https://registry.npmjs.org/opentracing/-/opentracing-0.14.7.tgz" integrity sha512-vz9iS7MJ5+Bp1URw8Khvdyw1H/hGvzHWlKQ7eRrQojSCDL1/SrWfrY9QebLw97n2deyRtzHRC3MkQfVNUCo91Q== -optional-require@1.0.x: - version "1.0.3" - resolved "https://registry.npmjs.org/optional-require/-/optional-require-1.0.3.tgz" - integrity sha512-RV2Zp2MY2aeYK5G+B/Sps8lW5NHAzE5QClbFP15j+PWmP+T9PxlJXBOOLoSAdgwFvS4t0aMR4vpedMkbHfh0nA== - -optional-require@^1.0.3: - version "1.1.7" - resolved "https://registry.npmjs.org/optional-require/-/optional-require-1.1.7.tgz" - integrity sha512-cIeRZocXsZnZYn+SevbtSqNlLbeoS4mLzuNn4fvXRMDRNhTGg0sxuKXl0FnZCtnew85LorNxIbZp5OeliILhMw== - dependencies: - require-at "^1.0.6" - -optional-require@^1.1.8: - version "1.1.8" - resolved "https://registry.npmjs.org/optional-require/-/optional-require-1.1.8.tgz" - integrity sha512-jq83qaUb0wNg9Krv1c5OQ+58EK+vHde6aBPzLvPPqJm89UQWsvSuFy9X/OSNJnFeSOKo7btE0n8Nl2+nE+z5nA== - dependencies: - require-at "^1.0.6" - optionator@^0.8.1, optionator@^0.8.3: version "0.8.3" resolved "https://registry.npmjs.org/optionator/-/optionator-0.8.3.tgz" @@ -21933,6 +22283,11 @@ osenv@^0.1.4, osenv@^0.1.5: os-homedir "^1.0.0" os-tmpdir "^1.0.0" +outdent@^0.5.0: + version "0.5.0" + resolved "https://registry.yarnpkg.com/outdent/-/outdent-0.5.0.tgz#9e10982fdc41492bb473ad13840d22f9655be2ff" + integrity sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q== + outvariant@^1.2.0: version "1.2.1" resolved "https://registry.npmjs.org/outvariant/-/outvariant-1.2.1.tgz" @@ -22148,6 +22503,11 @@ pako@~1.0.5: resolved "https://registry.npmjs.org/pako/-/pako-1.0.11.tgz" integrity sha512-4hLB8Py4zZce5s4yd9XzopqwVv/yGNhV1Bl8NTmCq1763HeK2+EwVTv+leGeL13Dnh2wfbqowVPXCIO0z4taYw== +papaparse@^5.3.2: + version "5.3.2" + resolved "https://registry.yarnpkg.com/papaparse/-/papaparse-5.3.2.tgz#d1abed498a0ee299f103130a6109720404fbd467" + integrity sha512-6dNZu0Ki+gyV0eBsFKJhYr+MdQYAzFUGlBMNj3GNrmHxmz1lfRa24CjFObPXtjcetlOv5Ad299MhIK0znp3afw== + parallel-transform@^1.1.0: version "1.2.0" resolved "https://registry.npmjs.org/parallel-transform/-/parallel-transform-1.2.0.tgz" @@ -23279,6 +23639,16 @@ postgres-interval@^1.1.0: dependencies: xtend "^4.0.0" +preferred-pm@^3.0.0: + version "3.0.3" + resolved "https://registry.yarnpkg.com/preferred-pm/-/preferred-pm-3.0.3.tgz#1b6338000371e3edbce52ef2e4f65eb2e73586d6" + integrity sha512-+wZgbxNES/KlJs9q40F/1sfOd/j7f1O9JaHcW5Dsn3aUUOZg3L2bjpVUcKV2jvtElYfoTuQiNeMfQJ4kwUAhCQ== + dependencies: + find-up "^5.0.0" + find-yarn-workspace-root2 "1.2.16" + path-exists 
"^4.0.0" + which-pm "2.0.0" + prelude-ls@^1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/prelude-ls/-/prelude-ls-1.2.1.tgz" @@ -24230,6 +24600,16 @@ read-pkg@^5.2.0: parse-json "^5.0.0" type-fest "^0.6.0" +read-yaml-file@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/read-yaml-file/-/read-yaml-file-1.1.0.tgz#9362bbcbdc77007cc8ea4519fe1c0b821a7ce0d8" + integrity sha512-VIMnQi/Z4HT2Fxuwg5KrY174U1VdUIASQVWXXyqtNRtxSr9IYkn1rsI6Tb6HsrHCmB7gVpNwX6JxPTHcH6IoTA== + dependencies: + graceful-fs "^4.1.5" + js-yaml "^3.6.1" + pify "^4.0.1" + strip-bom "^3.0.0" + read@1, read@^1.0.7, read@~1.0.1: version "1.0.7" resolved "https://registry.npmjs.org/read/-/read-1.0.7.tgz" @@ -24472,11 +24852,6 @@ regex-not@^1.0.0, regex-not@^1.0.2: extend-shallow "^3.0.2" safe-regex "^1.1.0" -regexp-clone@1.0.0, regexp-clone@^1.0.0: - version "1.0.0" - resolved "https://registry.npmjs.org/regexp-clone/-/regexp-clone-1.0.0.tgz" - integrity sha512-TuAasHQNamyyJ2hb97IuBEif4qBHGjPHBS64sZwytpLEqtBQ1gPJTnOaQ6qmpET16cK14kkjbazl6+p0RRv0yw== - regexp.prototype.flags@^1.2.0, regexp.prototype.flags@^1.3.1: version "1.3.1" resolved "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.3.1.tgz" @@ -24485,6 +24860,15 @@ regexp.prototype.flags@^1.2.0, regexp.prototype.flags@^1.3.1: call-bind "^1.0.2" define-properties "^1.1.3" +regexp.prototype.flags@^1.4.3: + version "1.4.3" + resolved "https://registry.yarnpkg.com/regexp.prototype.flags/-/regexp.prototype.flags-1.4.3.tgz#87cab30f80f66660181a3bb7bf5981a872b367ac" + integrity sha512-fjggEOO3slI6Wvgjwflkc4NFRCTZAu5CnNfBd5qOMYhWdn67nJBBu34/TkD++eeFmd8C9r9jfXJ27+nSiRkSUA== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.3" + functions-have-names "^1.2.2" + regexpp@^2.0.1: version "2.0.1" resolved "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz" @@ -24806,11 +25190,6 @@ request@2.88.2, request@^2.88.0: tunnel-agent "^0.6.0" uuid "^3.3.2" -require-at@^1.0.6: - version "1.0.6" - resolved "https://registry.npmjs.org/require-at/-/require-at-1.0.6.tgz" - integrity sha512-7i1auJbMUrXEAZCOQ0VNJgmcT2VOKPRl2YGJwgpHpC9CE91Mv4/4UYIUm4chGJaI381ZDq1JUicFii64Hapd8g== - require-directory@^2.1.1: version "2.1.1" resolved "https://registry.npmjs.org/require-directory/-/require-directory-2.1.1.tgz" @@ -25235,13 +25614,6 @@ sane@^4.0.3: minimist "^1.1.1" walker "~1.0.5" -saslprep@^1.0.0: - version "1.0.3" - resolved "https://registry.npmjs.org/saslprep/-/saslprep-1.0.3.tgz" - integrity sha512-/MY/PEMbk2SuY5sScONwhUDsV2p77Znkb/q3nSVstq/yQzYJOH/Azh29p9oJLsl3LnQwSvZDKagDGBsBwSooag== - dependencies: - sparse-bitfield "^3.0.3" - sax@1.2.1: version "1.2.1" resolved "https://registry.npmjs.org/sax/-/sax-1.2.1.tgz" @@ -25615,11 +25987,6 @@ side-channel@^1.0.4: get-intrinsic "^1.0.2" object-inspect "^1.9.0" -sift@13.5.2: - version "13.5.2" - resolved "https://registry.npmjs.org/sift/-/sift-13.5.2.tgz" - integrity sha512-+gxdEOMA2J+AI+fVsCqeNn7Tgx3M9ZN9jdi95939l1IJ8cZsqS8sqpJyOkic2SJk+1+98Uwryt/gL6XDaV+UZA== - signal-exit@^3.0.0, signal-exit@^3.0.2, signal-exit@^3.0.3: version "3.0.3" resolved "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.3.tgz" @@ -25724,11 +26091,6 @@ slice-ansi@^4.0.0: astral-regex "^2.0.0" is-fullwidth-code-point "^3.0.0" -sliced@1.0.1: - version "1.0.1" - resolved "https://registry.npmjs.org/sliced/-/sliced-1.0.1.tgz" - integrity sha1-CzpmK10Ewxd7GSa+qCsD+Dei70E= - slide@^1.1.6: version "1.1.6" resolved "https://registry.npmjs.org/slide/-/slide-1.1.6.tgz" @@ -25744,6 +26106,18 @@ smart-buffer@^4.1.0, 
smart-buffer@^4.2.0: resolved "https://registry.npmjs.org/smart-buffer/-/smart-buffer-4.2.0.tgz" integrity sha512-94hK0Hh8rPqQl2xXc3HsaBoOXKV20MToPkcXvwbISWLEs+64sBq5kFgn2kJDHb1Pry9yrP0dxrCI9RRci7RXKg== +smartwrap@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/smartwrap/-/smartwrap-2.0.2.tgz#7e25d3dd58b51c6ca4aba3a9e391650ea62698a4" + integrity sha512-vCsKNQxb7PnCNd2wY1WClWifAc2lwqsG8OaswpJkVJsvMGcnEntdTCDajZCkk93Ay1U3t/9puJmb525Rg5MZBA== + dependencies: + array.prototype.flat "^1.2.3" + breakword "^1.0.5" + grapheme-splitter "^1.0.4" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + yargs "^15.1.0" + snapdragon-node@^2.0.1: version "2.1.1" resolved "https://registry.npmjs.org/snapdragon-node/-/snapdragon-node-2.1.1.tgz" @@ -25941,12 +26315,13 @@ space-separated-tokens@^1.0.0: resolved "https://registry.npmjs.org/space-separated-tokens/-/space-separated-tokens-1.1.5.tgz" integrity sha512-q/JSVd1Lptzhf5bkYm4ob4iWPjx0KiRe3sRFBNrVqbJkFaBm5vbbowy1mymoPNLRa52+oadOhJ+K49wsSeSjTA== -sparse-bitfield@^3.0.3: - version "3.0.3" - resolved "https://registry.npmjs.org/sparse-bitfield/-/sparse-bitfield-3.0.3.tgz" - integrity sha1-/0rm5oZWBWuks+eSqzM004JzyhE= +spawndamnit@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/spawndamnit/-/spawndamnit-2.0.0.tgz#9f762ac5c3476abb994b42ad592b5ad22bb4b0ad" + integrity sha512-j4JKEcncSjFlqIwU5L/rp2N5SIPsdxaRsIv678+TZxZ0SRDJTm8JrxJMjE/XuiEZNEir3S8l0Fa3Ke339WI4qA== dependencies: - memory-pager "^1.0.2" + cross-spawn "^5.1.0" + signal-exit "^3.0.2" spdx-correct@^3.0.0: version "3.1.1" @@ -26190,6 +26565,13 @@ stream-shift@^1.0.0: resolved "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz" integrity sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== +stream-transform@^2.1.3: + version "2.1.3" + resolved "https://registry.yarnpkg.com/stream-transform/-/stream-transform-2.1.3.tgz#a1c3ecd72ddbf500aa8d342b0b9df38f5aa598e3" + integrity sha512-9GHUiM5hMiCi6Y03jD2ARC1ettBXkQBoQAe7nJsPknnI0ow10aXjTnew8QtYQmLjzn974BnmWEAJgCY6ZP1DeQ== + dependencies: + mixme "^0.5.1" + streamsearch@0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/streamsearch/-/streamsearch-0.1.2.tgz" @@ -26343,6 +26725,15 @@ string.prototype.trimend@^1.0.4: call-bind "^1.0.2" define-properties "^1.1.3" +string.prototype.trimend@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimend/-/string.prototype.trimend-1.0.5.tgz#914a65baaab25fbdd4ee291ca7dde57e869cb8d0" + integrity sha512-I7RGvmjV4pJ7O3kdf+LXFpVfdNOxtCW/2C8f6jNiW4+PQchwxkCDzlk1/7p+Wl4bqFIZeF47qAHXLuHHWKAxog== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + string.prototype.trimstart@^1.0.4: version "1.0.4" resolved "https://registry.npmjs.org/string.prototype.trimstart/-/string.prototype.trimstart-1.0.4.tgz" @@ -26351,6 +26742,15 @@ string.prototype.trimstart@^1.0.4: call-bind "^1.0.2" define-properties "^1.1.3" +string.prototype.trimstart@^1.0.5: + version "1.0.5" + resolved "https://registry.yarnpkg.com/string.prototype.trimstart/-/string.prototype.trimstart-1.0.5.tgz#5466d93ba58cfa2134839f81d7f42437e8c01fef" + integrity sha512-THx16TJCGlsN0o6dl2o6ncWUsdgnLRSA23rRE5pyGBw/mLr3Ej/R2LaqCtgP8VNMGZsvMWnf9ooZPyY2bHvUFg== + dependencies: + call-bind "^1.0.2" + define-properties "^1.1.4" + es-abstract "^1.19.5" + string_decoder@^1.0.0, string_decoder@^1.1.1: version "1.3.0" resolved "https://registry.npmjs.org/string_decoder/-/string_decoder-1.3.0.tgz" @@ -27431,6 +27831,19 @@ 
tty-browserify@0.0.0: resolved "https://registry.npmjs.org/tty-browserify/-/tty-browserify-0.0.0.tgz" integrity sha1-oVe6QC2iTpv5V/mqadUk7tQpAaY= +tty-table@^4.1.5: + version "4.1.6" + resolved "https://registry.yarnpkg.com/tty-table/-/tty-table-4.1.6.tgz#6bd58338f36c94cce478c3337934d8a65ab40a73" + integrity sha512-kRj5CBzOrakV4VRRY5kUWbNYvo/FpOsz65DzI5op9P+cHov3+IqPbo1JE1ZnQGkHdZgNFDsrEjrfqqy/Ply9fw== + dependencies: + chalk "^4.1.2" + csv "^5.5.0" + kleur "^4.1.4" + smartwrap "^2.0.2" + strip-ansi "^6.0.0" + wcwidth "^1.0.1" + yargs "^17.1.1" + tunnel-agent@^0.6.0: version "0.6.0" resolved "https://registry.npmjs.org/tunnel-agent/-/tunnel-agent-0.6.0.tgz" @@ -27484,6 +27897,11 @@ type-fest@0.15.1: resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.15.1.tgz" integrity sha512-n+UXrN8i5ioo7kqT/nF8xsEzLaqFra7k32SEsSPwvXVGyAcRgV/FUQN/sgfptJTR1oRmmq7z4IXMFSM7im7C9A== +type-fest@^0.13.1: + version "0.13.1" + resolved "https://registry.yarnpkg.com/type-fest/-/type-fest-0.13.1.tgz#0172cb5bce80b0bd542ea348db50c7e21834d934" + integrity sha512-34R7HTnG0XIJcBSn5XhDd7nNFPRcXYRZrBB2O2jdKqYODldSzBAqzsWoZYYvduky73toYS/ESqxPvkDf/F0XMg== + type-fest@^0.18.0: version "0.18.1" resolved "https://registry.npmjs.org/type-fest/-/type-fest-0.18.1.tgz" @@ -27662,6 +28080,16 @@ unbox-primitive@^1.0.1: has-symbols "^1.0.2" which-boxed-primitive "^1.0.2" +unbox-primitive@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/unbox-primitive/-/unbox-primitive-1.0.2.tgz#29032021057d5e6cdbd08c5129c226dff8ed6f9e" + integrity sha512-61pPlCD9h51VoreyJ0BReideM3MDKMKnh6+V9L08331ipq6Q8OFXZYiqP6n/tbHx4s5I9uRhcye6BrbkizkBDw== + dependencies: + call-bind "^1.0.2" + has-bigints "^1.0.2" + has-symbols "^1.0.3" + which-boxed-primitive "^1.0.2" + unc-path-regex@^0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/unc-path-regex/-/unc-path-regex-0.1.2.tgz" @@ -28636,6 +29064,14 @@ which-module@^2.0.0: resolved "https://registry.npmjs.org/which-module/-/which-module-2.0.0.tgz" integrity sha1-2e8H3Od7mQK4o6j6SzHD4/fm6Ho= +which-pm@2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/which-pm/-/which-pm-2.0.0.tgz#8245609ecfe64bf751d0eef2f376d83bf1ddb7ae" + integrity sha512-Lhs9Pmyph0p5n5Z3mVnN0yWcbQYUAD7rbQUiMsQxOJ3T57k7RFe35SUwWMf7dsbDZks1uOmw4AecB/JMDj3v/w== + dependencies: + load-yaml-file "^0.2.0" + path-exists "^4.0.0" + which@^1.2.9, which@^1.3.1: version "1.3.1" resolved "https://registry.npmjs.org/which/-/which-1.3.1.tgz" @@ -28984,7 +29420,7 @@ yargonaut@^1.1.4: figlet "^1.1.1" parent-require "^1.0.0" -yargs-parser@18.x, yargs-parser@^18.1.2: +yargs-parser@18.x, yargs-parser@^18.1.2, yargs-parser@^18.1.3: version "18.1.3" resolved "https://registry.npmjs.org/yargs-parser/-/yargs-parser-18.1.3.tgz" integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ== @@ -29005,6 +29441,11 @@ yargs-parser@^15.0.1: camelcase "^5.0.0" decamelize "^1.2.0" +yargs-parser@^21.0.0: + version "21.0.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.0.1.tgz#0267f286c877a4f0f728fceb6f8a3e4cb95c6e35" + integrity sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg== + yargs@^14.2.2: version "14.2.3" resolved "https://registry.npmjs.org/yargs/-/yargs-14.2.3.tgz" @@ -29022,7 +29463,7 @@ yargs@^14.2.2: y18n "^4.0.0" yargs-parser "^15.0.1" -yargs@^15.3.1, yargs@^15.4.1: +yargs@^15.1.0, yargs@^15.3.1, yargs@^15.4.1: version "15.4.1" resolved "https://registry.npmjs.org/yargs/-/yargs-15.4.1.tgz" integrity 
sha512-aePbxDmcYW++PaqBsJ+HYUFwCdv4LVvdnhBy78E57PIor8/OVvhMrADFFEDh8DHDFRv/O9i3lPhsENjO7QX0+A== @@ -29065,6 +29506,19 @@ yargs@^17.0.1: y18n "^5.0.5" yargs-parser "^20.2.2" +yargs@^17.1.1: + version "17.5.1" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.5.1.tgz#e109900cab6fcb7fd44b1d8249166feb0b36e58e" + integrity sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA== + dependencies: + cliui "^7.0.2" + escalade "^3.1.1" + get-caller-file "^2.0.5" + require-directory "^2.1.1" + string-width "^4.2.3" + y18n "^5.0.5" + yargs-parser "^21.0.0" + yeast@0.1.2: version "0.1.2" resolved "https://registry.npmjs.org/yeast/-/yeast-0.1.2.tgz"