fix: Ensure uniqueness for products, variants, collections and discounts (#382)

This commit is contained in:
Oliver Windall Juhl
2021-09-10 16:03:44 +02:00
committed by GitHub
parent ec6d16e945
commit 27150959ff
8 changed files with 241 additions and 89 deletions

View File

@@ -1,46 +1,46 @@
const path = require("path");
const { Region, DiscountRule, Discount } = require("@medusajs/medusa");
const path = require("path")
const { Region, DiscountRule, Discount } = require("@medusajs/medusa")
const setupServer = require("../../../helpers/setup-server");
const { useApi } = require("../../../helpers/use-api");
const { initDb, useDb } = require("../../../helpers/use-db");
const adminSeeder = require("../../helpers/admin-seeder");
const setupServer = require("../../../helpers/setup-server")
const { useApi } = require("../../../helpers/use-api")
const { initDb, useDb } = require("../../../helpers/use-db")
const adminSeeder = require("../../helpers/admin-seeder")
jest.setTimeout(30000);
jest.setTimeout(30000)
describe("/admin/discounts", () => {
let medusaProcess;
let dbConnection;
let medusaProcess
let dbConnection
beforeAll(async () => {
const cwd = path.resolve(path.join(__dirname, "..", ".."));
dbConnection = await initDb({ cwd });
medusaProcess = await setupServer({ cwd });
});
const cwd = path.resolve(path.join(__dirname, "..", ".."))
dbConnection = await initDb({ cwd })
medusaProcess = await setupServer({ cwd })
})
afterAll(async () => {
const db = useDb();
await db.shutdown();
medusaProcess.kill();
});
const db = useDb()
await db.shutdown()
medusaProcess.kill()
})
describe("POST /admin/discounts", () => {
beforeEach(async () => {
try {
await adminSeeder(dbConnection);
await adminSeeder(dbConnection)
} catch (err) {
console.log(err);
throw err;
console.log(err)
throw err
}
});
})
afterEach(async () => {
const db = useDb();
await db.teardown();
});
const db = useDb()
await db.teardown()
})
it("creates a discount and updates it", async () => {
const api = useApi();
const api = useApi()
const response = await api
.post(
@@ -62,16 +62,16 @@ describe("/admin/discounts", () => {
}
)
.catch((err) => {
console.log(err);
});
console.log(err)
})
expect(response.status).toEqual(200);
expect(response.status).toEqual(200)
expect(response.data.discount).toEqual(
expect.objectContaining({
code: "HELLOWORLD",
usage_limit: 10,
})
);
)
const updated = await api
.post(
@@ -86,51 +86,51 @@ describe("/admin/discounts", () => {
}
)
.catch((err) => {
console.log(err);
});
console.log(err)
})
expect(updated.status).toEqual(200);
expect(updated.status).toEqual(200)
expect(updated.data.discount).toEqual(
expect.objectContaining({
code: "HELLOWORLD",
usage_limit: 20,
})
);
});
});
)
})
})
describe("testing for soft-deletion + uniqueness on discount codes", () => {
let manager;
let manager
beforeEach(async () => {
manager = dbConnection.manager;
manager = dbConnection.manager
try {
await adminSeeder(dbConnection);
await adminSeeder(dbConnection)
await manager.insert(DiscountRule, {
id: "test-discount-rule",
description: "Test discount rule",
type: "percentage",
value: 10,
allocation: "total",
});
})
await manager.insert(Discount, {
id: "test-discount",
code: "TESTING",
rule_id: "test-discount-rule",
is_dynamic: false,
is_disabled: false,
});
})
} catch (err) {
throw err;
throw err
}
});
})
afterEach(async () => {
const db = useDb();
await db.teardown();
});
const db = useDb()
await db.teardown()
})
it("successfully creates discount with soft-deleted discount code", async () => {
const api = useApi();
const api = useApi()
// First we soft-delete the discount
await api
@@ -140,8 +140,8 @@ describe("/admin/discounts", () => {
},
})
.catch((err) => {
console.log(err);
});
console.log(err)
})
// Lets try to create a discount with same code as deleted one
const response = await api
@@ -164,51 +164,81 @@ describe("/admin/discounts", () => {
}
)
.catch((err) => {
console.log(err);
});
console.log(err)
})
expect(response.status).toEqual(200);
expect(response.status).toEqual(200)
expect(response.data.discount).toEqual(
expect.objectContaining({
code: "TESTING",
usage_limit: 10,
})
);
});
});
)
})
it("should fail when creating a discount with an already existing code", async () => {
const api = useApi()
// Let's try to create a discount with the same code as an existing one
try {
await api.post(
"/admin/discounts",
{
code: "TESTING",
rule: {
description: "test",
type: "percentage",
value: 10,
allocation: "total",
},
usage_limit: 10,
},
{
headers: {
Authorization: "Bearer test_token",
},
}
)
} catch (error) {
expect(error.response.data.message).toMatch(
/duplicate key value violates unique constraint/i
)
}
})
})
describe("POST /admin/discounts/:discount_id/dynamic-codes", () => {
beforeEach(async () => {
const manager = dbConnection.manager;
const manager = dbConnection.manager
try {
await adminSeeder(dbConnection);
await adminSeeder(dbConnection)
await manager.insert(DiscountRule, {
id: "test-discount-rule",
description: "Dynamic rule",
type: "percentage",
value: 10,
allocation: "total",
});
})
await manager.insert(Discount, {
id: "test-discount",
code: "DYNAMIC",
is_dynamic: true,
is_disabled: false,
rule_id: "test-discount-rule",
});
})
} catch (err) {
console.log(err);
throw err;
console.log(err)
throw err
}
});
})
afterEach(async () => {
const db = useDb();
await db.teardown();
});
const db = useDb()
await db.teardown()
})
it("creates a dynamic discount", async () => {
const api = useApi();
const api = useApi()
const response = await api
.post(
@@ -223,10 +253,10 @@ describe("/admin/discounts", () => {
}
)
.catch((err) => {
console.log(err);
});
console.log(err)
})
expect(response.status).toEqual(200);
});
});
});
expect(response.status).toEqual(200)
})
})
})

View File

@@ -7,7 +7,7 @@ const { initDb, useDb } = require("../../../helpers/use-db")
const adminSeeder = require("../../helpers/admin-seeder")
const productSeeder = require("../../helpers/product-seeder")
jest.setTimeout(30000)
jest.setTimeout(50000)
describe("/admin/products", () => {
let medusaProcess
@@ -263,7 +263,7 @@ describe("/admin/products", () => {
const api = useApi()
const payload = {
title: "Test product",
title: "Test",
description: "test-product-description",
type: { value: "test-type" },
images: ["test-image.png", "test-image-2.png"],
@@ -293,10 +293,10 @@ describe("/admin/products", () => {
expect(response.status).toEqual(200)
expect(response.data.product).toEqual(
expect.objectContaining({
title: "Test product",
title: "Test",
discountable: true,
is_giftcard: false,
handle: "test-product",
handle: "test",
images: expect.arrayContaining([
expect.objectContaining({
url: "test-image.png",
@@ -646,7 +646,7 @@ describe("/admin/products", () => {
)
})
it("successfully creates product with soft-deleted product handle", async () => {
it("successfully creates product with soft-deleted product handle and deletes it again", async () => {
const api = useApi()
// First we soft-delete the product
@@ -691,6 +691,56 @@ describe("/admin/products", () => {
expect(res.status).toEqual(200)
expect(res.data.product.handle).toEqual("test-product")
// Delete product again to ensure uniqueness is enforced in all cases
const response2 = await api
.delete("/admin/products/test-product", {
headers: {
Authorization: "Bearer test_token",
},
})
.catch((err) => {
console.log(err)
})
expect(response2.status).toEqual(200)
expect(response2.data.id).toEqual("test-product")
})
it("should fail when creating a product with a handle that already exists", async () => {
const api = useApi()
// Let's try to create a product with the same handle as an existing one
const payload = {
title: "Test product",
handle: "test-product",
description: "test-product-description",
type: { value: "test-type" },
images: ["test-image.png", "test-image-2.png"],
collection_id: "test-collection",
tags: [{ value: "123" }, { value: "456" }],
options: [{ title: "size" }, { title: "color" }],
variants: [
{
title: "Test variant",
inventory_quantity: 10,
prices: [{ currency_code: "usd", amount: 100 }],
options: [{ value: "large" }, { value: "green" }],
},
],
}
try {
await api.post("/admin/products", payload, {
headers: {
Authorization: "Bearer test_token",
},
})
} catch (error) {
expect(error.response.data.message).toMatch(
/duplicate key value violates unique constraint/i
)
}
})
it("successfully deletes product collection", async () => {
@@ -743,6 +793,28 @@ describe("/admin/products", () => {
expect(res.data.collection.handle).toEqual("test-collection")
})
it("should fail when creating a collection with a handle that already exists", async () => {
const api = useApi()
// Let's try to create a collection with the same handle as an existing one
const payload = {
title: "Another test collection",
handle: "test-collection",
}
try {
await api.post("/admin/collections", payload, {
headers: {
Authorization: "Bearer test_token",
},
})
} catch (error) {
expect(error.response.data.message).toMatch(
/duplicate key value violates unique constraint/i
)
}
})
it("successfully creates soft-deleted product variant", async () => {
const api = useApi()
@@ -769,7 +841,6 @@ describe("/admin/products", () => {
expect(response.status).toEqual(200)
expect(response.data.variant_id).toEqual("test-variant")
// Lets try to create a product collection with same handle as deleted one
const payload = {
title: "Second variant",
sku: "test-sku",

View File

@@ -0,0 +1,41 @@
import { MigrationInterface, QueryRunner } from "typeorm";

/** Shape of one partial unique index definition used by this migration. */
type PartialUniqueIndex = { index: string; table: string; column: string };

// The old (incorrectly scoped) indexes: unique only among soft-deleted rows.
// Listed in the order they are dropped by up(); down() recreates them in
// reverse order.
const deletedScoped: PartialUniqueIndex[] = [
  { index: "IDX_e08af711f3493df1e921c4c9ef", table: "product_collection", column: "handle" },
  { index: "IDX_77c4073c30ea7793f484750529", table: "product", column: "handle" },
  { index: "IDX_0683952543d7d3f4fffc427034", table: "product_variant", column: "sku" },
  { index: "IDX_410649600ce31c10c4b667ca10", table: "product_variant", column: "barcode" },
  { index: "IDX_5248fda27b9f16ef818604bb6f", table: "product_variant", column: "ean" },
  { index: "IDX_832f86daf8103491d634a967da", table: "product_variant", column: "upc" },
  { index: "IDX_ae3e22c67d7c7a969a363533c0", table: "discount", column: "code" },
];

// The replacement indexes: uniqueness enforced among live rows only
// (deleted_at IS NULL), so a soft-deleted record's handle/code/identifier can
// be reused while duplicates among active records are rejected.
const liveScoped: PartialUniqueIndex[] = [
  { index: "IDX_6f234f058bbbea810dce1d04d0", table: "product_collection", column: "handle" },
  { index: "IDX_cf9cc6c3f2e6414b992223fff1", table: "product", column: "handle" },
  { index: "IDX_2ca8cfbdafb998ecfd6d340389", table: "product_variant", column: "sku" },
  { index: "IDX_045d4a149c09f4704e0bc08dd4", table: "product_variant", column: "barcode" },
  { index: "IDX_b5b6225539ee8501082fbc0714", table: "product_variant", column: "ean" },
  { index: "IDX_aa16f61348be02dd07ce3fc54e", table: "product_variant", column: "upc" },
  { index: "IDX_f65bf52e2239ace276ece2b2f4", table: "discount", column: "code" },
];

/**
 * Re-scopes the partial unique indexes on product/collection handles, variant
 * identifiers (sku/barcode/ean/upc) and discount codes from soft-deleted rows
 * to live rows, matching the updated `@Index({ unique: true, where:
 * "deleted_at IS NULL" })` entity decorators.
 */
export class enforceUniqueness1631261634964 implements MigrationInterface {
  name = "enforceUniqueness1631261634964";

  public async up(queryRunner: QueryRunner): Promise<void> {
    // Drop the indexes that only guaranteed uniqueness among deleted rows.
    for (const { index } of deletedScoped) {
      await queryRunner.query(`DROP INDEX "${index}"`);
    }
    // Recreate uniqueness scoped to rows that have not been soft-deleted.
    for (const { index, table, column } of liveScoped) {
      await queryRunner.query(
        `CREATE UNIQUE INDEX "${index}" ON "${table}" ("${column}") WHERE deleted_at IS NULL`
      );
    }
  }

  public async down(queryRunner: QueryRunner): Promise<void> {
    // Undo in reverse creation order: drop the live-scoped indexes first…
    for (const { index } of [...liveScoped].reverse()) {
      await queryRunner.query(`DROP INDEX "${index}"`);
    }
    // …then restore the original deleted-scoped indexes (note the original
    // parenthesized WHERE clause is preserved exactly as generated).
    for (const { index, table, column } of [...deletedScoped].reverse()) {
      await queryRunner.query(
        `CREATE UNIQUE INDEX "${index}" ON "${table}" ("${column}") WHERE (deleted_at IS NOT NULL)`
      );
    }
  }
}

View File

@@ -24,7 +24,7 @@ export class Discount {
@PrimaryColumn()
id: string
@Index({ unique: true, where: "deleted_at IS NOT NULL" })
@Index({ unique: true, where: "deleted_at IS NULL" })
@Column()
code: string

View File

@@ -24,7 +24,7 @@ export class ProductCollection {
@Column()
title: string
@Index({ unique: true, where: "deleted_at IS NOT NULL" })
@Index({ unique: true, where: "deleted_at IS NULL" })
@Column({ nullable: true })
handle: string

View File

@@ -48,22 +48,22 @@ export class ProductVariant {
prices: MoneyAmount[]
@Column({ nullable: true })
@Index({ unique: true, where: "deleted_at IS NOT NULL" })
@Index({ unique: true, where: "deleted_at IS NULL" })
sku: string
@Column({ nullable: true })
@Index({ unique: true, where: "deleted_at IS NOT NULL" })
@Index({ unique: true, where: "deleted_at IS NULL" })
barcode: string
@Column({ nullable: true })
@Index({ unique: true, where: "deleted_at IS NOT NULL" })
@Index({ unique: true, where: "deleted_at IS NULL" })
ean: string
@Column({ nullable: true })
@Index({ unique: true, where: "deleted_at IS NOT NULL" })
@Index({ unique: true, where: "deleted_at IS NULL" })
upc: string
@Column({ nullable: true, default: 0, select:false })
@Column({ nullable: true, default: 0, select: false })
variant_rank: number
@Column({ type: "int" })

View File

@@ -40,7 +40,7 @@ export class Product {
@Column({ nullable: true })
description: string
@Index({ unique: true, where: "deleted_at IS NOT NULL" })
@Index({ unique: true, where: "deleted_at IS NULL" })
@Column({ nullable: true })
handle: string

View File

@@ -18,8 +18,13 @@ const pgSqliteGenerationMapping: {
let dbType: string
export function resolveDbType(pgSqlType: ColumnType): ColumnType {
if (!dbType) {
const { configModule } = getConfigFile(path.resolve("."), `medusa-config`)
dbType = configModule.projectConfig.database_type
try {
const { configModule } = getConfigFile(path.resolve("."), `medusa-config`)
dbType = configModule.projectConfig.database_type
} catch (error) {
// Default to Postgres to allow for e.g. migrations to run
dbType = "postgres"
}
}
if (dbType === "sqlite" && pgSqlType in pgSqliteTypeMapping) {
@@ -32,8 +37,13 @@ export function resolveDbGenerationStrategy(
pgSqlType: "increment" | "uuid" | "rowid"
): "increment" | "uuid" | "rowid" {
if (!dbType) {
const { configModule } = getConfigFile(path.resolve("."), `medusa-config`)
dbType = configModule.projectConfig.database_type
try {
const { configModule } = getConfigFile(path.resolve("."), `medusa-config`)
dbType = configModule.projectConfig.database_type
} catch (error) {
// Default to Postgres to allow for e.g. migrations to run
dbType = "postgres"
}
}
if (dbType === "sqlite" && pgSqlType in pgSqliteTypeMapping) {