chore(): Reorganize modules (#7210)

**What**
Move all modules to the modules directory
Adrien de Peretti
2024-05-02 17:33:34 +02:00
committed by GitHub
parent 7a351eef09
commit 4eae25e1ef
870 changed files with 91 additions and 62 deletions

View File

@@ -0,0 +1,6 @@
/dist
node_modules
.DS_store
.env*
.env
*.sql

View File

@@ -0,0 +1,49 @@
# @medusajs/workflow-engine-redis
## 0.0.4
### Patch Changes
- [#6869](https://github.com/medusajs/medusa/pull/6869) [`e603726985`](https://github.com/medusajs/medusa/commit/e60372698565315145037eb40fbe86c43f91cc16) Thanks [@carlos-r-l-rodrigues](https://github.com/carlos-r-l-rodrigues)! - Fix Workflow Engine subscribers response and error
- [#6924](https://github.com/medusajs/medusa/pull/6924) [`12fcb655cd`](https://github.com/medusajs/medusa/commit/12fcb655cd0e398e75acce39166d14db13aa2c08) Thanks [@adrien2p](https://github.com/adrien2p)! - Chore/workflow engine loader redis
- [#6886](https://github.com/medusajs/medusa/pull/6886) [`a164c0d512`](https://github.com/medusajs/medusa/commit/a164c0d5126a40e2bc669f9fc2883be502a15036) Thanks [@carlos-r-l-rodrigues](https://github.com/carlos-r-l-rodrigues)! - Async steps marked as success if return StepResponse
- [#6901](https://github.com/medusajs/medusa/pull/6901) [`82a176e30e`](https://github.com/medusajs/medusa/commit/82a176e30e47a7d11caaf31c3023bd8db588b465) Thanks [@adrien2p](https://github.com/adrien2p)! - chore(medusa-test-utils): Handle errors gracefully + Do not set Distributed storage on partial loading
- [#7161](https://github.com/medusajs/medusa/pull/7161) [`edcafa140c`](https://github.com/medusajs/medusa/commit/edcafa140c1869df3c420c2becfbf23b6af75ddc) Thanks [@carlos-r-l-rodrigues](https://github.com/carlos-r-l-rodrigues)! - Fix onStepAwaiting event
- [#6865](https://github.com/medusajs/medusa/pull/6865) [`8fd1488938`](https://github.com/medusajs/medusa/commit/8fd148893850eb66c5eae00c4ca9391a80ea2eb9) Thanks [@adrien2p](https://github.com/adrien2p)! - chore: medusa shutdown
- Updated dependencies [[`0c0b425de7`](https://github.com/medusajs/medusa/commit/0c0b425de7b154b80b712ab17b16215cf62d1e83), [`e603726985`](https://github.com/medusajs/medusa/commit/e60372698565315145037eb40fbe86c43f91cc16), [`8d356217bd`](https://github.com/medusajs/medusa/commit/8d356217bd31c97a196e861ee243822a4d924df7), [`1eeb1e9de3`](https://github.com/medusajs/medusa/commit/1eeb1e9de3e0b571735437b00968ee96e4aabad5), [`20e8df914e`](https://github.com/medusajs/medusa/commit/20e8df914ec5fdf8d562d4fa84f72c58c7056195), [`a164c0d512`](https://github.com/medusajs/medusa/commit/a164c0d5126a40e2bc669f9fc2883be502a15036), [`27f4f0d724`](https://github.com/medusajs/medusa/commit/27f4f0d7243367c2dfc6012bf1f6b7400a77ec7b), [`e0b02a1012`](https://github.com/medusajs/medusa/commit/e0b02a1012981c29830d7779f59ebe805bbfd137), [`e944a627f0`](https://github.com/medusajs/medusa/commit/e944a627f074fb39a56f4bc7b3d6d315736ebf7c), [`1a48fe0282`](https://github.com/medusajs/medusa/commit/1a48fe0282a8bc1f8548a4736255e457d173da09), [`86f499de2f`](https://github.com/medusajs/medusa/commit/86f499de2f31356ab36ad5e93f27345443b3e5f6), [`09a2220569`](https://github.com/medusajs/medusa/commit/09a22205693da62fbf8fd450535d5024cb9c01d1), [`78f603e4f1`](https://github.com/medusajs/medusa/commit/78f603e4f18c9d16f4b58a2189c959026453d8b2), [`cc557c8752`](https://github.com/medusajs/medusa/commit/cc557c8752fd0554f5a1b58522d9a88dc43a8509), [`dd35a4dbff`](https://github.com/medusajs/medusa/commit/dd35a4dbff10c86ea3c5f7f817c18b6e60d599e3), [`58c68f6715`](https://github.com/medusajs/medusa/commit/58c68f67156e993255fbc25d91db15ae23bc95c0), [`1bcb13f892`](https://github.com/medusajs/medusa/commit/1bcb13f892bc61db21b3fc6bdbce85f747aeec4c), [`82a176e30e`](https://github.com/medusajs/medusa/commit/82a176e30e47a7d11caaf31c3023bd8db588b465), [`11517f0faf`](https://github.com/medusajs/medusa/commit/11517f0fafdf00af256240448b58d149d8b6f600), [`62b9dcc6c1`](https://github.com/medusajs/medusa/commit/62b9dcc6c1ce46aadb7944215006c12da3c9f619), [`5d9aea053c`](https://github.com/medusajs/medusa/commit/5d9aea053ce6e04f242f86fb9053c13dec515d5b), [`e26cda4b6a`](https://github.com/medusajs/medusa/commit/e26cda4b6afb7fb25f0b0a7a7ce20b7f914d35db), [`bc06ad2db4`](https://github.com/medusajs/medusa/commit/bc06ad2db48c999023ab823fefc1375196976e9b), [`18f3aacee6`](https://github.com/medusajs/medusa/commit/18f3aacee6752854d377faa806f4cc67bc71456b), [`232322d035`](https://github.com/medusajs/medusa/commit/232322d03515f81e56867ff8c765b8409399ee68), [`38c971f111`](https://github.com/medusajs/medusa/commit/38c971f111af69f176e7e9892eb59f5bae831fa7), [`45c49e89f2`](https://github.com/medusajs/medusa/commit/45c49e89f28123ef622fc1c07253bae94fd74875), [`71aeda7347`](https://github.com/medusajs/medusa/commit/71aeda7347a1dc7039be05071ce90a6dca5f9154), [`528ef4ca90`](https://github.com/medusajs/medusa/commit/528ef4ca90bb2cf6173dccc9fd6a9f9932ff9b76), [`65794f4bb5`](https://github.com/medusajs/medusa/commit/65794f4bb56e4fd3f0ccb7656a948f856f05324e), [`93ef94cad3`](https://github.com/medusajs/medusa/commit/93ef94cad3ddc5b6973b4e48e422b0aa0e6ddbbe), [`4cf71af07d`](https://github.com/medusajs/medusa/commit/4cf71af07d1807c83df3889c1774f82cbd1b9a6f), [`4b57c5d286`](https://github.com/medusajs/medusa/commit/4b57c5d286f9dc6e2098c67e9fecb0d93175b5a1), [`c78915c7c5`](https://github.com/medusajs/medusa/commit/c78915c7c5e91a99c1b1bae932656c8d86b17daf), [`18f3aacee6`](https://github.com/medusajs/medusa/commit/18f3aacee6752854d377faa806f4cc67bc71456b), 
[`667c8609cc`](https://github.com/medusajs/medusa/commit/667c8609ccf3850f5df8cf784723a95bd0d6d2a6), [`f175cac4af`](https://github.com/medusajs/medusa/commit/f175cac4af63b71066a8398ecf9beaa6f28b20cc), [`0a9b9b073d`](https://github.com/medusajs/medusa/commit/0a9b9b073dd2d3f4aa5e5cb1c16e2221a7200e0d), [`a6562d2a41`](https://github.com/medusajs/medusa/commit/a6562d2a41453cbe7aa43be352c4924e3e4c79d5), [`00e6b21bb5`](https://github.com/medusajs/medusa/commit/00e6b21bb50dbc886bc37ad052a1c40ce865294e), [`8fd1488938`](https://github.com/medusajs/medusa/commit/8fd148893850eb66c5eae00c4ca9391a80ea2eb9), [`1c6ba4468e`](https://github.com/medusajs/medusa/commit/1c6ba4468eab1440931c88929affd5b4c593f377)]:
- @medusajs/types@1.11.16
- @medusajs/workflows-sdk@0.1.6
- @medusajs/modules-sdk@1.12.11
- @medusajs/utils@1.11.9
## 0.0.3
### Patch Changes
- [#6330](https://github.com/medusajs/medusa/pull/6330) [`0c2a460751`](https://github.com/medusajs/medusa/commit/0c2a460751644021056d0f99d9b1fffe509fb7ab) Thanks [@carlos-r-l-rodrigues](https://github.com/carlos-r-l-rodrigues)! - Workflow engine API
- [#6700](https://github.com/medusajs/medusa/pull/6700) [`8f8a4f9b13`](https://github.com/medusajs/medusa/commit/8f8a4f9b1353087d98f6cc75346d43a7f49901a8) Thanks [@olivermrbl](https://github.com/olivermrbl)! - chore: Version all modules to allow for initial testing
- Updated dependencies [[`1fd0457c15`](https://github.com/medusajs/medusa/commit/1fd0457c153b2ef7657c052878d8e5364e1b324a), [`9288f53327`](https://github.com/medusajs/medusa/commit/9288f53327b8ce617af92ed8d14d9459cbfeb13c), [`d4b921f3db`](https://github.com/medusajs/medusa/commit/d4b921f3dbe0a38f1565a8de759996c70798d58e), [`ac86362e81`](https://github.com/medusajs/medusa/commit/ac86362e81d8523cb8e3dfad026fc94658513018), [`e4acde1aa2`](https://github.com/medusajs/medusa/commit/e4acde1aa2eb57f07e6692fe8b61f728948b9a96), [`1a661adf3e`](https://github.com/medusajs/medusa/commit/1a661adf3ef4991aa6e237dd894b6a5c47cd4aca), [`56cbf88115`](https://github.com/medusajs/medusa/commit/56cbf88115994adea7037c3f2814f0c96af3cfc0), [`36a61658f9`](https://github.com/medusajs/medusa/commit/36a61658f969a7b19c84a1e621ad1464927cafb1), [`04a532e5ef`](https://github.com/medusajs/medusa/commit/04a532e5efabbf75b1e4155520b1da175b686ffc), [`0c2a460751`](https://github.com/medusajs/medusa/commit/0c2a460751644021056d0f99d9b1fffe509fb7ab), [`c319edb8e0`](https://github.com/medusajs/medusa/commit/c319edb8e0ecd13d086652147667916e5abab2d8), [`0b9fcb6324`](https://github.com/medusajs/medusa/commit/0b9fcb6324eee9f2556c7e6317775fae93b12a47), [`586df9da25`](https://github.com/medusajs/medusa/commit/586df9da250e492442769f5bac2f8b3de1d46f05), [`b3d826497b`](https://github.com/medusajs/medusa/commit/b3d826497b3dae5e1b26b7924706c24fd5e87ca5), [`a86c87fe14`](https://github.com/medusajs/medusa/commit/a86c87fe1442afce9285e39255914e01012b4449), [`640eccd5dd`](https://github.com/medusajs/medusa/commit/640eccd5ddbb163e0f987ce6c772f1129c2e2632), [`8ea37d03c9`](https://github.com/medusajs/medusa/commit/8ea37d03c914a5004a3e42770668b2d1f7f8f564), [`339a946f38`](https://github.com/medusajs/medusa/commit/339a946f389033c21e05338f9dbf07d88e140533), [`ac829fc67f`](https://github.com/medusajs/medusa/commit/ac829fc67f7495b08f28e55923c59f0fd6320311), [`d9d5afc3cf`](https://github.com/medusajs/medusa/commit/d9d5afc3cfc29221d0e65bff7b78474a8fb8f31f), [`c3c4f49fc2`](https://github.com/medusajs/medusa/commit/c3c4f49fc2126f950e69e291ca939ca88a15afd3), [`9288f53327`](https://github.com/medusajs/medusa/commit/9288f53327b8ce617af92ed8d14d9459cbfeb13c), [`0d46abf0ff`](https://github.com/medusajs/medusa/commit/0d46abf0ffa4c5e03bf7d2a9cdf1db828a76bea8), [`fafde4f54d`](https://github.com/medusajs/medusa/commit/fafde4f54d3ef75a7d382e6cbf94e38b3deae99b), [`8dad2b51a2`](https://github.com/medusajs/medusa/commit/8dad2b51a26c4c3c14a6c95f70424c8bef2ad63e), [`0c705d7bd4`](https://github.com/medusajs/medusa/commit/0c705d7bd41a768c48017ae95b3c8414d96c6acb), [`a6d7070dd6`](https://github.com/medusajs/medusa/commit/a6d7070dd669c21ea19d70434d42c2f8167dc309), [`1d91b7429b`](https://github.com/medusajs/medusa/commit/1d91b7429beebd6f09d5027f7f7e1fe74ce3a8ff), [`168f02f138`](https://github.com/medusajs/medusa/commit/168f02f138ad101e1013f2c8c3f8dc19de12accf), [`1ed5f918c3`](https://github.com/medusajs/medusa/commit/1ed5f918c31794a70aca4a4e4cd83cf456593baa), [`c20eb15cd9`](https://github.com/medusajs/medusa/commit/c20eb15cd9b1bd90c8d01f68eca6f0f181cd902d), [`e5945479e0`](https://github.com/medusajs/medusa/commit/e5945479e091d9560ae3e7240306a31031ef4584), [`f5c2256286`](https://github.com/medusajs/medusa/commit/f5c22562867f412040f8bc6c55ab5de3a3735e62), [`000eb61e33`](https://github.com/medusajs/medusa/commit/000eb61e33e0302db95ee6ad1656ea9b430ed471), [`d550be3685`](https://github.com/medusajs/medusa/commit/d550be3685423218d47a20c57a5e06758f4a961a), 
[`4b06c17dc0`](https://github.com/medusajs/medusa/commit/4b06c17dc00dc9ed50898573aee704b84dd181b3), [`62a7bcc30c`](https://github.com/medusajs/medusa/commit/62a7bcc30cbc7b234b2b51d7858439951a84edeb), [`f611865553`](https://github.com/medusajs/medusa/commit/f611865553b1f6914bed58ef2eacdf5e929d60dc), [`8f8a4f9b13`](https://github.com/medusajs/medusa/commit/8f8a4f9b1353087d98f6cc75346d43a7f49901a8), [`6500f18b9b`](https://github.com/medusajs/medusa/commit/6500f18b9b80c5c9c473489e7e740d55dca74303), [`ce39b9b66e`](https://github.com/medusajs/medusa/commit/ce39b9b66e8c277ec0691ea6d0a950003be09cc1), [`a6a4b3f01a`](https://github.com/medusajs/medusa/commit/a6a4b3f01a6d2bd97b1580c59134279a1b033a5d), [`4d51f095b3`](https://github.com/medusajs/medusa/commit/4d51f095b3f98f468cefb760512563f7b77bb9cf), [`4625bd1241`](https://github.com/medusajs/medusa/commit/4625bd12416275b09c22cde4a09cb0f68df5d7c1), [`56b0b45304`](https://github.com/medusajs/medusa/commit/56b0b4530401a6ec5aa155874d371e45bb388fe2), [`cc1b66842c`](https://github.com/medusajs/medusa/commit/cc1b66842cbb37c6eab84e2d8b74844c214f38d7), [`24fb102a56`](https://github.com/medusajs/medusa/commit/24fb102a564b1253d1f8b039bb1e435cc5312fbb), [`e85463b2a7`](https://github.com/medusajs/medusa/commit/e85463b2a717751de2e21c39a4c745449b31affe)]:
- @medusajs/types@1.11.14
- @medusajs/utils@1.11.7
- @medusajs/workflows-sdk@0.1.4
- @medusajs/modules-sdk@1.12.9
## 0.0.2
### Patch Changes
- [#6128](https://github.com/medusajs/medusa/pull/6128) [`302323916`](https://github.com/medusajs/medusa/commit/302323916b6d8eaf571cd59b5fc92a913af207de) Thanks [@carlos-r-l-rodrigues](https://github.com/carlos-r-l-rodrigues)! - Modules: Workflows Engine in-memory and Redis
- Updated dependencies [[`68ddd866a5`](https://github.com/medusajs/medusa/commit/68ddd866a5ff9414e2db5b80d75acc5e81948540), [`72bc52231c`](https://github.com/medusajs/medusa/commit/72bc52231ca3a72fa6d197a248fe07a938ed0d85), [`99045848f`](https://github.com/medusajs/medusa/commit/99045848fd3e863359c7878d9bc05271ed083a0e), [`af7af7374`](https://github.com/medusajs/medusa/commit/af7af737455daa0f330840a9678e6339e519dfe6), [`fc6b1772a7`](https://github.com/medusajs/medusa/commit/fc6b1772a71582bb48602c5cac7b2297e9d267a9), [`a9b4214503`](https://github.com/medusajs/medusa/commit/a9b42145032ee88aa922a11fe03e777b140c68f4), [`d85fee42e`](https://github.com/medusajs/medusa/commit/d85fee42ee7f661310584dfee5741d6c53b989bb), [`5e655dd59`](https://github.com/medusajs/medusa/commit/5e655dd59bda4ffface28db38021ba71cae6de10), [`b132ff7669`](https://github.com/medusajs/medusa/commit/b132ff76693148b3a06373c168e8dd5e02970757), [`e28fa7fbdf`](https://github.com/medusajs/medusa/commit/e28fa7fbdf45c5b1fa19848db731132a0bf1757d), [`a12c28b7d5`](https://github.com/medusajs/medusa/commit/a12c28b7d5faed733bebbb4963dff50b9c8a33bc), [`b782d3bcb7`](https://github.com/medusajs/medusa/commit/b782d3bcb7e8088a962584b9a55200dd29c2161c), [`2b9f98895e`](https://github.com/medusajs/medusa/commit/2b9f98895eaca255e01278674b11cd7cb69b388f), [`7f7cb2a263`](https://github.com/medusajs/medusa/commit/7f7cb2a263c26baf540b05a40ab3732ffeb0c73c), [`302323916`](https://github.com/medusajs/medusa/commit/302323916b6d8eaf571cd59b5fc92a913af207de), [`da5cc4cf7`](https://github.com/medusajs/medusa/commit/da5cc4cf7f7f0ef40d409704a95b025ce95477f4), [`daecd82a7`](https://github.com/medusajs/medusa/commit/daecd82a7cdf7315599f464999690414c20d6748), [`738e9115e`](https://github.com/medusajs/medusa/commit/738e9115ec920d48bc52b8a690847e58c87ca28e), [`ce81cade88`](https://github.com/medusajs/medusa/commit/ce81cade887659cefe9638e3c1c2807378191c62), [`fd78f5e24`](https://github.com/medusajs/medusa/commit/fd78f5e24263f5e158c3b7d11fbf0a4436e9c17a), [`192bc336cc`](https://github.com/medusajs/medusa/commit/192bc336cc2b6ec3820d94524c046dcd3c4ac7d9), [`06b33a9b4`](https://github.com/medusajs/medusa/commit/06b33a9b4525b77b1b14b35b973209700945654e), [`b6ac768698`](https://github.com/medusajs/medusa/commit/b6ac768698a3b49d0162cb49e628386f3352d034), [`19bbae61f8`](https://github.com/medusajs/medusa/commit/19bbae61f8de1ac0ed574caff17b33e17705005a), [`130c641e5c`](https://github.com/medusajs/medusa/commit/130c641e5c91cf831de64fb87aebbfdc4d23530d), [`fade8ea7bf`](https://github.com/medusajs/medusa/commit/fade8ea7bf560343ecbde116d226ac44053cdb8e), [`8472460f53`](https://github.com/medusajs/medusa/commit/8472460f533322cc4535199aa768ac163021bc79), [`68d8daccd`](https://github.com/medusajs/medusa/commit/68d8daccd2a8508a13e211130e49017198b51fab)]:
- @medusajs/types@1.11.11
- @medusajs/utils@1.11.4
- @medusajs/modules-sdk@1.12.7
- @medusajs/workflows-sdk@0.1.2

View File

@@ -0,0 +1 @@
# Workflow Orchestrator

View File

@@ -0,0 +1,5 @@
export * from "./workflow_1"
export * from "./workflow_2"
export * from "./workflow_async"
export * from "./workflow_step_timeout"
export * from "./workflow_transaction_timeout"

View File

@@ -0,0 +1,63 @@
import {
StepResponse,
createStep,
createWorkflow,
} from "@medusajs/workflows-sdk"
const step_1 = createStep(
"step_1",
jest.fn((input) => {
input.test = "test"
return new StepResponse(input, { compensate: 123 })
}),
jest.fn((compensateInput) => {
if (!compensateInput) {
return
}
console.log("reverted", compensateInput.compensate)
return new StepResponse({
reverted: true,
})
})
)
const step_2 = createStep(
"step_2",
jest.fn((input, context) => {
if (input) {
return new StepResponse({ notAsyncResponse: input.hey })
}
}),
jest.fn((_, context) => {
return new StepResponse({
step: context.metadata.action,
idempotency_key: context.metadata.idempotency_key,
reverted: true,
})
})
)
const step_3 = createStep(
"step_3",
jest.fn((res) => {
return new StepResponse({
done: {
inputFromSyncStep: res.notAsyncResponse,
},
})
})
)
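// workflow_1: step_1 mutates the input, step_2 runs synchronously, and a second
// invocation of step_2 is renamed to "new_step_name" and marked async, so the
// transaction stays open until setStepSuccess/setStepFailure is called for it.
// step_3 receives the synchronous step_2 result and produces the workflow result.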
createWorkflow("workflow_1", function (input) {
step_1(input)
const ret2 = step_2({ hey: "oh" })
step_2({ hey: "async hello" }).config({
name: "new_step_name",
async: true,
})
return step_3(ret2)
})

View File

@@ -0,0 +1,69 @@
import {
StepResponse,
createStep,
createWorkflow,
} from "@medusajs/workflows-sdk"
const step_1 = createStep(
"step_1",
jest.fn((input) => {
input.test = "test"
return new StepResponse(input, { compensate: 123 })
}),
jest.fn((compensateInput) => {
if (!compensateInput) {
return
}
console.log("reverted", compensateInput.compensate)
return new StepResponse({
reverted: true,
})
})
)
const step_2 = createStep(
"step_2",
jest.fn((input, context) => {
if (input) {
return new StepResponse({ notAsyncResponse: input.hey })
}
}),
jest.fn((_, context) => {
return new StepResponse({
step: context.metadata.action,
idempotency_key: context.metadata.idempotency_key,
reverted: true,
})
})
)
const step_3 = createStep(
"step_3",
jest.fn((res) => {
return new StepResponse({
done: {
inputFromSyncStep: res.notAsyncResponse,
},
})
})
)
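// workflow_2 mirrors workflow_1 but is created with a retentionTime, so its
// workflow_execution row is kept after the transaction finishes instead of
// being removed (see the retention test in the integration suite).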
createWorkflow(
{
name: "workflow_2",
retentionTime: 1000,
},
function (input) {
step_1(input)
const ret2 = step_2({ hey: "oh" })
step_2({ hey: "async hello" }).config({
name: "new_step_name",
async: true,
})
return step_3(ret2)
}
)

View File

@@ -0,0 +1,29 @@
import {
StepResponse,
createStep,
createWorkflow,
} from "@medusajs/workflows-sdk"
import { setTimeout } from "timers/promises"
const step_1_background = createStep(
{
name: "step_1_background",
async: true,
},
jest.fn(async (input) => {
await setTimeout(200)
return new StepResponse(input)
})
)
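// workflow_async_background runs a single async ("background") step that resolves
// on its own after ~200ms; the first run returns while the flow is still invoking,
// and a later call or subscription observes the finished result.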
createWorkflow(
{
name: "workflow_async_background",
},
function (input) {
const resp = step_1_background(input)
return resp
}
)

View File

@@ -0,0 +1,51 @@
import {
StepResponse,
createStep,
createWorkflow,
} from "@medusajs/workflows-sdk"
import { setTimeout } from "timers/promises"
const step_1 = createStep(
"step_1",
jest.fn(async (input) => {
await setTimeout(200)
return new StepResponse(input, { compensate: 123 })
})
)
const step_1_async = createStep(
{
name: "step_1_async",
async: true,
timeout: 0.1, // 0.1 second
},
jest.fn(async (input) => {
return
})
)
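// Step timeouts are expressed in seconds (0.1 = 100ms). step_1 sleeps for 200ms,
// so its timeout expires and the whole transaction is reverted; the async variant
// exercises the same behaviour for a background step.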
createWorkflow(
{
name: "workflow_step_timeout",
},
function (input) {
const resp = step_1(input).config({
timeout: 0.1, // 0.1 second
})
return resp
}
)
createWorkflow(
{
name: "workflow_step_timeout_async",
},
function (input) {
const resp = step_1_async(input)
return resp
}
)

View File

@@ -0,0 +1,44 @@
import {
StepResponse,
createStep,
createWorkflow,
} from "@medusajs/workflows-sdk"
import { setTimeout } from "timers/promises"
const step_1 = createStep(
"step_1",
jest.fn(async (input) => {
await setTimeout(200)
return new StepResponse({
executed: true,
})
}),
jest.fn()
)
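// Here the timeout is set on the transaction itself rather than on a step: the
// 200ms step outlives the 0.1s transaction timeout, so the transaction reverts.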
createWorkflow(
{
name: "workflow_transaction_timeout",
timeout: 0.1, // 0.1 second
},
function (input) {
const resp = step_1(input)
return resp
}
)
createWorkflow(
{
name: "workflow_transaction_timeout_async",
timeout: 0.1, // 0.1 second
},
function (input) {
const resp = step_1(input).config({
async: true,
})
return resp
}
)

View File

@@ -0,0 +1,300 @@
import { MedusaApp } from "@medusajs/modules-sdk"
import {
TransactionStepTimeoutError,
TransactionTimeoutError,
} from "@medusajs/orchestration"
import { RemoteQueryFunction } from "@medusajs/types"
import { TransactionHandlerType, TransactionStepState } from "@medusajs/utils"
import { IWorkflowEngineService } from "@medusajs/workflows-sdk"
import { knex } from "knex"
import { setTimeout } from "timers/promises"
import "../__fixtures__"
import { DB_URL, TestDatabase } from "../utils"
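// A single knex connection is shared with MedusaApp via sharedResourcesConfig,
// so the module under test and the assertions talk to the same temporary database.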
const sharedPgConnection = knex<any, any>({
client: "pg",
searchPath: process.env.MEDUSA_WORKFLOW_ENGINE_DB_SCHEMA,
connection: {
connectionString: DB_URL,
debug: false,
},
})
const afterEach_ = async () => {
await TestDatabase.clearTables(sharedPgConnection)
}
describe("Workflow Orchestrator module", function () {
describe("Testing basic workflow", function () {
let workflowOrcModule: IWorkflowEngineService
let query: RemoteQueryFunction
afterEach(afterEach_)
beforeAll(async () => {
const {
runMigrations,
query: remoteQuery,
modules,
} = await MedusaApp({
sharedResourcesConfig: {
database: {
connection: sharedPgConnection,
},
},
modulesConfig: {
workflows: {
resolve: __dirname + "/../..",
options: {
redis: {
url: "localhost:6379",
},
},
},
},
})
query = remoteQuery
await runMigrations()
workflowOrcModule = modules.workflows as unknown as IWorkflowEngineService
})
afterEach(afterEach_)
it("should return a list of workflow executions and remove after completed when there is no retentionTime set", async () => {
await workflowOrcModule.run("workflow_1", {
input: {
value: "123",
},
throwOnError: true,
})
let executionsList = await query({
workflow_executions: {
fields: ["workflow_id", "transaction_id", "state"],
},
})
expect(executionsList).toHaveLength(1)
const { result } = await workflowOrcModule.setStepSuccess({
idempotencyKey: {
action: TransactionHandlerType.INVOKE,
stepId: "new_step_name",
workflowId: "workflow_1",
transactionId: executionsList[0].transaction_id,
},
stepResponse: { uhuuuu: "yeaah!" },
})
executionsList = await query({
workflow_executions: {
fields: ["id"],
},
})
expect(executionsList).toHaveLength(0)
expect(result).toEqual({
done: {
inputFromSyncStep: "oh",
},
})
})
it("should return a list of workflow executions and keep it saved when there is a retentionTime set", async () => {
await workflowOrcModule.run("workflow_2", {
input: {
value: "123",
},
throwOnError: true,
transactionId: "transaction_1",
})
let executionsList = await query({
workflow_executions: {
fields: ["id"],
},
})
expect(executionsList).toHaveLength(1)
await workflowOrcModule.setStepSuccess({
idempotencyKey: {
action: TransactionHandlerType.INVOKE,
stepId: "new_step_name",
workflowId: "workflow_2",
transactionId: "transaction_1",
},
stepResponse: { uhuuuu: "yeaah!" },
})
executionsList = await query({
workflow_executions: {
fields: ["id"],
},
})
expect(executionsList).toHaveLength(1)
})
it("should revert the entire transaction when a step timeout expires", async () => {
const { transaction, result, errors } = await workflowOrcModule.run(
"workflow_step_timeout",
{
input: {
myInput: "123",
},
throwOnError: false,
}
)
expect(transaction.flow.state).toEqual("reverted")
expect(result).toEqual({
myInput: "123",
})
expect(errors).toHaveLength(1)
expect(errors[0].action).toEqual("step_1")
expect(errors[0].error).toBeInstanceOf(TransactionStepTimeoutError)
})
it("should revert the entire transaction when the transaction timeout expires", async () => {
const { transaction, result, errors } = await workflowOrcModule.run(
"workflow_transaction_timeout",
{
input: {},
transactionId: "trx",
throwOnError: false,
}
)
expect(transaction.flow.state).toEqual("reverted")
expect(result).toEqual({ executed: true })
expect(errors).toHaveLength(1)
expect(errors[0].action).toEqual("step_1")
expect(
TransactionTimeoutError.isTransactionTimeoutError(errors[0].error)
).toBe(true)
})
it("should revert the entire transaction when a step timeout expires in a async step", async () => {
await workflowOrcModule.run("workflow_step_timeout_async", {
input: {
myInput: "123",
},
transactionId: "transaction_1",
throwOnError: false,
})
await setTimeout(200)
const { transaction, result, errors } = await workflowOrcModule.run(
"workflow_step_timeout_async",
{
input: {
myInput: "123",
},
transactionId: "transaction_1",
throwOnError: false,
}
)
expect(transaction.flow.state).toEqual("reverted")
expect(result).toEqual(undefined)
expect(errors).toHaveLength(1)
expect(errors[0].action).toEqual("step_1_async")
expect(
TransactionStepTimeoutError.isTransactionStepTimeoutError(
errors[0].error
)
).toBe(true)
})
it("should revert the entire transaction when the transaction timeout expires in a transaction containing an async step", async () => {
await workflowOrcModule.run("workflow_transaction_timeout_async", {
input: {},
transactionId: "transaction_1",
throwOnError: false,
})
await setTimeout(200)
const { transaction, result, errors } = await workflowOrcModule.run(
"workflow_transaction_timeout_async",
{
input: {},
transactionId: "transaction_1",
throwOnError: false,
}
)
expect(transaction.flow.state).toEqual("reverted")
expect(result).toEqual(undefined)
expect(errors).toHaveLength(1)
expect(errors[0].action).toEqual("step_1")
expect(
TransactionTimeoutError.isTransactionTimeoutError(errors[0].error)
).toBe(true)
})
it("should complete an async workflow that returns a StepResponse", async () => {
const { transaction, result } = await workflowOrcModule.run(
"workflow_async_background",
{
input: {
myInput: "123",
},
transactionId: "transaction_1",
throwOnError: false,
}
)
expect(transaction.flow.state).toEqual(TransactionStepState.INVOKING)
expect(result).toEqual(undefined)
await setTimeout(205)
const trx = await workflowOrcModule.run("workflow_async_background", {
input: {
myInput: "123",
},
transactionId: "transaction_1",
throwOnError: false,
})
expect(trx.transaction.flow.state).toEqual(TransactionStepState.DONE)
expect(trx.result).toEqual({
myInput: "123",
})
})
it("should subsctibe to a async workflow and receive the response when it finishes", (done) => {
const transactionId = "trx_123"
const onFinish = jest.fn(() => {
done()
})
void workflowOrcModule.run("workflow_async_background", {
input: {
myInput: "123",
},
transactionId,
throwOnError: false,
})
void workflowOrcModule.subscribe({
workflowId: "workflow_async_background",
transactionId,
subscriber: (event) => {
if (event.eventType === "onFinish") {
onFinish()
}
},
})
expect(onFinish).toHaveBeenCalledTimes(0)
})
})
})

View File

@@ -0,0 +1,6 @@
if (typeof process.env.DB_TEMP_NAME === "undefined") {
const tempName = parseInt(process.env.JEST_WORKER_ID || "1")
process.env.DB_TEMP_NAME = `medusa-workflow-engine-redis-${tempName}`
}
process.env.MEDUSA_WORKFLOW_ENGINE_DB_SCHEMA = "public"

View File

@@ -0,0 +1,3 @@
import { JestUtils } from "medusa-test-utils"
JestUtils.afterAllHookDropDatabase()

View File

@@ -0,0 +1,53 @@
import * as process from "process"
const DB_HOST = process.env.DB_HOST ?? "localhost"
const DB_USERNAME = process.env.DB_USERNAME ?? ""
const DB_PASSWORD = process.env.DB_PASSWORD
const DB_NAME = process.env.DB_TEMP_NAME
export const DB_URL = `postgres://${DB_USERNAME}${
DB_PASSWORD ? `:${DB_PASSWORD}` : ""
}@${DB_HOST}/${DB_NAME}`
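// e.g. postgres://postgres@localhost/medusa-workflow-engine-redis-1 (values are
// illustrative and depend on the DB_* environment variables above)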
const Redis = require("ioredis")
const redisUrl = process.env.REDIS_URL || "redis://localhost:6379"
const redis = new Redis(redisUrl)
interface TestDatabase {
clearTables(knex): Promise<void>
}
export const TestDatabase: TestDatabase = {
clearTables: async (knex) => {
await knex.raw(`
TRUNCATE TABLE workflow_execution CASCADE;
`)
await cleanRedis()
},
}
async function deleteKeysByPattern(pattern) {
const stream = redis.scanStream({
match: pattern,
count: 100,
})
for await (const keys of stream) {
if (keys.length) {
const pipeline = redis.pipeline()
keys.forEach((key) => pipeline.del(key))
await pipeline.exec()
}
}
}
async function cleanRedis() {
try {
await deleteKeysByPattern("bull:*")
await deleteKeysByPattern("dtrans:*")
} catch (error) {
console.error("Error:", error)
}
}

View File

@@ -0,0 +1 @@
export * from "./database"

View File

@@ -0,0 +1,21 @@
module.exports = {
moduleNameMapper: {
"^@models": "<rootDir>/src/models",
"^@services": "<rootDir>/src/services",
"^@repositories": "<rootDir>/src/repositories",
},
transform: {
"^.+\\.[jt]s?$": [
"ts-jest",
{
tsconfig: "tsconfig.spec.json",
isolatedModules: true,
},
],
},
testEnvironment: `node`,
moduleFileExtensions: [`js`, `ts`],
modulePathIgnorePatterns: ["dist/"],
setupFiles: ["<rootDir>/integration-tests/setup-env.js"],
setupFilesAfterEnv: ["<rootDir>/integration-tests/setup.js"],
}

View File

@@ -0,0 +1,8 @@
import * as entities from "./src/models"
module.exports = {
entities: Object.values(entities),
schema: "public",
clientUrl: "postgres://postgres@localhost/medusa-workflow-engine-redis",
type: "postgresql",
}

View File

@@ -0,0 +1,61 @@
{
"name": "@medusajs/workflow-engine-redis",
"version": "0.0.4",
"description": "Medusa Workflow Orchestrator module using Redis to track workflows executions",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist"
],
"engines": {
"node": ">=16"
},
"repository": {
"type": "git",
"url": "https://github.com/medusajs/medusa",
"directory": "packages/workflow-engine-redis"
},
"publishConfig": {
"access": "public"
},
"author": "Medusa",
"license": "MIT",
"scripts": {
"watch": "tsc --build --watch",
"watch:test": "tsc --build tsconfig.spec.json --watch",
"prepublishOnly": "cross-env NODE_ENV=production tsc --build && tsc-alias -p tsconfig.json",
"build": "rimraf dist && tsc --build && tsc-alias -p tsconfig.json",
"test": "jest --passWithNoTests --runInBand --bail --forceExit -- src",
"test:integration": "jest --forceExit -- integration-tests/**/__tests__/**/*.ts",
"migration:generate": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:generate",
"migration:initial": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:create --initial",
"migration:create": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:create",
"migration:up": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm migration:up",
"orm:cache:clear": " MIKRO_ORM_CLI=./mikro-orm.config.dev.ts mikro-orm cache:clear"
},
"devDependencies": {
"@mikro-orm/cli": "5.9.7",
"cross-env": "^5.2.1",
"jest": "^29.6.3",
"medusa-test-utils": "^1.1.44",
"rimraf": "^3.0.2",
"ts-jest": "^29.1.1",
"ts-node": "^10.9.1",
"tsc-alias": "^1.8.6",
"typescript": "^5.1.6"
},
"dependencies": {
"@medusajs/modules-sdk": "^1.12.11",
"@medusajs/types": "^1.11.16",
"@medusajs/utils": "^1.11.9",
"@medusajs/workflows-sdk": "^0.1.6",
"@mikro-orm/core": "5.9.7",
"@mikro-orm/migrations": "5.9.7",
"@mikro-orm/postgresql": "5.9.7",
"awilix": "^8.0.0",
"bullmq": "^5.4.2",
"dotenv": "^16.4.5",
"ioredis": "^5.3.2",
"knex": "2.4.2"
}
}

View File

@@ -0,0 +1,22 @@
import { Modules } from "@medusajs/modules-sdk"
import { ModulesSdkUtils } from "@medusajs/utils"
import * as models from "@models"
import { moduleDefinition } from "./module-definition"
export default moduleDefinition
const migrationScriptOptions = {
moduleName: Modules.WORKFLOW_ENGINE,
models: models,
pathToMigrations: __dirname + "/migrations",
}
export const runMigrations = ModulesSdkUtils.buildMigrationScript(
migrationScriptOptions
)
export const revertMigration = ModulesSdkUtils.buildRevertMigrationScript(
migrationScriptOptions
)
export * from "./initialize"
export * from "./loaders"

View File

@@ -0,0 +1,35 @@
import {
ExternalModuleDeclaration,
InternalModuleDeclaration,
MODULE_PACKAGE_NAMES,
MedusaModule,
Modules,
} from "@medusajs/modules-sdk"
import { ModulesSdkTypes } from "@medusajs/types"
import { IWorkflowEngineService } from "@medusajs/workflows-sdk"
import { moduleDefinition } from "../module-definition"
import { InitializeModuleInjectableDependencies } from "../types"
export const initialize = async (
options?:
| ModulesSdkTypes.ModuleServiceInitializeOptions
| ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions
| ExternalModuleDeclaration
| InternalModuleDeclaration,
injectedDependencies?: InitializeModuleInjectableDependencies
): Promise<IWorkflowEngineService> => {
const loaded =
// eslint-disable-next-line max-len
await MedusaModule.bootstrap<IWorkflowEngineService>({
moduleKey: Modules.WORKFLOW_ENGINE,
defaultPath: MODULE_PACKAGE_NAMES[Modules.WORKFLOW_ENGINE],
declaration: options as
| InternalModuleDeclaration
| ExternalModuleDeclaration,
injectedDependencies,
moduleExports: moduleDefinition,
})
return loaded[Modules.WORKFLOW_ENGINE]
}

View File

@@ -0,0 +1,34 @@
import { Modules } from "@medusajs/modules-sdk"
import { ModuleJoinerConfig } from "@medusajs/types"
import { MapToConfig } from "@medusajs/utils"
import { WorkflowExecution } from "@models"
import moduleSchema from "./schema"
export const LinkableKeys = {
workflow_execution_id: WorkflowExecution.name,
}
const entityLinkableKeysMap: MapToConfig = {}
Object.entries(LinkableKeys).forEach(([key, value]) => {
entityLinkableKeysMap[value] ??= []
entityLinkableKeysMap[value].push({
mapTo: key,
valueFrom: key.split("_").pop()!,
})
})
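// e.g. entityNameToLinkableKeysMap.WorkflowExecution = [{ mapTo: "workflow_execution_id", valueFrom: "id" }]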
export const entityNameToLinkableKeysMap: MapToConfig = entityLinkableKeysMap
export const joinerConfig: ModuleJoinerConfig = {
serviceName: Modules.WORKFLOW_ENGINE,
primaryKeys: ["id"],
schema: moduleSchema,
linkableKeys: LinkableKeys,
alias: {
name: ["workflow_execution", "workflow_executions"],
args: {
entity: WorkflowExecution.name,
methodSuffix: "WorkflowExecution",
},
},
}

View File

@@ -0,0 +1,36 @@
import {
InternalModuleDeclaration,
LoaderOptions,
Modules,
} from "@medusajs/modules-sdk"
import { ModulesSdkTypes } from "@medusajs/types"
import { ModulesSdkUtils } from "@medusajs/utils"
import { EntitySchema } from "@mikro-orm/core"
import * as WorkflowOrchestratorModels from "../models"
export default async (
{
options,
container,
logger,
}: LoaderOptions<
| ModulesSdkTypes.ModuleServiceInitializeOptions
| ModulesSdkTypes.ModuleServiceInitializeCustomDataLayerOptions
>,
moduleDeclaration?: InternalModuleDeclaration
): Promise<void> => {
const entities = Object.values(
WorkflowOrchestratorModels
) as unknown as EntitySchema[]
const pathToMigrations = __dirname + "/../migrations"
await ModulesSdkUtils.mikroOrmConnectionLoader({
moduleName: Modules.WORKFLOW_ENGINE,
entities,
container,
options,
moduleDeclaration,
logger,
pathToMigrations,
})
}

View File

@@ -0,0 +1,9 @@
import { MikroOrmBaseRepository, ModulesSdkUtils } from "@medusajs/utils"
import * as ModuleModels from "@models"
import * as ModuleServices from "@services"
export default ModulesSdkUtils.moduleContainerLoaderFactory({
moduleModels: ModuleModels,
moduleServices: ModuleServices,
moduleRepositories: { BaseRepository: MikroOrmBaseRepository },
})

View File

@@ -0,0 +1,4 @@
export * from "./connection"
export * from "./container"
export * from "./redis"
export * from "./utils"

View File

@@ -0,0 +1,88 @@
import { LoaderOptions } from "@medusajs/modules-sdk"
import { asValue } from "awilix"
import Redis from "ioredis"
import { RedisWorkflowsOptions } from "../types"
export default async ({
container,
logger,
options,
dataLoaderOnly,
}: LoaderOptions): Promise<void> => {
const {
url,
options: redisOptions,
pubsub,
} = options?.redis as RedisWorkflowsOptions
// TODO: get default from ENV VAR
if (!url) {
throw Error(
"No `redis.url` provided in `workflowOrchestrator` module options. It is required for the Workflow Orchestrator Redis."
)
}
const cnnPubSub = pubsub ?? { url, options: redisOptions }
const queueName = options?.queueName ?? "medusa-workflows"
let connection
let redisPublisher
let redisSubscriber
let workerConnection
try {
connection = await getConnection(url, redisOptions)
workerConnection = await getConnection(url, {
...(redisOptions ?? {}),
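// BullMQ workers require maxRetriesPerRequest: null so blocking Redis commands
// are not interrupted by ioredis retry limits.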
maxRetriesPerRequest: null,
})
logger?.info(
`Connection to Redis in module 'workflow-engine-redis' established`
)
} catch (err) {
logger?.error(
`An error occurred while connecting to Redis in module 'workflow-engine-redis': ${err}`
)
}
try {
redisPublisher = await getConnection(cnnPubSub.url, cnnPubSub.options)
redisSubscriber = await getConnection(cnnPubSub.url, cnnPubSub.options)
logger?.info(
`Connection to Redis PubSub in module 'workflow-engine-redis' established`
)
} catch (err) {
logger?.error(
`An error occurred while connecting to Redis PubSub in module 'workflow-engine-redis': ${err}`
)
}
container.register({
partialLoading: asValue(true),
redisConnection: asValue(connection),
redisWorkerConnection: asValue(workerConnection),
redisPublisher: asValue(redisPublisher),
redisSubscriber: asValue(redisSubscriber),
redisQueueName: asValue(queueName),
redisDisconnectHandler: asValue(async () => {
connection.disconnect()
workerConnection.disconnect()
redisPublisher.disconnect()
redisSubscriber.disconnect()
}),
})
}
async function getConnection(url, redisOptions) {
const connection = new Redis(url, {
lazyConnect: true,
...(redisOptions ?? {}),
})
await new Promise(async (resolve) => {
await connection.connect(resolve)
})
return connection
}

View File

@@ -0,0 +1,11 @@
import { asClass, asValue } from "awilix"
import { RedisDistributedTransactionStorage } from "../utils"
export default async ({ container, dataLoaderOnly }): Promise<void> => {
container.register({
redisDistributedTransactionStorage: asClass(
RedisDistributedTransactionStorage
).singleton(),
dataLoaderOnly: asValue(!!dataLoaderOnly),
})
}

View File

@@ -0,0 +1,41 @@
import { Migration } from "@mikro-orm/migrations"
export class Migration20231221104256 extends Migration {
async up(): Promise<void> {
this.addSql(
`
CREATE TABLE IF NOT EXISTS workflow_execution
(
id character varying NOT NULL,
workflow_id character varying NOT NULL,
transaction_id character varying NOT NULL,
execution jsonb NULL,
context jsonb NULL,
state character varying NOT NULL,
created_at timestamp WITHOUT time zone NOT NULL DEFAULT Now(),
updated_at timestamp WITHOUT time zone NOT NULL DEFAULT Now(),
deleted_at timestamp WITHOUT time zone NULL,
CONSTRAINT "PK_workflow_execution_workflow_id_transaction_id" PRIMARY KEY ("workflow_id", "transaction_id")
);
CREATE UNIQUE INDEX IF NOT EXISTS "IDX_workflow_execution_id" ON "workflow_execution" ("id");
CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_workflow_id" ON "workflow_execution" ("workflow_id") WHERE deleted_at IS NULL;
CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_transaction_id" ON "workflow_execution" ("transaction_id") WHERE deleted_at IS NULL;
CREATE INDEX IF NOT EXISTS "IDX_workflow_execution_state" ON "workflow_execution" ("state") WHERE deleted_at IS NULL;
`
)
}
async down(): Promise<void> {
this.addSql(
`
DROP INDEX "IDX_workflow_execution_id";
DROP INDEX "IDX_workflow_execution_workflow_id";
DROP INDEX "IDX_workflow_execution_transaction_id";
DROP INDEX "IDX_workflow_execution_state";
DROP TABLE IF EXISTS workflow_execution;
`
)
}
}

View File

@@ -0,0 +1 @@
export { default as WorkflowExecution } from "./workflow-execution"

View File

@@ -0,0 +1,76 @@
import { TransactionState } from "@medusajs/orchestration"
import { DALUtils, generateEntityId } from "@medusajs/utils"
import {
BeforeCreate,
Entity,
Enum,
Filter,
Index,
OnInit,
OptionalProps,
PrimaryKey,
Property,
Unique,
} from "@mikro-orm/core"
type OptionalFields = "deleted_at"
@Entity()
@Unique({
name: "IDX_workflow_execution_workflow_id_transaction_id_unique",
properties: ["workflow_id", "transaction_id"],
})
@Filter(DALUtils.mikroOrmSoftDeletableFilterOptions)
export default class WorkflowExecution {
[OptionalProps]?: OptionalFields
@Property({ columnType: "text", nullable: false })
@Index({ name: "IDX_workflow_execution_id" })
id!: string
@Index({ name: "IDX_workflow_execution_workflow_id" })
@PrimaryKey({ columnType: "text" })
workflow_id: string
@Index({ name: "IDX_workflow_execution_transaction_id" })
@PrimaryKey({ columnType: "text" })
transaction_id: string
@Property({ columnType: "jsonb", nullable: true })
execution: Record<string, unknown> | null = null
@Property({ columnType: "jsonb", nullable: true })
context: Record<string, unknown> | null = null
@Index({ name: "IDX_workflow_execution_state" })
@Enum(() => TransactionState)
state: TransactionState
@Property({
onCreate: () => new Date(),
columnType: "timestamptz",
defaultRaw: "now()",
})
created_at: Date
@Property({
onCreate: () => new Date(),
onUpdate: () => new Date(),
columnType: "timestamptz",
defaultRaw: "now()",
})
updated_at: Date
@Property({ columnType: "timestamptz", nullable: true })
deleted_at: Date | null = null
@BeforeCreate()
onCreate() {
this.id = generateEntityId(this.id, "wf_exec")
}
@OnInit()
onInit() {
this.id = generateEntityId(this.id, "wf_exec")
}
}

View File

@@ -0,0 +1,19 @@
import { ModuleExports } from "@medusajs/types"
import { WorkflowsModuleService } from "@services"
import loadConnection from "./loaders/connection"
import loadContainer from "./loaders/container"
import redisConnection from "./loaders/redis"
import loadUtils from "./loaders/utils"
const service = WorkflowsModuleService
const loaders = [
loadContainer,
loadConnection,
loadUtils,
redisConnection,
] as any
export const moduleDefinition: ModuleExports = {
service,
loaders,
}

View File

@@ -0,0 +1,2 @@
export { MikroOrmBaseRepository as BaseRepository } from "@medusajs/utils"
export { WorkflowExecutionRepository } from "./workflow-execution"

View File

@@ -0,0 +1,7 @@
import { DALUtils } from "@medusajs/utils"
import { WorkflowExecution } from "@models"
// eslint-disable-next-line max-len
export class WorkflowExecutionRepository extends DALUtils.mikroOrmBaseRepositoryFactory(
WorkflowExecution
) {}

View File

@@ -0,0 +1,26 @@
export default `
scalar DateTime
scalar JSON
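# TransactionState mirrors the transaction lifecycle states used by @medusajs/orchestration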
enum TransactionState {
NOT_STARTED
INVOKING
WAITING_TO_COMPENSATE
COMPENSATING
DONE
REVERTED
FAILED
}
type WorkflowExecution {
id: ID!
created_at: DateTime!
updated_at: DateTime!
deleted_at: DateTime
workflow_id: String
transaction_id: String
execution: JSON
context: JSON
state: TransactionState
}
`

View File

@@ -0,0 +1,2 @@
export * from "./workflow-orchestrator"
export * from "./workflows-module"

View File

@@ -0,0 +1,637 @@
import {
DistributedTransaction,
DistributedTransactionEvents,
TransactionHandlerType,
TransactionStep,
} from "@medusajs/orchestration"
import {
ContainerLike,
Context,
Logger,
MedusaContainer,
} from "@medusajs/types"
import { InjectSharedContext, isString, MedusaContext } from "@medusajs/utils"
import {
FlowRunOptions,
MedusaWorkflow,
resolveValue,
ReturnWorkflow,
} from "@medusajs/workflows-sdk"
import Redis from "ioredis"
import { ulid } from "ulid"
import type { RedisDistributedTransactionStorage } from "../utils"
export type WorkflowOrchestratorRunOptions<T> = Omit<
FlowRunOptions<T>,
"container"
> & {
transactionId?: string
container?: ContainerLike
}
type RegisterStepSuccessOptions<T> = Omit<
WorkflowOrchestratorRunOptions<T>,
"transactionId" | "input"
>
type IdempotencyKeyParts = {
workflowId: string
transactionId: string
stepId: string
action: "invoke" | "compensate"
}
type NotifyOptions = {
eventType: keyof DistributedTransactionEvents
workflowId: string
transactionId?: string
step?: TransactionStep
response?: unknown
result?: unknown
errors?: unknown[]
}
type WorkflowId = string
type TransactionId = string
type SubscriberHandler = {
(input: NotifyOptions): void
} & {
_id?: string
}
type SubscribeOptions = {
workflowId: string
transactionId?: string
subscriber: SubscriberHandler
subscriberId?: string
}
type UnsubscribeOptions = {
workflowId: string
transactionId?: string
subscriberOrId: string | SubscriberHandler
}
type TransactionSubscribers = Map<TransactionId, SubscriberHandler[]>
type Subscribers = Map<WorkflowId, TransactionSubscribers>
const AnySubscriber = "any"
export class WorkflowOrchestratorService {
private instanceId = ulid()
protected redisPublisher: Redis
protected redisSubscriber: Redis
private subscribers: Subscribers = new Map()
private activeStepsCount: number = 0
private logger: Logger
protected redisDistributedTransactionStorage_: RedisDistributedTransactionStorage
constructor({
dataLoaderOnly,
redisDistributedTransactionStorage,
redisPublisher,
redisSubscriber,
logger,
}: {
dataLoaderOnly: boolean
redisDistributedTransactionStorage: RedisDistributedTransactionStorage
workflowOrchestratorService: WorkflowOrchestratorService
redisPublisher: Redis
redisSubscriber: Redis
logger: Logger
}) {
this.redisPublisher = redisPublisher
this.redisSubscriber = redisSubscriber
this.logger = logger
redisDistributedTransactionStorage.setWorkflowOrchestratorService(this)
if (!dataLoaderOnly) {
DistributedTransaction.setStorage(redisDistributedTransactionStorage)
}
this.redisDistributedTransactionStorage_ =
redisDistributedTransactionStorage
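// Notifications published by other instances arrive over Redis pub/sub; passing
// publish=false prevents re-publishing, and the instanceId check in notify()
// filters out messages this instance produced itself.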
this.redisSubscriber.on("message", async (_, message) => {
const { instanceId, data } = JSON.parse(message)
await this.notify(data, false, instanceId)
})
}
async onApplicationShutdown() {
await this.redisDistributedTransactionStorage_.onApplicationShutdown()
}
async onApplicationPrepareShutdown() {
// eslint-disable-next-line max-len
await this.redisDistributedTransactionStorage_.onApplicationPrepareShutdown()
while (this.activeStepsCount > 0) {
await new Promise((resolve) => setTimeout(resolve, 1000))
}
}
@InjectSharedContext()
async run<T = unknown>(
workflowIdOrWorkflow: string | ReturnWorkflow<any, any, any>,
options?: WorkflowOrchestratorRunOptions<T>,
@MedusaContext() sharedContext: Context = {}
) {
let {
input,
context,
transactionId,
resultFrom,
throwOnError,
events: eventHandlers,
container,
} = options ?? {}
const workflowId = isString(workflowIdOrWorkflow)
? workflowIdOrWorkflow
: workflowIdOrWorkflow.getName()
if (!workflowId) {
throw new Error("Workflow ID is required")
}
context ??= {}
context.transactionId ??= transactionId ?? ulid()
const events: FlowRunOptions["events"] = this.buildWorkflowEvents({
customEventHandlers: eventHandlers,
workflowId,
transactionId: context.transactionId,
})
const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId)
if (!exportedWorkflow) {
throw new Error(`Workflow with id "${workflowId}" not found.`)
}
const flow = exportedWorkflow(container as MedusaContainer)
const ret = await flow.run({
input,
throwOnError,
resultFrom,
context,
events,
})
// TODO: temporary
const acknowledgement = {
transactionId: context.transactionId,
workflowId: workflowId,
}
if (ret.transaction.hasFinished()) {
const { result, errors } = ret
await this.notify({
eventType: "onFinish",
workflowId,
transactionId: context.transactionId,
result,
errors,
})
}
return { acknowledgement, ...ret }
}
@InjectSharedContext()
async getRunningTransaction(
workflowId: string,
transactionId: string,
options?: WorkflowOrchestratorRunOptions<undefined>,
@MedusaContext() sharedContext: Context = {}
): Promise<DistributedTransaction> {
let { context, container } = options ?? {}
if (!workflowId) {
throw new Error("Workflow ID is required")
}
if (!transactionId) {
throw new Error("TransactionId ID is required")
}
context ??= {}
context.transactionId ??= transactionId
const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId)
if (!exportedWorkflow) {
throw new Error(`Workflow with id "${workflowId}" not found.`)
}
const flow = exportedWorkflow(container as MedusaContainer)
const transaction = await flow.getRunningTransaction(transactionId, context)
return transaction
}
@InjectSharedContext()
async setStepSuccess<T = unknown>(
{
idempotencyKey,
stepResponse,
options,
}: {
idempotencyKey: string | IdempotencyKeyParts
stepResponse: unknown
options?: RegisterStepSuccessOptions<T>
},
@MedusaContext() sharedContext: Context = {}
) {
const {
context,
throwOnError,
resultFrom,
container,
events: eventHandlers,
} = options ?? {}
const [idempotencyKey_, { workflowId, transactionId }] =
this.buildIdempotencyKeyAndParts(idempotencyKey)
const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId)
if (!exportedWorkflow) {
throw new Error(`Workflow with id "${workflowId}" not found.`)
}
const flow = exportedWorkflow(container as MedusaContainer)
const events = this.buildWorkflowEvents({
customEventHandlers: eventHandlers,
transactionId,
workflowId,
})
const ret = await flow.registerStepSuccess({
idempotencyKey: idempotencyKey_,
context,
resultFrom,
throwOnError,
events,
response: stepResponse,
})
if (ret.transaction.hasFinished()) {
const { result, errors } = ret
await this.notify({
eventType: "onFinish",
workflowId,
transactionId,
result,
errors,
})
}
return ret
}
@InjectSharedContext()
async setStepFailure<T = unknown>(
{
idempotencyKey,
stepResponse,
options,
}: {
idempotencyKey: string | IdempotencyKeyParts
stepResponse: unknown
options?: RegisterStepSuccessOptions<T>
},
@MedusaContext() sharedContext: Context = {}
) {
const {
context,
throwOnError,
resultFrom,
container,
events: eventHandlers,
} = options ?? {}
const [idempotencyKey_, { workflowId, transactionId }] =
this.buildIdempotencyKeyAndParts(idempotencyKey)
const exportedWorkflow: any = MedusaWorkflow.getWorkflow(workflowId)
if (!exportedWorkflow) {
throw new Error(`Workflow with id "${workflowId}" not found.`)
}
const flow = exportedWorkflow(container as MedusaContainer)
const events = this.buildWorkflowEvents({
customEventHandlers: eventHandlers,
transactionId,
workflowId,
})
const ret = await flow.registerStepFailure({
idempotencyKey: idempotencyKey_,
context,
resultFrom,
throwOnError,
events,
response: stepResponse,
})
if (ret.transaction.hasFinished()) {
const { result, errors } = ret
await this.notify({
eventType: "onFinish",
workflowId,
transactionId,
result,
errors,
})
}
return ret
}
@InjectSharedContext()
subscribe(
{ workflowId, transactionId, subscriber, subscriberId }: SubscribeOptions,
@MedusaContext() sharedContext: Context = {}
) {
subscriber._id = subscriberId
const subscribers = this.subscribers.get(workflowId) ?? new Map()
// Subscribe instance to redis
if (!this.subscribers.has(workflowId)) {
void this.redisSubscriber.subscribe(this.getChannelName(workflowId))
}
const handlerIndex = (handlers) => {
return handlers.findIndex((s) => s === subscriber || s._id === subscriberId)
}
if (transactionId) {
const transactionSubscribers = subscribers.get(transactionId) ?? []
const subscriberIndex = handlerIndex(transactionSubscribers)
if (subscriberIndex !== -1) {
transactionSubscribers.splice(subscriberIndex, 1)
}
transactionSubscribers.push(subscriber)
subscribers.set(transactionId, transactionSubscribers)
this.subscribers.set(workflowId, subscribers)
return
}
const workflowSubscribers = subscribers.get(AnySubscriber) ?? []
const subscriberIndex = handlerIndex(workflowSubscribers)
if (subscriberIndex !== -1) {
workflowSubscribers.splice(subscriberIndex, 1)
}
workflowSubscribers.push(subscriber)
subscribers.set(AnySubscriber, workflowSubscribers)
this.subscribers.set(workflowId, subscribers)
}
@InjectSharedContext()
unsubscribe(
{ workflowId, transactionId, subscriberOrId }: UnsubscribeOptions,
@MedusaContext() sharedContext: Context = {}
) {
const subscribers = this.subscribers.get(workflowId) ?? new Map()
const filterSubscribers = (handlers: SubscriberHandler[]) => {
return handlers.filter((handler) => {
return handler._id
? handler._id !== (subscriberOrId as string)
: handler !== (subscriberOrId as SubscriberHandler)
})
}
// Unsubscribe instance
if (!this.subscribers.has(workflowId)) {
void this.redisSubscriber.unsubscribe(this.getChannelName(workflowId))
}
if (transactionId) {
const transactionSubscribers = subscribers.get(transactionId) ?? []
const newTransactionSubscribers = filterSubscribers(
transactionSubscribers
)
subscribers.set(transactionId, newTransactionSubscribers)
this.subscribers.set(workflowId, subscribers)
return
}
const workflowSubscribers = subscribers.get(AnySubscriber) ?? []
const newWorkflowSubscribers = filterSubscribers(workflowSubscribers)
subscribers.set(AnySubscriber, newWorkflowSubscribers)
this.subscribers.set(workflowId, subscribers)
}
private async notify(
options: NotifyOptions,
publish = true,
instanceId = this.instanceId
) {
if (!publish && instanceId === this.instanceId) {
return
}
if (publish) {
const channel = this.getChannelName(options.workflowId)
const message = JSON.stringify({
instanceId: this.instanceId,
data: options,
})
await this.redisPublisher.publish(channel, message)
}
const {
eventType,
workflowId,
transactionId,
errors,
result,
step,
response,
} = options
const subscribers: TransactionSubscribers =
this.subscribers.get(workflowId) ?? new Map()
const notifySubscribers = (handlers: SubscriberHandler[]) => {
handlers.forEach((handler) => {
handler({
eventType,
workflowId,
transactionId,
step,
response,
result,
errors,
})
})
}
if (transactionId) {
const transactionSubscribers = subscribers.get(transactionId) ?? []
notifySubscribers(transactionSubscribers)
}
const workflowSubscribers = subscribers.get(AnySubscriber) ?? []
notifySubscribers(workflowSubscribers)
}
private getChannelName(workflowId: string): string {
return `orchestrator:${workflowId}`
}
private buildWorkflowEvents({
customEventHandlers,
workflowId,
transactionId,
}): DistributedTransactionEvents {
const notify = async ({
eventType,
step,
result,
response,
errors,
}: {
eventType: keyof DistributedTransactionEvents
step?: TransactionStep
response?: unknown
result?: unknown
errors?: unknown[]
}) => {
await this.notify({
workflowId,
transactionId,
eventType,
response,
step,
result,
errors,
})
}
return {
onTimeout: async ({ transaction }) => {
customEventHandlers?.onTimeout?.({ transaction })
await notify({ eventType: "onTimeout" })
},
onBegin: async ({ transaction }) => {
customEventHandlers?.onBegin?.({ transaction })
await notify({ eventType: "onBegin" })
},
onResume: async ({ transaction }) => {
customEventHandlers?.onResume?.({ transaction })
await notify({ eventType: "onResume" })
},
onCompensateBegin: async ({ transaction }) => {
customEventHandlers?.onCompensateBegin?.({ transaction })
await notify({ eventType: "onCompensateBegin" })
},
onFinish: async ({ transaction, result, errors }) => {
// TODO: unsubscribe transaction handlers on finish
customEventHandlers?.onFinish?.({ transaction, result, errors })
},
onStepBegin: async ({ step, transaction }) => {
customEventHandlers?.onStepBegin?.({ step, transaction })
this.activeStepsCount++
await notify({ eventType: "onStepBegin", step })
},
onStepSuccess: async ({ step, transaction }) => {
const stepName = step.definition.action!
const response = await resolveValue(
transaction.getContext().invoke[stepName],
transaction
)
customEventHandlers?.onStepSuccess?.({ step, transaction, response })
await notify({ eventType: "onStepSuccess", step, response })
this.activeStepsCount--
},
onStepFailure: async ({ step, transaction }) => {
const stepName = step.definition.action!
const errors = transaction
.getErrors(TransactionHandlerType.INVOKE)
.filter((err) => err.action === stepName)
customEventHandlers?.onStepFailure?.({ step, transaction, errors })
await notify({ eventType: "onStepFailure", step, errors })
this.activeStepsCount--
},
onStepAwaiting: async ({ step, transaction }) => {
customEventHandlers?.onStepAwaiting?.({ step, transaction })
await notify({ eventType: "onStepAwaiting", step })
this.activeStepsCount--
},
onCompensateStepSuccess: async ({ step, transaction }) => {
const stepName = step.definition.action!
const response = transaction.getContext().compensate[stepName]
customEventHandlers?.onCompensateStepSuccess?.({
step,
transaction,
response,
})
await notify({ eventType: "onCompensateStepSuccess", step, response })
},
onCompensateStepFailure: async ({ step, transaction }) => {
const stepName = step.definition.action!
const errors = transaction
.getErrors(TransactionHandlerType.COMPENSATE)
.filter((err) => err.action === stepName)
customEventHandlers?.onCompensateStepFailure?.({ step, transaction, errors })
await notify({ eventType: "onCompensateStepFailure", step, errors })
},
}
}
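// Idempotency keys have the string form "<workflowId>:<transactionId>:<stepId>:<action>",
// e.g. "workflow_1:transaction_1:new_step_name:invoke"; the object form is normalized to it.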
private buildIdempotencyKeyAndParts(
idempotencyKey: string | IdempotencyKeyParts
): [string, IdempotencyKeyParts] {
const parts: IdempotencyKeyParts = {
workflowId: "",
transactionId: "",
stepId: "",
action: "invoke",
}
let idempotencyKey_ = idempotencyKey as string
const setParts = (workflowId, transactionId, stepId, action) => {
parts.workflowId = workflowId
parts.transactionId = transactionId
parts.stepId = stepId
parts.action = action
}
if (!isString(idempotencyKey)) {
const { workflowId, transactionId, stepId, action } =
idempotencyKey as IdempotencyKeyParts
idempotencyKey_ = [workflowId, transactionId, stepId, action].join(":")
setParts(workflowId, transactionId, stepId, action)
} else {
const [workflowId, transactionId, stepId, action] =
idempotencyKey_.split(":")
setParts(workflowId, transactionId, stepId, action)
}
return [idempotencyKey_, parts]
}
}

View File

@@ -0,0 +1,285 @@
import {
Context,
DAL,
FindConfig,
InternalModuleDeclaration,
ModuleJoinerConfig,
ModulesSdkTypes,
} from "@medusajs/types"
import {
InjectManager,
InjectSharedContext,
MedusaContext,
MedusaError,
isString,
} from "@medusajs/utils"
import type {
IWorkflowEngineService,
ReturnWorkflow,
UnwrapWorkflowInputDataType,
WorkflowOrchestratorTypes,
} from "@medusajs/workflows-sdk"
import { WorkflowOrchestratorService } from "@services"
import { joinerConfig } from "../joiner-config"
type InjectedDependencies = {
baseRepository: DAL.RepositoryService
workflowExecutionService: ModulesSdkTypes.InternalModuleService<any>
workflowOrchestratorService: WorkflowOrchestratorService
redisDisconnectHandler: () => Promise<void>
}
export class WorkflowsModuleService implements IWorkflowEngineService {
protected baseRepository_: DAL.RepositoryService
protected workflowExecutionService_: ModulesSdkTypes.InternalModuleService<any>
protected workflowOrchestratorService_: WorkflowOrchestratorService
protected redisDisconnectHandler_: () => Promise<void>
constructor(
{
baseRepository,
workflowExecutionService,
workflowOrchestratorService,
redisDisconnectHandler,
}: InjectedDependencies,
protected readonly moduleDeclaration: InternalModuleDeclaration
) {
this.baseRepository_ = baseRepository
this.workflowExecutionService_ = workflowExecutionService
this.workflowOrchestratorService_ = workflowOrchestratorService
this.redisDisconnectHandler_ = redisDisconnectHandler
}
__joinerConfig(): ModuleJoinerConfig {
return joinerConfig
}
__hooks = {
onApplicationShutdown: async () => {
await this.workflowOrchestratorService_.onApplicationShutdown()
await this.redisDisconnectHandler_()
},
onApplicationPrepareShutdown: async () => {
await this.workflowOrchestratorService_.onApplicationPrepareShutdown()
},
}
@InjectManager("baseRepository_")
async retrieveWorkflowExecution(
idOrObject:
| string
| {
workflow_id: string
transaction_id: string
},
config: FindConfig<WorkflowOrchestratorTypes.WorkflowExecutionDTO> = {},
@MedusaContext() sharedContext: Context = {}
): Promise<WorkflowOrchestratorTypes.WorkflowExecutionDTO> {
const objValue = isString(idOrObject)
? { id: idOrObject }
: {
workflow_id: idOrObject.workflow_id,
transaction_id: idOrObject.transaction_id,
}
const wfExecution = await this.workflowExecutionService_.list(
objValue,
config,
sharedContext
)
if (wfExecution.length === 0) {
throw new MedusaError(
MedusaError.Types.NOT_FOUND,
`WorkflowExecution with ${Object.keys(objValue).join(
", "
)}: ${Object.values(objValue).join(", ")} was not found`
)
}
// eslint-disable-next-line max-len
return await this.baseRepository_.serialize<WorkflowOrchestratorTypes.WorkflowExecutionDTO>(
wfExecution[0],
{
populate: true,
}
)
}
@InjectManager("baseRepository_")
async listWorkflowExecution(
filters: WorkflowOrchestratorTypes.FilterableWorkflowExecutionProps = {},
config: FindConfig<WorkflowOrchestratorTypes.WorkflowExecutionDTO> = {},
@MedusaContext() sharedContext: Context = {}
): Promise<WorkflowOrchestratorTypes.WorkflowExecutionDTO[]> {
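// Array filters are normalized into `$in` queries so callers can pass plain
// arrays of transaction/workflow ids.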
if (filters.transaction_id) {
if (Array.isArray(filters.transaction_id)) {
filters.transaction_id = {
$in: filters.transaction_id,
}
}
}
if (filters.workflow_id) {
if (Array.isArray(filters.workflow_id)) {
filters.workflow_id = {
$in: filters.workflow_id,
}
}
}
const wfExecutions = await this.workflowExecutionService_.list(
filters,
config,
sharedContext
)
return await this.baseRepository_.serialize<
WorkflowOrchestratorTypes.WorkflowExecutionDTO[]
>(wfExecutions, {
populate: true,
})
}
@InjectManager("baseRepository_")
async listAndCountWorkflowExecution(
filters: WorkflowOrchestratorTypes.FilterableWorkflowExecutionProps = {},
config: FindConfig<WorkflowOrchestratorTypes.WorkflowExecutionDTO> = {},
@MedusaContext() sharedContext: Context = {}
): Promise<[WorkflowOrchestratorTypes.WorkflowExecutionDTO[], number]> {
if (filters.transaction_id) {
if (Array.isArray(filters.transaction_id)) {
filters.transaction_id = {
$in: filters.transaction_id,
}
}
}
if (filters.workflow_id) {
if (Array.isArray(filters.workflow_id)) {
filters.workflow_id = {
$in: filters.workflow_id,
}
}
}
const [wfExecutions, count] =
await this.workflowExecutionService_.listAndCount(
filters,
config,
sharedContext
)
return [
await this.baseRepository_.serialize<
WorkflowOrchestratorTypes.WorkflowExecutionDTO[]
>(wfExecutions, {
populate: true,
}),
count,
]
}
@InjectSharedContext()
async run<TWorkflow extends string | ReturnWorkflow<any, any, any>>(
workflowIdOrWorkflow: TWorkflow,
options: WorkflowOrchestratorTypes.WorkflowOrchestratorRunDTO<
TWorkflow extends ReturnWorkflow<any, any, any>
? UnwrapWorkflowInputDataType<TWorkflow>
: unknown
> = {},
@MedusaContext() context: Context = {}
) {
const ret = await this.workflowOrchestratorService_.run<
TWorkflow extends ReturnWorkflow<any, any, any>
? UnwrapWorkflowInputDataType<TWorkflow>
: unknown
>(workflowIdOrWorkflow, options, context)
return ret as any
}
@InjectSharedContext()
async getRunningTransaction(
workflowId: string,
transactionId: string,
@MedusaContext() context: Context = {}
) {
return await this.workflowOrchestratorService_.getRunningTransaction(
workflowId,
transactionId,
context
)
}
@InjectSharedContext()
async setStepSuccess(
{
idempotencyKey,
stepResponse,
options,
}: {
idempotencyKey: string | object
stepResponse: unknown
options?: Record<string, any>
},
@MedusaContext() context: Context = {}
) {
return await this.workflowOrchestratorService_.setStepSuccess(
{
idempotencyKey,
stepResponse,
options,
} as any,
context
)
}
@InjectSharedContext()
async setStepFailure(
{
idempotencyKey,
stepResponse,
options,
}: {
idempotencyKey: string | object
stepResponse: unknown
options?: Record<string, any>
},
@MedusaContext() context: Context = {}
) {
return await this.workflowOrchestratorService_.setStepFailure(
{
idempotencyKey,
stepResponse,
options,
} as any,
context
)
}
@InjectSharedContext()
async subscribe(
args: {
workflowId: string
transactionId?: string
subscriber: Function
subscriberId?: string
},
@MedusaContext() context: Context = {}
) {
return this.workflowOrchestratorService_.subscribe(args as any, context)
}
@InjectSharedContext()
async unsubscribe(
args: {
workflowId: string
transactionId?: string
subscriberOrId: string | Function
},
@MedusaContext() context: Context = {}
) {
return this.workflowOrchestratorService_.unsubscribe(args as any, context)
}
}

View File

@@ -0,0 +1,34 @@
import { Logger } from "@medusajs/types"
import { RedisOptions } from "ioredis"
export type InitializeModuleInjectableDependencies = {
logger?: Logger
}
/**
* Module config type
*/
export type RedisWorkflowsOptions = {
/**
* Redis connection string
*/
url?: string
/**
* Queue name used for retries and timeouts
*/
queueName?: string
/**
* Redis client options
*/
options?: RedisOptions
/**
* Optional connection string and options for the pub/sub client
*/
pubsub?: {
url: string
options?: RedisOptions
}
}
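/**
 * Illustrative options shape (values are placeholders, not defaults):
 *
 * const options: RedisWorkflowsOptions = {
 *   url: "redis://localhost:6379",
 *   queueName: "medusa-workflows",
 *   pubsub: { url: "redis://localhost:6379" },
 * }
 */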

View File

@@ -0,0 +1 @@
export * from "./workflow-orchestrator-storage"

View File

@@ -0,0 +1,293 @@
import {
DistributedTransaction,
DistributedTransactionStorage,
TransactionCheckpoint,
TransactionStep,
} from "@medusajs/orchestration"
import { ModulesSdkTypes } from "@medusajs/types"
import { TransactionState } from "@medusajs/utils"
import { WorkflowOrchestratorService } from "@services"
import { Queue, Worker } from "bullmq"
import Redis from "ioredis"
enum JobType {
RETRY = "retry",
STEP_TIMEOUT = "step_timeout",
TRANSACTION_TIMEOUT = "transaction_timeout",
}
// eslint-disable-next-line max-len
export class RedisDistributedTransactionStorage extends DistributedTransactionStorage {
private static TTL_AFTER_COMPLETED = 60 * 15 // 15 minutes
private workflowExecutionService_: ModulesSdkTypes.InternalModuleService<any>
private workflowOrchestratorService_: WorkflowOrchestratorService
private redisClient: Redis
private queue: Queue
private worker: Worker
constructor({
workflowExecutionService,
redisConnection,
redisWorkerConnection,
redisQueueName,
}: {
workflowExecutionService: ModulesSdkTypes.InternalModuleService<any>
redisConnection: Redis
redisWorkerConnection: Redis
redisQueueName: string
}) {
super()
this.workflowExecutionService_ = workflowExecutionService
this.redisClient = redisConnection
this.queue = new Queue(redisQueueName, { connection: this.redisClient })
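// A single queue/worker pair serves retries, step timeouts and transaction
// timeouts; each job simply re-runs the workflow for the stored transaction
// id (see executeTransaction below).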
this.worker = new Worker(
redisQueueName,
async (job) => {
const allJobs = [
JobType.RETRY,
JobType.STEP_TIMEOUT,
JobType.TRANSACTION_TIMEOUT,
]
if (allJobs.includes(job.name as JobType)) {
await this.executeTransaction(
job.data.workflowId,
job.data.transactionId
)
}
},
{ connection: redisWorkerConnection }
)
}
async onApplicationPrepareShutdown() {
// Close worker gracefully, i.e. wait for the current jobs to finish
await this.worker.close()
}
async onApplicationShutdown() {
await this.queue.close()
}
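// The orchestrator is injected after construction rather than through the
// constructor, presumably because the orchestrator itself is built with this
// storage instance.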
setWorkflowOrchestratorService(workflowOrchestratorService) {
this.workflowOrchestratorService_ = workflowOrchestratorService
}
private async saveToDb(data: TransactionCheckpoint) {
await this.workflowExecutionService_.upsert([
{
workflow_id: data.flow.modelId,
transaction_id: data.flow.transactionId,
execution: data.flow,
context: {
data: data.context,
errors: data.errors,
},
state: data.flow.state,
},
])
}
private async deleteFromDb(data: TransactionCheckpoint) {
await this.workflowExecutionService_.delete([
{
workflow_id: data.flow.modelId,
transaction_id: data.flow.transactionId,
},
])
}
private async executeTransaction(workflowId: string, transactionId: string) {
return await this.workflowOrchestratorService_.run(workflowId, {
transactionId,
throwOnError: false,
})
}
async get(key: string): Promise<TransactionCheckpoint | undefined> {
const data = await this.redisClient.get(key)
return data ? JSON.parse(data) : undefined
}
async list(): Promise<TransactionCheckpoint[]> {
const keys = await this.redisClient.keys(
DistributedTransaction.keyPrefix + ":*"
)
const transactions: any[] = []
for (const key of keys) {
const data = await this.redisClient.get(key)
if (data) {
transactions.push(JSON.parse(data))
}
}
return transactions
}
async save(
key: string,
data: TransactionCheckpoint,
ttl?: number
): Promise<void> {
let retentionTime
/**
* Store the retention time only if the transaction is done, failed or reverted.
* From that point, the record can be archived or deleted once the retention time has elapsed.
*/
const hasFinished = [
TransactionState.DONE,
TransactionState.FAILED,
TransactionState.REVERTED,
].includes(data.flow.state)
if (hasFinished) {
retentionTime = data.flow.options?.retentionTime
Object.assign(data, {
retention_time: retentionTime,
})
}
const stringifiedData = JSON.stringify(data)
const parsedData = JSON.parse(stringifiedData)
if (!hasFinished) {
if (ttl) {
await this.redisClient.set(key, stringifiedData, "EX", ttl)
} else {
await this.redisClient.set(key, stringifiedData)
}
}
if (hasFinished && !retentionTime) {
await this.deleteFromDb(parsedData)
} else {
await this.saveToDb(parsedData)
}
if (hasFinished) {
// await this.redisClient.del(key)
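// Instead of deleting the key right away, keep the final checkpoint readable
// for a short grace period (TTL_AFTER_COMPLETED) before Redis expires it.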
await this.redisClient.set(
key,
stringifiedData,
"EX",
RedisDistributedTransactionStorage.TTL_AFTER_COMPLETED
)
}
}
async scheduleRetry(
transaction: DistributedTransaction,
step: TransactionStep,
timestamp: number,
interval: number
): Promise<void> {
await this.queue.add(
JobType.RETRY,
{
workflowId: transaction.modelId,
transactionId: transaction.transactionId,
stepId: step.id,
},
{
delay: interval > 0 ? interval * 1000 : undefined,
jobId: this.getJobId(JobType.RETRY, transaction, step),
removeOnComplete: true,
}
)
}
async clearRetry(
transaction: DistributedTransaction,
step: TransactionStep
): Promise<void> {
await this.removeJob(JobType.RETRY, transaction, step)
}
async scheduleTransactionTimeout(
transaction: DistributedTransaction,
timestamp: number,
interval: number
): Promise<void> {
await this.queue.add(
JobType.TRANSACTION_TIMEOUT,
{
workflowId: transaction.modelId,
transactionId: transaction.transactionId,
},
{
delay: interval * 1000,
jobId: this.getJobId(JobType.TRANSACTION_TIMEOUT, transaction),
removeOnComplete: true,
}
)
}
async clearTransactionTimeout(
transaction: DistributedTransaction
): Promise<void> {
await this.removeJob(JobType.TRANSACTION_TIMEOUT, transaction)
}
async scheduleStepTimeout(
transaction: DistributedTransaction,
step: TransactionStep,
timestamp: number,
interval: number
): Promise<void> {
await this.queue.add(
JobType.STEP_TIMEOUT,
{
workflowId: transaction.modelId,
transactionId: transaction.transactionId,
stepId: step.id,
},
{
delay: interval * 1000,
jobId: this.getJobId(JobType.STEP_TIMEOUT, transaction, step),
removeOnComplete: true,
}
)
}
async clearStepTimeout(
transaction: DistributedTransaction,
step: TransactionStep
): Promise<void> {
await this.removeJob(JobType.STEP_TIMEOUT, transaction, step)
}
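// Job ids encode the job type, workflow, transaction, step and attempt count,
// so BullMQ deduplicates scheduling per attempt, and compensation runs get
// their own ids.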
private getJobId(
type: JobType,
transaction: DistributedTransaction,
step?: TransactionStep
) {
const key = [type, transaction.modelId, transaction.transactionId]
if (step) {
key.push(step.id, step.attempts + "")
if (step.isCompensating()) {
key.push("compensate")
}
}
return key.join(":")
}
private async removeJob(
type: JobType,
transaction: DistributedTransaction,
step?: TransactionStep
) {
const jobId = this.getJobId(type, transaction, step)
const job = await this.queue.getJob(jobId)
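// Only drop jobs that have not started yet; jobs already being processed are
// left for the worker to finish.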
if (job && job.attemptsStarted === 0) {
await job.remove()
}
}
}

View File

@@ -0,0 +1,38 @@
{
"compilerOptions": {
"lib": ["es2020"],
"target": "es2020",
"outDir": "./dist",
"esModuleInterop": true,
"declarationMap": true,
"declaration": true,
"module": "commonjs",
"moduleResolution": "node",
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"sourceMap": false,
"noImplicitReturns": true,
"strictNullChecks": true,
"strictFunctionTypes": true,
"noImplicitThis": true,
"allowJs": true,
"skipLibCheck": true,
"downlevelIteration": true, // to use ES5 specific tooling
"baseUrl": ".",
"resolveJsonModule": true,
"paths": {
"@models": ["./src/models"],
"@services": ["./src/services"],
"@repositories": ["./src/repositories"],
"@types": ["./src/types"]
}
},
"include": ["src"],
"exclude": [
"dist",
"./src/**/__tests__",
"./src/**/__mocks__",
"./src/**/__fixtures__",
"node_modules"
]
}

View File

@@ -0,0 +1,8 @@
{
"extends": "./tsconfig.json",
"include": ["src", "integration-tests"],
"exclude": ["node_modules", "dist"],
"compilerOptions": {
"sourceMap": true
}
}