feat(workflows-*): Allow re-running a non-idempotent but stored workflow with the same transaction id if it is considered done (#12362)

This commit is contained in:
Adrien de Peretti
2025-05-06 17:17:49 +02:00
committed by GitHub
parent 97dd520c64
commit 80007f3afd
31 changed files with 809 additions and 95 deletions

View File

@@ -24,6 +24,15 @@
"nullable": false,
"mappedType": "text"
},
"run_id": {
"name": "run_id",
"type": "text",
"unsigned": false,
"autoincrement": false,
"primary": false,
"nullable": false,
"mappedType": "text"
},
"id": {
"name": "id",
"type": "text",
@@ -151,13 +160,13 @@
"expression": "CREATE INDEX IF NOT EXISTS \"IDX_workflow_execution_transaction_id\" ON \"workflow_execution\" (transaction_id) WHERE deleted_at IS NULL"
},
{
"keyName": "IDX_workflow_execution_workflow_id_transaction_id_unique",
"keyName": "IDX_workflow_execution_workflow_id_transaction_id_run_id_unique",
"columnNames": [],
"composite": false,
"constraint": false,
"primary": false,
"unique": false,
"expression": "CREATE UNIQUE INDEX IF NOT EXISTS \"IDX_workflow_execution_workflow_id_transaction_id_unique\" ON \"workflow_execution\" (workflow_id, transaction_id) WHERE deleted_at IS NULL"
"expression": "CREATE UNIQUE INDEX IF NOT EXISTS \"IDX_workflow_execution_workflow_id_transaction_id_run_id_unique\" ON \"workflow_execution\" (workflow_id, transaction_id, run_id) WHERE deleted_at IS NULL"
},
{
"keyName": "IDX_workflow_execution_state",
@@ -172,7 +181,8 @@
"keyName": "workflow_execution_pkey",
"columnNames": [
"workflow_id",
"transaction_id"
"transaction_id",
"run_id"
],
"composite": true,
"constraint": true,

View File

@@ -0,0 +1,45 @@
import { Migration } from "@mikro-orm/migrations"
import { ulid } from "ulid"
/**
 * Adds a `run_id` column to the `workflow_execution` table so a workflow can
 * be executed again under the same transaction id, widening the primary key
 * and the partial unique index from (workflow_id, transaction_id) to
 * (workflow_id, transaction_id, run_id).
 */
export class Migration20250505101505 extends Migration {
  override async up(): Promise<void> {
    // Ordered DDL: drop the old key/index first, then add the column and
    // recreate the key/index including run_id.
    // NOTE(review): the `ulid()` default is interpolated into the SQL string
    // once, so every pre-existing row receives the same backfilled run_id —
    // presumably intentional, since those rows were already unique on
    // (workflow_id, transaction_id); confirm.
    const upStatements: string[] = [
      `alter table if exists "workflow_execution" drop constraint if exists "workflow_execution_workflow_id_transaction_id_run_id_unique";`,
      `drop index if exists "IDX_workflow_execution_workflow_id_transaction_id_unique";`,
      `alter table if exists "workflow_execution" drop constraint if exists "PK_workflow_execution_workflow_id_transaction_id";`,
      `alter table if exists "workflow_execution" add column if not exists "run_id" text not null default '${ulid()}';`,
      `CREATE UNIQUE INDEX IF NOT EXISTS "IDX_workflow_execution_workflow_id_transaction_id_run_id_unique" ON "workflow_execution" (workflow_id, transaction_id, run_id) WHERE deleted_at IS NULL;`,
      `alter table if exists "workflow_execution" add constraint "workflow_execution_pkey" primary key ("workflow_id", "transaction_id", "run_id");`,
    ]

    for (const sql of upStatements) {
      this.addSql(sql)
    }
  }

  override async down(): Promise<void> {
    // Reverses up(): remove the run_id-aware index/key and column, then
    // restore the original two-column unique index and primary key.
    const downStatements: string[] = [
      `drop index if exists "IDX_workflow_execution_workflow_id_transaction_id_run_id_unique";`,
      `alter table if exists "workflow_execution" drop constraint if exists "workflow_execution_pkey";`,
      `alter table if exists "workflow_execution" drop column if exists "run_id";`,
      `CREATE UNIQUE INDEX IF NOT EXISTS "IDX_workflow_execution_workflow_id_transaction_id_unique" ON "workflow_execution" (workflow_id, transaction_id) WHERE deleted_at IS NULL;`,
      `alter table if exists "workflow_execution" add constraint "workflow_execution_pkey" primary key ("workflow_id", "transaction_id");`,
    ]

    for (const sql of downStatements) {
      this.addSql(sql)
    }
  }
}

View File

@@ -6,6 +6,7 @@ export const WorkflowExecution = model
id: model.id({ prefix: "wf_exec" }),
workflow_id: model.text().primaryKey(),
transaction_id: model.text().primaryKey(),
run_id: model.text().primaryKey(),
execution: model.json().nullable(),
context: model.json().nullable(),
state: model.enum(TransactionState),
@@ -25,7 +26,7 @@ export const WorkflowExecution = model
where: "deleted_at IS NULL",
},
{
on: ["workflow_id", "transaction_id"],
on: ["workflow_id", "transaction_id", "run_id"],
unique: true,
where: "deleted_at IS NULL",
},

View File

@@ -30,6 +30,7 @@ export type WorkflowOrchestratorRunOptions<T> = Omit<
"container"
> & {
transactionId?: string
runId?: string
container?: ContainerLike
}
@@ -38,6 +39,7 @@ export type WorkflowOrchestratorCancelOptions = Omit<
"transaction" | "transactionId" | "container"
> & {
transactionId: string
runId?: string
container?: ContainerLike
}
@@ -205,7 +207,6 @@ export class WorkflowOrchestratorService {
throwOnError ??= true
context ??= {}
context.transactionId = transactionId ?? ulid()
const workflowId = isString(workflowIdOrWorkflow)
? workflowIdOrWorkflow
: workflowIdOrWorkflow.getName()
@@ -319,7 +320,7 @@ export class WorkflowOrchestratorService {
const transaction = await this.getRunningTransaction(
workflowId,
transactionId,
options
{ ...options, isCancelling: true }
)
if (!transaction) {
if (!throwOnError) {

View File

@@ -6,9 +6,11 @@ import {
SchedulerOptions,
SkipExecutionError,
TransactionCheckpoint,
TransactionContext,
TransactionFlow,
TransactionOptions,
TransactionStep,
TransactionStepError,
} from "@medusajs/framework/orchestration"
import { Logger, ModulesSdkTypes } from "@medusajs/framework/types"
import {
@@ -160,6 +162,7 @@ export class RedisDistributedTransactionStorage
{
workflow_id: data.flow.modelId,
transaction_id: data.flow.transactionId,
run_id: data.flow.runId,
execution: data.flow,
context: {
data: data.context,
@@ -176,6 +179,7 @@ export class RedisDistributedTransactionStorage
{
workflow_id: data.flow.modelId,
transaction_id: data.flow.transactionId,
run_id: data.flow.runId,
},
])
}
@@ -223,7 +227,7 @@ export class RedisDistributedTransactionStorage
async get(
key: string,
options?: TransactionOptions
options?: TransactionOptions & { isCancelling?: boolean }
): Promise<TransactionCheckpoint | undefined> {
const data = await this.redisClient.get(key)
@@ -240,26 +244,54 @@ export class RedisDistributedTransactionStorage
const [_, workflowId, transactionId] = key.split(":")
const trx = await this.workflowExecutionService_
.retrieve(
.list(
{
workflow_id: workflowId,
transaction_id: transactionId,
},
{
select: ["execution", "context"],
order: {
id: "desc",
},
take: 1,
}
)
.then((trx) => trx[0])
.catch(() => undefined)
if (trx) {
const checkpointData = {
flow: trx.execution,
context: trx.context.data,
errors: trx.context.errors,
const execution = trx.execution as TransactionFlow
if (!idempotent) {
const isFailedOrReverted = [
TransactionState.REVERTED,
TransactionState.FAILED,
].includes(execution.state)
const isDone = execution.state === TransactionState.DONE
const isCancellingAndFailedOrReverted =
options?.isCancelling && isFailedOrReverted
const isNotCancellingAndDoneOrFailedOrReverted =
!options?.isCancelling && (isDone || isFailedOrReverted)
if (
isCancellingAndFailedOrReverted ||
isNotCancellingAndDoneOrFailedOrReverted
) {
return
}
}
return checkpointData
return {
flow: trx.execution as TransactionFlow,
context: trx.context?.data as TransactionContext,
errors: trx.context?.errors as TransactionStepError[],
}
}
return
}
@@ -325,6 +357,11 @@ export class RedisDistributedTransactionStorage
})
}
const isNotStarted = data.flow.state === TransactionState.NOT_STARTED
const isManualTransactionId = !data.flow.transactionId.startsWith("auto-")
// Only set if not exists
const shouldSetNX = isNotStarted && isManualTransactionId
// Prepare operations to be executed in batch or pipeline
const stringifiedData = JSON.stringify(data)
const pipeline = this.redisClient.pipeline()
@@ -332,19 +369,45 @@ export class RedisDistributedTransactionStorage
// Execute Redis operations
if (!hasFinished) {
if (ttl) {
pipeline.set(key, stringifiedData, "EX", ttl)
if (shouldSetNX) {
pipeline.set(key, stringifiedData, "EX", ttl, "NX")
} else {
pipeline.set(key, stringifiedData, "EX", ttl)
}
} else {
pipeline.set(key, stringifiedData)
if (shouldSetNX) {
pipeline.set(key, stringifiedData, "NX")
} else {
pipeline.set(key, stringifiedData)
}
}
} else {
pipeline.unlink(key)
}
const pipelinePromise = pipeline.exec().then((result) => {
if (!shouldSetNX) {
return result
}
const actionResult = result?.pop()
const isOk = !!actionResult?.pop()
if (!isOk) {
throw new MedusaError(
MedusaError.Types.INVALID_ARGUMENT,
"Transaction already started for transactionId: " +
data.flow.transactionId
)
}
return result
})
// Database operations
if (hasFinished && !retentionTime && !idempotent) {
await promiseAll([pipeline.exec(), this.deleteFromDb(data)])
await promiseAll([pipelinePromise, this.deleteFromDb(data)])
} else {
await promiseAll([pipeline.exec(), this.saveToDb(data, retentionTime)])
await promiseAll([pipelinePromise, this.saveToDb(data, retentionTime)])
}
}
@@ -540,15 +603,23 @@ export class RedisDistributedTransactionStorage
key: string
options?: TransactionOptions
}) {
const isInitialCheckpoint = data.flow.state === TransactionState.NOT_STARTED
const isInitialCheckpoint = [TransactionState.NOT_STARTED].includes(
data.flow.state
)
/**
* In case many execution can succeed simultaneously, we need to ensure that the latest
* execution does continue if a previous execution is considered finished
*/
const currentFlow = data.flow
const getOptions = {
...options,
isCancelling: !!data.flow.cancelledAt,
} as Parameters<typeof this.get>[1]
const { flow: latestUpdatedFlow } =
(await this.get(key, options)) ??
(await this.get(key, getOptions)) ??
({ flow: {} } as { flow: TransactionFlow })
if (!isInitialCheckpoint && !isPresent(latestUpdatedFlow)) {