diff --git a/README.md b/README.md index 3f6eaf5..2ab70ca 100644 --- a/README.md +++ b/README.md @@ -7,11 +7,10 @@ A durable, resumable workflow engine for Elixir. Similar to Temporal/Inngest. ## Features -- **Pipeline Model** - Data flows from step to step, simple and explicit +- **Pipeline Model** - Context flows from step to step, simple and explicit - **Resumability** - Sleep, wait for events, wait for human input - **Branching** - Pattern-matched conditional flow control -- **Parallel** - Run steps concurrently with merge strategies -- **ForEach** - Process collections with configurable concurrency +- **Parallel** - Run steps concurrently with result collection - **Compensations** - Saga pattern with automatic rollback - **Cron Scheduling** - Recurring workflows with cron expressions - **Reliability** - Automatic retries with exponential/linear/constant backoff @@ -55,28 +54,28 @@ defmodule MyApp.OrderWorkflow do workflow "process_order", timeout: hours(2) do # First step receives workflow input - step :validate, fn order -> + step :validate, fn input -> {:ok, %{ - order_id: order["id"], - items: order["items"], - customer_id: order["customer_id"] + order_id: input["id"], + items: input["items"], + customer_id: input["customer_id"] }} end - # Each step receives previous step's output - step :calculate_total, fn data -> - total = data.items |> Enum.map(& &1["price"]) |> Enum.sum() - {:ok, assign(data, :total, total)} + # Each step receives previous step's output as context + step :calculate_total, fn ctx -> + total = ctx.items |> Enum.map(& &1["price"]) |> Enum.sum() + {:ok, assign(ctx, :total, total)} end - step :charge_payment, [retry: [max_attempts: 3, backoff: :exponential]], fn data -> - {:ok, charge} = PaymentService.charge(data.order_id, data.total) - {:ok, assign(data, :charge_id, charge.id)} + step :charge_payment, [retry: [max_attempts: 3, backoff: :exponential]], fn ctx -> + {:ok, charge} = PaymentService.charge(ctx.order_id, ctx.total) + {:ok, assign(ctx, :charge_id, charge.id)} end - step :send_confirmation, fn data -> - EmailService.send_confirmation(data.order_id) - {:ok, data} + step :send_confirmation, fn ctx -> + EmailService.send_confirmation(ctx.order_id) + {:ok, ctx} end end end @@ -98,26 +97,26 @@ defmodule MyApp.ExpenseApproval do use Durable.Wait workflow "expense_approval" do - step :request_approval, fn data -> + step :request_approval, fn ctx -> result = wait_for_approval("manager", - prompt: "Approve $#{data["amount"]} expense?", + prompt: "Approve $#{ctx["amount"]} expense?", timeout: days(3), timeout_value: :auto_rejected ) - {:ok, assign(data, :decision, result)} + {:ok, assign(ctx, :decision, result)} end - branch on: fn data -> data.decision end do + branch on: fn ctx -> ctx.decision end do :approved -> - step :process, fn data -> - Expenses.reimburse(data["employee_id"], data["amount"]) - {:ok, assign(data, :status, :reimbursed)} + step :process, fn ctx -> + Expenses.reimburse(ctx["employee_id"], ctx["amount"]) + {:ok, assign(ctx, :status, :reimbursed)} end _ -> - step :notify_rejection, fn data -> - Mailer.send_rejection(data["employee_id"]) - {:ok, assign(data, :status, :rejected)} + step :notify_rejection, fn ctx -> + Mailer.send_rejection(ctx["employee_id"]) + {:ok, assign(ctx, :status, :rejected)} end end end @@ -129,7 +128,7 @@ Durable.provide_input(workflow_id, "manager", :approved) ### Parallel Data Fetch -Fetch data concurrently, then combine. +Fetch data concurrently, then combine results. 
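+Each parallel step returns just the map of new data it produces; the engine collects those returns into `__results__` on the context, keyed by step name. An illustrative sketch of the shape the first example below reads (all values hypothetical):
+
+```elixir
+# Hypothetical context after the parallel block in the example below.
+# Each entry is an ["ok", data] or ["error", reason] pair:
+%{
+  user_id: 42,
+  __results__: %{
+    "user" => ["ok", %{user: %{id: 42, name: "Ada"}}],
+    "orders" => ["error", "timeout"],
+    "notifications" => ["ok", %{notifs: []}]
+  }
+}
+```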
```elixir
defmodule MyApp.DashboardBuilder do
@@ -141,23 +140,79 @@ defmodule MyApp.DashboardBuilder do
      {:ok, %{user_id: input["user_id"]}}
    end

+    # Parallel steps produce results in the __results__ map
    parallel do
-      step :user, fn data ->
-        {:ok, assign(data, :user, Users.get(data.user_id))}
+      step :user, fn ctx ->
+        {:ok, %{user: Users.get(ctx.user_id)}}
      end

-      step :orders, fn data ->
-        {:ok, assign(data, :orders, Orders.recent(data.user_id))}
+      step :orders, fn ctx ->
+        {:ok, %{orders: Orders.recent(ctx.user_id)}}
      end

-      step :notifications, fn data ->
-        {:ok, assign(data, :notifs, Notifications.unread(data.user_id))}
+      step :notifications, fn ctx ->
+        {:ok, %{notifs: Notifications.unread(ctx.user_id)}}
      end
    end

-    step :render, fn data ->
-      dashboard = Dashboard.build(data.user, data.orders, data.notifs)
-      {:ok, assign(data, :dashboard, dashboard)}
+    # Access results from the __results__ map
+    step :render, fn ctx ->
+      results = ctx[:__results__]
+
+      # Each result is a tagged ["ok", data] or ["error", reason] pair
+      user = case results["user"] do
+        ["ok", data] -> data.user
+        _ -> nil
+      end
+
+      orders = case results["orders"] do
+        ["ok", data] -> data.orders
+        _ -> []
+      end
+
+      notifs = case results["notifications"] do
+        ["ok", data] -> data.notifs
+        _ -> []
+      end
+
+      dashboard = Dashboard.build(user, orders, notifs)
+      {:ok, assign(ctx, :dashboard, dashboard)}
+    end
+  end
+end
+
+# Or use into: to transform results directly
+defmodule MyApp.DashboardBuilderWithInto do
+  use Durable
+  use Durable.Helpers
+
+  workflow "build_dashboard_v2" do
+    step :init, fn input ->
+      {:ok, %{user_id: input["user_id"]}}
+    end
+
+    parallel into: fn ctx, results ->
+      # results is a map of tagged tuples: %{user: {:ok, data}, orders: {:ok, data}, ...}
+      case {results[:user], results[:orders], results[:notifications]} do
+        {{:ok, user_data}, {:ok, orders_data}, {:ok, notifs_data}} ->
+          {:ok, Map.merge(ctx, %{
+            user: user_data.user,
+            orders: orders_data.orders,
+            notifs: notifs_data.notifs
+          })}
+
+        _ ->
+          {:error, "Failed to fetch dashboard data"}
+      end
+    end do
+      step :user, fn ctx -> {:ok, %{user: Users.get(ctx.user_id)}} end
+      step :orders, fn ctx -> {:ok, %{orders: Orders.recent(ctx.user_id)}} end
+      step :notifications, fn ctx -> {:ok, %{notifs: Notifications.unread(ctx.user_id)}} end
+    end
+
+    step :render, fn ctx ->
+      dashboard = Dashboard.build(ctx.user, ctx.orders, ctx.notifs)
+      {:ok, assign(ctx, :dashboard, dashboard)}
    end
  end
end
@@ -165,7 +220,7 @@ end

 ### Batch Processing

-Process items with controlled concurrency.
+Process items with controlled concurrency using `Task.async_stream`.
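+The heavy lifting is plain `Task.async_stream/3` from the standard library — no engine-specific construct is involved. A minimal sketch of the pattern on its own:
+
+```elixir
+# Runs the function on each item, at most `max_concurrency` at a time,
+# and yields {:ok, result} per item, in input order.
+[1, 2, 3]
+|> Task.async_stream(fn n -> n * 2 end, max_concurrency: 2)
+|> Enum.map(fn {:ok, doubled} -> doubled end)
+# => [2, 4, 6]
+```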
```elixir
defmodule MyApp.BulkEmailer do
@@ -178,16 +233,27 @@ defmodule MyApp.BulkEmailer do
      {:ok, %{campaign_id: input["campaign_id"], recipients: recipients}}
    end

-    foreach :send_emails,
-      items: fn data -> data.recipients end,
-      concurrency: 10,
-      on_error: :continue do
-
-      # Foreach steps receive (data, item, index)
-      step :send, fn data, recipient, _idx ->
-        Mailer.send_campaign(recipient, data.campaign_id)
-        {:ok, increment(data, :sent_count)}
-      end
+    step :send_emails, fn ctx ->
+      results =
+        ctx.recipients
+        |> Task.async_stream(
+          fn recipient ->
+            case Mailer.send_campaign(recipient, ctx.campaign_id) do
+              :ok -> {:ok, recipient}
+              {:error, reason} -> {:error, {recipient, reason}}
+            end
+          end,
+          max_concurrency: 10,
+          timeout: :infinity
+        )
+        # Unwrap the outer {:ok, _} that Task.async_stream adds per task
+        |> Enum.map(fn {:ok, r} -> r end)
+
+      sent = Enum.count(results, &match?({:ok, _}, &1))
+      failed = Enum.count(results, &match?({:error, _}, &1))
+
+      {:ok, ctx
+       |> assign(:sent_count, sent)
+       |> assign(:failed_count, failed)}
    end
  end
end
@@ -203,30 +269,30 @@ defmodule MyApp.TripBooking do
  use Durable.Helpers

  workflow "book_trip" do
-    step :book_flight, [compensate: :cancel_flight], fn data ->
-      booking = Flights.book(data["flight"])
-      {:ok, assign(data, :flight, booking)}
+    step :book_flight, [compensate: :cancel_flight], fn ctx ->
+      booking = Flights.book(ctx["flight"])
+      {:ok, assign(ctx, :flight, booking)}
    end

-    step :book_hotel, [compensate: :cancel_hotel], fn data ->
-      booking = Hotels.book(data["hotel"])
-      {:ok, assign(data, :hotel, booking)}
+    step :book_hotel, [compensate: :cancel_hotel], fn ctx ->
+      booking = Hotels.book(ctx["hotel"])
+      {:ok, assign(ctx, :hotel, booking)}
    end

-    step :charge, fn data ->
-      total = data.flight.price + data.hotel.price
-      Payments.charge(data["card"], total)
-      {:ok, assign(data, :charged, true)}
+    step :charge, fn ctx ->
+      total = ctx.flight.price + ctx.hotel.price
+      Payments.charge(ctx["card"], total)
+      {:ok, assign(ctx, :charged, true)}
    end

-    compensate :cancel_flight, fn data ->
-      Flights.cancel(data.flight.id)
-      {:ok, data}
+    compensate :cancel_flight, fn ctx ->
+      Flights.cancel(ctx.flight.id)
+      {:ok, ctx}
    end

-    compensate :cancel_hotel, fn data ->
-      Hotels.cancel(data.hotel.id)
-      {:ok, data}
+    compensate :cancel_hotel, fn ctx ->
+      Hotels.cancel(ctx.hotel.id)
+      {:ok, ctx}
    end
  end
end
@@ -249,10 +315,10 @@ defmodule MyApp.DailyReport do
      {:ok, %{report: report}}
    end

-    step :distribute, fn data ->
-      Mailer.send_report(data.report, to: "team@company.com")
-      Slack.post_summary(data.report, channel: "#sales")
-      {:ok, data}
+    step :distribute, fn ctx ->
+      Mailer.send_report(ctx.report, to: "team@company.com")
+      Slack.post_summary(ctx.report, channel: "#sales")
+      {:ok, ctx}
    end
  end
end
@@ -272,33 +338,33 @@ defmodule MyApp.TrialReminder do
  use Durable.Wait

  workflow "trial_reminder" do
-    step :welcome, fn data ->
-      Mailer.send_welcome(data["user_id"])
-      {:ok, %{user_id: data["user_id"], trial_started_at: data["trial_started_at"]}}
+    step :welcome, fn ctx ->
+      Mailer.send_welcome(ctx["user_id"])
+      {:ok, %{user_id: ctx["user_id"], trial_started_at: ctx["trial_started_at"]}}
    end

-    step :wait_3_days, fn data ->
+    step :wait_3_days, fn ctx ->
      sleep(days(3))
-      {:ok, data}
+      {:ok, ctx}
    end

-    step :check_in, fn data ->
-      Mailer.send_tips(data.user_id)
-      {:ok, data}
+    step :check_in, fn ctx ->
+      Mailer.send_tips(ctx.user_id)
+      {:ok, ctx}
    end

-    step :wait_until_trial_ends, fn data ->
-      trial_end = DateTime.add(data.trial_started_at, 14, :day)
+    step :wait_until_trial_ends, fn ctx ->
+      trial_end =
DateTime.add(ctx.trial_started_at, 14, :day) schedule_at(trial_end) - {:ok, data} + {:ok, ctx} end - step :convert_or_remind, fn data -> - if Subscriptions.active?(data.user_id) do - {:ok, assign(data, :converted, true)} + step :convert_or_remind, fn ctx -> + if Subscriptions.active?(ctx.user_id) do + {:ok, assign(ctx, :converted, true)} else - Mailer.send_upgrade_reminder(data.user_id) - {:ok, assign(data, :converted, false)} + Mailer.send_upgrade_reminder(ctx.user_id) + {:ok, assign(ctx, :converted, false)} end end end @@ -316,30 +382,30 @@ defmodule MyApp.PaymentFlow do use Durable.Wait workflow "payment_flow" do - step :create_invoice, fn data -> - invoice = Invoices.create(data["order_id"], data["amount"]) - {:ok, %{order_id: data["order_id"], invoice_id: invoice.id}} + step :create_invoice, fn ctx -> + invoice = Invoices.create(ctx["order_id"], ctx["amount"]) + {:ok, %{order_id: ctx["order_id"], invoice_id: invoice.id}} end - step :await_payment, fn data -> + step :await_payment, fn ctx -> {event, _payload} = wait_for_any(["payment.success", "payment.failed"], timeout: days(7), timeout_value: {"payment.expired", nil} ) - {:ok, assign(data, :result, event)} + {:ok, assign(ctx, :result, event)} end - branch on: fn data -> data.result end do + branch on: fn ctx -> ctx.result end do "payment.success" -> - step :fulfill, fn data -> - Orders.fulfill(data.order_id) - {:ok, assign(data, :status, :fulfilled)} + step :fulfill, fn ctx -> + Orders.fulfill(ctx.order_id) + {:ok, assign(ctx, :status, :fulfilled)} end _ -> - step :cancel, fn data -> - Orders.cancel(data.order_id) - {:ok, assign(data, :status, :cancelled)} + step :cancel, fn ctx -> + Orders.cancel(ctx.order_id) + {:ok, assign(ctx, :status, :cancelled)} end end end @@ -356,12 +422,12 @@ Durable.send_event(workflow_id, "payment.success", %{transaction_id: "txn_123"}) ```elixir use Durable.Helpers -assign(data, :key, value) # Set a value -assign(data, %{a: 1, b: 2}) # Merge multiple values -update(data, :key, default, fn old -> new end) -append(data, :list, item) # Append to list -increment(data, :count) # Increment by 1 -increment(data, :count, 5) # Increment by 5 +assign(ctx, :key, value) # Set a value +assign(ctx, %{a: 1, b: 2}) # Merge multiple values +update(ctx, :key, default, fn old -> new end) +append(ctx, :list, item) # Append to list +increment(ctx, :count) # Increment by 1 +increment(ctx, :count, 5) # Increment by 5 ``` ### Time Helpers @@ -389,7 +455,6 @@ Durable.provide_input(id, "input_name", data) - [Branching](guides/branching.md) - Conditional flow control - [Parallel](guides/parallel.md) - Concurrent execution -- [ForEach](guides/foreach.md) - Collection processing - [Compensations](guides/compensations.md) - Saga pattern - [Waiting](guides/waiting.md) - Sleep, events, human input diff --git a/guides/ai_workflows.md b/guides/ai_workflows.md index e425865..9755c31 100644 --- a/guides/ai_workflows.md +++ b/guides/ai_workflows.md @@ -60,28 +60,28 @@ defmodule MyApp.DocumentProcessor do use Durable.Helpers workflow "process_document" do - step :fetch, fn data -> - doc = DocumentStore.get(data["doc_id"]) + step :fetch, fn ctx -> + doc = DocumentStore.get(ctx["doc_id"]) {:ok, %{doc: doc}} end # AI classification with automatic retry - step :classify, [retry: [max_attempts: 3, backoff: :exponential]], fn data -> - content = data.doc.content + step :classify, [retry: [max_attempts: 3, backoff: :exponential]], fn ctx -> + content = ctx.doc.content doc_type = ReqLLM.generate_text!( "anthropic:claude-sonnet-4-20250514", "Classify 
this document as :invoice, :contract, or :other. Reply with only the atom.\n\n#{content}"
      )
      |> String.trim()
+      |> String.trim_leading(":")  # tolerate a leading colon in the reply
      |> String.to_atom()

-      {:ok, assign(data, :doc_type, doc_type)}
+      {:ok, assign(ctx, :doc_type, doc_type)}
    end

    # Conditional branching - only ONE path executes
-    branch on: fn data -> data.doc_type end do
+    branch on: fn ctx -> ctx.doc_type end do
      :invoice ->
-        step :extract_invoice, [retry: [max_attempts: 3]], fn data ->
-          content = data.doc.content
+        step :extract_invoice, [retry: [max_attempts: 3]], fn ctx ->
+          content = ctx.doc.content

          {:ok, extracted} = ReqLLM.generate_object(
            "anthropic:claude-sonnet-4-20250514",
@@ -94,18 +94,18 @@ defmodule MyApp.DocumentProcessor do
            }
          )

-          {:ok, assign(data, :extracted, extracted)}
+          {:ok, assign(ctx, :extracted, extracted)}
        end

-        step :validate_invoice, fn data ->
-          extracted = data.extracted
+        step :validate_invoice, fn ctx ->
+          extracted = ctx.extracted
          calculated = Enum.sum(Enum.map(extracted.line_items, & &1.amount))
-          {:ok, assign(data, :valid, abs(calculated - extracted.total) < 0.01)}
+          {:ok, assign(ctx, :valid, abs(calculated - extracted.total) < 0.01)}
        end

      :contract ->
-        step :extract_contract, [retry: [max_attempts: 3]], fn data ->
-          content = data.doc.content
+        step :extract_contract, [retry: [max_attempts: 3]], fn ctx ->
+          content = ctx.doc.content

          {:ok, extracted} = ReqLLM.generate_object(
            "anthropic:claude-sonnet-4-20250514",
@@ -117,26 +117,26 @@ defmodule MyApp.DocumentProcessor do
            }
          )

-          {:ok, assign(data, :extracted, extracted)}
+          {:ok, assign(ctx, :extracted, extracted)}
        end

      _ ->
-        step :flag_for_review, fn data ->
-          {:ok, assign(data, :needs_review, true)}
+        step :flag_for_review, fn ctx ->
+          {:ok, assign(ctx, :needs_review, true)}
        end
    end

    # Runs after any branch completes
-    step :store, fn data ->
-      doc = data.doc
+    step :store, fn ctx ->
+      doc = ctx.doc

      DocumentStore.update(doc.id, %{
-        doc_type: data.doc_type,
-        extracted_data: Map.get(data, :extracted, %{}),
-        needs_review: Map.get(data, :needs_review, false)
+        doc_type: ctx.doc_type,
+        extracted_data: Map.get(ctx, :extracted, %{}),
+        needs_review: Map.get(ctx, :needs_review, false)
      })

-      {:ok, data}
+      {:ok, ctx}
    end
  end
end
@@ -150,18 +150,18 @@ end

### Retries for API Calls

```elixir
-step :ai_call, [retry: [max_attempts: 3, backoff: :exponential]], fn data ->
-  result = ReqLLM.generate_text!("anthropic:claude-sonnet-4-20250514", data.prompt)
-  {:ok, assign(data, :result, result)}
+step :ai_call, [retry: [max_attempts: 3, backoff: :exponential]], fn ctx ->
+  result = ReqLLM.generate_text!("anthropic:claude-sonnet-4-20250514", ctx.prompt)
+  {:ok, assign(ctx, :result, result)}
end
```

### Validate AI Outputs

```elixir
-step :extract, fn data ->
-  case ReqLLM.generate_object(model, data.prompt, schema: schema) do
-    {:ok, extracted} -> {:ok, assign(data, :data, extracted)}
+step :extract, fn ctx ->
+  case ReqLLM.generate_object(model, ctx.prompt, schema: schema) do
+    {:ok, extracted} -> {:ok, assign(ctx, :data, extracted)}
    {:error, _} -> raise "Invalid response"  # Triggers retry
  end
end
```
@@ -172,12 +172,12 @@ end

```elixir
use Durable.Wait

-step :review, fn data ->
-  if data.confidence < 0.8 do
+step :review, fn ctx ->
+  if ctx.confidence < 0.8 do
    result = wait_for_input("human_review", timeout: hours(24))
-    {:ok, assign(data, :human_verified, result)}
+    {:ok, assign(ctx, :human_verified, result)}
  else
-    {:ok, data}
+    {:ok, ctx}
  end
end
```

### Branch on AI Classification

```elixir
-branch on: fn data -> data.category end do
+branch on: fn ctx ->
ctx.category end do :billing -> - step :handle_billing, fn data -> {:ok, data} end + step :handle_billing, fn ctx -> {:ok, ctx} end :technical -> - step :handle_technical, fn data -> {:ok, data} end + step :handle_technical, fn ctx -> {:ok, ctx} end _ -> - step :handle_default, fn data -> {:ok, data} end + step :handle_default, fn ctx -> {:ok, ctx} end end ``` diff --git a/guides/branching.md b/guides/branching.md index 1236a17..f4bf4fb 100644 --- a/guides/branching.md +++ b/guides/branching.md @@ -12,32 +12,32 @@ defmodule MyApp.DocumentProcessor do use Durable.Helpers workflow "process_document" do - step :classify, fn data -> - doc_type = AI.classify(data["content"]) - {:ok, assign(data, :doc_type, doc_type)} + step :classify, fn ctx -> + doc_type = AI.classify(ctx["content"]) + {:ok, assign(ctx, :doc_type, doc_type)} end - branch on: fn data -> data.doc_type end do + branch on: fn ctx -> ctx.doc_type end do :invoice -> - step :process_invoice, fn data -> - {:ok, assign(data, :extracted, extract_invoice_data(data))} + step :process_invoice, fn ctx -> + {:ok, assign(ctx, :extracted, extract_invoice_data(ctx))} end :contract -> - step :process_contract, fn data -> - {:ok, assign(data, :extracted, extract_contract_data(data))} + step :process_contract, fn ctx -> + {:ok, assign(ctx, :extracted, extract_contract_data(ctx))} end _ -> - step :manual_review, fn data -> - {:ok, assign(data, :needs_review, true)} + step :manual_review, fn ctx -> + {:ok, assign(ctx, :needs_review, true)} end end # Runs after ANY branch completes - step :save, fn data -> - save_to_database(data) - {:ok, data} + step :save, fn ctx -> + save_to_database(ctx) + {:ok, ctx} end end end @@ -59,41 +59,41 @@ The `on:` option takes a function that returns a value. That value is matched ag ```elixir # Matching atoms -branch on: fn data -> data.status end do +branch on: fn ctx -> ctx.status end do :active -> - step :handle_active, fn data -> {:ok, data} end + step :handle_active, fn ctx -> {:ok, ctx} end :pending -> - step :handle_pending, fn data -> {:ok, data} end + step :handle_pending, fn ctx -> {:ok, ctx} end _ -> - step :handle_other, fn data -> {:ok, data} end + step :handle_other, fn ctx -> {:ok, ctx} end end # Matching strings -branch on: fn data -> data.format end do +branch on: fn ctx -> ctx.format end do "pdf" -> - step :process_pdf, fn data -> {:ok, data} end + step :process_pdf, fn ctx -> {:ok, ctx} end "docx" -> - step :process_docx, fn data -> {:ok, data} end + step :process_docx, fn ctx -> {:ok, ctx} end _ -> - step :unsupported, fn data -> {:ok, data} end + step :unsupported, fn ctx -> {:ok, ctx} end end # Matching booleans -branch on: fn data -> data.is_premium end do +branch on: fn ctx -> ctx.is_premium end do true -> - step :premium_flow, fn data -> {:ok, data} end + step :premium_flow, fn ctx -> {:ok, ctx} end false -> - step :standard_flow, fn data -> {:ok, data} end + step :standard_flow, fn ctx -> {:ok, ctx} end end # Matching integers -branch on: fn data -> data.tier end do +branch on: fn ctx -> ctx.tier end do 1 -> - step :tier_one, fn data -> {:ok, data} end + step :tier_one, fn ctx -> {:ok, ctx} end 2 -> - step :tier_two, fn data -> {:ok, data} end + step :tier_two, fn ctx -> {:ok, ctx} end 3 -> - step :tier_three, fn data -> {:ok, data} end + step :tier_three, fn ctx -> {:ok, ctx} end end ``` @@ -102,23 +102,23 @@ end Each branch can contain multiple steps that execute sequentially: ```elixir -branch on: fn data -> data.order_type end do +branch on: fn ctx -> ctx.order_type end do :subscription 
-> - step :validate_subscription, fn data -> - {:ok, assign(data, :validated, validate_recurring_payment(data))} + step :validate_subscription, fn ctx -> + {:ok, assign(ctx, :validated, validate_recurring_payment(ctx))} end - step :setup_billing, fn data -> - {:ok, assign(data, :billing, create_subscription_billing(data))} + step :setup_billing, fn ctx -> + {:ok, assign(ctx, :billing, create_subscription_billing(ctx))} end - step :schedule_renewals, fn data -> - {:ok, assign(data, :renewal_scheduled, schedule_monthly_charge(data))} + step :schedule_renewals, fn ctx -> + {:ok, assign(ctx, :renewal_scheduled, schedule_monthly_charge(ctx))} end :one_time -> - step :process_payment, fn data -> - {:ok, assign(data, :charged, charge_once(data))} + step :process_payment, fn ctx -> + {:ok, assign(ctx, :charged, charge_once(ctx))} end end ``` @@ -128,24 +128,24 @@ end The `_` pattern matches any value not matched by other clauses: ```elixir -branch on: fn data -> data.priority end do +branch on: fn ctx -> ctx.priority end do :critical -> - step :alert_oncall, fn data -> + step :alert_oncall, fn ctx -> PagerDuty.alert() - {:ok, data} + {:ok, ctx} end :high -> - step :create_urgent_ticket, fn data -> + step :create_urgent_ticket, fn ctx -> Tickets.create(priority: :high) - {:ok, data} + {:ok, ctx} end _ -> # Matches :medium, :low, or any other value - step :create_normal_ticket, fn data -> + step :create_normal_ticket, fn ctx -> Tickets.create(priority: :normal) - {:ok, data} + {:ok, ctx} end end ``` @@ -156,45 +156,45 @@ end ```elixir workflow "process_order" do - step :load_order, fn data -> - order = Orders.get(data["order_id"]) + step :load_order, fn ctx -> + order = Orders.get(ctx["order_id"]) {:ok, %{order: order, order_type: order.type}} end - branch on: fn data -> data.order_type end do + branch on: fn ctx -> ctx.order_type end do :digital -> - step :generate_download_link, fn data -> - link = Downloads.create(data.order) - {:ok, data + step :generate_download_link, fn ctx -> + link = Downloads.create(ctx.order) + {:ok, ctx |> assign(:delivery_method, :download) |> assign(:download_link, link)} end :physical -> - step :create_shipment, fn data -> - shipment = Shipping.create(data.order) - {:ok, data + step :create_shipment, fn ctx -> + shipment = Shipping.create(ctx.order) + {:ok, ctx |> assign(:delivery_method, :shipping) |> assign(:tracking_number, shipment.tracking)} end - step :notify_warehouse, fn data -> - Warehouse.queue_pick(data.order) - {:ok, data} + step :notify_warehouse, fn ctx -> + Warehouse.queue_pick(ctx.order) + {:ok, ctx} end :service -> - step :schedule_appointment, fn data -> - slot = Calendar.book(data.order) - {:ok, data + step :schedule_appointment, fn ctx -> + slot = Calendar.book(ctx.order) + {:ok, ctx |> assign(:delivery_method, :appointment) |> assign(:appointment, slot)} end end - step :send_confirmation, fn data -> - Email.send_order_confirmation(data.order, data.delivery_method) - {:ok, data} + step :send_confirmation, fn ctx -> + Email.send_order_confirmation(ctx.order, ctx.delivery_method) + {:ok, ctx} end end ``` @@ -203,8 +203,8 @@ end ```elixir workflow "verify_user" do - step :check_verification_status, fn data -> - user = Users.get(data["user_id"]) + step :check_verification_status, fn ctx -> + user = Users.get(ctx["user_id"]) {:ok, %{ user: user, verified: user.email_verified and user.phone_verified, @@ -212,33 +212,33 @@ workflow "verify_user" do }} end - branch on: fn data -> data.verified end do + branch on: fn ctx -> ctx.verified end do true -> - step 
:already_verified, fn data -> - {:ok, assign(data, :result, :already_verified)} + step :already_verified, fn ctx -> + {:ok, assign(ctx, :result, :already_verified)} end false -> # Nested branch for verification method - branch on: fn data -> data.verification_method end do + branch on: fn ctx -> ctx.verification_method end do :email -> - step :send_email_code, fn data -> + step :send_email_code, fn ctx -> code = generate_code() - Email.send_verification(data.user.email, code) - {:ok, assign(data, :pending_verification, :email)} + Email.send_verification(ctx.user.email, code) + {:ok, assign(ctx, :pending_verification, :email)} end :sms -> - step :send_sms_code, fn data -> + step :send_sms_code, fn ctx -> code = generate_code() - SMS.send(data.user.phone, code) - {:ok, assign(data, :pending_verification, :sms)} + SMS.send(ctx.user.phone, code) + {:ok, assign(ctx, :pending_verification, :sms)} end _ -> - step :require_manual_verification, fn data -> - Support.create_verification_ticket(data.user) - {:ok, assign(data, :pending_verification, :manual)} + step :require_manual_verification, fn ctx -> + Support.create_verification_ticket(ctx.user) + {:ok, assign(ctx, :pending_verification, :manual)} end end end @@ -249,49 +249,49 @@ end ```elixir workflow "expense_routing" do - step :load_expense, fn data -> - expense = Expenses.get(data["expense_id"]) + step :load_expense, fn ctx -> + expense = Expenses.get(ctx["expense_id"]) {:ok, %{expense: expense, amount: expense.amount}} end - step :determine_tier, fn data -> + step :determine_tier, fn ctx -> tier = cond do - data.amount > 10000 -> :executive - data.amount > 1000 -> :manager - data.amount > 100 -> :team_lead + ctx.amount > 10000 -> :executive + ctx.amount > 1000 -> :manager + ctx.amount > 100 -> :team_lead true -> :auto end - {:ok, assign(data, :approval_tier, tier)} + {:ok, assign(ctx, :approval_tier, tier)} end - branch on: fn data -> data.approval_tier end do + branch on: fn ctx -> ctx.approval_tier end do :executive -> - step :cfo_approval, fn data -> - request_approval(:cfo, data.expense) - {:ok, data} + step :cfo_approval, fn ctx -> + request_approval(:cfo, ctx.expense) + {:ok, ctx} end - step :ceo_approval, fn data -> - request_approval(:ceo, data.expense) - {:ok, data} + step :ceo_approval, fn ctx -> + request_approval(:ceo, ctx.expense) + {:ok, ctx} end :manager -> - step :manager_approval, fn data -> - request_approval(:manager, data.expense) - {:ok, data} + step :manager_approval, fn ctx -> + request_approval(:manager, ctx.expense) + {:ok, ctx} end :team_lead -> - step :team_lead_approval, fn data -> - request_approval(:team_lead, data.expense) - {:ok, data} + step :team_lead_approval, fn ctx -> + request_approval(:team_lead, ctx.expense) + {:ok, ctx} end :auto -> - step :auto_approve, fn data -> - Expenses.approve(data.expense, approver: :system) - {:ok, data} + step :auto_approve, fn ctx -> + Expenses.approve(ctx.expense, approver: :system) + {:ok, ctx} end end end @@ -304,7 +304,7 @@ Durable provides two ways to control flow: | Feature | `branch` | `decision` | |---------|----------|------------| | Use case | Execute different step groups | Jump to a specific step | -| Syntax | Pattern matching clauses | Return `{:goto, :step, data}` | +| Syntax | Pattern matching clauses | Return `{:goto, :step, ctx}` | | Multiple steps | Yes, per clause | No, single jump target | | Readability | High, reads top-to-bottom | Lower, requires tracing jumps | @@ -320,24 +320,24 @@ Durable provides two ways to control flow: ```elixir # Prefer branch 
for distinct paths -branch on: fn data -> data.type end do +branch on: fn ctx -> ctx.type end do :a -> - step :handle_a, fn data -> {:ok, data} end + step :handle_a, fn ctx -> {:ok, ctx} end :b -> - step :handle_b, fn data -> {:ok, data} end + step :handle_b, fn ctx -> {:ok, ctx} end end # Decision for simple skips -decision :check_skip, fn data -> - if data.skip_optional do - {:goto, :final_step, data} +decision :check_skip, fn ctx -> + if ctx.skip_optional do + {:goto, :final_step, ctx} else - {:ok, data} + {:ok, ctx} end end -step :optional_step, fn data -> {:ok, data} end -step :final_step, fn data -> {:ok, data} end +step :optional_step, fn ctx -> {:ok, ctx} end +step :final_step, fn ctx -> {:ok, ctx} end ``` ## Best Practices @@ -345,16 +345,16 @@ step :final_step, fn data -> {:ok, data} end ### Always Include a Default Clause ```elixir -branch on: fn data -> data.status end do +branch on: fn ctx -> ctx.status end do :active -> - step :handle_active, fn data -> {:ok, data} end + step :handle_active, fn ctx -> {:ok, ctx} end :pending -> - step :handle_pending, fn data -> {:ok, data} end + step :handle_pending, fn ctx -> {:ok, ctx} end _ -> # Handle unexpected values gracefully - step :handle_unknown, fn data -> - Logger.warning("Unknown status: #{data.status}") - {:ok, assign(data, :error, :unknown_status)} + step :handle_unknown, fn ctx -> + Logger.warning("Unknown status: #{ctx.status}") + {:ok, assign(ctx, :error, :unknown_status)} end end ``` @@ -363,23 +363,23 @@ end ```elixir # Good - each branch does one thing -branch on: fn data -> data.payment_method end do +branch on: fn ctx -> ctx.payment_method end do :card -> - step :charge_card, fn data -> {:ok, data} end + step :charge_card, fn ctx -> {:ok, ctx} end :bank -> - step :initiate_transfer, fn data -> {:ok, data} end + step :initiate_transfer, fn ctx -> {:ok, ctx} end :crypto -> - step :process_crypto, fn data -> {:ok, data} end + step :process_crypto, fn ctx -> {:ok, ctx} end end # Avoid - too much logic in branches -branch on: fn data -> data.type end do +branch on: fn ctx -> ctx.type end do :a -> - step :step1, fn data -> {:ok, data} end - step :step2, fn data -> {:ok, data} end - step :step3, fn data -> {:ok, data} end - step :step4, fn data -> {:ok, data} end - step :step5, fn data -> {:ok, data} end + step :step1, fn ctx -> {:ok, ctx} end + step :step2, fn ctx -> {:ok, ctx} end + step :step3, fn ctx -> {:ok, ctx} end + step :step4, fn ctx -> {:ok, ctx} end + step :step5, fn ctx -> {:ok, ctx} end # Consider extracting to separate workflow end ``` @@ -388,14 +388,14 @@ end ```elixir # Good -step :classify, fn data -> - {:ok, assign(data, :document_type, :invoice)} +step :classify, fn ctx -> + {:ok, assign(ctx, :document_type, :invoice)} end -branch on: fn data -> data.document_type end do ... end +branch on: fn ctx -> ctx.document_type end do ... end # Avoid -step :classify, fn data -> - {:ok, assign(data, :t, :i)} +step :classify, fn ctx -> + {:ok, assign(ctx, :t, :i)} end -branch on: fn data -> data.t end do ... end +branch on: fn ctx -> ctx.t end do ... 
end ``` diff --git a/guides/compensations.md b/guides/compensations.md index 2385f72..800689a 100644 --- a/guides/compensations.md +++ b/guides/compensations.md @@ -15,33 +15,33 @@ defmodule MyApp.BookTripWorkflow do workflow "book_trip" do # Step 1: Book flight (with compensation) - step :book_flight, [compensate: :cancel_flight], fn data -> - booking = FlightAPI.book(data["flight"]) - {:ok, assign(data, :flight_id, booking.id)} + step :book_flight, [compensate: :cancel_flight], fn ctx -> + booking = FlightAPI.book(ctx["flight"]) + {:ok, assign(ctx, :flight_id, booking.id)} end # Step 2: Book hotel (with compensation) - step :book_hotel, [compensate: :cancel_hotel], fn data -> - booking = HotelAPI.book(data["hotel"]) - {:ok, assign(data, :hotel_id, booking.id)} + step :book_hotel, [compensate: :cancel_hotel], fn ctx -> + booking = HotelAPI.book(ctx["hotel"]) + {:ok, assign(ctx, :hotel_id, booking.id)} end # Step 3: Charge payment (no compensation needed - it's the last step) - step :charge_payment, fn data -> + step :charge_payment, fn ctx -> # If this fails, compensations run automatically! - PaymentService.charge(data.total) - {:ok, data} + PaymentService.charge(ctx.total) + {:ok, ctx} end # Compensation handlers - compensate :cancel_flight, fn data -> - FlightAPI.cancel(data.flight_id) - {:ok, data} + compensate :cancel_flight, fn ctx -> + FlightAPI.cancel(ctx.flight_id) + {:ok, ctx} end - compensate :cancel_hotel, fn data -> - HotelAPI.cancel(data.hotel_id) - {:ok, data} + compensate :cancel_hotel, fn ctx -> + HotelAPI.cancel(ctx.hotel_id) + {:ok, ctx} end end end @@ -62,28 +62,28 @@ end ### Defining Compensations ```elixir -compensate :handler_name, fn data -> +compensate :handler_name, fn ctx -> # Undo logic here - # Has access to workflow data - {:ok, data} + # Has access to workflow context + {:ok, ctx} end ``` ### Linking Steps to Compensations ```elixir -step :my_step, [compensate: :my_handler], fn data -> +step :my_step, [compensate: :my_handler], fn ctx -> # Step logic - {:ok, data} + {:ok, ctx} end ``` ### Compensation Options ```elixir -compensate :handler_name, [retry: [max_attempts: 3, backoff: :exponential]], fn data -> +compensate :handler_name, [retry: [max_attempts: 3, backoff: :exponential]], fn ctx -> # Compensation with retry - {:ok, data} + {:ok, ctx} end ``` @@ -110,14 +110,14 @@ end Compensations may run multiple times (retries, manual recovery). 
Design them to be safe to repeat: ```elixir -compensate :cancel_booking, fn data -> - booking_id = data.booking_id +compensate :cancel_booking, fn ctx -> + booking_id = ctx.booking_id # Check if already cancelled before cancelling case BookingAPI.get(booking_id) do - {:ok, %{status: :cancelled}} -> {:ok, data} - {:ok, _booking} -> BookingAPI.cancel(booking_id); {:ok, data} - {:error, :not_found} -> {:ok, data} + {:ok, %{status: :cancelled}} -> {:ok, ctx} + {:ok, _booking} -> BookingAPI.cancel(booking_id); {:ok, ctx} + {:error, :not_found} -> {:ok, ctx} end end ``` @@ -127,14 +127,14 @@ end Always store resource IDs in data so compensations can find them: ```elixir -step :create_resource, [compensate: :delete_resource], fn data -> - resource = ExternalAPI.create(data.params) - {:ok, assign(data, :resource_id, resource.id)} # Store for compensation +step :create_resource, [compensate: :delete_resource], fn ctx -> + resource = ExternalAPI.create(ctx.params) + {:ok, assign(ctx, :resource_id, resource.id)} # Store for compensation end -compensate :delete_resource, fn data -> - ExternalAPI.delete(data.resource_id) - {:ok, data} +compensate :delete_resource, fn ctx -> + ExternalAPI.delete(ctx.resource_id) + {:ok, ctx} end ``` @@ -207,48 +207,48 @@ defmodule MyApp.ProcessOrderWorkflow do use Durable.Helpers workflow "process_order" do - step :reserve_inventory, [compensate: :release_inventory], fn data -> - items = data["items"] + step :reserve_inventory, [compensate: :release_inventory], fn ctx -> + items = ctx["items"] reservations = Enum.map(items, fn item -> {:ok, res} = Inventory.reserve(item.sku, item.quantity) res.id end) - {:ok, assign(data, :reservation_ids, reservations)} + {:ok, assign(ctx, :reservation_ids, reservations)} end - step :charge_customer, [compensate: :refund_customer], fn data -> - amount = data["total"] - {:ok, charge} = Payments.charge(data["customer_id"], amount) - {:ok, assign(data, :charge_id, charge.id)} + step :charge_customer, [compensate: :refund_customer], fn ctx -> + amount = ctx["total"] + {:ok, charge} = Payments.charge(ctx["customer_id"], amount) + {:ok, assign(ctx, :charge_id, charge.id)} end - step :create_shipment, fn data -> + step :create_shipment, fn ctx -> # If shipping fails, inventory is released and payment refunded - {:ok, shipment} = Shipping.create(data["address"], data["items"]) - {:ok, assign(data, :shipment_id, shipment.id)} + {:ok, shipment} = Shipping.create(ctx["address"], ctx["items"]) + {:ok, assign(ctx, :shipment_id, shipment.id)} end - step :send_confirmation, fn data -> - Email.send_order_confirmation(data["customer_email"], %{ - shipment_id: data.shipment_id, - charge_id: data.charge_id + step :send_confirmation, fn ctx -> + Email.send_order_confirmation(ctx["customer_email"], %{ + shipment_id: ctx.shipment_id, + charge_id: ctx.charge_id }) - {:ok, data} + {:ok, ctx} end # Compensations - compensate :release_inventory, fn data -> - Enum.each(data.reservation_ids, fn id -> + compensate :release_inventory, fn ctx -> + Enum.each(ctx.reservation_ids, fn id -> Inventory.release(id) end) - {:ok, data} + {:ok, ctx} end - compensate :refund_customer, fn data -> - Payments.refund(data.charge_id) - {:ok, data} + compensate :refund_customer, fn ctx -> + Payments.refund(ctx.charge_id) + {:ok, ctx} end end end diff --git a/guides/foreach.md b/guides/foreach.md deleted file mode 100644 index 8b173f9..0000000 --- a/guides/foreach.md +++ /dev/null @@ -1,279 +0,0 @@ -# ForEach Loops - -Process collections of items with support for sequential or 
concurrent execution. - -## Basic Usage - -```elixir -defmodule MyApp.OrderProcessor do - use Durable - use Durable.Helpers - - workflow "process_orders" do - step :fetch_orders, fn _input -> - orders = Orders.fetch_pending() - {:ok, %{orders: orders, processed: []}} - end - - # Process each order sequentially - foreach :process_each, items: fn data -> data.orders end do - # Foreach steps receive (data, item, index) - step :process, fn data, order, _idx -> - result = Orders.process(order) - {:ok, append(data, :processed, result)} - end - end - - step :summary, fn data -> - count = length(data.processed) - Logger.info("Processed #{count} orders") - {:ok, data} - end - end -end -``` - -## Accessing Item and Index - -Inside a foreach block, step functions receive three arguments: - -| Argument | Description | -|----------|-------------| -| `data` | The accumulated workflow data | -| `item` | The current item being processed | -| `index` | The 0-based index of the current item | - -```elixir -foreach :send_emails, items: fn data -> data.recipients end do - step :send, fn data, recipient, index -> - total = length(data.recipients) - Email.send(recipient, subject: "Email #{index + 1} of #{total}") - {:ok, increment(data, :sent_count)} - end -end -``` - -## Options - -### Item Source (`:items`) - Required - -A function that extracts the collection from the data: - -```elixir -# From data key -foreach :process, items: fn data -> data.my_items end do - step :work, fn data, item, _idx -> {:ok, data} end -end - -# Computed items -foreach :process, items: fn data -> Enum.filter(data.items, & &1.active) end do - step :work, fn data, item, _idx -> {:ok, data} end -end -``` - -### Concurrency (`:concurrency`) - -Process multiple items simultaneously: - -```elixir -# Sequential (default) -foreach :process, items: fn data -> data.items end, concurrency: 1 do - # Items processed one at a time - step :work, fn data, item, _idx -> {:ok, data} end -end - -# Process 5 items at once -foreach :process, items: fn data -> data.items end, concurrency: 5 do - # Up to 5 items processed concurrently - step :work, fn data, item, _idx -> {:ok, data} end -end -``` - -### Error Handling (`:on_error`) - -| Strategy | Description | -|----------|-------------| -| `:fail_fast` | Stop on first error (default) | -| `:continue` | Continue processing, collect errors | - -```elixir -# Stop on first failure -foreach :process, items: fn data -> data.items end, on_error: :fail_fast do - step :work, fn data, item, _idx -> - # If this fails, remaining items are skipped - {:ok, data} - end -end - -# Continue despite errors -foreach :process, items: fn data -> data.items end, on_error: :continue do - step :work, fn data, item, _idx -> - # Errors are collected, processing continues - {:ok, data} - end -end -``` - -## Examples - -### Batch Processing with Concurrency - -```elixir -workflow "send_notifications" do - step :fetch_users, fn _input -> - users = Users.all_active() - {:ok, %{users: users, sent_count: 0}} - end - - # Send to 10 users at a time - foreach :notify_each, items: fn data -> data.users end, concurrency: 10 do - step :send, [retry: [max_attempts: 3]], fn data, user, _idx -> - Notifications.send(user.id, "Weekly update") - {:ok, increment(data, :sent_count)} - end - end - - step :report, fn data -> - Logger.info("Sent #{data.sent_count} notifications") - {:ok, data} - end -end -``` - -### Processing with Error Tolerance - -```elixir -workflow "import_records" do - step :load_data, fn input -> - records = 
CSVParser.parse(input["file_path"]) - {:ok, %{records: records, errors: [], success_count: 0}} - end - - foreach :import_each, - items: fn data -> data.records end, - on_error: :continue do - - step :import, fn data, record, index -> - case Database.insert(record) do - {:ok, _} -> - {:ok, increment(data, :success_count)} - {:error, reason} -> - {:ok, append(data, :errors, %{index: index, reason: reason})} - end - end - end - - step :summary, fn data -> - Logger.info("Imported #{data.success_count} records, #{length(data.errors)} errors") - - data = if data.errors != [] do - assign(data, :has_errors, true) - else - data - end - - {:ok, data} - end -end -``` - -### Multi-Step Processing per Item - -```elixir -foreach :process_documents, items: fn data -> data.documents end do - step :validate, fn data, doc, _idx -> - if valid?(doc) do - {:ok, assign(data, :current_valid, true)} - else - raise "Invalid document: #{doc.id}" - end - end - - step :transform, fn data, doc, _idx -> - transformed = transform(doc) - {:ok, assign(data, :current_result, transformed)} - end - - step :save, fn data, _doc, _idx -> - result = data.current_result - Database.save(result) - {:ok, append(data, :saved_ids, result.id)} - end -end -``` - -### Tracking Progress - -```elixir -foreach :process_large_batch, items: fn data -> data.items end do - step :process, fn data, item, index -> - total = length(data.items) - - if rem(index, 100) == 0 do - Logger.info("Progress: #{index}/#{total}") - end - - process(item) - {:ok, data} - end -end -``` - -## Best Practices - -### Choose Appropriate Concurrency - -```elixir -# External API with rate limits - low concurrency -foreach :call_api, items: fn data -> data.requests end, concurrency: 2 do - step :call, fn data, req, _idx -> {:ok, data} end -end - -# CPU-bound local work - match CPU cores -foreach :process, items: fn data -> data.data end, concurrency: System.schedulers_online() do - step :work, fn data, item, _idx -> {:ok, data} end -end - -# Independent I/O operations - higher concurrency -foreach :fetch, items: fn data -> data.urls end, concurrency: 20 do - step :download, fn data, url, _idx -> {:ok, data} end -end -``` - -### Use `:continue` for Non-Critical Items - -```elixir -# Sending emails - some failures are acceptable -foreach :send_emails, items: fn data -> data.recipients end, on_error: :continue do - step :send, fn data, recipient, _idx -> - case Mailer.send(recipient) do - :ok -> {:ok, increment(data, :sent)} - {:error, reason} -> {:ok, append(data, :failed, {recipient, reason})} - end - end -end -``` - -### Combine with Parallel for Complex Pipelines - -```elixir -foreach :process_orders, items: fn data -> data.orders end do - # Each order has multiple independent tasks - parallel do - step :update_inventory, fn data, order, _idx -> - Inventory.reserve(order.items) - {:ok, data} - end - - step :notify_warehouse, fn data, order, _idx -> - Warehouse.notify(order) - {:ok, data} - end - - step :send_confirmation, fn data, order, _idx -> - Email.send_order_confirmation(order) - {:ok, data} - end - end -end -``` diff --git a/guides/parallel.md b/guides/parallel.md index 6e37955..37f6fe4 100644 --- a/guides/parallel.md +++ b/guides/parallel.md @@ -1,6 +1,6 @@ # Parallel Execution -Run multiple steps concurrently to speed up workflows. +Run multiple steps concurrently and collect results as tagged tuples. ## Basic Usage @@ -8,6 +8,7 @@ Run multiple steps concurrently to speed up workflows. 
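+# Each parallel step below returns only the map of new data it produces;
+# the engine collects those returns into data[:__results__], keyed by step name.
+# (Mailer, Workspaces, and Billing stand in for your own modules.)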
defmodule MyApp.OnboardingWorkflow do use Durable use Durable.Helpers + use Durable.Context workflow "onboard_user" do step :create_user, fn data -> @@ -19,92 +20,190 @@ defmodule MyApp.OnboardingWorkflow do parallel do step :send_welcome_email, fn data -> Mailer.send_welcome(data.user_id) - {:ok, assign(data, :email_sent, true)} + {:ok, %{email_sent: true}} end step :provision_workspace, fn data -> workspace = Workspaces.create(data.user_id) - {:ok, assign(data, :workspace_id, workspace.id)} + {:ok, %{workspace_id: workspace.id}} end step :setup_billing, fn data -> Billing.setup(data.user_id) - {:ok, assign(data, :billing_ready, true)} + {:ok, %{billing_ready: true}} end end - # Runs after ALL parallel steps complete + # Access results from parallel steps step :complete, fn data -> - Logger.info("User onboarded: #{data.user_id}") - {:ok, data} + results = data[:__results__] + + case {results[:send_welcome_email], results[:provision_workspace]} do + {{:ok, _}, {:ok, _}} -> + {:ok, Map.put(data, :onboarded, true)} + + _ -> + {:error, "Onboarding incomplete"} + end end end end ``` -## Options +## Results Model -### Merge Strategy (`:merge`) +Parallel steps produce results stored in the `__results__` key with tagged tuples: -Controls how data changes from parallel steps are combined. +```elixir +# After parallel block completes, context contains: +%{ + ...original_context, + __results__: %{ + step_name: {:ok, returned_data} | {:error, reason} + } +} +``` -| Strategy | Description | -|----------|-------------| -| `:deep_merge` | Deep merge all data (default) | -| `:last_wins` | Last completed step's data wins on conflicts | -| `:collect` | Collect into `%{step_name => data_changes}` | +### Accessing Results + +Use `parallel_results/0`, `parallel_result/1`, and `parallel_ok?/1` helpers: + +```elixir +step :handle_results, fn ctx -> + # Get all results + results = parallel_results() # => %{payment: {:ok, ...}, delivery: {:error, ...}} + + # Get specific result + case parallel_result(:payment) do + {:ok, payment} -> # handle success + {:error, reason} -> # handle error + end + + # Check if step succeeded + if parallel_ok?(:payment) do + # payment was successful + end + + {:ok, ctx} +end +``` + +Or access directly from the context: ```elixir -# Deep merge (default) - combines all nested maps -parallel merge: :deep_merge do - step :a, fn data -> - {:ok, assign(data, :settings, %{notifications: true})} +step :handle_results, fn ctx -> + case ctx[:__results__][:payment] do + {:ok, payment} -> {:ok, Map.put(ctx, :payment_id, payment.id)} + {:error, _} -> {:goto, :handle_payment_failure, ctx} end - step :b, fn data -> - {:ok, assign(data, :settings, %{theme: "dark"})} +end +``` + +## The `into:` Callback + +Use `into:` to transform results before passing to the next step: + +```elixir +parallel into: fn ctx, results -> + # ctx = original context (unchanged by parallel steps) + # results = %{step_name => {:ok, data} | {:error, reason}} + + case {results[:payment], results[:delivery]} do + {{:ok, payment}, {:ok, delivery}} -> + # Return transformed context + {:ok, Map.merge(ctx, %{ + payment_id: payment.id, + delivery_status: delivery.status + })} + + {{:ok, _}, {:error, :not_found}} -> + # Jump to another step + {:goto, :handle_backorder, ctx} + + _ -> + # Fail the workflow + {:error, "Critical failure"} end +end do + step :payment, fn ctx -> {:ok, %{id: 123}} end + step :delivery, fn ctx -> {:error, :not_found} end end -# Result: %{settings: %{notifications: true, theme: "dark"}} +``` + +### `into:` 
Return Values + +| Return | Effect | +|--------|--------| +| `{:ok, ctx}` | Continue to next step with new context | +| `{:error, reason}` | Fail the workflow | +| `{:goto, :step_name, ctx}` | Jump to the named step | + +When `into:` is provided, the `__results__` key is NOT added to the context - the `into:` callback controls what the next step receives. -# Collect - keeps results separate -parallel merge: :collect do - step :fetch_orders, fn data -> - {:ok, assign(data, :count, Orders.count())} +## The `returns:` Option + +Customize the key name for a step's result: + +```elixir +parallel do + step :fetch_order, returns: :order do + fn ctx -> {:ok, %{items: [...]}} end end - step :fetch_users, fn data -> - {:ok, assign(data, :count, Users.count())} + + step :fetch_user, returns: :user do + fn ctx -> {:ok, %{name: "John"}} end end end -# Result: %{parallel_results: %{fetch_orders: %{count: 10}, fetch_users: %{count: 5}}} + +# Results: +# %{__results__: %{order: {:ok, %{items: [...]}}, user: {:ok, %{name: "John"}}}} ``` -### Error Handling (`:on_error`) +This is useful when the step name is verbose but you want a simpler key in results. + +## Error Handling -Controls what happens when a parallel step fails. +### `:on_error` Option | Strategy | Description | |----------|-------------| -| `:fail_fast` | Cancel other steps immediately (default) | -| `:complete_all` | Wait for all steps, collect errors | +| `:fail_fast` | Stop on first error (default) | +| `:complete_all` | Wait for all steps, collect all results | ```elixir -# Fail fast (default) - stop everything on first error +# Fail fast (default) - workflow fails on first error parallel on_error: :fail_fast do - step :critical_task, fn data -> - # If this fails, other tasks are cancelled - {:ok, data} + step :critical_task, fn ctx -> + {:ok, ctx} end end -# Complete all - continue despite errors +# Complete all - continue despite errors, let next step handle parallel on_error: :complete_all do - step :send_sms, fn data -> - # Even if SMS fails... 
- {:ok, data} + step :send_sms, fn ctx -> + case SMS.send(ctx.user_id) do + :ok -> {:ok, %{sms_sent: true}} + {:error, e} -> {:error, e} # Preserved in results + end + end + + step :send_email, fn ctx -> + case Mailer.send(ctx.user_id) do + :ok -> {:ok, %{email_sent: true}} + {:error, e} -> {:error, e} # Preserved in results + end end - step :send_email, fn data -> - # ...email still runs - {:ok, data} +end + +step :check_notifications, fn ctx -> + results = ctx[:__results__] + sms_ok = match?({:ok, _}, results[:send_sms]) + email_ok = match?({:ok, _}, results[:send_email]) + + cond do + sms_ok and email_ok -> {:ok, Map.put(ctx, :all_sent, true)} + sms_ok or email_ok -> {:ok, Map.put(ctx, :partial_sent, true)} + true -> {:error, "All notifications failed"} end end ``` @@ -119,81 +218,109 @@ workflow "dashboard_data" do {:ok, %{user_id: input["user_id"]}} end - parallel do - step :fetch_orders, fn data -> - orders = Orders.recent(limit: 10) - {:ok, assign(data, :orders, orders)} + parallel on_error: :complete_all do + step :fetch_orders, fn ctx -> + case Orders.recent(limit: 10) do + {:ok, orders} -> {:ok, %{orders: orders}} + {:error, e} -> {:error, e} + end end - step :fetch_metrics, fn data -> - metrics = Analytics.daily_metrics() - {:ok, assign(data, :metrics, metrics)} + step :fetch_metrics, fn ctx -> + {:ok, %{metrics: Analytics.daily_metrics()}} end - step :fetch_notifications, fn data -> - notifications = Notifications.unread() - {:ok, assign(data, :notifications, notifications)} + step :fetch_notifications, fn ctx -> + {:ok, %{notifications: Notifications.unread()}} end end - step :build_dashboard, fn data -> - dashboard = %{ - orders: data.orders, - metrics: data.metrics, - notifications: data.notifications - } - {:ok, assign(data, :dashboard, dashboard)} + step :build_dashboard, fn ctx -> + results = ctx[:__results__] + + # Handle partial failures gracefully + orders = case results[:fetch_orders] do + {:ok, data} -> data.orders + {:error, _} -> [] + end + + metrics = case results[:fetch_metrics] do + {:ok, data} -> data.metrics + {:error, _} -> %{} + end + + {:ok, %{ + dashboard: %{orders: orders, metrics: metrics}, + has_errors: Enum.any?(results, fn {_, r} -> match?({:error, _}, r) end) + }} end end ``` -### Independent Operations with Error Tolerance +### Conditional Branching Based on Results ```elixir -workflow "notify_all" do - step :prepare, fn data -> - {:ok, %{user_id: data["user_id"], message: data["message"]}} +workflow "order_processing" do + step :validate, fn input -> + {:ok, %{order_id: input["order_id"]}} end - # All notifications run even if some fail - parallel on_error: :complete_all do - step :send_email, [retry: [max_attempts: 3]], fn data -> - Mailer.send(data.user_id, data.message) - {:ok, data} - end + parallel into: fn ctx, results -> + case {results[:check_inventory], results[:check_payment]} do + {{:ok, inv}, {:ok, pay}} when inv.available and pay.authorized -> + {:ok, Map.merge(ctx, %{inventory: inv, payment: pay, ready: true})} - step :send_sms, fn data -> - SMS.send(data.user_id, data.message) - {:ok, data} + {{:ok, _}, {:error, :card_declined}} -> + {:goto, :handle_payment_issue, ctx} + + {{:error, :out_of_stock}, _} -> + {:goto, :handle_backorder, ctx} + + _ -> + {:error, "Order validation failed"} + end + end do + step :check_inventory, fn ctx -> + case Inventory.check(ctx.order_id) do + {:ok, inv} -> {:ok, %{available: inv.quantity > 0, quantity: inv.quantity}} + {:error, e} -> {:error, e} + end end - step :send_push, fn data -> - 
Push.send(data.user_id, data.message) - {:ok, data} + step :check_payment, fn ctx -> + case Payment.authorize(ctx.order_id) do + {:ok, auth} -> {:ok, %{authorized: true, auth_code: auth.code}} + {:error, e} -> {:error, e} + end end end - step :log_results, fn data -> - Logger.info("Notifications sent for user #{data.user_id}") - {:ok, data} + step :fulfill_order, fn ctx -> + # Only reached if both checks passed + {:ok, Map.put(ctx, :fulfilled, true)} + end + + step :handle_payment_issue, fn ctx -> + {:ok, Map.put(ctx, :needs_payment_retry, true)} + end + + step :handle_backorder, fn ctx -> + {:ok, Map.put(ctx, :backordered, true)} end end ``` -### Combining with Retry - -Individual steps in a parallel block can have their own retry configuration: +### With Retry on Individual Steps ```elixir parallel do - step :external_api_call, [retry: [max_attempts: 5, backoff: :exponential]], fn data -> + step :external_api_call, [retry: [max_attempts: 5, backoff: :exponential]], fn ctx -> result = ExternalAPI.fetch_data() - {:ok, assign(data, :external, result)} + {:ok, %{external: result}} end - step :quick_local_task, fn data -> - result = LocalDB.query() - {:ok, assign(data, :local, result)} + step :quick_local_task, fn ctx -> + {:ok, %{local: LocalDB.query()}} end end ``` @@ -201,67 +328,75 @@ end ## How It Works 1. The parallel block starts all steps concurrently as separate tasks -2. Each step runs independently with its own data snapshot -3. When all steps complete, data is merged based on the merge strategy -4. Execution continues to the next step after the parallel block +2. Each step receives a copy of the current context (steps are isolated) +3. When all steps complete, results are collected into `__results__` map +4. If `into:` is provided, it transforms the results +5. Execution continues to the next step ## Best Practices ### Keep Parallel Steps Independent -Parallel steps shouldn't depend on each other's data changes: +Parallel steps shouldn't depend on each other's data: ```elixir # Good - independent operations parallel do - step :a, fn data -> - {:ok, assign(data, :result_a, compute_a())} - end - step :b, fn data -> - {:ok, assign(data, :result_b, compute_b())} - end + step :a, fn ctx -> {:ok, %{result_a: compute_a()}} end + step :b, fn ctx -> {:ok, %{result_b: compute_b()}} end end # Bad - step b depends on step a's data parallel do - step :a, fn data -> - {:ok, assign(data, :value, 42)} + step :a, fn ctx -> {:ok, Map.put(ctx, :value, 42)} end + step :b, fn ctx -> + # ctx doesn't have :value - steps are isolated! + x = ctx[:value] # Returns nil + {:ok, ctx} end - step :b, fn data -> - # This won't see :value from step a! - x = data[:value] # Returns nil - {:ok, data} +end +``` + +### Use `into:` for Complex Result Handling + +When you need to: +- Transform multiple results into a single value +- Make branching decisions based on results +- Fail early on certain combinations + +```elixir +parallel into: fn ctx, results -> + # Clear logic for handling result combinations + case {results[:a], results[:b]} do + {{:ok, a}, {:ok, b}} -> {:ok, combine(ctx, a, b)} + {{:error, _}, _} -> {:goto, :handle_a_failure, ctx} + {_, {:error, _}} -> {:goto, :handle_b_failure, ctx} end +end do + step :a, fn ctx -> ... end + step :b, fn ctx -> ... 
end end ``` -### Use Appropriate Error Strategy +### Choose the Right Error Strategy - Use `:fail_fast` when all steps must succeed (transactions, critical paths) -- Use `:complete_all` when steps are independent (notifications, logging) +- Use `:complete_all` when steps are independent and you want to handle partial success -### Consider Step Granularity +### Return Focused Data from Steps -Group related work into single steps rather than many tiny parallel steps: +Return only the data the step produces, not the entire context: ```elixir -# Good - logical grouping -parallel do - step :process_images, fn data -> - Enum.each(data.images, &process_image/1) - {:ok, data} - end - step :process_documents, fn data -> - Enum.each(data.documents, &process_doc/1) - {:ok, data} - end +# Good - return just the new data +step :fetch_user, fn ctx -> + user = Users.get(ctx.user_id) + {:ok, %{name: user.name, email: user.email}} end -# Less ideal - too many small parallel steps -parallel do - step :image_1, fn data -> process_image(data.image_1); {:ok, data} end - step :image_2, fn data -> process_image(data.image_2); {:ok, data} end - step :image_3, fn data -> process_image(data.image_3); {:ok, data} end - # Use foreach for this pattern instead +# Less ideal - returning modified context +step :fetch_user, fn ctx -> + user = Users.get(ctx.user_id) + {:ok, Map.put(ctx, :user, user)} # Works but adds unnecessary data end ``` diff --git a/guides/waiting.md b/guides/waiting.md index 6d25434..62f7a9f 100644 --- a/guides/waiting.md +++ b/guides/waiting.md @@ -31,19 +31,19 @@ Suspend the workflow for a specific duration. ```elixir workflow "delayed_task" do - step :start, fn data -> + step :start, fn ctx -> Logger.info("Starting task") - {:ok, data} + {:ok, ctx} end - step :wait, fn data -> + step :wait, fn ctx -> sleep(minutes(30)) - {:ok, data} + {:ok, ctx} end - step :continue, fn data -> + step :continue, fn ctx -> Logger.info("Resumed after 30 minutes") - {:ok, data} + {:ok, ctx} end end ``` @@ -62,21 +62,21 @@ end Suspend until a specific datetime. ```elixir -step :schedule, fn data -> +step :schedule, fn ctx -> # Wake up at midnight UTC schedule_at(~U[2025-12-25 00:00:00Z]) - {:ok, data} + {:ok, ctx} end # Or use time helpers -step :wait_until_business_hours, fn data -> +step :wait_until_business_hours, fn ctx -> schedule_at(next_business_day(hour: 9)) - {:ok, data} + {:ok, ctx} end -step :wait_for_monday, fn data -> +step :wait_for_monday, fn ctx -> schedule_at(next_weekday(:monday, hour: 9)) - {:ok, data} + {:ok, ctx} end ``` @@ -109,26 +109,26 @@ Suspend until an external system sends an event. ```elixir workflow "payment_flow" do - step :initiate_payment, fn data -> - payment = Payments.create(data["amount"]) - {:ok, assign(data, :payment_id, payment.id)} + step :initiate_payment, fn ctx -> + payment = Payments.create(ctx["amount"]) + {:ok, assign(ctx, :payment_id, payment.id)} end - step :await_confirmation, fn data -> + step :await_confirmation, fn ctx -> result = wait_for_event("payment_confirmed", timeout: minutes(15), timeout_value: :timeout ) - {:ok, assign(data, :payment_result, result)} + {:ok, assign(ctx, :payment_result, result)} end - step :process_result, fn data -> - case data.payment_result do + step :process_result, fn ctx -> + case ctx.payment_result do :timeout -> handle_timeout() %{status: "success"} -> handle_success() _ -> handle_failure() end - {:ok, data} + {:ok, ctx} end end ``` @@ -146,23 +146,23 @@ Wait for any one of multiple events. Returns `{event_name, payload}`. 
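+The wait resolves as soon as the first listed event arrives; the rest are ignored. An external system delivers the event with `Durable.send_event/3`, as shown in the README — for example:
+
+```elixir
+# Resumes the wait_for_any/2 call below with {"success", %{order_id: 42}}
+Durable.send_event(workflow_id, "success", %{order_id: 42})
+```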
```elixir workflow "order_status" do - step :await_result, fn data -> + step :await_result, fn ctx -> {event, payload} = wait_for_any(["success", "failure", "cancelled"], timeout: hours(24), timeout_value: {:timeout, nil} ) - {:ok, data + {:ok, ctx |> assign(:result_event, event) |> assign(:result_data, payload)} end - step :handle_result, fn data -> - case data.result_event do - "success" -> process_success(data.result_data) - "failure" -> process_failure(data.result_data) + step :handle_result, fn ctx -> + case ctx.result_event do + "success" -> process_success(ctx.result_data) + "failure" -> process_failure(ctx.result_data) "cancelled" -> process_cancellation() end - {:ok, data} + {:ok, ctx} end end ``` @@ -173,16 +173,16 @@ Wait for all specified events. Returns `%{event_name => payload}`. ```elixir workflow "multi_approval" do - step :await_approvals, fn data -> + step :await_approvals, fn ctx -> results = wait_for_all(["manager_approval", "legal_approval", "finance_approval"], timeout: days(7), timeout_value: {:timeout, :partial} ) - {:ok, assign(data, :approvals, results)} + {:ok, assign(ctx, :approvals, results)} end - step :process_approvals, fn data -> - case data.approvals do + step :process_approvals, fn ctx -> + case ctx.approvals do {:timeout, :partial} -> handle_incomplete_approvals() %{} = all_approvals -> @@ -192,7 +192,7 @@ workflow "multi_approval" do reject_request() end end - {:ok, data} + {:ok, ctx} end end ``` @@ -218,27 +218,27 @@ Suspend until a human provides input. ```elixir workflow "approval_flow" do - step :prepare_request, fn data -> - {:ok, %{request: data}} + step :prepare_request, fn ctx -> + {:ok, %{request: ctx}} end - step :await_approval, fn data -> + step :await_approval, fn ctx -> result = wait_for_input("manager_approval", type: :approval, prompt: "Approve this request?", timeout: days(3), timeout_value: :auto_rejected ) - {:ok, assign(data, :approval, result)} + {:ok, assign(ctx, :approval, result)} end - step :process_decision, fn data -> - status = case data.approval do + step :process_decision, fn ctx -> + status = case ctx.approval do :auto_rejected -> :rejected_timeout %{approved: true} -> :approved _ -> :rejected end - {:ok, assign(data, :status, status)} + {:ok, assign(ctx, :status, status)} end end ``` @@ -271,14 +271,14 @@ end Wait for an approval decision. Returns `:approved` or `:rejected`. ```elixir -step :manager_review, fn data -> +step :manager_review, fn ctx -> result = wait_for_approval("expense_approval", prompt: "Approve expense for $500?", metadata: %{employee: "John", amount: 500}, timeout: days(3), timeout_value: :auto_approved ) - {:ok, assign(data, :approved, result == :approved)} + {:ok, assign(ctx, :approved, result == :approved)} end ``` @@ -287,7 +287,7 @@ end Wait for a single choice selection. ```elixir -step :select_shipping, fn data -> +step :select_shipping, fn ctx -> method = wait_for_choice("shipping_method", prompt: "Select shipping method:", choices: [ @@ -297,7 +297,7 @@ step :select_shipping, fn data -> timeout: hours(24), timeout_value: :standard ) - {:ok, assign(data, :shipping, method)} + {:ok, assign(ctx, :shipping, method)} end ``` @@ -306,13 +306,13 @@ end Wait for text input. 
```elixir -step :get_reason, fn data -> +step :get_reason, fn ctx -> reason = wait_for_text("rejection_reason", prompt: "Please provide a reason for rejection:", timeout: hours(4), timeout_value: "No reason provided" ) - {:ok, assign(data, :reason, reason)} + {:ok, assign(ctx, :reason, reason)} end ``` @@ -321,7 +321,7 @@ end Wait for form submission. ```elixir -step :collect_details, fn data -> +step :collect_details, fn ctx -> result = wait_for_form("equipment_request", prompt: "Please specify equipment needs", fields: [ @@ -331,7 +331,7 @@ step :collect_details, fn data -> ], timeout: days(7) ) - {:ok, assign(data, :equipment, result)} + {:ok, assign(ctx, :equipment, result)} end ``` @@ -371,40 +371,40 @@ soon = Durable.Wait.list_pending_inputs( ```elixir workflow "expense_approval" do - step :submit, fn data -> - {:ok, %{expense: data, amount: data["amount"]}} + step :submit, fn ctx -> + {:ok, %{expense: ctx, amount: ctx["amount"]}} end - step :manager_review, fn data -> - if data.amount > 1000 do + step :manager_review, fn ctx -> + if ctx.amount > 1000 do result = wait_for_approval("manager_approval", - prompt: "Approve expense of $#{data.amount}?", + prompt: "Approve expense of $#{ctx.amount}?", timeout: days(2) ) - {:ok, assign(data, :manager_approved, result == :approved)} + {:ok, assign(ctx, :manager_approved, result == :approved)} else - {:ok, assign(data, :manager_approved, true)} + {:ok, assign(ctx, :manager_approved, true)} end end - step :finance_review, fn data -> - if data.amount > 5000 and data.manager_approved do + step :finance_review, fn ctx -> + if ctx.amount > 5000 and ctx.manager_approved do result = wait_for_approval("finance_approval", timeout: days(3) ) - {:ok, assign(data, :finance_approved, result == :approved)} + {:ok, assign(ctx, :finance_approved, result == :approved)} else - {:ok, assign(data, :finance_approved, true)} + {:ok, assign(ctx, :finance_approved, true)} end end - step :finalize, fn data -> - if data.manager_approved and data.finance_approved do - Expenses.approve(data.expense) + step :finalize, fn ctx -> + if ctx.manager_approved and ctx.finance_approved do + Expenses.approve(ctx.expense) else - Expenses.reject(data.expense) + Expenses.reject(ctx.expense) end - {:ok, data} + {:ok, ctx} end end ``` @@ -413,34 +413,34 @@ end ```elixir workflow "order_fulfillment" do - step :create_shipment, fn data -> - shipment = Shipping.create(data["order_id"]) + step :create_shipment, fn ctx -> + shipment = Shipping.create(ctx["order_id"]) {:ok, %{ shipment_id: shipment.id, tracking_number: shipment.tracking, - order_id: data["order_id"] + order_id: ctx["order_id"] }} end - step :await_delivery, fn data -> + step :await_delivery, fn ctx -> # Wait for webhook from shipping provider event = wait_for_event("shipment_delivered", timeout: days(14), timeout_value: :lost_package ) - {:ok, assign(data, :delivery_status, event)} + {:ok, assign(ctx, :delivery_status, event)} end - step :handle_delivery, fn data -> - case data.delivery_status do + step :handle_delivery, fn ctx -> + case ctx.delivery_status do :lost_package -> - Support.create_ticket("Lost package", data.shipment_id) + Support.create_ticket("Lost package", ctx.shipment_id) %{status: "delivered"} -> - Orders.mark_complete(data.order_id) + Orders.mark_complete(ctx.order_id) _ -> Logger.warning("Unexpected delivery status") end - {:ok, data} + {:ok, ctx} end end @@ -461,38 +461,38 @@ end ```elixir workflow "subscription_renewal" do - step :check_expiry, fn data -> - subscription = 
Subscriptions.get(data["subscription_id"]) + step :check_expiry, fn ctx -> + subscription = Subscriptions.get(ctx["subscription_id"]) {:ok, %{subscription: subscription, expires_at: subscription.expires_at}} end - step :wait_for_reminder_time, fn data -> - reminder_time = DateTime.add(data.expires_at, -7, :day) # 7 days before + step :wait_for_reminder_time, fn ctx -> + reminder_time = DateTime.add(ctx.expires_at, -7, :day) # 7 days before schedule_at(reminder_time) - {:ok, data} + {:ok, ctx} end - step :send_reminder, fn data -> - Mailer.send_renewal_reminder(data.subscription.user_email) - {:ok, data} + step :send_reminder, fn ctx -> + Mailer.send_renewal_reminder(ctx.subscription.user_email) + {:ok, ctx} end - step :await_renewal, fn data -> + step :await_renewal, fn ctx -> renewal = wait_for_event("subscription_renewed", timeout: days(7), timeout_value: :not_renewed ) - {:ok, assign(data, :renewal_result, renewal)} + {:ok, assign(ctx, :renewal_result, renewal)} end - step :handle_result, fn data -> - case data.renewal_result do + step :handle_result, fn ctx -> + case ctx.renewal_result do :not_renewed -> - Subscriptions.expire(data.subscription.id) + Subscriptions.expire(ctx.subscription.id) _ -> Logger.info("Subscription renewed") end - {:ok, data} + {:ok, ctx} end end ``` @@ -501,27 +501,27 @@ end ```elixir workflow "contract_approval" do - step :submit_contract, fn data -> - {:ok, %{contract: data}} + step :submit_contract, fn ctx -> + {:ok, %{contract: ctx}} end - step :await_all_approvals, fn data -> + step :await_all_approvals, fn ctx -> # Wait for all three departments to approve results = wait_for_all(["legal", "finance", "management"], timeout: days(5), timeout_value: {:timeout, :incomplete} ) - {:ok, assign(data, :approval_results, results)} + {:ok, assign(ctx, :approval_results, results)} end - step :finalize, fn data -> - status = case data.approval_results do + step :finalize, fn ctx -> + status = case ctx.approval_results do {:timeout, :incomplete} -> :timed_out approvals -> all_approved = Enum.all?(approvals, fn {_, v} -> v["approved"] end) if all_approved, do: :approved, else: :rejected end - {:ok, assign(data, :status, status)} + {:ok, assign(ctx, :status, status)} end end ``` @@ -530,25 +530,25 @@ end ```elixir workflow "payment_with_timeout" do - step :await_payment_or_cancel, fn data -> + step :await_payment_or_cancel, fn ctx -> # First event wins {event, event_data} = wait_for_any(["payment_received", "user_cancelled", "fraud_detected"], timeout: hours(1), timeout_value: {:timeout, nil} ) - {:ok, data + {:ok, ctx |> assign(:result_event, event) |> assign(:result_data, event_data)} end - step :handle_result, fn data -> - case data.result_event do + step :handle_result, fn ctx -> + case ctx.result_event do "payment_received" -> complete_order() "user_cancelled" -> refund_if_needed() "fraud_detected" -> flag_for_review() :timeout -> expire_order() end - {:ok, data} + {:ok, ctx} end end ``` @@ -558,7 +558,7 @@ end ### Always Handle Timeouts ```elixir -step :await_response, fn data -> +step :await_response, fn ctx -> result = wait_for_input("approval", timeout: days(7), timeout_value: :timed_out # Always provide a timeout value @@ -568,7 +568,7 @@ step :await_response, fn data -> :timed_out -> handle_timeout() response -> handle_response(response) end - {:ok, assign(data, :response, result)} + {:ok, assign(ctx, :response, result)} end ``` @@ -588,15 +588,15 @@ wait_for_event("event") ### Store Data Before Waiting ```elixir -step :prepare_and_wait, fn data -> +step 
:prepare_and_wait, fn ctx ->
   # Save important data before suspending
-  data = data
+  ctx = ctx
     |> assign(:prepared_at, DateTime.utc_now())
-    |> assign(:request_details, data)
+    |> assign(:request_details, ctx)
 
   # Now wait
   wait_for_input("approval")
-  {:ok, data}
+  {:ok, ctx}
 end
 ```
 
diff --git a/lib/durable/context.ex b/lib/durable/context.ex
index b54a43a..c872818 100644
--- a/lib/durable/context.ex
+++ b/lib/durable/context.ex
@@ -47,8 +47,6 @@ defmodule Durable.Context do
   @input_key :durable_input
   @workflow_id_key :durable_workflow_id
   @step_key :durable_current_step
-  @foreach_item_key :durable_foreach_item
-  @foreach_index_key :durable_foreach_index
 
   @doc """
   Injects context management functions into the calling module.
@@ -71,8 +69,9 @@ defmodule Durable.Context do
         current_step: 0,
         append_context: 2,
         increment_context: 2,
-        current_item: 0,
-        current_index: 0
+        parallel_results: 0,
+        parallel_result: 1,
+        parallel_ok?: 1
       ]
     end
   end
@@ -287,39 +286,66 @@ defmodule Durable.Context do
   end
 
   @doc """
-  Returns the current item being processed in a foreach block.
+  Returns the full parallel results map from context.
+
+  Results are serialized for JSON storage, so entries come back as
+  `["ok", data]` or `["error", reason]` under string keys. Use
+  `parallel_result/1` to read a single step's result as a normalized
+  `{:ok, data}` / `{:error, reason}` tuple.
 
   ## Examples
 
-      foreach :items, items: :items do
-        step :process do
-          item = current_item()
-          # process the item
+      parallel do
+        step :payment, fn ctx -> {:ok, %{id: 123}} end
+        step :delivery, fn ctx -> {:error, :not_found} end
+      end
+
+      step :handle, fn ctx ->
+        results = parallel_results()
+        # => %{"payment" => ["ok", %{id: 123}], "delivery" => ["error", "not_found"]}
+      end
+
+  """
+  @spec parallel_results() :: map()
+  def parallel_results do
+    get_context(:__results__, %{})
+  end
+
+  @doc """
+  Returns a specific parallel step's result by name.
+
+  ## Examples
+
+      step :handle, fn ctx ->
+        case parallel_result(:payment) do
+          {:ok, payment} -> # handle success
+          {:error, reason} -> # handle error
        end
      end
 
   """
-  @spec current_item() :: any()
-  def current_item do
-    Process.get(@foreach_item_key)
+  @spec parallel_result(atom()) :: {:ok, any()} | {:error, any()} | nil
+  def parallel_result(step_name) when is_atom(step_name) do
+    results = parallel_results()
+
+    # Results are stored serialized (string keys, list-tagged values);
+    # normalize back to a tagged tuple for callers
+    case Map.get(results, step_name) || Map.get(results, Atom.to_string(step_name)) do
+      ["ok", data] -> {:ok, data}
+      ["error", reason] -> {:error, reason}
+      other -> other
+    end
   end
 
   @doc """
-  Returns the current index in a foreach block.
+  Checks if a parallel step succeeded.
## Examples - foreach :items, items: :items do - step :process do - idx = current_index() - # use the index + step :handle, fn ctx -> + if parallel_ok?(:payment) do + # payment succeeded + else + # payment failed end end """ - @spec current_index() :: non_neg_integer() | nil - def current_index do - Process.get(@foreach_index_key) + @spec parallel_ok?(atom()) :: boolean() + def parallel_ok?(step_name) when is_atom(step_name) do + case parallel_result(step_name) do + {:ok, _} -> true + _ -> false + end end # Internal functions for executor use @@ -368,28 +394,12 @@ defmodule Durable.Context do context() end - @doc false - def set_foreach_item(item, index) do - Process.put(@foreach_item_key, item) - Process.put(@foreach_index_key, index) - :ok - end - - @doc false - def clear_foreach_item do - Process.delete(@foreach_item_key) - Process.delete(@foreach_index_key) - :ok - end - @doc false def cleanup do Process.delete(@context_key) Process.delete(@input_key) Process.delete(@workflow_id_key) Process.delete(@step_key) - Process.delete(@foreach_item_key) - Process.delete(@foreach_index_key) # Log capture keys (cleanup in case of crashes) Process.delete(:durable_logs) Process.delete(:durable_original_group_leader) diff --git a/lib/durable/definition.ex b/lib/durable/definition.ex index 82822f0..c06be33 100644 --- a/lib/durable/definition.ex +++ b/lib/durable/definition.ex @@ -20,7 +20,7 @@ defmodule Durable.Definition do The `body_fn` field contains the step function that processes data. """ - @type step_type :: :step | :decision | :branch | :parallel | :loop | :foreach | :switch + @type step_type :: :step | :decision | :branch | :parallel | :loop | :switch @type retry_opts :: %{ optional(:max_attempts) => pos_integer(), @@ -55,15 +55,10 @@ defmodule Durable.Definition do Executes the step with the given data. For pipeline model steps, calls `body_fn.(data)`. - For foreach steps, calls `body_fn.(data, item, index)`. """ def execute(%__MODULE__{body_fn: body_fn}, data) when is_function(body_fn, 1) do body_fn.(data) end - - def execute(%__MODULE__{body_fn: body_fn}, data, item, index) when is_function(body_fn, 3) do - body_fn.(data, item, index) - end end defmodule Compensation do diff --git a/lib/durable/dsl/step.ex b/lib/durable/dsl/step.ex index 81d6573..9ef1347 100644 --- a/lib/durable/dsl/step.ex +++ b/lib/durable/dsl/step.ex @@ -376,32 +376,55 @@ defmodule Durable.DSL.Step do @doc """ Defines a parallel execution block. - All steps inside execute concurrently. Each step receives a copy of the data. - Results are merged according to the merge strategy. + All steps inside execute concurrently. Each step receives a copy of the context. + Results are collected into a `__results__` map with tagged tuples. 
## Options - - `:merge` - Context merge strategy (default: `:deep_merge`) - - `:deep_merge` - Deep merge all step outputs - - `:last_wins` - Last step's output wins on conflicts - - `:collect` - Collect into `%{step_name => output}` + - `:into` - Optional callback to transform results (default: none) + - Receives `(ctx, results)` where results = `%{step_name => {:ok, data} | {:error, reason}}` + - Returns `{:ok, ctx}` | `{:error, reason}` | `{:goto, step, ctx}` - `:on_error` - Error handling (default: `:fail_fast`) - `:fail_fast` - Cancel siblings on first failure - - `:complete_all` - Wait for all, collect errors + - `:complete_all` - Wait for all, collect results - ## Examples + ## Behavior + + Without `:into`, results go to `ctx.__results__` and the next step handles them: - parallel merge: :deep_merge do - step :send_email, fn data -> - EmailService.send(data.order_id) - {:ok, assign(data, :email_sent, true)} + parallel do + step :payment, fn ctx -> {:ok, %{id: 123}} end + step :delivery, fn ctx -> {:error, :not_found} end + end + # Next step receives: + # %{...ctx, __results__: %{payment: {:ok, %{id: 123}}, delivery: {:error, :not_found}}} + + With `:into`, you control what the next step receives: + + parallel into: fn ctx, results -> + case {results.payment, results.delivery} do + {{:ok, payment}, {:ok, _}} -> + {:ok, Map.put(ctx, :payment_id, payment.id)} + {{:ok, _}, {:error, :not_found}} -> + {:goto, :handle_backorder, ctx} + _ -> + {:error, "Critical failure"} end + end do + step :payment, fn ctx -> {:ok, %{id: 123}} end + step :delivery, fn ctx -> {:error, :not_found} end + end - step :update_inventory, fn data -> - InventoryService.decrement(data.items) - {:ok, assign(data, :inventory_updated, true)} + ## Step Options + + - `:returns` - Key name for this step's result (default: step name) + + parallel do + step :fetch_order, returns: :order do + fn ctx -> {:ok, %{items: [...]}} end end end + # Result: %{...ctx, __results__: %{order: {:ok, %{items: [...]}}}} """ defmacro parallel(opts \\ [], do: block) do @@ -410,8 +433,8 @@ defmodule Durable.DSL.Step do step_defs = extract_parallel_steps(block, parallel_id) step_names = Enum.map(step_defs, fn {name, _, _} -> name end) - merge_strategy = Keyword.get(opts, :merge, :deep_merge) error_strategy = Keyword.get(opts, :on_error, :fail_fast) + into_fn = Keyword.get(opts, :into) # Generate named functions for each parallel step step_registrations = @@ -432,7 +455,30 @@ defmodule Durable.DSL.Step do end end) + # Generate named function for into callback if provided + {into_fn_ref, into_fn_def} = + if into_fn do + into_func_name = :"__parallel_into_#{parallel_id}__" + + def_ast = + quote do + @doc false + def unquote(into_func_name)(ctx, results), do: unquote(into_fn).(ctx, results) + end + + ref_ast = + quote do + &(__MODULE__.unquote(into_func_name) / 2) + end + + {ref_ast, def_ast} + else + {nil, nil} + end + quote do + unquote(into_fn_def) + @durable_current_steps %Durable.Definition.Step{ name: unquote(:"parallel_#{parallel_id}"), type: :parallel, @@ -440,8 +486,8 @@ defmodule Durable.DSL.Step do opts: %{ steps: unquote(step_names), all_steps: unquote(step_names), - merge_strategy: unquote(merge_strategy), - error_strategy: unquote(error_strategy) + error_strategy: unquote(error_strategy), + into_fn: unquote(into_fn_ref) } } @@ -466,7 +512,8 @@ defmodule Durable.DSL.Step do step_opts = %{ parallel_id: parallel_id, - original_name: name + original_name: name, + returns: name } [{qualified_name, body_fn, step_opts}] @@ -480,167 +527,51 
@@ defmodule Durable.DSL.Step do when is_atom(name) and is_list(opts) do qualified_name = :"parallel_#{parallel_id}__#{name}" + # Extract returns option, default to original name + returns_key = Keyword.get(opts, :returns, name) + step_opts = opts - |> normalize_step_opts() + |> normalize_parallel_step_opts() |> Map.merge(%{ parallel_id: parallel_id, - original_name: name + original_name: name, + returns: returns_key }) [{qualified_name, body_fn, step_opts}] end - defp extract_parallel_step_call(_other, _parallel_id), do: [] - - # ============================================================================ - # ForEach Macro - # ============================================================================ - - @doc """ - Defines a foreach block that iterates over a collection. - - The `:items` option takes a function that extracts items from the data. - Steps inside receive 3 arguments: `(data, item, index)`. - - ## Options - - - `:items` - Required. Function that extracts items from data. - - `:concurrency` - Items to process concurrently (default: 1) - - `:on_error` - Error handling (default: `:fail_fast`) - - `:collect_as` - Context key to store results - - ## Examples - - foreach :process_items, - items: fn data -> data.items end do - - step :process_item, fn data, item, idx -> - processed = %{name: item["name"], position: idx} - {:ok, append(data, :processed_items, processed)} - end - end - - """ - defmacro foreach(name, opts, do: block) when is_atom(name) do - foreach_id = :erlang.unique_integer([:positive]) - - items_fn = - Keyword.get(opts, :items) || - raise ArgumentError, "foreach requires :items option with a function" - - # Validate items is a function - case items_fn do - {:fn, _, _} -> - :ok - - other -> - raise ArgumentError, - "foreach :items must be a function, got: #{inspect(other)}" - end - - concurrency = Keyword.get(opts, :concurrency, 1) - on_error = Keyword.get(opts, :on_error, :fail_fast) - collect_as = Keyword.get(opts, :collect_as) - - step_defs = extract_foreach_steps(block, foreach_id, name) - step_names = Enum.map(step_defs, fn {step_name, _, _} -> step_name end) - - # Generate named functions for each foreach step (3-arity: data, item, index) - step_registrations = - Enum.map(step_defs, fn {qualified_name, body_fn, step_opts} -> - func_name = :"__step_body_#{qualified_name}__" - - quote do - @doc false - def unquote(func_name)(data, item, index), do: unquote(body_fn).(data, item, index) - - @durable_current_steps %Durable.Definition.Step{ - name: unquote(qualified_name), - type: :step, - module: __MODULE__, - body_fn: &(__MODULE__.unquote(func_name) / 3), - opts: unquote(Macro.escape(step_opts)) - } - end - end) - - # Generate named function for items extractor - items_func_name = :"__foreach_items_#{name}__" - - quote do - # Named function for items extraction - @doc false - def unquote(items_func_name)(data), do: unquote(items_fn).(data) - - @durable_current_steps %Durable.Definition.Step{ - name: unquote(:"foreach_#{name}"), - type: :foreach, - module: __MODULE__, - body_fn: &(__MODULE__.unquote(items_func_name) / 1), - opts: %{ - foreach_id: unquote(foreach_id), - foreach_name: unquote(name), - concurrency: unquote(concurrency), - on_error: unquote(on_error), - collect_as: unquote(collect_as), - steps: unquote(step_names), - all_steps: unquote(step_names) - } - } - - unquote_splicing(step_registrations) - end - end - - defp extract_foreach_steps(body, foreach_id, foreach_name) do - case body do - {:__block__, _, statements} -> - Enum.flat_map(statements, 
&extract_foreach_step_call(&1, foreach_id, foreach_name)) - - statement -> - extract_foreach_step_call(statement, foreach_id, foreach_name) - end - end - - # step :name, fn data, item, idx -> ... end - defp extract_foreach_step_call( - {:step, _meta, [name, {:fn, _, _} = body_fn]}, - foreach_id, - foreach_name - ) - when is_atom(name) do - qualified_name = :"foreach_#{foreach_name}__#{name}" - - step_opts = %{ - foreach_id: foreach_id, - foreach_name: foreach_name, - original_name: name - } - - [{qualified_name, body_fn, step_opts}] - end - - # step :name, [opts], fn data, item, idx -> ... end - defp extract_foreach_step_call( - {:step, _meta, [name, opts, {:fn, _, _} = body_fn]}, - foreach_id, - foreach_name + # step :name, returns: :key do fn data -> ... end end + defp extract_parallel_step_call( + {:step, _meta, [name, opts, [do: {:fn, _, _} = body_fn]]}, + parallel_id ) when is_atom(name) and is_list(opts) do - qualified_name = :"foreach_#{foreach_name}__#{name}" + qualified_name = :"parallel_#{parallel_id}__#{name}" + + # Extract returns option, default to original name + returns_key = Keyword.get(opts, :returns, name) step_opts = opts - |> normalize_step_opts() + |> normalize_parallel_step_opts() |> Map.merge(%{ - foreach_id: foreach_id, - foreach_name: foreach_name, - original_name: name + parallel_id: parallel_id, + original_name: name, + returns: returns_key }) [{qualified_name, body_fn, step_opts}] end - defp extract_foreach_step_call(_other, _foreach_id, _foreach_name), do: [] + defp extract_parallel_step_call(_other, _parallel_id), do: [] + + # Normalize parallel step options (includes :returns) + defp normalize_parallel_step_opts(opts) do + opts + |> Keyword.take([:retry, :timeout, :compensate, :queue, :returns]) + |> Enum.into(%{}) + |> normalize_retry_opts() + end end diff --git a/lib/durable/executor.ex b/lib/durable/executor.ex index e446a64..f7faee8 100644 --- a/lib/durable/executor.ex +++ b/lib/durable/executor.ex @@ -281,9 +281,6 @@ defmodule Durable.Executor do :parallel -> execute_parallel(step, remaining_steps, execution, step_index, workflow_def, config, data) - :foreach -> - execute_foreach(step, remaining_steps, execution, step_index, workflow_def, config, data) - _ -> execute_regular_step( step, @@ -582,8 +579,8 @@ defmodule Durable.Executor do opts = parallel_step.opts parallel_step_names = opts[:steps] || [] all_parallel_steps = opts[:all_steps] || [] - merge_strategy = opts[:merge_strategy] || :deep_merge error_strategy = opts[:error_strategy] || :fail_fast + into_fn = opts[:into_fn] # Find actual step definitions for parallel steps steps_to_execute = @@ -602,54 +599,37 @@ defmodule Durable.Executor do {:ok, exec} = save_data_as_context(config, exec, data) # DURABILITY: Check which parallel steps already completed (for resume) - {completed_names, completed_data} = - get_completed_parallel_steps_with_data(exec.id, parallel_step_names, config) - - # Merge data from completed steps into base data - base_data_with_completed = - Enum.reduce(completed_data, data, &deep_merge(&2, &1)) + completed_results = get_completed_parallel_step_results(exec.id, ordered_steps, config) # Filter to only incomplete steps - incomplete_steps = Enum.reject(ordered_steps, &(&1.name in completed_names)) + completed_step_names = Map.keys(completed_results) + incomplete_steps = Enum.reject(ordered_steps, &(get_returns_key(&1) in completed_step_names)) + + # Bundle opts for handle_parallel_completion + completion_opts = %{ + remaining_steps: remaining_steps, + all_parallel_steps: 
all_parallel_steps, + exec: exec, + step_index: step_index, + workflow_def: workflow_def, + config: config, + parallel_step_name: parallel_step.name + } - # If all steps already completed, skip execution + # If all steps already completed, use stored results if incomplete_steps == [] do - {:ok, exec} = save_data_as_context(config, exec, base_data_with_completed) - after_parallel = skip_parallel_steps(remaining_steps, all_parallel_steps) - - execute_steps_recursive( - after_parallel, - exec, - step_index, - workflow_def, - config, - base_data_with_completed - ) + handle_parallel_completion(completed_results, data, into_fn, completion_opts) else + # When into_fn is provided, always collect all results and let into_fn handle errors + # When into_fn is nil, use the error_strategy + effective_strategy = if into_fn, do: :complete_all, else: error_strategy + # Execute only incomplete steps in parallel - case execute_parallel_steps( - incomplete_steps, - exec, - config, - base_data_with_completed, - merge_strategy, - error_strategy - ) do - {:ok, merged_data} -> - # Save merged data and continue - {:ok, exec} = save_data_as_context(config, exec, merged_data) - - # Skip past all parallel steps and continue - after_parallel = skip_parallel_steps(remaining_steps, all_parallel_steps) - - execute_steps_recursive( - after_parallel, - exec, - step_index, - workflow_def, - config, - merged_data - ) + case execute_parallel_steps(incomplete_steps, exec, config, data, effective_strategy) do + {:ok, new_results} -> + # Merge new results with completed results + all_results = Map.merge(completed_results, new_results) + handle_parallel_completion(all_results, data, into_fn, completion_opts) {:error, error} -> handle_step_failure(exec, error, workflow_def, config) @@ -657,14 +637,114 @@ defmodule Durable.Executor do end end - defp execute_parallel_steps( - steps, - execution, - config, - base_data, - merge_strategy, - error_strategy - ) do + # Handle completion of parallel block - apply into_fn or add __results__ + # Uses opts map to reduce arity below credo's limit of 8 + defp handle_parallel_completion(results, base_ctx, into_fn, opts) do + %{ + remaining_steps: remaining_steps, + all_parallel_steps: all_parallel_steps, + exec: exec, + step_index: step_index, + workflow_def: workflow_def, + config: config, + parallel_step_name: parallel_step_name + } = opts + + case apply_parallel_into(into_fn, base_ctx, results) do + {:ok, final_ctx} -> + {:ok, exec} = save_data_as_context(config, exec, final_ctx) + after_parallel = skip_parallel_steps(remaining_steps, all_parallel_steps) + + execute_steps_recursive( + after_parallel, + exec, + step_index, + workflow_def, + config, + final_ctx + ) + + {:goto, target_step, goto_ctx} -> + {:ok, exec} = save_data_as_context(config, exec, goto_ctx) + + case find_jump_target(target_step, remaining_steps, parallel_step_name, step_index) do + {:ok, target_steps} -> + execute_steps_recursive( + target_steps, + exec, + step_index, + workflow_def, + config, + goto_ctx + ) + + {:error, reason} -> + handle_step_failure( + exec, + %{type: "parallel_goto_error", message: reason}, + workflow_def, + config + ) + end + + {:error, error} -> + handle_step_failure(exec, normalize_error(error), workflow_def, config) + end + end + + # Apply into function or default to __results__ + defp apply_parallel_into(nil, base_ctx, results) do + # No into function - add results to __results__ key + # Serialize tuples to lists for JSON storage + serialized_results = serialize_parallel_results(results) + {:ok, 
Map.put(base_ctx, :__results__, serialized_results)} + end + + defp apply_parallel_into(into_fn, base_ctx, results) when is_function(into_fn, 2) do + into_fn.(base_ctx, results) + rescue + e -> + {:error, + %{ + type: "parallel_into_error", + message: Exception.message(e), + stacktrace: Exception.format_stacktrace(__STACKTRACE__) + }} + end + + # Serialize tagged tuples to lists for JSON storage + # {:ok, data} -> ["ok", data], {:error, reason} -> ["error", reason] + defp serialize_parallel_results(results) do + Map.new(results, fn {key, value} -> + serialized_key = if is_atom(key), do: Atom.to_string(key), else: key + + serialized_value = + case value do + {:ok, data} -> ["ok", data] + {:error, reason} -> ["error", serialize_error_reason(reason)] + other -> other + end + + {serialized_key, serialized_value} + end) + end + + # Serialize error reasons that might contain atoms + defp serialize_error_reason(reason) when is_atom(reason), do: Atom.to_string(reason) + defp serialize_error_reason(reason) when is_map(reason), do: reason + defp serialize_error_reason(reason) when is_binary(reason), do: reason + defp serialize_error_reason(reason), do: inspect(reason) + + # Normalize error to map format + defp normalize_error(error) when is_map(error), do: error + defp normalize_error(error) when is_binary(error), do: %{type: "error", message: error} + + defp normalize_error(error) when is_atom(error), + do: %{type: "error", message: Atom.to_string(error)} + + defp normalize_error(error), do: %{type: "error", message: inspect(error)} + + defp execute_parallel_steps(steps, execution, config, base_data, error_strategy) do task_sup = Config.task_supervisor(config.name) task_opts = %{data: base_data, execution_id: execution.id, config: config} @@ -676,52 +756,68 @@ defmodule Durable.Executor do end) results = await_parallel_tasks(tasks, error_strategy) - process_parallel_results(results, base_data, merge_strategy) + process_parallel_results(results, error_strategy) end defp run_parallel_step_task(step, task_opts) do %{data: data, execution_id: exec_id, config: config} = task_opts + # Get the returns key for this step + returns_key = get_returns_key(step) + # Each parallel task gets a copy of the data and returns its result result = StepRunner.execute(step, data, exec_id, config) - handle_parallel_step_result(result, step.name) + handle_parallel_step_result(result, returns_key) + end + + # Get the returns key from step opts (default to original_name) + defp get_returns_key(%{opts: opts}) do + opts[:returns] || opts[:original_name] end - defp handle_parallel_step_result({:ok, output_data}, step_name) do - {:ok, step_name, output_data} + # Now returns tagged tuples: {returns_key, {:ok, data}} or {returns_key, {:error, reason}} + defp handle_parallel_step_result({:ok, output_data}, returns_key) do + {:ok, returns_key, {:ok, output_data}} end - defp handle_parallel_step_result({:decision, _target, _data}, step_name) do - {:error, step_name, - %{ - type: "parallel_decision_not_supported", - message: "decisions not supported in parallel blocks" - }} + defp handle_parallel_step_result({:decision, _target, _data}, returns_key) do + {:ok, returns_key, + {:error, + %{ + type: "parallel_decision_not_supported", + message: "decisions not supported in parallel blocks" + }}} end - defp handle_parallel_step_result({:error, error}, step_name) do - {:error, step_name, error} + defp handle_parallel_step_result({:error, error}, returns_key) do + {:ok, returns_key, {:error, error}} end - defp 
handle_parallel_step_result({:sleep, _opts}, step_name) do - {:error, step_name, - %{type: "parallel_wait_not_supported", message: "sleep not supported in parallel blocks yet"}} + defp handle_parallel_step_result({:sleep, _opts}, returns_key) do + {:ok, returns_key, + {:error, + %{ + type: "parallel_wait_not_supported", + message: "sleep not supported in parallel blocks yet" + }}} end - defp handle_parallel_step_result({:wait_for_event, _opts}, step_name) do - {:error, step_name, - %{ - type: "parallel_wait_not_supported", - message: "wait_for_event not supported in parallel blocks yet" - }} + defp handle_parallel_step_result({:wait_for_event, _opts}, returns_key) do + {:ok, returns_key, + {:error, + %{ + type: "parallel_wait_not_supported", + message: "wait_for_event not supported in parallel blocks yet" + }}} end - defp handle_parallel_step_result({:wait_for_input, _opts}, step_name) do - {:error, step_name, - %{ - type: "parallel_wait_not_supported", - message: "wait_for_input not supported in parallel blocks yet" - }} + defp handle_parallel_step_result({:wait_for_input, _opts}, returns_key) do + {:ok, returns_key, + {:error, + %{ + type: "parallel_wait_not_supported", + message: "wait_for_input not supported in parallel blocks yet" + }}} end defp await_parallel_tasks(tasks, :fail_fast), do: await_tasks_fail_fast(tasks) @@ -742,74 +838,41 @@ defmodule Durable.Executor do Task.await_many(tasks, :infinity) end - defp process_parallel_results(results, base_context, merge_strategy) do - errors = Enum.filter(results, &match?({:error, _, _}, &1)) - successes = Enum.filter(results, &match?({:ok, _, _}, &1)) - - if errors != [] do - # Any errors -> fail - error_details = - Enum.map(errors, fn {:error, step, err} -> - %{step: step, error: err} - end) - - {:error, - %{ - type: "parallel_error", - message: "One or more parallel steps failed", - errors: error_details - }} - else - # All succeeded -> merge data - step_data = Enum.map(successes, fn {:ok, step_name, data} -> {step_name, data} end) - merged = merge_parallel_data(step_data, base_context, merge_strategy) - {:ok, merged} - end - end - - defp merge_parallel_data(step_data, base_data, :deep_merge) do - Enum.reduce(step_data, base_data, fn {_step_name, data}, acc -> - deep_merge(acc, data) - end) - end - - defp merge_parallel_data(step_data, _base_data, :last_wins) do - case List.last(step_data) do - nil -> %{} - {_step_name, data} -> data - end - end - - defp merge_parallel_data(step_data, base_data, :collect) do - collected = - Enum.into(step_data, %{}, fn {step_name, data} -> - # Extract only the changes from base_data - changes = Map.drop(data, Map.keys(base_data)) - {Atom.to_string(step_name), changes} + # Process results: build results map with tagged tuples + defp process_parallel_results(results, error_strategy) do + # Build the results map + results_map = + Enum.reduce(results, %{}, fn {:ok, returns_key, tagged_result}, acc -> + Map.put(acc, returns_key, tagged_result) end) - Map.put(base_data, "__parallel_results__", collected) - end + # Check for errors based on strategy + errors = + Enum.filter(results_map, fn {_key, result} -> + match?({:error, _}, result) + end) - defp merge_parallel_data(step_data, base_data, _unknown) do - merge_parallel_data(step_data, base_data, :deep_merge) - end + case {error_strategy, errors} do + {:fail_fast, [_ | _]} -> + # Fail fast with first error + {_key, {:error, first_error}} = hd(errors) + {:error, first_error} - defp deep_merge(left, right) when is_map(left) and is_map(right) do - 
Map.merge(left, right, fn _k, l, r -> deep_merge(l, r) end) + _ -> + # complete_all or no errors: return results map + {:ok, results_map} + end end - defp deep_merge(_left, right), do: right - defp skip_parallel_steps(remaining_steps, all_parallel_step_names) do Enum.reject(remaining_steps, fn step -> step.name in all_parallel_step_names end) end - # Get completed parallel steps with their stored data for durability - defp get_completed_parallel_steps_with_data(workflow_id, step_names, config) do - step_name_strings = Enum.map(step_names, &Atom.to_string/1) + # Get completed parallel step results for durability (with tagged tuples) + defp get_completed_parallel_step_results(workflow_id, steps, config) do + step_name_strings = Enum.map(steps, &Atom.to_string(&1.name)) query = from(s in StepExecution, @@ -821,266 +884,26 @@ defmodule Durable.Executor do completed = Repo.all(config, query) - names = - Enum.map(completed, fn {name, _} -> - String.to_existing_atom(name) - end) - - data_list = - Enum.map(completed, fn {_, output} -> - # Extract data from stored output (parallel steps store it under "__context__") - (output || %{})["__context__"] || %{} + # Build a map from step name to step def for looking up returns key + step_map = + Enum.into(steps, %{}, fn step -> + {Atom.to_string(step.name), step} end) - {names, data_list} - end - - # ============================================================================ - # ForEach Execution - # ============================================================================ + Enum.reduce(completed, %{}, fn {step_name_str, output}, acc -> + case Map.get(step_map, step_name_str) do + nil -> + acc - defp execute_foreach( - foreach_step, - remaining_steps, - execution, - step_index, - workflow_def, - config, - data - ) do - {:ok, exec} = update_current_step(config, execution, foreach_step.name) - - # Get foreach configuration - step_opts = foreach_step.opts - foreach_step_names = step_opts[:steps] || [] - all_foreach_steps = step_opts[:all_steps] || [] - concurrency = step_opts[:concurrency] || 1 - - # Extract items using the body_fn (function that receives data) - # In the new DSL, body_fn contains the items extraction function - items = resolve_foreach_items(foreach_step.body_fn, data) - steps_to_execute = Enum.filter(remaining_steps, &(&1.name in foreach_step_names)) - - # Save data before foreach execution - {:ok, exec} = save_data_as_context(config, exec, data) - - # Bundle options for child functions - foreach_opts = %{ - data: data, - on_error: step_opts[:on_error] || :fail_fast, - collect_as: step_opts[:collect_as] - } - - result = do_execute_foreach(items, steps_to_execute, exec, config, concurrency, foreach_opts) - - handle_foreach_result( - result, - remaining_steps, - all_foreach_steps, - exec, - step_index, - workflow_def, - config - ) - end - - # Resolve items: can be a function that takes data, or legacy specs - defp resolve_foreach_items(items_fn, data) when is_function(items_fn, 1), do: items_fn.(data) - defp resolve_foreach_items({:context_key, key}, data), do: Map.get(data, key) || [] - defp resolve_foreach_items({:mfa, {mod, fun, args}}, _data), do: apply(mod, fun, args) - defp resolve_foreach_items(_, _data), do: [] - - defp do_execute_foreach(items, steps, execution, config, 1, opts) do - execute_foreach_sequential(items, steps, execution, config, opts) - end - - defp do_execute_foreach(items, steps, execution, config, concurrency, opts) do - execute_foreach_concurrent(items, steps, execution, config, concurrency, opts) - end - - defp 
handle_foreach_result( - {:ok, result_data}, - remaining, - all_foreach_steps, - exec, - idx, - workflow_def, - cfg - ) do - {:ok, exec} = save_data_as_context(cfg, exec, result_data) - after_foreach = skip_foreach_steps(remaining, all_foreach_steps) - execute_steps_recursive(after_foreach, exec, idx, workflow_def, cfg, result_data) - end - - defp handle_foreach_result({:error, error}, _remaining, _all, exec, _idx, workflow_def, cfg) do - handle_step_failure(exec, error, workflow_def, cfg) - end - - defp execute_foreach_sequential(items, steps, execution, config, opts) do - %{data: base_data, on_error: on_error, collect_as: collect_as} = opts - initial_acc = %{data: base_data, results: [], errors: []} - - final_acc = - items - |> Enum.with_index() - |> Enum.reduce_while(initial_acc, fn {item, index}, acc -> - # Execute foreach steps with (data, item, index) - result = execute_foreach_item_steps(steps, acc.data, item, index, execution.id, config) - handle_foreach_item_result(result, index, acc, on_error, collect_as) - end) - - finalize_foreach_result(final_acc, collect_as) - end - - defp handle_foreach_item_result({:ok, item_data}, _index, acc, _on_error, collect_as) do - new_data = deep_merge(acc.data, item_data) - new_results = maybe_collect_result(acc.results, item_data, acc.data, collect_as) - {:cont, %{acc | data: new_data, results: new_results}} - end - - defp handle_foreach_item_result({:error, error}, index, acc, :fail_fast, _collect_as) do - {:halt, %{acc | errors: [{index, error} | acc.errors]}} - end - - defp handle_foreach_item_result({:error, error}, index, acc, :continue, _collect_as) do - {:cont, %{acc | errors: [{index, error} | acc.errors]}} - end - - defp maybe_collect_result(results, _item_data, _base_data, nil), do: results - - defp maybe_collect_result(results, item_data, base_data, _collect_as) do - results ++ [extract_item_result(item_data, base_data)] - end - - defp finalize_foreach_result(%{errors: [], data: data, results: _results}, nil) do - {:ok, data} - end - - defp finalize_foreach_result(%{errors: [], data: data, results: results}, collect_as) do - {:ok, Map.put(data, collect_as, results)} - end - - defp finalize_foreach_result(%{errors: errors}, _collect_as) do - {:error, - %{ - type: "foreach_error", - message: "One or more foreach items failed", - errors: format_foreach_errors(errors) - }} - end - - defp execute_foreach_concurrent(items, steps, execution, config, concurrency, opts) do - %{data: base_data, on_error: on_error, collect_as: collect_as} = opts - task_sup = Config.task_supervisor(config.name) - task_opts = %{steps: steps, execution_id: execution.id, config: config} - - items - |> Enum.with_index() - |> Enum.chunk_every(concurrency) - |> Enum.reduce_while({:ok, base_data, [], []}, fn chunk, - {:ok, current_data, results, errors} -> - tasks = spawn_foreach_chunk_tasks(chunk, current_data, task_sup, task_opts) - task_results = Task.await_many(tasks, :infinity) - - {new_data, new_results, new_errors} = - merge_chunk_results(task_results, current_data, results, errors, collect_as) - - check_chunk_continue(on_error, new_data, new_results, new_errors) - end) - |> finalize_concurrent_foreach(collect_as) - end - - defp spawn_foreach_chunk_tasks(chunk, current_data, task_sup, task_opts) do - Enum.map(chunk, fn {item, index} -> - Task.Supervisor.async(task_sup, fn -> - run_foreach_item_task(item, index, current_data, task_opts) - end) - end) - end - - defp run_foreach_item_task(item, index, data, task_opts) do - %{steps: steps, execution_id: exec_id, 
config: config} = task_opts - - result = execute_foreach_item_steps(steps, data, item, index, exec_id, config) - - case result do - {:ok, item_data} -> {:ok, index, item_data} - {:error, error} -> {:error, index, error} - end - end - - defp merge_chunk_results(task_results, data, results, errors, collect_as) do - Enum.reduce(task_results, {data, results, errors}, fn - {:ok, _index, item_data}, {d, r, e} -> - merged = deep_merge(d, item_data) - item_result = if collect_as, do: [extract_item_result(item_data, d)], else: [] - {merged, r ++ item_result, e} - - {:error, index, error}, {d, r, e} -> - {d, r, [{index, error} | e]} - end) - end - - defp check_chunk_continue(:fail_fast, _data, _results, [_ | _] = errors) do - {:halt, {:error, errors}} - end - - defp check_chunk_continue(_on_error, data, results, errors) do - {:cont, {:ok, data, results, errors}} - end - - defp finalize_concurrent_foreach({:ok, data, _results, []}, nil), do: {:ok, data} - - defp finalize_concurrent_foreach({:ok, data, results, []}, collect_as), - do: {:ok, Map.put(data, collect_as, results)} - - defp finalize_concurrent_foreach({:ok, _data, _results, errors}, _collect_as) do - {:error, - %{ - type: "foreach_error", - message: "One or more foreach items failed", - errors: format_foreach_errors(errors) - }} - end - - defp finalize_concurrent_foreach({:error, errors}, _collect_as) do - {:error, - %{ - type: "foreach_error", - message: "One or more foreach items failed", - errors: format_foreach_errors(errors) - }} - end - - # Execute foreach item steps using StepRunner.execute_foreach which passes (data, item, index) - defp execute_foreach_item_steps(steps, data, item, index, workflow_id, config) do - Enum.reduce_while(steps, {:ok, data}, fn step, {:ok, current_data} -> - case StepRunner.execute_foreach(step, current_data, item, index, workflow_id, config) do - {:ok, new_data} -> - {:cont, {:ok, new_data}} - - {:error, error} -> - {:halt, {:error, error}} + step -> + returns_key = get_returns_key(step) + # Extract result from stored output + result_data = (output || %{})["__result__"] || (output || %{})["__context__"] || %{} + Map.put(acc, returns_key, {:ok, result_data}) end end) end - defp extract_item_result(item_context, base_context) do - Map.drop(item_context, Map.keys(base_context)) - end - - defp format_foreach_errors(errors) do - Enum.map(errors, fn {index, error} -> - %{index: index, error: error} - end) - end - - defp skip_foreach_steps(remaining_steps, all_foreach_step_names) do - Enum.reject(remaining_steps, fn step -> - step.name in all_foreach_step_names - end) - end - defp find_jump_target(target_step, remaining_steps, current_step, step_index) do with :ok <- validate_target_exists(target_step, step_index), :ok <- validate_not_self(target_step, current_step), diff --git a/lib/durable/executor/step_runner.ex b/lib/durable/executor/step_runner.ex index 96d36e6..a0c02ef 100644 --- a/lib/durable/executor/step_runner.ex +++ b/lib/durable/executor/step_runner.ex @@ -45,18 +45,6 @@ defmodule Durable.Executor.StepRunner do execute_with_retry(step, data, workflow_id, 1, max_attempts, config) end - @doc """ - Executes a foreach step with item and index. - - Foreach steps receive 3 arguments: (data, item, index). 
- """ - @spec execute_foreach(Step.t(), map(), any(), non_neg_integer(), String.t(), Config.t()) :: - result() - def execute_foreach(%Step{} = step, data, item, index, workflow_id, %Config{} = config) do - max_attempts = get_max_attempts(step) - execute_foreach_with_retry(step, data, item, index, workflow_id, 1, max_attempts, config) - end - defp execute_with_retry(step, data, workflow_id, attempt, max_attempts, config) do # Set current step for logging/observability Context.set_current_step(step.name) @@ -115,66 +103,6 @@ defmodule Durable.Executor.StepRunner do handle_result(result, result_ctx) end - defp execute_foreach_with_retry( - step, - data, - item, - index, - workflow_id, - attempt, - max_attempts, - config - ) do - Context.set_current_step(step.name) - - {:ok, step_exec} = create_step_execution(config, workflow_id, step, attempt) - {:ok, step_exec} = update_step_execution(config, step_exec, :running) - - Durable.LogCapture.start_capture() - - start_time = System.monotonic_time(:millisecond) - - result = - try do - Step.execute(step, data, item, index) - rescue - e -> - {:error, - %{ - type: inspect(e.__struct__), - message: Exception.message(e), - stacktrace: Exception.format_stacktrace(__STACKTRACE__) - }} - catch - :throw, value -> - {:throw, value} - - kind, reason -> - {:error, %{type: "#{kind}", message: inspect(reason)}} - end - - end_time = System.monotonic_time(:millisecond) - duration_ms = end_time - start_time - - logs = Durable.LogCapture.stop_capture() - - # Bundle context into a map to reduce arity - foreach_ctx = %{ - step: step, - step_exec: step_exec, - data: data, - item: item, - index: index, - logs: logs, - duration_ms: duration_ms, - attempt: attempt, - max_attempts: max_attempts, - config: config - } - - handle_foreach_result(result, foreach_ctx) - end - # Handle step result from pipeline model defp handle_result({:ok, new_data}, ctx) when is_map(new_data) do %{step: step, step_exec: step_exec, logs: logs, duration_ms: duration_ms, config: config} = @@ -230,8 +158,11 @@ defmodule Durable.Executor.StepRunner do config: config } = ctx + # Normalize error to map format for database storage + normalized_error = normalize_error_for_storage(error) + if attempt < max_attempts do - {:ok, _} = fail_step_execution(config, step_exec, error, logs, duration_ms) + {:ok, _} = fail_step_execution(config, step_exec, normalized_error, logs, duration_ms) retry_opts = get_retry_opts(step) backoff_strategy = Map.get(retry_opts, :backoff, :exponential) @@ -239,7 +170,7 @@ defmodule Durable.Executor.StepRunner do execute_with_retry(step, data, step_exec.workflow_id, attempt + 1, max_attempts, config) else - {:ok, _} = fail_step_execution(config, step_exec, error, logs, duration_ms) + {:ok, _} = fail_step_execution(config, step_exec, normalized_error, logs, duration_ms) {:error, error} end end @@ -258,86 +189,15 @@ defmodule Durable.Executor.StepRunner do {:error, error} end - # Handle foreach step success - defp handle_foreach_result({:ok, new_data}, ctx) when is_map(new_data) do - %{step: step, step_exec: step_exec, logs: logs, duration_ms: duration_ms, config: config} = - ctx - - handle_step_success(config, step, step_exec, new_data, logs, duration_ms) - end - - # Handle foreach errors - defp handle_foreach_result({:error, error}, ctx) do - %{ - step: step, - step_exec: step_exec, - data: data, - item: item, - index: index, - logs: logs, - duration_ms: duration_ms, - attempt: attempt, - max_attempts: max_attempts, - config: config - } = ctx - - if attempt < max_attempts do - 
{:ok, _} = fail_step_execution(config, step_exec, error, logs, duration_ms) - - retry_opts = get_retry_opts(step) - backoff_strategy = Map.get(retry_opts, :backoff, :exponential) - Backoff.sleep(backoff_strategy, attempt, retry_opts) - - execute_foreach_with_retry( - step, - data, - item, - index, - step_exec.workflow_id, - attempt + 1, - max_attempts, - config - ) - else - {:ok, _} = fail_step_execution(config, step_exec, error, logs, duration_ms) - {:error, error} - end - end - - # Handle foreach wait primitives - defp handle_foreach_result({:throw, {wait_type, _opts}}, ctx) - when wait_type in [:sleep, :wait_for_event, :wait_for_input, :wait_for_any, :wait_for_all] do - %{step_exec: step_exec, logs: logs, duration_ms: duration_ms, config: config} = ctx - - error = %{ - type: "foreach_wait_not_supported", - message: "#{wait_type} is not supported in foreach blocks" - } - - {:ok, _} = fail_step_execution(config, step_exec, error, logs, duration_ms) - {:error, error} - end - - # Handle invalid foreach return - defp handle_foreach_result(other, ctx) do - %{step_exec: step_exec, logs: logs, duration_ms: duration_ms, config: config} = ctx - - error = %{ - type: "invalid_step_return", - message: "Foreach step must return {:ok, map} or {:error, reason}, got: #{inspect(other)}" - } - - {:ok, _} = fail_step_execution(config, step_exec, error, logs, duration_ms) - {:error, error} - end - defp handle_step_success(config, step, step_exec, new_data, logs, duration_ms) do stored_output = if step.opts[:parallel_id] do - # Include data snapshot for parallel step resumption + # Include result snapshot for parallel step resumption + # Store the raw result data for the new results-based model %{ "__output__" => new_data, - "__context__" => new_data + "__context__" => new_data, + "__result__" => new_data } else new_data @@ -411,4 +271,16 @@ defmodule Durable.Executor.StepRunner do defp serialize_output(output) when is_tuple(output), do: %{value: Tuple.to_list(output)} defp serialize_output(nil), do: nil defp serialize_output(output), do: %{value: inspect(output)} + + # Normalize error to map format for database storage + # The error field in StepExecution expects a map + defp normalize_error_for_storage(error) when is_map(error), do: error + + defp normalize_error_for_storage(error) when is_binary(error), + do: %{type: "error", message: error} + + defp normalize_error_for_storage(error) when is_atom(error), + do: %{type: "error", message: Atom.to_string(error)} + + defp normalize_error_for_storage(error), do: %{type: "error", message: inspect(error)} end diff --git a/test/durable/foreach_test.exs b/test/durable/foreach_test.exs deleted file mode 100644 index c1e3b01..0000000 --- a/test/durable/foreach_test.exs +++ /dev/null @@ -1,749 +0,0 @@ -defmodule Durable.ForEachTest do - use Durable.DataCase, async: false - - alias Durable.Config - alias Durable.Executor - alias Durable.Storage.Schemas.{StepExecution, WorkflowExecution} - - import Ecto.Query - - describe "foreach macro DSL compilation" do - test "foreach macro creates step with type :foreach" do - {:ok, definition} = - SimpleForEachWorkflow.__workflow_definition__("simple_foreach") - - foreach_step = Enum.find(definition.steps, &(&1.type == :foreach)) - - assert foreach_step != nil - assert foreach_step.type == :foreach - assert foreach_step.opts[:steps] != nil - assert is_list(foreach_step.opts[:steps]) - end - - test "foreach creates qualified step names for nested steps" do - {:ok, definition} = - 
SimpleForEachWorkflow.__workflow_definition__("simple_foreach") - - step_names = Enum.map(definition.steps, & &1.name) |> Enum.map(&Atom.to_string/1) - - # Should have qualified names like foreach___ - assert Enum.any?(step_names, &String.contains?(&1, "foreach_")) - assert Enum.any?(step_names, &String.contains?(&1, "__process")) - end - - test "foreach includes all nested step definitions in workflow" do - {:ok, definition} = - SimpleForEachWorkflow.__workflow_definition__("simple_foreach") - - # Should have: setup, foreach_X, foreach step, final - assert length(definition.steps) >= 3 - end - end - - describe "foreach execution - sequential" do - test "executes steps for each item in collection" do - {:ok, execution} = - create_and_execute_workflow(SimpleForEachWorkflow, %{}) - - assert execution.status == :completed - - # Check that all items were processed - assert execution.context["processed_count"] == 3 - end - - test "current_item returns the current item being processed" do - {:ok, execution} = - create_and_execute_workflow(SimpleForEachWorkflow, %{}) - - assert execution.status == :completed - - # Results should contain processed items - assert execution.context["results"] == [ - "item_1_processed", - "item_2_processed", - "item_3_processed" - ] - end - - test "current_index returns the current index" do - {:ok, execution} = - create_and_execute_workflow(IndexForEachWorkflow, %{}) - - assert execution.status == :completed - - # Should have captured all indices - assert execution.context["indices"] == [0, 1, 2] - end - - test "context changes accumulate across iterations" do - {:ok, execution} = - create_and_execute_workflow(AccumulatingForEachWorkflow, %{}) - - assert execution.status == :completed - - # Counter should have been incremented 3 times - assert execution.context["counter"] == 3 - end - - test "handles empty items list gracefully" do - {:ok, execution} = - create_and_execute_workflow(EmptyForEachWorkflow, %{}) - - assert execution.status == :completed - # Foreach should complete without processing any items - assert execution.context["processed_count"] == 0 - assert execution.context["completed"] == true - end - - test "handles nil items in collection" do - {:ok, execution} = - create_and_execute_workflow(NilItemForEachWorkflow, %{}) - - assert execution.status == :completed - # Should process all items including nil - assert execution.context["processed"] == [1, nil, 3] - assert execution.context["completed"] == true - end - - test "handles large collection (1000 items)" do - {:ok, execution} = - create_and_execute_workflow(LargeCollectionForEachWorkflow, %{}) - - assert execution.status == :completed - # Sum of 1..1000 = 500500 - assert execution.context["sum"] == 500_500 - assert execution.context["completed"] == true - end - end - - describe "foreach execution - concurrent" do - test "executes items concurrently with concurrency limit" do - {:ok, execution} = - create_and_execute_workflow(ConcurrentForEachWorkflow, %{}) - - assert execution.status == :completed - # Concurrency is verified by the fact that all items complete successfully - # and results are collected. Timing assertions are avoided due to CI variability. 
- end - - test "concurrent foreach processes all items using collect_as" do - {:ok, execution} = - create_and_execute_workflow(ConcurrentForEachWorkflow, %{}) - - assert execution.status == :completed - # When using collect_as, results are properly collected even in concurrent mode - assert length(execution.context["item_results"]) == 3 - end - end - - describe "foreach execution - error handling" do - test "fail_fast stops on first error" do - {:ok, execution} = - create_and_execute_workflow(FailFastForEachWorkflow, %{}) - - assert execution.status == :failed - assert execution.error["type"] == "foreach_error" - end - - test "continue collects errors and continues processing" do - {:ok, execution} = - create_and_execute_workflow(ContinueOnErrorForEachWorkflow, %{}) - - assert execution.status == :failed - assert execution.error["type"] == "foreach_error" - # Should have collected errors - assert is_list(execution.error["errors"]) - end - end - - describe "foreach with collect_as option" do - test "collects results into specified context key" do - {:ok, execution} = - create_and_execute_workflow(CollectAsForEachWorkflow, %{}) - - assert execution.status == :completed - assert is_list(execution.context["collected_results"]) - assert length(execution.context["collected_results"]) == 3 - end - end - - describe "foreach continues after block" do - test "execution continues to steps after foreach block" do - {:ok, execution} = - create_and_execute_workflow(SimpleForEachWorkflow, %{}) - - assert execution.status == :completed - - step_execs = get_step_executions(execution.id) - executed_steps = Enum.map(step_execs, & &1.step_name) - - # Final step should execute after foreach - assert "final" in executed_steps - assert execution.context["completed"] == true - end - end - - describe "foreach execution - edge cases" do - # Note: Non-list/non-enumerable items cause Protocol.UndefinedError - # These tests document this edge case behavior - - test "foreach with non-list items (map) iterates over map entries" do - {:ok, execution} = - create_and_execute_workflow(MapItemsForEachWorkflow, %{}) - - # Maps are enumerable, so foreach processes key-value tuples - assert execution.status == :completed - # Map with 2 entries means 2 iterations - assert execution.context["processed_count"] == 2 - end - - test "foreach with non-list items (string) raises Protocol.UndefinedError" do - # Strings are not enumerable, so this raises an error - assert_raise Protocol.UndefinedError, fn -> - create_and_execute_workflow(StringItemsForEachWorkflow, %{}) - end - end - - test "foreach with non-list items (integer) raises Protocol.UndefinedError" do - # Integers are not enumerable, so this raises an error - assert_raise Protocol.UndefinedError, fn -> - create_and_execute_workflow(IntegerItemsForEachWorkflow, %{}) - end - end - - test "foreach step that raises exception fails workflow" do - {:ok, execution} = - create_and_execute_workflow(RaisingForEachWorkflow, %{}) - - assert execution.status == :failed - assert execution.error != nil - end - - # Note: nil items cause Enumerable protocol error - test "foreach with items function returning nil raises Protocol.UndefinedError" do - assert_raise Protocol.UndefinedError, fn -> - create_and_execute_workflow(NilItemsForEachWorkflow, %{}) - end - end - - test "foreach with deeply nested items processes correctly" do - {:ok, execution} = - create_and_execute_workflow(DeepNestedForEachWorkflow, %{}) - - assert execution.status == :completed - assert 
execution.context["processed_count"] == 2 - assert execution.context["names"] == ["Alice", "Bob"] - end - end - - # Helper functions - defp create_and_execute_workflow(module, input) do - config = Config.get(Durable) - repo = config.repo - {:ok, workflow_def} = module.__default_workflow__() - - attrs = %{ - workflow_module: Atom.to_string(module), - workflow_name: workflow_def.name, - status: :pending, - queue: "default", - priority: 0, - input: input, - context: %{} - } - - {:ok, execution} = - %WorkflowExecution{} - |> WorkflowExecution.changeset(attrs) - |> repo.insert() - - Executor.execute_workflow(execution.id, config) - {:ok, repo.get!(WorkflowExecution, execution.id)} - end - - defp get_step_executions(workflow_id) do - config = Config.get(Durable) - repo = config.repo - - repo.all( - from(s in StepExecution, - where: s.workflow_id == ^workflow_id, - order_by: [asc: s.inserted_at] - ) - ) - end -end - -# Test workflow modules - -defmodule SimpleForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "simple_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, ["item_1", "item_2", "item_3"]) - |> assign(:results, []) - |> assign(:processed_count, 0) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, item, _idx -> - result = "#{item}_processed" - results = data[:results] || [] - count = data[:processed_count] || 0 - - data = - data - |> assign(:results, results ++ [result]) - |> assign(:processed_count, count + 1) - - {:ok, data} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule IndexForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "index_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, ["a", "b", "c"]) - |> assign(:indices, []) - - {:ok, data} - end) - - foreach :track_indices, items: fn data -> data.items end do - step(:record_index, fn data, _item, idx -> - indices = data[:indices] || [] - {:ok, assign(data, :indices, indices ++ [idx])} - end) - end - - step(:done, fn data -> - {:ok, data} - end) - end -end - -defmodule AccumulatingForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "accumulating_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, [1, 2, 3]) - |> assign(:counter, 0) - - {:ok, data} - end) - - foreach :count_items, items: fn data -> data.items end do - step(:increment, fn data, _item, _idx -> - counter = data[:counter] || 0 - {:ok, assign(data, :counter, counter + 1)} - end) - end - - step(:done, fn data -> - {:ok, data} - end) - end -end - -defmodule ConcurrentForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "concurrent_foreach" do - step(:setup, fn data -> - {:ok, assign(data, :items, [1, 2, 3])} - end) - - foreach :process_concurrent, - items: fn data -> data.items end, - concurrency: 3, - collect_as: :item_results do - step(:slow_process, fn data, item, _idx -> - Process.sleep(50) - {:ok, assign(data, :processed_value, item * 10)} - end) - end - - step(:done, fn data -> - {:ok, data} - end) - end -end - -defmodule FailFastForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "fail_fast_foreach" do - step(:setup, fn data -> - {:ok, assign(data, :items, [1, 2, 3])} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:maybe_fail, fn data, item, _idx -> - if item == 2 do - raise "intentional failure at item 2" - end - - {:ok, assign(data, :processed, item)} - end) - end - 
- step(:never_reached, fn data -> - {:ok, assign(data, :reached, true)} - end) - end -end - -defmodule ContinueOnErrorForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "continue_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, [1, 2, 3]) - |> assign(:processed, []) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end, on_error: :continue do - step(:maybe_fail, fn data, item, _idx -> - if item == 2 do - raise "intentional failure at item 2" - end - - processed = data[:processed] || [] - {:ok, assign(data, :processed, processed ++ [item])} - end) - end - - step(:done, fn data -> - {:ok, data} - end) - end -end - -defmodule CollectAsForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "collect_as_foreach" do - step(:setup, fn data -> - {:ok, assign(data, :items, ["a", "b", "c"])} - end) - - foreach :process_items, items: fn data -> data.items end, collect_as: :collected_results do - step(:transform, fn data, item, _idx -> - {:ok, assign(data, :result, String.upcase(item))} - end) - end - - step(:done, fn data -> - {:ok, data} - end) - end -end - -defmodule EmptyForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "empty_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, []) - |> assign(:processed_count, 0) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, _item, _idx -> - count = data[:processed_count] || 0 - {:ok, assign(data, :processed_count, count + 1)} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule NilItemForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "nil_item_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, [1, nil, 3]) - |> assign(:processed, []) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, item, _idx -> - processed = data[:processed] || [] - {:ok, assign(data, :processed, processed ++ [item])} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule LargeCollectionForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "large_collection_foreach" do - step(:setup, fn data -> - # Create a list of 1000 items - items = Enum.to_list(1..1000) - - data = - data - |> assign(:items, items) - |> assign(:sum, 0) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:accumulate, fn data, item, _idx -> - sum = data[:sum] || 0 - {:ok, assign(data, :sum, sum + item)} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -# Edge case test workflows - -defmodule MapItemsForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "map_items_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, %{a: 1, b: 2}) - |> assign(:processed_count, 0) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, _item, _idx -> - count = data[:processed_count] || 0 - {:ok, assign(data, :processed_count, count + 1)} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule StringItemsForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "string_items_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, "not a list") - |> 
assign(:processed_count, 0) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, _item, _idx -> - count = data[:processed_count] || 0 - {:ok, assign(data, :processed_count, count + 1)} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule IntegerItemsForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "integer_items_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, 42) - |> assign(:processed_count, 0) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, _item, _idx -> - count = data[:processed_count] || 0 - {:ok, assign(data, :processed_count, count + 1)} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule ErrorReturningForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "error_returning_foreach" do - step(:setup, fn data -> - {:ok, assign(data, :items, [1, 2, 3])} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, item, _idx -> - if item == 2 do - {:error, "Item 2 caused an error"} - else - {:ok, assign(data, :processed, item)} - end - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule RaisingForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "raising_foreach" do - step(:setup, fn data -> - {:ok, assign(data, :items, [1, 2, 3])} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, item, _idx -> - if item == 2 do - raise "Item 2 caused an error" - end - - {:ok, assign(data, :processed, item)} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule NilItemsForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "nil_items_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, nil) - |> assign(:processed_count, 0) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, _item, _idx -> - count = data[:processed_count] || 0 - {:ok, assign(data, :processed_count, count + 1)} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end - -defmodule DeepNestedForEachWorkflow do - use Durable - use Durable.Helpers - - workflow "deep_nested_foreach" do - step(:setup, fn data -> - data = - data - |> assign(:items, [%{user: %{name: "Alice"}}, %{user: %{name: "Bob"}}]) - |> assign(:processed_count, 0) - |> assign(:names, []) - - {:ok, data} - end) - - foreach :process_items, items: fn data -> data.items end do - step(:process, fn data, item, _idx -> - name = item.user.name - count = data[:processed_count] || 0 - names = data[:names] || [] - - data = - data - |> assign(:processed_count, count + 1) - |> assign(:names, names ++ [name]) - - {:ok, data} - end) - end - - step(:final, fn data -> - {:ok, assign(data, :completed, true)} - end) - end -end diff --git a/test/durable/integration_test.exs b/test/durable/integration_test.exs index dc92728..f301364 100644 --- a/test/durable/integration_test.exs +++ b/test/durable/integration_test.exs @@ -16,7 +16,7 @@ defmodule Durable.IntegrationTest do # ============================================================================ # Scenario 1: E-Commerce Order Processing - # Features: Branch -> ForEach -> Parallel (sequential) + # 
Features: Branch -> Batch Processing -> Parallel (sequential) # ============================================================================ describe "Scenario 1: E-Commerce Order Processing" do @@ -40,7 +40,7 @@ defmodule Durable.IntegrationTest do # Digital branch should NOT have executed refute Map.has_key?(execution.context, "download_url") - # ForEach should have processed all items + # Batch processing should have processed all items assert execution.context["items_processed"] == ["Widget", "Gadget"] # Final parallel tasks should have executed @@ -68,7 +68,7 @@ defmodule Durable.IntegrationTest do # Physical branch should NOT have executed refute Map.has_key?(execution.context, "shipping_label") - # ForEach should have processed item + # Batch processing should have processed item assert execution.context["items_processed"] == ["E-Book"] assert execution.context["completed"] == true end @@ -87,7 +87,7 @@ defmodule Durable.IntegrationTest do # Verify steps executed - use qualified name patterns for generated step names assert "validate_order" in step_names assert Enum.any?(step_names, &String.contains?(&1, "__process_digital")) - assert Enum.any?(step_names, &String.contains?(&1, "__process_item")) + assert "process_items" in step_names assert Enum.any?(step_names, &String.contains?(&1, "__send_confirmation")) assert Enum.any?(step_names, &String.contains?(&1, "__update_analytics")) assert "complete" in step_names @@ -96,7 +96,7 @@ defmodule Durable.IntegrationTest do # ============================================================================ # Scenario 2: Document Approval Workflow - # Features: Decision (with goto) -> Parallel -> Branch -> ForEach + # Features: Decision (with goto) -> Parallel -> Branch -> Batch Processing # ============================================================================ describe "Scenario 2: Document Approval Workflow" do @@ -137,7 +137,7 @@ defmodule Durable.IntegrationTest do # Approved branch should have run assert execution.context["approval_processed"] == "approved" - # ForEach should have processed all items + # Batch processing should have processed all items assert execution.context["items_updated"] == ["doc1", "doc2", "doc3"] # Rejection should NOT have run @@ -165,7 +165,7 @@ defmodule Durable.IntegrationTest do assert execution.context["approval_processed"] == "rejected" assert execution.context["rejection_notified"] == true - # Approved forEach should NOT run items + # Approved batch should NOT process items refute Map.has_key?(execution.context, "items_updated") assert execution.context["finalized"] == true @@ -192,7 +192,7 @@ defmodule Durable.IntegrationTest do # ============================================================================ # Scenario 3: Batch Data Migration - # Features: ForEach -> Decision -> Parallel -> Branch + # Features: Batch Processing -> Decision -> Parallel -> Branch # ============================================================================ describe "Scenario 3: Batch Data Migration" do @@ -209,7 +209,7 @@ defmodule Durable.IntegrationTest do assert execution.status == :completed - # All batches processed via foreach + # All batches processed assert execution.context["migrated_ids"] == ["batch1", "batch2", "batch3"] # Parallel reporting tasks completed @@ -402,7 +402,7 @@ end defmodule OrderProcessingWorkflow do @moduledoc """ Scenario 1: E-Commerce Order Processing - Features: Branch -> ForEach -> Parallel (sequential, not nested) + Features: Branch -> Batch Processing -> Parallel (sequential, not nested) 
""" use Durable use Durable.Helpers @@ -441,16 +441,25 @@ defmodule OrderProcessingWorkflow do end) end - # ForEach to process line items (sequential) - foreach :process_items, items: fn data -> data.line_items end do - step(:process_item, fn data, item, _idx -> - current_list = data[:items_processed] || [] - {:ok, assign(data, :items_processed, current_list ++ [item["name"]])} - end) - end + # Batch processing using Enum.map (replaces foreach) + step(:process_items, fn data -> + items_processed = Enum.map(data.line_items, fn item -> item["name"] end) + {:ok, assign(data, :items_processed, items_processed)} + end) # Parallel notification tasks - parallel do + # Use into: to merge results back into context for backward compatibility + parallel into: fn ctx, results -> + merged = + Enum.reduce(results, ctx, fn {_key, result}, acc -> + case result do + {:ok, data} -> Map.merge(acc, data) + _ -> acc + end + end) + + {:ok, merged} + end do step(:send_confirmation, fn data -> {:ok, assign(data, :email_sent, true)} end) @@ -469,7 +478,7 @@ end defmodule DocumentApprovalWorkflow do @moduledoc """ Scenario 2: Document Approval with Decision routing - Features: Decision (goto) -> Parallel -> Branch -> ForEach (sequential) + Features: Decision (goto) -> Parallel -> Branch -> Batch Processing (sequential) """ use Durable use Durable.Helpers @@ -497,7 +506,18 @@ defmodule DocumentApprovalWorkflow do end) # Parallel notification (only runs for high-value) - parallel do + # Use into: to merge results back into context + parallel into: fn ctx, results -> + merged = + Enum.reduce(results, ctx, fn {_key, result}, acc -> + case result do + {:ok, data} -> Map.merge(acc, data) + _ -> acc + end + end) + + {:ok, merged} + end do step(:notify_approvers, fn data -> {:ok, assign(data, :notified, true)} end) @@ -532,17 +552,15 @@ defmodule DocumentApprovalWorkflow do end) end - # ForEach to update items (only runs for both, but only updates if approved) - foreach :update_items, items: fn data -> data.affected_items end do - step(:update_item, fn data, item, _idx -> - if data[:approval_processed] == "approved" do - current_list = data[:items_updated] || [] - {:ok, assign(data, :items_updated, current_list ++ [item])} - else - {:ok, data} - end - end) - end + # Batch processing to update items (only updates if approved) + step(:update_items, fn data -> + if data[:approval_processed] == "approved" do + items_updated = Enum.map(data.affected_items, fn item -> item end) + {:ok, assign(data, :items_updated, items_updated)} + else + {:ok, data} + end + end) step(:finalize, fn data -> {:ok, assign(data, :finalized, true)} @@ -553,7 +571,7 @@ end defmodule BatchMigrationWorkflow do @moduledoc """ Scenario 3: Batch Data Migration - Features: ForEach -> Decision -> Parallel -> Branch (sequential) + Features: Batch Processing -> Decision -> Parallel -> Branch (sequential) """ use Durable use Durable.Helpers @@ -569,13 +587,11 @@ defmodule BatchMigrationWorkflow do {:ok, data} end) - # ForEach to process batches (sequential) - foreach :process_batches, items: fn data -> data.batches end do - step(:migrate_batch, fn data, batch, _idx -> - current_ids = data[:migrated_ids] || [] - {:ok, assign(data, :migrated_ids, current_ids ++ [batch["id"]])} - end) - end + # Batch processing using Enum.map (replaces foreach) + step(:process_batches, fn data -> + migrated_ids = Enum.map(data.batches, fn batch -> batch["id"] end) + {:ok, assign(data, :migrated_ids, migrated_ids)} + end) # Decision based on migration results # If empty, jump to 
finalize (skipping parallel and mark_success) @@ -590,7 +606,18 @@ defmodule BatchMigrationWorkflow do end) # Parallel reporting (only runs if we have batches) - parallel do + # Use into: to merge results back into context + parallel into: fn ctx, results -> + merged = + Enum.reduce(results, ctx, fn {_key, result}, acc -> + case result do + {:ok, data} -> Map.merge(acc, data) + _ -> acc + end + end) + + {:ok, merged} + end do step(:generate_report, fn data -> {:ok, assign(data, :report_generated, true)} end) diff --git a/test/durable/parallel_test.exs b/test/durable/parallel_test.exs index ec3d26b..9d679e3 100644 --- a/test/durable/parallel_test.exs +++ b/test/durable/parallel_test.exs @@ -39,141 +39,145 @@ defmodule Durable.ParallelTest do # Should have: setup, parallel_X, parallel steps (task_a, task_b), final assert length(definition.steps) >= 4 end + + test "parallel step opts include returns key" do + {:ok, definition} = + SimpleParallelTestWorkflow.__workflow_definition__("simple_parallel") + + # Find a parallel step + parallel_step = + Enum.find(definition.steps, fn step -> + String.contains?(Atom.to_string(step.name), "__task_a") + end) + + assert parallel_step.opts[:returns] == :task_a + end end - describe "parallel execution - all succeed" do - test "executes all parallel steps" do + describe "parallel execution - results model" do + test "parallel steps produce results in __results__ map" do {:ok, execution} = create_and_execute_workflow(SimpleParallelTestWorkflow, %{}) assert execution.status == :completed - step_execs = get_step_executions(execution.id) - executed_steps = Enum.map(step_execs, & &1.step_name) - - # Should execute setup step - assert "setup" in executed_steps - - # Should execute both parallel steps - assert Enum.any?(executed_steps, &String.contains?(&1, "task_a")) - assert Enum.any?(executed_steps, &String.contains?(&1, "task_b")) - - # Should execute final step - assert "final" in executed_steps + # Results should be in __results__ map with tagged tuples + results = execution.context["__results__"] + assert is_map(results) + assert results["task_a"] == ["ok", %{"from_task_a" => true, "initialized" => true}] + assert results["task_b"] == ["ok", %{"from_task_b" => true, "initialized" => true}] end - test "context from all parallel steps is available after parallel block" do + test "next step can access __results__ from context" do {:ok, execution} = - create_and_execute_workflow(SimpleParallelTestWorkflow, %{}) + create_and_execute_workflow(ResultsAccessWorkflow, %{}) assert execution.status == :completed - # Both parallel steps should have set their context values - assert execution.context["from_task_a"] == true - assert execution.context["from_task_b"] == true - # Final step should see both values - assert execution.context["completed"] == true + # The final step should have processed the results + assert execution.context["processed_task_a"] == true + assert execution.context["processed_task_b"] == true end - test "steps execute concurrently" do + test "error results are preserved as {:error, reason} in results map" do {:ok, execution} = - create_and_execute_workflow(TimingParallelWorkflow, %{}) + create_and_execute_workflow(ErrorPreservingParallelWorkflow, %{}) assert execution.status == :completed - # Concurrency is verified by completion and context merging. - # Timing assertions are avoided due to CI variability. 
- assert execution.context["a_done"] == true - assert execution.context["b_done"] == true + + # Check that error is preserved in results + results = execution.context["__results__"] + assert results["good_task"] == ["ok", %{"good" => true}] + assert match?(["error", _], results["bad_task"]) end end - describe "parallel execution - error handling" do - test "workflow fails if any parallel step fails (fail_fast)" do + describe "parallel execution - into: callback" do + test "into: callback transforms results and returns {:ok, ctx}" do {:ok, execution} = - create_and_execute_workflow(FailingParallelWorkflow, %{}) + create_and_execute_workflow(IntoOkWorkflow, %{}) - assert execution.status == :failed - assert execution.error["type"] == "parallel_error" - assert is_list(execution.error["errors"]) + assert execution.status == :completed + + # The into callback should have transformed the results + assert execution.context["payment_id"] == 123 + assert execution.context["delivery_status"] == "confirmed" + # __results__ should NOT be in context when into: is used + refute Map.has_key?(execution.context, "__results__") end - test "complete_all waits for all steps and collects errors" do + test "into: callback returning {:error, reason} fails workflow" do {:ok, execution} = - create_and_execute_workflow(CompleteAllParallelWorkflow, %{}) + create_and_execute_workflow(IntoErrorWorkflow, %{}) assert execution.status == :failed - assert execution.error["type"] == "parallel_error" - # Should have collected error from failing step - errors = execution.error["errors"] - assert errors != [] + assert execution.error["message"] == "Payment and delivery both failed" end - end - describe "context merge strategies" do - test "deep_merge combines nested maps" do + test "into: callback returning {:goto, step, ctx} jumps to step" do {:ok, execution} = - create_and_execute_workflow(DeepMergeParallelWorkflow, %{}) + create_and_execute_workflow(IntoGotoWorkflow, %{}) assert execution.status == :completed - # Both parallel steps set nested values - assert execution.context["nested"]["from_a"] == true - assert execution.context["nested"]["from_b"] == true + # Should have skipped to handle_backorder step + assert execution.context["backorder_handled"] == true + # Should NOT have executed the normal_flow step + refute Map.has_key?(execution.context, "normal_flow_executed") end + end - test "collect gathers step results" do + describe "parallel execution - returns: option" do + test "returns: option changes result key name" do {:ok, execution} = - create_and_execute_workflow(CollectParallelWorkflow, %{}) + create_and_execute_workflow(ReturnsKeyWorkflow, %{}) assert execution.status == :completed - # Results should be collected under __parallel_results__ - assert is_map(execution.context["__parallel_results__"]) + results = execution.context["__results__"] + # Should use custom key names from returns: + assert Map.has_key?(results, "order_data") + assert Map.has_key?(results, "user_data") + # Should NOT have the original step names + refute Map.has_key?(results, "fetch_order") + refute Map.has_key?(results, "fetch_user") end end - describe "parallel continues after block" do - test "execution continues to steps after parallel block" do + describe "parallel execution - error handling" do + test "fail_fast stops on first error by default" do {:ok, execution} = - create_and_execute_workflow(SimpleParallelTestWorkflow, %{}) - - assert execution.status == :completed + create_and_execute_workflow(FailFastParallelWorkflow, %{}) - 
step_execs = get_step_executions(execution.id) - executed_steps = Enum.map(step_execs, & &1.step_name) - - # Final step should execute after parallel - assert "final" in executed_steps - assert execution.context["completed"] == true + assert execution.status == :failed + # Error should be from the failing step + assert execution.error["type"] == "test_error" end - end - describe "parallel with single step" do - test "parallel block with single step works correctly" do + test "complete_all collects all results including errors" do {:ok, execution} = - create_and_execute_workflow(SingleStepParallelWorkflow, %{}) + create_and_execute_workflow(CompleteAllWithResultsWorkflow, %{}) assert execution.status == :completed - # The single parallel step should execute - assert execution.context["from_only_task"] == true - # Final step should execute after parallel - assert execution.context["completed"] == true + # All results should be collected, including errors + results = execution.context["__results__"] + assert results["good_task"] == ["ok", %{"good" => true}] + assert match?(["error", _], results["bad_task"]) end end - describe "parallel execution - edge cases" do - test "parallel steps writing same context key (last wins behavior)" do + describe "parallel execution - concurrency" do + test "steps execute concurrently" do {:ok, execution} = - create_and_execute_workflow(ConflictingContextParallelWorkflow, %{}) + create_and_execute_workflow(TimingParallelWorkflow, %{}) assert execution.status == :completed - # With deep_merge, both steps wrote to :shared_key - # The value depends on merge order, but the key should exist - assert Map.has_key?(execution.context, "shared_key") - # At least one value should be present - assert execution.context["shared_key"] in ["from_a", "from_b"] + # Both steps should complete + results = execution.context["__results__"] + assert match?(["ok", _], results["slow_a"]) + assert match?(["ok", _], results["slow_b"]) end test "10+ parallel steps execute successfully" do @@ -182,33 +186,24 @@ defmodule Durable.ParallelTest do assert execution.status == :completed - # All 15 steps should have completed and set their keys + results = execution.context["__results__"] + # All 15 steps should have results for i <- 1..15 do - assert execution.context["step_#{i}"] == true + key = "step_#{i}" + assert Map.has_key?(results, key), "Missing result for #{key}" end - - assert execution.context["completed"] == true end + end - test "parallel with merge :collect stores results correctly" do + describe "parallel with single step" do + test "parallel block with single step works correctly" do {:ok, execution} = - create_and_execute_workflow(CollectMergeParallelWorkflow, %{}) + create_and_execute_workflow(SingleStepParallelWorkflow, %{}) assert execution.status == :completed - # Results should be collected under __parallel_results__ - assert is_map(execution.context["__parallel_results__"]) - # Each step's unique changes should be collected - results = execution.context["__parallel_results__"] - assert map_size(results) == 2 - end - - test "parallel step raising exception fails workflow" do - {:ok, execution} = - create_and_execute_workflow(RaisingParallelWorkflow, %{}) - - assert execution.status == :failed - assert execution.error != nil + results = execution.context["__results__"] + assert results["only_task"] == ["ok", %{"from_only_task" => true, "initialized" => true}] end end @@ -251,10 +246,11 @@ defmodule Durable.ParallelTest do step_type: "step", attempt: 1, status: :completed, - 
# This is the context snapshot stored by step_runner for durability + # Store result for durability output: %{ "__output__" => nil, - "__context__" => %{"from_task_a" => "original_value", "task_a_runs" => 1} + "__context__" => %{"from_task_a" => "original_value", "task_a_runs" => 1}, + "__result__" => %{"from_task_a" => "original_value", "task_a_runs" => 1} } }) |> repo.insert() @@ -271,90 +267,16 @@ defmodule Durable.ParallelTest do assert execution.status == :completed - # task_a should NOT be re-run (value should be preserved from stored context) - assert execution.context["from_task_a"] == "original_value" - assert execution.context["task_a_runs"] == 1 - - # task_b SHOULD have run (it wasn't completed before) - assert execution.context["from_task_b"] == true - - # Final step should have completed - assert execution.context["completed"] == true + # Check results - task_a should have stored result, task_b should have new result + results = execution.context["__results__"] + assert match?(["ok", %{"from_task_a" => "original_value"}], results["task_a"]) + assert match?(["ok", %{"from_task_b" => true}], results["task_b"]) # Check step executions - task_a should only have 1 execution (the pre-existing one) step_execs = get_step_executions(execution.id) task_a_execs = Enum.filter(step_execs, &String.contains?(&1.step_name, "task_a")) assert length(task_a_execs) == 1 end - - test "all parallel step contexts are merged when resuming with completed steps" do - config = Config.get(Durable) - repo = config.repo - {:ok, workflow_def} = ResumableParallelWorkflow.__default_workflow__() - - attrs = %{ - workflow_module: Atom.to_string(ResumableParallelWorkflow), - workflow_name: workflow_def.name, - status: :pending, - queue: "default", - priority: 0, - input: %{}, - context: %{"initialized" => true} - } - - {:ok, execution} = - %WorkflowExecution{} - |> WorkflowExecution.changeset(attrs) - |> repo.insert() - - parallel_step = Enum.find(workflow_def.steps, &(&1.type == :parallel)) - parallel_step_names = parallel_step.opts[:steps] - - # Mark BOTH parallel steps as completed (simulating all done) - for step_name <- parallel_step_names do - name_str = Atom.to_string(step_name) - - context_key = - if String.contains?(name_str, "task_a"), do: "from_task_a", else: "from_task_b" - - {:ok, _} = - %StepExecution{} - |> StepExecution.changeset(%{ - workflow_id: execution.id, - step_name: name_str, - step_type: "step", - attempt: 1, - status: :completed, - output: %{ - "__output__" => nil, - "__context__" => %{context_key => "stored_value"} - } - }) - |> repo.insert() - end - - {:ok, execution} = - execution - |> Ecto.Changeset.change(current_step: Atom.to_string(parallel_step.name)) - |> repo.update() - - # Execute the workflow - should skip all parallel steps - Executor.execute_workflow(execution.id, config) - execution = repo.get!(WorkflowExecution, execution.id) - - assert execution.status == :completed - - # Context from both stored parallel steps should be merged - assert execution.context["from_task_a"] == "stored_value" - assert execution.context["from_task_b"] == "stored_value" - assert execution.context["completed"] == true - - # No new step executions for parallel steps (both were already done) - step_execs = get_step_executions(execution.id) - parallel_execs = Enum.filter(step_execs, &String.contains?(&1.step_name, "parallel_")) - # Only the 2 pre-existing ones - assert length(parallel_execs) == 2 - end end # Helper functions @@ -422,155 +344,200 @@ defmodule SimpleParallelTestWorkflow do end end 
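A note on the durability snapshot exercised by the resumption test above: completed parallel steps persist their output under the `"__output__"`, `"__context__"`, and `"__result__"` keys (see the `handle_step_success` change earlier in this diff), so a resumed workflow can rebuild `__results__` without re-running finished branches. A minimal sketch of that rehydration, assuming results serialize to the tagged-list form (`["ok", data]` / `["error", reason]`) asserted throughout these tests; the module name and fallback order are illustrative, not the engine's actual API:

```elixir
defmodule ResultRehydration do
  @moduledoc false

  # Hypothetical sketch: convert a completed parallel step's stored output
  # map into the serialized tagged-list form that later steps read from
  # __results__. The "__context__" clause is an assumed fallback for rows
  # written before the "__result__" key was introduced.
  def rehydrate(%{"__result__" => result}), do: ["ok", result]
  def rehydrate(%{"__context__" => ctx}), do: ["ok", ctx]
  def rehydrate(_other), do: ["error", "missing result snapshot"]
end
```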
-defmodule TimingParallelWorkflow do +defmodule ResultsAccessWorkflow do use Durable use Durable.Helpers + use Durable.Context - workflow "timing_parallel" do + workflow "results_access" do step(:setup, fn data -> - {:ok, data} + {:ok, assign(data, :initialized, true)} end) parallel do - step(:slow_a, fn data -> - Process.sleep(50) - {:ok, assign(data, :a_done, true)} + step(:task_a, fn data -> + {:ok, assign(data, :value_a, 42)} end) - step(:slow_b, fn data -> - Process.sleep(50) - {:ok, assign(data, :b_done, true)} + step(:task_b, fn data -> + {:ok, assign(data, :value_b, 99)} end) end - step(:done, fn data -> + step(:process_results, fn data -> + # Results are serialized with string keys and list format: ["ok", data] or ["error", reason] + results = data[:__results__] || %{} + + data = + case results["task_a"] do + ["ok", _] -> assign(data, :processed_task_a, true) + _ -> data + end + + data = + case results["task_b"] do + ["ok", _] -> assign(data, :processed_task_b, true) + _ -> data + end + {:ok, data} end) end end -defmodule FailingParallelWorkflow do +defmodule ErrorPreservingParallelWorkflow do use Durable use Durable.Helpers - workflow "failing_parallel" do + workflow "error_preserving_parallel" do step(:setup, fn data -> {:ok, data} end) - parallel do + parallel on_error: :complete_all do step(:good_task, fn data -> {:ok, assign(data, :good, true)} end) step(:bad_task, fn _data -> - raise "intentional failure" + {:error, %{type: "test_error", message: "intentional failure"}} end) end - step(:never_reached, fn data -> - {:ok, assign(data, :reached, true)} + step(:final, fn data -> + {:ok, data} end) end end -defmodule CompleteAllParallelWorkflow do +defmodule IntoOkWorkflow do use Durable use Durable.Helpers - workflow "complete_all_parallel" do + workflow "into_ok" do step(:setup, fn data -> - {:ok, data} + {:ok, assign(data, :initialized, true)} end) - parallel on_error: :complete_all do - step(:good_task, fn data -> - Process.sleep(10) - {:ok, assign(data, :good, true)} + parallel into: fn ctx, results -> + case {results[:payment], results[:delivery]} do + {{:ok, payment}, {:ok, delivery}} -> + new_ctx = + ctx + |> Map.put(:payment_id, payment.id) + |> Map.put(:delivery_status, delivery.status) + + {:ok, new_ctx} + + _ -> + {:error, "Unexpected result combination"} + end + end do + step(:payment, fn _data -> + {:ok, %{id: 123, status: "paid"}} end) - step(:bad_task, fn _data -> - raise "intentional failure" + step(:delivery, fn _data -> + {:ok, %{id: 456, status: "confirmed"}} end) end - step(:never_reached, fn data -> - {:ok, assign(data, :reached, true)} + step(:final, fn data -> + {:ok, assign(data, :completed, true)} end) end end -defmodule DeepMergeParallelWorkflow do +defmodule IntoErrorWorkflow do use Durable use Durable.Helpers - workflow "deep_merge_parallel" do + workflow "into_error" do step(:setup, fn data -> - {:ok, assign(data, :nested, %{})} + {:ok, assign(data, :initialized, true)} end) - parallel merge: :deep_merge do - step(:task_a, fn data -> - nested = data[:nested] || %{} - {:ok, assign(data, :nested, Map.put(nested, :from_a, true))} + parallel into: fn _ctx, results -> + case {results[:payment], results[:delivery]} do + {{:ok, _}, {:ok, _}} -> + {:error, "Both succeeded but we expected failure"} + + _ -> + {:error, "Payment and delivery both failed"} + end + end do + step(:payment, fn _data -> + {:error, "Payment failed"} end) - step(:task_b, fn data -> - nested = data[:nested] || %{} - {:ok, assign(data, :nested, Map.put(nested, :from_b, true))} + 
step(:delivery, fn _data -> + {:error, "Delivery failed"} end) end - step(:done, fn data -> - {:ok, data} + step(:final, fn data -> + {:ok, assign(data, :completed, true)} end) end end -defmodule CollectParallelWorkflow do +defmodule IntoGotoWorkflow do use Durable use Durable.Helpers - workflow "collect_parallel" do + workflow "into_goto" do step(:setup, fn data -> - {:ok, data} + {:ok, assign(data, :initialized, true)} end) - parallel merge: :collect do - step(:task_a, fn data -> - {:ok, assign(data, :result_a, "value_a")} + parallel into: fn ctx, results -> + case results[:delivery] do + {:error, _} -> + {:goto, :handle_backorder, ctx} + + {:ok, _} -> + {:ok, ctx} + end + end do + step(:payment, fn _data -> + {:ok, %{id: 123}} end) - step(:task_b, fn data -> - {:ok, assign(data, :result_b, "value_b")} + step(:delivery, fn _data -> + {:error, :not_found} end) end - step(:done, fn data -> - {:ok, data} + step(:normal_flow, fn data -> + {:ok, assign(data, :normal_flow_executed, true)} + end) + + step(:handle_backorder, fn data -> + {:ok, assign(data, :backorder_handled, true)} + end) + + step(:final, fn data -> + {:ok, assign(data, :completed, true)} end) end end -defmodule ResumableParallelWorkflow do +defmodule ReturnsKeyWorkflow do use Durable use Durable.Helpers - workflow "resumable_parallel" do + workflow "returns_key" do step(:setup, fn data -> {:ok, assign(data, :initialized, true)} end) parallel do - step(:task_a, fn data -> - # This will be tracked to verify it doesn't re-run - current_runs = data[:task_a_runs] || 0 - data = assign(data, :task_a_runs, current_runs + 1) - {:ok, assign(data, :from_task_a, true)} + step(:fetch_order, [returns: :order_data], fn _data -> + {:ok, %{items: ["item1", "item2"]}} end) - step(:task_b, fn data -> - {:ok, assign(data, :from_task_b, true)} + step(:fetch_user, [returns: :user_data], fn _data -> + {:ok, %{name: "John", email: "john@example.com"}} end) end @@ -580,105 +547,96 @@ defmodule ResumableParallelWorkflow do end end -defmodule SingleStepParallelWorkflow do +defmodule FailFastParallelWorkflow do use Durable use Durable.Helpers - workflow "single_step_parallel" do + workflow "fail_fast_parallel" do step(:setup, fn data -> - {:ok, assign(data, :initialized, true)} + {:ok, data} end) - parallel do - step(:only_task, fn data -> - {:ok, assign(data, :from_only_task, true)} + parallel on_error: :fail_fast do + step(:good_task, fn data -> + Process.sleep(50) + {:ok, assign(data, :good, true)} + end) + + step(:bad_task, fn _data -> + {:error, %{type: "test_error", message: "intentional failure"}} end) end - step(:final, fn data -> - {:ok, assign(data, :completed, true)} + step(:never_reached, fn data -> + {:ok, assign(data, :reached, true)} end) end end -# Edge case test workflows - -defmodule ConflictingContextParallelWorkflow do +defmodule CompleteAllWithResultsWorkflow do use Durable use Durable.Helpers - workflow "conflicting_context_parallel" do + workflow "complete_all_with_results" do step(:setup, fn data -> - {:ok, assign(data, :initialized, true)} + {:ok, data} end) - parallel do - step(:task_a, fn data -> - Process.sleep(10) - {:ok, assign(data, :shared_key, "from_a")} + parallel on_error: :complete_all do + step(:good_task, fn data -> + {:ok, assign(data, :good, true)} end) - step(:task_b, fn data -> - Process.sleep(5) - {:ok, assign(data, :shared_key, "from_b")} + step(:bad_task, fn _data -> + {:error, %{type: "test_error", message: "intentional failure"}} end) end step(:final, fn data -> - {:ok, assign(data, :completed, true)} + 
{:ok, data} end) end end -defmodule ManyParallelStepsWorkflow do +defmodule TimingParallelWorkflow do use Durable use Durable.Helpers - workflow "many_parallel_steps" do + workflow "timing_parallel" do step(:setup, fn data -> - {:ok, assign(data, :initialized, true)} + {:ok, data} end) parallel do - step(:step_1, fn data -> {:ok, assign(data, :step_1, true)} end) - step(:step_2, fn data -> {:ok, assign(data, :step_2, true)} end) - step(:step_3, fn data -> {:ok, assign(data, :step_3, true)} end) - step(:step_4, fn data -> {:ok, assign(data, :step_4, true)} end) - step(:step_5, fn data -> {:ok, assign(data, :step_5, true)} end) - step(:step_6, fn data -> {:ok, assign(data, :step_6, true)} end) - step(:step_7, fn data -> {:ok, assign(data, :step_7, true)} end) - step(:step_8, fn data -> {:ok, assign(data, :step_8, true)} end) - step(:step_9, fn data -> {:ok, assign(data, :step_9, true)} end) - step(:step_10, fn data -> {:ok, assign(data, :step_10, true)} end) - step(:step_11, fn data -> {:ok, assign(data, :step_11, true)} end) - step(:step_12, fn data -> {:ok, assign(data, :step_12, true)} end) - step(:step_13, fn data -> {:ok, assign(data, :step_13, true)} end) - step(:step_14, fn data -> {:ok, assign(data, :step_14, true)} end) - step(:step_15, fn data -> {:ok, assign(data, :step_15, true)} end) + step(:slow_a, fn data -> + Process.sleep(50) + {:ok, assign(data, :a_done, true)} + end) + + step(:slow_b, fn data -> + Process.sleep(50) + {:ok, assign(data, :b_done, true)} + end) end - step(:final, fn data -> - {:ok, assign(data, :completed, true)} + step(:done, fn data -> + {:ok, data} end) end end -defmodule CollectMergeParallelWorkflow do +defmodule SingleStepParallelWorkflow do use Durable use Durable.Helpers - workflow "collect_merge_parallel" do + workflow "single_step_parallel" do step(:setup, fn data -> {:ok, assign(data, :initialized, true)} end) - parallel merge: :collect do - step(:task_a, fn data -> - {:ok, assign(data, :unique_a, "value_a")} - end) - - step(:task_b, fn data -> - {:ok, assign(data, :unique_b, "value_b")} + parallel do + step(:only_task, fn data -> + {:ok, assign(data, :from_only_task, true)} end) end @@ -688,23 +646,31 @@ defmodule CollectMergeParallelWorkflow do end end -defmodule ErrorReturningParallelWorkflow do +defmodule ManyParallelStepsWorkflow do use Durable use Durable.Helpers - workflow "error_returning_parallel" do + workflow "many_parallel_steps" do step(:setup, fn data -> {:ok, assign(data, :initialized, true)} end) parallel do - step(:good_task, fn data -> - {:ok, assign(data, :good, true)} - end) - - step(:error_task, fn _data -> - {:error, "This step returns an error"} - end) + step(:step_1, fn data -> {:ok, assign(data, :step_1, true)} end) + step(:step_2, fn data -> {:ok, assign(data, :step_2, true)} end) + step(:step_3, fn data -> {:ok, assign(data, :step_3, true)} end) + step(:step_4, fn data -> {:ok, assign(data, :step_4, true)} end) + step(:step_5, fn data -> {:ok, assign(data, :step_5, true)} end) + step(:step_6, fn data -> {:ok, assign(data, :step_6, true)} end) + step(:step_7, fn data -> {:ok, assign(data, :step_7, true)} end) + step(:step_8, fn data -> {:ok, assign(data, :step_8, true)} end) + step(:step_9, fn data -> {:ok, assign(data, :step_9, true)} end) + step(:step_10, fn data -> {:ok, assign(data, :step_10, true)} end) + step(:step_11, fn data -> {:ok, assign(data, :step_11, true)} end) + step(:step_12, fn data -> {:ok, assign(data, :step_12, true)} end) + step(:step_13, fn data -> {:ok, assign(data, :step_13, true)} end) + 
step(:step_14, fn data -> {:ok, assign(data, :step_14, true)} end) + step(:step_15, fn data -> {:ok, assign(data, :step_15, true)} end) end step(:final, fn data -> @@ -713,22 +679,25 @@ defmodule ErrorReturningParallelWorkflow do end end -defmodule RaisingParallelWorkflow do +defmodule ResumableParallelWorkflow do use Durable use Durable.Helpers - workflow "raising_parallel" do + workflow "resumable_parallel" do step(:setup, fn data -> {:ok, assign(data, :initialized, true)} end) parallel do - step(:good_task, fn data -> - {:ok, assign(data, :good, true)} + step(:task_a, fn data -> + # This will be tracked to verify it doesn't re-run + current_runs = data[:task_a_runs] || 0 + data = assign(data, :task_a_runs, current_runs + 1) + {:ok, assign(data, :from_task_a, true)} end) - step(:raising_task, fn _data -> - raise "This step raises an exception" + step(:task_b, fn data -> + {:ok, assign(data, :from_task_b, true)} end) end
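The `into:` callback that merges successful results back into the context appears verbatim in all three integration workflows above. If that pattern keeps recurring, it could be extracted into a shared test helper along these lines (module and function names are illustrative, and this assumes `into:` accepts any two-arity function, not only an inline `fn`):

```elixir
defmodule Durable.TestSupport.MergeResults do
  @moduledoc false

  # Sketch of a reusable into: callback: merge every successful branch's
  # result map into the workflow context and drop errored branches,
  # mirroring the inline callbacks in the integration workflows above.
  def into_context(ctx, results) do
    merged =
      Enum.reduce(results, ctx, fn {_key, result}, acc ->
        case result do
          {:ok, data} when is_map(data) -> Map.merge(acc, data)
          _ -> acc
        end
      end)

    {:ok, merged}
  end
end
```

With that in place, each workflow could declare `parallel into: &Durable.TestSupport.MergeResults.into_context/2 do ... end` instead of repeating the reduce inline.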