diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props index c052057a58..d8694b50be 100644 --- a/dotnet/Directory.Packages.props +++ b/dotnet/Directory.Packages.props @@ -118,14 +118,14 @@ - - - - + + + + - + diff --git a/dotnet/agent-framework-dotnet.slnx b/dotnet/agent-framework-dotnet.slnx index 9801ccc105..c4cdfca7fe 100644 --- a/dotnet/agent-framework-dotnet.slnx +++ b/dotnet/agent-framework-dotnet.slnx @@ -60,6 +60,25 @@ + + + + + + + + + + + + + + + + + + + @@ -506,4 +525,4 @@ - \ No newline at end of file + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/01_SequentialWorkflow.csproj b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/01_SequentialWorkflow.csproj new file mode 100644 index 0000000000..0c0e4f7fe0 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/01_SequentialWorkflow.csproj @@ -0,0 +1,42 @@ + + + net10.0 + v4 + Exe + enable + enable + + SingleAgent + SingleAgent + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/OrderCancelExecutors.cs b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/OrderCancelExecutors.cs new file mode 100644 index 0000000000..6d86bfe757 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/OrderCancelExecutors.cs @@ -0,0 +1,215 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace SequentialWorkflow; + +/// +/// Looks up an order by its ID and return an Order object. 
+/// +internal sealed class OrderLookup() : Executor("OrderLookup") +{ + public override async ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Magenta; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] OrderLookup: Starting lookup for order '{message}'"); + Console.ResetColor(); + + // Simulate database lookup with delay + await Task.Delay(TimeSpan.FromMicroseconds(100), cancellationToken); + + Order order = new( + Id: message, + OrderDate: DateTime.UtcNow.AddDays(-1), + IsCancelled: false, + Customer: new Customer(Name: "Jerry", Email: "jerry@example.com")); + + Console.ForegroundColor = ConsoleColor.Magenta; + Console.WriteLine($"│ [Activity] OrderLookup: Found order '{message}' for customer '{order.Customer.Name}'"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return order; + } +} + +/// +/// Cancels an order. 
+/// +internal sealed class OrderCancel() : Executor("OrderCancel") +{ + public override async ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] OrderCancel: Starting cancellation for order '{message.Id}'"); + Console.ResetColor(); + + // Simulate a slow cancellation process (e.g., calling external payment system) + for (int i = 1; i <= 3; i++) + { + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine("│ [Activity] OrderCancel: Processing..."); + Console.ResetColor(); + } + + Order cancelledOrder = message with { IsCancelled = true }; + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($"│ [Activity] OrderCancel: ✓ Order '{cancelledOrder.Id}' has been cancelled"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return cancelledOrder; + } +} + +/// +/// Sends a cancellation confirmation email to the customer. 
+/// +internal sealed class SendEmail() : Executor("SendEmail") +{ + public override ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] SendEmail: Sending email to '{message.Customer.Email}'..."); + Console.ResetColor(); + + string result = $"Cancellation email sent for order {message.Id} to {message.Customer.Email}."; + + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("│ [Activity] SendEmail: ✓ Email sent successfully!"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult(result); + } +} + +internal sealed record Order(string Id, DateTime OrderDate, bool IsCancelled, Customer Customer); + +internal sealed record Customer(string Name, string Email); + +/// +/// Represents a batch cancellation request with multiple order IDs and a reason. +/// This demonstrates using a complex typed object as workflow input. +/// +#pragma warning disable CA1812 // Instantiated via JSON deserialization at runtime +internal sealed record BatchCancelRequest(string[] OrderIds, string Reason, bool NotifyCustomers); +#pragma warning restore CA1812 + +/// +/// Represents the result of processing a batch cancellation. +/// +internal sealed record BatchCancelResult(int TotalOrders, int CancelledCount, string Reason); + +/// +/// Generates a status report for an order. 
+/// +internal sealed class StatusReport() : Executor("StatusReport") +{ + public override ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Green; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] StatusReport: Generating report for order '{message.Id}'"); + Console.ResetColor(); + + string status = message.IsCancelled ? "Cancelled" : "Active"; + string result = $"Order {message.Id} for {message.Customer.Name}: Status={status}, Date={message.OrderDate:yyyy-MM-dd}"; + + Console.ForegroundColor = ConsoleColor.Green; + Console.WriteLine($"│ [Activity] StatusReport: ✓ {result}"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult(result); + } +} + +/// +/// Processes a batch cancellation request. Accepts a complex object +/// as input, demonstrating how workflows can receive structured JSON input. 
+/// +internal sealed class BatchCancelProcessor() : Executor("BatchCancelProcessor") +{ + public override async ValueTask HandleAsync( + BatchCancelRequest message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] BatchCancelProcessor: Processing {message.OrderIds.Length} orders"); + Console.WriteLine($"│ [Activity] BatchCancelProcessor: Reason: {message.Reason}"); + Console.WriteLine($"│ [Activity] BatchCancelProcessor: Notify customers: {message.NotifyCustomers}"); + Console.ResetColor(); + + // Simulate processing each order + int cancelledCount = 0; + foreach (string orderId in message.OrderIds) + { + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + cancelledCount++; + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine($"│ [Activity] BatchCancelProcessor: ✓ Cancelled order '{orderId}'"); + Console.ResetColor(); + } + + BatchCancelResult result = new(message.OrderIds.Length, cancelledCount, message.Reason); + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($"│ [Activity] BatchCancelProcessor: ✓ Batch complete: {cancelledCount}/{message.OrderIds.Length} cancelled"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return result; + } +} + +/// +/// Generates a summary of the batch cancellation. 
+/// +internal sealed class BatchCancelSummary() : Executor("BatchCancelSummary") +{ + public override ValueTask HandleAsync( + BatchCancelResult message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine("│ [Activity] BatchCancelSummary: Generating summary"); + Console.ResetColor(); + + string result = $"Batch cancellation complete: {message.CancelledCount}/{message.TotalOrders} orders cancelled. Reason: {message.Reason}"; + + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine($"│ [Activity] BatchCancelSummary: ✓ {result}"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult(result); + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/Program.cs new file mode 100644 index 0000000000..20da58d1a1 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/Program.cs @@ -0,0 +1,52 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This sample demonstrates three workflows that share executors. +// The CancelOrder workflow cancels an order and notifies the customer. +// The OrderStatus workflow looks up an order and generates a status report. +// The BatchCancelOrders workflow accepts a complex JSON input to cancel multiple orders. +// Both CancelOrder and OrderStatus reuse the same OrderLookup executor, demonstrating executor sharing. 
+ +using Microsoft.Agents.AI.Hosting.AzureFunctions; +using Microsoft.Agents.AI.Workflows; +using Microsoft.Azure.Functions.Worker.Builder; +using Microsoft.Extensions.Hosting; +using SequentialWorkflow; + +// Define executors for all workflows +OrderLookup orderLookup = new(); +OrderCancel orderCancel = new(); +SendEmail sendEmail = new(); +StatusReport statusReport = new(); +BatchCancelProcessor batchCancelProcessor = new(); +BatchCancelSummary batchCancelSummary = new(); + +// Build the CancelOrder workflow: OrderLookup -> OrderCancel -> SendEmail +Workflow cancelOrder = new WorkflowBuilder(orderLookup) + .WithName("CancelOrder") + .WithDescription("Cancel an order and notify the customer") + .AddEdge(orderLookup, orderCancel) + .AddEdge(orderCancel, sendEmail) + .Build(); + +// Build the OrderStatus workflow: OrderLookup -> StatusReport +// This workflow shares the OrderLookup executor with the CancelOrder workflow. +Workflow orderStatus = new WorkflowBuilder(orderLookup) + .WithName("OrderStatus") + .WithDescription("Look up an order and generate a status report") + .AddEdge(orderLookup, statusReport) + .Build(); + +// Build the BatchCancelOrders workflow: BatchCancelProcessor -> BatchCancelSummary +// This workflow demonstrates using a complex JSON object as the workflow input. 
+Workflow batchCancelOrders = new WorkflowBuilder(batchCancelProcessor) + .WithName("BatchCancelOrders") + .WithDescription("Cancel multiple orders in a batch using a complex JSON input") + .AddEdge(batchCancelProcessor, batchCancelSummary) + .Build(); + +using IHost app = FunctionsApplication + .CreateBuilder(args) + .ConfigureFunctionsWebApplication() + .ConfigureDurableWorkflows(workflows => workflows.AddWorkflows(cancelOrder, orderStatus, batchCancelOrders)) + .Build(); +app.Run(); diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/README.md b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/README.md new file mode 100644 index 0000000000..384fd358a7 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/README.md @@ -0,0 +1,100 @@ +# Sequential Workflow Sample + +This sample demonstrates how to use the Microsoft Agent Framework to create an Azure Functions app that hosts durable workflows with sequential executor chains. It showcases two workflows that share a common executor, demonstrating executor reuse across workflows. + +## Key Concepts Demonstrated + +- Defining workflows with sequential executor chains using `WorkflowBuilder` +- Sharing executors across multiple workflows (the `OrderLookup` executor is used by both workflows) +- Registering workflows with the Function app using `ConfigureDurableWorkflows` +- Durable orchestration ensuring workflows survive process restarts and failures +- Starting workflows via HTTP requests +- Viewing workflow execution history and status in the Durable Task Scheduler (DTS) dashboard + +## Workflows + +This sample defines two workflows: + +1. **CancelOrder**: `OrderLookup` → `OrderCancel` → `SendEmail` — Looks up an order, cancels it, and sends a confirmation email. +2. **OrderStatus**: `OrderLookup` → `StatusReport` — Looks up an order and generates a status report. 
+ +Both workflows share the `OrderLookup` executor, which is registered only once by the framework. + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. + +## Running the Sample + +With the environment setup and function app running, you can test the sample by sending HTTP requests to the workflow endpoints. + +You can use the `demo.http` file to trigger the workflows, or a command line tool like `curl` as shown below: + +### Cancel an Order + +Bash (Linux/macOS/WSL): + +```bash +curl -X POST http://localhost:7071/api/workflows/CancelOrder/run \ + -H "Content-Type: text/plain" \ + -d "12345" +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Method Post ` + -Uri http://localhost:7071/api/workflows/CancelOrder/run ` + -ContentType text/plain ` + -Body "12345" +``` + +The response will confirm the workflow orchestration has started: + +```text +Workflow orchestration started for CancelOrder. Orchestration runId: abc123def456 +``` + +> **Tip:** You can provide a custom run ID by appending a `runId` query parameter: +> +> ```bash +> curl -X POST "http://localhost:7071/api/workflows/CancelOrder/run?runId=my-order-123" \ +> -H "Content-Type: text/plain" \ +> -d "12345" +> ``` +> +> If not provided, a unique run ID is auto-generated. + +In the function app logs, you will see the sequential execution of each executor: + +```text +│ [Activity] OrderLookup: Starting lookup for order '12345' +│ [Activity] OrderLookup: Found order '12345' for customer 'Jerry' +│ [Activity] OrderCancel: Starting cancellation for order '12345' +│ [Activity] OrderCancel: ✓ Order '12345' has been cancelled +│ [Activity] SendEmail: Sending email to 'jerry@example.com'... +│ [Activity] SendEmail: ✓ Email sent successfully! 
+``` + +### Get Order Status + +```bash +curl -X POST http://localhost:7071/api/workflows/OrderStatus/run \ + -H "Content-Type: text/plain" \ + -d "12345" +``` + +The `OrderStatus` workflow reuses the same `OrderLookup` executor and then generates a status report: + +```text +│ [Activity] OrderLookup: Starting lookup for order '12345' +│ [Activity] OrderLookup: Found order '12345' for customer 'Jerry' +│ [Activity] StatusReport: Generating report for order '12345' +│ [Activity] StatusReport: ✓ Order 12345 for Jerry: Status=Active, Date=2025-01-01 +``` + +### Viewing Workflows in the DTS Dashboard + +After running a workflow, you can navigate to the Durable Task Scheduler (DTS) dashboard to visualize the completed orchestration, inspect inputs/outputs for each step, and view execution history. + +If you are using the DTS emulator, the dashboard is available at `http://localhost:8082`. diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/demo.http b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/demo.http new file mode 100644 index 0000000000..8366216a6c --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/demo.http @@ -0,0 +1,26 @@ +# Default endpoint address for local testing +@authority=http://localhost:7071 + +### Cancel an order +POST {{authority}}/api/workflows/CancelOrder/run +Content-Type: text/plain + +12345 + +### Cancel an order with a custom run ID +POST {{authority}}/api/workflows/CancelOrder/run?runId=my-custom-id-123 +Content-Type: text/plain + +99999 + +### Get order status (shares OrderLookup executor with CancelOrder) +POST {{authority}}/api/workflows/OrderStatus/run +Content-Type: text/plain + +12345 + +### Batch cancel orders with a complex JSON input +POST {{authority}}/api/workflows/BatchCancelOrders/run +Content-Type: application/json + +{"orderIds": ["1001", "1002", "1003"], "reason": "Customer requested cancellation", 
"notifyCustomers": true} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/host.json b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/host.json new file mode 100644 index 0000000000..9384a0a583 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/01_SequentialWorkflow/host.json @@ -0,0 +1,20 @@ +{ + "version": "2.0", + "logging": { + "logLevel": { + "Microsoft.Agents.AI.DurableTask": "Information", + "Microsoft.Agents.AI.Hosting.AzureFunctions": "Information", + "DurableTask": "Information", + "Microsoft.DurableTask": "Information" + } + }, + "extensions": { + "durableTask": { + "hubName": "default", + "storageProvider": { + "type": "AzureManaged", + "connectionStringName": "DURABLE_TASK_SCHEDULER_CONNECTION_STRING" + } + } + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/02_ConcurrentWorkflow.csproj b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/02_ConcurrentWorkflow.csproj new file mode 100644 index 0000000000..0c0e4f7fe0 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/02_ConcurrentWorkflow.csproj @@ -0,0 +1,42 @@ + + + net10.0 + v4 + Exe + enable + enable + + SingleAgent + SingleAgent + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/ExpertExecutors.cs b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/ExpertExecutors.cs new file mode 100644 index 0000000000..40674126f6 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/ExpertExecutors.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Agents.AI.Workflows; + +namespace WorkflowConcurrency; + +/// +/// Parses and validates the incoming question before sending to AI agents. +/// +internal sealed class ParseQuestionExecutor() : Executor("ParseQuestion") +{ + public override ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Magenta; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine("│ [ParseQuestion] Preparing question for AI agents..."); + + string formattedQuestion = message.Trim(); + if (!formattedQuestion.EndsWith('?')) + { + formattedQuestion += "?"; + } + + Console.WriteLine($"│ [ParseQuestion] Question: \"{formattedQuestion}\""); + Console.WriteLine("│ [ParseQuestion] → Sending to Physicist and Chemist in PARALLEL..."); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult(formattedQuestion); + } +} + +/// +/// Aggregates responses from all AI agents into a comprehensive answer. +/// This is the Fan-in point where parallel results are collected. 
+/// +internal sealed class AggregatorExecutor() : Executor("Aggregator") +{ + public override ValueTask HandleAsync( + string[] message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Aggregator] 📋 Received {message.Length} AI agent responses"); + Console.WriteLine("│ [Aggregator] Combining into comprehensive answer..."); + Console.WriteLine("│ [Aggregator] ✓ Aggregation complete!"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + string aggregatedResult = "═══════════════════════════════════════════════════════════════\n" + + " AI EXPERT PANEL RESPONSES\n" + + "═══════════════════════════════════════════════════════════════\n\n"; + + for (int i = 0; i < message.Length; i++) + { + string expertLabel = i == 0 ? "⚛️ PHYSICIST" : "🧪 CHEMIST"; + aggregatedResult += $"{expertLabel}:\n{message[i]}\n\n"; + } + + aggregatedResult += "═══════════════════════════════════════════════════════════════\n" + + $"Summary: Received perspectives from {message.Length} AI experts.\n" + + "═══════════════════════════════════════════════════════════════"; + + return ValueTask.FromResult(aggregatedResult); + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/Program.cs new file mode 100644 index 0000000000..6532009d4b --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/Program.cs @@ -0,0 +1,45 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using Microsoft.Agents.AI; +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.Hosting.AzureFunctions; +using Microsoft.Agents.AI.Workflows; +using Microsoft.Azure.Functions.Worker.Builder; +using Microsoft.Extensions.Hosting; +using OpenAI.Chat; +using WorkflowConcurrency; + +string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); +string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT") + ?? throw new InvalidOperationException("AZURE_OPENAI_DEPLOYMENT is not set."); +string? azureOpenAiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY"); + +// Create Azure OpenAI client +AzureOpenAIClient openAiClient = !string.IsNullOrEmpty(azureOpenAiKey) + ? new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureOpenAiKey)) + : new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential()); +ChatClient chatClient = openAiClient.GetChatClient(deploymentName); + +// Define the 4 executors for the workflow +ParseQuestionExecutor parseQuestion = new(); +AIAgent physicist = chatClient.AsAIAgent("You are a physics expert. Be concise (2-3 sentences).", "Physicist"); +AIAgent chemist = chatClient.AsAIAgent("You are a chemistry expert. 
Be concise (2-3 sentences).", "Chemist"); +AggregatorExecutor aggregator = new(); + +// Build workflow: ParseQuestion -> [Physicist, Chemist] (parallel) -> Aggregator +Workflow workflow = new WorkflowBuilder(parseQuestion) + .WithName("ExpertReview") + .AddFanOutEdge(parseQuestion, [physicist, chemist]) + .AddFanInBarrierEdge([physicist, chemist], aggregator) + .Build(); + +using IHost app = FunctionsApplication + .CreateBuilder(args) + .ConfigureFunctionsWebApplication() + .ConfigureDurableWorkflows(workflows => workflows.AddWorkflows(workflow)) + .Build(); +app.Run(); diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/README.md b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/README.md new file mode 100644 index 0000000000..73230ff048 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/README.md @@ -0,0 +1,90 @@ +# Concurrent Workflow Sample + +This sample demonstrates how to use the Microsoft Agent Framework to create an Azure Functions app that orchestrates concurrent execution of multiple AI agents using the fan-out/fan-in pattern within a durable workflow. + +## Key Concepts Demonstrated + +- Defining workflows with fan-out/fan-in edges for parallel execution using `WorkflowBuilder` +- Mixing custom executors with AI agents in a single workflow +- Concurrent execution of multiple AI agents (physics and chemistry experts) +- Response aggregation from parallel branches into a unified result +- Durable orchestration with automatic checkpointing and resumption from failures +- Viewing workflow execution history and status in the Durable Task Scheduler (DTS) dashboard + +## Workflow + +This sample defines a single workflow: + +**ExpertReview**: `ParseQuestion` → [`Physicist`, `Chemist`] (parallel) → `Aggregator` + +1. **ParseQuestion** — A custom executor that validates and formats the incoming question. +2. 
**Physicist** and **Chemist** — AI agents that run concurrently, each providing an expert perspective. +3. **Aggregator** — A custom executor that combines the parallel responses into a comprehensive answer. + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. + +This sample requires Azure OpenAI. Set the following environment variables: + +- `AZURE_OPENAI_ENDPOINT` — Your Azure OpenAI endpoint URL. +- `AZURE_OPENAI_DEPLOYMENT` — The name of your chat model deployment. +- `AZURE_OPENAI_KEY` (optional) — Your Azure OpenAI API key. If not set, Azure CLI credentials are used. + +## Running the Sample + +With the environment setup and function app running, you can test the sample by sending an HTTP request with a science question to the workflow endpoint. + +You can use the `demo.http` file to trigger the workflow, or a command line tool like `curl` as shown below: + +Bash (Linux/macOS/WSL): + +```bash +curl -X POST http://localhost:7071/api/workflows/ExpertReview/run \ + -H "Content-Type: text/plain" \ + -d "What is temperature?" +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Method Post ` + -Uri http://localhost:7071/api/workflows/ExpertReview/run ` + -ContentType text/plain ` + -Body "What is temperature?" +``` + +The response will confirm the workflow orchestration has started: + +```text +Workflow orchestration started for ExpertReview. Orchestration runId: abc123def456 +``` + +> **Tip:** You can provide a custom run ID by appending a `runId` query parameter: +> +> ```bash +> curl -X POST "http://localhost:7071/api/workflows/ExpertReview/run?runId=my-review-123" \ +> -H "Content-Type: text/plain" \ +> -d "What is temperature?" +> ``` +> +> If not provided, a unique run ID is auto-generated. 
+ +In the function app logs, you will see the fan-out/fan-in execution pattern: + +```text +│ [ParseQuestion] Preparing question for AI agents... +│ [ParseQuestion] Question: "What is temperature?" +│ [ParseQuestion] → Sending to Physicist and Chemist in PARALLEL... +│ [Aggregator] 📋 Received 2 AI agent responses +│ [Aggregator] Combining into comprehensive answer... +│ [Aggregator] ✓ Aggregation complete! +``` + +The Physicist and Chemist AI agents execute concurrently, and the Aggregator combines their responses into a formatted expert panel result. + +### Viewing Workflows in the DTS Dashboard + +After running a workflow, you can navigate to the Durable Task Scheduler (DTS) dashboard to visualize the completed orchestration, inspect inputs/outputs for each step, and view execution history. + +If you are using the DTS emulator, the dashboard is available at `http://localhost:8082`. diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/demo.http b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/demo.http new file mode 100644 index 0000000000..1a9e563126 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/demo.http @@ -0,0 +1,14 @@ +# Default endpoint address for local testing +@authority=http://localhost:7071 + +### Prompt the agent +POST {{authority}}/api/workflows/ExpertReview/run +Content-Type: text/plain + +What is temperature? + +### Start with a custom run ID +POST {{authority}}/api/workflows/ExpertReview/run?runId=my-review-123 +Content-Type: text/plain + +What is gravity? 
diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/host.json b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/host.json new file mode 100644 index 0000000000..9384a0a583 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/02_ConcurrentWorkflow/host.json @@ -0,0 +1,20 @@ +{ + "version": "2.0", + "logging": { + "logLevel": { + "Microsoft.Agents.AI.DurableTask": "Information", + "Microsoft.Agents.AI.Hosting.AzureFunctions": "Information", + "DurableTask": "Information", + "Microsoft.DurableTask": "Information" + } + }, + "extensions": { + "durableTask": { + "hubName": "default", + "storageProvider": { + "type": "AzureManaged", + "connectionStringName": "DURABLE_TASK_SCHEDULER_CONNECTION_STRING" + } + } + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/03_WorkflowHITL.csproj b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/03_WorkflowHITL.csproj new file mode 100644 index 0000000000..c569deacd0 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/03_WorkflowHITL.csproj @@ -0,0 +1,43 @@ + + + net10.0 + v4 + Exe + enable + enable + + WorkflowHITLFunctions + WorkflowHITLFunctions + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/Executors.cs b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/Executors.cs new file mode 100644 index 0000000000..c299ee2cd5 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/Executors.cs @@ -0,0 +1,63 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace WorkflowHITLFunctions; + +/// Expense approval request passed to the RequestPort. 
+public record ApprovalRequest(string ExpenseId, decimal Amount, string EmployeeName); + +/// Approval response received from the RequestPort. +public record ApprovalResponse(bool Approved, string? Comments); + +/// Looks up expense details and creates an approval request. +internal sealed class CreateApprovalRequest() : Executor("RetrieveRequest") +{ + public override ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + // In a real scenario, this would look up expense details from a database + return new ValueTask(new ApprovalRequest(message, 1500.00m, "Jerry")); + } +} + +/// Prepares the approval request for finance review after manager approval. +internal sealed class PrepareFinanceReview() : Executor("PrepareFinanceReview") +{ + public override ValueTask HandleAsync( + ApprovalResponse message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + if (!message.Approved) + { + throw new InvalidOperationException("Cannot proceed to finance review — manager denied the expense."); + } + + // In a real scenario, this would retrieve the original expense details + return new ValueTask(new ApprovalRequest("EXP-2025-001", 1500.00m, "Jerry")); + } +} + +/// Processes the expense reimbursement based on the parallel approval responses. +internal sealed class ExpenseReimburse() : Executor("Reimburse") +{ + public override async ValueTask HandleAsync( + ApprovalResponse[] message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + // Check that all parallel approvals passed + ApprovalResponse? denied = Array.Find(message, r => !r.Approved); + if (denied is not null) + { + return $"Expense reimbursement denied. 
Comments: {denied.Comments}"; + } + + // Simulate payment processing + await Task.Delay(1000, cancellationToken); + return $"Expense reimbursed at {DateTime.UtcNow:O}"; + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/Program.cs new file mode 100644 index 0000000000..1aa1972e62 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/Program.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This sample demonstrates a Human-in-the-Loop (HITL) workflow hosted in Azure Functions. +// +// ┌──────────────────────┐ ┌────────────────┐ ┌─────────────────────┐ ┌────────────────────┐ +// │ CreateApprovalRequest│──►│ManagerApproval │──►│PrepareFinanceReview │──┬►│ BudgetApproval │──┐ +// └──────────────────────┘ │ (RequestPort) │ └─────────────────────┘ │ │ (RequestPort) │ │ +// └────────────────┘ │ └────────────────────┘ │ ┌─────────────────┐ +// │ ├─►│ExpenseReimburse │ +// │ ┌────────────────────┐ │ └─────────────────┘ +// └►│ComplianceApproval │──┘ +// │ (RequestPort) │ +// └────────────────────┘ +// +// The workflow pauses at three RequestPorts — one for the manager, then two in parallel for finance. +// After manager approval, BudgetApproval and ComplianceApproval run concurrently via fan-out/fan-in. 
+// The framework auto-generates three HTTP endpoints for each workflow: +// POST /api/workflows/{name}/run - Start the workflow +// GET /api/workflows/{name}/status/{id} - Check status and pending approvals +// POST /api/workflows/{name}/respond/{id} - Send approval response to resume + +using Microsoft.Agents.AI.Hosting.AzureFunctions; +using Microsoft.Agents.AI.Workflows; +using Microsoft.Azure.Functions.Worker.Builder; +using Microsoft.Extensions.Hosting; +using WorkflowHITLFunctions; + +// Define executors and RequestPorts for the three HITL pause points +CreateApprovalRequest createRequest = new(); +RequestPort managerApproval = RequestPort.Create("ManagerApproval"); +PrepareFinanceReview prepareFinanceReview = new(); +RequestPort budgetApproval = RequestPort.Create("BudgetApproval"); +RequestPort complianceApproval = RequestPort.Create("ComplianceApproval"); +ExpenseReimburse reimburse = new(); + +// Build the workflow: CreateApprovalRequest -> ManagerApproval -> PrepareFinanceReview -> [BudgetApproval AND ComplianceApproval] -> ExpenseReimburse +Workflow expenseApproval = new WorkflowBuilder(createRequest) + .WithName("ExpenseReimbursement") + .WithDescription("Expense reimbursement with manager and parallel finance approvals") + .AddEdge(createRequest, managerApproval) + .AddEdge(managerApproval, prepareFinanceReview) + .AddFanOutEdge(prepareFinanceReview, [budgetApproval, complianceApproval]) + .AddFanInBarrierEdge([budgetApproval, complianceApproval], reimburse) + .Build(); + +using IHost app = FunctionsApplication + .CreateBuilder(args) + .ConfigureFunctionsWebApplication() + .ConfigureDurableWorkflows(workflows => workflows.AddWorkflow(expenseApproval, exposeStatusEndpoint: true)) + .Build(); +app.Run(); diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/README.md b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/README.md new file mode 100644 index 0000000000..27322b7b6a --- /dev/null +++ 
b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/README.md @@ -0,0 +1,266 @@ +# Human-in-the-Loop (HITL) Workflow — Azure Functions + +This sample demonstrates a durable workflow with Human-in-the-Loop support hosted in Azure Functions. The workflow pauses at three `RequestPort` nodes — one sequential manager approval, then two parallel finance approvals (budget and compliance) via fan-out/fan-in. Approval responses are sent via HTTP endpoints. + +## Key Concepts Demonstrated + +- Using multiple `RequestPort` nodes for sequential and parallel human-in-the-loop interactions in a durable workflow +- Fan-out/fan-in pattern for parallel approval steps +- Auto-generated HTTP endpoints for running workflows, checking status, and sending HITL responses +- Pausing orchestrations via `WaitForExternalEvent` and resuming via `RaiseEventAsync` +- Viewing inputs the workflow is waiting for via the status endpoint + +## Workflow + +This sample implements the following workflow: + +``` +┌──────────────────────┐ ┌────────────────┐ ┌─────────────────────┐ ┌────────────────────┐ +│ CreateApprovalRequest│──►│ManagerApproval │──►│PrepareFinanceReview │──┬►│ BudgetApproval │──┐ +└──────────────────────┘ │ (RequestPort) │ └─────────────────────┘ │ │ (RequestPort) │ │ + └────────────────┘ │ └────────────────────┘ │ ┌─────────────────┐ + │ ├─►│ExpenseReimburse │ + │ ┌────────────────────┐ │ └─────────────────┘ + └►│ComplianceApproval │──┘ + │ (RequestPort) │ + └────────────────────┘ +``` + +## HTTP Endpoints + +The framework auto-generates these endpoints for workflows with `RequestPort` nodes: + +| Method | Endpoint | Description | +|--------|----------|-------------| +| POST | `/api/workflows/ExpenseReimbursement/run` | Start the workflow | +| GET | `/api/workflows/ExpenseReimbursement/status/{runId}` | Check status and inputs the workflow is waiting for | +| POST | `/api/workflows/ExpenseReimbursement/respond/{runId}` | Send approval response to resume | + 
+## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for information on how to configure the environment, including how to install and run the Durable Task Scheduler. + +## Running the Sample + +With the environment setup and function app running, you can test the sample by sending HTTP requests to the workflow endpoints. + +You can use the `demo.http` file to trigger the workflow, or a command line tool like `curl` as shown below: + +### Step 1: Start the Workflow + +Bash (Linux/macOS/WSL): + +```bash +curl -X POST http://localhost:7071/api/workflows/ExpenseReimbursement/run \ + -H "Content-Type: text/plain" -d "EXP-2025-001" +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Method Post ` + -Uri http://localhost:7071/api/workflows/ExpenseReimbursement/run ` + -ContentType text/plain ` + -Body "EXP-2025-001" +``` + +The response will confirm the workflow orchestration has started: + +```text +Workflow orchestration started for ExpenseReimbursement. Orchestration runId: abc123def456 +``` + +> [!TIP] +> You can provide a custom run ID by appending a `runId` query parameter: +> +> Bash (Linux/macOS/WSL): +> +> ```bash +> curl -X POST "http://localhost:7071/api/workflows/ExpenseReimbursement/run?runId=expense-001" \ +> -H "Content-Type: text/plain" -d "EXP-2025-001" +> ``` +> +> PowerShell: +> +> ```powershell +> Invoke-RestMethod -Method Post ` +> -Uri "http://localhost:7071/api/workflows/ExpenseReimbursement/run?runId=expense-001" ` +> -ContentType text/plain ` +> -Body "EXP-2025-001" +> ``` +> +> If not provided, a unique run ID is auto-generated. + +### Step 2: Check Workflow Status + +The workflow pauses at the `ManagerApproval` RequestPort. 
Query the status endpoint to see what input it is waiting for: + +Bash (Linux/macOS/WSL): + +```bash +curl http://localhost:7071/api/workflows/ExpenseReimbursement/status/{runId} +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Uri http://localhost:7071/api/workflows/ExpenseReimbursement/status/{runId} +``` + +```json +{ + "runId": "{runId}", + "status": "Running", + "waitingForInput": [ + { "eventName": "ManagerApproval", "input": { "ExpenseId": "EXP-2025-001", "Amount": 1500.00, "EmployeeName": "Jerry" } } + ] +} +``` + +> [!TIP] +> You can also verify this in the DTS dashboard at `http://localhost:8082`. Find the orchestration by its `runId` and you will see it is in a "Running" state, paused at a `WaitForExternalEvent` call for the `ManagerApproval` event. + +### Step 3: Send Manager Approval Response + +Bash (Linux/macOS/WSL): + +```bash +curl -X POST http://localhost:7071/api/workflows/ExpenseReimbursement/respond/{runId} \ + -H "Content-Type: application/json" \ + -d '{"eventName": "ManagerApproval", "response": {"Approved": true, "Comments": "Approved by manager."}}' +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Method Post ` + -Uri http://localhost:7071/api/workflows/ExpenseReimbursement/respond/{runId} ` + -ContentType application/json ` + -Body '{"eventName": "ManagerApproval", "response": {"Approved": true, "Comments": "Approved by manager."}}' +``` + +```json +{ + "message": "Response sent to workflow.", + "runId": "{runId}", + "eventName": "ManagerApproval", + "validated": true +} +``` + +### Step 4: Check Workflow Status Again + +The workflow now pauses at both the `BudgetApproval` and `ComplianceApproval` RequestPorts in parallel: + +Bash (Linux/macOS/WSL): + +```bash +curl http://localhost:7071/api/workflows/ExpenseReimbursement/status/{runId} +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Uri http://localhost:7071/api/workflows/ExpenseReimbursement/status/{runId} +``` + +```json +{ + "runId": "{runId}", + "status": 
"Running", + "waitingForInput": [ + { "eventName": "BudgetApproval", "input": { "ExpenseId": "EXP-2025-001", "Amount": 1500.00, "EmployeeName": "Jerry" } }, + { "eventName": "ComplianceApproval", "input": { "ExpenseId": "EXP-2025-001", "Amount": 1500.00, "EmployeeName": "Jerry" } } + ] +} +``` + +### Step 5a: Send Budget Approval Response + +Bash (Linux/macOS/WSL): + +```bash +curl -X POST http://localhost:7071/api/workflows/ExpenseReimbursement/respond/{runId} \ + -H "Content-Type: application/json" \ + -d '{"eventName": "BudgetApproval", "response": {"Approved": true, "Comments": "Budget approved."}}' +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Method Post ` + -Uri http://localhost:7071/api/workflows/ExpenseReimbursement/respond/{runId} ` + -ContentType application/json ` + -Body '{"eventName": "BudgetApproval", "response": {"Approved": true, "Comments": "Budget approved."}}' +``` + +```json +{ + "message": "Response sent to workflow.", + "runId": "{runId}", + "eventName": "BudgetApproval", + "validated": true +} +``` + +### Step 5b: Send Compliance Approval Response + +Bash (Linux/macOS/WSL): + +```bash +curl -X POST http://localhost:7071/api/workflows/ExpenseReimbursement/respond/{runId} \ + -H "Content-Type: application/json" \ + -d '{"eventName": "ComplianceApproval", "response": {"Approved": true, "Comments": "Compliance approved."}}' +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Method Post ` + -Uri http://localhost:7071/api/workflows/ExpenseReimbursement/respond/{runId} ` + -ContentType application/json ` + -Body '{"eventName": "ComplianceApproval", "response": {"Approved": true, "Comments": "Compliance approved."}}' +``` + +```json +{ + "message": "Response sent to workflow.", + "runId": "{runId}", + "eventName": "ComplianceApproval", + "validated": true +} +``` + +### Step 6: Check Final Status + +After all approvals, the workflow completes and the expense is reimbursed: + +Bash (Linux/macOS/WSL): + +```bash +curl 
http://localhost:7071/api/workflows/ExpenseReimbursement/status/{runId} +``` + +PowerShell: + +```powershell +Invoke-RestMethod -Uri http://localhost:7071/api/workflows/ExpenseReimbursement/status/{runId} +``` + +```json +{ + "runId": "{runId}", + "status": "Completed", + "waitingForInput": null +} +``` + +### Viewing Workflows in the DTS Dashboard + +After running a workflow, you can navigate to the Durable Task Scheduler (DTS) dashboard to visualize the orchestration and inspect its execution history. + +If you are using the DTS emulator, the dashboard is available at `http://localhost:8082`. + +1. Open the dashboard and look for the orchestration instance matching the `runId` returned in Step 1 (e.g., `abc123def456` or your custom ID like `expense-001`). +2. Click into the instance to see the execution timeline, which shows each executor activity and the `WaitForExternalEvent` pauses where the workflow waited for human input — including the two parallel finance approvals. +3. Expand individual activity steps to inspect inputs and outputs — for example, the `ManagerApproval`, `BudgetApproval`, and `ComplianceApproval` external events will show the approval request sent and the response received. 
diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/demo.http b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/demo.http new file mode 100644 index 0000000000..5e2993ac1c --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/demo.http @@ -0,0 +1,53 @@ +# Default endpoint address for local testing +@authority=http://localhost:7071 + +### Step 1: Start the expense reimbursement workflow +POST {{authority}}/api/workflows/ExpenseReimbursement/run +Content-Type: text/plain + +EXP-2025-001 + +### Step 1 (alternative): Start the workflow with a custom run ID +POST {{authority}}/api/workflows/ExpenseReimbursement/run?runId=expense-001 +Content-Type: text/plain + +EXP-2025-001 + +### Step 2: Check workflow status (replace {runId} with actual run ID from Step 1) +GET {{authority}}/api/workflows/ExpenseReimbursement/status/{runId} + +### Step 3: Send manager approval (replace {runId} with actual run ID from Step 1) +POST {{authority}}/api/workflows/ExpenseReimbursement/respond/{runId} +Content-Type: application/json + +{"eventName": "ManagerApproval", "response": {"Approved": true, "Comments": "Approved by manager."}} + +### Step 3 (alternative): Deny the expense at manager level +POST {{authority}}/api/workflows/ExpenseReimbursement/respond/{runId} +Content-Type: application/json + +{"eventName": "ManagerApproval", "response": {"Approved": false, "Comments": "Insufficient documentation. 
Please resubmit."}} + +### Step 4: Check workflow status after manager approval (now waiting for parallel finance approvals) +GET {{authority}}/api/workflows/ExpenseReimbursement/status/{runId} + +### Step 5a: Send budget approval (replace {runId} with actual run ID from Step 1) +POST {{authority}}/api/workflows/ExpenseReimbursement/respond/{runId} +Content-Type: application/json + +{"eventName": "BudgetApproval", "response": {"Approved": true, "Comments": "Budget approved."}} + +### Step 5b: Send compliance approval (replace {runId} with actual run ID from Step 1) +POST {{authority}}/api/workflows/ExpenseReimbursement/respond/{runId} +Content-Type: application/json + +{"eventName": "ComplianceApproval", "response": {"Approved": true, "Comments": "Compliance approved."}} + +### Step 5b (alternative): Deny the expense at compliance level +POST {{authority}}/api/workflows/ExpenseReimbursement/respond/{runId} +Content-Type: application/json + +{"eventName": "ComplianceApproval", "response": {"Approved": false, "Comments": "Compliance requirements not met."}} + +### Step 6: Check final workflow status after all approvals +GET {{authority}}/api/workflows/ExpenseReimbursement/status/{runId} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/host.json b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/host.json new file mode 100644 index 0000000000..9384a0a583 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/AzureFunctions/03_WorkflowHITL/host.json @@ -0,0 +1,20 @@ +{ + "version": "2.0", + "logging": { + "logLevel": { + "Microsoft.Agents.AI.DurableTask": "Information", + "Microsoft.Agents.AI.Hosting.AzureFunctions": "Information", + "DurableTask": "Information", + "Microsoft.DurableTask": "Information" + } + }, + "extensions": { + "durableTask": { + "hubName": "default", + "storageProvider": { + "type": "AzureManaged", + "connectionStringName": "DURABLE_TASK_SCHEDULER_CONNECTION_STRING" + } + } + } 
+} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/01_SequentialWorkflow.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/01_SequentialWorkflow.csproj new file mode 100644 index 0000000000..8a5308a6f5 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/01_SequentialWorkflow.csproj @@ -0,0 +1,29 @@ + + + net10.0 + Exe + enable + enable + SequentialWorkflow + SequentialWorkflow + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/OrderCancelExecutors.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/OrderCancelExecutors.cs new file mode 100644 index 0000000000..474cb8bcaa --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/OrderCancelExecutors.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace SequentialWorkflow; + +/// +/// Represents a request to cancel an order. +/// +/// The ID of the order to cancel. +/// The reason for cancellation. +internal sealed record OrderCancelRequest(string OrderId, string Reason); + +/// +/// Looks up an order by its ID and return an Order object. 
+/// +internal sealed class OrderLookup() : Executor("OrderLookup") +{ + public override async ValueTask HandleAsync( + OrderCancelRequest message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Magenta; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] OrderLookup: Starting lookup for order '{message.OrderId}'"); + Console.WriteLine($"│ [Activity] OrderLookup: Cancellation reason: '{message.Reason}'"); + Console.ResetColor(); + + // Simulate database lookup with delay + await Task.Delay(TimeSpan.FromMicroseconds(100), cancellationToken); + + Order order = new( + Id: message.OrderId, + OrderDate: DateTime.UtcNow.AddDays(-1), + IsCancelled: false, + CancelReason: message.Reason, + Customer: new Customer(Name: "Jerry", Email: "jerry@example.com")); + + Console.ForegroundColor = ConsoleColor.Magenta; + Console.WriteLine($"│ [Activity] OrderLookup: Found order '{message.OrderId}' for customer '{order.Customer.Name}'"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return order; + } +} + +/// +/// Cancels an order. 
+/// +internal sealed class OrderCancel() : Executor("OrderCancel") +{ + public override async ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + // Log that this activity is executing (not replaying) + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] OrderCancel: Starting cancellation for order '{message.Id}'"); + Console.ResetColor(); + + // Simulate a slow cancellation process (e.g., calling external payment system) + for (int i = 1; i <= 3; i++) + { + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine("│ [Activity] OrderCancel: Processing..."); + Console.ResetColor(); + } + + Order cancelledOrder = message with { IsCancelled = true }; + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($"│ [Activity] OrderCancel: ✓ Order '{cancelledOrder.Id}' has been cancelled"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return cancelledOrder; + } +} + +/// +/// Sends a cancellation confirmation email to the customer. 
+/// +internal sealed class SendEmail() : Executor("SendEmail") +{ + public override ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Activity] SendEmail: Sending email to '{message.Customer.Email}'..."); + Console.ResetColor(); + + string result = $"Cancellation email sent for order {message.Id} to {message.Customer.Email}."; + + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("│ [Activity] SendEmail: ✓ Email sent successfully!"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult(result); + } +} + +internal sealed record Order(string Id, DateTime OrderDate, bool IsCancelled, string? CancelReason, Customer Customer); + +internal sealed record Customer(string Name, string Email); diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/Program.cs new file mode 100644 index 0000000000..03e4ed5928 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/Program.cs @@ -0,0 +1,93 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using SequentialWorkflow; + +// Get DTS connection string from environment variable +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? "Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; + +// Define executors for the workflow +OrderLookup orderLookup = new(); +OrderCancel orderCancel = new(); +SendEmail sendEmail = new(); + +// Build the CancelOrder workflow: OrderLookup -> OrderCancel -> SendEmail +Workflow cancelOrder = new WorkflowBuilder(orderLookup) + .WithName("CancelOrder") + .WithDescription("Cancel an order and notify the customer") + .AddEdge(orderLookup, orderCancel) + .AddEdge(orderCancel, sendEmail) + .Build(); + +IHost host = Host.CreateDefaultBuilder(args) +.ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) +.ConfigureServices(services => +{ + services.ConfigureDurableWorkflows( + workflowOptions => workflowOptions.AddWorkflow(cancelOrder), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); +}) +.Build(); + +await host.StartAsync(); + +IWorkflowClient workflowClient = host.Services.GetRequiredService(); + +Console.WriteLine("Durable Workflow Sample"); +Console.WriteLine("Workflow: OrderLookup -> OrderCancel -> SendEmail"); +Console.WriteLine(); +Console.WriteLine("Enter an order ID (or 'exit'):"); + +while (true) +{ + Console.Write("> "); + string? 
input = Console.ReadLine(); + if (string.IsNullOrWhiteSpace(input) || input.Equals("exit", StringComparison.OrdinalIgnoreCase)) + { + break; + } + + try + { + OrderCancelRequest request = new(OrderId: input, Reason: "Customer requested cancellation"); + await StartNewWorkflowAsync(request, cancelOrder, workflowClient); + } + catch (Exception ex) + { + Console.WriteLine($"Error: {ex.Message}"); + } + + Console.WriteLine(); +} + +await host.StopAsync(); + +// Start a new workflow using IWorkflowClient with typed input +static async Task StartNewWorkflowAsync(OrderCancelRequest request, Workflow workflow, IWorkflowClient client) +{ + Console.WriteLine($"Starting workflow for order '{request.OrderId}' (Reason: {request.Reason})..."); + + // RunAsync returns IWorkflowRun, cast to IAwaitableWorkflowRun for completion waiting + IAwaitableWorkflowRun run = (IAwaitableWorkflowRun)await client.RunAsync(workflow, request); + Console.WriteLine($"Run ID: {run.RunId}"); + + try + { + Console.WriteLine("Waiting for workflow to complete..."); + string? result = await run.WaitForCompletionAsync(); + Console.WriteLine($"Workflow completed. {result}"); + } + catch (InvalidOperationException ex) + { + Console.WriteLine($"Failed: {ex.Message}"); + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/README.md b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/README.md new file mode 100644 index 0000000000..ac5a3e43f5 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow/README.md @@ -0,0 +1,83 @@ +# Sequential Workflow Sample + +This sample demonstrates how to run a sequential workflow as a durable orchestration from a console application using the Durable Task Framework. It showcases the **durability** aspect - if the process crashes mid-execution, the workflow automatically resumes without re-executing completed activities. 
+ +## Key Concepts Demonstrated + +- Building a sequential workflow with the `WorkflowBuilder` API +- Using `ConfigureDurableWorkflows` to register workflows with dependency injection +- Running workflows with `IWorkflowClient` +- **Durability**: Automatic resume of interrupted workflows +- **Activity caching**: Completed activities are not re-executed on replay + +## Overview + +The sample implements an order cancellation workflow with three executors: + +``` +OrderLookup --> OrderCancel --> SendEmail +``` + +| Executor | Description | +|----------|-------------| +| OrderLookup | Looks up an order by ID | +| OrderCancel | Marks the order as cancelled | +| SendEmail | Sends a cancellation confirmation email | + +## Durability Demonstration + +The key feature of Durable Task Framework is **durability**: + +- **Activity results are persisted**: When an activity completes, its result is saved +- **Orchestrations replay**: On restart, the orchestration replays from the beginning +- **Completed activities skip execution**: The framework uses cached results +- **Automatic resume**: The worker automatically picks up pending work on startup + +### Try It Yourself + +> **Tip:** To give yourself more time to stop the application during `OrderCancel`, consider increasing the loop iteration count or `Task.Delay` duration in the `OrderCancel` executor in `OrderCancelExecutors.cs`. + +1. Start the application and enter an order ID (e.g., `12345`) +2. Wait for `OrderLookup` to complete, then stop the app (Ctrl+C) during `OrderCancel` +3. Restart the application +4. Observe: + - `OrderLookup` is **NOT** re-executed (result was cached) + - `OrderCancel` **restarts** (it didn't complete before the interruption) + - `SendEmail` runs after `OrderCancel` completes + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for information on configuring the environment, including how to install and run the Durable Task Scheduler. 
+ +## Running the Sample + +```bash +cd dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/01_SequentialWorkflow +dotnet run --framework net10.0 +``` + +### Sample Output + +```text +Durable Workflow Sample +Workflow: OrderLookup -> OrderCancel -> SendEmail + +Enter an order ID (or 'exit'): +> 12345 +Starting workflow for order: 12345 +Run ID: abc123... + +[OrderLookup] Looking up order '12345'... +[OrderLookup] Found order for customer 'Jerry' + +[OrderCancel] Cancelling order '12345'... +[OrderCancel] Order cancelled successfully + +[SendEmail] Sending email to 'jerry@example.com'... +[SendEmail] Email sent successfully + +Workflow completed! + +> exit +``` + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/02_ConcurrentWorkflow.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/02_ConcurrentWorkflow.csproj new file mode 100644 index 0000000000..a05822a286 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/02_ConcurrentWorkflow.csproj @@ -0,0 +1,30 @@ + + + net10.0 + Exe + enable + enable + WorkflowConcurrency + WorkflowConcurrency + + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/ExpertExecutors.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/ExpertExecutors.cs new file mode 100644 index 0000000000..40674126f6 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/ExpertExecutors.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace WorkflowConcurrency; + +/// +/// Parses and validates the incoming question before sending to AI agents. 
+/// +internal sealed class ParseQuestionExecutor() : Executor("ParseQuestion") +{ + public override ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Magenta; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine("│ [ParseQuestion] Preparing question for AI agents..."); + + string formattedQuestion = message.Trim(); + if (!formattedQuestion.EndsWith('?')) + { + formattedQuestion += "?"; + } + + Console.WriteLine($"│ [ParseQuestion] Question: \"{formattedQuestion}\""); + Console.WriteLine("│ [ParseQuestion] → Sending to Physicist and Chemist in PARALLEL..."); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult(formattedQuestion); + } +} + +/// +/// Aggregates responses from all AI agents into a comprehensive answer. +/// This is the Fan-in point where parallel results are collected. 
+/// +internal sealed class AggregatorExecutor() : Executor("Aggregator") +{ + public override ValueTask HandleAsync( + string[] message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Aggregator] 📋 Received {message.Length} AI agent responses"); + Console.WriteLine("│ [Aggregator] Combining into comprehensive answer..."); + Console.WriteLine("│ [Aggregator] ✓ Aggregation complete!"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + string aggregatedResult = "═══════════════════════════════════════════════════════════════\n" + + " AI EXPERT PANEL RESPONSES\n" + + "═══════════════════════════════════════════════════════════════\n\n"; + + for (int i = 0; i < message.Length; i++) + { + string expertLabel = i == 0 ? "⚛️ PHYSICIST" : "🧪 CHEMIST"; + aggregatedResult += $"{expertLabel}:\n{message[i]}\n\n"; + } + + aggregatedResult += "═══════════════════════════════════════════════════════════════\n" + + $"Summary: Received perspectives from {message.Length} AI experts.\n" + + "═══════════════════════════════════════════════════════════════"; + + return ValueTask.FromResult(aggregatedResult); + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/Program.cs new file mode 100644 index 0000000000..ae68a56562 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/Program.cs @@ -0,0 +1,114 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This sample demonstrates the Fan-out/Fan-in pattern in a durable workflow. +// The workflow uses 4 executors: 2 class-based executors and 2 AI agents. 
+// +// WORKFLOW PATTERN: +// +// ParseQuestion (class-based) +// | +// +----------+----------+ +// | | +// Physicist Chemist +// (AI Agent) (AI Agent) +// | | +// +----------+----------+ +// | +// Aggregator (class-based) + +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using Microsoft.Agents.AI; +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using OpenAI.Chat; +using WorkflowConcurrency; + +// Configuration +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? "Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; +string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); +string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT") + ?? throw new InvalidOperationException("AZURE_OPENAI_DEPLOYMENT is not set."); +string? azureOpenAiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY"); + +// Create Azure OpenAI client +AzureOpenAIClient openAiClient = !string.IsNullOrEmpty(azureOpenAiKey) + ? new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureOpenAiKey)) + : new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential()); +ChatClient chatClient = openAiClient.GetChatClient(deploymentName); + +// Define the 4 executors for the workflow +ParseQuestionExecutor parseQuestion = new(); +AIAgent physicist = chatClient.AsAIAgent("You are a physics expert. Be concise (2-3 sentences).", "Physicist"); +AIAgent chemist = chatClient.AsAIAgent("You are a chemistry expert. 
Be concise (2-3 sentences).", "Chemist"); +AggregatorExecutor aggregator = new(); + +// Build workflow: ParseQuestion -> [Physicist, Chemist] (parallel) -> Aggregator +Workflow workflow = new WorkflowBuilder(parseQuestion) + .WithName("ExpertReview") + .AddFanOutEdge(parseQuestion, [physicist, chemist]) + .AddFanInBarrierEdge([physicist, chemist], aggregator) + .Build(); + +// Configure and start the host +IHost host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) + .ConfigureServices(services => + { + services.ConfigureDurableOptions( + options => options.Workflows.AddWorkflow(workflow), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); + }) + .Build(); + +await host.StartAsync(); + +IWorkflowClient workflowClient = host.Services.GetRequiredService(); + +Console.WriteLine("Fan-out/Fan-in Workflow Sample"); +Console.WriteLine("ParseQuestion -> [Physicist, Chemist] -> Aggregator"); +Console.WriteLine(); +Console.WriteLine("Enter a science question (or 'exit' to quit):"); + +while (true) +{ + Console.Write("> "); + string? input = Console.ReadLine(); + + if (string.IsNullOrWhiteSpace(input) || input.Equals("exit", StringComparison.OrdinalIgnoreCase)) + { + break; + } + + try + { + IWorkflowRun run = await workflowClient.RunAsync(workflow, input); + Console.WriteLine($"Run ID: {run.RunId}"); + + if (run is IAwaitableWorkflowRun awaitableRun) + { + string? 
result = await awaitableRun.WaitForCompletionAsync(); + + Console.WriteLine("Workflow completed!"); + Console.WriteLine(result); + } + } + catch (Exception ex) + { + Console.WriteLine($"Error: {ex.Message}"); + } + + Console.WriteLine(); +} + +await host.StopAsync(); diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/README.md b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/README.md new file mode 100644 index 0000000000..4887a77ccc --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow/README.md @@ -0,0 +1,100 @@ +# Concurrent Workflow Sample (Fan-Out/Fan-In) + +This sample demonstrates the **fan-out/fan-in** pattern in a durable workflow, combining class-based executors with AI agents running in parallel. + +## Key Concepts Demonstrated + +- **Fan-out/Fan-in pattern**: Parallel execution with result aggregation +- **Mixed executor types**: Class-based executors and AI agents in the same workflow +- **AI agents as executors**: Using `ChatClient.AsAIAgent()` to create workflow-compatible agents +- **Workflow registration**: Auto-registration of agents used within workflows +- **Standalone agents**: Registering agents outside of workflows + +## Overview + +The sample implements an expert review workflow with four executors: + +``` + ParseQuestion + | + +----------+----------+ + | | + Physicist Chemist + (AI Agent) (AI Agent) + | | + +----------+----------+ + | + Aggregator +``` + +| Executor | Type | Description | +|----------|------|-------------| +| ParseQuestion | Class-based | Parses the user's question for expert review | +| Physicist | AI Agent | Provides physics perspective (runs in parallel) | +| Chemist | AI Agent | Provides chemistry perspective (runs in parallel) | +| Aggregator | Class-based | Combines expert responses into a final answer | + +## Fan-Out/Fan-In Pattern + +The workflow demonstrates the fan-out/fan-in pattern: + +1. 
**Fan-out**: `ParseQuestion` sends the question to both `Physicist` and `Chemist` simultaneously +2. **Parallel execution**: Both AI agents process the question concurrently +3. **Fan-in**: `Aggregator` waits for both agents to complete, then combines their responses + +This pattern is useful for: +- Gathering multiple perspectives on a problem +- Parallel processing of independent tasks +- Reducing overall execution time through concurrency + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for information on configuring the environment. + +### Required Environment Variables + +```bash +# Durable Task Scheduler (optional, defaults to localhost) +DURABLE_TASK_SCHEDULER_CONNECTION_STRING="Endpoint=http://localhost:8080;TaskHub=default;Authentication=None" + +# Azure OpenAI (required) +AZURE_OPENAI_ENDPOINT="https://your-resource.openai.azure.com/" +AZURE_OPENAI_DEPLOYMENT="gpt-4o" +AZURE_OPENAI_KEY="your-key" # Optional if using Azure CLI credentials +``` + +## Running the Sample + +```bash +cd dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/02_ConcurrentWorkflow +dotnet run --framework net10.0 +``` + +### Sample Output + +```text ++-----------------------------------------------------------------------+ +| Fan-out/Fan-in Workflow Sample (4 Executors) | +| | +| ParseQuestion -> [Physicist, Chemist] -> Aggregator | +| (class-based) (AI agents, parallel) (class-based) | ++-----------------------------------------------------------------------+ + +Enter a science question (or 'exit' to quit): + +Question: Why is the sky blue? +Instance: abc123... + +[ParseQuestion] Parsing question for expert review... +[Physicist] Analyzing from physics perspective... +[Chemist] Analyzing from chemistry perspective... +[Aggregator] Combining expert responses... + +Workflow completed! + +Physics perspective: The sky appears blue due to Rayleigh scattering... +Chemistry perspective: The molecular composition of our atmosphere... 
+Combined answer: ... + +Question: exit +``` diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/03_ConditionalEdges.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/03_ConditionalEdges.csproj new file mode 100644 index 0000000000..b488b10425 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/03_ConditionalEdges.csproj @@ -0,0 +1,29 @@ + + + net10.0 + Exe + enable + enable + ConditionalEdges + ConditionalEdges + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/NotifyFraud.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/NotifyFraud.cs new file mode 100644 index 0000000000..d22ac39e68 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/NotifyFraud.cs @@ -0,0 +1,85 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace ConditionalEdges; + +internal sealed class Order +{ + public Order(string id, decimal amount) + { + this.Id = id; + this.Amount = amount; + } + public string Id { get; } + public decimal Amount { get; } + public Customer? Customer { get; set; } + public string? 
PaymentReferenceNumber { get; set; } +} + +public sealed record Customer(int Id, string Name, bool IsBlocked); + +internal sealed class OrderIdParser() : Executor("OrderIdParser") +{ + public override async ValueTask HandleAsync(string message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + return GetOrder(message); + } + + private static Order GetOrder(string id) + { + // Simulate fetching order details + return new Order(id, 100.0m); + } +} + +internal sealed class OrderEnrich() : Executor("EnrichOrder") +{ + public override async ValueTask HandleAsync(Order message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + message.Customer = GetCustomerForOrder(message.Id); + return message; + } + + private static Customer GetCustomerForOrder(string orderId) + { + if (orderId.Contains('B')) + { + return new Customer(101, "George", true); + } + + return new Customer(201, "Jerry", false); + } +} + +internal sealed class PaymentProcessor() : Executor("PaymentProcessor") +{ + public override async ValueTask HandleAsync(Order message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + // Call payment gateway. + message.PaymentReferenceNumber = Guid.NewGuid().ToString().Substring(0, 4); + return message; + } +} + +internal sealed class NotifyFraud() : Executor("NotifyFraud") +{ + public override async ValueTask HandleAsync(Order message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + // Notify fraud team. + return $"Order {message.Id} flagged as fraudulent for customer {message.Customer?.Name}."; + } +} + +internal static class OrderRouteConditions +{ + /// + /// Returns a condition that evaluates to true when the customer is blocked. + /// + internal static Func WhenBlocked() => order => order?.Customer?.IsBlocked == true; + + /// + /// Returns a condition that evaluates to true when the customer is not blocked. 
+ /// + internal static Func WhenNotBlocked() => order => order?.Customer?.IsBlocked == false; +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/Program.cs new file mode 100644 index 0000000000..b7f9ff9944 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/Program.cs @@ -0,0 +1,97 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This sample demonstrates conditional edges in a workflow. +// Orders are routed to different executors based on customer status: +// - Blocked customers → NotifyFraud +// - Valid customers → PaymentProcessor + +using ConditionalEdges; +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; + +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? 
"Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; + +// Create executor instances +OrderIdParser orderParser = new(); +OrderEnrich orderEnrich = new(); +PaymentProcessor paymentProcessor = new(); +NotifyFraud notifyFraud = new(); + +// Build workflow with conditional edges +// The condition functions evaluate the Order output from OrderEnrich +WorkflowBuilder builder = new(orderParser); +builder + .AddEdge(orderParser, orderEnrich) + .AddEdge(orderEnrich, notifyFraud, condition: OrderRouteConditions.WhenBlocked()) + .AddEdge(orderEnrich, paymentProcessor, condition: OrderRouteConditions.WhenNotBlocked()); + +Workflow auditOrder = builder.WithName("AuditOrder").Build(); + +IHost host = Host.CreateDefaultBuilder(args) +.ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) +.ConfigureServices(services => +{ + services.ConfigureDurableWorkflows( + workflowOptions => workflowOptions.AddWorkflow(auditOrder), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); +}) +.Build(); + +await host.StartAsync(); + +IWorkflowClient workflowClient = host.Services.GetRequiredService(); + +Console.WriteLine("Enter an order ID (or 'exit'):"); +Console.WriteLine("Tip: Order IDs containing 'B' are flagged as blocked customers.\n"); + +while (true) +{ + Console.Write("> "); + string? 
input = Console.ReadLine(); + if (string.IsNullOrWhiteSpace(input) || input.Equals("exit", StringComparison.OrdinalIgnoreCase)) + { + break; + } + + try + { + await StartNewWorkflowAsync(input, auditOrder, workflowClient); + } + catch (Exception ex) + { + Console.WriteLine($"Error: {ex.Message}"); + } + + Console.WriteLine(); +} + +await host.StopAsync(); + +// Start a new workflow and wait for completion +static async Task StartNewWorkflowAsync(string orderId, Workflow workflow, IWorkflowClient client) +{ + Console.WriteLine($"Starting workflow for order '{orderId}'..."); + + // Cast to IAwaitableWorkflowRun to access WaitForCompletionAsync + IAwaitableWorkflowRun run = (IAwaitableWorkflowRun)await client.RunAsync(workflow, orderId); + Console.WriteLine($"Run ID: {run.RunId}"); + + try + { + Console.WriteLine("Waiting for workflow to complete..."); + string? result = await run.WaitForCompletionAsync(); + Console.WriteLine($"Workflow completed. {result}"); + } + catch (InvalidOperationException ex) + { + Console.WriteLine($"Failed: {ex.Message}"); + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/README.md b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/README.md new file mode 100644 index 0000000000..fb8c26bf80 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges/README.md @@ -0,0 +1,92 @@ +# Conditional Edges Workflow Sample + +This sample demonstrates how to build a workflow with **conditional edges** that route execution to different paths based on runtime conditions. The workflow evaluates conditions on the output of an executor to determine which downstream executor to run. 
+ +## Key Concepts Demonstrated + +- Building workflows with **conditional edges** using `AddEdge` with a `condition` parameter +- Defining reusable condition functions for routing logic +- Branching workflow execution based on data-driven decisions +- Using `ConfigureDurableWorkflows` to register workflows with dependency injection + +## Overview + +The sample implements an order audit workflow that routes orders differently based on whether the customer is blocked (flagged for fraud): + +``` +OrderIdParser --> OrderEnrich --[IsBlocked]--> NotifyFraud + | + +--[NotBlocked]--> PaymentProcessor +``` + +| Executor | Description | +|----------|-------------| +| OrderIdParser | Parses the order ID and retrieves order details | +| OrderEnrich | Enriches the order with customer information | +| PaymentProcessor | Processes payment for valid orders | +| NotifyFraud | Notifies the fraud team for blocked customers | + +## How Conditional Edges Work + +Conditional edges allow you to specify a condition function that determines whether the edge should be traversed: + +```csharp +builder + .AddEdge(orderParser, orderEnrich) + .AddEdge(orderEnrich, notifyFraud, condition: OrderRouteConditions.WhenBlocked()) + .AddEdge(orderEnrich, paymentProcessor, condition: OrderRouteConditions.WhenNotBlocked()); +``` + +The condition functions receive the output of the source executor and return a boolean: + +```csharp +internal static class OrderRouteConditions +{ + // Routes to NotifyFraud when customer is blocked + internal static Func WhenBlocked() => + order => order?.Customer?.IsBlocked == true; + + // Routes to PaymentProcessor when customer is not blocked + internal static Func WhenNotBlocked() => + order => order?.Customer?.IsBlocked == false; +} +``` + +### Routing Logic + +In this sample, the routing is based on the order ID: +- Order IDs containing the letter **'B'** are associated with blocked customers → routed to `NotifyFraud` +- All other order IDs are associated with valid 
customers → routed to `PaymentProcessor` + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for information on configuring the environment, including how to install and run the Durable Task Scheduler. + +## Running the Sample + +```bash +cd dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/03_ConditionalEdges +dotnet run --framework net10.0 +``` + +### Sample Output + +**Valid order (routes to PaymentProcessor):** +```text +Enter an order ID (or 'exit'): +> 12345 +Starting workflow for order '12345'... +Run ID: abc123... +Waiting for workflow to complete... +Workflow completed. {"Id":"12345","Amount":100.0,"Customer":{"Id":201,"Name":"Jerry","IsBlocked":false},"PaymentReferenceNumber":"a1b2"} +``` + +**Blocked order (routes to NotifyFraud):** +```text +Enter an order ID (or 'exit'): +> 12345B +Starting workflow for order '12345B'... +Run ID: def456... +Waiting for workflow to complete... +Workflow completed. Order 12345B flagged as fraudulent for customer George. +``` diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/04_WorkflowAndAgents.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/04_WorkflowAndAgents.csproj new file mode 100644 index 0000000000..a05822a286 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/04_WorkflowAndAgents.csproj @@ -0,0 +1,30 @@ + + + net10.0 + Exe + enable + enable + WorkflowConcurrency + WorkflowConcurrency + + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/ParseQuestionExecutor.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/ParseQuestionExecutor.cs new file mode 100644 index 0000000000..e9a6712393 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/ParseQuestionExecutor.cs @@ -0,0 +1,73 @@ +// Copyright (c) Microsoft. 
All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace WorkflowConcurrency; + +/// +/// Parses and validates the incoming question before sending to AI agents. +/// +internal sealed class ParseQuestionExecutor() : Executor("ParseQuestion") +{ + public override ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Magenta; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine("│ [ParseQuestion] Preparing question for AI agents..."); + + string formattedQuestion = message.Trim(); + if (!formattedQuestion.EndsWith('?')) + { + formattedQuestion += "?"; + } + + Console.WriteLine($"│ [ParseQuestion] Question: \"{formattedQuestion}\""); + Console.WriteLine("│ [ParseQuestion] → Sending to experts..."); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult(formattedQuestion); + } +} + +/// +/// Aggregates responses from multiple AI agents into a unified response. +/// This executor collects all expert opinions and synthesizes them. 
+/// +internal sealed class ResponseAggregatorExecutor() : Executor("ResponseAggregator") +{ + public override ValueTask HandleAsync( + string[] message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [Aggregator] 📋 Received {message.Length} AI agent responses"); + Console.WriteLine("│ [Aggregator] Combining into comprehensive answer..."); + Console.WriteLine("│ [Aggregator] ✓ Aggregation complete!"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + string aggregatedResult = "═══════════════════════════════════════════════════════════════\n" + + " AI EXPERT PANEL RESPONSES\n" + + "═══════════════════════════════════════════════════════════════\n\n"; + + for (int i = 0; i < message.Length; i++) + { + string expertLabel = i == 0 ? "⚛️ PHYSICIST" : "🧪 CHEMIST"; + aggregatedResult += $"{expertLabel}:\n{message[i]}\n\n"; + } + + aggregatedResult += "═══════════════════════════════════════════════════════════════\n" + + $"Summary: Received perspectives from {message.Length} AI experts.\n" + + "═══════════════════════════════════════════════════════════════"; + + return ValueTask.FromResult(aggregatedResult); + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/Program.cs new file mode 100644 index 0000000000..5dfec4f277 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/04_WorkflowAndAgents/Program.cs @@ -0,0 +1,133 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This sample demonstrates the THREE ways to configure durable agents and workflows: +// +// 1. ConfigureDurableAgents() - For standalone agents only +// 2. 
ConfigureDurableWorkflows() - For workflows only +// 3. ConfigureDurableOptions() - For both agents AND workflows +// +// KEY: All methods can be called MULTIPLE times - configurations are ADDITIVE. + +using Azure; +using Azure.AI.OpenAI; +using Azure.Identity; +using Microsoft.Agents.AI; +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using OpenAI.Chat; +using WorkflowConcurrency; + +// Configuration +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? "Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; +string endpoint = Environment.GetEnvironmentVariable("AZURE_OPENAI_ENDPOINT") + ?? throw new InvalidOperationException("AZURE_OPENAI_ENDPOINT is not set."); +string deploymentName = Environment.GetEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT") + ?? throw new InvalidOperationException("AZURE_OPENAI_DEPLOYMENT is not set."); +string? azureOpenAiKey = Environment.GetEnvironmentVariable("AZURE_OPENAI_KEY"); + +// Create AI agents +AzureOpenAIClient openAiClient = !string.IsNullOrEmpty(azureOpenAiKey) + ? new AzureOpenAIClient(new Uri(endpoint), new AzureKeyCredential(azureOpenAiKey)) + : new AzureOpenAIClient(new Uri(endpoint), new AzureCliCredential()); +ChatClient chatClient = openAiClient.GetChatClient(deploymentName); + +AIAgent biologist = chatClient.AsAIAgent("You are a biology expert. Explain concepts clearly in 2-3 sentences.", "Biologist"); +AIAgent physicist = chatClient.AsAIAgent("You are a physics expert. Explain concepts clearly in 2-3 sentences.", "Physicist"); +AIAgent chemist = chatClient.AsAIAgent("You are a chemistry expert. 
Explain concepts clearly in 2-3 sentences.", "Chemist"); + +// Create workflows +ParseQuestionExecutor questionParser = new(); +ResponseAggregatorExecutor responseAggregator = new(); + +Workflow physicsWorkflow = new WorkflowBuilder(questionParser) + .WithName("PhysicsExpertReview") + .AddEdge(questionParser, physicist) + .Build(); + +Workflow expertTeamWorkflow = new WorkflowBuilder(questionParser) +.WithName("ExpertTeamReview") +.AddFanOutEdge(questionParser, [biologist, physicist]) +.AddFanInBarrierEdge([biologist, physicist], responseAggregator) +.Build(); + +Workflow chemistryWorkflow = new WorkflowBuilder(questionParser) + .WithName("ChemistryExpertReview") + .AddEdge(questionParser, chemist) + .Build(); + +// Configure services - demonstrating all 3 methods (each can be called multiple times) +IHost host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) + .ConfigureServices(services => + { + // METHOD 1: ConfigureDurableAgents - for standalone agents only + services.ConfigureDurableAgents( + options => options.AddAIAgent(biologist), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); + + // METHOD 2: ConfigureDurableWorkflows - for workflows only + services.ConfigureDurableWorkflows(options => options.AddWorkflow(physicsWorkflow)); + + // METHOD 3: ConfigureDurableOptions - for both agents AND workflows + services.ConfigureDurableOptions(options => + { + options.Agents.AddAIAgent(chemist); + options.Workflows.AddWorkflow(expertTeamWorkflow); + }); + + // Second call to ConfigureDurableOptions (additive - adds to existing config) + services.ConfigureDurableOptions(options => options.Workflows.AddWorkflow(chemistryWorkflow)); + }) + .Build(); + +await host.StartAsync(); +IServiceProvider services = host.Services; +IWorkflowClient workflowClient = services.GetRequiredService(); + +// DEMO 1: 
Direct agent conversation (standalone agents) +Console.WriteLine("\n═══ DEMO 1: Direct Agent Conversation ═══\n"); + +AIAgent biologistProxy = services.GetRequiredKeyedService("Biologist"); +AgentSession session = await biologistProxy.CreateSessionAsync(); +AgentResponse response = await biologistProxy.RunAsync("What is photosynthesis?", session); +Console.WriteLine($"🧬 Biologist: {response.Text}\n"); + +AIAgent chemistProxy = services.GetRequiredKeyedService("Chemist"); +session = await chemistProxy.CreateSessionAsync(); +response = await chemistProxy.RunAsync("What is a chemical bond?", session); +Console.WriteLine($"🧪 Chemist: {response.Text}\n"); + +// DEMO 2: Single-agent workflow +Console.WriteLine("═══ DEMO 2: Single-Agent Workflow ═══\n"); +await RunWorkflowAsync(workflowClient, physicsWorkflow, "What is the relationship between energy and mass?"); + +// DEMO 3: Multi-agent workflow +Console.WriteLine("═══ DEMO 3: Multi-Agent Workflow ═══\n"); +await RunWorkflowAsync(workflowClient, expertTeamWorkflow, "How does radiation affect living cells?"); + +// DEMO 4: Workflow from second ConfigureDurableOptions call +Console.WriteLine("═══ DEMO 4: Workflow (added via 2nd ConfigureDurableOptions) ═══\n"); +await RunWorkflowAsync(workflowClient, chemistryWorkflow, "What happens during combustion?"); + +Console.WriteLine("\n✅ All demos completed!"); +await host.StopAsync(); + +// Helper method +static async Task RunWorkflowAsync(IWorkflowClient client, Workflow workflow, string question) +{ + Console.WriteLine($"📋 {workflow.Name}: \"{question}\""); + IWorkflowRun run = await client.RunAsync(workflow, question); + if (run is IAwaitableWorkflowRun awaitable) + { + string? 
result = await awaitable.WaitForCompletionAsync(); + Console.WriteLine($"✅ {result}\n"); + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/05_WorkflowEvents.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/05_WorkflowEvents.csproj new file mode 100644 index 0000000000..09e20ef622 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/05_WorkflowEvents.csproj @@ -0,0 +1,28 @@ + + + net10.0 + Exe + enable + enable + WorkflowEvents + WorkflowEvents + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/Executors.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/Executors.cs new file mode 100644 index 0000000000..47880f0fff --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/Executors.cs @@ -0,0 +1,129 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Agents.AI.Workflows; + +namespace WorkflowEvents; + +// ═══════════════════════════════════════════════════════════════════════════════ +// Custom event types - callers observe these via WatchStreamAsync +// ═══════════════════════════════════════════════════════════════════════════════ + +internal sealed class OrderLookupStartedEvent(string orderId) : WorkflowEvent(orderId) +{ + public string OrderId { get; } = orderId; +} + +internal sealed class OrderFoundEvent(string customerName) : WorkflowEvent(customerName) +{ + public string CustomerName { get; } = customerName; +} + +internal sealed class CancellationProgressEvent(int percentComplete, string status) : WorkflowEvent(status) +{ + public int PercentComplete { get; } = percentComplete; + public string Status { get; } = status; +} + +internal sealed class OrderCancelledEvent() : WorkflowEvent("Order cancelled"); + +internal sealed class EmailSentEvent(string email) : WorkflowEvent(email) +{ + public string Email { get; } = email; +} + +// ═══════════════════════════════════════════════════════════════════════════════ +// Domain models +// ═══════════════════════════════════════════════════════════════════════════════ + +internal sealed record Order(string Id, DateTime OrderDate, bool IsCancelled, string? CancelReason, Customer Customer); + +internal sealed record Customer(string Name, string Email); + +// ═══════════════════════════════════════════════════════════════════════════════ +// Executors - emit events via AddEventAsync and YieldOutputAsync +// ═══════════════════════════════════════════════════════════════════════════════ + +/// +/// Looks up an order by ID, emitting progress events. 
+/// +internal sealed class OrderLookup() : Executor("OrderLookup") +{ + public override async ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + await context.AddEventAsync(new OrderLookupStartedEvent(message), cancellationToken); + + // Simulate database lookup + await Task.Delay(TimeSpan.FromSeconds(1), cancellationToken); + + Order order = new( + Id: message, + OrderDate: DateTime.UtcNow.AddDays(-1), + IsCancelled: false, + CancelReason: "Customer requested cancellation", + Customer: new Customer(Name: "Jerry", Email: "jerry@example.com")); + + await context.AddEventAsync(new OrderFoundEvent(order.Customer.Name), cancellationToken); + + // YieldOutputAsync emits a WorkflowOutputEvent observable via streaming + await context.YieldOutputAsync(order, cancellationToken); + + return order; + } +} + +/// +/// Cancels an order, emitting progress events during the multi-step process. +/// +internal sealed class OrderCancel() : Executor("OrderCancel") +{ + public override async ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + await context.AddEventAsync(new CancellationProgressEvent(0, "Starting cancellation"), cancellationToken); + + // Simulate a multi-step cancellation process + await Task.Delay(TimeSpan.FromMilliseconds(500), cancellationToken); + await context.AddEventAsync(new CancellationProgressEvent(33, "Contacting payment provider"), cancellationToken); + + await Task.Delay(TimeSpan.FromMilliseconds(500), cancellationToken); + await context.AddEventAsync(new CancellationProgressEvent(66, "Processing refund"), cancellationToken); + + await Task.Delay(TimeSpan.FromMilliseconds(500), cancellationToken); + + Order cancelledOrder = message with { IsCancelled = true }; + await context.AddEventAsync(new CancellationProgressEvent(100, "Complete"), cancellationToken); + await context.AddEventAsync(new OrderCancelledEvent(), 
cancellationToken); + + await context.YieldOutputAsync(cancelledOrder, cancellationToken); + + return cancelledOrder; + } +} + +/// +/// Sends a cancellation confirmation email, emitting an event on completion. +/// +internal sealed class SendEmail() : Executor("SendEmail") +{ + public override async ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + // Simulate sending email + await Task.Delay(TimeSpan.FromMilliseconds(500), cancellationToken); + + string result = $"Cancellation email sent for order {message.Id} to {message.Customer.Email}."; + + await context.AddEventAsync(new EmailSentEvent(message.Customer.Email), cancellationToken); + + await context.YieldOutputAsync(result, cancellationToken); + + return result; + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/Program.cs new file mode 100644 index 0000000000..3ddec1db37 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/Program.cs @@ -0,0 +1,138 @@ +// Copyright (c) Microsoft. All rights reserved. + +// ═══════════════════════════════════════════════════════════════════════════════ +// SAMPLE: Workflow Events and Streaming +// ═══════════════════════════════════════════════════════════════════════════════ +// +// This sample demonstrates how to use IWorkflowContext event methods in executors +// and stream events from the caller side: +// +// 1. AddEventAsync - Emit custom events that callers can observe in real-time +// 2. StreamAsync - Start a workflow and obtain a streaming handle +// 3. WatchStreamAsync - Observe events as they occur (custom, framework, and terminal) +// +// The sample uses IWorkflowClient.StreamAsync to start a workflow and +// WatchStreamAsync to observe events as they occur in real-time. 
+// +// Workflow: OrderLookup -> OrderCancel -> SendEmail +// ═══════════════════════════════════════════════════════════════════════════════ + +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using WorkflowEvents; + +// Get DTS connection string from environment variable +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? "Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; + +// Define executors and build workflow +OrderLookup orderLookup = new(); +OrderCancel orderCancel = new(); +SendEmail sendEmail = new(); + +Workflow cancelOrder = new WorkflowBuilder(orderLookup) + .WithName("CancelOrder") + .WithDescription("Cancel an order and notify the customer") + .AddEdge(orderLookup, orderCancel) + .AddEdge(orderCancel, sendEmail) + .Build(); + +// Configure host with durable workflow support +IHost host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) + .ConfigureServices(services => + { + services.ConfigureDurableWorkflows( + workflowOptions => workflowOptions.AddWorkflow(cancelOrder), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); + }) + .Build(); + +await host.StartAsync(); + +IWorkflowClient workflowClient = host.Services.GetRequiredService(); + +Console.WriteLine("Workflow Events Demo - Enter order ID (or 'exit'):"); + +while (true) +{ + Console.Write("> "); + string? 
input = Console.ReadLine(); + if (string.IsNullOrWhiteSpace(input) || input.Equals("exit", StringComparison.OrdinalIgnoreCase)) + { + break; + } + + try + { + await RunWorkflowWithStreamingAsync(input, cancelOrder, workflowClient); + } + catch (Exception ex) + { + Console.WriteLine($"Error: {ex.Message}"); + } + + Console.WriteLine(); +} + +await host.StopAsync(); + +// Runs a workflow and streams events as they occur +static async Task RunWorkflowWithStreamingAsync(string orderId, Workflow workflow, IWorkflowClient client) +{ + // StreamAsync starts the workflow and returns a streaming handle for observing events + IStreamingWorkflowRun run = await client.StreamAsync(workflow, orderId); + Console.WriteLine($"Started run: {run.RunId}"); + + // WatchStreamAsync yields events as they're emitted by executors + await foreach (WorkflowEvent evt in run.WatchStreamAsync()) + { + Console.WriteLine($" New event received at {DateTime.Now:HH:mm:ss.ffff} ({evt.GetType().Name})"); + + switch (evt) + { + // Custom domain events (emitted via AddEventAsync) + case OrderLookupStartedEvent e: + WriteColored($" [Lookup] Looking up order {e.OrderId}", ConsoleColor.Cyan); + break; + case OrderFoundEvent e: + WriteColored($" [Lookup] Found: {e.CustomerName}", ConsoleColor.Cyan); + break; + case CancellationProgressEvent e: + WriteColored($" [Cancel] {e.PercentComplete}% - {e.Status}", ConsoleColor.Yellow); + break; + case OrderCancelledEvent: + WriteColored(" [Cancel] Done", ConsoleColor.Yellow); + break; + case EmailSentEvent e: + WriteColored($" [Email] Sent to {e.Email}", ConsoleColor.Magenta); + break; + + case WorkflowOutputEvent e: + WriteColored($" [Output] {e.ExecutorId}", ConsoleColor.DarkGray); + break; + + // Workflow completion + case DurableWorkflowCompletedEvent e: + WriteColored($" Completed: {e.Result}", ConsoleColor.Green); + break; + case DurableWorkflowFailedEvent e: + WriteColored($" Failed: {e.ErrorMessage}", ConsoleColor.Red); + break; + } + } +} + +static void 
WriteColored(string message, ConsoleColor color) +{ + Console.ForegroundColor = color; + Console.WriteLine(message); + Console.ResetColor(); +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/README.md b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/README.md new file mode 100644 index 0000000000..b519ec8d5c --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/05_WorkflowEvents/README.md @@ -0,0 +1,127 @@ +# Workflow Events Sample + +This sample demonstrates how to use workflow events and streaming in durable workflows. + +## What it demonstrates + +1. **Custom Events** (`AddEventAsync`) — Executors emit domain-specific events during execution +2. **Event Streaming** (`StreamAsync` / `WatchStreamAsync`) — Callers observe events in real-time as the workflow progresses +3. **Framework Events** — Automatic `ExecutorInvokedEvent`, `ExecutorCompletedEvent`, and `WorkflowOutputEvent` events emitted by the framework + +## Emitting Custom Events + +Executors can emit custom domain events during execution using the `IWorkflowContext` instance passed to `HandleAsync`. These events are streamed to callers in real-time via `WatchStreamAsync`. + +### Defining a custom event + +Create a class that inherits from `WorkflowEvent`. Pass any data payload to the base constructor: + +```csharp +public class CancellationProgressEvent(int percentComplete, string status) : WorkflowEvent(status) +{ + public int PercentComplete { get; } = percentComplete; + public string Status { get; } = status; +} +``` + +### Emitting the event from an executor + +Call `AddEventAsync` on the `IWorkflowContext` inside your executor's `HandleAsync` method: + +```csharp +public override async ValueTask HandleAsync( + Order message, + IWorkflowContext context, + CancellationToken cancellationToken = default) +{ + await context.AddEventAsync(new CancellationProgressEvent(33, "Processing refund"), cancellationToken); + // ... 
rest of the executor logic +} +``` + +### Observing events from the caller + +Use `StreamAsync` to start the workflow and `WatchStreamAsync` to observe events. Pattern match on your custom event types: + +```csharp +IStreamingWorkflowRun run = await workflowClient.StreamAsync(workflow, input); + +await foreach (WorkflowEvent evt in run.WatchStreamAsync()) +{ + switch (evt) + { + case CancellationProgressEvent e: + Console.WriteLine($"{e.PercentComplete}% - {e.Status}"); + break; + } +} +``` + +## Workflow Structure + +``` +OrderLookup → OrderCancel → SendEmail +``` + +Each executor emits custom events during execution: +- `OrderLookup` emits `OrderLookupStartedEvent` and `OrderFoundEvent` +- `OrderCancel` emits `CancellationProgressEvent` (with percentage) and `OrderCancelledEvent` +- `SendEmail` emits `EmailSentEvent` + +## Prerequisites + +- [Durable Task Scheduler](https://learn.microsoft.com/en-us/azure/azure-functions/durable/durable-task-scheduler/durable-task-scheduler) running locally or in Azure +- Set the `DURABLE_TASK_SCHEDULER_CONNECTION_STRING` environment variable (defaults to local emulator) + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. + +## Running the sample + +```bash +dotnet run +``` + +Enter an order ID at the prompt to start a workflow and watch events stream in real-time: + +```text +> order-42 +Started run: b6ba4d19... 
+ New event received at 13:27:41.4956 (ExecutorInvokedEvent) + New event received at 13:27:41.5019 (OrderLookupStartedEvent) + [Lookup] Looking up order order-42 + New event received at 13:27:41.5025 (OrderFoundEvent) + [Lookup] Found: Jerry + New event received at 13:27:41.5026 (ExecutorCompletedEvent) + New event received at 13:27:41.5026 (WorkflowOutputEvent) + [Output] OrderLookup + New event received at 13:27:43.0772 (ExecutorInvokedEvent) + New event received at 13:27:43.0773 (CancellationProgressEvent) + [Cancel] 0% - Starting cancellation + New event received at 13:27:43.0775 (CancellationProgressEvent) + [Cancel] 33% - Contacting payment provider + New event received at 13:27:43.0776 (CancellationProgressEvent) + [Cancel] 66% - Processing refund + New event received at 13:27:43.0777 (CancellationProgressEvent) + [Cancel] 100% - Complete + New event received at 13:27:43.0779 (OrderCancelledEvent) + [Cancel] Done + New event received at 13:27:43.0780 (ExecutorCompletedEvent) + New event received at 13:27:43.0780 (WorkflowOutputEvent) + [Output] OrderCancel + New event received at 13:27:43.6610 (ExecutorInvokedEvent) + New event received at 13:27:43.6611 (EmailSentEvent) + [Email] Sent to jerry@example.com + New event received at 13:27:43.6613 (ExecutorCompletedEvent) + New event received at 13:27:43.6613 (WorkflowOutputEvent) + [Output] SendEmail + New event received at 13:27:43.6619 (DurableWorkflowCompletedEvent) + Completed: Cancellation email sent for order order-42 to jerry@example.com. +``` + +### Viewing Workflows in the DTS Dashboard + +After running a workflow, you can navigate to the Durable Task Scheduler (DTS) dashboard to inspect the workflow execution and events. + +If you are using the DTS emulator, the dashboard is available at `http://localhost:8082`. 
diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/06_WorkflowSharedState.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/06_WorkflowSharedState.csproj new file mode 100644 index 0000000000..c7efbb7d1b --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/06_WorkflowSharedState.csproj @@ -0,0 +1,29 @@ + + + net10.0 + Exe + enable + enable + WorkflowSharedState + WorkflowSharedState + + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/Executors.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/Executors.cs new file mode 100644 index 0000000000..c0c8cfd097 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/Executors.cs @@ -0,0 +1,182 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace WorkflowSharedState; + +// ═══════════════════════════════════════════════════════════════════════════════ +// Domain models +// ═══════════════════════════════════════════════════════════════════════════════ + +/// +/// The primary order data passed through the pipeline via return values. +/// +internal sealed record OrderDetails(string OrderId, string CustomerName, decimal Amount, DateTime OrderDate); + +/// +/// Cross-cutting audit trail accumulated in shared state across executors. +/// Each executor appends its step name and timestamp. This data does not flow +/// through return values — it lives only in shared state. 
+/// +internal sealed record AuditEntry(string Step, string Timestamp, string Detail); + +// ═══════════════════════════════════════════════════════════════════════════════ +// Executors +// ═══════════════════════════════════════════════════════════════════════════════ + +/// +/// Validates the order and writes the initial audit entry and tax rate to shared state. +/// The order details are returned as the executor output (normal message flow), +/// while the audit trail and tax rate are stored in shared state (side-channel). +/// If the order ID starts with "INVALID", the executor halts the workflow early +/// using . +/// +internal sealed class ValidateOrder() : Executor("ValidateOrder") +{ + public override async ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + await Task.Delay(TimeSpan.FromMilliseconds(200), cancellationToken); + + // Halt the workflow early if the order ID is invalid. + // No downstream executors will run after this. + if (message.StartsWith("INVALID", StringComparison.OrdinalIgnoreCase)) + { + await context.YieldOutputAsync($"Order '{message}' failed validation. Halting workflow.", cancellationToken); + await context.RequestHaltAsync(); + return new OrderDetails(message, "Unknown", 0, DateTime.UtcNow); + } + + OrderDetails details = new(message, "Jerry", 249.99m, DateTime.UtcNow); + + // Store the tax rate in shared state — downstream ProcessPayment reads it + // without needing it in the message chain. 
+ await context.QueueStateUpdateAsync("taxRate", 0.085m, cancellationToken: cancellationToken); + Console.WriteLine(" Wrote to shared state: taxRate = 8.5%"); + + // Start the audit trail in shared state + AuditEntry audit = new("ValidateOrder", DateTime.UtcNow.ToString("o"), $"Validated order {message}"); + await context.QueueStateUpdateAsync("auditValidate", audit, cancellationToken: cancellationToken); + Console.WriteLine(" Wrote to shared state: auditValidate"); + + await context.YieldOutputAsync($"Order '{message}' validated. Customer: {details.CustomerName}, Amount: {details.Amount:C}", cancellationToken); + + return details; + } +} + +/// +/// Enriches the order with shipping information. +/// Reads the audit trail from shared state and appends its own entry. +/// Uses ReadOrInitStateAsync to lazily initialize a shipping tier. +/// Demonstrates custom scopes by writing shipping details under the "shipping" scope. +/// +internal sealed class EnrichOrder() : Executor("EnrichOrder") +{ + public override async ValueTask HandleAsync( + OrderDetails message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + await Task.Delay(TimeSpan.FromMilliseconds(200), cancellationToken); + + // Use ReadOrInitStateAsync — only initializes if no value exists yet + string shippingTier = await context.ReadOrInitStateAsync( + "shippingTier", + () => "Express", + cancellationToken: cancellationToken); + Console.WriteLine($" Read from shared state: shippingTier = {shippingTier}"); + + // Write carrier under a custom "shipping" scope. + // This keeps the key separate from keys written without a scope, + // so "carrier" here won't collide with a "carrier" key written elsewhere. 
+ await context.QueueStateUpdateAsync("carrier", "Contoso Express", scopeName: "shipping", cancellationToken: cancellationToken); + Console.WriteLine(" Wrote to shared state: carrier = Contoso Express (scope: shipping)"); + + // Verify we can read the audit entry from the previous step + AuditEntry? previousAudit = await context.ReadStateAsync("auditValidate", cancellationToken: cancellationToken); + string auditStatus = previousAudit is not null ? $"(previous step: {previousAudit.Step})" : "(no prior audit)"; + Console.WriteLine($" Read from shared state: auditValidate {auditStatus}"); + + // Append our own audit entry + AuditEntry audit = new("EnrichOrder", DateTime.UtcNow.ToString("o"), $"Enriched with {shippingTier} shipping {auditStatus}"); + await context.QueueStateUpdateAsync("auditEnrich", audit, cancellationToken: cancellationToken); + Console.WriteLine(" Wrote to shared state: auditEnrich"); + + await context.YieldOutputAsync($"Order enriched. Shipping: {shippingTier} {auditStatus}", cancellationToken); + + return message; + } +} + +/// +/// Processes payment using the tax rate from shared state (written by ValidateOrder). +/// The tax rate is side-channel data — it doesn't flow through return values. 
+/// +internal sealed class ProcessPayment() : Executor("ProcessPayment") +{ + public override async ValueTask HandleAsync( + OrderDetails message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + await Task.Delay(TimeSpan.FromMilliseconds(300), cancellationToken); + + // Read tax rate written by ValidateOrder — not available in the message chain + decimal taxRate = await context.ReadOrInitStateAsync("taxRate", () => 0.0m, cancellationToken: cancellationToken); + Console.WriteLine($" Read from shared state: taxRate = {taxRate:P1}"); + + decimal tax = message.Amount * taxRate; + decimal total = message.Amount + tax; + string paymentRef = $"PAY-{Guid.NewGuid():N}"[..16]; + + // Append audit entry + AuditEntry audit = new("ProcessPayment", DateTime.UtcNow.ToString("o"), $"Charged {total:C} (tax: {tax:C})"); + await context.QueueStateUpdateAsync("auditPayment", audit, cancellationToken: cancellationToken); + Console.WriteLine(" Wrote to shared state: auditPayment"); + + await context.YieldOutputAsync($"Payment processed. Total: {total:C} (tax: {tax:C}). Ref: {paymentRef}", cancellationToken); + + return paymentRef; + } +} + +/// +/// Generates the final invoice by reading the full audit trail from shared state. +/// Demonstrates reading multiple state entries written by different executors +/// and clearing a scope with . +/// +internal sealed class GenerateInvoice() : Executor("GenerateInvoice") +{ + public override async ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + + // Read the full audit trail from shared state — each step wrote its own entry + AuditEntry? validateAudit = await context.ReadStateAsync("auditValidate", cancellationToken: cancellationToken); + AuditEntry? enrichAudit = await context.ReadStateAsync("auditEnrich", cancellationToken: cancellationToken); + AuditEntry? 
paymentAudit = await context.ReadStateAsync("auditPayment", cancellationToken: cancellationToken); + int auditCount = new[] { validateAudit, enrichAudit, paymentAudit }.Count(a => a is not null); + Console.WriteLine($" Read from shared state: {auditCount} audit entries"); + + // Read carrier from the "shipping" scope (written by EnrichOrder) + string? carrier = await context.ReadStateAsync("carrier", scopeName: "shipping", cancellationToken: cancellationToken); + Console.WriteLine($" Read from shared state: carrier = {carrier} (scope: shipping)"); + + // Clear the "shipping" scope — no longer needed after invoice generation. + await context.QueueClearScopeAsync("shipping", cancellationToken); + Console.WriteLine(" Cleared shared state scope: shipping"); + + string auditSummary = string.Join(" → ", new[] + { + validateAudit?.Step, enrichAudit?.Step, paymentAudit?.Step + }.Where(s => s is not null)); + + return $"Invoice complete. Payment: {message}. Audit trail: [{auditSummary}]"; + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/Program.cs new file mode 100644 index 0000000000..2513cc2dad --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/Program.cs @@ -0,0 +1,117 @@ +// Copyright (c) Microsoft. All rights reserved. + +// ═══════════════════════════════════════════════════════════════════════════════ +// SAMPLE: Shared State During Workflow Execution +// ═══════════════════════════════════════════════════════════════════════════════ +// +// This sample demonstrates how executors in a durable workflow can share state +// via IWorkflowContext. State is persisted across supersteps and survives +// process restarts because the orchestration passes it to each activity. +// +// Key concepts: +// 1. QueueStateUpdateAsync - Write a value to shared state +// 2. 
ReadStateAsync - Read a value written by a previous executor +// 3. ReadOrInitStateAsync - Read or lazily initialize a state value +// 4. QueueClearScopeAsync - Clear all entries under a scope +// 5. RequestHaltAsync - Stop the workflow early (e.g., validation failure) +// +// Workflow: ValidateOrder -> EnrichOrder -> ProcessPayment -> GenerateInvoice +// +// Return values carry primary business data through the pipeline (OrderDetails, +// payment ref). Shared state carries side-channel data that doesn't belong in +// the message chain: a tax rate (set by ValidateOrder, read by ProcessPayment) +// and an audit trail (each executor appends its own entry). +// ═══════════════════════════════════════════════════════════════════════════════ + +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using WorkflowSharedState; + +// Get DTS connection string from environment variable +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? 
"Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; + +// Define executors +ValidateOrder validateOrder = new(); +EnrichOrder enrichOrder = new(); +ProcessPayment processPayment = new(); +GenerateInvoice generateInvoice = new(); + +// Build the workflow: ValidateOrder -> EnrichOrder -> ProcessPayment -> GenerateInvoice +Workflow orderPipeline = new WorkflowBuilder(validateOrder) + .WithName("OrderPipeline") + .WithDescription("Order processing pipeline with shared state across executors") + .AddEdge(validateOrder, enrichOrder) + .AddEdge(enrichOrder, processPayment) + .AddEdge(processPayment, generateInvoice) + .Build(); + +// Configure host with durable workflow support +IHost host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) + .ConfigureServices(services => + { + services.ConfigureDurableWorkflows( + workflowOptions => workflowOptions.AddWorkflow(orderPipeline), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); + }) + .Build(); + +await host.StartAsync(); + +IWorkflowClient workflowClient = host.Services.GetRequiredService(); + +Console.WriteLine("Shared State Workflow Demo"); +Console.WriteLine("Workflow: ValidateOrder -> EnrichOrder -> ProcessPayment -> GenerateInvoice"); +Console.WriteLine(); +Console.WriteLine("Enter an order ID (or 'exit'):"); + +while (true) +{ + Console.Write("> "); + string? 
input = Console.ReadLine(); + if (string.IsNullOrWhiteSpace(input) || input.Equals("exit", StringComparison.OrdinalIgnoreCase)) + { + break; + } + + try + { + // Start the workflow and stream events to see shared state in action + IStreamingWorkflowRun run = await workflowClient.StreamAsync(orderPipeline, input); + Console.WriteLine($"Started run: {run.RunId}"); + + await foreach (WorkflowEvent evt in run.WatchStreamAsync()) + { + switch (evt) + { + case WorkflowOutputEvent e: + Console.WriteLine($" [Output] {e.ExecutorId}: {e.Data}"); + break; + + case DurableWorkflowCompletedEvent e: + Console.WriteLine($" Completed: {e.Result}"); + break; + + case DurableWorkflowFailedEvent e: + Console.WriteLine($" Failed: {e.ErrorMessage}"); + break; + } + } + } + catch (Exception ex) + { + Console.WriteLine($"Error: {ex.Message}"); + } + + Console.WriteLine(); +} + +await host.StopAsync(); diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/README.md b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/README.md new file mode 100644 index 0000000000..31ff55ce84 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/06_WorkflowSharedState/README.md @@ -0,0 +1,71 @@ +# Shared State Workflow Sample + +This sample demonstrates how executors in a durable workflow can share state via `IWorkflowContext`. State written by one executor is accessible to all downstream executors, persisted across supersteps, and survives process restarts. 
+ +## Key Concepts Demonstrated + +- Writing state with `QueueStateUpdateAsync` — executors store data for downstream executors +- Reading state with `ReadStateAsync` — executors access data written by earlier executors +- Lazy initialization with `ReadOrInitStateAsync` — initialize state only if not already present +- Custom scopes with `scopeName` — partition state into isolated namespaces (e.g., `"shipping"`) +- Clearing scopes with `QueueClearScopeAsync` — remove all entries under a scope when no longer needed +- Early termination with `RequestHaltAsync` — halt the workflow when validation fails +- State persistence across supersteps — the orchestration passes shared state to each executor +- Event streaming with `IStreamingWorkflowRun` — observe executor progress in real time + +## Workflow + +**OrderPipeline**: `ValidateOrder` → `EnrichOrder` → `ProcessPayment` → `GenerateInvoice` + +Return values carry primary business data through the pipeline (`OrderDetails` → `OrderDetails` → payment ref → invoice string). Shared state carries side-channel data that doesn't belong in the message chain: + +| Executor | Returns (message flow) | Reads from State | Writes to State | +|----------|----------------------|-----------------|-----------------| +| **ValidateOrder** | `OrderDetails` | — | `taxRate`, `auditValidate` | +| **EnrichOrder** | `OrderDetails` (pass-through) | `auditValidate` | `shippingTier`, `auditEnrich`, `carrier` (scope: shipping) | +| **ProcessPayment** | payment ref string | `taxRate` | `auditPayment` | +| **GenerateInvoice** | invoice string | `auditValidate`, `auditEnrich`, `auditPayment`, `carrier` (scope: shipping) | clears `shipping` scope | + +> [!NOTE] +> `EnrichOrder` writes `carrier` under the `"shipping"` scope using `scopeName: "shipping"`. This keeps the key separate from keys written without a scope, so `"carrier"` in the `"shipping"` scope won't collide with a `"carrier"` key written elsewhere. 
+ +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for more information on how to configure the environment, including how to install and run common sample dependencies. + +## Running the Sample + +```bash +dotnet run +``` + +Enter an order ID when prompted. The workflow will process the order through all four executors, streaming events as they occur: + +```text +> ORD-001 +Started run: abc123 + Wrote to shared state: taxRate = 8.5% + Wrote to shared state: auditValidate + [Output] ValidateOrder: Order 'ORD-001' validated. Customer: Jerry, Amount: $249.99 + Read from shared state: shippingTier = Express + Wrote to shared state: carrier = Contoso Express (scope: shipping) + Read from shared state: auditValidate (previous step: ValidateOrder) + Wrote to shared state: auditEnrich + [Output] EnrichOrder: Order enriched. Shipping: Express (previous step: ValidateOrder) + Read from shared state: taxRate = 8.5% + Wrote to shared state: auditPayment + [Output] ProcessPayment: Payment processed. Total: $271.24 (tax: $21.25). Ref: PAY-abc123def456 + Read from shared state: 3 audit entries + Read from shared state: carrier = Contoso Express (scope: shipping) + Cleared shared state scope: shipping + [Output] GenerateInvoice: Invoice complete. Payment: "PAY-abc123def456". Audit trail: [ValidateOrder → EnrichOrder → ProcessPayment] + Completed: Invoice complete. Payment: "PAY-abc123def456". Audit trail: [ValidateOrder → EnrichOrder → ProcessPayment] +``` + +### Viewing Workflows in the DTS Dashboard + +After running a workflow, you can navigate to the Durable Task Scheduler (DTS) dashboard to inspect the orchestration status, executor inputs/outputs, and events. + +If you are using the DTS emulator, the dashboard is available at `http://localhost:8082`. + +To inspect shared state in the dashboard, click on an executor to view its input and output. 
The input contains a snapshot of the shared state the executor ran with, and the output includes any state updates it made (as `stateUpdates` with scoped keys). diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/07_SubWorkflows.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/07_SubWorkflows.csproj new file mode 100644 index 0000000000..d8d36ead01 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/07_SubWorkflows.csproj @@ -0,0 +1,28 @@ + + + net10.0 + Exe + enable + enable + SubWorkflows + SubWorkflows + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/Executors.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/Executors.cs new file mode 100644 index 0000000000..121db7af67 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/Executors.cs @@ -0,0 +1,232 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace SubWorkflows; + +/// +/// Event emitted when the fraud check risk score is calculated. +/// +internal sealed class FraudRiskAssessedEvent(int riskScore) : WorkflowEvent($"Risk score: {riskScore}/100") +{ + public int RiskScore => riskScore; +} + +/// +/// Represents an order being processed through the workflow. +/// +internal sealed class OrderInfo +{ + public required string OrderId { get; set; } + + public decimal Amount { get; set; } + + public string? PaymentTransactionId { get; set; } + + public string? TrackingNumber { get; set; } + + public string? Carrier { get; set; } +} + +// Main workflow executors + +/// +/// Entry point executor that receives the order ID and creates an OrderInfo object. 
+/// +internal sealed class OrderReceived() : Executor("OrderReceived") +{ + public override ValueTask HandleAsync(string message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Cyan; + Console.WriteLine($"[OrderReceived] Processing order '{message}'"); + Console.ResetColor(); + + OrderInfo order = new() + { + OrderId = message, + Amount = 99.99m // Simulated order amount + }; + + return ValueTask.FromResult(order); + } +} + +/// +/// Final executor that outputs the completed order summary. +/// +internal sealed class OrderCompleted() : Executor("OrderCompleted") +{ + public override ValueTask HandleAsync(OrderInfo message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Green; + Console.WriteLine("┌─────────────────────────────────────────────────────────────────┐"); + Console.WriteLine($"│ [OrderCompleted] Order '{message.OrderId}' successfully processed!"); + Console.WriteLine($"│ Payment: {message.PaymentTransactionId}"); + Console.WriteLine($"│ Shipping: {message.Carrier} - {message.TrackingNumber}"); + Console.WriteLine("└─────────────────────────────────────────────────────────────────┘"); + Console.ResetColor(); + + return ValueTask.FromResult($"Order {message.OrderId} completed. Tracking: {message.TrackingNumber}"); + } +} + +// Payment sub-workflow executors + +/// +/// Validates payment information for an order. 
+/// +internal sealed class ValidatePayment() : Executor("ValidatePayment") +{ + public override async ValueTask HandleAsync(OrderInfo message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($" [Payment/ValidatePayment] Validating payment for order '{message.OrderId}'..."); + Console.ResetColor(); + + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($" [Payment/ValidatePayment] Payment validated for ${message.Amount}"); + Console.ResetColor(); + + return message; + } +} + +/// +/// Charges the payment for an order. +/// +internal sealed class ChargePayment() : Executor("ChargePayment") +{ + public override async ValueTask HandleAsync(OrderInfo message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($" [Payment/ChargePayment] Charging ${message.Amount} for order '{message.OrderId}'..."); + Console.ResetColor(); + + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + + message.PaymentTransactionId = $"TXN-{Guid.NewGuid().ToString("N")[..8].ToUpperInvariant()}"; + + Console.ForegroundColor = ConsoleColor.Yellow; + Console.WriteLine($" [Payment/ChargePayment] ✓ Payment processed: {message.PaymentTransactionId}"); + Console.ResetColor(); + + return message; + } +} + +// FraudCheck sub-sub-workflow executors (nested inside Payment) + +/// +/// Analyzes transaction patterns for potential fraud. 
+/// +internal sealed class AnalyzePatterns() : Executor("AnalyzePatterns") +{ + public override async ValueTask HandleAsync(OrderInfo message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine($" [Payment/FraudCheck/AnalyzePatterns] Analyzing patterns for order '{message.OrderId}'..."); + Console.ResetColor(); + + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + + // Store analysis results in shared state for the next executor in this sub-workflow + int patternsFound = new Random().Next(0, 5); + await context.QueueStateUpdateAsync("patternsFound", patternsFound, cancellationToken: cancellationToken); + + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine($" [Payment/FraudCheck/AnalyzePatterns] ✓ Pattern analysis complete ({patternsFound} suspicious patterns)"); + Console.ResetColor(); + + return message; + } +} + +/// +/// Calculates a risk score for the transaction. 
+/// +internal sealed class CalculateRiskScore() : Executor("CalculateRiskScore") +{ + public override async ValueTask HandleAsync(OrderInfo message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine($" [Payment/FraudCheck/CalculateRiskScore] Calculating risk score for order '{message.OrderId}'..."); + Console.ResetColor(); + + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + + // Read the pattern count from shared state (written by AnalyzePatterns) + int patternsFound = await context.ReadStateAsync("patternsFound", cancellationToken: cancellationToken); + int riskScore = Math.Min(patternsFound * 20 + new Random().Next(1, 20), 100); + + // Emit a workflow event from within a nested sub-workflow + await context.AddEventAsync(new FraudRiskAssessedEvent(riskScore), cancellationToken); + + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine($" [Payment/FraudCheck/CalculateRiskScore] ✓ Risk score: {riskScore}/100 (based on {patternsFound} patterns)"); + Console.ResetColor(); + + return message; + } +} + +// Shipping sub-workflow executors + +/// +/// Selects a shipping carrier for an order. +/// +/// +/// This executor uses (void return) combined with +/// to forward the order to the next +/// connected executor (CreateShipment). This demonstrates explicit typed message passing +/// as an alternative to returning a value from the handler. 
+/// +internal sealed class SelectCarrier() : Executor("SelectCarrier") +{ + public override async ValueTask HandleAsync(OrderInfo message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.WriteLine(); + Console.ForegroundColor = ConsoleColor.Blue; + Console.WriteLine($" [Shipping/SelectCarrier] Selecting carrier for order '{message.OrderId}'..."); + Console.ResetColor(); + + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + + message.Carrier = message.Amount > 50 ? "Express" : "Standard"; + + Console.ForegroundColor = ConsoleColor.Blue; + Console.WriteLine($" [Shipping/SelectCarrier] ✓ Selected carrier: {message.Carrier}"); + Console.ResetColor(); + + // Use SendMessageAsync to forward the updated order to connected executors. + // With a void-return executor, this is the mechanism for passing data downstream. + await context.SendMessageAsync(message, cancellationToken: cancellationToken); + } +} + +/// +/// Creates shipment and generates tracking number. 
+/// +internal sealed class CreateShipment() : Executor("CreateShipment") +{ + public override async ValueTask HandleAsync(OrderInfo message, IWorkflowContext context, CancellationToken cancellationToken = default) + { + Console.ForegroundColor = ConsoleColor.Blue; + Console.WriteLine($" [Shipping/CreateShipment] Creating shipment for order '{message.OrderId}'..."); + Console.ResetColor(); + + await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken); + + message.TrackingNumber = $"TRACK-{Guid.NewGuid().ToString("N")[..10].ToUpperInvariant()}"; + + Console.ForegroundColor = ConsoleColor.Blue; + Console.WriteLine($" [Shipping/CreateShipment] ✓ Shipment created: {message.TrackingNumber}"); + Console.ResetColor(); + + return message; + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/Program.cs new file mode 100644 index 0000000000..d542f4aba5 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/Program.cs @@ -0,0 +1,146 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This sample demonstrates nested sub-workflows. A sub-workflow can act as an executor +// within another workflow, including multi-level nesting (sub-workflow within sub-workflow). + +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using SubWorkflows; + +// Get DTS connection string from environment variable +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? 
"Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; + +// Build the FraudCheck sub-workflow (this will be nested inside the Payment sub-workflow) +AnalyzePatterns analyzePatterns = new(); +CalculateRiskScore calculateRiskScore = new(); + +Workflow fraudCheckWorkflow = new WorkflowBuilder(analyzePatterns) + .WithName("SubFraudCheck") + .WithDescription("Analyzes transaction patterns and calculates risk score") + .AddEdge(analyzePatterns, calculateRiskScore) + .Build(); + +// Build the Payment sub-workflow: ValidatePayment -> FraudCheck (sub-workflow) -> ChargePayment +ValidatePayment validatePayment = new(); +ExecutorBinding fraudCheckExecutor = fraudCheckWorkflow.BindAsExecutor("FraudCheck"); +ChargePayment chargePayment = new(); + +Workflow paymentWorkflow = new WorkflowBuilder(validatePayment) + .WithName("SubPaymentProcessing") + .WithDescription("Validates and processes payment for an order") + .AddEdge(validatePayment, fraudCheckExecutor) + .AddEdge(fraudCheckExecutor, chargePayment) + .Build(); + +// Build the Shipping sub-workflow: SelectCarrier -> CreateShipment +SelectCarrier selectCarrier = new(); +CreateShipment createShipment = new(); + +Workflow shippingWorkflow = new WorkflowBuilder(selectCarrier) + .WithName("SubShippingArrangement") + .WithDescription("Selects carrier and creates shipment") + .AddEdge(selectCarrier, createShipment) + .Build(); + +// Build the main workflow using sub-workflows as executors +// OrderReceived -> Payment (sub-workflow) -> Shipping (sub-workflow) -> OrderCompleted +OrderReceived orderReceived = new(); +OrderCompleted orderCompleted = new(); +ExecutorBinding paymentExecutor = paymentWorkflow.BindAsExecutor("Payment"); +ExecutorBinding shippingExecutor = shippingWorkflow.BindAsExecutor("Shipping"); + +Workflow orderProcessingWorkflow = new WorkflowBuilder(orderReceived) + .WithName("OrderProcessing") + .WithDescription("Processes an order through payment and shipping") + .AddEdge(orderReceived, 
paymentExecutor) + .AddEdge(paymentExecutor, shippingExecutor) + .AddEdge(shippingExecutor, orderCompleted) + .Build(); + +// Configure and start the host +// Register only the main workflow - sub-workflows are discovered automatically! +IHost host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) + .ConfigureServices(services => + { + services.ConfigureDurableWorkflows( + workflowOptions => workflowOptions.AddWorkflow(orderProcessingWorkflow), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); + }) + .Build(); + +await host.StartAsync(); + +IWorkflowClient workflowClient = host.Services.GetRequiredService(); + +Console.WriteLine("Durable Sub-Workflows Sample"); +Console.WriteLine("Workflow: OrderReceived -> Payment(sub) -> Shipping(sub) -> OrderCompleted"); +Console.WriteLine(" Payment contains nested FraudCheck sub-workflow (Level 2 nesting)"); +Console.WriteLine(); +Console.WriteLine("Enter an order ID (or 'exit'):"); + +while (true) +{ + Console.Write("> "); + string? 
input = Console.ReadLine(); + if (string.IsNullOrWhiteSpace(input) || input.Equals("exit", StringComparison.OrdinalIgnoreCase)) + { + break; + } + + try + { + await StartNewWorkflowAsync(input, orderProcessingWorkflow, workflowClient); + } + catch (Exception ex) + { + Console.WriteLine($"Error: {ex.Message}"); + } + + Console.WriteLine(); +} + +await host.StopAsync(); + +// Start a new workflow using streaming to observe events (including from sub-workflows) +static async Task StartNewWorkflowAsync(string orderId, Workflow workflow, IWorkflowClient client) +{ + Console.WriteLine($"\nStarting order processing for '{orderId}'..."); + + IStreamingWorkflowRun run = await client.StreamAsync(workflow, orderId); + Console.WriteLine($"Run ID: {run.RunId}"); + Console.WriteLine(); + + await foreach (WorkflowEvent evt in run.WatchStreamAsync()) + { + switch (evt) + { + // Custom event emitted from the FraudCheck sub-sub-workflow + case FraudRiskAssessedEvent e: + Console.ForegroundColor = ConsoleColor.DarkYellow; + Console.WriteLine($" [Event from sub-workflow] {e.GetType().Name}: Risk score {e.RiskScore}/100"); + Console.ResetColor(); + break; + + case DurableWorkflowCompletedEvent e: + Console.ForegroundColor = ConsoleColor.Green; + Console.WriteLine($"✓ Order completed: {e.Result}"); + Console.ResetColor(); + break; + + case DurableWorkflowFailedEvent e: + Console.ForegroundColor = ConsoleColor.Red; + Console.WriteLine($"✗ Failed: {e.ErrorMessage}"); + Console.ResetColor(); + break; + } + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/README.md b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/README.md new file mode 100644 index 0000000000..83968eee0e --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows/README.md @@ -0,0 +1,105 @@ +# Sub-Workflows Sample (Nested Workflows) + +This sample demonstrates how to compose complex workflows from simpler, reusable sub-workflows. 
Sub-workflows are built using `WorkflowBuilder` and embedded as executors via `BindAsExecutor()`. Unlike the in-process workflow runner, the durable workflow backend persists execution state across process restarts — each sub-workflow runs as a separate orchestration instance on the Durable Task Scheduler, providing independent checkpointing, fault tolerance, and hierarchical visualization in the DTS dashboard. + +## Key Concepts Demonstrated + +- **Sub-workflows**: Using `Workflow.BindAsExecutor()` to embed a workflow as an executor in another workflow +- **Multi-level nesting**: Sub-workflows within sub-workflows (Level 2 nesting) +- **Automatic discovery**: Registering only the main workflow; sub-workflows are discovered automatically +- **Failure isolation**: Each sub-workflow runs as a separate orchestration instance on the DTS backend +- **Hierarchical visualization**: Parent-child orchestration hierarchy visible in the DTS dashboard +- **Event propagation**: Custom workflow events (`FraudRiskAssessedEvent`) bubble up from nested sub-workflows to the streaming client +- **Message passing**: Using `Executor` (void return) with `SendMessageAsync` to forward typed messages to connected executors (`SelectCarrier`) +- **Shared state within sub-workflows**: Using `QueueStateUpdateAsync`/`ReadStateAsync` to share data between executors within a sub-workflow (`AnalyzePatterns` → `CalculateRiskScore`) + +## Overview + +The sample implements an order processing workflow composed of two sub-workflows, one of which contains its own nested sub-workflow: + +``` +OrderProcessing (main workflow) +├── OrderReceived +├── Payment (sub-workflow) +│ ├── ValidatePayment +│ ├── FraudCheck (sub-sub-workflow) ← Level 2 nesting! 
+│ │ ├── AnalyzePatterns +│ │ └── CalculateRiskScore +│ └── ChargePayment +├── Shipping (sub-workflow) +│ ├── SelectCarrier ← Uses SendMessageAsync (void-return executor) +│ └── CreateShipment +└── OrderCompleted +``` + +| Executor | Sub-Workflow | Description | +|----------|-------------|-------------| +| OrderReceived | Main | Receives order ID and creates order info | +| ValidatePayment | Payment | Validates payment information | +| AnalyzePatterns | FraudCheck (nested in Payment) | Analyzes transaction patterns, stores results in shared state | +| CalculateRiskScore | FraudCheck (nested in Payment) | Reads shared state, calculates risk score, emits `FraudRiskAssessedEvent` | +| ChargePayment | Payment | Charges payment amount | +| SelectCarrier | Shipping | Selects carrier using `SendMessageAsync` (void-return executor) | +| CreateShipment | Shipping | Creates shipment with tracking | +| OrderCompleted | Main | Outputs completed order summary | + +## How Sub-Workflows Work + +For an introduction to sub-workflows and the `BindAsExecutor()` API, see the [Sub-Workflows foundational sample](../../../../03-workflows/_StartHere/05_SubWorkflows). + +This durable sample extends the same pattern — the key difference is that each sub-workflow runs as a **separate orchestration instance** on the Durable Task Scheduler, providing independent checkpointing, fault tolerance, and hierarchical visualization in the DTS dashboard. + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for information on configuring the environment, including how to install and run the Durable Task Scheduler. 
+ +## Running the Sample + +```bash +cd dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/07_SubWorkflows +dotnet run --framework net10.0 +``` + +### Sample Output + +```text +Durable Sub-Workflows Sample +Workflow: OrderReceived -> Payment(sub) -> Shipping(sub) -> OrderCompleted + Payment contains nested FraudCheck sub-workflow (Level 2 nesting) + +Enter an order ID (or 'exit'): +> ORD-001 +Starting order processing for 'ORD-001'... +Run ID: abc123... + +[OrderReceived] Processing order 'ORD-001' + [Payment/ValidatePayment] Validating payment for order 'ORD-001'... + [Payment/ValidatePayment] Payment validated for $99.99 + [Payment/FraudCheck/AnalyzePatterns] Analyzing patterns for order 'ORD-001'... + [Payment/FraudCheck/AnalyzePatterns] ✓ Pattern analysis complete (2 suspicious patterns) + [Payment/FraudCheck/CalculateRiskScore] Calculating risk score for order 'ORD-001'... + [Payment/FraudCheck/CalculateRiskScore] ✓ Risk score: 53/100 (based on 2 patterns) + [Event from sub-workflow] FraudRiskAssessedEvent: Risk score 53/100 + [Payment/ChargePayment] Charging $99.99 for order 'ORD-001'... + [Payment/ChargePayment] ✓ Payment processed: TXN-A1B2C3D4 + [Shipping/SelectCarrier] Selecting carrier for order 'ORD-001'... + [Shipping/SelectCarrier] ✓ Selected carrier: Express + [Shipping/CreateShipment] Creating shipment for order 'ORD-001'... + [Shipping/CreateShipment] ✓ Shipment created: TRACK-I9J0K1L2M3 +┌─────────────────────────────────────────────────────────────────┐ +│ [OrderCompleted] Order 'ORD-001' successfully processed! +│ Payment: TXN-A1B2C3D4 +│ Shipping: Express - TRACK-I9J0K1L2M3 +└─────────────────────────────────────────────────────────────────┘ +✓ Order completed: Order ORD-001 completed. Tracking: TRACK-I9J0K1L2M3 + +> exit +``` + +### Viewing Workflows in the DTS Dashboard + +After running the workflow, you can navigate to the Durable Task Scheduler (DTS) dashboard to inspect the orchestration hierarchy, including sub-orchestrations. 
+ +If you are using the DTS emulator, the dashboard is available at `http://localhost:8082`. + +Because each sub-workflow runs as a separate orchestration instance, the dashboard shows a parent-child hierarchy: the top-level `OrderProcessing` orchestration with `Payment` and `Shipping` as child orchestrations, and `FraudCheck` nested under `Payment`. You can click into each orchestration to inspect its executor inputs/outputs, events, and execution timeline independently. diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/08_WorkflowHITL.csproj b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/08_WorkflowHITL.csproj new file mode 100644 index 0000000000..a9103b6e48 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/08_WorkflowHITL.csproj @@ -0,0 +1,28 @@ + + + net10.0 + Exe + enable + enable + WorkflowHITL + WorkflowHITL + + + + + + + + + + + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/Executors.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/Executors.cs new file mode 100644 index 0000000000..2006b1cd19 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/Executors.cs @@ -0,0 +1,81 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace WorkflowHITL; + +/// +/// Represents an expense approval request. +/// +/// The unique identifier of the expense. +/// The amount of the expense. +/// The name of the employee submitting the expense. +public record ApprovalRequest(string ExpenseId, decimal Amount, string EmployeeName); + +/// +/// Represents the response to an approval request. +/// +/// Whether the expense was approved. +/// Optional comments from the approver. +public record ApprovalResponse(bool Approved, string? Comments); + +/// +/// Retrieves expense details and creates an approval request. 
+/// +internal sealed class CreateApprovalRequest() : Executor("RetrieveRequest") +{ + /// + public override ValueTask HandleAsync( + string message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + // In a real scenario, this would look up expense details from a database + return new ValueTask(new ApprovalRequest(message, 1500.00m, "Jerry")); + } +} + +/// +/// Prepares the approval request for finance review after manager approval. +/// +internal sealed class PrepareFinanceReview() : Executor("PrepareFinanceReview") +{ + /// + public override ValueTask HandleAsync( + ApprovalResponse message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + if (!message.Approved) + { + throw new InvalidOperationException("Cannot proceed to finance review — manager denied the expense."); + } + + // In a real scenario, this would retrieve the original expense details + return new ValueTask(new ApprovalRequest("EXP-2025-001", 1500.00m, "Jerry")); + } +} + +/// +/// Processes the expense reimbursement based on the parallel approval responses from budget and compliance. +/// +internal sealed class ExpenseReimburse() : Executor("Reimburse") +{ + /// + public override async ValueTask HandleAsync( + ApprovalResponse[] message, + IWorkflowContext context, + CancellationToken cancellationToken = default) + { + // Check that all parallel approvals passed + ApprovalResponse? denied = Array.Find(message, r => !r.Approved); + if (denied is not null) + { + return $"Expense reimbursement denied. 
Comments: {denied.Comments}"; + } + + // Simulate payment processing + await Task.Delay(1000, cancellationToken); + return $"Expense reimbursed at {DateTime.UtcNow:O}"; + } +} diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/Program.cs b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/Program.cs new file mode 100644 index 0000000000..bc8fe00341 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/Program.cs @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft. All rights reserved. + +// This sample demonstrates a Human-in-the-Loop (HITL) workflow using Durable Tasks. +// +// ┌──────────────────────┐ ┌────────────────┐ ┌─────────────────────┐ ┌────────────────────┐ +// │ CreateApprovalRequest│──►│ManagerApproval │──►│PrepareFinanceReview │──┬►│ BudgetApproval │──┐ +// └──────────────────────┘ │ (RequestPort) │ └─────────────────────┘ │ │ (RequestPort) │ │ +// └────────────────┘ │ └────────────────────┘ │ ┌─────────────────┐ +// │ ├─►│ExpenseReimburse │ +// │ ┌────────────────────┐ │ └─────────────────┘ +// └►│ComplianceApproval │──┘ +// │ (RequestPort) │ +// └────────────────────┘ +// +// The workflow pauses at three RequestPorts — one for the manager, then two in parallel for finance. +// After manager approval, BudgetApproval and ComplianceApproval run concurrently via fan-out/fan-in. + +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask.Client.AzureManaged; +using Microsoft.DurableTask.Worker.AzureManaged; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Hosting; +using Microsoft.Extensions.Logging; +using WorkflowHITL; + +string dtsConnectionString = Environment.GetEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING") + ?? 
"Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"; + +// Define executors and RequestPorts for the three HITL pause points +CreateApprovalRequest createRequest = new(); +RequestPort managerApproval = RequestPort.Create("ManagerApproval"); +PrepareFinanceReview prepareFinanceReview = new(); +RequestPort budgetApproval = RequestPort.Create("BudgetApproval"); +RequestPort complianceApproval = RequestPort.Create("ComplianceApproval"); +ExpenseReimburse reimburse = new(); + +// Build the workflow: CreateApprovalRequest -> ManagerApproval -> PrepareFinanceReview -> [BudgetApproval AND ComplianceApproval] -> ExpenseReimburse +Workflow expenseApproval = new WorkflowBuilder(createRequest) + .WithName("ExpenseReimbursement") + .WithDescription("Expense reimbursement with manager and parallel finance approvals") + .AddEdge(createRequest, managerApproval) + .AddEdge(managerApproval, prepareFinanceReview) + .AddFanOutEdge(prepareFinanceReview, [budgetApproval, complianceApproval]) + .AddFanInBarrierEdge([budgetApproval, complianceApproval], reimburse) + .Build(); + +IHost host = Host.CreateDefaultBuilder(args) + .ConfigureLogging(logging => logging.SetMinimumLevel(LogLevel.Warning)) + .ConfigureServices(services => + { + services.ConfigureDurableWorkflows( + options => options.AddWorkflow(expenseApproval), + workerBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString), + clientBuilder: builder => builder.UseDurableTaskScheduler(dtsConnectionString)); + }) + .Build(); + +await host.StartAsync(); + +IWorkflowClient workflowClient = host.Services.GetRequiredService(); + +// Start the workflow with streaming to observe events including HITL pauses +string expenseId = "EXP-2025-001"; +Console.WriteLine($"Starting expense reimbursement workflow for expense: {expenseId}"); +IStreamingWorkflowRun run = await workflowClient.StreamAsync(expenseApproval, expenseId); +Console.WriteLine($"Workflow started with instance ID: {run.RunId}\n"); + +// Watch 
for workflow events — handle HITL requests as they arrive +await foreach (WorkflowEvent evt in run.WatchStreamAsync()) +{ + switch (evt) + { + case DurableWorkflowWaitingForInputEvent requestEvent: + Console.WriteLine($"Workflow paused at RequestPort: {requestEvent.RequestPort.Id}"); + Console.WriteLine($" Input: {requestEvent.Input}"); + + // In a real scenario, this would involve human interaction (UI, email, Teams, etc.) + ApprovalRequest? request = requestEvent.GetInputAs(); + Console.WriteLine($" Approval for: {request?.EmployeeName}, Amount: {request?.Amount:C}"); + + ApprovalResponse approvalResponse = new(Approved: true, Comments: "Approved by manager."); + await run.SendResponseAsync(requestEvent, approvalResponse); + Console.WriteLine($" Response sent: Approved={approvalResponse.Approved}\n"); + break; + + case DurableWorkflowCompletedEvent completedEvent: + Console.WriteLine($"Workflow completed: {completedEvent.Result}"); + break; + + case DurableWorkflowFailedEvent failedEvent: + Console.WriteLine($"Workflow failed: {failedEvent.ErrorMessage}"); + break; + } +} + +await host.StopAsync(); diff --git a/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/README.md b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/README.md new file mode 100644 index 0000000000..f659077371 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL/README.md @@ -0,0 +1,106 @@ +# Workflow Human-in-the-Loop (HITL) Sample + +This sample demonstrates a **Human-in-the-Loop** pattern in durable workflows using `RequestPort`. The workflow pauses execution at a manager approval point, then fans out to two parallel finance approval points — budget and compliance — before resuming. 
+ +## Key Concepts Demonstrated + +- Using `RequestPort` to define external input points in a workflow +- Sequential and parallel HITL pause points in a single workflow using fan-out/fan-in +- Streaming workflow events with `IStreamingWorkflowRun` +- Handling `DurableWorkflowWaitingForInputEvent` to detect HITL pauses +- Using `SendResponseAsync` to provide responses and resume the workflow +- **Durability**: The workflow survives process restarts while waiting for human input + +## Workflow + +This sample implements the following workflow: + +``` +┌──────────────────────┐ ┌────────────────┐ ┌─────────────────────┐ ┌────────────────────┐ +│ CreateApprovalRequest│──►│ManagerApproval │──►│PrepareFinanceReview │──┬►│ BudgetApproval │──┐ +└──────────────────────┘ │ (RequestPort) │ └─────────────────────┘ │ │ (RequestPort) │ │ + └────────────────┘ │ └────────────────────┘ │ ┌─────────────────┐ + │ ├─►│ExpenseReimburse │ + │ ┌────────────────────┐ │ └─────────────────┘ + └►│ComplianceApproval │──┘ + │ (RequestPort) │ + └────────────────────┘ +``` + +| Step | Description | +|------|-------------| +| CreateApprovalRequest | Retrieves expense details and creates an approval request | +| ManagerApproval (RequestPort) | **PAUSES** the workflow and waits for manager approval | +| PrepareFinanceReview | Prepares the request for finance review after manager approval | +| BudgetApproval (RequestPort) | **PAUSES** the workflow and waits for budget approval (parallel) | +| ComplianceApproval (RequestPort) | **PAUSES** the workflow and waits for compliance approval (parallel) | +| ExpenseReimburse | Processes the reimbursement after all approvals pass | + +## How It Works + +A `RequestPort` defines a typed external input point in the workflow: + +```csharp +RequestPort managerApproval = + RequestPort.Create("ManagerApproval"); +``` + +Use `WatchStreamAsync` to observe events. When the workflow reaches a `RequestPort`, a `DurableWorkflowWaitingForInputEvent` is emitted. 
Call `SendResponseAsync` to provide the response and resume the workflow: + +```csharp +await foreach (WorkflowEvent evt in run.WatchStreamAsync()) +{ + switch (evt) + { + case DurableWorkflowWaitingForInputEvent requestEvent: + ApprovalRequest? request = requestEvent.GetInputAs(); + await run.SendResponseAsync(requestEvent, new ApprovalResponse(Approved: true, Comments: "Approved.")); + break; + } +} +``` + +## Environment Setup + +See the [README.md](../../README.md) file in the parent directory for information on configuring the environment, including how to install and run the Durable Task Scheduler. + +## Running the Sample + +```bash +cd dotnet/samples/04-hosting/DurableWorkflows/ConsoleApps/08_WorkflowHITL +dotnet run --framework net10.0 +``` + +### Sample Output + +```text +Starting expense reimbursement workflow for expense: EXP-2025-001 +Workflow started with instance ID: abc123... + +Workflow paused at RequestPort: ManagerApproval + Input: {"expenseId":"EXP-2025-001","amount":1500.00,"employeeName":"Jerry"} + Approval for: Jerry, Amount: $1,500.00 + Response sent: Approved=True + +Workflow paused at RequestPort: BudgetApproval + Input: {"expenseId":"EXP-2025-001","amount":1500.00,"employeeName":"Jerry"} + Approval for: Jerry, Amount: $1,500.00 + Response sent: Approved=True + +Workflow paused at RequestPort: ComplianceApproval + Input: {"expenseId":"EXP-2025-001","amount":1500.00,"employeeName":"Jerry"} + Approval for: Jerry, Amount: $1,500.00 + Response sent: Approved=True + +Workflow completed: Expense reimbursed at 2025-01-23T17:30:00.0000000Z +``` + +### Viewing Workflows in the DTS Dashboard + +After running the sample, you can navigate to the Durable Task Scheduler (DTS) dashboard to visualize the completed orchestration and inspect its execution history. + +If you are using the DTS emulator, the dashboard is available at `http://localhost:8082`. + +1. 
Open the dashboard and look for the orchestration instance matching the instance ID logged in the console output (e.g., `abc123...`). +2. Click into the instance to see the execution timeline, which shows each executor activity and the `WaitForExternalEvent` pauses where the workflow waited for human input — including the two parallel finance approvals. +3. Expand individual activity steps to inspect inputs and outputs — for example, the `ManagerApproval`, `BudgetApproval`, and `ComplianceApproval` external events will show the approval request sent and the response received. diff --git a/dotnet/samples/04-hosting/DurableWorkflows/Directory.Build.props b/dotnet/samples/04-hosting/DurableWorkflows/Directory.Build.props new file mode 100644 index 0000000000..3723bee3cc --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/Directory.Build.props @@ -0,0 +1,5 @@ + + + + + diff --git a/dotnet/samples/04-hosting/DurableWorkflows/README.md b/dotnet/samples/04-hosting/DurableWorkflows/README.md new file mode 100644 index 0000000000..2b7103de50 --- /dev/null +++ b/dotnet/samples/04-hosting/DurableWorkflows/README.md @@ -0,0 +1,50 @@ +# Durable Workflow Samples + +This directory contains samples demonstrating how to build durable workflows using the Microsoft Agent Framework. 
+
+## Environment Setup
+
+### Prerequisites
+
+- [.NET 10 SDK](https://dotnet.microsoft.com/download/dotnet/10.0) or later
+- [Durable Task Scheduler](https://learn.microsoft.com/en-us/azure/azure-functions/durable/durable-task-scheduler/durable-task-scheduler) running locally or in Azure
+
+### Running the Durable Task Scheduler Emulator
+
+To run the emulator locally using Docker (port 8080 is the scheduler endpoint; 8082 exposes the dashboard referenced by the sample READMEs):
+
+```bash
+docker run -d -p 8080:8080 -p 8082:8082 --name durabletask-emulator mcr.microsoft.com/durabletask/emulator:latest
+```
+
+Set the connection string environment variable to point to the local emulator (this matches the default the samples fall back to when the variable is unset):
+
+```bash
+# Linux/macOS
+export DURABLE_TASK_SCHEDULER_CONNECTION_STRING="Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"
+
+# Windows (PowerShell)
+$env:DURABLE_TASK_SCHEDULER_CONNECTION_STRING = "Endpoint=http://localhost:8080;TaskHub=default;Authentication=None"
+```
+
+## Samples
+
+### Console Apps
+
+| Sample | Description |
+|--------|-------------|
+| [01_SequentialWorkflow](ConsoleApps/01_SequentialWorkflow/) | Basic sequential workflow with ordered executor steps |
+| [02_ConcurrentWorkflow](ConsoleApps/02_ConcurrentWorkflow/) | Fan-out/fan-in concurrent workflow execution |
+| [03_ConditionalEdges](ConsoleApps/03_ConditionalEdges/) | Workflows with conditional routing between executors |
+| [05_WorkflowEvents](ConsoleApps/05_WorkflowEvents/) | Publishing and subscribing to workflow events |
+| [06_WorkflowSharedState](ConsoleApps/06_WorkflowSharedState/) | Sharing state across workflow executors |
+| [07_SubWorkflows](ConsoleApps/07_SubWorkflows/) | Nested sub-workflow composition |
+| [08_WorkflowHITL](ConsoleApps/08_WorkflowHITL/) | Human-in-the-loop workflow with approval gates |
+
+### Azure Functions
+
+| Sample | Description |
+|--------|-------------|
+| [01_SequentialWorkflow](AzureFunctions/01_SequentialWorkflow/) | Sequential workflow hosted in Azure Functions |
+| [02_ConcurrentWorkflow](AzureFunctions/02_ConcurrentWorkflow/) | Concurrent workflow hosted in Azure Functions |
+| 
[03_WorkflowHITL](AzureFunctions/03_WorkflowHITL/) | Human-in-the-loop workflow hosted in Azure Functions | diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/CHANGELOG.md b/dotnet/src/Microsoft.Agents.AI.DurableTask/CHANGELOG.md index e3e90fdae0..3ddf3a39c4 100644 --- a/dotnet/src/Microsoft.Agents.AI.DurableTask/CHANGELOG.md +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/CHANGELOG.md @@ -2,20 +2,40 @@ ## [Unreleased] -### Changed +- Added support for durable workflows ([#4436](https://github.com/microsoft/agent-framework/pull/4436)) -- Added TTL configuration for durable agent entities ([#2679](https://github.com/microsoft/agent-framework/pull/2679)) -- Switch to new "Run" method name ([#2843](https://github.com/microsoft/agent-framework/pull/2843)) -- Removed AgentThreadMetadata and used AgentSessionId directly instead ([#3067](https://github.com/microsoft/agent-framework/pull/3067)); -- Renamed AgentThread to AgentSession ([#3430](https://github.com/microsoft/agent-framework/pull/3430)) -- Moved AgentSession.Serialize to AIAgent.SerializeSession ([#3650](https://github.com/microsoft/agent-framework/pull/3650)) -- Renamed serializedSession parameter to serializedState on DeserializeSessionAsync for consistency ([#3681](https://github.com/microsoft/agent-framework/pull/3681)) -- Introduce Core method pattern for Session management methods on AIAgent ([#3699](https://github.com/microsoft/agent-framework/pull/3699)) -- Changed AIAgent.SerializeSession to AIAgent.SerializeSessionAsync ([#3879](https://github.com/microsoft/agent-framework/pull/3879)) -- Changed ChatHistory and AIContext Providers to have pipeline semantics ([#3806](https://github.com/microsoft/agent-framework/pull/3806)) +## v1.0.0-preview.260219.1 + +- [BREAKING] Changed ChatHistory and AIContext Providers to have pipeline semantics ([#3806](https://github.com/microsoft/agent-framework/pull/3806)) - Marked all `RunAsync` overloads as `new`, added missing ones, and added support for primitives 
and arrays ([#3803](https://github.com/microsoft/agent-framework/pull/3803)) - Improve session cast error message quality and consistency ([#3973](https://github.com/microsoft/agent-framework/pull/3973)) +## v1.0.0-preview.260212.1 + +- [BREAKING] Changed AIAgent.SerializeSession to AIAgent.SerializeSessionAsync ([#3879](https://github.com/microsoft/agent-framework/pull/3879)) + +## v1.0.0-preview.260209.1 + +- [BREAKING] Introduce Core method pattern for Session management methods on AIAgent ([#3699](https://github.com/microsoft/agent-framework/pull/3699)) + +## v1.0.0-preview.260205.1 + +- [BREAKING] Moved AgentSession.Serialize to AIAgent.SerializeSession ([#3650](https://github.com/microsoft/agent-framework/pull/3650)) +- [BREAKING] Renamed serializedSession parameter to serializedState on DeserializeSessionAsync for consistency ([#3681](https://github.com/microsoft/agent-framework/pull/3681)) + +## v1.0.0-preview.260127.1 + +- [BREAKING] Renamed AgentThread to AgentSession ([#3430](https://github.com/microsoft/agent-framework/pull/3430)) + +## v1.0.0-preview.260108.1 + +- [BREAKING] Removed AgentThreadMetadata and used AgentSessionId directly instead ([#3067](https://github.com/microsoft/agent-framework/pull/3067)) + +## v1.0.0-preview.251219.1 + +- Added TTL configuration for durable agent entities ([#2679](https://github.com/microsoft/agent-framework/pull/2679)) +- Switch to new "Run" method name ([#2843](https://github.com/microsoft/agent-framework/pull/2843)) + ## v1.0.0-preview.251204.1 - Added orchestration ID to durable agent entity state ([#2137](https://github.com/microsoft/agent-framework/pull/2137)) diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableAgentsOptions.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableAgentsOptions.cs index cefcad323a..1b84f9f49f 100644 --- a/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableAgentsOptions.cs +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableAgentsOptions.cs @@ -141,4 +141,15 @@ 
internal IReadOnlyDictionary> GetAgentFa { return this._agentTimeToLive.TryGetValue(agentName, out TimeSpan? ttl) ? ttl : this.DefaultTimeToLive; } + + /// + /// Determines whether an agent with the specified name is registered. + /// + /// The name of the agent to locate. Cannot be null. + /// true if an agent with the specified name is registered; otherwise, false. + internal bool ContainsAgent(string agentName) + { + ArgumentNullException.ThrowIfNull(agentName); + return this._agentFactories.ContainsKey(agentName); + } } diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableDataConverter.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableDataConverter.cs new file mode 100644 index 0000000000..08dddf6852 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableDataConverter.cs @@ -0,0 +1,66 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using System.Text.Json.Serialization.Metadata; +using Microsoft.Agents.AI.DurableTask.State; +using Microsoft.DurableTask; + +namespace Microsoft.Agents.AI.DurableTask; + +/// +/// Custom data converter for durable agents and workflows that ensures proper JSON serialization. +/// +/// +/// This converter handles special cases like using source-generated +/// JSON contexts for AOT compatibility, and falls back to reflection-based serialization for other types. +/// +internal sealed class DurableDataConverter : DataConverter +{ + private static readonly JsonSerializerOptions s_options = new(DurableAgentJsonUtilities.DefaultOptions) + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + PropertyNameCaseInsensitive = true, + }; + + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Fallback uses reflection when metadata unavailable.")] + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Fallback uses reflection when metadata unavailable.")] + public override object? Deserialize(string? 
data, Type targetType) + { + if (data is null) + { + return null; + } + + if (targetType == typeof(DurableAgentState)) + { + return JsonSerializer.Deserialize(data, DurableAgentStateJsonContext.Default.DurableAgentState); + } + + JsonTypeInfo? typeInfo = s_options.GetTypeInfo(targetType); + return typeInfo is not null + ? JsonSerializer.Deserialize(data, typeInfo) + : JsonSerializer.Deserialize(data, targetType, s_options); + } + + [return: NotNullIfNotNull(nameof(value))] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Fallback uses reflection when metadata unavailable.")] + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Fallback uses reflection when metadata unavailable.")] + public override string? Serialize(object? value) + { + if (value is null) + { + return null; + } + + if (value is DurableAgentState durableAgentState) + { + return JsonSerializer.Serialize(durableAgentState, DurableAgentStateJsonContext.Default.DurableAgentState); + } + + JsonTypeInfo? typeInfo = s_options.GetTypeInfo(value.GetType()); + return typeInfo is not null + ? JsonSerializer.Serialize(value, typeInfo) + : JsonSerializer.Serialize(value, s_options); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableOptions.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableOptions.cs new file mode 100644 index 0000000000..d7f289b223 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableOptions.cs @@ -0,0 +1,31 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using Microsoft.Agents.AI.DurableTask.Workflows; + +namespace Microsoft.Agents.AI.DurableTask; + +/// +/// Provides configuration options for durable agents and workflows. +/// +[DebuggerDisplay("Workflows = {Workflows.Workflows.Count}, Agents = {Agents.AgentCount}")] +public class DurableOptions +{ + /// + /// Initializes a new instance of the class. 
+ /// + internal DurableOptions() + { + this.Workflows = new DurableWorkflowOptions(this); + } + + /// + /// Gets the configuration options for durable agents. + /// + public DurableAgentsOptions Agents { get; } = new(); + + /// + /// Gets the configuration options for durable workflows. + /// + public DurableWorkflowOptions Workflows { get; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableServicesMarker.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableServicesMarker.cs new file mode 100644 index 0000000000..58dea9b20f --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/DurableServicesMarker.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask; + +/// +/// Marker class used to track whether core durable task services have been registered. +/// +/// +/// +/// Problem it solves: Users may call configuration methods multiple times: +/// +/// services.ConfigureDurableOptions(...); // 1st call - registers agent A +/// services.ConfigureDurableOptions(...); // 2nd call - registers workflow X +/// services.ConfigureDurableOptions(...); // 3rd call - registers agent B and workflow Y +/// +/// Each call invokes EnsureDurableServicesRegistered. Without this marker, core services like +/// AddDurableTaskWorker and AddDurableTaskClient would be registered multiple times, +/// causing runtime errors or unexpected behavior. +/// +/// +/// How it works: +/// +/// First call: No marker in services → register marker + all core services +/// Subsequent calls: Marker exists → early return, skip core service registration +/// +/// +/// +/// Why not use TryAddSingleton for everything? +/// While TryAddSingleton prevents duplicate simple service registrations, it doesn't work for +/// complex registrations like AddDurableTaskWorker which have side effects and configure +/// internal builders. The marker pattern provides a clean, explicit guard for the entire registration block. 
+/// +/// +internal sealed class DurableServicesMarker; diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Logs.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Logs.cs index ba310441df..57ef010a2f 100644 --- a/dotnet/src/Microsoft.Agents.AI.DurableTask/Logs.cs +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Logs.cs @@ -100,4 +100,131 @@ public static partial void LogTTLRescheduled( public static partial void LogTTLExpirationTimeCleared( this ILogger logger, AgentSessionId sessionId); + + // Durable workflow logs (EventIds 100-199) + + [LoggerMessage( + EventId = 100, + Level = LogLevel.Information, + Message = "Starting workflow '{WorkflowName}' with instance '{InstanceId}'")] + public static partial void LogWorkflowStarting( + this ILogger logger, + string workflowName, + string instanceId); + + [LoggerMessage( + EventId = 101, + Level = LogLevel.Information, + Message = "Superstep {Step}: {Count} active executor(s)")] + public static partial void LogSuperstepStarting( + this ILogger logger, + int step, + int count); + + [LoggerMessage( + EventId = 102, + Level = LogLevel.Debug, + Message = "Superstep {Step} executors: [{Executors}]")] + public static partial void LogSuperstepExecutors( + this ILogger logger, + int step, + string executors); + + [LoggerMessage( + EventId = 103, + Level = LogLevel.Information, + Message = "Workflow completed")] + public static partial void LogWorkflowCompleted( + this ILogger logger); + + [LoggerMessage( + EventId = 104, + Level = LogLevel.Warning, + Message = "Workflow '{InstanceId}' terminated early: reached maximum superstep limit ({MaxSupersteps}) with {RemainingExecutors} executor(s) still queued")] + public static partial void LogWorkflowMaxSuperstepsExceeded( + this ILogger logger, + string instanceId, + int maxSupersteps, + int remainingExecutors); + + [LoggerMessage( + EventId = 105, + Level = LogLevel.Debug, + Message = "Fan-In executor {ExecutorId}: aggregated {Count} messages from [{Sources}]")] + public static 
partial void LogFanInAggregated( + this ILogger logger, + string executorId, + int count, + string sources); + + [LoggerMessage( + EventId = 106, + Level = LogLevel.Debug, + Message = "Executor '{ExecutorId}' returned result (length: {Length}, messages: {MessageCount})")] + public static partial void LogExecutorResultReceived( + this ILogger logger, + string executorId, + int length, + int messageCount); + + [LoggerMessage( + EventId = 107, + Level = LogLevel.Debug, + Message = "Dispatching executor '{ExecutorId}' (agentic: {IsAgentic})")] + public static partial void LogDispatchingExecutor( + this ILogger logger, + string executorId, + bool isAgentic); + + [LoggerMessage( + EventId = 108, + Level = LogLevel.Warning, + Message = "Agent '{AgentName}' not found")] + public static partial void LogAgentNotFound( + this ILogger logger, + string agentName); + + [LoggerMessage( + EventId = 109, + Level = LogLevel.Debug, + Message = "Edge {Source} -> {Sink}: condition returned false, skipping")] + public static partial void LogEdgeConditionFalse( + this ILogger logger, + string source, + string sink); + + [LoggerMessage( + EventId = 110, + Level = LogLevel.Warning, + Message = "Failed to evaluate condition for edge {Source} -> {Sink}, skipping")] + public static partial void LogEdgeConditionEvaluationFailed( + this ILogger logger, + Exception ex, + string source, + string sink); + + [LoggerMessage( + EventId = 111, + Level = LogLevel.Debug, + Message = "Edge {Source} -> {Sink}: routing message")] + public static partial void LogEdgeRoutingMessage( + this ILogger logger, + string source, + string sink); + + [LoggerMessage( + EventId = 112, + Level = LogLevel.Information, + Message = "Workflow waiting for external input at RequestPort '{RequestPortId}'")] + public static partial void LogWaitingForExternalEvent( + this ILogger logger, + string requestPortId); + + [LoggerMessage( + EventId = 113, + Level = LogLevel.Information, + Message = "Received external event for 
RequestPort '{RequestPortId}'")] + public static partial void LogReceivedExternalEvent( + this ILogger logger, + string requestPortId); } diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Microsoft.Agents.AI.DurableTask.csproj b/dotnet/src/Microsoft.Agents.AI.DurableTask/Microsoft.Agents.AI.DurableTask.csproj index 28046894db..77c877939e 100644 --- a/dotnet/src/Microsoft.Agents.AI.DurableTask/Microsoft.Agents.AI.DurableTask.csproj +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Microsoft.Agents.AI.DurableTask.csproj @@ -17,7 +17,6 @@ - true true @@ -28,6 +27,7 @@ + diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/ServiceCollectionExtensions.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/ServiceCollectionExtensions.cs index 79d44924ca..456e4ae98d 100644 --- a/dotnet/src/Microsoft.Agents.AI.DurableTask/ServiceCollectionExtensions.cs +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/ServiceCollectionExtensions.cs @@ -1,18 +1,18 @@ // Copyright (c) Microsoft. All rights reserved. -using System.Diagnostics.CodeAnalysis; -using System.Text.Json; -using System.Text.Json.Serialization.Metadata; -using Microsoft.Agents.AI.DurableTask.State; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; using Microsoft.DurableTask; using Microsoft.DurableTask.Client; using Microsoft.DurableTask.Worker; using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.DependencyInjection.Extensions; +using Microsoft.Extensions.Logging; namespace Microsoft.Agents.AI.DurableTask; /// -/// Agent-specific extension methods for the class. +/// Extension methods for configuring durable agents and workflows with dependency injection. /// public static class ServiceCollectionExtensions { @@ -30,77 +30,331 @@ public static AIAgent GetDurableAgentProxy(this IServiceProvider services, strin } /// - /// Configures the Durable Agents services via the service collection. 
+ /// Configures durable agents, automatically registering agent entities. /// + /// + /// + /// This method provides an agent-focused configuration experience. + /// If you need to configure both agents and workflows, consider using + /// instead. + /// + /// + /// Multiple calls to this method are supported and configurations are composed additively. + /// + /// /// The service collection. /// A delegate to configure the durable agents. - /// A delegate to configure the Durable Task worker. - /// A delegate to configure the Durable Task client. - /// The service collection. + /// Optional delegate to configure the Durable Task worker. + /// Optional delegate to configure the Durable Task client. + /// The service collection for chaining. public static IServiceCollection ConfigureDurableAgents( this IServiceCollection services, Action configure, Action? workerBuilder = null, Action? clientBuilder = null) { + return services.ConfigureDurableOptions( + options => configure(options.Agents), + workerBuilder, + clientBuilder); + } + + /// + /// Configures durable workflows, automatically registering orchestrations and activities. + /// + /// + /// + /// This method provides a workflow-focused configuration experience. + /// If you need to configure both agents and workflows, consider using + /// instead. + /// + /// + /// Multiple calls to this method are supported and configurations are composed additively. + /// + /// + /// The service collection to configure. + /// A delegate to configure the workflow options. + /// Optional delegate to configure the durable task worker. + /// Optional delegate to configure the durable task client. + /// The service collection for chaining. + public static IServiceCollection ConfigureDurableWorkflows( + this IServiceCollection services, + Action configure, + Action? workerBuilder = null, + Action? 
clientBuilder = null) + { + return services.ConfigureDurableOptions( + options => configure(options.Workflows), + workerBuilder, + clientBuilder); + } + + /// + /// Configures durable agents and workflows, automatically registering orchestrations, activities, and agent entities. + /// + /// + /// + /// This is the recommended entry point for configuring durable functionality. It provides unified configuration + /// for both agents and workflows through a single instance, ensuring agents + /// referenced in workflows are automatically registered. + /// + /// + /// Multiple calls to this method (or to + /// and ) are supported and configurations are composed additively. + /// + /// + /// The service collection to configure. + /// A delegate to configure the durable options for both agents and workflows. + /// Optional delegate to configure the durable task worker. + /// Optional delegate to configure the durable task client. + /// The service collection for chaining. + /// + /// + /// services.ConfigureDurableOptions(options => + /// { + /// // Register agents not part of workflows + /// options.Agents.AddAIAgent(standaloneAgent); + /// + /// // Register workflows - agents in workflows are auto-registered + /// options.Workflows.AddWorkflow(myWorkflow); + /// }, + /// workerBuilder: builder => builder.UseDurableTaskScheduler(connectionString), + /// clientBuilder: builder => builder.UseDurableTaskScheduler(connectionString)); + /// + /// + public static IServiceCollection ConfigureDurableOptions( + this IServiceCollection services, + Action configure, + Action? workerBuilder = null, + Action? 
clientBuilder = null) + { + ArgumentNullException.ThrowIfNull(services); ArgumentNullException.ThrowIfNull(configure); - DurableAgentsOptions options = services.ConfigureDurableAgents(configure); + // Get or create the shared DurableOptions instance for configuration + DurableOptions sharedOptions = GetOrCreateSharedOptions(services); + + // Apply the configuration immediately to capture agent names for keyed service registration + configure(sharedOptions); + + // Register keyed services for any new agents + RegisterAgentKeyedServices(services, sharedOptions); + + // Register core services only once + EnsureDurableServicesRegistered(services, sharedOptions, workerBuilder, clientBuilder); + + return services; + } + + private static DurableOptions GetOrCreateSharedOptions(IServiceCollection services) + { + // Look for an existing DurableOptions registration + ServiceDescriptor? existingDescriptor = services.FirstOrDefault( + d => d.ServiceType == typeof(DurableOptions) && d.ImplementationInstance is not null); + + if (existingDescriptor?.ImplementationInstance is DurableOptions existing) + { + return existing; + } + + // Create a new shared options instance + DurableOptions options = new(); + services.AddSingleton(options); + return options; + } + + private static void RegisterAgentKeyedServices(IServiceCollection services, DurableOptions options) + { + foreach (KeyValuePair> factory in options.Agents.GetAgentFactories()) + { + // Only add if not already registered (to support multiple Configure* calls) + if (!services.Any(d => d.ServiceType == typeof(AIAgent) && d.IsKeyedService && Equals(d.ServiceKey, factory.Key))) + { + services.AddKeyedSingleton(factory.Key, (sp, _) => factory.Value(sp).AsDurableAgentProxy(sp)); + } + } + } - // A worker is required to run the agent entities - services.AddDurableTaskWorker(builder => + /// + /// Ensures that the core durable services are registered only once, regardless of how many + /// times the configuration methods are 
called. + /// + private static void EnsureDurableServicesRegistered( + IServiceCollection services, + DurableOptions sharedOptions, + Action? workerBuilder, + Action? clientBuilder) + { + // Use a marker to ensure we only register core services once + if (services.Any(d => d.ServiceType == typeof(DurableServicesMarker))) { - workerBuilder?.Invoke(builder); + return; + } + + services.AddSingleton(); + + services.TryAddSingleton(); - builder.AddTasks(registry => + // Configure Durable Task Worker - capture sharedOptions reference in closure. + // The options object is populated by all Configure* calls before the worker starts. + + if (workerBuilder is not null) + { + services.AddDurableTaskWorker(builder => { - foreach (string name in options.GetAgentFactories().Keys) - { - registry.AddEntity(AgentSessionId.ToEntityName(name)); - } + workerBuilder?.Invoke(builder); + + builder.AddTasks(registry => RegisterTasksFromOptions(registry, sharedOptions)); }); - }); + } - // The client is needed to send notifications to the agent entities from non-orchestrator code - if (clientBuilder != null) + // Configure Durable Task Client + if (clientBuilder is not null) { services.AddDurableTaskClient(clientBuilder); + services.TryAddSingleton(); + services.TryAddSingleton(); } - services.AddSingleton(); + // Register workflow and agent services + services.TryAddSingleton(); - return services; + // Register agent factories resolver - returns factories from the shared options + services.TryAddSingleton( + sp => sp.GetRequiredService().Agents.GetAgentFactories()); + + // Register DurableAgentsOptions resolver + services.TryAddSingleton(sp => sp.GetRequiredService().Agents); } - // This is internal because it's also used by Microsoft.Azure.Functions.DurableAgents, which is a friend assembly project. 
- internal static DurableAgentsOptions ConfigureDurableAgents( - this IServiceCollection services, - Action configure) + private static void RegisterTasksFromOptions(DurableTaskRegistry registry, DurableOptions durableOptions) { - DurableAgentsOptions options = new(); - configure(options); + // Build registrations for all workflows including sub-workflows + List registrations = []; + HashSet registeredActivities = []; + HashSet registeredOrchestrations = []; - IReadOnlyDictionary> agents = options.GetAgentFactories(); + DurableWorkflowOptions workflowOptions = durableOptions.Workflows; + foreach (Workflow workflow in workflowOptions.Workflows.Values.ToList()) + { + BuildWorkflowRegistrationRecursive( + workflow, + workflowOptions, + registrations, + registeredActivities, + registeredOrchestrations); + } - // The agent dictionary contains the real agent factories, which is used by the agent entities. - services.AddSingleton(agents); + IReadOnlyDictionary> agentFactories = + durableOptions.Agents.GetAgentFactories(); - // Register the options so AgentEntity can access TTL configuration - services.AddSingleton(options); + // Register orchestrations and activities + foreach (WorkflowRegistrationInfo registration in registrations) + { + // Register with DurableWorkflowInput - the DataConverter handles serialization/deserialization + registry.AddOrchestratorFunc, DurableWorkflowResult>( + registration.OrchestrationName, + (context, input) => RunWorkflowOrchestrationAsync(context, input, durableOptions)); + + foreach (ActivityRegistrationInfo activity in registration.Activities) + { + ExecutorBinding binding = activity.Binding; + registry.AddActivityFunc( + activity.ActivityName, + (context, input) => DurableActivityExecutor.ExecuteAsync(binding, input)); + } + } - // The keyed services are used to resolve durable agent *proxy* instances for external clients. 
- foreach (var factory in agents) + // Register agent entities + foreach (string agentName in agentFactories.Keys) { - services.AddKeyedSingleton(factory.Key, (sp, _) => factory.Value(sp).AsDurableAgentProxy(sp)); + registry.AddEntity(AgentSessionId.ToEntityName(agentName)); } + } - // A custom data converter is needed because the default chat client uses camel case for JSON properties, - // which is not the default behavior for the Durable Task SDK. - services.AddSingleton(); + private static void BuildWorkflowRegistrationRecursive( + Workflow workflow, + DurableWorkflowOptions workflowOptions, + List registrations, + HashSet registeredActivities, + HashSet registeredOrchestrations) + { + string orchestrationName = WorkflowNamingHelper.ToOrchestrationFunctionName(workflow.Name!); - return options; + if (!registeredOrchestrations.Add(orchestrationName)) + { + return; + } + + registrations.Add(BuildWorkflowRegistration(workflow, registeredActivities)); + + // Process subworkflows recursively to register them as separate orchestrations + foreach (SubworkflowBinding subworkflowBinding in workflow.ReflectExecutors() + .Select(e => e.Value) + .OfType()) + { + Workflow subWorkflow = subworkflowBinding.WorkflowInstance; + workflowOptions.AddWorkflow(subWorkflow); + + BuildWorkflowRegistrationRecursive( + subWorkflow, + workflowOptions, + registrations, + registeredActivities, + registeredOrchestrations); + } } + private static WorkflowRegistrationInfo BuildWorkflowRegistration( + Workflow workflow, + HashSet registeredActivities) + { + string orchestrationName = WorkflowNamingHelper.ToOrchestrationFunctionName(workflow.Name!); + Dictionary executorBindings = workflow.ReflectExecutors(); + List activities = []; + + foreach (KeyValuePair entry in executorBindings + .Where(e => IsActivityBinding(e.Value))) + { + string executorName = WorkflowNamingHelper.GetExecutorName(entry.Key); + string activityName = WorkflowNamingHelper.ToOrchestrationFunctionName(executorName); + + if 
(registeredActivities.Add(activityName)) + { + activities.Add(new ActivityRegistrationInfo(activityName, entry.Value)); + } + } + + return new WorkflowRegistrationInfo(orchestrationName, activities); + } + + /// + /// Returns for bindings that should be registered as Durable Task activities. + /// (Durable Entities), (sub-orchestrations), + /// and (human-in-the-loop via external events) use specialized dispatch + /// and are excluded. + /// + private static bool IsActivityBinding(ExecutorBinding binding) + => binding is not AIAgentBinding + and not SubworkflowBinding + and not RequestPortBinding; + + private static async Task RunWorkflowOrchestrationAsync( + TaskOrchestrationContext context, + DurableWorkflowInput workflowInput, + DurableOptions durableOptions) + { + ILogger logger = context.CreateReplaySafeLogger("DurableWorkflow"); + DurableWorkflowRunner runner = new(durableOptions); + + // ConfigureAwait(true) is required in orchestration code for deterministic replay. + return await runner.RunWorkflowOrchestrationAsync(context, workflowInput, logger).ConfigureAwait(true); + } + + private sealed record WorkflowRegistrationInfo(string OrchestrationName, List Activities); + + private sealed record ActivityRegistrationInfo(string ActivityName, ExecutorBinding Binding); + /// /// Validates that an agent with the specified name has been registered. /// @@ -124,63 +378,4 @@ internal static void ValidateAgentIsRegistered(IServiceProvider services, string throw new AgentNotRegisteredException(agentName); } } - - private sealed class DefaultDataConverter : DataConverter - { - // Use durable agent options (web defaults + camel case by default) with case-insensitive matching. - // We clone to apply naming/casing tweaks while retaining source-generated metadata where available. 
- private static readonly JsonSerializerOptions s_options = new(DurableAgentJsonUtilities.DefaultOptions) - { - PropertyNamingPolicy = JsonNamingPolicy.CamelCase, - PropertyNameCaseInsensitive = true, - }; - - [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Fallback path uses reflection when metadata unavailable.")] - [UnconditionalSuppressMessage("ReflectionAnalysis", "IL3050", Justification = "Fallback path uses reflection when metadata unavailable.")] - public override object? Deserialize(string? data, Type targetType) - { - if (data is null) - { - return null; - } - - if (targetType == typeof(DurableAgentState)) - { - return JsonSerializer.Deserialize(data, DurableAgentStateJsonContext.Default.DurableAgentState); - } - - JsonTypeInfo? typeInfo = s_options.GetTypeInfo(targetType); - if (typeInfo is JsonTypeInfo typedInfo) - { - return JsonSerializer.Deserialize(data, typedInfo); - } - - // Fallback (may trigger trimming/AOT warnings for unsupported dynamic types). - return JsonSerializer.Deserialize(data, targetType, s_options); - } - - [return: NotNullIfNotNull(nameof(value))] - [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Fallback path uses reflection when metadata unavailable.")] - [UnconditionalSuppressMessage("ReflectionAnalysis", "IL3050", Justification = "Fallback path uses reflection when metadata unavailable.")] - public override string? Serialize(object? value) - { - if (value is null) - { - return null; - } - - if (value is DurableAgentState durableAgentState) - { - return JsonSerializer.Serialize(durableAgentState, DurableAgentStateJsonContext.Default.DurableAgentState); - } - - JsonTypeInfo? 
typeInfo = s_options.GetTypeInfo(value.GetType()); - if (typeInfo is JsonTypeInfo typedInfo) - { - return JsonSerializer.Serialize(value, typedInfo); - } - - return JsonSerializer.Serialize(value, s_options); - } - } } diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableActivityExecutor.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableActivityExecutor.cs new file mode 100644 index 0000000000..c9e9a1b125 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableActivityExecutor.cs @@ -0,0 +1,177 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using Microsoft.Agents.AI.Workflows; +using Microsoft.Agents.AI.Workflows.Checkpointing; +using Microsoft.Agents.AI.Workflows.Observability; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Executes workflow activities by invoking executor bindings and handling serialization. +/// +[UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Workflow and executor types are registered at startup.")] +[UnconditionalSuppressMessage("Trimming", "IL2057", Justification = "Workflow and executor types are registered at startup.")] +[UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Workflow and executor types are registered at startup.")] +internal static class DurableActivityExecutor +{ + /// + /// Executes an activity using the provided executor binding. + /// + /// The executor binding to invoke. + /// The serialized input string. + /// A token to cancel the operation. + /// The serialized activity output. + /// Thrown when is null. + /// Thrown when the executor factory is not configured. 
+ internal static async Task ExecuteAsync( + ExecutorBinding binding, + string input, + CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(binding); + + if (binding.FactoryAsync is null) + { + throw new InvalidOperationException($"Executor binding for '{binding.Id}' does not have a factory configured."); + } + + DurableActivityInput? inputWithState = TryDeserializeActivityInput(input); + string executorInput = inputWithState?.Input ?? input; + Dictionary sharedState = inputWithState?.State ?? []; + + Executor executor = await binding.FactoryAsync(binding.Id).ConfigureAwait(false); + Type inputType = ResolveInputType(inputWithState?.InputTypeName, executor.InputTypes); + object typedInput = DeserializeInput(executorInput, inputType); + + DurableWorkflowContext workflowContext = new(sharedState, executor); + object? result = await executor.ExecuteCoreAsync( + typedInput, + new TypeId(inputType), + workflowContext, + WorkflowTelemetryContext.Disabled, + cancellationToken).ConfigureAwait(false); + + return SerializeActivityOutput(result, workflowContext); + } + + private static string SerializeActivityOutput(object? result, DurableWorkflowContext context) + { + DurableExecutorOutput output = new() + { + Result = SerializeResult(result), + StateUpdates = context.StateUpdates, + ClearedScopes = [.. context.ClearedScopes], + Events = context.OutboundEvents.ConvertAll(SerializeEvent), + SentMessages = context.SentMessages, + HaltRequested = context.HaltRequested + }; + + return JsonSerializer.Serialize(output, DurableWorkflowJsonContext.Default.DurableExecutorOutput); + } + + /// + /// Serializes a workflow event with type information for proper deserialization. 
+ /// + private static string SerializeEvent(WorkflowEvent evt) + { + Type eventType = evt.GetType(); + TypedPayload wrapper = new() + { + TypeName = eventType.AssemblyQualifiedName, + Data = JsonSerializer.Serialize(evt, eventType, DurableSerialization.Options) + }; + + return JsonSerializer.Serialize(wrapper, DurableWorkflowJsonContext.Default.TypedPayload); + } + + private static string SerializeResult(object? result) + { + if (result is null) + { + return string.Empty; + } + + if (result is string str) + { + return str; + } + + return JsonSerializer.Serialize(result, result.GetType(), DurableSerialization.Options); + } + + private static DurableActivityInput? TryDeserializeActivityInput(string input) + { + try + { + return JsonSerializer.Deserialize(input, DurableWorkflowJsonContext.Default.DurableActivityInput); + } + catch (JsonException) + { + return null; + } + } + + internal static object DeserializeInput(string input, Type targetType) + { + if (targetType == typeof(string)) + { + return input; + } + + // Fan-in aggregation serializes results as a JSON array of strings (e.g., ["{...}", "{...}"]). + // When the target type is a non-string array, deserialize each element individually. + if (targetType.IsArray && targetType != typeof(string[])) + { + Type elementType = targetType.GetElementType()!; + string[]? stringArray = JsonSerializer.Deserialize(input, DurableSerialization.Options); + if (stringArray is not null) + { + Array result = Array.CreateInstance(elementType, stringArray.Length); + for (int i = 0; i < stringArray.Length; i++) + { + object element = JsonSerializer.Deserialize(stringArray[i], elementType, DurableSerialization.Options) + ?? throw new InvalidOperationException($"Failed to deserialize element {i} to type '{elementType.Name}'."); + result.SetValue(element, i); + } + + return result; + } + } + + return JsonSerializer.Deserialize(input, targetType, DurableSerialization.Options) + ?? 
throw new InvalidOperationException($"Failed to deserialize input to type '{targetType.Name}'."); + } + + internal static Type ResolveInputType(string? inputTypeName, ISet supportedTypes) + { + if (string.IsNullOrEmpty(inputTypeName)) + { + return supportedTypes.FirstOrDefault() ?? typeof(string); + } + + Type? matchedType = supportedTypes.FirstOrDefault(t => + t.AssemblyQualifiedName == inputTypeName || + t.FullName == inputTypeName || + t.Name == inputTypeName); + + if (matchedType is not null) + { + return matchedType; + } + + Type? loadedType = Type.GetType(inputTypeName); + + // Fall back if type is string or string[] but executor doesn't support it + if (loadedType is not null && !supportedTypes.Contains(loadedType)) + { + if (loadedType == typeof(string) || loadedType == typeof(string[])) + { + return supportedTypes.FirstOrDefault() ?? typeof(string); + } + } + + return loadedType ?? supportedTypes.FirstOrDefault() ?? typeof(string); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableActivityInput.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableActivityInput.cs new file mode 100644 index 0000000000..b49306bf9e --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableActivityInput.cs @@ -0,0 +1,24 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Input payload for activity execution, containing the input and other metadata. +/// +internal sealed class DurableActivityInput +{ + /// + /// Gets or sets the serialized executor input. + /// + public string? Input { get; set; } + + /// + /// Gets or sets the assembly-qualified type name of the input, used for proper deserialization. + /// + public string? InputTypeName { get; set; } + + /// + /// Gets or sets the shared state dictionary (scope-prefixed key -> serialized value). 
+ /// + public Dictionary State { get; set; } = []; +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableExecutorDispatcher.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableExecutorDispatcher.cs new file mode 100644 index 0000000000..b2440cfd83 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableExecutorDispatcher.cs @@ -0,0 +1,216 @@ +// Copyright (c) Microsoft. All rights reserved. + +// ConfigureAwait Usage in Orchestration Code: +// This file uses ConfigureAwait(true) because it runs within orchestration context. +// Durable Task orchestrations require deterministic replay - the same code must execute +// identically across replays. ConfigureAwait(true) ensures continuations run on the +// orchestration's synchronization context, which is essential for replay correctness. +// Using ConfigureAwait(false) here could cause non-deterministic behavior during replay. + +using System.Text.Json; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask; +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Dispatches workflow executors to activities, AI agents, sub-orchestrations, or external events (human-in-the-loop). +/// +/// +/// Called during the dispatch phase of each superstep by +/// DurableWorkflowRunner.DispatchExecutorsInParallelAsync. For each executor that has +/// pending input, this dispatcher determines whether the executor is an AI agent (stateful, +/// backed by Durable Entities), a request port (human-in-the-loop, backed by external events), +/// a sub-workflow (dispatched as a sub-orchestration), or a regular activity, and invokes the +/// appropriate Durable Task API. +/// The serialised string result is returned to the runner for the routing phase. +/// +internal static class DurableExecutorDispatcher +{ + /// + /// Dispatches an executor based on its type (activity, AI agent, request port, or sub-workflow). 
+ /// + /// The task orchestration context. + /// Information about the executor to dispatch. + /// The message envelope containing input and type information. + /// The shared state dictionary to pass to the executor. + /// The live workflow status used to publish events and pending request port state. + /// The logger for tracing. + /// The result from the executor. + internal static async Task DispatchAsync( + TaskOrchestrationContext context, + WorkflowExecutorInfo executorInfo, + DurableMessageEnvelope envelope, + Dictionary sharedState, + DurableWorkflowLiveStatus liveStatus, + ILogger logger) + { + logger.LogDispatchingExecutor(executorInfo.ExecutorId, executorInfo.IsAgenticExecutor); + + if (executorInfo.IsRequestPortExecutor) + { + return await ExecuteRequestPortAsync(context, executorInfo, envelope.Message, liveStatus, logger).ConfigureAwait(true); + } + + if (executorInfo.IsAgenticExecutor) + { + return await ExecuteAgentAsync(context, executorInfo, logger, envelope.Message).ConfigureAwait(true); + } + + if (executorInfo.IsSubworkflowExecutor) + { + return await ExecuteSubWorkflowAsync(context, executorInfo, envelope.Message).ConfigureAwait(true); + } + + return await ExecuteActivityAsync(context, executorInfo, envelope.Message, envelope.InputTypeName, sharedState).ConfigureAwait(true); + } + + private static async Task ExecuteActivityAsync( + TaskOrchestrationContext context, + WorkflowExecutorInfo executorInfo, + string input, + string? 
inputTypeName, + Dictionary sharedState) + { + string executorName = WorkflowNamingHelper.GetExecutorName(executorInfo.ExecutorId); + string activityName = WorkflowNamingHelper.ToOrchestrationFunctionName(executorName); + + DurableActivityInput activityInput = new() + { + Input = input, + InputTypeName = inputTypeName, + State = sharedState + }; + + string serializedInput = JsonSerializer.Serialize(activityInput, DurableWorkflowJsonContext.Default.DurableActivityInput); + + return await context.CallActivityAsync(activityName, serializedInput).ConfigureAwait(true); + } + + /// + /// Executes a request port executor by waiting for an external event (human-in-the-loop). + /// + /// + /// When the workflow reaches a executor, the orchestration publishes + /// the pending request to and waits for an external actor + /// (e.g., a UI or API) to raise the corresponding event via + /// . + /// Multiple RequestPorts may be dispatched in parallel during a fan-out superstep. + /// Each adds its pending request to . + /// The wait has no built-in timeout; for time-limited approvals, callers can combine + /// context.CreateTimer with Task.WhenAny in a wrapper executor. 
+ /// + private static async Task ExecuteRequestPortAsync( + TaskOrchestrationContext context, + WorkflowExecutorInfo executorInfo, + string input, + DurableWorkflowLiveStatus liveStatus, + ILogger logger) + { + RequestPort requestPort = executorInfo.RequestPort!; + string eventName = requestPort.Id; + + logger.LogWaitingForExternalEvent(eventName); + + // Publish pending request so external clients can discover what input is needed + liveStatus.PendingEvents.Add(new PendingRequestPortStatus(EventName: eventName, Input: input)); + context.SetCustomStatus(liveStatus); + + // Wait until the external actor raises the event + string response = await context.WaitForExternalEvent(eventName).ConfigureAwait(true); + + // Remove this pending request after receiving the response + liveStatus.PendingEvents.RemoveAll(p => p.EventName == eventName); + context.SetCustomStatus(liveStatus.Events.Count > 0 || liveStatus.PendingEvents.Count > 0 ? liveStatus : null); + + logger.LogReceivedExternalEvent(eventName); + + return response; + } + + /// + /// Executes an AI agent executor through Durable Entities. + /// + /// + /// AI agents are stateful and maintain conversation history. They use Durable Entities + /// to persist state across orchestration replays. + /// + private static async Task ExecuteAgentAsync( + TaskOrchestrationContext context, + WorkflowExecutorInfo executorInfo, + ILogger logger, + string input) + { + string agentName = WorkflowNamingHelper.GetExecutorName(executorInfo.ExecutorId); + DurableAIAgent agent = context.GetAgent(agentName); + + if (agent is null) + { + logger.LogAgentNotFound(agentName); + return $"Agent '{agentName}' not found"; + } + + AgentSession session = await agent.CreateSessionAsync().ConfigureAwait(true); + AgentResponse response = await agent.RunAsync(input, session).ConfigureAwait(true); + + return response.Text; + } + + /// + /// Dispatches a sub-workflow executor as a sub-orchestration. 
+ /// + /// + /// Sub-workflows run as separate orchestration instances, providing independent + /// checkpointing, replay, and hierarchical visualization in the DTS dashboard. + /// The input is wrapped in so the sub-orchestration + /// can extract it using the same envelope structure. The sub-orchestration returns a + /// directly (deserialized by the Durable Task SDK), + /// which this method converts to a so the parent + /// workflow's result processing picks up both the result and any accumulated events. + /// + private static async Task ExecuteSubWorkflowAsync( + TaskOrchestrationContext context, + WorkflowExecutorInfo executorInfo, + string input) + { + string orchestrationName = WorkflowNamingHelper.ToOrchestrationFunctionName(executorInfo.SubWorkflow!.Name!); + + DurableWorkflowInput workflowInput = new() { Input = input }; + + DurableWorkflowResult? workflowResult = await context.CallSubOrchestratorAsync( + orchestrationName, + workflowInput).ConfigureAwait(true); + + return ConvertWorkflowResultToExecutorOutput(workflowResult); + } + + /// + /// Converts a from a sub-orchestration + /// into a JSON string. This bridges the sub-workflow's + /// output format to the parent workflow's result processing, preserving both the result + /// and any accumulated events from the sub-workflow. + /// + private static string ConvertWorkflowResultToExecutorOutput(DurableWorkflowResult? workflowResult) + { + if (workflowResult is null) + { + return string.Empty; + } + + // Propagate the result, events, and sent messages from the sub-workflow. + // SentMessages carry the sub-workflow's output for typed routing in the parent, + // matching the in-process WorkflowHostExecutor behavior. + // Shared state is not included because each workflow instance maintains its own + // independent shared state; it is not shared between parent and sub-workflows. + DurableExecutorOutput executorOutput = new() + { + Result = workflowResult.Result, + Events = workflowResult.Events ?? 
[], + SentMessages = workflowResult.SentMessages ?? [], + HaltRequested = workflowResult.HaltRequested, + }; + + return JsonSerializer.Serialize(executorOutput, DurableWorkflowJsonContext.Default.DurableExecutorOutput); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableExecutorOutput.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableExecutorOutput.cs new file mode 100644 index 0000000000..ce3f26c14b --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableExecutorOutput.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Output payload from executor execution, containing the result, state updates, and emitted events. +/// +internal sealed class DurableExecutorOutput +{ + /// + /// Gets the executor result. + /// + public string? Result { get; init; } + + /// + /// Gets the state updates (scope-prefixed key to value; null indicates deletion). + /// + public Dictionary StateUpdates { get; init; } = []; + + /// + /// Gets the scope names that were cleared. + /// + public List ClearedScopes { get; init; } = []; + + /// + /// Gets the workflow events emitted during execution. + /// + public List Events { get; init; } = []; + + /// + /// Gets the typed messages sent to downstream executors. + /// + public List SentMessages { get; init; } = []; + + /// + /// Gets a value indicating whether the executor requested a workflow halt. + /// + public bool HaltRequested { get; init; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableHaltRequestedEvent.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableHaltRequestedEvent.cs new file mode 100644 index 0000000000..6c7aacfc48 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableHaltRequestedEvent.cs @@ -0,0 +1,25 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Event raised when an executor requests the workflow to halt via . +/// +public sealed class DurableHaltRequestedEvent : WorkflowEvent +{ + /// + /// Initializes a new instance of the class. + /// + /// The ID of the executor that requested the halt. + public DurableHaltRequestedEvent(string executorId) : base($"Halt requested by {executorId}") + { + this.ExecutorId = executorId; + } + + /// + /// Gets the ID of the executor that requested the halt. + /// + public string ExecutorId { get; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableMessageEnvelope.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableMessageEnvelope.cs new file mode 100644 index 0000000000..56f560a31c --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableMessageEnvelope.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents a message envelope for durable workflow message passing. +/// +/// +/// +/// This is the durable equivalent of MessageEnvelope in the in-process runner. +/// Unlike the in-process version which holds native .NET objects, this envelope +/// contains serialized JSON strings suitable for Durable Task activities. +/// +/// +internal sealed class DurableMessageEnvelope +{ + /// + /// Gets or sets the serialized JSON message content. + /// + public required string Message { get; init; } + + /// + /// Gets or sets the full type name of the message for deserialization. + /// + public string? InputTypeName { get; init; } + + /// + /// Gets or sets the ID of the executor that produced this message. + /// + /// + /// Used for tracing and debugging. Null for initial workflow input. + /// + public string? SourceExecutorId { get; init; } + + /// + /// Creates a new message envelope. 
+ /// + /// The serialized JSON message content. + /// The full type name of the message for deserialization. + /// The ID of the executor that produced this message, or null for initial input. + /// A new instance. + internal static DurableMessageEnvelope Create(string message, string? inputTypeName, string? sourceExecutorId = null) + { + return new DurableMessageEnvelope + { + Message = message, + InputTypeName = inputTypeName, + SourceExecutorId = sourceExecutorId + }; + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableRunStatus.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableRunStatus.cs new file mode 100644 index 0000000000..cff00a84ca --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableRunStatus.cs @@ -0,0 +1,49 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents the execution status of a durable workflow run. +/// +public enum DurableRunStatus +{ + /// + /// The workflow instance was not found. + /// + NotFound, + + /// + /// The workflow is pending and has not started. + /// + Pending, + + /// + /// The workflow is currently running. + /// + Running, + + /// + /// The workflow completed successfully. + /// + Completed, + + /// + /// The workflow failed with an error. + /// + Failed, + + /// + /// The workflow was terminated. + /// + Terminated, + + /// + /// The workflow is suspended. + /// + Suspended, + + /// + /// The workflow status is unknown. + /// + Unknown +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableSerialization.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableSerialization.cs new file mode 100644 index 0000000000..245ec36fb8 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableSerialization.cs @@ -0,0 +1,22 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Text.Json; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Shared serialization options for user-defined workflow types that are not known at compile time +/// and therefore cannot use the source-generated . +/// +internal static class DurableSerialization +{ + /// + /// Gets the shared for workflow serialization + /// with camelCase naming and case-insensitive deserialization. + /// + internal static JsonSerializerOptions Options { get; } = new() + { + PropertyNamingPolicy = JsonNamingPolicy.CamelCase, + PropertyNameCaseInsensitive = true + }; +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableStreamingWorkflowRun.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableStreamingWorkflowRun.cs new file mode 100644 index 0000000000..6cacf871e0 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableStreamingWorkflowRun.cs @@ -0,0 +1,452 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Runtime.CompilerServices; +using System.Text.Json; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask; +using Microsoft.DurableTask.Client; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents a durable workflow run that supports streaming workflow events as they occur. +/// +/// +/// +/// Events are detected by monitoring the orchestration's custom status at regular intervals. +/// When executors emit events via or +/// , they are written to the orchestration's +/// custom status and picked up by this streaming run. +/// +/// +/// When the workflow reaches a executor, a +/// is yielded containing the request data. The caller should then call +/// +/// to provide the response and resume the workflow. 
+/// +/// +[DebuggerDisplay("{WorkflowName} ({RunId})")] +internal sealed class DurableStreamingWorkflowRun : IStreamingWorkflowRun +{ + private readonly DurableTaskClient _client; + private readonly Dictionary _requestPorts; + + /// + /// Initializes a new instance of the class. + /// + /// The durable task client for orchestration operations. + /// The unique instance ID for this orchestration run. + /// The workflow being executed. + internal DurableStreamingWorkflowRun(DurableTaskClient client, string instanceId, Workflow workflow) + { + this._client = client; + this.RunId = instanceId; + this.WorkflowName = workflow.Name ?? string.Empty; + this._requestPorts = ExtractRequestPorts(workflow); + } + + /// + public string RunId { get; } + + /// + /// Gets the name of the workflow being executed. + /// + public string WorkflowName { get; } + + /// + /// Gets the current execution status of the workflow run. + /// + /// A cancellation token to observe. + /// The current status of the durable run. + public async ValueTask GetStatusAsync(CancellationToken cancellationToken = default) + { + OrchestrationMetadata? 
metadata = await this._client.GetInstanceAsync( + this.RunId, + getInputsAndOutputs: false, + cancellation: cancellationToken).ConfigureAwait(false); + + if (metadata is null) + { + return DurableRunStatus.NotFound; + } + + return metadata.RuntimeStatus switch + { + OrchestrationRuntimeStatus.Pending => DurableRunStatus.Pending, + OrchestrationRuntimeStatus.Running => DurableRunStatus.Running, + OrchestrationRuntimeStatus.Completed => DurableRunStatus.Completed, + OrchestrationRuntimeStatus.Failed => DurableRunStatus.Failed, + OrchestrationRuntimeStatus.Terminated => DurableRunStatus.Terminated, + OrchestrationRuntimeStatus.Suspended => DurableRunStatus.Suspended, + _ => DurableRunStatus.Unknown + }; + } + + /// + public IAsyncEnumerable WatchStreamAsync(CancellationToken cancellationToken = default) + => this.WatchStreamAsync(pollingInterval: null, cancellationToken); + + /// + /// Asynchronously streams workflow events as they occur during workflow execution. + /// + /// The interval between status checks. Defaults to 100ms. + /// A cancellation token to observe. + /// An asynchronous stream of objects. + private async IAsyncEnumerable WatchStreamAsync( + TimeSpan? pollingInterval, + [EnumeratorCancellation] CancellationToken cancellationToken = default) + { + TimeSpan minInterval = pollingInterval ?? TimeSpan.FromMilliseconds(100); + TimeSpan maxInterval = TimeSpan.FromSeconds(2); + TimeSpan currentInterval = minInterval; + + // Track how many events we've already read from the durable workflow status + int lastReadEventIndex = 0; + + // Track which pending events we've already yielded to avoid duplicates + HashSet yieldedPendingEvents = []; + + while (!cancellationToken.IsCancellationRequested) + { + // Poll with getInputsAndOutputs: true because SerializedCustomStatus + // (used for event streaming) is only populated when this flag is set. + OrchestrationMetadata? 
metadata = await this._client.GetInstanceAsync( + this.RunId, + getInputsAndOutputs: true, + cancellation: cancellationToken).ConfigureAwait(false); + + if (metadata is null) + { + yield break; + } + + bool hasNewEvents = false; + + // Always drain any unread events from the durable workflow status before checking terminal states. + // The orchestration may complete before the next poll, so events would be lost if we + // check terminal status first. + if (metadata.SerializedCustomStatus is not null) + { + if (DurableWorkflowLiveStatus.TryParse(metadata.SerializedCustomStatus, out DurableWorkflowLiveStatus liveStatus)) + { + (List events, lastReadEventIndex) = DrainNewEvents(liveStatus.Events, lastReadEventIndex); + foreach (WorkflowEvent evt in events) + { + hasNewEvents = true; + yield return evt; + } + + // Yield a DurableWorkflowWaitingForInputEvent for each new pending request port + foreach (PendingRequestPortStatus pending in liveStatus.PendingEvents) + { + if (yieldedPendingEvents.Add(pending.EventName)) + { + if (!this._requestPorts.TryGetValue(pending.EventName, out RequestPort? matchingPort)) + { + // RequestPort may not exist in the current workflow definition (e.g., during rolling deployments). + continue; + } + + hasNewEvents = true; + yield return new DurableWorkflowWaitingForInputEvent( + pending.Input, + matchingPort); + } + } + + // Sync tracking with current pending events so re-used RequestPort names can be yielded again + if (liveStatus.PendingEvents.Count == 0) + { + yieldedPendingEvents.Clear(); + } + else + { + yieldedPendingEvents.IntersectWith(liveStatus.PendingEvents.Select(p => p.EventName)); + } + } + } + + // Check terminal states after draining events from the durable workflow status + if (metadata.RuntimeStatus == OrchestrationRuntimeStatus.Completed) + { + // The framework clears the durable workflow status on completion, so events may be in + // SerializedOutput as a DurableWorkflowResult wrapper. 
+ if (TryParseWorkflowResult(metadata.SerializedOutput, out DurableWorkflowResult? outputResult)) + { + (List events, _) = DrainNewEvents(outputResult.Events, lastReadEventIndex); + foreach (WorkflowEvent evt in events) + { + yield return evt; + } + + yield return new DurableWorkflowCompletedEvent(outputResult.Result); + } + else + { + // The runner always wraps output in DurableWorkflowResult, so a parse + // failure here indicates a bug. Yield a failed event so the consumer + // gets a visible, handleable signal without crashing. + yield return new DurableWorkflowFailedEvent( + $"Workflow '{this.WorkflowName}' (RunId: {this.RunId}) completed but its output could not be parsed as DurableWorkflowResult."); + } + + yield break; + } + + if (metadata.RuntimeStatus == OrchestrationRuntimeStatus.Failed) + { + string errorMessage = metadata.FailureDetails?.ErrorMessage ?? "Workflow execution failed."; + yield return new DurableWorkflowFailedEvent(errorMessage, metadata.FailureDetails); + yield break; + } + + if (metadata.RuntimeStatus == OrchestrationRuntimeStatus.Terminated) + { + yield return new DurableWorkflowFailedEvent("Workflow was terminated."); + yield break; + } + + // Adaptive backoff: reset to minimum when events were found, increase otherwise + currentInterval = hasNewEvents + ? minInterval + : TimeSpan.FromMilliseconds(Math.Min(currentInterval.TotalMilliseconds * 2, maxInterval.TotalMilliseconds)); + + try + { + await Task.Delay(currentInterval, cancellationToken).ConfigureAwait(false); + } + catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested) + { + yield break; + } + } + } + + /// + /// Sends a response to a to resume the workflow. + /// + /// The type of the response data. + /// The request event to respond to. + /// The response data to send. + /// A cancellation token to observe. + /// A representing the asynchronous operation. 
+ [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Serializing workflow types provided by the caller.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Serializing workflow types provided by the caller.")] + public async ValueTask SendResponseAsync(DurableWorkflowWaitingForInputEvent requestEvent, TResponse response, CancellationToken cancellationToken = default) + { + ArgumentNullException.ThrowIfNull(requestEvent); + + string serializedResponse = JsonSerializer.Serialize(response, DurableSerialization.Options); + await this._client.RaiseEventAsync( + this.RunId, + requestEvent.RequestPort.Id, + serializedResponse, + cancellationToken).ConfigureAwait(false); + } + + /// + /// Waits for the workflow to complete and returns the result. + /// + /// The expected result type. + /// A cancellation token to observe. + /// The result of the workflow execution. + /// Thrown when the workflow failed. + /// Thrown when the workflow was terminated or ended with an unexpected status. 
+ public async ValueTask WaitForCompletionAsync(CancellationToken cancellationToken = default) + { + OrchestrationMetadata metadata = await this._client.WaitForInstanceCompletionAsync( + this.RunId, + getInputsAndOutputs: true, + cancellation: cancellationToken).ConfigureAwait(false); + + if (metadata.RuntimeStatus == OrchestrationRuntimeStatus.Completed) + { + return ExtractResult(metadata.SerializedOutput); + } + + if (metadata.RuntimeStatus == OrchestrationRuntimeStatus.Failed) + { + if (metadata.FailureDetails is not null) + { + throw new TaskFailedException( + taskName: this.WorkflowName, + taskId: -1, + failureDetails: metadata.FailureDetails); + } + + throw new InvalidOperationException( + $"Workflow '{this.WorkflowName}' (RunId: {this.RunId}) failed without failure details."); + } + + throw new InvalidOperationException( + $"Workflow '{this.WorkflowName}' (RunId: {this.RunId}) ended with unexpected status: {metadata.RuntimeStatus}"); + } + + /// + /// Deserializes and returns any events beyond from the list. + /// + private static (List Events, int UpdatedIndex) DrainNewEvents(List serializedEvents, int lastReadIndex) + { + List events = []; + while (lastReadIndex < serializedEvents.Count) + { + string serializedEvent = serializedEvents[lastReadIndex]; + lastReadIndex++; + + WorkflowEvent? workflowEvent = TryDeserializeEvent(serializedEvent); + if (workflowEvent is not null) + { + events.Add(workflowEvent); + } + } + + return (events, lastReadIndex); + } + + /// + /// Attempts to parse the orchestration output as a wrapper. + /// + /// + /// The orchestration returns a object directly. + /// The Durable Task framework's DataConverter serializes it as a JSON object + /// in SerializedOutput, so we deserialize it directly. 
+ /// + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing workflow result wrapper.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing workflow result wrapper.")] + private static bool TryParseWorkflowResult(string? serializedOutput, [NotNullWhen(true)] out DurableWorkflowResult? result) + { + if (serializedOutput is null) + { + result = default!; + return false; + } + + try + { + result = JsonSerializer.Deserialize(serializedOutput, DurableWorkflowJsonContext.Default.DurableWorkflowResult)!; + return result is not null; + } + catch (JsonException) + { + result = default!; + return false; + } + } + + /// + /// Extracts a typed result from the orchestration output by unwrapping the + /// wrapper. + /// + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing workflow result.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing workflow result.")] + internal static TResult? ExtractResult(string? serializedOutput) + { + if (serializedOutput is null) + { + return default; + } + + if (!TryParseWorkflowResult(serializedOutput, out DurableWorkflowResult? workflowResult)) + { + throw new InvalidOperationException( + "Failed to parse orchestration output as DurableWorkflowResult. " + + "The orchestration runner should always wrap output in this format."); + } + + string? 
resultJson = workflowResult.Result; + + if (resultJson is null) + { + return default; + } + + if (typeof(TResult) == typeof(string)) + { + return (TResult)(object)resultJson; + } + + return JsonSerializer.Deserialize(resultJson, DurableSerialization.Options); + } + + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing workflow event types.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing workflow event types.")] + [UnconditionalSuppressMessage("Trimming", "IL2057", Justification = "Event types are registered at startup.")] + private static WorkflowEvent? TryDeserializeEvent(string serializedEvent) + { + try + { + TypedPayload? wrapper = JsonSerializer.Deserialize( + serializedEvent, + DurableWorkflowJsonContext.Default.TypedPayload); + + if (wrapper?.TypeName is not null && wrapper.Data is not null) + { + Type? eventType = Type.GetType(wrapper.TypeName); + if (eventType is not null) + { + return DeserializeEventByType(eventType, wrapper.Data); + } + } + + return null; + } + catch (JsonException) + { + return null; + } + } + + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing workflow event types.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing workflow event types.")] + private static WorkflowEvent? DeserializeEventByType(Type eventType, string json) + { + // Types with internal constructors need manual deserialization + if (eventType == typeof(ExecutorInvokedEvent) + || eventType == typeof(ExecutorCompletedEvent) + || eventType == typeof(WorkflowOutputEvent)) + { + using JsonDocument doc = JsonDocument.Parse(json); + JsonElement root = doc.RootElement; + + if (eventType == typeof(ExecutorInvokedEvent)) + { + string executorId = root.GetProperty("executorId").GetString() ?? string.Empty; + JsonElement? 
data = GetDataProperty(root); + return new ExecutorInvokedEvent(executorId, data!); + } + + if (eventType == typeof(ExecutorCompletedEvent)) + { + string executorId = root.GetProperty("executorId").GetString() ?? string.Empty; + JsonElement? data = GetDataProperty(root); + return new ExecutorCompletedEvent(executorId, data); + } + + // WorkflowOutputEvent + string sourceId = root.GetProperty("sourceId").GetString() ?? string.Empty; + object? outputData = GetDataProperty(root); + return new WorkflowOutputEvent(outputData!, sourceId); + } + + return JsonSerializer.Deserialize(json, eventType, DurableSerialization.Options) as WorkflowEvent; + } + + private static JsonElement? GetDataProperty(JsonElement root) + { + if (!root.TryGetProperty("data", out JsonElement dataElement)) + { + return null; + } + + return dataElement.ValueKind == JsonValueKind.Null ? null : dataElement.Clone(); + } + + private static Dictionary ExtractRequestPorts(Workflow workflow) + { + return WorkflowAnalyzer.GetExecutorsFromWorkflowInOrder(workflow) + .Where(e => e.RequestPort is not null) + .ToDictionary(e => e.RequestPort!.Id, e => e.RequestPort!); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowClient.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowClient.cs new file mode 100644 index 0000000000..5944d578ef --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowClient.cs @@ -0,0 +1,95 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask; +using Microsoft.DurableTask.Client; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Provides a durable task-based implementation of for running +/// workflows as durable orchestrations. +/// +internal sealed class DurableWorkflowClient : IWorkflowClient +{ + private readonly DurableTaskClient _client; + + /// + /// Initializes a new instance of the class. 
+ /// + /// The durable task client for orchestration operations. + /// Thrown when is null. + public DurableWorkflowClient(DurableTaskClient client) + { + ArgumentNullException.ThrowIfNull(client); + this._client = client; + } + + /// + public async ValueTask RunAsync( + Workflow workflow, + TInput input, + string? runId = null, + CancellationToken cancellationToken = default) + where TInput : notnull + { + ArgumentNullException.ThrowIfNull(workflow); + + if (string.IsNullOrEmpty(workflow.Name)) + { + throw new ArgumentException("Workflow must have a valid Name property.", nameof(workflow)); + } + + DurableWorkflowInput workflowInput = new() { Input = input }; + + string instanceId = await this._client.ScheduleNewOrchestrationInstanceAsync( + orchestratorName: WorkflowNamingHelper.ToOrchestrationFunctionName(workflow.Name), + input: workflowInput, + options: runId is not null ? new StartOrchestrationOptions(runId) : null, + cancellation: cancellationToken).ConfigureAwait(false); + + return new DurableWorkflowRun(this._client, instanceId, workflow.Name); + } + + /// + public ValueTask RunAsync( + Workflow workflow, + string input, + string? runId = null, + CancellationToken cancellationToken = default) + => this.RunAsync(workflow, input, runId, cancellationToken); + + /// + public async ValueTask StreamAsync( + Workflow workflow, + TInput input, + string? runId = null, + CancellationToken cancellationToken = default) + where TInput : notnull + { + ArgumentNullException.ThrowIfNull(workflow); + + if (string.IsNullOrEmpty(workflow.Name)) + { + throw new ArgumentException("Workflow must have a valid Name property.", nameof(workflow)); + } + + DurableWorkflowInput workflowInput = new() { Input = input }; + + string instanceId = await this._client.ScheduleNewOrchestrationInstanceAsync( + orchestratorName: WorkflowNamingHelper.ToOrchestrationFunctionName(workflow.Name), + input: workflowInput, + options: runId is not null ? 
new StartOrchestrationOptions(runId) : null, + cancellation: cancellationToken).ConfigureAwait(false); + + return new DurableStreamingWorkflowRun(this._client, instanceId, workflow); + } + + /// + public ValueTask StreamAsync( + Workflow workflow, + string input, + string? runId = null, + CancellationToken cancellationToken = default) + => this.StreamAsync(workflow, input, runId, cancellationToken); +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowCompletedEvent.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowCompletedEvent.cs new file mode 100644 index 0000000000..a4de6d1d50 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowCompletedEvent.cs @@ -0,0 +1,27 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Event raised when a durable workflow completes successfully. +/// +[DebuggerDisplay("Completed: {Result}")] +public sealed class DurableWorkflowCompletedEvent : WorkflowEvent +{ + /// + /// Initializes a new instance of the class. + /// + /// The serialized result of the workflow. + public DurableWorkflowCompletedEvent(string? result) : base(result) + { + this.Result = result; + } + + /// + /// Gets the serialized result of the workflow. + /// + public string? Result { get; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowContext.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowContext.cs new file mode 100644 index 0000000000..5f98f5dc59 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowContext.cs @@ -0,0 +1,327 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// A workflow context for durable workflow execution. +/// +/// +/// State is passed in from the orchestration and updates are collected for return. +/// Events emitted during execution are collected and returned to the orchestration +/// as part of the activity output for streaming to callers. +/// +[DebuggerDisplay("Executor = {_executor.Id}, StateEntries = {_initialState.Count}")] +internal sealed class DurableWorkflowContext : IWorkflowContext +{ + /// + /// The default scope name used when no explicit scope is specified. + /// Scopes partition shared state into logical namespaces so that different + /// parts of a workflow can manage their state keys independently. + /// + private const string DefaultScopeName = "__default__"; + + private readonly Dictionary _initialState; + private readonly Executor _executor; + + /// + /// Initializes a new instance of the class. + /// + /// The shared state passed from the orchestration. + /// The executor running in this context. + internal DurableWorkflowContext(Dictionary? initialState, Executor executor) + { + this._executor = executor; + this._initialState = initialState ?? []; + } + + /// + /// Gets the messages sent during activity execution via . + /// + internal List SentMessages { get; } = []; + + /// + /// Gets the outbound events that were added during activity execution. + /// + internal List OutboundEvents { get; } = []; + + /// + /// Gets the state updates made during activity execution. + /// + internal Dictionary StateUpdates { get; } = []; + + /// + /// Gets the scopes that were cleared during activity execution. + /// + internal HashSet ClearedScopes { get; } = []; + + /// + /// Gets a value indicating whether the executor requested a workflow halt. 
+ /// + internal bool HaltRequested { get; private set; } + + /// + public ValueTask AddEventAsync( + WorkflowEvent workflowEvent, + CancellationToken cancellationToken = default) + { + if (workflowEvent is not null) + { + this.OutboundEvents.Add(workflowEvent); + } + + return default; + } + + /// + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Serializing workflow message types registered at startup.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Serializing workflow message types registered at startup.")] + public ValueTask SendMessageAsync( + object message, + string? targetId = null, + CancellationToken cancellationToken = default) + { + if (message is not null) + { + Type messageType = message.GetType(); + this.SentMessages.Add(new TypedPayload + { + Data = JsonSerializer.Serialize(message, messageType, DurableSerialization.Options), + TypeName = messageType.AssemblyQualifiedName + }); + } + + return default; + } + + /// + public ValueTask YieldOutputAsync( + object output, + CancellationToken cancellationToken = default) + { + if (output is not null) + { + Type outputType = output.GetType(); + if (!this._executor.CanOutput(outputType)) + { + throw new InvalidOperationException( + $"Cannot output object of type {outputType.Name}. " + + $"Expecting one of [{string.Join(", ", this._executor.OutputTypes)}]."); + } + + this.OutboundEvents.Add(new WorkflowOutputEvent(output, this._executor.Id)); + } + + return default; + } + + /// + public ValueTask RequestHaltAsync() + { + this.HaltRequested = true; + this.OutboundEvents.Add(new DurableHaltRequestedEvent(this._executor.Id)); + return default; + } + + /// + public ValueTask ReadStateAsync( + string key, + string? scopeName = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrEmpty(key); + + string scopeKey = GetScopeKey(scopeName, key); + string normalizedScope = scopeName ?? 
DefaultScopeName; + bool scopeCleared = this.ClearedScopes.Contains(normalizedScope); + + // Local updates take priority over initial state. + if (this.StateUpdates.TryGetValue(scopeKey, out string? updated)) + { + return DeserializeStateAsync(updated); + } + + // If scope was cleared, ignore initial state + if (scopeCleared) + { + return ValueTask.FromResult(default); + } + + // Fall back to initial state passed from orchestration + if (this._initialState.TryGetValue(scopeKey, out string? initial)) + { + return DeserializeStateAsync(initial); + } + + return ValueTask.FromResult(default); + } + + /// + public async ValueTask ReadOrInitStateAsync( + string key, + Func initialStateFactory, + string? scopeName = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrEmpty(key); + ArgumentNullException.ThrowIfNull(initialStateFactory); + + // Cannot rely on `value is not null` because T? on an unconstrained generic + // parameter does not become Nullable for value types — the null check is + // always true for types like int. Instead, check key existence directly. + if (this.HasStateKey(key, scopeName)) + { + T? value = await this.ReadStateAsync(key, scopeName, cancellationToken).ConfigureAwait(false); + if (value is not null) + { + return value; + } + } + + T initialValue = initialStateFactory(); + await this.QueueStateUpdateAsync(key, initialValue, scopeName, cancellationToken).ConfigureAwait(false); + return initialValue; + } + + /// + public ValueTask> ReadStateKeysAsync( + string? scopeName = null, + CancellationToken cancellationToken = default) + { + string scopePrefix = GetScopePrefix(scopeName); + int scopePrefixLength = scopePrefix.Length; + HashSet keys = new(StringComparer.Ordinal); + + bool scopeCleared = scopeName is null + ? 
this.ClearedScopes.Contains(DefaultScopeName) + : this.ClearedScopes.Contains(scopeName); + + // Start with keys from initial state (skip if scope was cleared) + if (!scopeCleared) + { + foreach (string stateKey in this._initialState.Keys) + { + if (stateKey.StartsWith(scopePrefix, StringComparison.Ordinal)) + { + keys.Add(stateKey[scopePrefixLength..]); + } + } + } + + // Merge local updates: add if non-null, remove if null (deleted) + foreach (KeyValuePair update in this.StateUpdates) + { + if (!update.Key.StartsWith(scopePrefix, StringComparison.Ordinal)) + { + continue; + } + + string key = update.Key[scopePrefixLength..]; + if (update.Value is not null) + { + keys.Add(key); + } + else + { + keys.Remove(key); + } + } + + return ValueTask.FromResult(keys); + } + + /// + public ValueTask QueueStateUpdateAsync( + string key, + T? value, + string? scopeName = null, + CancellationToken cancellationToken = default) + { + ArgumentException.ThrowIfNullOrEmpty(key); + + string scopeKey = GetScopeKey(scopeName, key); + this.StateUpdates[scopeKey] = value is null ? null : SerializeState(value); + return default; + } + + /// + public ValueTask QueueClearScopeAsync( + string? scopeName = null, + CancellationToken cancellationToken = default) + { + this.ClearedScopes.Add(scopeName ?? DefaultScopeName); + + // Remove any pending updates in this scope (snapshot keys to allow removal during iteration) + string scopePrefix = GetScopePrefix(scopeName); + foreach (string key in this.StateUpdates.Keys.ToList()) + { + if (key.StartsWith(scopePrefix, StringComparison.Ordinal)) + { + this.StateUpdates.Remove(key); + } + } + + return default; + } + + /// + public IReadOnlyDictionary? TraceContext => null; + + /// + public bool ConcurrentRunsEnabled => false; + + private static string GetScopeKey(string? scopeName, string key) + => $"{GetScopePrefix(scopeName)}{key}"; + + /// + /// Checks whether the given key exists in local updates or initial state, + /// respecting cleared scopes. 
+ /// + private bool HasStateKey(string key, string? scopeName) + { + string scopeKey = GetScopeKey(scopeName, key); + + if (this.StateUpdates.TryGetValue(scopeKey, out string? updated)) + { + return updated is not null; + } + + string normalizedScope = scopeName ?? DefaultScopeName; + if (this.ClearedScopes.Contains(normalizedScope)) + { + return false; + } + + return this._initialState.ContainsKey(scopeKey); + } + + /// + /// Returns the key prefix for the given scope. Scopes partition shared state + /// into logical namespaces, allowing different workflow executors to manage + /// their state keys independently. When no scope is specified, the + /// is used. + /// + private static string GetScopePrefix(string? scopeName) + => scopeName is null ? $"{DefaultScopeName}:" : $"{scopeName}:"; + + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Serializing workflow state types.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Serializing workflow state types.")] + private static string SerializeState(T value) + => JsonSerializer.Serialize(value, DurableSerialization.Options); + + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing workflow state types.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing workflow state types.")] + private static ValueTask DeserializeStateAsync(string? json) + { + if (json is null) + { + return ValueTask.FromResult(default); + } + + return ValueTask.FromResult(JsonSerializer.Deserialize(json, DurableSerialization.Options)); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowFailedEvent.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowFailedEvent.cs new file mode 100644 index 0000000000..4f1e411be6 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowFailedEvent.cs @@ -0,0 +1,35 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using System.Diagnostics; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Event raised when a durable workflow fails. +/// +[DebuggerDisplay("Failed: {ErrorMessage}")] +public sealed class DurableWorkflowFailedEvent : WorkflowEvent +{ + /// + /// Initializes a new instance of the class. + /// + /// The error message describing the failure. + /// The full failure details from the Durable Task runtime, if available. + public DurableWorkflowFailedEvent(string errorMessage, TaskFailureDetails? failureDetails = null) : base(errorMessage) + { + this.ErrorMessage = errorMessage; + this.FailureDetails = failureDetails; + } + + /// + /// Gets the error message describing the failure. + /// + public string ErrorMessage { get; } + + /// + /// Gets the full failure details from the Durable Task runtime, including error type, stack trace, and inner failure. + /// + public TaskFailureDetails? FailureDetails { get; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowInput.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowInput.cs new file mode 100644 index 0000000000..bd6f42f501 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowInput.cs @@ -0,0 +1,16 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents the input envelope for a durable workflow orchestration. +/// +/// The type of the workflow input. +internal sealed class DurableWorkflowInput + where TInput : notnull +{ + /// + /// Gets the workflow input data. 
+ /// + public required TInput Input { get; init; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowJsonContext.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowJsonContext.cs new file mode 100644 index 0000000000..12f4c490b9 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowJsonContext.cs @@ -0,0 +1,41 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Text.Json.Serialization; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Source-generated JSON serialization context for durable workflow types. +/// +/// +/// +/// This context provides AOT-compatible and trimmer-safe JSON serialization for the +/// internal data transfer types used by the durable workflow infrastructure: +/// +/// +/// : Activity input wrapper with state +/// : Executor output wrapper with results, events, and state updates +/// : Serialized payload wrapper with type info (events and messages) +/// : Live status payload (streaming events and pending request ports) +/// +/// +/// Note: User-defined executor input/output types still use reflection-based serialization +/// since their types are not known at compile time. 
+/// +/// +[JsonSourceGenerationOptions( + WriteIndented = false, + DefaultIgnoreCondition = JsonIgnoreCondition.WhenWritingNull, + PropertyNamingPolicy = JsonKnownNamingPolicy.CamelCase)] +[JsonSerializable(typeof(DurableActivityInput))] +[JsonSerializable(typeof(DurableExecutorOutput))] +[JsonSerializable(typeof(TypedPayload))] +[JsonSerializable(typeof(List))] +[JsonSerializable(typeof(DurableWorkflowLiveStatus))] +[JsonSerializable(typeof(DurableWorkflowResult))] +[JsonSerializable(typeof(PendingRequestPortStatus))] +[JsonSerializable(typeof(List))] +[JsonSerializable(typeof(List))] +[JsonSerializable(typeof(Dictionary))] +[JsonSerializable(typeof(Dictionary))] +internal partial class DurableWorkflowJsonContext : JsonSerializerContext; diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowLiveStatus.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowLiveStatus.cs new file mode 100644 index 0000000000..5e381ce0eb --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowLiveStatus.cs @@ -0,0 +1,59 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Live status payload written to the orchestration via SetCustomStatus. +/// +/// +/// +/// This is the only orchestration state readable by external clients while the workflow +/// is still running. It is written after each superstep so that +/// can poll for new events. +/// On completion the framework clears it, so events are also +/// embedded in the output via . +/// +/// +/// When the workflow is paused at one or more nodes, +/// contains the request data for each. +/// +/// +internal sealed class DurableWorkflowLiveStatus +{ + /// + /// Gets or sets the pending request ports the workflow is waiting on. Empty when no input is needed. 
+ /// + public List PendingEvents { get; set; } = []; + + /// + /// Gets or sets the serialized workflow events emitted so far. + /// + public List Events { get; set; } = []; + + /// + /// Attempts to deserialize a serialized custom status string into a . + /// + [System.Diagnostics.CodeAnalysis.UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing durable workflow status.")] + [System.Diagnostics.CodeAnalysis.UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing durable workflow status.")] + internal static bool TryParse(string? serializedStatus, out DurableWorkflowLiveStatus result) + { + if (serializedStatus is null) + { + result = default!; + return false; + } + + try + { + result = System.Text.Json.JsonSerializer.Deserialize(serializedStatus, DurableSerialization.Options)!; + return result is not null; + } + catch (System.Text.Json.JsonException) + { + result = default!; + return false; + } + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowOptions.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowOptions.cs new file mode 100644 index 0000000000..67a21c9100 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowOptions.cs @@ -0,0 +1,111 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Provides configuration options for managing durable workflows within an application. +/// +[DebuggerDisplay("Workflows = {Workflows.Count}")] +public sealed class DurableWorkflowOptions +{ + private readonly Dictionary _workflows = new(StringComparer.OrdinalIgnoreCase); + + /// + /// Initializes a new instance of the class. + /// + /// Optional parent options container for accessing related configuration. + internal DurableWorkflowOptions(DurableOptions? 
parentOptions = null) + { + this.ParentOptions = parentOptions; + } + + /// + /// Gets the parent container, if available. + /// + internal DurableOptions? ParentOptions { get; } + + /// + /// Gets the collection of workflows available in the current context, keyed by their unique names. + /// + public IReadOnlyDictionary Workflows => this._workflows; + + /// + /// Gets the executor registry for direct executor lookup. + /// + internal ExecutorRegistry Executors { get; } = new(); + + /// + /// Adds a workflow to the collection for processing or execution. + /// + /// The workflow instance to add. Cannot be null. + /// + /// When a workflow is added, all executors are registered in the executor registry. + /// Any AI agent executors will also be automatically registered with the + /// if available. + /// + /// Thrown when is null. + /// Thrown when the workflow does not have a valid name. + public void AddWorkflow(Workflow workflow) + { + ArgumentNullException.ThrowIfNull(workflow); + + if (string.IsNullOrEmpty(workflow.Name)) + { + throw new ArgumentException("Workflow must have a valid Name property.", nameof(workflow)); + } + + this._workflows[workflow.Name] = workflow; + this.RegisterWorkflowExecutors(workflow); + } + + /// + /// Adds a collection of workflows to the current instance. + /// + /// The collection of objects to add. + /// Thrown when is null. + public void AddWorkflows(params Workflow[] workflows) + { + ArgumentNullException.ThrowIfNull(workflows); + + foreach (Workflow workflow in workflows) + { + this.AddWorkflow(workflow); + } + } + + /// + /// Registers all executors from a workflow, including AI agents if agent options are available. + /// + private void RegisterWorkflowExecutors(Workflow workflow) + { + DurableAgentsOptions? 
agentOptions = this.ParentOptions?.Agents; + + foreach ((string executorId, ExecutorBinding binding) in workflow.ReflectExecutors()) + { + string executorName = WorkflowNamingHelper.GetExecutorName(executorId); + this.Executors.Register(executorName, executorId, workflow); + + TryRegisterAgent(binding, agentOptions); + } + } + + /// + /// Registers an AI agent with the agent options if the binding contains an unregistered agent. + /// + private static void TryRegisterAgent(ExecutorBinding binding, DurableAgentsOptions? agentOptions) + { + if (agentOptions is null) + { + return; + } + + if (binding.RawValue is AIAgent { Name: not null } agent + && !agentOptions.ContainsAgent(agent.Name)) + { + agentOptions.AddAIAgent(agent); + } + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowResult.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowResult.cs new file mode 100644 index 0000000000..7f63232185 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowResult.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Wraps the orchestration output to include both the workflow result and accumulated events. +/// +/// +/// The Durable Task framework clears SerializedCustomStatus when an orchestration +/// completes. To ensure streaming clients can retrieve events even after completion, +/// the accumulated events are embedded in the orchestration output alongside the result. +/// +internal sealed class DurableWorkflowResult +{ + /// + /// Gets or sets the serialized result of the workflow execution. + /// + public string? Result { get; set; } + + /// + /// Gets or sets the serialized workflow events emitted during execution. + /// + public List Events { get; set; } = []; + + /// + /// Gets or sets the typed messages to forward to connected executors in the parent workflow. 
+ /// + /// + /// When this workflow runs as a sub-orchestration, these messages are propagated to the + /// parent workflow and routed to successor executors via the edge map. + /// + public List SentMessages { get; set; } = []; + + /// + /// Gets or sets a value indicating whether the workflow was halted by an executor. + /// + /// + /// When this workflow runs as a sub-orchestration, this flag is propagated to the + /// parent workflow so halt semantics are preserved across nesting levels. + /// + public bool HaltRequested { get; set; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowRun.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowRun.cs new file mode 100644 index 0000000000..aeb42f4fb6 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowRun.cs @@ -0,0 +1,116 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask; +using Microsoft.DurableTask.Client; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents a durable workflow run that tracks execution status and provides access to workflow events. +/// +[DebuggerDisplay("{WorkflowName} ({RunId})")] +internal sealed class DurableWorkflowRun : IAwaitableWorkflowRun +{ + private readonly DurableTaskClient _client; + private readonly List _eventSink = []; + private int _lastBookmark; + + /// + /// Initializes a new instance of the class. + /// + /// The durable task client for orchestration operations. + /// The unique instance ID for this orchestration run. + /// The name of the workflow being executed. + internal DurableWorkflowRun(DurableTaskClient client, string instanceId, string workflowName) + { + this._client = client; + this.RunId = instanceId; + this.WorkflowName = workflowName; + } + + /// + public string RunId { get; } + + /// + /// Gets the name of the workflow being executed. 
+ /// + public string WorkflowName { get; } + + /// + /// Waits for the workflow to complete and returns the result. + /// + /// The expected result type. + /// A cancellation token to observe. + /// The result of the workflow execution. + /// Thrown when the workflow failed. + /// Thrown when the workflow was terminated or ended with an unexpected status. + public async ValueTask WaitForCompletionAsync(CancellationToken cancellationToken = default) + { + OrchestrationMetadata metadata = await this._client.WaitForInstanceCompletionAsync( + this.RunId, + getInputsAndOutputs: true, + cancellation: cancellationToken).ConfigureAwait(false); + + if (metadata.RuntimeStatus == OrchestrationRuntimeStatus.Completed) + { + return DurableStreamingWorkflowRun.ExtractResult(metadata.SerializedOutput); + } + + if (metadata.RuntimeStatus == OrchestrationRuntimeStatus.Failed) + { + if (metadata.FailureDetails is not null) + { + // Use TaskFailedException to preserve full failure details including stack trace and inner exceptions + throw new TaskFailedException( + taskName: this.WorkflowName, + taskId: 0, + failureDetails: metadata.FailureDetails); + } + + throw new InvalidOperationException( + $"Workflow '{this.WorkflowName}' (RunId: {this.RunId}) failed without failure details."); + } + + throw new InvalidOperationException( + $"Workflow '{this.WorkflowName}' (RunId: {this.RunId}) ended with unexpected status: {metadata.RuntimeStatus}"); + } + + /// + /// Waits for the workflow to complete and returns the string result. + /// + /// A cancellation token to observe. + /// The string result of the workflow execution. + public ValueTask WaitForCompletionAsync(CancellationToken cancellationToken = default) + => this.WaitForCompletionAsync(cancellationToken); + + /// + /// Gets all events that have been collected from the workflow. + /// + public IEnumerable OutgoingEvents => this._eventSink; + + /// + /// Gets the number of events collected since the last access to . 
+ /// + public int NewEventCount => this._eventSink.Count - this._lastBookmark; + + /// + /// Gets all events collected since the last access to . + /// + public IEnumerable NewEvents + { + get + { + if (this._lastBookmark >= this._eventSink.Count) + { + return []; + } + + int currentBookmark = this._lastBookmark; + this._lastBookmark = this._eventSink.Count; + + return this._eventSink.Skip(currentBookmark); + } + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowRunner.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowRunner.cs new file mode 100644 index 0000000000..b458bf98b0 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowRunner.cs @@ -0,0 +1,619 @@ +// Copyright (c) Microsoft. All rights reserved. + +// ConfigureAwait Usage in Orchestration Code: +// This file uses ConfigureAwait(true) because it runs within orchestration context. +// Durable Task orchestrations require deterministic replay - the same code must execute +// identically across replays. ConfigureAwait(true) ensures continuations run on the +// orchestration's synchronization context, which is essential for replay correctness. +// Using ConfigureAwait(false) here could cause non-deterministic behavior during replay. 
+ +// Superstep execution walkthrough for a workflow like below: +// +// [A] ──► [B] ──► [C] ──► [E] (B→D has condition: x => x.NeedsReview) +// │ ▲ +// └──► [D] ──────┘ +// +// Superstep 1 — A runs +// Queues before: A:[input] Results: {} +// Dispatch: A executes, returns resultA +// Route: EdgeMap routes A's output → B's queue +// Queues after: B:[resultA] Results: {A: resultA} +// +// Superstep 2 — B runs +// Queues before: B:[resultA] Results: {A: resultA} +// Dispatch: B executes, returns resultB (type: Order) +// Route: FanOutRouter sends resultB to: +// C's queue (unconditional) +// D's queue (only if resultB.NeedsReview == true) +// Queues after: C:[resultB], D:[resultB] Results: {A: .., B: resultB} +// (D may be empty if condition was false) +// +// Superstep 3 — C and D run in parallel +// Queues before: C:[resultB], D:[resultB] +// Dispatch: C and D execute concurrently via Task.WhenAll +// Route: Both route output → E's queue +// Queues after: E:[resultC, resultD] Results: {.., C: resultC, D: resultD} +// +// Superstep 4 — E runs (fan-in) +// Queues before: E:[resultC, resultD] ◄── IsFanInExecutor("E") = true +// Collect: AggregateQueueMessages merges into JSON array ["resultC","resultD"] +// Dispatch: E executes with aggregated input +// Route: E has no successors → nothing enqueued +// Queues after: (all empty) Results: {.., E: resultE} +// +// Superstep 5 — loop exits (no pending messages) +// GetFinalResult returns resultE + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using Microsoft.Agents.AI.DurableTask.Workflows.EdgeRouters; +using Microsoft.Agents.AI.Workflows; +using Microsoft.DurableTask; +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +// Superstep loop: +// +// ┌───────────────┐ ┌───────────────┐ ┌───────────────────┐ +// │ Collect │───►│ Dispatch │───►│ Process Results │ +// │ Executor │ │ Executors │ │ & Route Messages │ +// │ Inputs │ │ in Parallel │ │ │ +// 
└───────────────┘ └───────────────┘ └───────────────────┘ +// ▲ │ +// └───────────────────────────────────────────┘ +// (repeat until no pending messages) + +/// +/// Runs workflow orchestrations using message-driven superstep execution with Durable Task. +/// +internal sealed class DurableWorkflowRunner +{ + private const int MaxSupersteps = 100; + + /// + /// Initializes a new instance of the class. + /// + /// The durable options containing workflow configurations. + public DurableWorkflowRunner(DurableOptions durableOptions) + { + ArgumentNullException.ThrowIfNull(durableOptions); + + this.Options = durableOptions.Workflows; + } + + /// + /// Gets the workflow options. + /// + private DurableWorkflowOptions Options { get; } + + /// + /// Runs a workflow orchestration. + /// + /// The task orchestration context. + /// The workflow input envelope containing workflow input and metadata. + /// The replay-safe logger for orchestration logging. + /// The result of the workflow execution. + /// Thrown when the specified workflow is not found. 
+ internal async Task RunWorkflowOrchestrationAsync( + TaskOrchestrationContext context, + DurableWorkflowInput workflowInput, + ILogger logger) + { + ArgumentNullException.ThrowIfNull(context); + ArgumentNullException.ThrowIfNull(workflowInput); + + Workflow workflow = this.GetWorkflowOrThrow(context.Name); + + string workflowName = context.Name; + string instanceId = context.InstanceId; + logger.LogWorkflowStarting(workflowName, instanceId); + + WorkflowGraphInfo graphInfo = WorkflowAnalyzer.BuildGraphInfo(workflow); + DurableEdgeMap edgeMap = new(graphInfo); + + // Extract input - the start executor determines the expected input type from its own InputTypes + object input = workflowInput.Input; + + return await RunSuperstepLoopAsync(context, workflow, edgeMap, input, logger).ConfigureAwait(true); + } + + private Workflow GetWorkflowOrThrow(string orchestrationName) + { + string workflowName = WorkflowNamingHelper.ToWorkflowName(orchestrationName); + + if (!this.Options.Workflows.TryGetValue(workflowName, out Workflow? workflow)) + { + throw new InvalidOperationException($"Workflow '{workflowName}' not found."); + } + + return workflow; + } + + /// + /// Runs the workflow execution loop using superstep-based processing. + /// + [UnconditionalSuppressMessage("AOT", "IL2026:RequiresUnreferencedCode", Justification = "Input types are preserved by the Durable Task framework's DataConverter.")] + [UnconditionalSuppressMessage("AOT", "IL3050:RequiresDynamicCode", Justification = "Input types are preserved by the Durable Task framework's DataConverter.")] + private static async Task RunSuperstepLoopAsync( + TaskOrchestrationContext context, + Workflow workflow, + DurableEdgeMap edgeMap, + object initialInput, + ILogger logger) + { + SuperstepState state = new(workflow, edgeMap); + + // Convert input to string for the message queue. + // When DurableWorkflowInput is deserialized as DurableWorkflowInput, + // the Input property becomes a JsonElement instead of a string. 
+ // We must extract the raw string value to avoid double-serialization. + string inputString = initialInput switch + { + string s => s, + JsonElement je when je.ValueKind == JsonValueKind.String => je.GetString() ?? string.Empty, + _ => JsonSerializer.Serialize(initialInput) + }; + + edgeMap.EnqueueInitialInput(inputString, state.MessageQueues); + + bool haltRequested = false; + + for (int superstep = 1; superstep <= MaxSupersteps; superstep++) + { + List executorInputs = CollectExecutorInputs(state, logger); + if (executorInputs.Count == 0) + { + break; + } + + logger.LogSuperstepStarting(superstep, executorInputs.Count); + if (logger.IsEnabled(LogLevel.Debug)) + { + logger.LogSuperstepExecutors(superstep, string.Join(", ", executorInputs.Select(e => e.ExecutorId))); + } + + string[] results = await DispatchExecutorsInParallelAsync(context, executorInputs, state, logger).ConfigureAwait(true); + + haltRequested = ProcessSuperstepResults(executorInputs, results, state, context, logger); + + if (haltRequested) + { + break; + } + + // Check if we've reached the limit and still have work remaining + int remainingExecutors = CountRemainingExecutors(state.MessageQueues); + if (superstep == MaxSupersteps && remainingExecutors > 0) + { + logger.LogWorkflowMaxSuperstepsExceeded(context.InstanceId, MaxSupersteps, remainingExecutors); + } + } + + // Publish final events for live streaming (skip during replay) + if (!context.IsReplaying) + { + PublishEventsToLiveStatus(context, state); + } + + string finalResult = GetFinalResult(state.LastResults); + logger.LogWorkflowCompleted(); + + // Return wrapper with both result and events so streaming clients can + // retrieve events from SerializedOutput after the orchestration completes + // (SerializedCustomStatus is cleared by the framework on completion). + // SentMessages carries the final result so parent workflows can route it + // to connected executors, matching the in-process WorkflowHostExecutor behavior. 
+ return new DurableWorkflowResult + { + Result = finalResult, + Events = state.AccumulatedEvents, + SentMessages = !string.IsNullOrEmpty(finalResult) + ? [new TypedPayload { Data = finalResult }] + : [], + HaltRequested = haltRequested + }; + } + + /// + /// Counts the number of executors with pending messages in their queues. + /// + private static int CountRemainingExecutors(Dictionary> messageQueues) + { + return messageQueues.Count(kvp => kvp.Value.Count > 0); + } + + private static async Task DispatchExecutorsInParallelAsync( + TaskOrchestrationContext context, + List executorInputs, + SuperstepState state, + ILogger logger) + { + Task[] dispatchTasks = executorInputs + .Select(input => DurableExecutorDispatcher.DispatchAsync(context, input.Info, input.Envelope, state.SharedState, state.LiveStatus, logger)) + .ToArray(); + + return await Task.WhenAll(dispatchTasks).ConfigureAwait(true); + } + + /// + /// Holds state that accumulates and changes across superstep iterations during workflow execution. + /// + /// + /// + /// MessageQueues starts with one entry (the start executor's queue, seeded by + /// ). After each superstep, RouteOutputToSuccessors + /// adds entries for successor executors that receive routed messages. Queues are drained during + /// CollectExecutorInputs; empty queues are skipped. + /// + /// + /// LastResults is updated after every superstep with the result of each executor that ran. + /// At workflow completion, the last non-empty value is returned as the workflow's final result. 
+ /// + /// + private sealed class SuperstepState + { + public SuperstepState(Workflow workflow, DurableEdgeMap edgeMap) + { + this.EdgeMap = edgeMap; + this.ExecutorBindings = workflow.ReflectExecutors(); + } + + public DurableEdgeMap EdgeMap { get; } + + public Dictionary ExecutorBindings { get; } + + public Dictionary> MessageQueues { get; } = []; + + public Dictionary LastResults { get; } = []; + + /// + /// Shared state dictionary across supersteps (scope-prefixed key -> serialized value). + /// + public Dictionary SharedState { get; } = []; + + /// + /// Accumulated workflow events for the durable workflow status (streaming consumption). + /// + public List AccumulatedEvents { get; } = []; + + /// + /// Workflow status published via SetCustomStatus so external clients can poll for streaming events and pending HITL requests. + /// + public DurableWorkflowLiveStatus LiveStatus { get; } = new(); + } + + /// + /// Represents prepared input for an executor ready for dispatch. + /// + private sealed record ExecutorInput(string ExecutorId, DurableMessageEnvelope Envelope, WorkflowExecutorInfo Info); + + /// + /// Collects inputs for all active executors, applying Fan-In aggregation where needed. 
+ /// + private static List CollectExecutorInputs( + SuperstepState state, + ILogger logger) + { + List inputs = []; + + // Only process queues that have pending messages + foreach ((string executorId, Queue queue) in state.MessageQueues + .Where(kvp => kvp.Value.Count > 0)) + { + DurableMessageEnvelope envelope = GetNextEnvelope(executorId, queue, state.EdgeMap, logger); + WorkflowExecutorInfo executorInfo = CreateExecutorInfo(executorId, state.ExecutorBindings); + + inputs.Add(new ExecutorInput(executorId, envelope, executorInfo)); + } + + return inputs; + } + + private static DurableMessageEnvelope GetNextEnvelope( + string executorId, + Queue queue, + DurableEdgeMap edgeMap, + ILogger logger) + { + bool shouldAggregate = edgeMap.IsFanInExecutor(executorId) && queue.Count > 1; + + return shouldAggregate + ? AggregateQueueMessages(queue, executorId, logger) + : queue.Dequeue(); + } + + /// + /// Aggregates all messages in a queue into a JSON array for Fan-In executors. + /// + private static DurableMessageEnvelope AggregateQueueMessages( + Queue queue, + string executorId, + ILogger logger) + { + List messages = []; + List sourceIds = []; + + while (queue.Count > 0) + { + DurableMessageEnvelope envelope = queue.Dequeue(); + messages.Add(envelope.Message); + + if (envelope.SourceExecutorId is not null) + { + sourceIds.Add(envelope.SourceExecutorId); + } + } + + if (logger.IsEnabled(LogLevel.Debug)) + { + logger.LogFanInAggregated(executorId, messages.Count, string.Join(", ", sourceIds)); + } + + return new DurableMessageEnvelope + { + Message = SerializeToJsonArray(messages), + InputTypeName = typeof(string[]).FullName, + SourceExecutorId = sourceIds.Count > 0 ? string.Join(",", sourceIds) : null + }; + } + + /// + /// Processes results from a superstep, updating state and routing messages to successors. + /// + /// true if a halt was requested by any executor; otherwise, false. 
+    private static bool ProcessSuperstepResults(
+        List<ExecutorInput> inputs,
+        string[] rawResults,
+        SuperstepState state,
+        TaskOrchestrationContext context,
+        ILogger logger)
+    {
+        bool haltRequested = false;
+
+        for (int i = 0; i < inputs.Count; i++)
+        {
+            string executorId = inputs[i].ExecutorId;
+            ExecutorResultInfo resultInfo = ParseActivityResult(rawResults[i]);
+
+            logger.LogExecutorResultReceived(executorId, resultInfo.Result.Length, resultInfo.SentMessages.Count);
+
+            state.LastResults[executorId] = resultInfo.Result;
+
+            // Merge state updates from activity into shared state
+            MergeStateUpdates(state, resultInfo.StateUpdates, resultInfo.ClearedScopes);
+
+            // Accumulate events for the durable workflow status (streaming)
+            state.AccumulatedEvents.AddRange(resultInfo.Events);
+
+            // Check for halt request
+            haltRequested |= resultInfo.HaltRequested;
+
+            // Publish events for live streaming (skip during replay)
+            if (!context.IsReplaying)
+            {
+                PublishEventsToLiveStatus(context, state);
+            }
+
+            RouteOutputToSuccessors(executorId, resultInfo.Result, resultInfo.SentMessages, state, logger);
+        }
+
+        return haltRequested;
+    }
+
+    /// <summary>
+    /// Merges state updates from an executor into the shared state.
+    /// </summary>
+    /// <remarks>
+    /// When concurrent executors in the same superstep modify keys in the same scope,
+    /// last-write-wins semantics apply.
+    /// </remarks>
+    private static void MergeStateUpdates(
+        SuperstepState state,
+        Dictionary<string, string?> stateUpdates,
+        List<string> clearedScopes)
+    {
+        Dictionary<string, string> shared = state.SharedState;
+
+        ApplyClearedScopes(shared, clearedScopes);
+
+        // Apply individual state updates; a null value is a deletion marker.
+        foreach ((string key, string? value) in stateUpdates)
+        {
+            if (value is null)
+            {
+                shared.Remove(key);
+            }
+            else
+            {
+                shared[key] = value;
+            }
+        }
+    }
+
+    /// <summary>
+    /// Removes all keys belonging to the specified scopes from the shared state dictionary.
+    /// </summary>
+    private static void ApplyClearedScopes(Dictionary<string, string> shared, List<string> clearedScopes)
+    {
+        if (clearedScopes.Count == 0 || shared.Count == 0)
+        {
+            return;
+        }
+
+        List<string> keysToRemove = [];
+
+        foreach (string clearedScope in clearedScopes)
+        {
+            // Keys are scope-prefixed ("scope:key"); remove every key under this scope.
+            string scopePrefix = string.Concat(clearedScope, ":");
+            keysToRemove.Clear();
+
+            foreach (string key in shared.Keys)
+            {
+                if (key.StartsWith(scopePrefix, StringComparison.Ordinal))
+                {
+                    keysToRemove.Add(key);
+                }
+            }
+
+            foreach (string key in keysToRemove)
+            {
+                shared.Remove(key);
+            }
+
+            // Nothing left to clear — stop scanning remaining scopes early.
+            if (shared.Count == 0)
+            {
+                break;
+            }
+        }
+    }
+
+    /// <summary>
+    /// Publishes accumulated workflow events to the durable workflow's custom status,
+    /// making them available to <see cref="DurableStreamingWorkflowRun"/> for live streaming.
+    /// </summary>
+    /// <remarks>
+    /// Custom status is the only orchestration state readable by external clients while
+    /// the orchestration is still running. It is cleared by the framework on completion,
+    /// so events are also included in <see cref="DurableWorkflowResult"/> for final retrieval.
+    /// </remarks>
+    private static void PublishEventsToLiveStatus(
+        TaskOrchestrationContext context,
+        SuperstepState state)
+    {
+        state.LiveStatus.Events = state.AccumulatedEvents;
+
+        // Pass the object directly — the framework's DataConverter handles serialization.
+        // Pre-serializing would cause double-serialization (string wrapped in JSON quotes).
+        context.SetCustomStatus(state.LiveStatus);
+    }
+
+    /// <summary>
+    /// Routes executor output (explicit messages or return value) to successor executors.
+    /// </summary>
+    private static void RouteOutputToSuccessors(
+        string executorId,
+        string result,
+        List<TypedPayload> sentMessages,
+        SuperstepState state,
+        ILogger logger)
+    {
+        if (sentMessages.Count > 0)
+        {
+            // Explicit SentMessages take precedence over the return value.
+            // Only route messages that have content.
+            foreach (TypedPayload message in sentMessages.Where(m => !string.IsNullOrEmpty(m.Data)))
+            {
+                state.EdgeMap.RouteMessage(executorId, message.Data!, message.TypeName, state.MessageQueues, logger);
+            }
+
+            return;
+        }
+
+        if (!string.IsNullOrEmpty(result))
+        {
+            state.EdgeMap.RouteMessage(executorId, result, inputTypeName: null, state.MessageQueues, logger);
+        }
+    }
+
+    /// <summary>
+    /// Serializes a list of messages into a JSON array.
+    /// </summary>
+    [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Serializing string array.")]
+    [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Serializing string array.")]
+    private static string SerializeToJsonArray(List<string> messages)
+    {
+        return JsonSerializer.Serialize(messages);
+    }
+
+    /// <summary>
+    /// Creates a <see cref="WorkflowExecutorInfo"/> for the given executor ID.
+    /// </summary>
+    /// <exception cref="InvalidOperationException">Thrown when the executor ID is not found in bindings.</exception>
+    private static WorkflowExecutorInfo CreateExecutorInfo(
+        string executorId,
+        Dictionary<string, ExecutorBinding> executorBindings)
+    {
+        if (!executorBindings.TryGetValue(executorId, out ExecutorBinding? binding))
+        {
+            throw new InvalidOperationException($"Executor '{executorId}' not found in workflow bindings.");
+        }
+
+        bool isAgentic = WorkflowAnalyzer.IsAgentExecutorType(binding.ExecutorType);
+        RequestPort? requestPort = (binding is RequestPortBinding rpb) ? rpb.Port : null;
+        Workflow? subWorkflow = (binding is SubworkflowBinding swb) ? swb.WorkflowInstance : null;
+
+        return new WorkflowExecutorInfo(executorId, isAgentic, requestPort, subWorkflow);
+    }
+
+    /// <summary>
+    /// Returns the last non-empty result from executed steps, or empty string if none.
+    /// </summary>
+    /// <remarks>
+    /// NOTE(review): relies on Dictionary insertion order to define "last"; holds in practice
+    /// because entries are never removed, but is not a documented guarantee — confirm intent.
+    /// </remarks>
+    private static string GetFinalResult(Dictionary<string, string> lastResults)
+    {
+        return lastResults.Values.LastOrDefault(value => !string.IsNullOrEmpty(value)) ?? string.Empty;
+    }
+
+    /// <summary>
+    /// Output from an executor invocation, including its result,
+    /// messages, state updates, and emitted workflow events.
+    /// </summary>
+    // NOTE(review): generic arguments reconstructed after markup stripping — verify the
+    // element types against DurableExecutorOutput's declarations.
+    private sealed record ExecutorResultInfo(
+        string Result,
+        List<TypedPayload> SentMessages,
+        Dictionary<string, string?> StateUpdates,
+        List<string> ClearedScopes,
+        List<string> Events,
+        bool HaltRequested);
+
+    /// <summary>
+    /// Parses the raw activity result to extract result, messages, events, and state updates.
+    /// </summary>
+    private static ExecutorResultInfo ParseActivityResult(string rawResult)
+    {
+        if (string.IsNullOrEmpty(rawResult))
+        {
+            return new ExecutorResultInfo(rawResult, [], [], [], [], false);
+        }
+
+        try
+        {
+            DurableExecutorOutput? output = JsonSerializer.Deserialize(
+                rawResult,
+                DurableWorkflowJsonContext.Default.DurableExecutorOutput);
+
+            if (output is null || !HasMeaningfulContent(output))
+            {
+                // Raw string was valid JSON but not a DurableExecutorOutput — treat it as a plain result.
+                return new ExecutorResultInfo(rawResult, [], [], [], [], false);
+            }
+
+            return new ExecutorResultInfo(
+                output.Result ?? string.Empty,
+                output.SentMessages,
+                output.StateUpdates,
+                output.ClearedScopes,
+                output.Events,
+                output.HaltRequested);
+        }
+        catch (JsonException)
+        {
+            // Not JSON at all — pass the raw value through as the result.
+            return new ExecutorResultInfo(rawResult, [], [], [], [], false);
+        }
+    }
+
+    /// <summary>
+    /// Determines whether the activity output contains meaningful content.
+    /// </summary>
+    /// <remarks>
+    /// Distinguishes actual activity output from arbitrary JSON that deserialized
+    /// successfully but with all default/empty values.
+ /// + private static bool HasMeaningfulContent(DurableExecutorOutput output) + { + return output.Result is not null + || output.SentMessages?.Count > 0 + || output.Events?.Count > 0 + || output.StateUpdates?.Count > 0 + || output.ClearedScopes?.Count > 0 + || output.HaltRequested; + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowWaitingForInputEvent.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowWaitingForInputEvent.cs new file mode 100644 index 0000000000..ed93c5928b --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/DurableWorkflowWaitingForInputEvent.cs @@ -0,0 +1,42 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics; +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Event raised when the durable workflow is waiting for external input at a . +/// +/// The serialized input data that was passed to the RequestPort. +/// The request port definition. +[DebuggerDisplay("RequestPort = {RequestPort.Id}")] +public sealed class DurableWorkflowWaitingForInputEvent( + string Input, + RequestPort RequestPort) : WorkflowEvent +{ + /// + /// Gets the serialized input data that was passed to the RequestPort. + /// + public string Input { get; } = Input; + + /// + /// Gets the request port definition. + /// + public RequestPort RequestPort { get; } = RequestPort; + + /// + /// Attempts to deserialize the input data to the specified type. + /// + /// The type to deserialize to. + /// The deserialized input. + /// Thrown when the input cannot be deserialized to the specified type. + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing workflow types provided by the caller.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing workflow types provided by the caller.")] + public T? 
GetInputAs() + { + return JsonSerializer.Deserialize(this.Input, DurableSerialization.Options); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableDirectEdgeRouter.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableDirectEdgeRouter.cs new file mode 100644 index 0000000000..3f78093183 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableDirectEdgeRouter.cs @@ -0,0 +1,156 @@ +// Copyright (c) Microsoft. All rights reserved. + +// Routing decision flow for a single edge. +// Example: the B→D edge from a workflow like below: +// +// [A] ──► [B] ──► [C] ──► [E] (B→D has condition: x => x.NeedsReview) +// │ ▲ +// └──► [D] ──────┘ +// +// (condition: x => x.NeedsReview, _sourceOutputType: typeof(Order)) +// +// RouteMessage(envelope) envelope.Message = "{\"NeedsReview\":true, ...}" +// │ +// ▼ +// Has condition? ──── No ────► Enqueue to sink's queue +// │ +// Yes (B→D has one) +// │ +// ▼ +// Deserialize message JSON string → Order object using _sourceOutputType +// │ +// ▼ +// Evaluate _condition(order) order => order.NeedsReview +// │ +// ┌──┴──┐ +// true false +// │ │ +// ▼ └──► Skip (log and return, D will not run) +// Enqueue to +// D's queue + +using System.Diagnostics.CodeAnalysis; +using System.Text.Json; +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.DurableTask.Workflows.EdgeRouters; + +/// +/// Routes messages from a source executor to a single target executor with optional condition evaluation. +/// +/// +/// +/// Created by during construction — one instance per (source, sink) edge. +/// When an edge has a condition (e.g., order => order.Total > 1000), the router deserialises +/// the serialised JSON message back to the source executor's output type so the condition delegate +/// can evaluate it against strongly-typed properties. 
If the condition returns false, the
+/// message is not forwarded and the target executor will not run for this edge.
+/// </para>
+/// <para>
+/// For sources with multiple successors, individual <see cref="DurableDirectEdgeRouter"/> instances
+/// are wrapped in a <see cref="DurableFanOutEdgeRouter"/> so a single RouteMessage call
+/// fans the same message out to all targets, each evaluating its own condition independently.
+/// </para>
+/// </remarks>
+internal sealed class DurableDirectEdgeRouter : IDurableEdgeRouter
+{
+    private readonly string _sourceId;
+    private readonly string _sinkId;
+    // NOTE(review): delegate signature reconstructed after markup stripping — the condition is
+    // invoked with the deserialized message object and must yield a bool; confirm declaration.
+    private readonly Func<object?, bool>? _condition;
+    private readonly Type? _sourceOutputType;
+
+    /// <summary>
+    /// Initializes a new instance of <see cref="DurableDirectEdgeRouter"/>.
+    /// </summary>
+    /// <param name="sourceId">The source executor ID.</param>
+    /// <param name="sinkId">The target executor ID.</param>
+    /// <param name="condition">Optional condition function to evaluate before routing.</param>
+    /// <param name="sourceOutputType">The output type of the source executor for deserialization.</param>
+    internal DurableDirectEdgeRouter(
+        string sourceId,
+        string sinkId,
+        Func<object?, bool>? condition,
+        Type? sourceOutputType)
+    {
+        this._sourceId = sourceId;
+        this._sinkId = sinkId;
+        this._condition = condition;
+        this._sourceOutputType = sourceOutputType;
+    }
+
+    /// <inheritdoc/>
+    public void RouteMessage(
+        DurableMessageEnvelope envelope,
+        Dictionary<string, Queue<DurableMessageEnvelope>> messageQueues,
+        ILogger logger)
+    {
+        if (this._condition is not null)
+        {
+            try
+            {
+                object? messageObj = DeserializeForCondition(envelope.Message, this._sourceOutputType);
+                if (!this._condition(messageObj))
+                {
+                    // Condition rejected the message: skip this edge entirely.
+                    logger.LogEdgeConditionFalse(this._sourceId, this._sinkId);
+                    return;
+                }
+            }
+            catch (Exception ex)
+            {
+                // A failing condition (bad JSON, wrong type) drops the message for this edge
+                // rather than failing the whole superstep.
+                logger.LogEdgeConditionEvaluationFailed(ex, this._sourceId, this._sinkId);
+                return;
+            }
+        }
+
+        logger.LogEdgeRoutingMessage(this._sourceId, this._sinkId);
+        EnqueueMessage(messageQueues, this._sinkId, envelope);
+    }
+
+    /// <summary>
+    /// Deserializes a JSON message to an object for condition evaluation.
+    /// </summary>
+    /// <remarks>
+    /// Messages travel through the durable workflow as serialized JSON strings, but condition
+    /// delegates need typed objects to evaluate (e.g., order => order.Status == "Approved").
+ /// This method converts the JSON back to an object the condition delegate can evaluate. + /// + /// The JSON string representation of the message. + /// + /// The expected type of the message. When provided, enables strongly-typed deserialization + /// so the condition function receives the correct type to evaluate against. + /// + /// + /// The deserialized object, or null if the JSON is empty. + /// + /// Thrown when the JSON is invalid or cannot be deserialized to the target type. + [UnconditionalSuppressMessage("AOT", "IL3050", Justification = "Deserializing workflow types registered at startup.")] + [UnconditionalSuppressMessage("Trimming", "IL2026", Justification = "Deserializing workflow types registered at startup.")] + private static object? DeserializeForCondition(string json, Type? targetType) + { + if (string.IsNullOrEmpty(json)) + { + return null; + } + + // If we know the source executor's output type, deserialize to that specific type + // so the condition function can access strongly-typed properties. + // Otherwise, deserialize as a generic object for basic inspection. + return targetType is null + ? JsonSerializer.Deserialize(json, DurableSerialization.Options) + : JsonSerializer.Deserialize(json, targetType, DurableSerialization.Options); + } + + private static void EnqueueMessage( + Dictionary> queues, + string executorId, + DurableMessageEnvelope envelope) + { + if (!queues.TryGetValue(executorId, out Queue? queue)) + { + queue = new Queue(); + queues[executorId] = queue; + } + + queue.Enqueue(envelope); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableEdgeMap.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableEdgeMap.cs new file mode 100644 index 0000000000..69b8b7cc1c --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableEdgeMap.cs @@ -0,0 +1,205 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +// How WorkflowGraphInfo maps to DurableEdgeMap at runtime. +// For a workflow like below: +// +// [A] ──► [B] ──► [C] ──► [E] +// │ ▲ +// └──► [D] ──────┘ +// (condition: x => x.NeedsReview) +// +// WorkflowGraphInfo DurableEdgeMap +// ┌──────────────────────────┐ ┌──────────────────────────────────────┐ +// │ Successors: │ │ _routersBySource: │ +// │ A → [B] │──constructs──►│ A → [DirectRouter(A→B)] │ +// │ B → [C, D] │ │ B → [FanOutRouter([C, D])] │ +// │ C → [E] │ │ C → [DirectRouter(C→E)] │ +// │ D → [E] │ │ D → [DirectRouter(D→E)] │ +// └──────────────────────────┘ │ │ +// ┌──────────────────────────┐ │ _predecessorCounts: │ +// │ Predecessors: │ │ A → 0 │ +// │ E → [C, D] (fan-in!) │──constructs──►│ B → 1, C → 1, D → 1 │ +// └──────────────────────────┘ │ E → 2 ◄── IsFanInExecutor = true │ +// └──────────────────────────────────────┘ +// +// Usage during superstep execution (continuing the example): +// +// 1. EnqueueInitialInput(msg) ──► MessageQueues["A"].Enqueue(envelope) +// +// 2. After B completes, RouteMessage("B", resultB) ──► _routersBySource["B"] +// │ +// ▼ +// FanOutRouter (B has 2 successors) +// ├─► DirectRouter(B→C) ──► no condition ──► enqueue to C +// └─► DirectRouter(B→D) ──► evaluate x => x.NeedsReview ──► enqueue to D (or skip) +// +// 3. Before superstep 4, IsFanInExecutor("E") returns true (count=2) +// → CollectExecutorInputs aggregates C and D results into ["resultC","resultD"] + +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.DurableTask.Workflows.EdgeRouters; + +/// +/// Manages message routing through workflow edges for durable orchestrations. +/// +/// +/// +/// This is the durable equivalent of EdgeMap in the in-process runner. +/// It is constructed from (produced by ) +/// and converts the static graph structure into an active routing layer used during superstep execution. 
+/// +/// +/// What it stores: +/// +/// +/// _routersBySource — For each source executor, a list of instances +/// that know how to deliver messages to successor executors. When a source has multiple successors, a single +/// wraps the individual instances. +/// _predecessorCounts — The number of predecessors for each executor, used to detect +/// fan-in points where multiple incoming messages should be aggregated before execution. +/// _startExecutorId — The entry-point executor that receives the initial workflow input. +/// +/// +/// How it is used during execution: +/// +/// +/// seeds the start executor's queue before the first superstep. +/// After each superstep, DurableWorkflowRunner.RouteOutputToSuccessors calls +/// which looks up the routers for the completed executor and forwards the +/// result to successor queues. Each router may evaluate an edge condition before enqueueing. +/// is checked during input collection to decide whether +/// to aggregate multiple queued messages into a single JSON array before dispatching. +/// +/// +internal sealed class DurableEdgeMap +{ + private readonly Dictionary> _routersBySource = []; + private readonly Dictionary _predecessorCounts = []; + private readonly string _startExecutorId; + + /// + /// Initializes a new instance of from workflow graph info. + /// + /// The workflow graph information containing routing structure. + internal DurableEdgeMap(WorkflowGraphInfo graphInfo) + { + ArgumentNullException.ThrowIfNull(graphInfo); + + this._startExecutorId = graphInfo.StartExecutorId; + + // Build edge routers for each source executor + foreach (KeyValuePair> entry in graphInfo.Successors) + { + string sourceId = entry.Key; + List successorIds = entry.Value; + + if (successorIds.Count == 0) + { + continue; + } + + graphInfo.ExecutorOutputTypes.TryGetValue(sourceId, out Type? 
sourceOutputType); + + List routers = []; + foreach (string sinkId in successorIds) + { + graphInfo.EdgeConditions.TryGetValue((sourceId, sinkId), out Func? condition); + + routers.Add(new DurableDirectEdgeRouter(sourceId, sinkId, condition, sourceOutputType)); + } + + // If multiple successors, wrap in a fan-out router + if (routers.Count > 1) + { + this._routersBySource[sourceId] = [new DurableFanOutEdgeRouter(sourceId, routers)]; + } + else + { + this._routersBySource[sourceId] = routers; + } + } + + // Store predecessor counts for fan-in detection + foreach (KeyValuePair> entry in graphInfo.Predecessors) + { + this._predecessorCounts[entry.Key] = entry.Value.Count; + } + } + + /// + /// Routes a message from a source executor to its successors. + /// + /// + /// Called by DurableWorkflowRunner.RouteOutputToSuccessors after each superstep. + /// Wraps the message in a and delegates to the + /// appropriate (s) for the source executor. Each router + /// may evaluate an edge condition and, if satisfied, enqueue the envelope into the + /// target executor's message queue for the next superstep. + /// + /// The source executor ID. + /// The serialized message to route. + /// The type name of the message. + /// The message queues to enqueue messages into. + /// The logger for tracing. + internal void RouteMessage( + string sourceId, + string message, + string? inputTypeName, + Dictionary> messageQueues, + ILogger logger) + { + if (!this._routersBySource.TryGetValue(sourceId, out List? routers)) + { + return; + } + + DurableMessageEnvelope envelope = DurableMessageEnvelope.Create(message, inputTypeName, sourceId); + + foreach (IDurableEdgeRouter router in routers) + { + router.RouteMessage(envelope, messageQueues, logger); + } + } + + /// + /// Enqueues the initial workflow input to the start executor. + /// + /// The serialized initial input message. + /// The message queues to enqueue into. 
+ /// + /// This method is used only at workflow startup to provide input to the first executor. + /// No input type hint is required because the start executor determines its expected input type from its own InputTypes configuration. + /// + internal void EnqueueInitialInput( + string message, + Dictionary> messageQueues) + { + DurableMessageEnvelope envelope = DurableMessageEnvelope.Create(message, inputTypeName: null); + EnqueueMessage(messageQueues, this._startExecutorId, envelope); + } + + /// + /// Determines if an executor is a fan-in point (has multiple predecessors). + /// + /// The executor ID to check. + /// true if the executor has multiple predecessors; otherwise, false. + internal bool IsFanInExecutor(string executorId) + { + return this._predecessorCounts.TryGetValue(executorId, out int count) && count > 1; + } + + private static void EnqueueMessage( + Dictionary> queues, + string executorId, + DurableMessageEnvelope envelope) + { + if (!queues.TryGetValue(executorId, out Queue? queue)) + { + queue = new Queue(); + queues[executorId] = queue; + } + + queue.Enqueue(envelope); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableFanOutEdgeRouter.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableFanOutEdgeRouter.cs new file mode 100644 index 0000000000..f13a0def92 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/DurableFanOutEdgeRouter.cs @@ -0,0 +1,67 @@ +// Copyright (c) Microsoft. All rights reserved. + +// Fan-out routing: one source message is forwarded to multiple targets. 
+// Example from a workflow like below: +// +// [A] ──► [B] ──► [C] ──► [E] (B→D has condition: x => x.NeedsReview) +// │ ▲ +// └──► [D] ──────┘ +// +// B has two successors (C and D), so DurableEdgeMap wraps them: +// +// Executor B completes with resultB (type: Order) +// │ +// ▼ +// FanOutRouter(B) +// ├──► DirectRouter(B→C) ──► no condition ──► enqueue to C +// └──► DirectRouter(B→D) ──► x => x.NeedsReview ──► enqueue to D (or skip) +// +// Each DirectRouter independently evaluates its condition, +// so resultB always reaches C, but only reaches D if NeedsReview is true. + +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.DurableTask.Workflows.EdgeRouters; + +/// +/// Routes messages from a source executor to multiple target executors (fan-out pattern). +/// +/// +/// Created by when a source executor has more than one successor. +/// Wraps the individual instances and delegates +/// to each of them, so the same message is evaluated and +/// potentially enqueued for every target independently. +/// +internal sealed class DurableFanOutEdgeRouter : IDurableEdgeRouter +{ + private readonly string _sourceId; + private readonly List _targetRouters; + + /// + /// Initializes a new instance of . + /// + /// The source executor ID. + /// The routers for each target executor. 
+ internal DurableFanOutEdgeRouter(string sourceId, List targetRouters) + { + this._sourceId = sourceId; + this._targetRouters = targetRouters; + } + + /// + public void RouteMessage( + DurableMessageEnvelope envelope, + Dictionary> messageQueues, + ILogger logger) + { + if (logger.IsEnabled(LogLevel.Debug)) + { + logger.LogDebug("Fan-Out from {Source}: routing to {Count} targets", this._sourceId, this._targetRouters.Count); + } + + foreach (IDurableEdgeRouter targetRouter in this._targetRouters) + { + targetRouter.RouteMessage(envelope, messageQueues, logger); + } + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/IDurableEdgeRouter.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/IDurableEdgeRouter.cs new file mode 100644 index 0000000000..692ca15b5f --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/EdgeRouters/IDurableEdgeRouter.cs @@ -0,0 +1,26 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.DurableTask.Workflows.EdgeRouters; + +/// +/// Defines the contract for routing messages through workflow edges in durable orchestrations. +/// +/// +/// Implementations include for single-target routing +/// and for multi-target fan-out patterns. +/// +internal interface IDurableEdgeRouter +{ + /// + /// Routes a message from the source executor to its target(s). + /// + /// The message envelope containing the message and metadata. + /// The message queues to enqueue messages into. + /// The logger for tracing. 
+ void RouteMessage( + DurableMessageEnvelope envelope, + Dictionary> messageQueues, + ILogger logger); +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/ExecutorRegistry.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/ExecutorRegistry.cs new file mode 100644 index 0000000000..f747d497b3 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/ExecutorRegistry.cs @@ -0,0 +1,83 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Provides a registry for executor bindings used in durable workflow orchestrations. +/// +/// +/// This registry enables lookup of executors by name, decoupled from specific workflow instances. +/// Executors are registered when workflows are added to . +/// +internal sealed class ExecutorRegistry +{ + private readonly Dictionary _executors = new(StringComparer.OrdinalIgnoreCase); + + /// + /// Gets the number of registered executors. + /// + internal int Count => this._executors.Count; + + /// + /// Attempts to get an executor registration by name. + /// + /// The executor name to look up. + /// When this method returns, contains the registration if found; otherwise, null. + /// if the executor was found; otherwise, . + internal bool TryGetExecutor(string executorName, [NotNullWhen(true)] out ExecutorRegistration? registration) + { + return this._executors.TryGetValue(executorName, out registration); + } + + /// + /// Registers an executor binding from a workflow. + /// + /// The executor name (without GUID suffix). + /// The full executor ID (may include GUID suffix). + /// The workflow containing the executor. 
+ internal void Register(string executorName, string executorId, Workflow workflow) + { + ArgumentException.ThrowIfNullOrEmpty(executorName); + ArgumentException.ThrowIfNullOrEmpty(executorId); + ArgumentNullException.ThrowIfNull(workflow); + + Dictionary bindings = workflow.ReflectExecutors(); + if (!bindings.TryGetValue(executorId, out ExecutorBinding? binding)) + { + throw new InvalidOperationException($"Executor '{executorId}' not found in workflow."); + } + + this._executors.TryAdd(executorName, new ExecutorRegistration(executorId, binding)); + } +} + +/// +/// Represents a registered executor with its binding information. +/// +/// +/// The may differ from the registered name when the executor +/// ID includes an instance suffix (e.g., "ExecutorName_Guid"). +/// +/// The full executor ID (may include instance suffix). +/// The executor binding containing the factory and configuration. +internal sealed record ExecutorRegistration(string ExecutorId, ExecutorBinding Binding) +{ + /// + /// Creates an instance of the executor. + /// + /// A unique identifier for the run context. + /// The cancellation token. + /// The created executor instance. + internal async ValueTask CreateExecutorInstanceAsync(string runId, CancellationToken cancellationToken = default) + { + if (this.Binding.FactoryAsync is null) + { + throw new InvalidOperationException($"Cannot create executor '{this.ExecutorId}': Binding is a placeholder."); + } + + return await this.Binding.FactoryAsync(runId).ConfigureAwait(false); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IAwaitableWorkflowRun.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IAwaitableWorkflowRun.cs new file mode 100644 index 0000000000..e25b77f52c --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IAwaitableWorkflowRun.cs @@ -0,0 +1,34 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents a workflow run that can be awaited for completion. +/// +/// +/// +/// This interface extends to provide methods for waiting +/// until the workflow execution completes. Not all workflow runners support this capability. +/// +/// +/// Use pattern matching to check if a workflow run supports awaiting: +/// +/// IWorkflowRun run = await client.RunAsync(workflow, input); +/// if (run is IAwaitableWorkflowRun awaitableRun) +/// { +/// string? result = await awaitableRun.WaitForCompletionAsync<string>(); +/// } +/// +/// +/// +public interface IAwaitableWorkflowRun : IWorkflowRun +{ + /// + /// Waits for the workflow to complete and returns the result. + /// + /// The expected result type. + /// A cancellation token to observe. + /// The result of the workflow execution. + /// Thrown when the workflow failed or was terminated. + ValueTask WaitForCompletionAsync(CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IStreamingWorkflowRun.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IStreamingWorkflowRun.cs new file mode 100644 index 0000000000..079ee7258e --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IStreamingWorkflowRun.cs @@ -0,0 +1,55 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents a workflow run that supports streaming workflow events as they occur. +/// +/// +/// This interface defines the contract for streaming workflow runs in durable execution +/// environments. Implementations provide real-time access to workflow events. +/// +public interface IStreamingWorkflowRun +{ + /// + /// Gets the unique identifier for the run. + /// + /// + /// This identifier can be provided at the start of the run, or auto-generated. 
+ /// For durable runs, this corresponds to the orchestration instance ID. + /// + string RunId { get; } + + /// + /// Asynchronously streams workflow events as they occur during workflow execution. + /// + /// + /// This method yields instances in real time as the workflow + /// progresses. The stream completes when the workflow completes, fails, or is terminated. + /// Events are delivered in the order they are raised. + /// + /// + /// A that can be used to cancel the streaming operation. + /// If cancellation is requested, the stream will end and no further events will be yielded. + /// + /// + /// An asynchronous stream of objects representing significant + /// workflow state changes. + /// + IAsyncEnumerable WatchStreamAsync(CancellationToken cancellationToken = default); + + /// + /// Sends a response to a to resume the workflow. + /// + /// The type of the response data. + /// The request event to respond to. + /// The response data to send. + /// A cancellation token to observe. + /// A representing the asynchronous operation. + ValueTask SendResponseAsync( + DurableWorkflowWaitingForInputEvent requestEvent, + TResponse response, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IWorkflowClient.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IWorkflowClient.cs new file mode 100644 index 0000000000..e84f3fe4cd --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IWorkflowClient.cs @@ -0,0 +1,71 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Defines a client for running and managing workflow executions. +/// +public interface IWorkflowClient +{ + /// + /// Runs a workflow and returns a handle to monitor its execution. + /// + /// The type of the input to the workflow. + /// The workflow to execute. 
+ /// The input to pass to the workflow's starting executor. + /// Optional identifier for the run. If not provided, a new ID will be generated. + /// A cancellation token to observe. + /// An that can be used to monitor the workflow execution. + ValueTask RunAsync( + Workflow workflow, + TInput input, + string? runId = null, + CancellationToken cancellationToken = default) + where TInput : notnull; + + /// + /// Runs a workflow with string input and returns a handle to monitor its execution. + /// + /// The workflow to execute. + /// The string input to pass to the workflow. + /// Optional identifier for the run. If not provided, a new ID will be generated. + /// A cancellation token to observe. + /// An that can be used to monitor the workflow execution. + ValueTask RunAsync( + Workflow workflow, + string input, + string? runId = null, + CancellationToken cancellationToken = default); + + /// + /// Starts a workflow and returns a streaming handle to watch events in real-time. + /// + /// The type of the input to the workflow. + /// The workflow to execute. + /// The input to pass to the workflow's starting executor. + /// Optional identifier for the run. If not provided, a new ID will be generated. + /// A cancellation token to observe. + /// An that can be used to stream workflow events. + ValueTask StreamAsync( + Workflow workflow, + TInput input, + string? runId = null, + CancellationToken cancellationToken = default) + where TInput : notnull; + + /// + /// Starts a workflow with string input and returns a streaming handle to watch events in real-time. + /// + /// The workflow to execute. + /// The string input to pass to the workflow. + /// Optional identifier for the run. If not provided, a new ID will be generated. + /// A cancellation token to observe. + /// An that can be used to stream workflow events. + ValueTask StreamAsync( + Workflow workflow, + string input, + string? 
runId = null, + CancellationToken cancellationToken = default); +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IWorkflowRun.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IWorkflowRun.cs new file mode 100644 index 0000000000..f6d5e5b203 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/IWorkflowRun.cs @@ -0,0 +1,39 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents a running instance of a workflow. +/// +public interface IWorkflowRun +{ + /// + /// Gets the unique identifier for the run. + /// + /// + /// This identifier can be provided at the start of the run, or auto-generated. + /// For durable runs, this corresponds to the orchestration instance ID. + /// + string RunId { get; } + + /// + /// Gets all events that have been emitted by the workflow. + /// + IEnumerable OutgoingEvents { get; } + + /// + /// Gets the number of events emitted since the last access to . + /// + int NewEventCount { get; } + + /// + /// Gets all events emitted by the workflow since the last access to this property. + /// + /// + /// Each access to this property advances the bookmark, so subsequent accesses + /// will only return events emitted after the previous access. + /// + IEnumerable NewEvents { get; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/PendingRequestPortStatus.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/PendingRequestPortStatus.cs new file mode 100644 index 0000000000..c60f00d5f6 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/PendingRequestPortStatus.cs @@ -0,0 +1,12 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents a RequestPort the workflow is paused at, waiting for a response. +/// +/// The RequestPort ID identifying which input is needed. 
+/// The serialized request data passed to the RequestPort. +internal sealed record PendingRequestPortStatus( + string EventName, + string Input); diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/TypedPayload.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/TypedPayload.cs new file mode 100644 index 0000000000..7c0998585a --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/TypedPayload.cs @@ -0,0 +1,20 @@ +// Copyright (c) Microsoft. All rights reserved. + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Pairs a JSON-serialized payload with its assembly-qualified type name +/// for type-safe deserialization across activity boundaries. +/// +internal sealed class TypedPayload +{ + /// + /// Gets or sets the assembly-qualified type name of the payload. + /// + public string? TypeName { get; set; } + + /// + /// Gets or sets the serialized payload data as JSON. + /// + public string? Data { get; set; } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowAnalyzer.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowAnalyzer.cs new file mode 100644 index 0000000000..bb4d295616 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowAnalyzer.cs @@ -0,0 +1,245 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Analyzes workflow structure to extract executor metadata and build graph information +/// for message-driven execution. +/// +internal static class WorkflowAnalyzer +{ + private const string AgentExecutorTypeName = "AIAgentHostExecutor"; + private const string AgentAssemblyPrefix = "Microsoft.Agents.AI"; + private const string ExecutorTypePrefix = "Executor"; + + /// + /// Analyzes a workflow instance and returns a list of executors with their metadata. + /// + /// The workflow instance to analyze. 
+ /// A list of executor information in workflow order. + internal static List GetExecutorsFromWorkflowInOrder(Workflow workflow) + { + ArgumentNullException.ThrowIfNull(workflow); + + return workflow.ReflectExecutors() + .Select(kvp => CreateExecutorInfo(kvp.Key, kvp.Value)) + .ToList(); + } + + /// + /// Builds the workflow graph information needed for message-driven execution. + /// + /// + /// + /// Extracts routing information including successors, predecessors, edge conditions, + /// and output types. Supports cyclic workflows through message-driven superstep execution. + /// + /// + /// The returned is consumed by DurableEdgeMap + /// to build the runtime routing layer: + /// Successors become IDurableEdgeRouter instances, + /// Predecessors become fan-in counts, and + /// EdgeConditions / ExecutorOutputTypes are passed into + /// DurableDirectEdgeRouter for conditional routing with typed deserialization. + /// + /// + /// The workflow instance to analyze. + /// A graph info object containing routing information. + internal static WorkflowGraphInfo BuildGraphInfo(Workflow workflow) + { + ArgumentNullException.ThrowIfNull(workflow); + + Dictionary executors = workflow.ReflectExecutors(); + + WorkflowGraphInfo graphInfo = new() + { + StartExecutorId = workflow.StartExecutorId + }; + + InitializeExecutorMappings(graphInfo, executors); + PopulateGraphFromEdges(graphInfo, workflow.Edges); + + return graphInfo; + } + + /// + /// Determines whether the specified executor type is an agentic executor. + /// + /// The executor type to check. + /// true if the executor is an agentic executor; otherwise, false. + internal static bool IsAgentExecutorType(Type executorType) + { + string typeName = executorType.FullName ?? executorType.Name; + string assemblyName = executorType.Assembly.GetName().Name ?? 
string.Empty; + + return typeName.Contains(AgentExecutorTypeName, StringComparison.OrdinalIgnoreCase) + && assemblyName.Contains(AgentAssemblyPrefix, StringComparison.OrdinalIgnoreCase); + } + + /// + /// Creates a from an executor binding. + /// + /// The unique identifier of the executor. + /// The executor binding containing type and configuration information. + /// A new instance with extracted metadata. + private static WorkflowExecutorInfo CreateExecutorInfo(string executorId, ExecutorBinding binding) + { + bool isAgentic = IsAgentExecutorType(binding.ExecutorType); + RequestPort? requestPort = (binding is RequestPortBinding rpb) ? rpb.Port : null; + Workflow? subWorkflow = (binding is SubworkflowBinding swb) ? swb.WorkflowInstance : null; + + return new WorkflowExecutorInfo(executorId, isAgentic, requestPort, subWorkflow); + } + + /// + /// Initializes the graph info with empty collections for each executor. + /// + /// The graph info to initialize. + /// The dictionary of executor bindings. + private static void InitializeExecutorMappings(WorkflowGraphInfo graphInfo, Dictionary executors) + { + foreach ((string executorId, ExecutorBinding binding) in executors) + { + graphInfo.Successors[executorId] = []; + graphInfo.Predecessors[executorId] = []; + graphInfo.ExecutorOutputTypes[executorId] = GetExecutorOutputType(binding.ExecutorType); + } + } + + /// + /// Populates the graph info with successor/predecessor relationships and edge conditions. + /// + /// The graph info to populate. + /// The dictionary of edges grouped by source executor ID. 
+ private static void PopulateGraphFromEdges(WorkflowGraphInfo graphInfo, Dictionary> edges) + { + foreach ((string sourceId, HashSet edgeSet) in edges) + { + List successors = graphInfo.Successors[sourceId]; + + foreach (Edge edge in edgeSet) + { + AddSuccessorsFromEdge(graphInfo, sourceId, edge, successors); + TryAddEdgeCondition(graphInfo, edge); + } + } + } + + /// + /// Adds successor relationships from an edge to the graph info. + /// + /// The graph info to update. + /// The source executor ID. + /// The edge containing connection information. + /// The list of successors to append to. + private static void AddSuccessorsFromEdge( + WorkflowGraphInfo graphInfo, + string sourceId, + Edge edge, + List successors) + { + foreach (string sinkId in edge.Data.Connection.SinkIds) + { + if (!graphInfo.Successors.ContainsKey(sinkId)) + { + continue; + } + + successors.Add(sinkId); + graphInfo.Predecessors[sinkId].Add(sourceId); + } + } + + /// + /// Extracts and adds an edge condition to the graph info if present. + /// + /// The graph info to update. + /// The edge that may contain a condition. + private static void TryAddEdgeCondition(WorkflowGraphInfo graphInfo, Edge edge) + { + DirectEdgeData? directEdge = edge.DirectEdgeData; + + if (directEdge?.Condition is not null) + { + graphInfo.EdgeConditions[(directEdge.SourceId, directEdge.SinkId)] = directEdge.Condition; + } + } + + /// + /// Extracts the output type from an executor type by walking the inheritance chain. + /// + /// The executor type to analyze. + /// + /// The TOutput type for Executor<TInput, TOutput>, + /// or null for Executor<TInput> (void output) or non-executor types. + /// + private static Type? GetExecutorOutputType(Type executorType) + { + Type? currentType = executorType; + + while (currentType is not null) + { + Type? 
outputType = TryExtractOutputTypeFromGeneric(currentType); + if (outputType is not null || IsVoidExecutorType(currentType)) + { + return outputType; + } + + currentType = currentType.BaseType; + } + + return null; + } + + /// + /// Attempts to extract the output type from a generic executor type. + /// + /// The type to inspect. + /// The TOutput type if this is an Executor<TInput, TOutput>; otherwise, null. + private static Type? TryExtractOutputTypeFromGeneric(Type type) + { + if (!type.IsGenericType) + { + return null; + } + + Type genericDefinition = type.GetGenericTypeDefinition(); + Type[] genericArgs = type.GetGenericArguments(); + + bool isExecutorType = genericDefinition.Name.StartsWith(ExecutorTypePrefix, StringComparison.Ordinal); + if (!isExecutorType) + { + return null; + } + + // Executor - return TOutput + if (genericArgs.Length == 2) + { + return genericArgs[1]; + } + + return null; + } + + /// + /// Determines whether the type is a void-returning executor (Executor<TInput>). + /// + /// The type to check. + /// true if this is an Executor with a single type parameter; otherwise, false. + private static bool IsVoidExecutorType(Type type) + { + if (!type.IsGenericType) + { + return false; + } + + Type genericDefinition = type.GetGenericTypeDefinition(); + Type[] genericArgs = type.GetGenericArguments(); + + // Executor with 1 type parameter indicates void return + return genericArgs.Length == 1 + && genericDefinition.Name.StartsWith(ExecutorTypePrefix, StringComparison.Ordinal); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowExecutorInfo.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowExecutorInfo.cs new file mode 100644 index 0000000000..ffaa9fbe1f --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowExecutorInfo.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents an executor in the workflow with its metadata. +/// +/// The unique identifier of the executor. +/// Indicates whether this executor is an agentic executor. +/// The request port if this executor is a request port executor; otherwise, null. +/// The sub-workflow if this executor is a sub-workflow executor; otherwise, null. +internal sealed record WorkflowExecutorInfo( + string ExecutorId, + bool IsAgenticExecutor, + RequestPort? RequestPort = null, + Workflow? SubWorkflow = null) +{ + /// + /// Gets a value indicating whether this executor is a request port executor (human-in-the-loop). + /// + public bool IsRequestPortExecutor => this.RequestPort is not null; + + /// + /// Gets a value indicating whether this executor is a sub-workflow executor. + /// + public bool IsSubworkflowExecutor => this.SubWorkflow is not null; +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowGraphInfo.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowGraphInfo.cs new file mode 100644 index 0000000000..a504a07b13 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowGraphInfo.cs @@ -0,0 +1,98 @@ +// Copyright (c) Microsoft. All rights reserved. 
+ +// Example: Given this workflow graph with a fan-out from B and a fan-in at E, +// plus a conditional edge from B to D: +// +// [A] ──► [B] ──► [C] ──► [E] +// │ ▲ +// └──► [D] ──────┘ +// (condition: +// x => x.NeedsReview) +// +// WorkflowAnalyzer.BuildGraphInfo() produces: +// +// StartExecutorId = "A" +// +// Successors (who does each executor send output to?): +// ┌──────────┬──────────────┐ +// │ "A" │ ["B"] │ +// │ "B" │ ["C", "D"] │ ◄── fan-out: B sends to both C and D +// │ "C" │ ["E"] │ +// │ "D" │ ["E"] │ +// │ "E" │ [] │ ◄── terminal: no successors +// └──────────┴──────────────┘ +// +// Predecessors (who feeds into each executor?): +// ┌──────────┬──────────────┐ +// │ "A" │ [] │ ◄── start: no predecessors +// │ "B" │ ["A"] │ +// │ "C" │ ["B"] │ +// │ "D" │ ["B"] │ +// │ "E" │ ["C", "D"] │ ◄── fan-in: count=2, messages will be aggregated +// └──────────┴──────────────┘ +// +// EdgeConditions (which edges have routing conditions?): +// ┌──────────────────┬──────────────────────────┐ +// │ ("B", "D") │ x => x.NeedsReview │ ◄── D only receives if condition is true +// └──────────────────┴──────────────────────────┘ +// (The B→C edge has no condition, so C always receives B's output.) +// +// ExecutorOutputTypes (what type does each executor return?): +// ┌──────────┬──────────────────┐ +// │ "A" │ typeof(string) │ ◄── used by DurableDirectEdgeRouter to deserialize +// │ "B" │ typeof(Order) │ the JSON message for condition evaluation +// │ "C" │ typeof(Report) │ +// │ "D" │ typeof(Report) │ +// │ "E" │ typeof(string) │ +// └──────────┴──────────────────┘ +// +// DurableEdgeMap then consumes this to build the runtime routing layer. + +using System.Diagnostics; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Represents the workflow graph structure needed for message-driven execution. 
+/// +/// +/// +/// This is a simplified representation that contains only the information needed +/// for routing messages between executors during superstep execution: +/// +/// +/// Successors for routing messages forward +/// Predecessors for detecting fan-in points +/// Edge conditions for conditional routing +/// Output types for deserialization during condition evaluation +/// +/// +[DebuggerDisplay("Start = {StartExecutorId}, Executors = {Successors.Count}")] +internal sealed class WorkflowGraphInfo +{ + /// + /// Gets or sets the starting executor ID for the workflow. + /// + public string StartExecutorId { get; set; } = string.Empty; + + /// + /// Maps each executor ID to its successors (for message routing). + /// + public Dictionary> Successors { get; } = []; + + /// + /// Maps each executor ID to its predecessors (for fan-in detection). + /// + public Dictionary> Predecessors { get; } = []; + + /// + /// Maps edge connections (sourceId, targetId) to their condition functions. + /// The condition function takes the predecessor's result and returns true if the edge should be followed. + /// + public Dictionary<(string SourceId, string TargetId), Func?> EdgeConditions { get; } = []; + + /// + /// Maps executor IDs to their output types (for proper deserialization during condition evaluation). + /// + public Dictionary ExecutorOutputTypes { get; } = []; +} diff --git a/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowNamingHelper.cs b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowNamingHelper.cs new file mode 100644 index 0000000000..0b657b3235 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.DurableTask/Workflows/WorkflowNamingHelper.cs @@ -0,0 +1,113 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Diagnostics.CodeAnalysis; + +namespace Microsoft.Agents.AI.DurableTask.Workflows; + +/// +/// Provides helper methods for workflow naming conventions used in durable orchestrations. 
+/// +internal static class WorkflowNamingHelper +{ + internal const string OrchestrationFunctionPrefix = "dafx-"; + private const char ExecutorIdSuffixSeparator = '_'; + + /// + /// Converts a workflow name to its corresponding orchestration function name. + /// + /// The workflow name. + /// The orchestration function name. + /// Thrown when the workflow name is null or empty. + internal static string ToOrchestrationFunctionName(string workflowName) + { + ArgumentException.ThrowIfNullOrEmpty(workflowName); + return string.Concat(OrchestrationFunctionPrefix, workflowName); + } + + /// + /// Converts an orchestration function name back to its workflow name. + /// + /// The orchestration function name. + /// The workflow name. + /// Thrown when the orchestration function name is null, empty, or doesn't have the expected prefix. + internal static string ToWorkflowName(string orchestrationFunctionName) + { + ArgumentException.ThrowIfNullOrEmpty(orchestrationFunctionName); + + if (!TryGetWorkflowName(orchestrationFunctionName, out string? workflowName)) + { + throw new ArgumentException( + $"Orchestration function name '{orchestrationFunctionName}' does not have the expected '{OrchestrationFunctionPrefix}' prefix or is missing a workflow name.", + nameof(orchestrationFunctionName)); + } + + return workflowName; + } + + /// + /// Extracts the executor name from an executor ID. + /// + /// + /// + /// For non-agentic executors, the executor ID is the same as the executor name (e.g., "OrderParser"). + /// + /// + /// For agentic executors, the workflow builder appends a GUID suffix separated by an underscore + /// (e.g., "Physicist_8884e71021334ce49517fa2b17b1695b"). This method extracts just the name portion. + /// + /// + /// The executor ID, which may contain a GUID suffix. + /// The executor name without any GUID suffix. + /// Thrown when the executor ID is null or empty. 
+ internal static string GetExecutorName(string executorId) + { + ArgumentException.ThrowIfNullOrEmpty(executorId); + + int separatorIndex = executorId.LastIndexOf(ExecutorIdSuffixSeparator); + if (separatorIndex > 0) + { + ReadOnlySpan suffix = executorId.AsSpan(separatorIndex + 1); + if (IsGuidSuffix(suffix)) + { + return executorId[..separatorIndex]; + } + } + + return executorId; + } + + /// + /// Checks whether the given span looks like a sanitized GUID (32 hex characters). + /// + private static bool IsGuidSuffix(ReadOnlySpan value) + { + if (value.Length != 32) + { + return false; + } + + foreach (char c in value) + { + if (!char.IsAsciiHexDigit(c)) + { + return false; + } + } + + return true; + } + + private static bool TryGetWorkflowName(string? orchestrationFunctionName, [NotNullWhen(true)] out string? workflowName) + { + workflowName = null; + + if (string.IsNullOrEmpty(orchestrationFunctionName) || + !orchestrationFunctionName.StartsWith(OrchestrationFunctionPrefix, StringComparison.Ordinal)) + { + return false; + } + + workflowName = orchestrationFunctionName[OrchestrationFunctionPrefix.Length..]; + return workflowName.Length > 0; + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctionExecutor.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctionExecutor.cs index fa0b9ef287..8239ff17cc 100644 --- a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctionExecutor.cs +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctionExecutor.cs @@ -21,6 +21,15 @@ public async ValueTask ExecuteAsync(FunctionContext context) { ArgumentNullException.ThrowIfNull(context); + // Orchestration triggers use a different input binding mechanism than other triggers. + // The encoded orchestrator state is retrieved via BindInputAsync on the orchestration trigger binding, + // not through IFunctionInputBindingFeature. Handle this case first to avoid unnecessary binding work. 
+ if (context.FunctionDefinition.EntryPoint == BuiltInFunctions.RunWorkflowOrchestrationFunctionEntryPoint) + { + await ExecuteOrchestrationAsync(context); + return; + } + // Acquire the input binding feature (fail fast if missing rather than null-forgiving operator). IFunctionInputBindingFeature? functionInputBindingFeature = context.Features.Get() ?? throw new InvalidOperationException("Function input binding feature is not available on the current context."); @@ -57,11 +66,67 @@ public async ValueTask ExecuteAsync(FunctionContext context) if (durableTaskClient is null) { - // This is not expected to happen since all built-in functions are - // expected to have a Durable Task client binding. + // This is not expected to happen since all built-in functions (other than orchestration triggers) + // are expected to have a Durable Task client binding. throw new InvalidOperationException($"Durable Task client binding is missing for the invocation {context.InvocationId}."); } + if (context.FunctionDefinition.EntryPoint == BuiltInFunctions.RunWorkflowOrchestrationHttpFunctionEntryPoint) + { + if (httpRequestData == null) + { + throw new InvalidOperationException($"HTTP request data binding is missing for the invocation {context.InvocationId}."); + } + + context.GetInvocationResult().Value = await BuiltInFunctions.RunWorkflowOrchestrationHttpTriggerAsync( + httpRequestData, + durableTaskClient, + context); + return; + } + + if (context.FunctionDefinition.EntryPoint == BuiltInFunctions.GetWorkflowStatusHttpFunctionEntryPoint) + { + if (httpRequestData == null) + { + throw new InvalidOperationException($"HTTP request data binding is missing for the invocation {context.InvocationId}."); + } + + context.GetInvocationResult().Value = await BuiltInFunctions.GetWorkflowStatusAsync( + httpRequestData, + durableTaskClient, + context); + return; + } + + if (context.FunctionDefinition.EntryPoint == BuiltInFunctions.RespondToWorkflowHttpFunctionEntryPoint) + { + if (httpRequestData 
== null) + { + throw new InvalidOperationException($"HTTP request data binding is missing for the invocation {context.InvocationId}."); + } + + context.GetInvocationResult().Value = await BuiltInFunctions.RespondToWorkflowAsync( + httpRequestData, + durableTaskClient, + context); + return; + } + + if (context.FunctionDefinition.EntryPoint == BuiltInFunctions.InvokeWorkflowActivityFunctionEntryPoint) + { + if (encodedEntityRequest is null) + { + throw new InvalidOperationException($"Activity trigger input binding is missing for the invocation {context.InvocationId}."); + } + + context.GetInvocationResult().Value = await BuiltInFunctions.InvokeWorkflowActivityAsync( + encodedEntityRequest, + durableTaskClient, + context); + return; + } + if (context.FunctionDefinition.EntryPoint == BuiltInFunctions.RunAgentHttpFunctionEntryPoint) { if (httpRequestData == null) @@ -70,9 +135,9 @@ public async ValueTask ExecuteAsync(FunctionContext context) } context.GetInvocationResult().Value = await BuiltInFunctions.RunAgentHttpAsync( - httpRequestData, - durableTaskClient, - context); + httpRequestData, + durableTaskClient, + context); return; } @@ -104,4 +169,32 @@ public async ValueTask ExecuteAsync(FunctionContext context) throw new InvalidOperationException($"Unsupported function entry point '{context.FunctionDefinition.EntryPoint}' for invocation {context.InvocationId}."); } + + private static async ValueTask ExecuteOrchestrationAsync(FunctionContext context) + { + BindingMetadata? 
orchestrationBinding = null; + foreach (BindingMetadata binding in context.FunctionDefinition.InputBindings.Values) + { + if (string.Equals(binding.Type, "orchestrationTrigger", StringComparison.OrdinalIgnoreCase)) + { + orchestrationBinding = binding; + break; + } + } + + if (orchestrationBinding is null) + { + throw new InvalidOperationException($"Orchestration trigger binding is missing for the invocation {context.InvocationId}."); + } + + InputBindingData triggerInputData = await context.BindInputAsync(orchestrationBinding); + if (triggerInputData?.Value is not string encodedOrchestratorState) + { + throw new InvalidOperationException($"Orchestration history state was either missing from the input or not a string value for invocation {context.InvocationId}."); + } + + context.GetInvocationResult().Value = BuiltInFunctions.RunWorkflowOrchestration( + encodedOrchestratorState, + context); + } } diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctions.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctions.cs index 8573a80613..6dc1ab2244 100644 --- a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctions.cs +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/BuiltInFunctions.cs @@ -1,11 +1,14 @@ // Copyright (c) Microsoft. All rights reserved. 
using System.Net; +using System.Text.Json; using System.Text.Json.Serialization; using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; using Microsoft.Azure.Functions.Worker; using Microsoft.Azure.Functions.Worker.Extensions.Mcp; using Microsoft.Azure.Functions.Worker.Http; +using Microsoft.DurableTask; using Microsoft.DurableTask.Client; using Microsoft.DurableTask.Worker.Grpc; using Microsoft.Extensions.AI; @@ -21,6 +24,203 @@ internal static class BuiltInFunctions internal static readonly string RunAgentHttpFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(RunAgentHttpAsync)}"; internal static readonly string RunAgentEntityFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(InvokeAgentAsync)}"; internal static readonly string RunAgentMcpToolFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(RunMcpToolAsync)}"; + internal static readonly string RunWorkflowOrchestrationHttpFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(RunWorkflowOrchestrationHttpTriggerAsync)}"; + internal static readonly string RunWorkflowOrchestrationFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(RunWorkflowOrchestration)}"; + internal static readonly string InvokeWorkflowActivityFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(InvokeWorkflowActivityAsync)}"; + internal static readonly string GetWorkflowStatusHttpFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(GetWorkflowStatusAsync)}"; + internal static readonly string RespondToWorkflowHttpFunctionEntryPoint = $"{typeof(BuiltInFunctions).FullName!}.{nameof(RespondToWorkflowAsync)}"; + +#pragma warning disable IL3000 // Avoid accessing Assembly file path when publishing as a single file - Azure Functions does not use single-file publishing + internal static readonly string ScriptFile = Path.GetFileName(typeof(BuiltInFunctions).Assembly.Location); +#pragma warning restore IL3000 + 
+ /// + /// Starts a workflow orchestration in response to an HTTP request. + /// The workflow name is derived from the function name by stripping the . + /// Callers can optionally provide a custom run ID via the runId query string parameter + /// (e.g., /api/workflows/MyWorkflow/run?runId=my-id). If not provided, one is auto-generated. + /// + public static async Task RunWorkflowOrchestrationHttpTriggerAsync( + [HttpTrigger] HttpRequestData req, + [DurableClient] DurableTaskClient client, + FunctionContext context) + { + string workflowName = context.FunctionDefinition.Name.Replace(HttpPrefix, string.Empty); + string orchestrationFunctionName = WorkflowNamingHelper.ToOrchestrationFunctionName(workflowName); + string? inputMessage = await req.ReadAsStringAsync(); + + if (string.IsNullOrEmpty(inputMessage)) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.BadRequest, "Workflow input cannot be empty."); + } + + DurableWorkflowInput orchestrationInput = new() { Input = inputMessage }; + + // Allow users to provide a custom run ID via query string; otherwise, auto-generate one. + string? instanceId = req.Query["runId"]; + StartOrchestrationOptions? options = instanceId is not null ? new StartOrchestrationOptions(instanceId) : null; + string resolvedInstanceId = await client.ScheduleNewOrchestrationInstanceAsync(orchestrationFunctionName, orchestrationInput, options); + + HttpResponseData response = req.CreateResponse(HttpStatusCode.Accepted); + await response.WriteStringAsync($"Workflow orchestration started for {workflowName}. Orchestration runId: {resolvedInstanceId}"); + return response; + } + + /// + /// Returns the workflow status including any pending HITL requests. + /// The run ID is extracted from the route parameter {runId}. + /// + public static async Task GetWorkflowStatusAsync( + [HttpTrigger] HttpRequestData req, + [DurableClient] DurableTaskClient client, + FunctionContext context) + { + string? 
runId = context.BindingContext.BindingData.TryGetValue("runId", out object? value) ? value?.ToString() : null; + if (string.IsNullOrEmpty(runId)) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.BadRequest, "Run ID is required."); + } + + OrchestrationMetadata? metadata = await client.GetInstanceAsync(runId, getInputsAndOutputs: true); + if (metadata is null) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.NotFound, $"Workflow run '{runId}' not found."); + } + + // Parse HITL inputs the workflow is waiting for from the durable workflow status + List? waitingForInput = null; + if (DurableWorkflowLiveStatus.TryParse(metadata.SerializedCustomStatus, out DurableWorkflowLiveStatus liveStatus) + && liveStatus.PendingEvents.Count > 0) + { + waitingForInput = liveStatus.PendingEvents; + } + + HttpResponseData response = req.CreateResponse(HttpStatusCode.OK); + await response.WriteAsJsonAsync(new + { + runId, + status = metadata.RuntimeStatus.ToString(), + waitingForInput = waitingForInput?.Select(p => new { eventName = p.EventName, input = JsonDocument.Parse(p.Input).RootElement }) + }); + return response; + } + + /// + /// Sends a response to a pending RequestPort, resuming the workflow. + /// Expects a JSON body: { "eventName": "...", "response": { ... } }. + /// + public static async Task RespondToWorkflowAsync( + [HttpTrigger] HttpRequestData req, + [DurableClient] DurableTaskClient client, + FunctionContext context) + { + string? runId = context.BindingContext.BindingData.TryGetValue("runId", out object? value) ? value?.ToString() : null; + if (string.IsNullOrEmpty(runId)) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.BadRequest, "Run ID is required."); + } + + WorkflowRespondRequest? 
request; + try + { + request = await req.ReadFromJsonAsync(context.CancellationToken); + } + catch (JsonException) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.BadRequest, "Request body is not valid JSON."); + } + + if (request is null || string.IsNullOrEmpty(request.EventName) + || request.Response.ValueKind == JsonValueKind.Undefined) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.BadRequest, "Body must contain a non-empty 'eventName' and a 'response' property."); + } + + // Verify the orchestration exists and is in a valid state + OrchestrationMetadata? metadata = await client.GetInstanceAsync(runId, getInputsAndOutputs: true); + if (metadata is null) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.NotFound, $"Workflow run '{runId}' not found."); + } + + if (metadata.RuntimeStatus is OrchestrationRuntimeStatus.Completed + or OrchestrationRuntimeStatus.Failed + or OrchestrationRuntimeStatus.Terminated) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.BadRequest, + $"Workflow run '{runId}' is in terminal state '{metadata.RuntimeStatus}'."); + } + + // Verify the workflow is waiting for the specified event. + // If status can't be parsed (e.g., not yet set during early execution), allow the event through — + // Durable Task safely queues it until the orchestration reaches WaitForExternalEvent. 
+ bool eventValidated = false; + if (DurableWorkflowLiveStatus.TryParse(metadata.SerializedCustomStatus, out DurableWorkflowLiveStatus liveStatus)) + { + if (!liveStatus.PendingEvents.Exists(p => string.Equals(p.EventName, request.EventName, StringComparison.Ordinal))) + { + return await CreateErrorResponseAsync(req, context, HttpStatusCode.BadRequest, + $"Workflow is not waiting for event '{request.EventName}'."); + } + + eventValidated = true; + } + + // Raise the external event to unblock the orchestration's WaitForExternalEvent call + await client.RaiseEventAsync(runId, request.EventName, request.Response.GetRawText()); + + HttpResponseData response = req.CreateResponse(HttpStatusCode.Accepted); + await response.WriteAsJsonAsync(new + { + message = eventValidated + ? "Response sent to workflow." + : "Response sent to workflow. Event could not be validated against pending requests.", + runId, + eventName = request.EventName, + validated = eventValidated, + }); + return response; + } + + /// + /// Executes a workflow activity by looking up the registered executor and delegating to it. + /// The executor name is derived from the activity function name via . + /// + public static Task InvokeWorkflowActivityAsync( + [ActivityTrigger] string input, + [DurableClient] DurableTaskClient durableTaskClient, + FunctionContext functionContext) + { + ArgumentNullException.ThrowIfNull(input); + ArgumentNullException.ThrowIfNull(durableTaskClient); + ArgumentNullException.ThrowIfNull(functionContext); + + string activityFunctionName = functionContext.FunctionDefinition.Name; + string executorName = WorkflowNamingHelper.ToWorkflowName(activityFunctionName); + + DurableOptions durableOptions = functionContext.InstanceServices.GetRequiredService(); + if (!durableOptions.Workflows.Executors.TryGetExecutor(executorName, out ExecutorRegistration? 
registration)) + { + throw new InvalidOperationException($"Executor '{executorName}' not found in workflow options."); + } + + return DurableActivityExecutor.ExecuteAsync(registration.Binding, input, functionContext.CancellationToken); + } + + /// + /// Runs a workflow orchestration by delegating to + /// via . + /// + public static string RunWorkflowOrchestration( + string encodedOrchestratorRequest, + FunctionContext functionContext) + { + ArgumentNullException.ThrowIfNull(encodedOrchestratorRequest); + ArgumentNullException.ThrowIfNull(functionContext); + + WorkflowOrchestrator orchestrator = new(functionContext.InstanceServices); + return GrpcOrchestrationRunner.LoadAndRun(encodedOrchestratorRequest, orchestrator, functionContext.InstanceServices); + } // Exposed as an entity trigger via AgentFunctionsProvider public static Task InvokeAgentAsync( @@ -332,6 +532,15 @@ private sealed record AgentRunAcceptedResponse( [property: JsonPropertyName("status")] int Status, [property: JsonPropertyName("thread_id")] string ThreadId); + /// + /// Represents a request to respond to a pending RequestPort in a workflow. + /// + /// The name of the event to raise (the RequestPort ID). + /// The response payload to send to the workflow. + private sealed record WorkflowRespondRequest( + [property: JsonPropertyName("eventName")] string? EventName, + [property: JsonPropertyName("response")] JsonElement Response); + /// /// A service provider that combines the original service provider with an additional DurableTaskClient instance. 
/// diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/CHANGELOG.md b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/CHANGELOG.md index a606629dc2..93c90bba9c 100644 --- a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/CHANGELOG.md +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/CHANGELOG.md @@ -1,6 +1,10 @@ # Release History -## +## [Unreleased] + +- Added Azure Functions hosting support for durable workflows ([#4436](https://github.com/microsoft/agent-framework/pull/4436)) + +## v1.0.0-preview.251219.1 - Addressed incompatibility issue with `Microsoft.Azure.Functions.Worker.Extensions.DurableTask` >= 1.11.0 ([#2759](https://github.com/microsoft/agent-framework/pull/2759)) diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/DurableAgentFunctionMetadataTransformer.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/DurableAgentFunctionMetadataTransformer.cs index f626db2a90..65578a7383 100644 --- a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/DurableAgentFunctionMetadataTransformer.cs +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/DurableAgentFunctionMetadataTransformer.cs @@ -1,6 +1,5 @@ // Copyright (c) Microsoft. All rights reserved. 
-using Microsoft.Agents.AI.DurableTask; using Microsoft.Azure.Functions.Worker.Core.FunctionMetadata; using Microsoft.Extensions.Logging; @@ -17,10 +16,6 @@ internal sealed class DurableAgentFunctionMetadataTransformer : IFunctionMetadat private readonly IServiceProvider _serviceProvider; private readonly IFunctionsAgentOptionsProvider _functionsAgentOptionsProvider; -#pragma warning disable IL3000 // Avoid accessing Assembly file path when publishing as a single file - Azure Functions does not use single-file publishing - private static readonly string s_builtInFunctionsScriptFile = Path.GetFileName(typeof(BuiltInFunctions).Assembly.Location); -#pragma warning restore IL3000 - public DurableAgentFunctionMetadataTransformer( IReadOnlyDictionary> agents, ILogger logger, @@ -45,14 +40,14 @@ public void Transform(IList original) this._logger.LogRegisteringTriggerForAgent(agentName, "entity"); - original.Add(CreateAgentTrigger(agentName)); + original.Add(FunctionMetadataFactory.CreateEntityTrigger(agentName)); if (this._functionsAgentOptionsProvider.TryGet(agentName, out FunctionsAgentOptions? 
agentTriggerOptions)) { if (agentTriggerOptions.HttpTrigger.IsEnabled) { this._logger.LogRegisteringTriggerForAgent(agentName, "http"); - original.Add(CreateHttpTrigger(agentName, $"agents/{agentName}/run")); + original.Add(FunctionMetadataFactory.CreateHttpTrigger(agentName, $"agents/{agentName}/run", BuiltInFunctions.RunAgentHttpFunctionEntryPoint)); } if (agentTriggerOptions.McpToolTrigger.IsEnabled) @@ -65,39 +60,6 @@ public void Transform(IList original) } } - private static DefaultFunctionMetadata CreateAgentTrigger(string name) - { - return new DefaultFunctionMetadata() - { - Name = AgentSessionId.ToEntityName(name), - Language = "dotnet-isolated", - RawBindings = - [ - """{"name":"encodedEntityRequest","type":"entityTrigger","direction":"In"}""", - """{"name":"client","type":"durableClient","direction":"In"}""" - ], - EntryPoint = BuiltInFunctions.RunAgentEntityFunctionEntryPoint, - ScriptFile = s_builtInFunctionsScriptFile, - }; - } - - private static DefaultFunctionMetadata CreateHttpTrigger(string name, string route) - { - return new DefaultFunctionMetadata() - { - Name = $"{BuiltInFunctions.HttpPrefix}{name}", - Language = "dotnet-isolated", - RawBindings = - [ - $"{{\"name\":\"req\",\"type\":\"httpTrigger\",\"direction\":\"In\",\"authLevel\":\"function\",\"methods\": [\"post\"],\"route\":\"{route}\"}}", - "{\"name\":\"$return\",\"type\":\"http\",\"direction\":\"Out\"}", - "{\"name\":\"client\",\"type\":\"durableClient\",\"direction\":\"In\"}" - ], - EntryPoint = BuiltInFunctions.RunAgentHttpFunctionEntryPoint, - ScriptFile = s_builtInFunctionsScriptFile, - }; - } - private static DefaultFunctionMetadata CreateMcpToolTrigger(string agentName, string? 
description) { return new DefaultFunctionMetadata @@ -112,7 +74,7 @@ private static DefaultFunctionMetadata CreateMcpToolTrigger(string agentName, st """{"name":"client","type":"durableClient","direction":"In"}""" ], EntryPoint = BuiltInFunctions.RunAgentMcpToolFunctionEntryPoint, - ScriptFile = s_builtInFunctionsScriptFile, + ScriptFile = BuiltInFunctions.ScriptFile, }; } } diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionMetadataFactory.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionMetadataFactory.cs new file mode 100644 index 0000000000..d88cd939d9 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionMetadataFactory.cs @@ -0,0 +1,101 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Azure.Functions.Worker.Core.FunctionMetadata; + +namespace Microsoft.Agents.AI.Hosting.AzureFunctions; + +/// +/// Provides factory methods for creating common instances +/// used by function metadata transformers. +/// +internal static class FunctionMetadataFactory +{ + /// + /// Creates function metadata for an entity trigger function. + /// + /// The base name used to derive the entity function name. + /// A configured for an entity trigger. + internal static DefaultFunctionMetadata CreateEntityTrigger(string name) + { + return new DefaultFunctionMetadata() + { + Name = AgentSessionId.ToEntityName(name), + Language = "dotnet-isolated", + RawBindings = + [ + """{"name":"encodedEntityRequest","type":"entityTrigger","direction":"In"}""", + """{"name":"client","type":"durableClient","direction":"In"}""" + ], + EntryPoint = BuiltInFunctions.RunAgentEntityFunctionEntryPoint, + ScriptFile = BuiltInFunctions.ScriptFile, + }; + } + + /// + /// Creates function metadata for an HTTP trigger function. + /// + /// The base name used to derive the HTTP function name. + /// The HTTP route for the trigger. + /// The entry point method for the HTTP trigger. 
+ /// The allowed HTTP methods as a JSON array fragment (e.g., "\"get\""). Defaults to POST. + /// A configured for an HTTP trigger. + internal static DefaultFunctionMetadata CreateHttpTrigger(string name, string route, string entryPoint, string methods = "\"post\"") + { + return new DefaultFunctionMetadata() + { + Name = $"{BuiltInFunctions.HttpPrefix}{name}", + Language = "dotnet-isolated", + RawBindings = + [ + $"{{\"name\":\"req\",\"type\":\"httpTrigger\",\"direction\":\"In\",\"authLevel\":\"function\",\"methods\": [{methods}],\"route\":\"{route}\"}}", + "{\"name\":\"$return\",\"type\":\"http\",\"direction\":\"Out\"}", + "{\"name\":\"client\",\"type\":\"durableClient\",\"direction\":\"In\"}" + ], + EntryPoint = entryPoint, + ScriptFile = BuiltInFunctions.ScriptFile, + }; + } + + /// + /// Creates function metadata for an activity trigger function. + /// + /// The name of the activity function. + /// A configured for an activity trigger. + internal static DefaultFunctionMetadata CreateActivityTrigger(string functionName) + { + return new DefaultFunctionMetadata() + { + Name = functionName, + Language = "dotnet-isolated", + RawBindings = + [ + """{"name":"input","type":"activityTrigger","direction":"In","dataType":"String"}""", + """{"name":"durableTaskClient","type":"durableClient","direction":"In"}""" + ], + EntryPoint = BuiltInFunctions.InvokeWorkflowActivityFunctionEntryPoint, + ScriptFile = BuiltInFunctions.ScriptFile, + }; + } + + /// + /// Creates function metadata for an orchestration trigger function. + /// + /// The name of the orchestration function. + /// The entry point method for the orchestration trigger. + /// A configured for an orchestration trigger. 
+ internal static DefaultFunctionMetadata CreateOrchestrationTrigger(string functionName, string entryPoint) + { + return new DefaultFunctionMetadata() + { + Name = functionName, + Language = "dotnet-isolated", + RawBindings = + [ + """{"name":"context","type":"orchestrationTrigger","direction":"In"}""" + ], + EntryPoint = entryPoint, + ScriptFile = BuiltInFunctions.ScriptFile, + }; + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionsApplicationBuilderExtensions.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionsApplicationBuilderExtensions.cs index e13c6008ea..ceb47c389a 100644 --- a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionsApplicationBuilderExtensions.cs +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionsApplicationBuilderExtensions.cs @@ -1,6 +1,7 @@ // Copyright (c) Microsoft. All rights reserved. using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; using Microsoft.Azure.Functions.Worker.Builder; using Microsoft.Azure.Functions.Worker.Core.FunctionMetadata; using Microsoft.Extensions.DependencyInjection; @@ -43,4 +44,90 @@ public static FunctionsApplicationBuilder ConfigureDurableAgents( return builder; } + + /// + /// Configures durable options for the functions application, allowing customization of Durable Task framework + /// settings. + /// + /// This method ensures that a single shared instance is used across all + /// configuration calls. If any workflows have been added, it configures the necessary orchestrations and registers + /// required middleware. + /// The functions application builder to configure. Cannot be null. + /// An action that configures the instance. Cannot be null. + /// The updated instance, enabling method chaining. 
+ public static FunctionsApplicationBuilder ConfigureDurableOptions( + this FunctionsApplicationBuilder builder, + Action configure) + { + ArgumentNullException.ThrowIfNull(builder); + ArgumentNullException.ThrowIfNull(configure); + + // Ensure FunctionsDurableOptions is registered BEFORE the core extension creates a plain DurableOptions + FunctionsDurableOptions sharedOptions = GetOrCreateSharedOptions(builder.Services); + + builder.Services.ConfigureDurableOptions(configure); + + if (sharedOptions.Workflows.Workflows.Count > 0) + { + builder.Services.TryAddEnumerable(ServiceDescriptor.Singleton()); + } + + EnsureMiddlewareRegistered(builder); + + return builder; + } + + /// + /// Configures durable workflow support for the specified Azure Functions application builder. + /// + /// The instance to configure for durable workflows. + /// An action that configures the , allowing customization of durable workflow behavior. + /// The updated instance, enabling method chaining. + public static FunctionsApplicationBuilder ConfigureDurableWorkflows( + this FunctionsApplicationBuilder builder, + Action configure) + { + ArgumentNullException.ThrowIfNull(configure); + + return builder.ConfigureDurableOptions(options => configure(options.Workflows)); + } + + private static void EnsureMiddlewareRegistered(FunctionsApplicationBuilder builder) + { + // Guard against registering the middleware filter multiple times in the pipeline. 
+ if (builder.Services.Any(d => d.ServiceType == typeof(BuiltInFunctionExecutor))) + { + return; + } + + builder.UseWhen(static context => + string.Equals(context.FunctionDefinition.EntryPoint, BuiltInFunctions.RunAgentHttpFunctionEntryPoint, StringComparison.Ordinal) || + string.Equals(context.FunctionDefinition.EntryPoint, BuiltInFunctions.RunAgentEntityFunctionEntryPoint, StringComparison.Ordinal) || + string.Equals(context.FunctionDefinition.EntryPoint, BuiltInFunctions.RunWorkflowOrchestrationHttpFunctionEntryPoint, StringComparison.Ordinal) || + string.Equals(context.FunctionDefinition.EntryPoint, BuiltInFunctions.RunWorkflowOrchestrationFunctionEntryPoint, StringComparison.Ordinal) || + string.Equals(context.FunctionDefinition.EntryPoint, BuiltInFunctions.InvokeWorkflowActivityFunctionEntryPoint, StringComparison.Ordinal) || + string.Equals(context.FunctionDefinition.EntryPoint, BuiltInFunctions.GetWorkflowStatusHttpFunctionEntryPoint, StringComparison.Ordinal) || + string.Equals(context.FunctionDefinition.EntryPoint, BuiltInFunctions.RespondToWorkflowHttpFunctionEntryPoint, StringComparison.Ordinal) + ); + builder.Services.TryAddSingleton(); + } + + /// + /// Gets or creates a shared instance from the service collection. + /// + private static FunctionsDurableOptions GetOrCreateSharedOptions(IServiceCollection services) + { + ServiceDescriptor? 
existingDescriptor = services.FirstOrDefault( + d => d.ServiceType == typeof(DurableOptions) && d.ImplementationInstance is not null); + + if (existingDescriptor?.ImplementationInstance is FunctionsDurableOptions existing) + { + return existing; + } + + FunctionsDurableOptions options = new(); + services.AddSingleton(options); + services.AddSingleton(options); + return options; + } } diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionsDurableOptions.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionsDurableOptions.cs new file mode 100644 index 0000000000..6e7b6ec5a8 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/FunctionsDurableOptions.cs @@ -0,0 +1,29 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.DurableTask; + +namespace Microsoft.Agents.AI.Hosting.AzureFunctions; + +/// +/// Provides Azure Functions–specific configuration for durable workflows. +/// +internal sealed class FunctionsDurableOptions : DurableOptions +{ + private readonly HashSet _statusEndpointWorkflows = new(StringComparer.OrdinalIgnoreCase); + + /// + /// Enables the status HTTP endpoint for the specified workflow. + /// + internal void EnableStatusEndpoint(string workflowName) + { + this._statusEndpointWorkflows.Add(workflowName); + } + + /// + /// Returns whether the status endpoint is enabled for the specified workflow. 
+ /// + internal bool IsStatusEndpointEnabled(string workflowName) + { + return this._statusEndpointWorkflows.Contains(workflowName); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Logs.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Logs.cs index c49d2b39df..73c3140266 100644 --- a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Logs.cs +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Logs.cs @@ -17,4 +17,16 @@ internal static partial class Logs Level = LogLevel.Information, Message = "Registering {TriggerType} function for agent '{AgentName}'")] public static partial void LogRegisteringTriggerForAgent(this ILogger logger, string agentName, string triggerType); + + [LoggerMessage( + EventId = 102, + Level = LogLevel.Information, + Message = "Registering {TriggerType} trigger function '{FunctionName}' for workflow '{WorkflowKey}'")] + public static partial void LogRegisteringWorkflowTrigger(this ILogger logger, string workflowKey, string functionName, string triggerType); + + [LoggerMessage( + EventId = 103, + Level = LogLevel.Information, + Message = "Function metadata transformation complete. Added {AddedCount} workflow function(s). 
Total function count: {TotalCount}")] + public static partial void LogTransformationComplete(this ILogger logger, int addedCount, int totalCount); } diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Microsoft.Agents.AI.Hosting.AzureFunctions.csproj b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Microsoft.Agents.AI.Hosting.AzureFunctions.csproj index ce67c9621e..ae63946d97 100644 --- a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Microsoft.Agents.AI.Hosting.AzureFunctions.csproj +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Microsoft.Agents.AI.Hosting.AzureFunctions.csproj @@ -4,7 +4,8 @@ $(TargetFrameworksCore) enable - $(NoWarn);CA2007 + + $(NoWarn);CA2007;AD0001 diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/DurableWorkflowOptionsExtensions.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/DurableWorkflowOptionsExtensions.cs new file mode 100644 index 0000000000..6f40cbb791 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/DurableWorkflowOptionsExtensions.cs @@ -0,0 +1,30 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; + +namespace Microsoft.Agents.AI.Hosting.AzureFunctions; + +/// +/// Extension methods for to configure Azure Functions HTTP trigger options. +/// +public static class DurableWorkflowOptionsExtensions +{ + /// + /// Adds a workflow and optionally exposes a status HTTP endpoint for querying pending HITL requests. + /// + /// The workflow options to add the workflow to. + /// The workflow instance to add. + /// If , a GET endpoint is generated at workflows/{name}/status/{runId}. 
+ public static void AddWorkflow(this DurableWorkflowOptions options, Workflow workflow, bool exposeStatusEndpoint) + { + ArgumentNullException.ThrowIfNull(options); + + options.AddWorkflow(workflow); + + if (exposeStatusEndpoint && options.ParentOptions is FunctionsDurableOptions functionsOptions) + { + functionsOptions.EnableStatusEndpoint(workflow.Name!); + } + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/DurableWorkflowsFunctionMetadataTransformer.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/DurableWorkflowsFunctionMetadataTransformer.cs new file mode 100644 index 0000000000..c7ad9a5ebd --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/DurableWorkflowsFunctionMetadataTransformer.cs @@ -0,0 +1,152 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.DurableTask; +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.Agents.AI.Workflows; +using Microsoft.Azure.Functions.Worker.Core.FunctionMetadata; +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.Hosting.AzureFunctions; + +/// +/// Transforms function metadata by dynamically registering Azure Functions triggers +/// for each configured durable workflow and its executors. +/// +/// +/// For each workflow, this transformer registers: +/// +/// An HTTP trigger function to start the workflow orchestration via HTTP. +/// An orchestration trigger function to run the workflow orchestration. +/// An activity trigger function for each non-agent executor in the workflow. +/// An entity trigger function for each AI agent executor in the workflow. +/// +/// When multiple workflows share the same executor, the corresponding function is registered only once. 
+/// +internal sealed class DurableWorkflowsFunctionMetadataTransformer : IFunctionMetadataTransformer +{ + private readonly ILogger _logger; + private readonly FunctionsDurableOptions _options; + + /// + /// Initializes a new instance of the class. + /// + /// The logger instance for diagnostic output. + /// The durable options containing workflow configurations. + public DurableWorkflowsFunctionMetadataTransformer( + ILogger logger, + FunctionsDurableOptions durableOptions) + { + this._logger = logger ?? throw new ArgumentNullException(nameof(logger)); + ArgumentNullException.ThrowIfNull(durableOptions); + this._options = durableOptions; + } + + /// + public string Name => nameof(DurableWorkflowsFunctionMetadataTransformer); + + /// + public void Transform(IList original) + { + int initialCount = original.Count; + this._logger.LogTransformingFunctionMetadata(initialCount); + + // Track registered function names to avoid duplicates when workflows share executors. + HashSet registeredFunctions = []; + + DurableWorkflowOptions workflowOptions = this._options.Workflows; + foreach (var workflow in workflowOptions.Workflows) + { + string httpFunctionName = $"{BuiltInFunctions.HttpPrefix}{workflow.Key}"; + + if (this._logger.IsEnabled(LogLevel.Information)) + { + this._logger.LogInformation("Registering durable workflow functions for workflow '{WorkflowKey}' with HTTP trigger function name '{HttpFunctionName}'", workflow.Key, httpFunctionName); + } + + // Register an orchestration function for the workflow. 
+ string orchestrationFunctionName = WorkflowNamingHelper.ToOrchestrationFunctionName(workflow.Key); + if (registeredFunctions.Add(orchestrationFunctionName)) + { + this._logger.LogRegisteringWorkflowTrigger(workflow.Key, orchestrationFunctionName, "orchestration"); + original.Add(FunctionMetadataFactory.CreateOrchestrationTrigger( + orchestrationFunctionName, + BuiltInFunctions.RunWorkflowOrchestrationFunctionEntryPoint)); + } + + // Register an HTTP trigger so users can start this workflow via HTTP. + if (registeredFunctions.Add(httpFunctionName)) + { + this._logger.LogRegisteringWorkflowTrigger(workflow.Key, httpFunctionName, "http"); + original.Add(FunctionMetadataFactory.CreateHttpTrigger( + workflow.Key, + $"workflows/{workflow.Key}/run", + BuiltInFunctions.RunWorkflowOrchestrationHttpFunctionEntryPoint)); + } + + // Register a status endpoint if opted in via AddWorkflow(exposeStatusEndpoint: true). + if (this._options.IsStatusEndpointEnabled(workflow.Key)) + { + string statusFunctionName = $"{BuiltInFunctions.HttpPrefix}{workflow.Key}-status"; + if (registeredFunctions.Add(statusFunctionName)) + { + this._logger.LogRegisteringWorkflowTrigger(workflow.Key, statusFunctionName, "http-status"); + original.Add(FunctionMetadataFactory.CreateHttpTrigger( + $"{workflow.Key}-status", + $"workflows/{workflow.Key}/status/{{runId}}", + BuiltInFunctions.GetWorkflowStatusHttpFunctionEntryPoint, + methods: "\"get\"")); + } + } + + // Register a respond endpoint when the workflow contains RequestPort nodes. 
+ bool hasRequestPorts = workflow.Value.ReflectExecutors().Values.Any(b => b is RequestPortBinding); + if (hasRequestPorts) + { + string respondFunctionName = $"{BuiltInFunctions.HttpPrefix}{workflow.Key}-respond"; + if (registeredFunctions.Add(respondFunctionName)) + { + this._logger.LogRegisteringWorkflowTrigger(workflow.Key, respondFunctionName, "http-respond"); + original.Add(FunctionMetadataFactory.CreateHttpTrigger( + $"{workflow.Key}-respond", + $"workflows/{workflow.Key}/respond/{{runId}}", + BuiltInFunctions.RespondToWorkflowHttpFunctionEntryPoint)); + } + } + + // Register activity or entity functions for each executor in the workflow. + // ReflectExecutors() returns all executors across the graph; no need to manually traverse edges. + foreach (KeyValuePair entry in workflow.Value.ReflectExecutors()) + { + // Sub-workflow and RequestPort bindings use specialized dispatch, not activities. + if (entry.Value is SubworkflowBinding or RequestPortBinding) + { + continue; + } + + string executorName = WorkflowNamingHelper.GetExecutorName(entry.Key); + + // AI agent executors are backed by durable entities; other executors use activity triggers. 
+ if (entry.Value is AIAgentBinding) + { + string entityName = AgentSessionId.ToEntityName(executorName); + if (registeredFunctions.Add(entityName)) + { + this._logger.LogRegisteringWorkflowTrigger(workflow.Key, entityName, "entity"); + original.Add(FunctionMetadataFactory.CreateEntityTrigger(executorName)); + } + } + else + { + string functionName = WorkflowNamingHelper.ToOrchestrationFunctionName(executorName); + if (registeredFunctions.Add(functionName)) + { + this._logger.LogRegisteringWorkflowTrigger(workflow.Key, functionName, "activity"); + original.Add(FunctionMetadataFactory.CreateActivityTrigger(functionName)); + } + } + } + } + + this._logger.LogTransformationComplete(original.Count - initialCount, original.Count); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/WorkflowOrchestrator.cs b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/WorkflowOrchestrator.cs new file mode 100644 index 0000000000..f89abedc23 --- /dev/null +++ b/dotnet/src/Microsoft.Agents.AI.Hosting.AzureFunctions/Workflows/WorkflowOrchestrator.cs @@ -0,0 +1,51 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Microsoft.Agents.AI.DurableTask.Workflows; +using Microsoft.DurableTask; +using Microsoft.Extensions.DependencyInjection; +using Microsoft.Extensions.Logging; + +namespace Microsoft.Agents.AI.Hosting.AzureFunctions; + +/// +/// A custom implementation that delegates workflow orchestration +/// execution to the . +/// +internal sealed class WorkflowOrchestrator : ITaskOrchestrator +{ + private readonly IServiceProvider _serviceProvider; + + /// + /// Initializes a new instance of the class. + /// + /// The service provider used to resolve workflow dependencies. + public WorkflowOrchestrator(IServiceProvider serviceProvider) + { + this._serviceProvider = serviceProvider ?? 
throw new ArgumentNullException(nameof(serviceProvider)); + } + + /// + public Type InputType => typeof(DurableWorkflowInput); + + /// + public Type OutputType => typeof(DurableWorkflowResult); + + /// + public async Task RunAsync(TaskOrchestrationContext context, object? input) + { + ArgumentNullException.ThrowIfNull(context); + + DurableWorkflowRunner runner = this._serviceProvider.GetRequiredService(); + ILogger logger = context.CreateReplaySafeLogger(context.Name); + + DurableWorkflowInput workflowInput = input switch + { + DurableWorkflowInput existing => existing, + _ => new DurableWorkflowInput { Input = input! } + }; + + // ConfigureAwait(true) is required to preserve the orchestration context + // across awaits, which the Durable Task framework uses for replay. + return await runner.RunWorkflowOrchestrationAsync(context, workflowInput, logger).ConfigureAwait(true); + } +} diff --git a/dotnet/src/Microsoft.Agents.AI.Workflows/Microsoft.Agents.AI.Workflows.csproj b/dotnet/src/Microsoft.Agents.AI.Workflows/Microsoft.Agents.AI.Workflows.csproj index 27269eb598..c103ead32d 100644 --- a/dotnet/src/Microsoft.Agents.AI.Workflows/Microsoft.Agents.AI.Workflows.csproj +++ b/dotnet/src/Microsoft.Agents.AI.Workflows/Microsoft.Agents.AI.Workflows.csproj @@ -25,6 +25,7 @@ + diff --git a/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/ConsoleAppSamplesValidation.cs b/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/ConsoleAppSamplesValidation.cs index d49614868f..217eb66010 100644 --- a/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/ConsoleAppSamplesValidation.cs +++ b/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/ConsoleAppSamplesValidation.cs @@ -2,47 +2,32 @@ using System.Collections.Concurrent; using System.Diagnostics; -using System.Reflection; using System.Text; -using Microsoft.Extensions.Configuration; -using Microsoft.Extensions.Logging; using Xunit.Abstractions; namespace 
Microsoft.Agents.AI.DurableTask.IntegrationTests; +/// +/// Integration tests for validating the durable agent console app samples +/// located in samples/Durable/Agents/ConsoleApps. +/// [Collection("Samples")] [Trait("Category", "SampleValidation")] -public sealed class ConsoleAppSamplesValidation(ITestOutputHelper outputHelper) : IAsyncLifetime +public sealed class ConsoleAppSamplesValidation(ITestOutputHelper outputHelper) : SamplesValidationBase(outputHelper) { - private const string DtsPort = "8080"; - private const string RedisPort = "6379"; - - private static readonly string s_dotnetTargetFramework = GetTargetFramework(); - private static readonly IConfiguration s_configuration = - new ConfigurationBuilder() - .AddUserSecrets(Assembly.GetExecutingAssembly()) - .AddEnvironmentVariables() - .Build(); - - private static bool s_infrastructureStarted; private static readonly string s_samplesPath = Path.GetFullPath( Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "..", "..", "..", "..", "..", "samples", "04-hosting", "DurableAgents", "ConsoleApps")); - private readonly ITestOutputHelper _outputHelper = outputHelper; + /// + protected override string SamplesPath => s_samplesPath; - async Task IAsyncLifetime.InitializeAsync() - { - if (!s_infrastructureStarted) - { - await this.StartSharedInfrastructureAsync(); - s_infrastructureStarted = true; - } - } + /// + protected override bool RequiresRedis => true; - async Task IAsyncLifetime.DisposeAsync() + /// + protected override void ConfigureAdditionalEnvironmentVariables(ProcessStartInfo startInfo, Action setEnvVar) { - // Nothing to clean up - await Task.CompletedTask; + setEnvVar("REDIS_CONNECTION_STRING", $"localhost:{RedisPort}"); } [Fact] @@ -475,7 +460,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => // (streams can complete very quickly, so we need to interrupt early) if (foundConversationStart && !interrupted && contentLinesBeforeInterrupt >= 2) { - 
this._outputHelper.WriteLine($"Interrupting stream after {contentLinesBeforeInterrupt} content lines"); + this.OutputHelper.WriteLine($"Interrupting stream after {contentLinesBeforeInterrupt} content lines"); interrupted = true; interruptTime = DateTime.Now; @@ -493,7 +478,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => foundLastCursor = true; // Send Enter again to resume - this._outputHelper.WriteLine("Resuming stream from last cursor"); + this.OutputHelper.WriteLine("Resuming stream from last cursor"); await this.WriteInputAsync(process, string.Empty, testTimeoutCts.Token); resumed = true; } @@ -521,7 +506,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => if (timeSinceInterrupt < TimeSpan.FromSeconds(2)) { // Continue reading for a bit more to catch the cancellation message - this._outputHelper.WriteLine("Stream completed naturally, but waiting for Last cursor message after interrupt..."); + this.OutputHelper.WriteLine("Stream completed naturally, but waiting for Last cursor message after interrupt..."); continue; } } @@ -536,7 +521,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => // Stop once we've verified the interrupt/resume flow works if (resumed && foundResumeMessage && contentLinesAfterResume >= 5) { - this._outputHelper.WriteLine($"Successfully verified interrupt/resume: {contentLinesBeforeInterrupt} lines before, {contentLinesAfterResume} lines after"); + this.OutputHelper.WriteLine($"Successfully verified interrupt/resume: {contentLinesBeforeInterrupt} lines before, {contentLinesAfterResume} lines after"); break; } } @@ -547,7 +532,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => TimeSpan timeSinceInterrupt = DateTime.Now - interruptTime.Value; if (timeSinceInterrupt < TimeSpan.FromSeconds(3)) { - this._outputHelper.WriteLine("Waiting for Last cursor message after interrupt..."); + this.OutputHelper.WriteLine("Waiting for Last cursor message after 
interrupt..."); using CancellationTokenSource waitCts = new(TimeSpan.FromSeconds(2)); try { @@ -558,7 +543,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => foundLastCursor = true; if (!resumed) { - this._outputHelper.WriteLine("Resuming stream from last cursor"); + this.OutputHelper.WriteLine("Resuming stream from last cursor"); await this.WriteInputAsync(process, string.Empty, testTimeoutCts.Token); resumed = true; } @@ -576,7 +561,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => catch (OperationCanceledException) { // Timeout - check if we got enough to verify the flow - this._outputHelper.WriteLine($"Read timeout reached. Interrupted: {interrupted}, Resumed: {resumed}, Content before: {contentLinesBeforeInterrupt}, Content after: {contentLinesAfterResume}"); + this.OutputHelper.WriteLine($"Read timeout reached. Interrupted: {interrupted}, Resumed: {resumed}, Content before: {contentLinesBeforeInterrupt}, Content after: {contentLinesAfterResume}"); } Assert.True(foundConversationStart, "Conversation start message not found."); @@ -586,7 +571,7 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => // but we should still verify we got the conversation started if (!interrupted) { - this._outputHelper.WriteLine("WARNING: Stream completed before interrupt could be sent. This may indicate the stream is too fast."); + this.OutputHelper.WriteLine("WARNING: Stream completed before interrupt could be sent. 
This may indicate the stream is too fast."); } Assert.True(interrupted, "Stream was not interrupted (may have completed too quickly)."); @@ -596,365 +581,4 @@ await this.RunSampleTestAsync(samplePath, async (process, logs) => Assert.True(contentLinesAfterResume > 0, "No content received after resume (expected to continue from cursor, not restart)."); }); } - - private static string GetTargetFramework() - { - string filePath = new Uri(typeof(ConsoleAppSamplesValidation).Assembly.Location).LocalPath; - string directory = Path.GetDirectoryName(filePath)!; - string tfm = Path.GetFileName(directory); - if (tfm.StartsWith("net", StringComparison.OrdinalIgnoreCase)) - { - return tfm; - } - - throw new InvalidOperationException($"Unable to find target framework in path: {filePath}"); - } - - private async Task StartSharedInfrastructureAsync() - { - this._outputHelper.WriteLine("Starting shared infrastructure for console app samples..."); - - // Start DTS emulator - await this.StartDtsEmulatorAsync(); - - // Start Redis - await this.StartRedisAsync(); - - // Wait for infrastructure to be ready - await Task.Delay(TimeSpan.FromSeconds(5)); - } - - private async Task StartDtsEmulatorAsync() - { - // Start DTS emulator if it's not already running - if (!await this.IsDtsEmulatorRunningAsync()) - { - this._outputHelper.WriteLine("Starting DTS emulator..."); - await this.RunCommandAsync("docker", [ - "run", "-d", - "--name", "dts-emulator", - "-p", $"{DtsPort}:8080", - "-e", "DTS_USE_DYNAMIC_TASK_HUBS=true", - "mcr.microsoft.com/dts/dts-emulator:latest" - ]); - } - } - - private async Task StartRedisAsync() - { - if (!await this.IsRedisRunningAsync()) - { - this._outputHelper.WriteLine("Starting Redis..."); - await this.RunCommandAsync("docker", [ - "run", "-d", - "--name", "redis", - "-p", $"{RedisPort}:6379", - "redis:latest" - ]); - } - } - - private async Task IsDtsEmulatorRunningAsync() - { - this._outputHelper.WriteLine($"Checking if DTS emulator is running at 
http://localhost:{DtsPort}/healthz..."); - - // DTS emulator doesn't support HTTP/1.1, so we need to use HTTP/2.0 - using HttpClient http2Client = new() - { - DefaultRequestVersion = new Version(2, 0), - DefaultVersionPolicy = HttpVersionPolicy.RequestVersionExact - }; - - try - { - using CancellationTokenSource timeoutCts = new(TimeSpan.FromSeconds(30)); - using HttpResponseMessage response = await http2Client.GetAsync(new Uri($"http://localhost:{DtsPort}/healthz"), timeoutCts.Token); - if (response.Content.Headers.ContentLength > 0) - { - string content = await response.Content.ReadAsStringAsync(timeoutCts.Token); - this._outputHelper.WriteLine($"DTS emulator health check response: {content}"); - } - - if (response.IsSuccessStatusCode) - { - this._outputHelper.WriteLine("DTS emulator is running"); - return true; - } - - this._outputHelper.WriteLine($"DTS emulator is not running. Status code: {response.StatusCode}"); - return false; - } - catch (HttpRequestException ex) - { - this._outputHelper.WriteLine($"DTS emulator is not running: {ex.Message}"); - return false; - } - } - - private async Task IsRedisRunningAsync() - { - this._outputHelper.WriteLine($"Checking if Redis is running at localhost:{RedisPort}..."); - - try - { - using CancellationTokenSource timeoutCts = new(TimeSpan.FromSeconds(30)); - ProcessStartInfo startInfo = new() - { - FileName = "docker", - Arguments = "exec redis redis-cli ping", - UseShellExecute = false, - RedirectStandardOutput = true, - RedirectStandardError = true, - CreateNoWindow = true - }; - - using Process process = new() { StartInfo = startInfo }; - if (!process.Start()) - { - this._outputHelper.WriteLine("Failed to start docker exec command"); - return false; - } - - string output = await process.StandardOutput.ReadToEndAsync(timeoutCts.Token); - await process.WaitForExitAsync(timeoutCts.Token); - - if (process.ExitCode == 0 && output.Contains("PONG", StringComparison.OrdinalIgnoreCase)) - { - 
this._outputHelper.WriteLine("Redis is running"); - return true; - } - - this._outputHelper.WriteLine($"Redis is not running. Exit code: {process.ExitCode}, Output: {output}"); - return false; - } - catch (Exception ex) - { - this._outputHelper.WriteLine($"Redis is not running: {ex.Message}"); - return false; - } - } - - private async Task RunSampleTestAsync(string samplePath, Func, Task> testAction) - { - // Generate a unique TaskHub name for this sample test to prevent cross-test interference - // when multiple tests run together and share the same DTS emulator. - string uniqueTaskHubName = $"sample-{Guid.NewGuid().ToString("N").Substring(0, 6)}"; - - // Start the console app - // Use BlockingCollection to safely read logs asynchronously captured from the process - using BlockingCollection logsContainer = []; - using Process appProcess = this.StartConsoleApp(samplePath, logsContainer, uniqueTaskHubName); - try - { - // Run the test - await testAction(appProcess, logsContainer); - } - catch (OperationCanceledException e) - { - throw new TimeoutException("Core test logic timed out!", e); - } - finally - { - logsContainer.CompleteAdding(); - await this.StopProcessAsync(appProcess); - } - } - - private sealed record OutputLog(DateTime Timestamp, LogLevel Level, string Message); - - /// - /// Writes a line to the process's stdin and flushes it. - /// Logs the input being sent for debugging purposes. - /// - private async Task WriteInputAsync(Process process, string input, CancellationToken cancellationToken) - { - this._outputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} [{process.ProcessName}(in)]: {input}"); - await process.StandardInput.WriteLineAsync(input); - await process.StandardInput.FlushAsync(cancellationToken); - } - - /// - /// Reads a line from the logs queue, filtering for Information level logs (stdout). - /// Returns null if the collection is completed and empty, or if cancellation is requested. - /// - private string? 
ReadLogLine(BlockingCollection logs, CancellationToken cancellationToken) - { - try - { - while (!cancellationToken.IsCancellationRequested) - { - // Block until a log entry is available or cancellation is requested - // Take will throw OperationCanceledException if cancelled, or InvalidOperationException if collection is completed - OutputLog log = logs.Take(cancellationToken); - - // Check for unhandled exceptions in the logs, which are never expected (but can happen) - if (log.Message.Contains("Unhandled exception")) - { - Assert.Fail("Console app encountered an unhandled exception."); - } - - // Only return Information level logs (stdout), skip Error logs (stderr) - if (log.Level == LogLevel.Information) - { - return log.Message; - } - } - } - catch (OperationCanceledException) - { - // Cancellation requested - return null; - } - catch (InvalidOperationException) - { - // Collection is completed and empty - return null; - } - - return null; - } - - private Process StartConsoleApp(string samplePath, BlockingCollection logs, string taskHubName) - { - ProcessStartInfo startInfo = new() - { - FileName = "dotnet", - Arguments = $"run --framework {s_dotnetTargetFramework}", - WorkingDirectory = samplePath, - UseShellExecute = false, - RedirectStandardOutput = true, - RedirectStandardError = true, - RedirectStandardInput = true, - }; - - string openAiEndpoint = s_configuration["AZURE_OPENAI_ENDPOINT"] ?? - throw new InvalidOperationException("The required AZURE_OPENAI_ENDPOINT env variable is not set."); - string openAiDeployment = s_configuration["AZURE_OPENAI_DEPLOYMENT_NAME"] ?? 
- throw new InvalidOperationException("The required AZURE_OPENAI_DEPLOYMENT_NAME env variable is not set."); - - void SetAndLogEnvironmentVariable(string key, string value) - { - this._outputHelper.WriteLine($"Setting environment variable for {startInfo.FileName} sub-process: {key}={value}"); - startInfo.EnvironmentVariables[key] = value; - } - - // Set required environment variables for the app - SetAndLogEnvironmentVariable("AZURE_OPENAI_ENDPOINT", openAiEndpoint); - SetAndLogEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT_NAME", openAiDeployment); - SetAndLogEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING", - $"Endpoint=http://localhost:{DtsPort};TaskHub={taskHubName};Authentication=None"); - SetAndLogEnvironmentVariable("REDIS_CONNECTION_STRING", $"localhost:{RedisPort}"); - - Process process = new() { StartInfo = startInfo }; - - // Capture the output and error streams asynchronously - // These events fire asynchronously, so we add to the blocking collection which is thread-safe - process.ErrorDataReceived += (sender, e) => - { - if (e.Data != null) - { - string logMessage = $"{DateTime.Now:HH:mm:ss.fff} [{startInfo.FileName}(err)]: {e.Data}"; - this._outputHelper.WriteLine(logMessage); - Debug.WriteLine(logMessage); - try - { - logs.Add(new OutputLog(DateTime.Now, LogLevel.Error, e.Data)); - } - catch (InvalidOperationException) - { - // Collection is completed, ignore - } - } - }; - - process.OutputDataReceived += (sender, e) => - { - if (e.Data != null) - { - string logMessage = $"{DateTime.Now:HH:mm:ss.fff} [{startInfo.FileName}(out)]: {e.Data}"; - this._outputHelper.WriteLine(logMessage); - Debug.WriteLine(logMessage); - try - { - logs.Add(new OutputLog(DateTime.Now, LogLevel.Information, e.Data)); - } - catch (InvalidOperationException) - { - // Collection is completed, ignore - } - } - }; - - if (!process.Start()) - { - throw new InvalidOperationException("Failed to start the console app"); - } - - process.BeginErrorReadLine(); - 
process.BeginOutputReadLine(); - - return process; - } - - private async Task RunCommandAsync(string command, string[] args) - { - await this.RunCommandAsync(command, workingDirectory: null, args: args); - } - - private async Task RunCommandAsync(string command, string? workingDirectory, string[] args) - { - ProcessStartInfo startInfo = new() - { - FileName = command, - Arguments = string.Join(" ", args), - WorkingDirectory = workingDirectory, - UseShellExecute = false, - RedirectStandardOutput = true, - RedirectStandardError = true, - CreateNoWindow = true - }; - - this._outputHelper.WriteLine($"Running command: {command} {string.Join(" ", args)}"); - - using Process process = new() { StartInfo = startInfo }; - process.ErrorDataReceived += (sender, e) => this._outputHelper.WriteLine($"[{command}(err)]: {e.Data}"); - process.OutputDataReceived += (sender, e) => this._outputHelper.WriteLine($"[{command}(out)]: {e.Data}"); - if (!process.Start()) - { - throw new InvalidOperationException("Failed to start the command"); - } - process.BeginErrorReadLine(); - process.BeginOutputReadLine(); - - using CancellationTokenSource cancellationTokenSource = new(TimeSpan.FromMinutes(1)); - await process.WaitForExitAsync(cancellationTokenSource.Token); - - this._outputHelper.WriteLine($"Command completed with exit code: {process.ExitCode}"); - } - - private async Task StopProcessAsync(Process process) - { - try - { - if (!process.HasExited) - { - this._outputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Killing process {process.ProcessName}#{process.Id}"); - process.Kill(entireProcessTree: true); - - using CancellationTokenSource timeoutCts = new(TimeSpan.FromSeconds(10)); - await process.WaitForExitAsync(timeoutCts.Token); - this._outputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Process exited: {process.Id}"); - } - } - catch (Exception ex) - { - this._outputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Failed to stop process: {ex.Message}"); - } - } - - private 
CancellationTokenSource CreateTestTimeoutCts(TimeSpan? timeout = null) - { - TimeSpan testTimeout = Debugger.IsAttached ? TimeSpan.FromMinutes(5) : timeout ?? TimeSpan.FromSeconds(60); - return new CancellationTokenSource(testTimeout); - } } diff --git a/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/SamplesValidationBase.cs b/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/SamplesValidationBase.cs new file mode 100644 index 0000000000..24903d4d87 --- /dev/null +++ b/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/SamplesValidationBase.cs @@ -0,0 +1,449 @@ +// Copyright (c) Microsoft. All rights reserved. + +using System.Collections.Concurrent; +using System.Diagnostics; +using System.Reflection; +using Microsoft.Extensions.Configuration; +using Microsoft.Extensions.Logging; +using Xunit.Abstractions; + +namespace Microsoft.Agents.AI.DurableTask.IntegrationTests; + +/// +/// Base class for sample validation integration tests providing shared infrastructure +/// setup and utility methods for running console app samples. +/// +public abstract class SamplesValidationBase : IAsyncLifetime +{ + protected const string DtsPort = "8080"; + protected const string RedisPort = "6379"; + + protected static readonly string DotnetTargetFramework = GetTargetFramework(); + protected static readonly IConfiguration Configuration = + new ConfigurationBuilder() + .AddUserSecrets(Assembly.GetExecutingAssembly()) + .AddEnvironmentVariables() + .Build(); + + // Semaphores for thread-safe initialization of shared infrastructure. + // xUnit may run tests in parallel, so we need to ensure that DTS emulator and Redis + // are started only once across all test instances. Using SemaphoreSlim allows async-safe + // locking, and the double-check pattern (check flag, acquire lock, check flag again) + // minimizes lock contention after initialization is complete. 
+ private static readonly SemaphoreSlim s_dtsInitLock = new(1, 1); + private static readonly SemaphoreSlim s_redisInitLock = new(1, 1); + private static bool s_dtsInfrastructureStarted; + private static bool s_redisInfrastructureStarted; + + protected SamplesValidationBase(ITestOutputHelper outputHelper) + { + this.OutputHelper = outputHelper; + } + + /// + /// Gets the test output helper for logging. + /// + protected ITestOutputHelper OutputHelper { get; } + + /// + /// Gets the base path to the samples directory for this test class. + /// + protected abstract string SamplesPath { get; } + + /// + /// Gets whether this test class requires Redis infrastructure. + /// + protected virtual bool RequiresRedis => false; + + /// + /// Gets the task hub name prefix for this test class. + /// + protected virtual string TaskHubPrefix => "sample"; + + /// + public async Task InitializeAsync() + { + await EnsureDtsInfrastructureStartedAsync(this.OutputHelper, this.StartDtsEmulatorAsync); + + if (this.RequiresRedis) + { + await EnsureRedisInfrastructureStartedAsync(this.OutputHelper, this.StartRedisAsync); + } + + await Task.Delay(TimeSpan.FromSeconds(5)); + } + + /// + /// Ensures DTS infrastructure is started exactly once across all test instances. + /// Static method writes to static field to avoid the code smell of instance methods modifying shared state. + /// + private static async Task EnsureDtsInfrastructureStartedAsync(ITestOutputHelper outputHelper, Func startAction) + { + if (s_dtsInfrastructureStarted) + { + return; + } + + await s_dtsInitLock.WaitAsync(); + try + { + if (!s_dtsInfrastructureStarted) + { + outputHelper.WriteLine("Starting shared DTS infrastructure..."); + await startAction(); + s_dtsInfrastructureStarted = true; + } + } + finally + { + s_dtsInitLock.Release(); + } + } + + /// + /// Ensures Redis infrastructure is started exactly once across all test instances. 
+ /// Static method writes to static field to avoid the code smell of instance methods modifying shared state. + /// + private static async Task EnsureRedisInfrastructureStartedAsync(ITestOutputHelper outputHelper, Func startAction) + { + if (s_redisInfrastructureStarted) + { + return; + } + + await s_redisInitLock.WaitAsync(); + try + { + if (!s_redisInfrastructureStarted) + { + outputHelper.WriteLine("Starting shared Redis infrastructure..."); + await startAction(); + s_redisInfrastructureStarted = true; + } + } + finally + { + s_redisInitLock.Release(); + } + } + + /// + public Task DisposeAsync() => Task.CompletedTask; + + protected sealed record OutputLog(DateTime Timestamp, LogLevel Level, string Message); + + /// + /// Runs a sample test by starting the console app and executing the provided test action. + /// + protected async Task RunSampleTestAsync(string samplePath, Func, Task> testAction) + { + string uniqueTaskHubName = $"{this.TaskHubPrefix}-{Guid.NewGuid():N}"[..^26]; + + using BlockingCollection logsContainer = []; + using Process appProcess = this.StartConsoleApp(samplePath, logsContainer, uniqueTaskHubName); + + try + { + await testAction(appProcess, logsContainer); + } + catch (OperationCanceledException e) + { + throw new TimeoutException("Core test logic timed out!", e); + } + finally + { + logsContainer.CompleteAdding(); + await this.StopProcessAsync(appProcess); + } + } + + /// + /// Writes a line to the process's stdin and flushes it. + /// + protected async Task WriteInputAsync(Process process, string input, CancellationToken cancellationToken) + { + this.OutputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} [{process.ProcessName}(in)]: {input}"); + await process.StandardInput.WriteLineAsync(input); + await process.StandardInput.FlushAsync(cancellationToken); + } + + /// + /// Reads the next Information-level log line from the queue. + /// Returns null if cancelled or collection is completed. + /// + protected string? 
ReadLogLine(BlockingCollection logs, CancellationToken cancellationToken) + { + try + { + while (!cancellationToken.IsCancellationRequested) + { + OutputLog log = logs.Take(cancellationToken); + + if (log.Message.Contains("Unhandled exception")) + { + Assert.Fail("Console app encountered an unhandled exception."); + } + + if (log.Level == LogLevel.Information) + { + return log.Message; + } + } + } + catch (OperationCanceledException) + { + return null; + } + catch (InvalidOperationException) + { + return null; + } + + return null; + } + + /// + /// Creates a cancellation token source with the specified timeout for test operations. + /// + protected CancellationTokenSource CreateTestTimeoutCts(TimeSpan? timeout = null) + { + TimeSpan testTimeout = Debugger.IsAttached ? TimeSpan.FromMinutes(5) : timeout ?? TimeSpan.FromSeconds(60); + return new CancellationTokenSource(testTimeout); + } + + /// + /// Allows derived classes to set additional environment variables for the console app process. 
+ /// + protected virtual void ConfigureAdditionalEnvironmentVariables(ProcessStartInfo startInfo, Action setEnvVar) + { + } + + private static string GetTargetFramework() + { + string filePath = new Uri(typeof(SamplesValidationBase).Assembly.Location).LocalPath; + string directory = Path.GetDirectoryName(filePath)!; + string tfm = Path.GetFileName(directory); + if (tfm.StartsWith("net", StringComparison.OrdinalIgnoreCase)) + { + return tfm; + } + + throw new InvalidOperationException($"Unable to find target framework in path: {filePath}"); + } + + private async Task StartDtsEmulatorAsync() + { + if (!await this.IsDtsEmulatorRunningAsync()) + { + this.OutputHelper.WriteLine("Starting DTS emulator..."); + await this.RunCommandAsync("docker", "run", "-d", + "--name", "dts-emulator", + "-p", $"{DtsPort}:8080", + "-e", "DTS_USE_DYNAMIC_TASK_HUBS=true", + "mcr.microsoft.com/dts/dts-emulator:latest"); + } + } + + private async Task StartRedisAsync() + { + if (!await this.IsRedisRunningAsync()) + { + this.OutputHelper.WriteLine("Starting Redis..."); + await this.RunCommandAsync("docker", "run", "-d", + "--name", "redis", + "-p", $"{RedisPort}:6379", + "redis:latest"); + } + } + + private async Task IsDtsEmulatorRunningAsync() + { + this.OutputHelper.WriteLine($"Checking if DTS emulator is running at http://localhost:{DtsPort}/healthz..."); + + using HttpClient http2Client = new() + { + DefaultRequestVersion = new Version(2, 0), + DefaultVersionPolicy = HttpVersionPolicy.RequestVersionExact + }; + + try + { + using CancellationTokenSource timeoutCts = new(TimeSpan.FromSeconds(30)); + using HttpResponseMessage response = await http2Client.GetAsync( + new Uri($"http://localhost:{DtsPort}/healthz"), timeoutCts.Token); + + if (response.Content.Headers.ContentLength > 0) + { + string content = await response.Content.ReadAsStringAsync(timeoutCts.Token); + this.OutputHelper.WriteLine($"DTS emulator health check response: {content}"); + } + + bool isRunning = 
response.IsSuccessStatusCode; + this.OutputHelper.WriteLine(isRunning ? "DTS emulator is running" : $"DTS emulator not running. Status: {response.StatusCode}"); + return isRunning; + } + catch (HttpRequestException ex) + { + this.OutputHelper.WriteLine($"DTS emulator is not running: {ex.Message}"); + return false; + } + } + + private async Task IsRedisRunningAsync() + { + this.OutputHelper.WriteLine($"Checking if Redis is running at localhost:{RedisPort}..."); + + try + { + using CancellationTokenSource timeoutCts = new(TimeSpan.FromSeconds(30)); + ProcessStartInfo startInfo = new() + { + FileName = "docker", + Arguments = "exec redis redis-cli ping", + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true, + CreateNoWindow = true + }; + + using Process process = new() { StartInfo = startInfo }; + if (!process.Start()) + { + this.OutputHelper.WriteLine("Failed to start docker exec command"); + return false; + } + + string output = await process.StandardOutput.ReadToEndAsync(timeoutCts.Token); + await process.WaitForExitAsync(timeoutCts.Token); + + bool isRunning = process.ExitCode == 0 && output.Contains("PONG", StringComparison.OrdinalIgnoreCase); + this.OutputHelper.WriteLine(isRunning ? "Redis is running" : $"Redis not running. Exit: {process.ExitCode}, Output: {output}"); + return isRunning; + } + catch (Exception ex) + { + this.OutputHelper.WriteLine($"Redis is not running: {ex.Message}"); + return false; + } + } + + private Process StartConsoleApp(string samplePath, BlockingCollection logs, string taskHubName) + { + ProcessStartInfo startInfo = new() + { + FileName = "dotnet", + Arguments = $"run --framework {DotnetTargetFramework}", + WorkingDirectory = samplePath, + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true, + RedirectStandardInput = true, + }; + + string openAiEndpoint = Configuration["AZURE_OPENAI_ENDPOINT"] ?? 
+ throw new InvalidOperationException("The required AZURE_OPENAI_ENDPOINT env variable is not set."); + string openAiDeployment = Configuration["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"] ?? + throw new InvalidOperationException("The required AZURE_OPENAI_CHAT_DEPLOYMENT_NAME env variable is not set."); + + void SetAndLogEnvironmentVariable(string key, string value) + { + this.OutputHelper.WriteLine($"Setting environment variable for {startInfo.FileName} sub-process: {key}={value}"); + startInfo.EnvironmentVariables[key] = value; + } + + SetAndLogEnvironmentVariable("AZURE_OPENAI_ENDPOINT", openAiEndpoint); + SetAndLogEnvironmentVariable("AZURE_OPENAI_DEPLOYMENT", openAiDeployment); + SetAndLogEnvironmentVariable("DURABLE_TASK_SCHEDULER_CONNECTION_STRING", + $"Endpoint=http://localhost:{DtsPort};TaskHub={taskHubName};Authentication=None"); + + this.ConfigureAdditionalEnvironmentVariables(startInfo, SetAndLogEnvironmentVariable); + + Process process = new() { StartInfo = startInfo }; + + process.ErrorDataReceived += (sender, e) => this.HandleProcessOutput(e.Data, startInfo.FileName, "err", LogLevel.Error, logs); + process.OutputDataReceived += (sender, e) => this.HandleProcessOutput(e.Data, startInfo.FileName, "out", LogLevel.Information, logs); + + if (!process.Start()) + { + throw new InvalidOperationException("Failed to start the console app"); + } + + process.BeginErrorReadLine(); + process.BeginOutputReadLine(); + + return process; + } + + private void HandleProcessOutput(string? 
data, string processName, string stream, LogLevel level, BlockingCollection logs) + { + if (data is null) + { + return; + } + + string logMessage = $"{DateTime.Now:HH:mm:ss.fff} [{processName}({stream})]: {data}"; + this.OutputHelper.WriteLine(logMessage); + Debug.WriteLine(logMessage); + + try + { + logs.Add(new OutputLog(DateTime.Now, level, data)); + } + catch (InvalidOperationException) + { + // Collection completed + } + } + + private async Task RunCommandAsync(string command, params string[] args) + { + ProcessStartInfo startInfo = new() + { + FileName = command, + Arguments = string.Join(" ", args), + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true, + CreateNoWindow = true + }; + + this.OutputHelper.WriteLine($"Running command: {command} {string.Join(" ", args)}"); + + using Process process = new() { StartInfo = startInfo }; + process.ErrorDataReceived += (sender, e) => this.OutputHelper.WriteLine($"[{command}(err)]: {e.Data}"); + process.OutputDataReceived += (sender, e) => this.OutputHelper.WriteLine($"[{command}(out)]: {e.Data}"); + + if (!process.Start()) + { + throw new InvalidOperationException("Failed to start the command"); + } + + process.BeginErrorReadLine(); + process.BeginOutputReadLine(); + + using CancellationTokenSource cts = new(TimeSpan.FromMinutes(1)); + await process.WaitForExitAsync(cts.Token); + + this.OutputHelper.WriteLine($"Command completed with exit code: {process.ExitCode}"); + } + + private async Task StopProcessAsync(Process process) + { + try + { + if (!process.HasExited) + { + this.OutputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Killing process {process.ProcessName}#{process.Id}"); + process.Kill(entireProcessTree: true); + + using CancellationTokenSource cts = new(TimeSpan.FromSeconds(10)); + await process.WaitForExitAsync(cts.Token); + this.OutputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Process exited: {process.Id}"); + } + } + catch (Exception ex) + { + 
this.OutputHelper.WriteLine($"{DateTime.Now:HH:mm:ss.fff} Failed to stop process: {ex.Message}"); + } + } +} diff --git a/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/WorkflowConsoleAppSamplesValidation.cs b/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/WorkflowConsoleAppSamplesValidation.cs new file mode 100644 index 0000000000..2ab3b476c1 --- /dev/null +++ b/dotnet/tests/Microsoft.Agents.AI.DurableTask.IntegrationTests/WorkflowConsoleAppSamplesValidation.cs @@ -0,0 +1,565 @@ +// Copyright (c) Microsoft. All rights reserved. + +using Xunit.Abstractions; + +namespace Microsoft.Agents.AI.DurableTask.IntegrationTests; + +/// +/// Integration tests for validating the durable workflow console app samples +/// located in samples/04-hosting/DurableWorkflows/ConsoleApps. +/// +[Collection("Samples")] +[Trait("Category", "SampleValidation")] +public sealed class WorkflowConsoleAppSamplesValidation(ITestOutputHelper outputHelper) : SamplesValidationBase(outputHelper) +{ + private static readonly string s_samplesPath = Path.GetFullPath( + Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "..", "..", "..", "..", "..", "samples", "04-hosting", "DurableWorkflows", "ConsoleApps")); + + /// + protected override string SamplesPath => s_samplesPath; + + /// + protected override string TaskHubPrefix => "workflow"; + + [Fact] + public async Task SequentialWorkflowSampleValidationAsync() + { + using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "01_SequentialWorkflow"); + + await this.RunSampleTestAsync(samplePath, async (process, logs) => + { + bool inputSent = false; + bool workflowCompleted = false; + bool foundOrderLookup = false; + bool foundOrderCancel = false; + bool foundSendEmail = false; + + string? 
line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + if (!inputSent && line.Contains("Enter an order ID", StringComparison.OrdinalIgnoreCase)) + { + await this.WriteInputAsync(process, "12345", testTimeoutCts.Token); + inputSent = true; + } + + if (inputSent) + { + foundOrderLookup |= line.Contains("[Activity] OrderLookup:", StringComparison.Ordinal); + foundOrderCancel |= line.Contains("[Activity] OrderCancel:", StringComparison.Ordinal); + foundSendEmail |= line.Contains("[Activity] SendEmail:", StringComparison.Ordinal); + + if (line.Contains("Workflow completed. Cancellation email sent for order 12345", StringComparison.OrdinalIgnoreCase)) + { + workflowCompleted = true; + break; + } + } + + this.AssertNoError(line); + } + + Assert.True(inputSent, "Input was not sent to the workflow."); + Assert.True(foundOrderLookup, "OrderLookup executor log entry not found."); + Assert.True(foundOrderCancel, "OrderCancel executor log entry not found."); + Assert.True(foundSendEmail, "SendEmail executor log entry not found."); + Assert.True(workflowCompleted, "Workflow did not complete successfully."); + + await this.WriteInputAsync(process, "exit", testTimeoutCts.Token); + }); + } + + [Fact] + public async Task ConcurrentWorkflowSampleValidationAsync() + { + using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "02_ConcurrentWorkflow"); + + await this.RunSampleTestAsync(samplePath, async (process, logs) => + { + bool inputSent = false; + bool workflowCompleted = false; + bool foundParseQuestion = false; + bool foundAggregator = false; + bool foundAggregatorReceived2Responses = false; + + string? 
line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + if (!inputSent && line.Contains("Enter a science question", StringComparison.OrdinalIgnoreCase)) + { + await this.WriteInputAsync(process, "What is gravity?", testTimeoutCts.Token); + inputSent = true; + } + + if (inputSent) + { + foundParseQuestion |= line.Contains("[ParseQuestion]", StringComparison.Ordinal); + foundAggregator |= line.Contains("[Aggregator]", StringComparison.Ordinal); + foundAggregatorReceived2Responses |= line.Contains("Received 2 AI agent responses", StringComparison.Ordinal); + + if (line.Contains("Aggregation complete", StringComparison.OrdinalIgnoreCase)) + { + workflowCompleted = true; + break; + } + } + + this.AssertNoError(line); + } + + Assert.True(inputSent, "Input was not sent to the workflow."); + Assert.True(foundParseQuestion, "ParseQuestion executor log entry not found."); + Assert.True(foundAggregator, "Aggregator executor log entry not found."); + Assert.True(foundAggregatorReceived2Responses, "Aggregator did not receive 2 AI agent responses."); + Assert.True(workflowCompleted, "Workflow did not complete successfully."); + + await this.WriteInputAsync(process, "exit", testTimeoutCts.Token); + }); + } + + [Fact] + public async Task ConditionalEdgesWorkflowSampleValidationAsync() + { + using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "03_ConditionalEdges"); + + await this.RunSampleTestAsync(samplePath, async (process, logs) => + { + bool validOrderSent = false; + bool blockedOrderSent = false; + bool validOrderCompleted = false; + bool blockedOrderCompleted = false; + + string? 
line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + // Send a valid order first (no 'B' in ID) + if (!validOrderSent && line.Contains("Enter an order ID", StringComparison.OrdinalIgnoreCase)) + { + await this.WriteInputAsync(process, "12345", testTimeoutCts.Token); + validOrderSent = true; + } + + // Check valid order completed (routed to PaymentProcessor) + if (validOrderSent && !validOrderCompleted && + line.Contains("PaymentReferenceNumber", StringComparison.OrdinalIgnoreCase)) + { + validOrderCompleted = true; + + // Send a blocked order (contains 'B') + await this.WriteInputAsync(process, "ORDER-B-999", testTimeoutCts.Token); + blockedOrderSent = true; + } + + // Check blocked order completed (routed to NotifyFraud) + if (blockedOrderSent && line.Contains("flagged as fraudulent", StringComparison.OrdinalIgnoreCase)) + { + blockedOrderCompleted = true; + break; + } + + this.AssertNoError(line); + } + + Assert.True(validOrderSent, "Valid order input was not sent."); + Assert.True(validOrderCompleted, "Valid order did not complete (PaymentProcessor path)."); + Assert.True(blockedOrderSent, "Blocked order input was not sent."); + Assert.True(blockedOrderCompleted, "Blocked order did not complete (NotifyFraud path)."); + + await this.WriteInputAsync(process, "exit", testTimeoutCts.Token); + }); + } + + private void AssertNoError(string line) + { + if (line.Contains("Failed:", StringComparison.OrdinalIgnoreCase) || + line.Contains("Error:", StringComparison.OrdinalIgnoreCase)) + { + Assert.Fail($"Workflow failed: {line}"); + } + } + + [Fact] + public async Task WorkflowEventsSampleValidationAsync() + { + using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "05_WorkflowEvents"); + + await this.RunSampleTestAsync(samplePath, async (process, logs) => + { + bool inputSent = false; + bool foundStartedRun = false; + bool foundExecutorInvoked = false; + bool 
foundExecutorCompleted = false; + bool foundLookupStarted = false; + bool foundOrderFound = false; + bool foundCancelProgress = false; + bool foundOrderCancelled = false; + bool foundEmailSent = false; + bool foundYieldedOutput = false; + bool foundWorkflowCompleted = false; + bool foundCompletionResult = false; + List eventLines = []; + + string? line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + if (!inputSent && line.Contains("Enter order ID", StringComparison.OrdinalIgnoreCase)) + { + await this.WriteInputAsync(process, "12345", testTimeoutCts.Token); + inputSent = true; + } + + if (inputSent) + { + foundStartedRun |= line.Contains("Started run:", StringComparison.Ordinal); + foundExecutorInvoked |= line.Contains("ExecutorInvokedEvent", StringComparison.Ordinal); + foundExecutorCompleted |= line.Contains("ExecutorCompletedEvent", StringComparison.Ordinal); + foundLookupStarted |= line.Contains("[Lookup] Looking up order", StringComparison.Ordinal); + foundOrderFound |= line.Contains("[Lookup] Found:", StringComparison.Ordinal); + foundCancelProgress |= line.Contains("[Cancel]", StringComparison.Ordinal) && line.Contains('%'); + foundOrderCancelled |= line.Contains("[Cancel] Done", StringComparison.Ordinal); + foundEmailSent |= line.Contains("[Email] Sent to", StringComparison.Ordinal); + foundYieldedOutput |= line.Contains("[Output]", StringComparison.Ordinal); + foundWorkflowCompleted |= line.Contains("DurableWorkflowCompletedEvent", StringComparison.Ordinal); + + if (line.Contains("Completed:", StringComparison.Ordinal)) + { + foundCompletionResult = line.Contains("12345", StringComparison.Ordinal); + break; + } + + // Collect event lines for ordering verification + if (line.Contains("[Lookup]", StringComparison.Ordinal) + || line.Contains("[Cancel]", StringComparison.Ordinal) + || line.Contains("[Email]", StringComparison.Ordinal) + || line.Contains("[Output]", StringComparison.Ordinal)) + { + eventLines.Add(line); + } + } + 
+ this.AssertNoError(line); + } + + Assert.True(inputSent, "Input was not sent to the workflow."); + Assert.True(foundStartedRun, "Streaming run was not started."); + Assert.True(foundExecutorInvoked, "ExecutorInvokedEvent not found in stream."); + Assert.True(foundExecutorCompleted, "ExecutorCompletedEvent not found in stream."); + Assert.True(foundLookupStarted, "OrderLookupStartedEvent not found in stream."); + Assert.True(foundOrderFound, "OrderFoundEvent not found in stream."); + Assert.True(foundCancelProgress, "CancellationProgressEvent not found in stream."); + Assert.True(foundOrderCancelled, "OrderCancelledEvent not found in stream."); + Assert.True(foundEmailSent, "EmailSentEvent not found in stream."); + Assert.True(foundYieldedOutput, "WorkflowOutputEvent not found in stream."); + Assert.True(foundWorkflowCompleted, "DurableWorkflowCompletedEvent not found in stream."); + Assert.True(foundCompletionResult, "Completion result does not contain the order ID."); + + // Verify event ordering: lookup events appear before cancel events, which appear before email events + int lastLookupIndex = eventLines.FindLastIndex(l => l.Contains("[Lookup]", StringComparison.Ordinal)); + int firstCancelIndex = eventLines.FindIndex(l => l.Contains("[Cancel]", StringComparison.Ordinal)); + int lastCancelIndex = eventLines.FindLastIndex(l => l.Contains("[Cancel]", StringComparison.Ordinal)); + int firstEmailIndex = eventLines.FindIndex(l => l.Contains("[Email]", StringComparison.Ordinal)); + + if (lastLookupIndex >= 0 && firstCancelIndex >= 0) + { + Assert.True(lastLookupIndex < firstCancelIndex, "Lookup events should appear before cancel events."); + } + + if (lastCancelIndex >= 0 && firstEmailIndex >= 0) + { + Assert.True(lastCancelIndex < firstEmailIndex, "Cancel events should appear before email events."); + } + + await this.WriteInputAsync(process, "exit", testTimeoutCts.Token); + }); + } + + [Fact] + public async Task WorkflowSharedStateSampleValidationAsync() + { + 
using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "06_WorkflowSharedState"); + + await this.RunSampleTestAsync(samplePath, async (process, logs) => + { + bool inputSent = false; + bool foundStartedRun = false; + bool foundValidateOutput = false; + bool foundEnrichOutput = false; + bool foundPaymentOutput = false; + bool foundInvoiceOutput = false; + bool foundTaxCalculation = false; + bool foundAuditTrail = false; + bool foundWorkflowCompleted = false; + List outputLines = []; + + string? line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + if (!inputSent && line.Contains("Enter an order ID", StringComparison.OrdinalIgnoreCase)) + { + await this.WriteInputAsync(process, "ORD-001", testTimeoutCts.Token); + inputSent = true; + } + + if (inputSent) + { + foundStartedRun |= line.Contains("Started run:", StringComparison.Ordinal); + + if (line.Contains("[Output]", StringComparison.Ordinal)) + { + foundValidateOutput |= line.Contains("ValidateOrder:", StringComparison.Ordinal) && line.Contains("validated", StringComparison.OrdinalIgnoreCase); + foundEnrichOutput |= line.Contains("EnrichOrder:", StringComparison.Ordinal) && line.Contains("enriched", StringComparison.OrdinalIgnoreCase); + foundPaymentOutput |= line.Contains("ProcessPayment:", StringComparison.Ordinal) && line.Contains("Payment processed", StringComparison.OrdinalIgnoreCase); + foundInvoiceOutput |= line.Contains("GenerateInvoice:", StringComparison.Ordinal) && line.Contains("Invoice complete", StringComparison.OrdinalIgnoreCase); + + // Verify shared state: tax rate was read by ProcessPayment + foundTaxCalculation |= line.Contains("tax:", StringComparison.OrdinalIgnoreCase); + + // Verify shared state: audit trail was accumulated across executors + foundAuditTrail |= line.Contains("Audit trail:", StringComparison.Ordinal) + && line.Contains("ValidateOrder", StringComparison.Ordinal) + && 
line.Contains("EnrichOrder", StringComparison.Ordinal) + && line.Contains("ProcessPayment", StringComparison.Ordinal); + + outputLines.Add(line); + } + + foundWorkflowCompleted |= line.Contains("DurableWorkflowCompletedEvent", StringComparison.Ordinal) + || line.Contains("Completed:", StringComparison.Ordinal); + + if (line.Contains("Completed:", StringComparison.Ordinal)) + { + break; + } + } + + this.AssertNoError(line); + } + + Assert.True(inputSent, "Input was not sent to the workflow."); + Assert.True(foundStartedRun, "Streaming run was not started."); + Assert.True(foundValidateOutput, "ValidateOrder output not found in stream."); + Assert.True(foundEnrichOutput, "EnrichOrder output not found in stream."); + Assert.True(foundPaymentOutput, "ProcessPayment output not found in stream."); + Assert.True(foundInvoiceOutput, "GenerateInvoice output not found in stream."); + Assert.True(foundTaxCalculation, "Tax calculation (shared state read) not found."); + Assert.True(foundAuditTrail, "Audit trail (shared state accumulation) not found."); + Assert.True(foundWorkflowCompleted, "Workflow completion not found in stream."); + + // Verify output ordering: ValidateOrder -> EnrichOrder -> ProcessPayment -> GenerateInvoice + int validateIndex = outputLines.FindIndex(l => l.Contains("ValidateOrder:", StringComparison.Ordinal) && l.Contains("validated", StringComparison.OrdinalIgnoreCase)); + int enrichIndex = outputLines.FindIndex(l => l.Contains("EnrichOrder:", StringComparison.Ordinal)); + int paymentIndex = outputLines.FindIndex(l => l.Contains("ProcessPayment:", StringComparison.Ordinal)); + int invoiceIndex = outputLines.FindIndex(l => l.Contains("GenerateInvoice:", StringComparison.Ordinal)); + + if (validateIndex >= 0 && enrichIndex >= 0) + { + Assert.True(validateIndex < enrichIndex, "ValidateOrder output should appear before EnrichOrder."); + } + + if (enrichIndex >= 0 && paymentIndex >= 0) + { + Assert.True(enrichIndex < paymentIndex, "EnrichOrder output should 
appear before ProcessPayment."); + } + + if (paymentIndex >= 0 && invoiceIndex >= 0) + { + Assert.True(paymentIndex < invoiceIndex, "ProcessPayment output should appear before GenerateInvoice."); + } + + await this.WriteInputAsync(process, "exit", testTimeoutCts.Token); + }); + } + + [Fact] + public async Task SubWorkflowsSampleValidationAsync() + { + using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "07_SubWorkflows"); + + await this.RunSampleTestAsync(samplePath, async (process, logs) => + { + bool inputSent = false; + bool foundOrderReceived = false; + bool foundValidatePayment = false; + bool foundAnalyzePatterns = false; + bool foundCalculateRiskScore = false; + bool foundChargePayment = false; + bool foundSelectCarrier = false; + bool foundCreateShipment = false; + bool foundOrderCompleted = false; + bool foundFraudRiskEvent = false; + bool workflowCompleted = false; + + string? line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + if (!inputSent && line.Contains("Enter an order ID", StringComparison.OrdinalIgnoreCase)) + { + await this.WriteInputAsync(process, "ORD-001", testTimeoutCts.Token); + inputSent = true; + } + + if (inputSent) + { + // Main workflow executors + foundOrderReceived |= line.Contains("[OrderReceived]", StringComparison.Ordinal); + foundOrderCompleted |= line.Contains("[OrderCompleted]", StringComparison.Ordinal); + + // Payment sub-workflow executors + foundValidatePayment |= line.Contains("[Payment/ValidatePayment]", StringComparison.Ordinal); + foundChargePayment |= line.Contains("[Payment/ChargePayment]", StringComparison.Ordinal); + + // FraudCheck sub-sub-workflow executors (nested inside Payment) + foundAnalyzePatterns |= line.Contains("[Payment/FraudCheck/AnalyzePatterns]", StringComparison.Ordinal); + foundCalculateRiskScore |= line.Contains("[Payment/FraudCheck/CalculateRiskScore]", StringComparison.Ordinal); + + // 
Shipping sub-workflow executors + foundSelectCarrier |= line.Contains("[Shipping/SelectCarrier]", StringComparison.Ordinal); + foundCreateShipment |= line.Contains("[Shipping/CreateShipment]", StringComparison.Ordinal); + + // Custom event from nested sub-workflow (streamed to client) + foundFraudRiskEvent |= line.Contains("[Event from sub-workflow] FraudRiskAssessedEvent", StringComparison.Ordinal); + + if (line.Contains("Order completed", StringComparison.OrdinalIgnoreCase)) + { + workflowCompleted = true; + break; + } + } + + this.AssertNoError(line); + } + + Assert.True(inputSent, "Input was not sent to the workflow."); + Assert.True(foundOrderReceived, "OrderReceived executor log not found."); + Assert.True(foundValidatePayment, "Payment/ValidatePayment executor log not found."); + Assert.True(foundAnalyzePatterns, "Payment/FraudCheck/AnalyzePatterns executor log not found."); + Assert.True(foundCalculateRiskScore, "Payment/FraudCheck/CalculateRiskScore executor log not found."); + Assert.True(foundChargePayment, "Payment/ChargePayment executor log not found."); + Assert.True(foundSelectCarrier, "Shipping/SelectCarrier executor log not found."); + Assert.True(foundCreateShipment, "Shipping/CreateShipment executor log not found."); + Assert.True(foundOrderCompleted, "OrderCompleted executor log not found."); + Assert.True(foundFraudRiskEvent, "FraudRiskAssessedEvent from nested sub-workflow not found."); + Assert.True(workflowCompleted, "Workflow did not complete successfully."); + + await this.WriteInputAsync(process, "exit", testTimeoutCts.Token); + }); + } + + [Fact] + public async Task WorkflowHITLSampleValidationAsync() + { + using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "08_WorkflowHITL"); + + await this.RunSampleTestAsync(samplePath, (process, logs) => + { + bool foundStarted = false; + bool foundManagerApprovalPause = false; + bool foundManagerApprovalInput = false; + bool 
foundManagerResponseSent = false; + bool foundBudgetApprovalPause = false; + bool foundBudgetResponseSent = false; + bool foundComplianceApprovalPause = false; + bool foundComplianceResponseSent = false; + bool foundWorkflowCompleted = false; + + string? line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + foundStarted |= line.Contains("Starting expense reimbursement workflow", StringComparison.Ordinal); + foundManagerApprovalPause |= line.Contains("Workflow paused at RequestPort: ManagerApproval", StringComparison.Ordinal); + foundManagerApprovalInput |= line.Contains("Approval for: Jerry", StringComparison.Ordinal); + foundManagerResponseSent |= line.Contains("Response sent: Approved=True", StringComparison.Ordinal) && foundManagerApprovalPause && !foundBudgetApprovalPause && !foundComplianceApprovalPause; + foundBudgetApprovalPause |= line.Contains("Workflow paused at RequestPort: BudgetApproval", StringComparison.Ordinal); + foundBudgetResponseSent |= line.Contains("Response sent: Approved=True", StringComparison.Ordinal) && foundBudgetApprovalPause; + foundComplianceApprovalPause |= line.Contains("Workflow paused at RequestPort: ComplianceApproval", StringComparison.Ordinal); + foundComplianceResponseSent |= line.Contains("Response sent: Approved=True", StringComparison.Ordinal) && foundComplianceApprovalPause; + + if (line.Contains("Workflow completed: Expense reimbursed at", StringComparison.Ordinal)) + { + foundWorkflowCompleted = true; + break; + } + + this.AssertNoError(line); + } + + Assert.True(foundStarted, "Workflow start message not found."); + Assert.True(foundManagerApprovalPause, "Manager approval pause not found."); + Assert.True(foundManagerApprovalInput, "Manager approval input (Jerry) not found."); + Assert.True(foundManagerResponseSent, "Manager approval response not sent."); + Assert.True(foundBudgetApprovalPause, "Budget approval pause not found."); + Assert.True(foundBudgetResponseSent, "Budget approval 
response not sent."); + Assert.True(foundComplianceApprovalPause, "Compliance approval pause not found."); + Assert.True(foundComplianceResponseSent, "Compliance approval response not sent."); + Assert.True(foundWorkflowCompleted, "Workflow did not complete successfully."); + + return Task.CompletedTask; + }); + } + + [Fact] + public async Task WorkflowAndAgentsSampleValidationAsync() + { + using CancellationTokenSource testTimeoutCts = this.CreateTestTimeoutCts(); + string samplePath = Path.Combine(s_samplesPath, "04_WorkflowAndAgents"); + + await this.RunSampleTestAsync(samplePath, (process, logs) => + { + // Arrange + bool foundDemo1 = false; + bool foundBiologistResponse = false; + bool foundChemistResponse = false; + bool foundDemo2 = false; + bool foundPhysicsWorkflow = false; + bool foundDemo3 = false; + bool foundExpertTeamWorkflow = false; + bool foundDemo4 = false; + bool foundChemistryWorkflow = false; + bool allDemosCompleted = false; + + // Act + string? line; + while ((line = this.ReadLogLine(logs, testTimeoutCts.Token)) != null) + { + foundDemo1 |= line.Contains("DEMO 1:", StringComparison.Ordinal); + foundBiologistResponse |= line.Contains("Biologist:", StringComparison.Ordinal); + foundChemistResponse |= line.Contains("Chemist:", StringComparison.Ordinal); + foundDemo2 |= line.Contains("DEMO 2:", StringComparison.Ordinal); + foundPhysicsWorkflow |= line.Contains("PhysicsExpertReview", StringComparison.Ordinal); + foundDemo3 |= line.Contains("DEMO 3:", StringComparison.Ordinal); + foundExpertTeamWorkflow |= line.Contains("ExpertTeamReview", StringComparison.Ordinal); + foundDemo4 |= line.Contains("DEMO 4:", StringComparison.Ordinal); + foundChemistryWorkflow |= line.Contains("ChemistryExpertReview", StringComparison.Ordinal); + + if (line.Contains("All demos completed", StringComparison.OrdinalIgnoreCase)) + { + allDemosCompleted = true; + break; + } + + this.AssertNoError(line); + } + + // Assert + Assert.True(foundDemo1, "DEMO 1 (Direct Agent 
Conversation) not found."); + Assert.True(foundBiologistResponse, "Biologist agent response not found."); + Assert.True(foundChemistResponse, "Chemist agent response not found."); + Assert.True(foundDemo2, "DEMO 2 (Single-Agent Workflow) not found."); + Assert.True(foundPhysicsWorkflow, "PhysicsExpertReview workflow not found."); + Assert.True(foundDemo3, "DEMO 3 (Multi-Agent Workflow) not found."); + Assert.True(foundExpertTeamWorkflow, "ExpertTeamReview workflow not found."); + Assert.True(foundDemo4, "DEMO 4 (Chemistry Workflow) not found."); + Assert.True(foundChemistryWorkflow, "ChemistryExpertReview workflow not found."); + Assert.True(allDemosCompleted, "Sample did not complete all demos successfully."); + + return Task.CompletedTask; + }); + } +} diff --git a/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Microsoft.Agents.AI.DurableTask.UnitTests.csproj b/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Microsoft.Agents.AI.DurableTask.UnitTests.csproj index d6b34bd6b9..335d8e401b 100644 --- a/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Microsoft.Agents.AI.DurableTask.UnitTests.csproj +++ b/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Microsoft.Agents.AI.DurableTask.UnitTests.csproj @@ -7,6 +7,7 @@ + diff --git a/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Workflows/DurableActivityExecutorTests.cs b/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Workflows/DurableActivityExecutorTests.cs new file mode 100644 index 0000000000..e3b549e365 --- /dev/null +++ b/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Workflows/DurableActivityExecutorTests.cs @@ -0,0 +1,235 @@ +// Copyright (c) Microsoft. All rights reserved. 
// Copyright (c) Microsoft. All rights reserved.

using System.Text.Json;
using Microsoft.Agents.AI.DurableTask.Workflows;

namespace Microsoft.Agents.AI.DurableTask.UnitTests.Workflows;

/// <summary>
/// Unit tests for <see cref="DurableActivityExecutor"/> helpers:
/// <c>DeserializeInput</c> (serialized payload to CLR object, including fan-in
/// arrays of individually serialized elements) and <c>ResolveInputType</c>
/// (declared type name to a supported CLR type, with string fallbacks).
/// </summary>
public sealed class DurableActivityExecutorTests
{
    // Mirrors the camelCase serializer settings used for executor payloads so the
    // inputs built in these tests match what the executor receives at runtime.
    private static readonly JsonSerializerOptions s_camelCaseOptions = new()
    {
        PropertyNamingPolicy = JsonNamingPolicy.CamelCase,
        PropertyNameCaseInsensitive = true
    };

    #region DeserializeInput

    [Fact]
    public void DeserializeInput_StringType_ReturnsInputAsIs()
    {
        // Arrange
        const string Input = "hello world";

        // Act
        object result = DurableActivityExecutor.DeserializeInput(Input, typeof(string));

        // Assert — string targets are passed through, never JSON-parsed.
        Assert.Equal("hello world", result);
    }

    [Fact]
    public void DeserializeInput_SimpleObject_DeserializesCorrectly()
    {
        // Arrange
        string input = JsonSerializer.Serialize(new TestRecord("EXP-001", 100.50m), s_camelCaseOptions);

        // Act
        object result = DurableActivityExecutor.DeserializeInput(input, typeof(TestRecord));

        // Assert
        TestRecord record = Assert.IsType<TestRecord>(result);
        Assert.Equal("EXP-001", record.Id);
        Assert.Equal(100.50m, record.Amount);
    }

    [Fact]
    public void DeserializeInput_StringArray_DeserializesDirectly()
    {
        // Arrange
        string input = JsonSerializer.Serialize((string[])["a", "b", "c"]);

        // Act
        object result = DurableActivityExecutor.DeserializeInput(input, typeof(string[]));

        // Assert
        string[] array = Assert.IsType<string[]>(result);
        Assert.Equal(["a", "b", "c"], array);
    }

    [Fact]
    public void DeserializeInput_TypedArrayFromFanIn_DeserializesEachElement()
    {
        // Arrange — fan-in produces a JSON array whose elements are themselves
        // serialized JSON strings; each element must be deserialized individually.
        TestRecord r1 = new("EXP-001", 100m);
        TestRecord r2 = new("EXP-002", 200m);
        string[] serializedElements =
        [
            JsonSerializer.Serialize(r1, s_camelCaseOptions),
            JsonSerializer.Serialize(r2, s_camelCaseOptions)
        ];
        string input = JsonSerializer.Serialize(serializedElements);

        // Act
        object result = DurableActivityExecutor.DeserializeInput(input, typeof(TestRecord[]));

        // Assert
        TestRecord[] records = Assert.IsType<TestRecord[]>(result);
        Assert.Equal(2, records.Length);
        Assert.Equal("EXP-001", records[0].Id);
        Assert.Equal(100m, records[0].Amount);
        Assert.Equal("EXP-002", records[1].Id);
        Assert.Equal(200m, records[1].Amount);
    }

    [Fact]
    public void DeserializeInput_TypedArrayWithSingleElement_DeserializesCorrectly()
    {
        // Arrange
        TestRecord r1 = new("EXP-001", 50m);
        string[] serializedElements = [JsonSerializer.Serialize(r1, s_camelCaseOptions)];
        string input = JsonSerializer.Serialize(serializedElements);

        // Act
        object result = DurableActivityExecutor.DeserializeInput(input, typeof(TestRecord[]));

        // Assert
        TestRecord[] records = Assert.IsType<TestRecord[]>(result);
        Assert.Single(records);
        Assert.Equal("EXP-001", records[0].Id);
    }

    [Fact]
    public void DeserializeInput_TypedArrayWithNullElement_ThrowsInvalidOperationException()
    {
        // Arrange — one element deserializes to null, which is not a valid record.
        string input = JsonSerializer.Serialize((string[])["null"]);

        // Act & Assert
        Assert.Throws<InvalidOperationException>(
            () => DurableActivityExecutor.DeserializeInput(input, typeof(TestRecord[])));
    }

    [Fact]
    public void DeserializeInput_InvalidJson_ThrowsJsonException()
    {
        // Arrange
        const string Input = "not valid json";

        // Act & Assert — ThrowsAny tolerates JsonException subclasses.
        Assert.ThrowsAny<JsonException>(
            () => DurableActivityExecutor.DeserializeInput(Input, typeof(TestRecord)));
    }

    #endregion

    #region ResolveInputType

    [Fact]
    public void ResolveInputType_NullTypeName_ReturnsFirstSupportedType()
    {
        // Arrange
        HashSet<Type> supportedTypes = [typeof(TestRecord), typeof(string)];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType(null, supportedTypes);

        // Assert
        Assert.Equal(typeof(TestRecord), result);
    }

    [Fact]
    public void ResolveInputType_EmptyTypeName_ReturnsFirstSupportedType()
    {
        // Arrange
        HashSet<Type> supportedTypes = [typeof(TestRecord)];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType(string.Empty, supportedTypes);

        // Assert
        Assert.Equal(typeof(TestRecord), result);
    }

    [Fact]
    public void ResolveInputType_EmptySupportedTypes_DefaultsToString()
    {
        // Arrange
        HashSet<Type> supportedTypes = [];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType(null, supportedTypes);

        // Assert
        Assert.Equal(typeof(string), result);
    }

    [Fact]
    public void ResolveInputType_MatchesByFullName()
    {
        // Arrange
        HashSet<Type> supportedTypes = [typeof(TestRecord)];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType(typeof(TestRecord).FullName, supportedTypes);

        // Assert
        Assert.Equal(typeof(TestRecord), result);
    }

    [Fact]
    public void ResolveInputType_MatchesByName()
    {
        // Arrange — a bare (non-namespaced) name must also resolve.
        HashSet<Type> supportedTypes = [typeof(TestRecord)];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType("TestRecord", supportedTypes);

        // Assert
        Assert.Equal(typeof(TestRecord), result);
    }

    [Fact]
    public void ResolveInputType_StringArrayFallsBackToSupportedType()
    {
        // Arrange — fan-in declares string[] but the executor expects TestRecord[].
        HashSet<Type> supportedTypes = [typeof(TestRecord[])];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType(typeof(string[]).FullName, supportedTypes);

        // Assert
        Assert.Equal(typeof(TestRecord[]), result);
    }

    [Fact]
    public void ResolveInputType_StringFallsBackToSupportedType()
    {
        // Arrange — executor doesn't support string, so fall back to what it does support.
        HashSet<Type> supportedTypes = [typeof(TestRecord)];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType(typeof(string).FullName, supportedTypes);

        // Assert
        Assert.Equal(typeof(TestRecord), result);
    }

    [Fact]
    public void ResolveInputType_StringArrayRetainedWhenSupported()
    {
        // Arrange — when string[] is explicitly supported, no fallback occurs.
        HashSet<Type> supportedTypes = [typeof(string[])];

        // Act
        Type result = DurableActivityExecutor.ResolveInputType(typeof(string[]).FullName, supportedTypes);

        // Assert
        Assert.Equal(typeof(string[]), result);
    }

    #endregion

    // Simple value-semantic payload used as the deserialization target.
    private sealed record TestRecord(string Id, decimal Amount);
}
// Copyright (c) Microsoft. All rights reserved.

using System.Text.Json;
using Microsoft.Agents.AI.DurableTask.Workflows;
using Microsoft.Agents.AI.Workflows;
using Microsoft.DurableTask;
using Microsoft.DurableTask.Client;
using Moq;

namespace Microsoft.Agents.AI.DurableTask.UnitTests.Workflows;

/// <summary>
/// Unit tests for <see cref="DurableStreamingWorkflowRun"/>: runtime-status mapping,
/// incremental event streaming via orchestration custom status, pending request-port
/// surfacing, external-event responses, and typed result extraction.
/// </summary>
public sealed class DurableStreamingWorkflowRunTests
{
    private const string InstanceId = "test-instance-123";
    private const string WorkflowTestName = "TestWorkflow";

    private static Workflow CreateTestWorkflow() =>
        new WorkflowBuilder(new FunctionExecutor("start", (_, _, _) => default))
            .WithName(WorkflowTestName)
            .Build();

    // Builds OrchestrationMetadata as the Durable Task client would return it.
    private static OrchestrationMetadata CreateMetadata(
        OrchestrationRuntimeStatus status,
        string? serializedCustomStatus = null,
        string? serializedOutput = null,
        TaskFailureDetails? failureDetails = null) =>
        new(WorkflowTestName, InstanceId)
        {
            RuntimeStatus = status,
            SerializedCustomStatus = serializedCustomStatus,
            SerializedOutput = serializedOutput,
            FailureDetails = failureDetails,
        };

    private static string SerializeCustomStatus(List<string> events) =>
        JsonSerializer.Serialize(
            new DurableWorkflowLiveStatus { Events = events },
            DurableSerialization.Options);

    private static string SerializeCustomStatusWithPendingEvents(
        List<string> events,
        List<PendingRequestPortStatus> pendingEvents) =>
        JsonSerializer.Serialize(
            new DurableWorkflowLiveStatus { Events = events, PendingEvents = pendingEvents },
            DurableSerialization.Options);

    // Workflow with a request port in the middle: start -> port -> end.
    private static Workflow CreateTestWorkflowWithRequestPort(string requestPortId)
    {
        FunctionExecutor entry = new("start", (_, _, _) => default);
        RequestPort port = RequestPort.Create(requestPortId);
        FunctionExecutor exit = new("end", (_, _, _) => default);

        return new WorkflowBuilder(entry)
            .WithName(WorkflowTestName)
            .AddEdge(entry, port)
            .AddEdge(port, exit)
            .Build();
    }

    private static string SerializeWorkflowResult(string? result, List<string> events) =>
        JsonSerializer.Serialize(
            new DurableWorkflowResult { Result = result, Events = events },
            DurableWorkflowJsonContext.Default.DurableWorkflowResult);

    // Wraps a workflow event in the TypedPayload envelope the run expects.
    private static string SerializeEvent(WorkflowEvent evt)
    {
        Type eventType = evt.GetType();
        TypedPayload wrapper = new()
        {
            TypeName = eventType.AssemblyQualifiedName,
            Data = JsonSerializer.Serialize(evt, eventType, DurableSerialization.Options)
        };

        return JsonSerializer.Serialize(wrapper, DurableWorkflowJsonContext.Default.TypedPayload);
    }

    // Drains WatchStreamAsync into a list so each test reads as Arrange/Act/Assert.
    private static async Task<List<WorkflowEvent>> DrainAsync(
        DurableStreamingWorkflowRun run,
        CancellationToken cancellationToken = default)
    {
        List<WorkflowEvent> observed = [];
        await foreach (WorkflowEvent evt in run.WatchStreamAsync(cancellationToken))
        {
            observed.Add(evt);
        }

        return observed;
    }

    #region Constructor and Properties

    [Fact]
    public void Constructor_SetsRunIdAndWorkflowName()
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");

        // Act
        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Assert
        Assert.Equal(InstanceId, run.RunId);
        Assert.Equal(WorkflowTestName, run.WorkflowName);
    }

    [Fact]
    public void Constructor_NoWorkflowName_SetsEmptyString()
    {
        // Arrange — workflow built without WithName().
        Mock<DurableTaskClient> client = new("test");
        Workflow unnamed = new WorkflowBuilder(new FunctionExecutor("start", (_, _, _) => default)).Build();

        // Act
        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, unnamed);

        // Assert
        Assert.Equal(string.Empty, run.WorkflowName);
    }

    #endregion

    #region GetStatusAsync

    [Theory]
    [InlineData(OrchestrationRuntimeStatus.Pending, DurableRunStatus.Pending)]
    [InlineData(OrchestrationRuntimeStatus.Running, DurableRunStatus.Running)]
    [InlineData(OrchestrationRuntimeStatus.Completed, DurableRunStatus.Completed)]
    [InlineData(OrchestrationRuntimeStatus.Failed, DurableRunStatus.Failed)]
    [InlineData(OrchestrationRuntimeStatus.Terminated, DurableRunStatus.Terminated)]
    [InlineData(OrchestrationRuntimeStatus.Suspended, DurableRunStatus.Suspended)]
    public async Task GetStatusAsync_MapsRuntimeStatusCorrectlyAsync(
        OrchestrationRuntimeStatus runtimeStatus,
        DurableRunStatus expectedStatus)
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, false, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(runtimeStatus));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        DurableRunStatus actual = await run.GetStatusAsync();

        // Assert
        Assert.Equal(expectedStatus, actual);
    }

    [Fact]
    public async Task GetStatusAsync_InstanceNotFound_ReturnsNotFoundAsync()
    {
        // Arrange — client reports no such instance.
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, false, It.IsAny<CancellationToken>()))
            .ReturnsAsync((OrchestrationMetadata?)null);

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        DurableRunStatus actual = await run.GetStatusAsync();

        // Assert
        Assert.Equal(DurableRunStatus.NotFound, actual);
    }

    #endregion

    #region WatchStreamAsync

    [Fact]
    public async Task WatchStreamAsync_InstanceNotFound_YieldsNoEventsAsync()
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync((OrchestrationMetadata?)null);

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert
        Assert.Empty(observed);
    }

    [Fact]
    public async Task WatchStreamAsync_CompletedWithResult_YieldsCompletedEventAsync()
    {
        // Arrange
        string serializedOutput = SerializeWorkflowResult("done", []);
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert
        Assert.Single(observed);
        DurableWorkflowCompletedEvent completedEvent = Assert.IsType<DurableWorkflowCompletedEvent>(observed[0]);
        Assert.Equal("done", completedEvent.Data);
    }

    [Fact]
    public async Task WatchStreamAsync_CompletedWithEventsInOutput_YieldsEventsAndCompletionAsync()
    {
        // Arrange — output wrapper also carries events recorded during the run.
        DurableHaltRequestedEvent haltEvent = new("executor-1");
        string serializedEvent = SerializeEvent(haltEvent);
        string serializedOutput = SerializeWorkflowResult("result", [serializedEvent]);

        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert — recorded events come first, then the completion event.
        Assert.Equal(2, observed.Count);
        DurableHaltRequestedEvent haltResult = Assert.IsType<DurableHaltRequestedEvent>(observed[0]);
        Assert.Equal("executor-1", haltResult.ExecutorId);
        DurableWorkflowCompletedEvent completedResult = Assert.IsType<DurableWorkflowCompletedEvent>(observed[1]);
        Assert.Equal("result", completedResult.Result);
    }

    [Fact]
    public async Task WatchStreamAsync_CompletedWithoutWrapper_YieldsFailedEventAsync()
    {
        // Arrange — output not wrapped in DurableWorkflowResult (indicates a bug upstream).
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: "\"raw output\""));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert — yields a failed event with a diagnostic message instead of crashing.
        Assert.Single(observed);
        DurableWorkflowFailedEvent failedEvent = Assert.IsType<DurableWorkflowFailedEvent>(observed[0]);
        Assert.Contains("could not be parsed", failedEvent.ErrorMessage);
    }

    [Fact]
    public async Task WatchStreamAsync_Failed_YieldsFailedEventAsync()
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");
        TaskFailureDetails failureDetails = new("ErrorType", "Something went wrong", null, null, null);
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(
                OrchestrationRuntimeStatus.Failed,
                failureDetails: failureDetails));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert — failure details are propagated onto the event.
        Assert.Single(observed);
        DurableWorkflowFailedEvent failedEvent = Assert.IsType<DurableWorkflowFailedEvent>(observed[0]);
        Assert.Equal("Something went wrong", failedEvent.ErrorMessage);
        Assert.NotNull(failedEvent.FailureDetails);
        Assert.Equal("ErrorType", failedEvent.FailureDetails.ErrorType);
        Assert.Equal("Something went wrong", failedEvent.FailureDetails.ErrorMessage);
    }

    [Fact]
    public async Task WatchStreamAsync_FailedWithNoDetails_YieldsDefaultMessageAsync()
    {
        // Arrange — failed status without any TaskFailureDetails.
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(OrchestrationRuntimeStatus.Failed));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert
        Assert.Single(observed);
        DurableWorkflowFailedEvent failedEvent = Assert.IsType<DurableWorkflowFailedEvent>(observed[0]);
        Assert.Equal("Workflow execution failed.", failedEvent.ErrorMessage);
        Assert.Null(failedEvent.FailureDetails);
    }

    [Fact]
    public async Task WatchStreamAsync_Terminated_YieldsFailedEventAsync()
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(OrchestrationRuntimeStatus.Terminated));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert
        Assert.Single(observed);
        DurableWorkflowFailedEvent failedEvent = Assert.IsType<DurableWorkflowFailedEvent>(observed[0]);
        Assert.Equal("Workflow was terminated.", failedEvent.ErrorMessage);
        Assert.Null(failedEvent.FailureDetails);
    }

    [Fact]
    public async Task WatchStreamAsync_EventsInCustomStatus_YieldsEventsBeforeCompletionAsync()
    {
        // Arrange — first poll surfaces an event via custom status; second poll completes.
        DurableHaltRequestedEvent haltEvent = new("exec-1");
        string serializedEvent = SerializeEvent(haltEvent);
        string customStatus = SerializeCustomStatus([serializedEvent]);
        string serializedOutput = SerializeWorkflowResult("final", []);

        int pollCount = 0;
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(() =>
            {
                pollCount++;
                return pollCount == 1
                    ? CreateMetadata(OrchestrationRuntimeStatus.Running, serializedCustomStatus: customStatus)
                    : CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput);
            });

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert
        Assert.Equal(2, observed.Count);
        DurableHaltRequestedEvent haltResult = Assert.IsType<DurableHaltRequestedEvent>(observed[0]);
        Assert.Equal("exec-1", haltResult.ExecutorId);
        DurableWorkflowCompletedEvent completedResult = Assert.IsType<DurableWorkflowCompletedEvent>(observed[1]);
        Assert.Equal("final", completedResult.Result);
    }

    [Fact]
    public async Task WatchStreamAsync_IncrementalEvents_YieldsOnlyNewEventsPerPollAsync()
    {
        // Arrange — simulate 3 poll cycles where events accumulate in custom status,
        // then a final completion poll. This validates:
        //   1. Events arriving across multiple poll cycles are yielded incrementally.
        //   2. Already-seen events are not re-yielded (lastReadEventIndex dedup).
        //   3. The completion event follows all streamed events.
        DurableHaltRequestedEvent event1 = new("executor-1");
        DurableHaltRequestedEvent event2 = new("executor-2");
        DurableHaltRequestedEvent event3 = new("executor-3");

        string serializedEvent1 = SerializeEvent(event1);
        string serializedEvent2 = SerializeEvent(event2);
        string serializedEvent3 = SerializeEvent(event3);

        // Poll 1: one event; polls 2-3: the list keeps growing; poll 4: completed
        // with every event repeated in the output wrapper.
        string customStatus1 = SerializeCustomStatus([serializedEvent1]);
        string customStatus2 = SerializeCustomStatus([serializedEvent1, serializedEvent2]);
        string customStatus3 = SerializeCustomStatus([serializedEvent1, serializedEvent2, serializedEvent3]);
        string serializedOutput = SerializeWorkflowResult("done", [serializedEvent1, serializedEvent2, serializedEvent3]);

        int pollCount = 0;
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(() =>
            {
                pollCount++;
                return pollCount switch
                {
                    1 => CreateMetadata(OrchestrationRuntimeStatus.Running, serializedCustomStatus: customStatus1),
                    2 => CreateMetadata(OrchestrationRuntimeStatus.Running, serializedCustomStatus: customStatus2),
                    3 => CreateMetadata(OrchestrationRuntimeStatus.Running, serializedCustomStatus: customStatus3),
                    _ => CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput),
                };
            });

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert — exactly 4 events: 3 incremental halt events + 1 completion.
        Assert.Equal(4, observed.Count);
        DurableHaltRequestedEvent halt1 = Assert.IsType<DurableHaltRequestedEvent>(observed[0]);
        DurableHaltRequestedEvent halt2 = Assert.IsType<DurableHaltRequestedEvent>(observed[1]);
        DurableHaltRequestedEvent halt3 = Assert.IsType<DurableHaltRequestedEvent>(observed[2]);
        Assert.Equal("executor-1", halt1.ExecutorId);
        Assert.Equal("executor-2", halt2.ExecutorId);
        Assert.Equal("executor-3", halt3.ExecutorId);
        DurableWorkflowCompletedEvent completed = Assert.IsType<DurableWorkflowCompletedEvent>(observed[3]);
        Assert.Equal("done", completed.Data);
    }

    [Fact]
    public async Task WatchStreamAsync_NoNewEventsOnRepoll_DoesNotDuplicateAsync()
    {
        // Arrange — custom status is unchanged across several polls; the single
        // event must not be re-yielded on each poll.
        DurableHaltRequestedEvent event1 = new("executor-1");
        string serializedEvent1 = SerializeEvent(event1);
        string customStatus = SerializeCustomStatus([serializedEvent1]);
        string serializedOutput = SerializeWorkflowResult("result", [serializedEvent1]);

        int pollCount = 0;
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(() =>
            {
                pollCount++;
                return pollCount switch
                {
                    // First 3 polls return the same custom status (no new events after the first).
                    <= 3 => CreateMetadata(OrchestrationRuntimeStatus.Running, serializedCustomStatus: customStatus),
                    _ => CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput),
                };
            });

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert — event1 appears exactly once despite 3 polls with the same status.
        Assert.Equal(2, observed.Count);
        DurableHaltRequestedEvent haltResult = Assert.IsType<DurableHaltRequestedEvent>(observed[0]);
        Assert.Equal("executor-1", haltResult.ExecutorId);
        DurableWorkflowCompletedEvent completedResult = Assert.IsType<DurableWorkflowCompletedEvent>(observed[1]);
        Assert.Equal("result", completedResult.Result);
    }

    [Fact]
    public async Task WatchStreamAsync_Cancellation_EndsGracefullyAsync()
    {
        // Arrange — cancel after the second poll while the instance is still running.
        using CancellationTokenSource cts = new();
        int pollCount = 0;
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(() =>
            {
                if (++pollCount >= 2)
                {
                    cts.Cancel();
                }

                return CreateMetadata(OrchestrationRuntimeStatus.Running);
            });

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run, cts.Token);

        // Assert — no exception thrown, stream ends cleanly.
        Assert.Empty(observed);
    }

    [Fact]
    public async Task WatchStreamAsync_PendingRequestPort_YieldsWaitingForInputEventAsync()
    {
        // Arrange — first poll reports a pending request port, second poll completes.
        string customStatus = SerializeCustomStatusWithPendingEvents(
            [],
            [new PendingRequestPortStatus("ApprovalPort", """{"amount":100}""")]);
        string serializedOutput = SerializeWorkflowResult("approved", []);

        int pollCount = 0;
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(() =>
            {
                pollCount++;
                return pollCount == 1
                    ? CreateMetadata(OrchestrationRuntimeStatus.Running, serializedCustomStatus: customStatus)
                    : CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput);
            });

        Workflow workflow = CreateTestWorkflowWithRequestPort("ApprovalPort");
        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, workflow);

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert
        Assert.Equal(2, observed.Count);
        DurableWorkflowWaitingForInputEvent waitingEvent = Assert.IsType<DurableWorkflowWaitingForInputEvent>(observed[0]);
        Assert.Equal("ApprovalPort", waitingEvent.RequestPort.Id);
        Assert.Contains("amount", waitingEvent.Input);
        DurableWorkflowCompletedEvent completedEvent = Assert.IsType<DurableWorkflowCompletedEvent>(observed[1]);
        Assert.Equal("approved", completedEvent.Result);
    }

    [Fact]
    public async Task WatchStreamAsync_PendingRequestPort_DoesNotDuplicateOnSubsequentPollsAsync()
    {
        // Arrange — the same pending event is reported across 2 polls, then completion.
        string customStatus = SerializeCustomStatusWithPendingEvents(
            [],
            [new PendingRequestPortStatus("ApprovalPort", """{"amount":100}""")]);
        string serializedOutput = SerializeWorkflowResult("done", []);

        int pollCount = 0;
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.GetInstanceAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(() =>
            {
                pollCount++;
                return pollCount switch
                {
                    <= 2 => CreateMetadata(OrchestrationRuntimeStatus.Running, serializedCustomStatus: customStatus),
                    _ => CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput),
                };
            });

        Workflow workflow = CreateTestWorkflowWithRequestPort("ApprovalPort");
        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, workflow);

        // Act
        List<WorkflowEvent> observed = await DrainAsync(run);

        // Assert — WaitingForInputEvent yielded only once despite 2 polls.
        Assert.Equal(2, observed.Count);
        Assert.IsType<DurableWorkflowWaitingForInputEvent>(observed[0]);
        Assert.IsType<DurableWorkflowCompletedEvent>(observed[1]);
    }

    #endregion

    #region SendResponseAsync

    [Fact]
    public async Task SendResponseAsync_SerializesAndRaisesEventAsync()
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.RaiseEventAsync(
                InstanceId,
                "ApprovalPort",
                It.IsAny<object>(),
                It.IsAny<CancellationToken>()))
            .Returns(Task.CompletedTask);

        RequestPort approvalPort = RequestPort.Create("ApprovalPort");
        DurableWorkflowWaitingForInputEvent requestEvent = new("""{"amount":100}""", approvalPort);
        Workflow workflow = CreateTestWorkflowWithRequestPort("ApprovalPort");
        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, workflow);

        // Act
        await run.SendResponseAsync(requestEvent, new { approved = true, comments = "Looks good" });

        // Assert — the response object is serialized and raised against the port name.
        client.Verify(c => c.RaiseEventAsync(
            InstanceId,
            "ApprovalPort",
            It.Is<string>(s => s.Contains("approved") && s.Contains("true")),
            It.IsAny<CancellationToken>()), Times.Once);
    }

    [Fact]
    public async Task SendResponseAsync_NullRequestEvent_ThrowsAsync()
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");
        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act & Assert
        // NOTE(review): exception type inferred from the original test's intent
        // (null-argument guard) — confirm against SendResponseAsync's validation.
        await Assert.ThrowsAsync<ArgumentNullException>(() =>
            run.SendResponseAsync(null!, "response").AsTask());
    }

    #endregion

    #region WaitForCompletionAsync

    [Fact]
    public async Task WaitForCompletionAsync_Completed_ReturnsResultAsync()
    {
        // Arrange
        string serializedOutput = SerializeWorkflowResult("hello world", []);
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.WaitForInstanceCompletionAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(OrchestrationRuntimeStatus.Completed, serializedOutput: serializedOutput));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act
        string? result = await run.WaitForCompletionAsync();

        // Assert
        Assert.Equal("hello world", result);
    }

    [Fact]
    public async Task WaitForCompletionAsync_Failed_ThrowsTaskFailedExceptionAsync()
    {
        // Arrange
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.WaitForInstanceCompletionAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(
                OrchestrationRuntimeStatus.Failed,
                failureDetails: new TaskFailureDetails("Error", "kaboom", null, null, null)));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act & Assert
        TaskFailedException ex = await Assert.ThrowsAsync<TaskFailedException>(
            () => run.WaitForCompletionAsync().AsTask());
        Assert.Equal("kaboom", ex.FailureDetails.ErrorMessage);
    }

    [Fact]
    public async Task WaitForCompletionAsync_UnexpectedStatus_ThrowsAsync()
    {
        // Arrange — terminated is neither completed nor failed.
        Mock<DurableTaskClient> client = new("test");
        client.Setup(c => c.WaitForInstanceCompletionAsync(InstanceId, true, It.IsAny<CancellationToken>()))
            .ReturnsAsync(CreateMetadata(OrchestrationRuntimeStatus.Terminated));

        DurableStreamingWorkflowRun run = new(client.Object, InstanceId, CreateTestWorkflow());

        // Act & Assert
        // NOTE(review): exception type inferred (unexpected terminal state guard) —
        // confirm against WaitForCompletionAsync's implementation.
        await Assert.ThrowsAsync<InvalidOperationException>(
            () => run.WaitForCompletionAsync().AsTask());
    }

    #endregion

    #region ExtractResult

    [Fact]
    public void ExtractResult_NullOutput_ReturnsDefault()
    {
        // Act
        string? result = DurableStreamingWorkflowRun.ExtractResult<string>(null);

        // Assert
        Assert.Null(result);
    }

    [Fact]
    public void ExtractResult_WrappedStringResult_ReturnsUnwrappedString()
    {
        // Arrange
        string serializedOutput = SerializeWorkflowResult("hello", []);

        // Act
        string? result = DurableStreamingWorkflowRun.ExtractResult<string>(serializedOutput);

        // Assert
        Assert.Equal("hello", result);
    }

    [Fact]
    public void ExtractResult_UnwrappedOutput_ThrowsInvalidOperationException()
    {
        // Arrange — raw output not wrapped in DurableWorkflowResult.
        string serializedOutput = JsonSerializer.Serialize("raw value");

        // Act & Assert
        Assert.Throws<InvalidOperationException>(
            () => DurableStreamingWorkflowRun.ExtractResult<string>(serializedOutput));
    }

    [Fact]
    public void ExtractResult_WrappedObjectResult_DeserializesCorrectly()
    {
        // Arrange
        TestPayload original = new() { Name = "test", Value = 42 };
        string resultJson = JsonSerializer.Serialize(original);
        string serializedOutput = SerializeWorkflowResult(resultJson, []);

        // Act
        TestPayload? result = DurableStreamingWorkflowRun.ExtractResult<TestPayload>(serializedOutput);

        // Assert
        Assert.NotNull(result);
        Assert.Equal("test", result.Name);
        Assert.Equal(42, result.Value);
    }

    [Fact]
    public void ExtractResult_CamelCaseSerializedObject_DeserializesToPascalCaseMembers()
    {
        // Arrange — executor outputs are serialized with DurableSerialization.Options
        // (camelCase), so extraction must be case-insensitive on member names.
        TestPayload original = new() { Name = "camel", Value = 99 };
        string resultJson = JsonSerializer.Serialize(original, DurableSerialization.Options);
        string serializedOutput = SerializeWorkflowResult(resultJson, []);

        // Act
        TestPayload? result = DurableStreamingWorkflowRun.ExtractResult<TestPayload>(serializedOutput);

        // Assert
        Assert.NotNull(result);
        Assert.Equal("camel", result.Name);
        Assert.Equal(99, result.Value);
    }

    #endregion

    // Mutable POCO used as a typed extraction target.
    private sealed class TestPayload
    {
        public string? Name { get; set; }

        public int Value { get; set; }
    }
}
Name { get; set; }
+
+        public int Value { get; set; }
+    }
+}
diff --git a/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Workflows/DurableWorkflowContextTests.cs b/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Workflows/DurableWorkflowContextTests.cs
new file mode 100644
index 0000000000..4ceba544a2
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.DurableTask.UnitTests/Workflows/DurableWorkflowContextTests.cs
@@ -0,0 +1,504 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using Microsoft.Agents.AI.DurableTask.Workflows;
+using Microsoft.Agents.AI.Workflows;
+
+namespace Microsoft.Agents.AI.DurableTask.UnitTests.Workflows;
+
+// NOTE(review): the generic type arguments in this file were lost when the diff was
+// passed through an angle-bracket-stripping sanitizer. They have been reconstructed
+// from the surrounding assertions and comments — confirm against the original patch.
+public sealed class DurableWorkflowContextTests
+{
+    // Minimal executor stub: the context under test only needs an executor identity.
+    // NOTE(review): <string> reconstructed — outputTypes is [typeof(string)]; confirm.
+    private static FunctionExecutor<string> CreateTestExecutor(string id = "test-executor")
+        => new(id, (_, _, _) => default, outputTypes: [typeof(string)]);
+
+    #region ReadStateAsync
+
+    [Fact]
+    public async Task ReadStateAsync_KeyExistsInInitialState_ReturnsValueAsync()
+    {
+        // Arrange — state keys use the "<scope>:<key>" form; values are JSON text.
+        Dictionary<string, string> state = new() { ["__default__:counter"] = "42" };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+
+        // Act
+        int? result = await context.ReadStateAsync<int>("counter");
+
+        // Assert
+        Assert.Equal(42, result);
+    }
+
+    [Fact]
+    public async Task ReadStateAsync_KeyDoesNotExist_ReturnsNullAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+        string? result = await context.ReadStateAsync<string>("missing");
+
+        // Assert
+        Assert.Null(result);
+    }
+
+    [Fact]
+    public async Task ReadStateAsync_LocalUpdateTakesPriorityOverInitialStateAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new() { ["__default__:key"] = "\"old\"" };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+        await context.QueueStateUpdateAsync("key", "new");
+
+        // Act
+        string? result = await context.ReadStateAsync<string>("key");
+
+        // Assert
+        Assert.Equal("new", result);
+    }
+
+    [Fact]
+    public async Task ReadStateAsync_ScopeCleared_IgnoresInitialStateAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new() { ["__default__:key"] = "\"value\"" };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+        await context.QueueClearScopeAsync();
+
+        // Act
+        string? result = await context.ReadStateAsync<string>("key");
+
+        // Assert
+        Assert.Null(result);
+    }
+
+    [Fact]
+    public async Task ReadStateAsync_WithNamedScope_ReadsFromCorrectScopeAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new()
+        {
+            ["scopeA:key"] = "\"fromA\"",
+            ["scopeB:key"] = "\"fromB\""
+        };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+
+        // Act
+        string? resultA = await context.ReadStateAsync<string>("key", "scopeA");
+        string? resultB = await context.ReadStateAsync<string>("key", "scopeB");
+
+        // Assert
+        Assert.Equal("fromA", resultA);
+        Assert.Equal("fromB", resultB);
+    }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("")]
+    public async Task ReadStateAsync_NullOrEmptyKey_ThrowsArgumentExceptionAsync(string? key)
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act & Assert — ThrowsAny accepts ArgumentException or its subclasses.
+        await Assert.ThrowsAnyAsync<ArgumentException>(() => context.ReadStateAsync<string>(key!).AsTask());
+    }
+
+    #endregion
+
+    #region ReadOrInitStateAsync
+
+    [Fact]
+    public async Task ReadOrInitStateAsync_KeyDoesNotExist_CallsFactoryAndQueuesUpdateAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+        string result = await context.ReadOrInitStateAsync("key", () => "initialized");
+
+        // Assert
+        Assert.Equal("initialized", result);
+        Assert.True(context.StateUpdates.ContainsKey("__default__:key"));
+    }
+
+    [Fact]
+    public async Task ReadOrInitStateAsync_KeyExists_ReturnsExistingValueAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new() { ["__default__:key"] = "\"existing\"" };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+        bool factoryCalled = false;
+
+        // Act
+        string result = await context.ReadOrInitStateAsync("key", () =>
+        {
+            factoryCalled = true;
+            return "should-not-be-used";
+        });
+
+        // Assert
+        Assert.Equal("existing", result);
+        Assert.False(factoryCalled);
+    }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("")]
+    public async Task ReadOrInitStateAsync_NullOrEmptyKey_ThrowsArgumentExceptionAsync(string? key)
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act & Assert
+        await Assert.ThrowsAnyAsync<ArgumentException>(
+            () => context.ReadOrInitStateAsync(key!, () => "value").AsTask());
+    }
+
+    [Fact]
+    public async Task ReadOrInitStateAsync_ValueType_MissingKey_CallsFactoryAsync()
+    {
+        // Arrange
+        // Validates that ReadStateAsync returns null (not 0) for missing keys,
+        // because the return type is int? (Nullable<int>). This ensures the factory
+        // is correctly invoked for value types when the key does not exist.
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+        int result = await context.ReadOrInitStateAsync("counter", () => 42);
+
+        // Assert
+        Assert.Equal(42, result);
+        Assert.True(context.StateUpdates.ContainsKey("__default__:counter"));
+    }
+
+    [Fact]
+    public async Task ReadOrInitStateAsync_NullFactory_ThrowsArgumentNullExceptionAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act & Assert — explicit <string> because a null factory gives inference nothing.
+        await Assert.ThrowsAsync<ArgumentNullException>(
+            () => context.ReadOrInitStateAsync<string>("key", null!).AsTask());
+    }
+
+    #endregion
+
+    #region QueueStateUpdateAsync
+
+    [Fact]
+    public async Task QueueStateUpdateAsync_SetsValue_VisibleToSubsequentReadAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+        await context.QueueStateUpdateAsync("key", "hello");
+        string? result = await context.ReadStateAsync<string>("key");
+
+        // Assert
+        Assert.Equal("hello", result);
+    }
+
+    [Fact]
+    public async Task QueueStateUpdateAsync_NullValue_RecordsDeletionAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new() { ["__default__:key"] = "\"value\"" };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+
+        // Act — a null value is recorded as a deletion marker, not dropped.
+        await context.QueueStateUpdateAsync<string>("key", null);
+
+        // Assert
+        Assert.True(context.StateUpdates.ContainsKey("__default__:key"));
+        Assert.Null(context.StateUpdates["__default__:key"]);
+    }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("")]
+    public async Task QueueStateUpdateAsync_NullOrEmptyKey_ThrowsArgumentExceptionAsync(string? key)
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act & Assert
+        await Assert.ThrowsAnyAsync<ArgumentException>(
+            () => context.QueueStateUpdateAsync(key!, "value").AsTask());
+    }
+
+    #endregion
+
+    #region QueueClearScopeAsync
+
+    [Fact]
+    public async Task QueueClearScopeAsync_DefaultScope_ClearsStateAndPendingUpdatesAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new() { ["__default__:key"] = "\"value\"" };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+        await context.QueueStateUpdateAsync("pending", "data");
+
+        // Act
+        await context.QueueClearScopeAsync();
+
+        // Assert — the clear wipes both persisted state and queued updates.
+        Assert.Contains("__default__", context.ClearedScopes);
+        Assert.Empty(context.StateUpdates);
+    }
+
+    [Fact]
+    public async Task QueueClearScopeAsync_NamedScope_OnlyClearsThatScopeAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+        await context.QueueStateUpdateAsync("keyA", "valueA", scopeName: "scopeA");
+        await context.QueueStateUpdateAsync("keyB", "valueB", scopeName: "scopeB");
+
+        // Act
+        await context.QueueClearScopeAsync("scopeA");
+
+        // Assert
+        Assert.DoesNotContain("scopeA:keyA", context.StateUpdates.Keys);
+        Assert.Contains("scopeB:keyB", context.StateUpdates.Keys);
+    }
+
+    #endregion
+
+    #region ReadStateKeysAsync
+
+    [Fact]
+    public async Task ReadStateKeysAsync_ReturnsKeysFromInitialStateAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new()
+        {
+            ["__default__:alpha"] = "\"a\"",
+            ["__default__:beta"] = "\"b\""
+        };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+
+        // Act
+        HashSet<string> keys = await context.ReadStateKeysAsync();
+
+        // Assert — keys come back without the scope prefix.
+        Assert.Equal(2, keys.Count);
+        Assert.Contains("alpha", keys);
+        Assert.Contains("beta", keys);
+    }
+
+    [Fact]
+    public async Task ReadStateKeysAsync_MergesLocalUpdatesAndDeletionsAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new()
+        {
+            ["__default__:existing"] = "\"val\"",
+            ["__default__:toDelete"] = "\"val\""
+        };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+        await context.QueueStateUpdateAsync("newKey", "value");
+        await context.QueueStateUpdateAsync<string>("toDelete", null);
+
+        // Act
+        HashSet<string> keys = await context.ReadStateKeysAsync();
+
+        // Assert
+        Assert.Contains("existing", keys);
+        Assert.Contains("newKey", keys);
+        Assert.DoesNotContain("toDelete", keys);
+    }
+
+    [Fact]
+    public async Task ReadStateKeysAsync_AfterClearScope_ExcludesInitialStateAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new() { ["__default__:old"] = "\"val\"" };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+        await context.QueueClearScopeAsync();
+        await context.QueueStateUpdateAsync("new", "value");
+
+        // Act
+        HashSet<string> keys = await context.ReadStateKeysAsync();
+
+        // Assert
+        Assert.DoesNotContain("old", keys);
+        Assert.Contains("new", keys);
+    }
+
+    [Fact]
+    public async Task ReadStateKeysAsync_WithNamedScope_OnlyReturnsKeysFromThatScopeAsync()
+    {
+        // Arrange
+        Dictionary<string, string> state = new()
+        {
+            ["scopeA:key1"] = "\"val\"",
+            ["scopeB:key2"] = "\"val\""
+        };
+        DurableWorkflowContext context = new(state, CreateTestExecutor());
+
+        // Act
+        HashSet<string> keysA = await context.ReadStateKeysAsync("scopeA");
+
+        // Assert
+        Assert.Single(keysA);
+        Assert.Contains("key1", keysA);
+    }
+
+    #endregion
+
+    #region AddEventAsync
+
+    [Fact]
+    public async Task AddEventAsync_AddsEventToCollectionAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+        WorkflowEvent evt = new ExecutorInvokedEvent("test", "test-data");
+
+        // Act
+        await context.AddEventAsync(evt);
+
+        // Assert
+        Assert.Single(context.OutboundEvents);
+        Assert.Same(evt, context.OutboundEvents[0]);
+    }
+
+    [Fact]
+    public async Task AddEventAsync_NullEvent_DoesNotAddAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+#pragma warning disable CS8625 // Cannot convert null literal to non-nullable reference type.
+        await context.AddEventAsync(null);
+#pragma warning restore CS8625
+
+        // Assert
+        Assert.Empty(context.OutboundEvents);
+    }
+
+    #endregion
+
+    #region SendMessageAsync
+
+    [Fact]
+    public async Task SendMessageAsync_SerializesMessageWithTypeNameAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+        await context.SendMessageAsync("hello");
+
+        // Assert — the envelope carries the assembly-qualified runtime type name.
+        Assert.Single(context.SentMessages);
+        Assert.Equal(typeof(string).AssemblyQualifiedName, context.SentMessages[0].TypeName);
+        Assert.NotNull(context.SentMessages[0].Data);
+    }
+
+    [Fact]
+    public async Task SendMessageAsync_NullMessage_DoesNotAddAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+#pragma warning disable CS8625 // Cannot convert null literal to non-nullable reference type.
+        await context.SendMessageAsync(null);
+#pragma warning restore CS8625
+
+        // Assert
+        Assert.Empty(context.SentMessages);
+    }
+
+    #endregion
+
+    #region YieldOutputAsync
+
+    [Fact]
+    public async Task YieldOutputAsync_AddsWorkflowOutputEventAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+        await context.YieldOutputAsync("result");
+
+        // Assert
+        Assert.Single(context.OutboundEvents);
+        WorkflowOutputEvent outputEvent = Assert.IsType<WorkflowOutputEvent>(context.OutboundEvents[0]);
+        Assert.Equal("result", outputEvent.Data);
+    }
+
+    [Fact]
+    public async Task YieldOutputAsync_NullOutput_DoesNotAddAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+#pragma warning disable CS8625 // Cannot convert null literal to non-nullable reference type.
+        await context.YieldOutputAsync(null);
+#pragma warning restore CS8625
+
+        // Assert
+        Assert.Empty(context.OutboundEvents);
+    }
+
+    #endregion
+
+    #region RequestHaltAsync
+
+    [Fact]
+    public async Task RequestHaltAsync_SetsHaltRequestedAndAddsEventAsync()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Act
+        await context.RequestHaltAsync();
+
+        // Assert
+        Assert.True(context.HaltRequested);
+        Assert.Single(context.OutboundEvents);
+        // NOTE(review): the IsType generic argument was stripped from the diff; the
+        // halt event type name is reconstructed — confirm against the original patch.
+        Assert.IsType<RequestHaltEvent>(context.OutboundEvents[0]);
+    }
+
+    #endregion
+
+    #region Properties
+
+    [Fact]
+    public void TraceContext_ReturnsNull()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Assert
+        Assert.Null(context.TraceContext);
+    }
+
+    [Fact]
+    public void ConcurrentRunsEnabled_ReturnsFalse()
+    {
+        // Arrange
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Assert
+        Assert.False(context.ConcurrentRunsEnabled);
+    }
+
+    [Fact]
+    public async Task Constructor_NullInitialState_CreatesEmptyStateAsync()
+    {
+        // Arrange & Act
+        DurableWorkflowContext context = new(null, CreateTestExecutor());
+
+        // Assert
+        string? result = await context.ReadStateAsync<string>("anything");
+        Assert.Null(result);
+    }
+
+    #endregion
+}
+
+using Microsoft.Agents.AI.DurableTask.Workflows;
+
+namespace Microsoft.Agents.AI.DurableTask.UnitTests.Workflows;
+
+// NOTE(review): the generic arguments on the Assert calls below were stripped from the
+// diff by an angle-bracket sanitizer; they are reconstructed from the test names
+// (…_ThrowsArgumentException) — confirm against the original patch.
+public sealed class WorkflowNamingHelperTests
+{
+    [Fact]
+    public void ToOrchestrationFunctionName_ValidWorkflowName_ReturnsPrefixedName()
+    {
+        string result = WorkflowNamingHelper.ToOrchestrationFunctionName("MyWorkflow");
+
+        Assert.Equal("dafx-MyWorkflow", result);
+    }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("")]
+    public void ToOrchestrationFunctionName_NullOrEmpty_ThrowsArgumentException(string? workflowName)
+    {
+        // ThrowsAny covers ArgumentNullException for the null case as well.
+        Assert.ThrowsAny<ArgumentException>(() => WorkflowNamingHelper.ToOrchestrationFunctionName(workflowName!));
+    }
+
+    [Fact]
+    public void ToWorkflowName_ValidOrchestrationFunctionName_ReturnsWorkflowName()
+    {
+        string result = WorkflowNamingHelper.ToWorkflowName("dafx-MyWorkflow");
+
+        Assert.Equal("MyWorkflow", result);
+    }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("")]
+    public void ToWorkflowName_NullOrEmpty_ThrowsArgumentException(string? orchestrationFunctionName)
+    {
+        Assert.ThrowsAny<ArgumentException>(() => WorkflowNamingHelper.ToWorkflowName(orchestrationFunctionName!));
+    }
+
+    [Theory]
+    [InlineData("MyWorkflow")]
+    [InlineData("invalid-prefix-MyWorkflow")]
+    [InlineData("dafx")]
+    [InlineData("dafx-")]
+    public void ToWorkflowName_InvalidOrMissingPrefix_ThrowsArgumentException(string orchestrationFunctionName)
+    {
+        Assert.Throws<ArgumentException>(() => WorkflowNamingHelper.ToWorkflowName(orchestrationFunctionName));
+    }
+
+    [Fact]
+    public void GetExecutorName_SimpleExecutorId_ReturnsSameName()
+    {
+        string result = WorkflowNamingHelper.GetExecutorName("OrderParser");
+
+        Assert.Equal("OrderParser", result);
+    }
+
+    [Fact]
+    public void GetExecutorName_ExecutorIdWithGuidSuffix_ReturnsNameWithoutSuffix()
+    {
+        string result = WorkflowNamingHelper.GetExecutorName("Physicist_8884e71021334ce49517fa2b17b1695b");
+
+        Assert.Equal("Physicist", result);
+    }
+
+    [Fact]
+    public void GetExecutorName_NameWithUnderscoresAndGuidSuffix_ReturnsFullName()
+    {
+        // NOTE(review): the method name says "ReturnsFullName" but the expectation
+        // strips the GUID suffix — the assertion matches the sibling test above;
+        // the test name may be stale.
+        string result = WorkflowNamingHelper.GetExecutorName("my_agent_8884e71021334ce49517fa2b17b1695b");
+
+        Assert.Equal("my_agent", result);
+    }
+
+    [Fact]
+    public void GetExecutorName_NameWithUnderscoreButNoGuidSuffix_ReturnsSameName()
+    {
+        string result = WorkflowNamingHelper.GetExecutorName("my_custom_executor");
+
+        Assert.Equal("my_custom_executor", result);
+    }
+
+    [Theory]
+    [InlineData(null)]
+    [InlineData("")]
+    public void GetExecutorName_NullOrEmpty_ThrowsArgumentException(string? executorId)
+    {
+        Assert.ThrowsAny<ArgumentException>(() => WorkflowNamingHelper.GetExecutorName(executorId!));
+    }
+}
diff --git a/dotnet/tests/Microsoft.Agents.AI.Hosting.AzureFunctions.IntegrationTests/WorkflowSamplesValidation.cs b/dotnet/tests/Microsoft.Agents.AI.Hosting.AzureFunctions.IntegrationTests/WorkflowSamplesValidation.cs
new file mode 100644
index 0000000000..dd6e6548b6
--- /dev/null
+++ b/dotnet/tests/Microsoft.Agents.AI.Hosting.AzureFunctions.IntegrationTests/WorkflowSamplesValidation.cs
@@ -0,0 +1,584 @@
+// Copyright (c) Microsoft. All rights reserved.
+
+using System.Diagnostics;
+using System.Reflection;
+using System.Text;
+using Microsoft.Extensions.Configuration;
+using Microsoft.Extensions.Logging;
+using Xunit.Abstractions;
+
+namespace Microsoft.Agents.AI.Hosting.AzureFunctions.IntegrationTests;
+
+/// <summary>
+/// Integration tests for validating the durable workflow Azure Functions samples
+/// located in samples/04-hosting/DurableWorkflows/AzureFunctions.
+/// </summary>
+[Collection("Samples")]
+[Trait("Category", "SampleValidation")]
+public sealed class WorkflowSamplesValidation(ITestOutputHelper outputHelper) : IAsyncLifetime
+{
+    // NOTE(review): generic type arguments in this class (Task<bool>, List<OutputLog>,
+    // Func<…>, IEnumerable<string>) were stripped from the diff by an angle-bracket
+    // sanitizer and have been reconstructed — confirm against the original patch.
+    private const string AzureFunctionsPort = "7071";
+    private const string AzuritePort = "10000";
+    private const string DtsPort = "8080";
+
+    private static readonly string s_dotnetTargetFramework = GetTargetFramework();
+    private static readonly HttpClient s_sharedHttpClient = new();
+    private static readonly IConfiguration s_configuration =
+        new ConfigurationBuilder()
+            .AddUserSecrets(Assembly.GetExecutingAssembly())
+            .AddEnvironmentVariables()
+            .Build();
+
+    private static bool s_infrastructureStarted;
+    private static readonly TimeSpan s_orchestrationTimeout = TimeSpan.FromMinutes(1);
+    private static readonly string s_samplesPath = Path.GetFullPath(
+        Path.Combine(AppDomain.CurrentDomain.BaseDirectory, "..", "..", "..", "..", "..", "samples", "04-hosting", "DurableWorkflows", "AzureFunctions"));
+
+    private readonly ITestOutputHelper _outputHelper = outputHelper;
+
+    async Task IAsyncLifetime.InitializeAsync()
+    {
+        // Shared docker infrastructure (Azurite + DTS emulator) is started once per run.
+        if (!s_infrastructureStarted)
+        {
+            await this.StartSharedInfrastructureAsync();
+            s_infrastructureStarted = true;
+        }
+    }
+
+    async Task IAsyncLifetime.DisposeAsync()
+    {
+        await Task.CompletedTask;
+    }
+
+    [Fact]
+    public async Task SequentialWorkflowSampleValidationAsync()
+    {
+        string samplePath = Path.Combine(s_samplesPath, "01_SequentialWorkflow");
+        await this.RunSampleTestAsync(samplePath, requiresOpenAI: false, async (logs) =>
+        {
+            // Test the CancelOrder workflow
+            Uri cancelOrderUri = new($"http://localhost:{AzureFunctionsPort}/api/workflows/CancelOrder/run");
+            this._outputHelper.WriteLine($"Starting CancelOrder workflow via POST request to {cancelOrderUri}...");
+
+            using HttpContent cancelContent = new StringContent("12345", Encoding.UTF8, "text/plain");
+            using HttpResponseMessage cancelResponse = await s_sharedHttpClient.PostAsync(cancelOrderUri, cancelContent);
+
+            Assert.True(cancelResponse.IsSuccessStatusCode, $"CancelOrder request failed with status: {cancelResponse.StatusCode}");
+            string cancelResponseText = await cancelResponse.Content.ReadAsStringAsync();
+            Assert.Contains("CancelOrder", cancelResponseText);
+            this._outputHelper.WriteLine($"CancelOrder response: {cancelResponseText}");
+
+            // Wait for the CancelOrder workflow to complete by checking logs
+            await this.WaitForConditionAsync(
+                condition: () =>
+                {
+                    lock (logs)
+                    {
+                        bool exists = logs.Any(log => log.Message.Contains("Workflow completed"));
+                        return Task.FromResult(exists);
+                    }
+                },
+                message: "CancelOrder workflow completed",
+                timeout: s_orchestrationTimeout);
+
+            // Verify the executor activities ran in sequence
+            lock (logs)
+            {
+                Assert.True(logs.Any(log => log.Message.Contains("[Activity] OrderLookup:")), "OrderLookup activity not found in logs.");
+                Assert.True(logs.Any(log => log.Message.Contains("[Activity] OrderCancel:")), "OrderCancel activity not found in logs.");
+                Assert.True(logs.Any(log => log.Message.Contains("[Activity] SendEmail:")), "SendEmail activity not found in logs.");
+            }
+
+            // Test the OrderStatus workflow (shares OrderLookup executor with CancelOrder)
+            Uri orderStatusUri = new($"http://localhost:{AzureFunctionsPort}/api/workflows/OrderStatus/run");
+            this._outputHelper.WriteLine($"Starting OrderStatus workflow via POST request to {orderStatusUri}...");
+
+            using HttpContent statusContent = new StringContent("67890", Encoding.UTF8, "text/plain");
+            using HttpResponseMessage statusResponse = await s_sharedHttpClient.PostAsync(orderStatusUri, statusContent);
+
+            Assert.True(statusResponse.IsSuccessStatusCode, $"OrderStatus request failed with status: {statusResponse.StatusCode}");
+            string statusResponseText = await statusResponse.Content.ReadAsStringAsync();
+            Assert.Contains("OrderStatus", statusResponseText);
+            this._outputHelper.WriteLine($"OrderStatus response: {statusResponseText}");
+
+            // Wait for the OrderStatus workflow to complete
+            await this.WaitForConditionAsync(
+                condition: () =>
+                {
+                    lock (logs)
+                    {
+                        // Look for StatusReport activity which is unique to OrderStatus workflow
+                        bool exists = logs.Any(log => log.Message.Contains("[Activity] StatusReport:"));
+                        return Task.FromResult(exists);
+                    }
+                },
+                message: "OrderStatus workflow completed",
+                timeout: s_orchestrationTimeout);
+        });
+    }
+
+    [Fact]
+    public async Task HITLWorkflowSampleValidationAsync()
+    {
+        string samplePath = Path.Combine(s_samplesPath, "03_WorkflowHITL");
+        await this.RunSampleTestAsync(samplePath, requiresOpenAI: false, async (logs) =>
+        {
+            // Use a unique run ID to avoid conflicts with previous test runs
+            string runId = $"hitl-test-{Guid.NewGuid():N}";
+
+            // Step 1: Start the expense reimbursement workflow
+            Uri runUri = new($"http://localhost:{AzureFunctionsPort}/api/workflows/ExpenseReimbursement/run?runId={runId}");
+            this._outputHelper.WriteLine($"Starting ExpenseReimbursement workflow via POST request to {runUri}...");
+
+            using HttpContent runContent = new StringContent("EXP-2025-001", Encoding.UTF8, "text/plain");
+            using HttpResponseMessage runResponse = await s_sharedHttpClient.PostAsync(runUri, runContent);
+
+            Assert.True(runResponse.IsSuccessStatusCode, $"Run request failed with status: {runResponse.StatusCode}");
+            string runResponseText = await runResponse.Content.ReadAsStringAsync();
+            Assert.Contains("ExpenseReimbursement", runResponseText);
+            this._outputHelper.WriteLine($"Run response: {runResponseText}");
+
+            // Step 2: Wait for the workflow to pause at the ManagerApproval RequestPort
+            await this.WaitForConditionAsync(
+                condition: () =>
+                {
+                    lock (logs)
+                    {
+                        bool exists = logs.Any(log => log.Message.Contains("Workflow waiting for external input at RequestPort 'ManagerApproval'"));
+                        return Task.FromResult(exists);
+                    }
+                },
+                message: "Workflow paused at ManagerApproval RequestPort",
+                timeout: s_orchestrationTimeout);
+
+            // Step 3: Send approval response to resume the workflow
+            Uri respondUri = new($"http://localhost:{AzureFunctionsPort}/api/workflows/ExpenseReimbursement/respond/{runId}");
+            this._outputHelper.WriteLine($"Sending approval response via POST request to {respondUri}...");
+
+            using HttpContent respondContent = new StringContent(
+                """{"eventName": "ManagerApproval", "response": {"Approved": true, "Comments": "Approved by test."}}""",
+                Encoding.UTF8, "application/json");
+            using HttpResponseMessage respondResponse = await s_sharedHttpClient.PostAsync(respondUri, respondContent);
+
+            Assert.True(respondResponse.IsSuccessStatusCode, $"Respond request failed with status: {respondResponse.StatusCode}");
+            string respondResponseText = await respondResponse.Content.ReadAsStringAsync();
+            Assert.Contains("Response sent to workflow", respondResponseText);
+            this._outputHelper.WriteLine($"Respond response: {respondResponseText}");
+
+            // Step 4: Wait for the workflow to pause at the parallel BudgetApproval and ComplianceApproval RequestPorts
+            await this.WaitForConditionAsync(
+                condition: () =>
+                {
+                    lock (logs)
+                    {
+                        bool exists = logs.Any(log => log.Message.Contains("Workflow waiting for external input at RequestPort 'BudgetApproval'"));
+                        return Task.FromResult(exists);
+                    }
+                },
+                message: "Workflow paused at BudgetApproval RequestPort",
+                timeout: s_orchestrationTimeout);
+
+            // Step 5a: Send budget approval response
+            this._outputHelper.WriteLine("Sending BudgetApproval response...");
+
+            using HttpContent budgetContent = new StringContent(
+                """{"eventName": "BudgetApproval", "response": {"Approved": true, "Comments": "Budget approved by test."}}""",
+                Encoding.UTF8, "application/json");
+            using HttpResponseMessage budgetResponse = await s_sharedHttpClient.PostAsync(respondUri, budgetContent);
+
+            Assert.True(budgetResponse.IsSuccessStatusCode, $"BudgetApproval request failed with status: {budgetResponse.StatusCode}");
+            this._outputHelper.WriteLine($"BudgetApproval response: {await budgetResponse.Content.ReadAsStringAsync()}");
+
+            // Step 5b: Send compliance approval response
+            this._outputHelper.WriteLine("Sending ComplianceApproval response...");
+
+            using HttpContent complianceContent = new StringContent(
+                """{"eventName": "ComplianceApproval", "response": {"Approved": true, "Comments": "Compliance approved by test."}}""",
+                Encoding.UTF8, "application/json");
+            using HttpResponseMessage complianceResponse = await s_sharedHttpClient.PostAsync(respondUri, complianceContent);
+
+            Assert.True(complianceResponse.IsSuccessStatusCode, $"ComplianceApproval request failed with status: {complianceResponse.StatusCode}");
+            this._outputHelper.WriteLine($"ComplianceApproval response: {await complianceResponse.Content.ReadAsStringAsync()}");
+
+            // Step 6: Wait for the workflow to complete
+            await this.WaitForConditionAsync(
+                condition: () =>
+                {
+                    lock (logs)
+                    {
+                        bool exists = logs.Any(log => log.Message.Contains("Workflow completed"));
+                        return Task.FromResult(exists);
+                    }
+                },
+                message: "HITL workflow completed",
+                timeout: s_orchestrationTimeout);
+
+            // Verify executor activities ran
+            lock (logs)
+            {
+                Assert.True(logs.Any(log => log.Message.Contains("Received external event for RequestPort 'ManagerApproval'")),
+                    "ManagerApproval external event receipt not found in logs.");
+                Assert.True(logs.Any(log => log.Message.Contains("Received external event for RequestPort 'BudgetApproval'")),
+                    "BudgetApproval external event receipt not found in logs.");
+                Assert.True(logs.Any(log => log.Message.Contains("Received external event for RequestPort 'ComplianceApproval'")),
+                    "ComplianceApproval external event receipt not found in logs.");
+            }
+        });
+    }
+
+    [Fact]
+    public async Task ConcurrentWorkflowSampleValidationAsync()
+    {
+        string samplePath = Path.Combine(s_samplesPath, "02_ConcurrentWorkflow");
+        await this.RunSampleTestAsync(samplePath, requiresOpenAI: true, async (logs) =>
+        {
+            // Start the ExpertReview workflow with a science question
+            const string RequestBody = "What is temperature?";
+            using HttpContent content = new StringContent(RequestBody, Encoding.UTF8, "text/plain");
+
+            Uri startUri = new($"http://localhost:{AzureFunctionsPort}/api/workflows/ExpertReview/run");
+            this._outputHelper.WriteLine($"Starting ExpertReview workflow via POST request to {startUri}...");
+            using HttpResponseMessage startResponse = await s_sharedHttpClient.PostAsync(startUri, content);
+
+            Assert.True(startResponse.IsSuccessStatusCode, $"ExpertReview request failed with status: {startResponse.StatusCode}");
+            string startResponseText = await startResponse.Content.ReadAsStringAsync();
+            Assert.Contains("ExpertReview", startResponseText);
+            this._outputHelper.WriteLine($"ExpertReview response: {startResponseText}");
+
+            // Wait for the ParseQuestion executor to run
+            await this.WaitForConditionAsync(
+                condition: () =>
+                {
+                    lock (logs)
+                    {
+                        bool exists = logs.Any(log => log.Message.Contains("[ParseQuestion]"));
+                        return Task.FromResult(exists);
+                    }
+                },
+                message: "ParseQuestion executor ran",
+                timeout: s_orchestrationTimeout);
+
+            // Wait for the Aggregator to complete (indicates fan-in from parallel agents)
+            await this.WaitForConditionAsync(
+                condition: () =>
+                {
+                    lock (logs)
+                    {
+                        bool exists = logs.Any(log => log.Message.Contains("Aggregation complete"));
+                        return Task.FromResult(exists);
+                    }
+                },
+                message: "Aggregator completed with parallel agent responses",
+                timeout: s_orchestrationTimeout);
+
+            // Verify the aggregator received responses from both AI agents
+            lock (logs)
+            {
+                Assert.True(
+                    logs.Any(log => log.Message.Contains("AI agent responses")),
+                    "Aggregator did not log receiving AI agent responses.");
+            }
+        });
+    }
+
+    private async Task StartSharedInfrastructureAsync()
+    {
+        // Start Azurite if it's not already running
+        if (!await this.IsAzuriteRunningAsync())
+        {
+            await this.StartDockerContainerAsync(
+                containerName: "azurite",
+                image: "mcr.microsoft.com/azure-storage/azurite",
+                ports: ["-p", "10000:10000", "-p", "10001:10001", "-p", "10002:10002"]);
+
+            await this.WaitForConditionAsync(this.IsAzuriteRunningAsync, "Azurite is running", TimeSpan.FromSeconds(30));
+        }
+
+        // Start DTS emulator if it's not already running
+        if (!await this.IsDtsEmulatorRunningAsync())
+        {
+            await this.StartDockerContainerAsync(
+                containerName: "dts-emulator",
+                image: "mcr.microsoft.com/dts/dts-emulator:latest",
+                ports: ["-p", "8080:8080", "-p", "8082:8082"]);
+
+            await this.WaitForConditionAsync(
+                condition: this.IsDtsEmulatorRunningAsync,
+                message: "DTS emulator is running",
+                timeout: TimeSpan.FromSeconds(30));
+        }
+    }
+
+    private async Task<bool> IsAzuriteRunningAsync()
+    {
+        this._outputHelper.WriteLine(
+            $"Checking if Azurite is running at http://localhost:{AzuritePort}/devstoreaccount1...");
+
+        try
+        {
+            using CancellationTokenSource timeoutCts = new(TimeSpan.FromSeconds(30));
+            using HttpResponseMessage response = await s_sharedHttpClient.GetAsync(
+                requestUri: new Uri($"http://localhost:{AzuritePort}/devstoreaccount1?comp=list"),
+                cancellationToken: timeoutCts.Token);
+            // Azurite identifies itself via the "Server" response header.
+            if (response.Headers.TryGetValues(
+                "Server",
+                out IEnumerable<string>? serverValues) && serverValues.Any(s => s.StartsWith("Azurite", StringComparison.OrdinalIgnoreCase)))
+            {
+                this._outputHelper.WriteLine($"Azurite is running, server: {string.Join(", ", serverValues)}");
+                return true;
+            }
+
+            this._outputHelper.WriteLine($"Azurite is not running. Status code: {response.StatusCode}");
+            return false;
+        }
+        catch (HttpRequestException ex)
+        {
+            this._outputHelper.WriteLine($"Azurite is not running: {ex.Message}");
+            return false;
+        }
+    }
+
+    private async Task<bool> IsDtsEmulatorRunningAsync()
+    {
+        this._outputHelper.WriteLine($"Checking if DTS emulator is running at http://localhost:{DtsPort}/healthz...");
+
+        // The health probe is sent with an exact HTTP/2 request version.
+        using HttpClient http2Client = new()
+        {
+            DefaultRequestVersion = new Version(2, 0),
+            DefaultVersionPolicy = HttpVersionPolicy.RequestVersionExact
+        };
+
+        try
+        {
+            using CancellationTokenSource timeoutCts = new(TimeSpan.FromSeconds(30));
+            using HttpResponseMessage response = await http2Client.GetAsync(new Uri($"http://localhost:{DtsPort}/healthz"), timeoutCts.Token);
+            if (response.Content.Headers.ContentLength > 0)
+            {
+                string content = await response.Content.ReadAsStringAsync(timeoutCts.Token);
+                this._outputHelper.WriteLine($"DTS emulator health check response: {content}");
+            }
+
+            if (response.IsSuccessStatusCode)
+            {
+                this._outputHelper.WriteLine("DTS emulator is running");
+                return true;
+            }
+
+            this._outputHelper.WriteLine($"DTS emulator is not running. Status code: {response.StatusCode}");
+            return false;
+        }
+        catch (HttpRequestException ex)
+        {
+            this._outputHelper.WriteLine($"DTS emulator is not running: {ex.Message}");
+            return false;
+        }
+    }
+
+    private async Task StartDockerContainerAsync(string containerName, string image, string[] ports)
+    {
+        // Remove any stale container with the same name before starting a fresh one.
+        await this.RunCommandAsync("docker", ["stop", containerName]);
+        await this.RunCommandAsync("docker", ["rm", containerName]);
+
+        List<string> args = ["run", "-d", "--name", containerName];
+        args.AddRange(ports);
+        args.Add(image);
+
+        this._outputHelper.WriteLine(
+            $"Starting new container: {containerName} with image: {image} and ports: {string.Join(", ", ports)}");
+        await this.RunCommandAsync("docker", args.ToArray());
+        this._outputHelper.WriteLine($"Container started: {containerName}");
+    }
+
+    private async Task WaitForConditionAsync(Func<Task<bool>> condition, string message, TimeSpan timeout)
+    {
+        this._outputHelper.WriteLine($"Waiting for '{message}'...");
+
+        using CancellationTokenSource cancellationTokenSource = new(timeout);
+        while (true)
+        {
+            if (await condition())
+            {
+                return;
+            }
+
+            try
+            {
+                await Task.Delay(TimeSpan.FromSeconds(1), cancellationTokenSource.Token);
+            }
+            catch (OperationCanceledException) when (cancellationTokenSource.IsCancellationRequested)
+            {
+                throw new TimeoutException($"Timeout waiting for '{message}'");
+            }
+        }
+    }
+
+    private sealed record OutputLog(DateTime Timestamp, LogLevel Level, string Message);
+
+    private async Task RunSampleTestAsync(string samplePath, bool requiresOpenAI, Func<List<OutputLog>, Task> testAction)
+    {
+        List<OutputLog> logsContainer = [];
+        using Process funcProcess = this.StartFunctionApp(samplePath, logsContainer, requiresOpenAI);
+        try
+        {
+            await this.WaitForAzureFunctionsAsync();
+            await testAction(logsContainer);
+        }
+        finally
+        {
+            await this.StopProcessAsync(funcProcess);
+        }
+    }
+
+    private Process StartFunctionApp(string samplePath, List<OutputLog> logs, bool requiresOpenAI)
+    {
+        ProcessStartInfo startInfo = new()
+        {
+            FileName = "dotnet",
+            Arguments = $"run -f {s_dotnetTargetFramework} --port {AzureFunctionsPort}",
+            WorkingDirectory = samplePath,
+            UseShellExecute = false,
+            RedirectStandardOutput = true,
+            RedirectStandardError = true,
+        };
+
+        if (requiresOpenAI)
+        {
+            string openAiEndpoint = s_configuration["AZURE_OPENAI_ENDPOINT"] ??
+                throw new InvalidOperationException("The required AZURE_OPENAI_ENDPOINT env variable is not set.");
+            string openAiDeployment = s_configuration["AZURE_OPENAI_CHAT_DEPLOYMENT_NAME"] ??
+                throw new InvalidOperationException("The required AZURE_OPENAI_CHAT_DEPLOYMENT_NAME env variable is not set.");
+
+            this._outputHelper.WriteLine($"Using Azure OpenAI endpoint: {openAiEndpoint}, deployment: {openAiDeployment}");
+
+            startInfo.EnvironmentVariables["AZURE_OPENAI_ENDPOINT"] = openAiEndpoint;
+            startInfo.EnvironmentVariables["AZURE_OPENAI_DEPLOYMENT"] = openAiDeployment;
+        }
+
+        startInfo.EnvironmentVariables["DURABLE_TASK_SCHEDULER_CONNECTION_STRING"] =
+            $"Endpoint=http://localhost:{DtsPort};TaskHub=default;Authentication=None";
+        startInfo.EnvironmentVariables["AzureWebJobsStorage"] = "UseDevelopmentStorage=true";
+
+        Process process = new() { StartInfo = startInfo };
+
+        process.ErrorDataReceived += (sender, e) =>
+        {
+            if (e.Data != null)
+            {
+                this._outputHelper.WriteLine($"[{startInfo.FileName}(err)]: {e.Data}");
+                lock (logs)
+                {
+                    logs.Add(new OutputLog(DateTime.Now, LogLevel.Error, e.Data));
+                }
+            }
+        };
+
+        process.OutputDataReceived += (sender, e) =>
+        {
+            if (e.Data != null)
+            {
+                this._outputHelper.WriteLine($"[{startInfo.FileName}(out)]: {e.Data}");
+                lock (logs)
+                {
+                    logs.Add(new OutputLog(DateTime.Now, LogLevel.Information, e.Data));
+                }
+            }
+        };
+
+        if (!process.Start())
+        {
+            throw new InvalidOperationException("Failed to start the function app");
+        }
+
+        process.BeginErrorReadLine();
+        process.BeginOutputReadLine();
+
+        return process;
+    }
+
+    private async Task WaitForAzureFunctionsAsync()
+    {
+        this._outputHelper.WriteLine(
+            $"Waiting 
for Azure Functions Core Tools to be ready at http://localhost:{AzureFunctionsPort}/..."); + await this.WaitForConditionAsync( + condition: async () => + { + try + { + using HttpRequestMessage request = new(HttpMethod.Head, $"http://localhost:{AzureFunctionsPort}/"); + using HttpResponseMessage response = await s_sharedHttpClient.SendAsync(request); + this._outputHelper.WriteLine($"Azure Functions Core Tools response: {response.StatusCode}"); + return response.IsSuccessStatusCode; + } + catch (HttpRequestException) + { + return false; + } + }, + message: "Azure Functions Core Tools is ready", + timeout: TimeSpan.FromSeconds(60)); + } + + private async Task RunCommandAsync(string command, string[] args) + { + ProcessStartInfo startInfo = new() + { + FileName = command, + Arguments = string.Join(" ", args), + UseShellExecute = false, + RedirectStandardOutput = true, + RedirectStandardError = true, + CreateNoWindow = true + }; + + this._outputHelper.WriteLine($"Running command: {command} {string.Join(" ", args)}"); + + using Process process = new() { StartInfo = startInfo }; + process.ErrorDataReceived += (sender, e) => this._outputHelper.WriteLine($"[{command}(err)]: {e.Data}"); + process.OutputDataReceived += (sender, e) => this._outputHelper.WriteLine($"[{command}(out)]: {e.Data}"); + if (!process.Start()) + { + throw new InvalidOperationException("Failed to start the command"); + } + + process.BeginErrorReadLine(); + process.BeginOutputReadLine(); + + using CancellationTokenSource cancellationTokenSource = new(TimeSpan.FromMinutes(1)); + await process.WaitForExitAsync(cancellationTokenSource.Token); + + this._outputHelper.WriteLine($"Command completed with exit code: {process.ExitCode}"); + } + + private async Task StopProcessAsync(Process process) + { + try + { + if (!process.HasExited) + { + this._outputHelper.WriteLine($"Killing process {process.ProcessName}#{process.Id}"); + process.Kill(entireProcessTree: true); + + using CancellationTokenSource timeoutCts = 
new(TimeSpan.FromSeconds(10)); + await process.WaitForExitAsync(timeoutCts.Token); + this._outputHelper.WriteLine($"Process exited: {process.Id}"); + } + } + catch (Exception ex) + { + this._outputHelper.WriteLine($"Failed to stop process: {ex.Message}"); + } + } + + private static string GetTargetFramework() + { + string filePath = new Uri(typeof(WorkflowSamplesValidation).Assembly.Location).LocalPath; + string directory = Path.GetDirectoryName(filePath)!; + string tfm = Path.GetFileName(directory); + if (tfm.StartsWith("net", StringComparison.OrdinalIgnoreCase)) + { + return tfm; + } + + throw new InvalidOperationException($"Unable to find target framework in path: {filePath}"); + } +}