Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
173 changes: 173 additions & 0 deletions examples/01_standalone_sdk/43_interrupt_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,173 @@
"""Example: Interrupting agent execution with Ctrl+C.

This example demonstrates how to use conversation.interrupt() to immediately
cancel an in-flight LLM call when the user presses Ctrl+C.

Unlike pause(), which waits for the current LLM call to complete,
interrupt() cancels the call immediately by:
- Cancelling the async task running the LLM call
- Closing the HTTP connection
- Raising LLMCancelledError

This is useful for:
- Long-running reasoning tasks that you want to stop immediately
- Expensive API calls you want to cancel to save costs
- Interactive applications where responsiveness is important

Usage:
LLM_API_KEY=your_key python 43_interrupt_example.py

Press Ctrl+C at any time to interrupt the agent.
"""

import os
import signal
import sys
import threading
import time

from openhands.sdk import LLM, Agent, Conversation, Tool
from openhands.tools.terminal import TerminalTool


# Einstein-style "zebra" logic puzzle: a deliberately long multi-step
# reasoning task, chosen so the LLM call runs long enough for the user
# to press Ctrl+C while it is still in flight.
PROMPT = """
I need you to solve this complex logic puzzle step by step, showing your reasoning:

There are 5 houses in a row, each a different color (Red, Green, Blue, Yellow, White).
Each house is occupied by a person of different nationality.
Each person has a different pet, drink, and cigarette brand.

Clues:
1. The British person lives in the red house.
2. The Swedish person keeps dogs as pets.
3. The Danish person drinks tea.
4. The green house is on the left of the white house.
5. The green house's owner drinks coffee.
6. The person who smokes Pall Mall rears birds.
7. The owner of the yellow house smokes Dunhill.
8. The person living in the center house drinks milk.
9. The Norwegian lives in the first house.
10. The person who smokes Blend lives next to the one who keeps cats.
11. The person who keeps horses lives next to the one who smokes Dunhill.
12. The person who smokes Blue Master drinks beer.
13. The German smokes Prince.
14. The Norwegian lives next to the blue house.
15. The person who smokes Blend has a neighbor who drinks water.

Question: Who owns the fish?

Please solve this completely, showing your full reasoning process with all deductions.
After solving, create a file called 'puzzle_solution.txt' with your complete solution.
"""


def main():
    """Drive a long reasoning task and allow immediate Ctrl+C interruption.

    Sends a deliberately slow puzzle prompt to the agent, runs the agent in a
    background thread, and installs a SIGINT handler that calls
    conversation.interrupt() so an in-flight LLM call is cancelled at once.
    """
    # Set only when the user presses Ctrl+C; used for latency reporting
    # and to ignore repeated presses.
    interrupt_time: float | None = None

    # Configure LLM - use gpt-5.2 for long reasoning tasks.
    # Falls back to the LLM_MODEL environment variable if set.
    api_key = os.getenv("LLM_API_KEY")
    if not api_key:
        print("Error: LLM_API_KEY environment variable is not set.")
        sys.exit(1)

    model = os.getenv("LLM_MODEL", "openai/gpt-5.2")
    base_url = os.getenv("LLM_BASE_URL")

    print("=" * 70)
    print("Interrupt Example - Press Ctrl+C to immediately stop the agent")
    print("=" * 70)
    print()

    llm = LLM(
        usage_id="reasoning-agent",
        model=model,
        base_url=base_url,
        api_key=api_key,
    )

    print(f"Using model: {model}")
    print()

    # Create agent with minimal tools.
    agent = Agent(
        llm=llm,
        tools=[Tool(name=TerminalTool.name)],
    )

    conversation = Conversation(agent=agent, workspace=os.getcwd())

    # Ctrl+C handler: record the time and interrupt the conversation.
    def signal_handler(_signum, _frame):
        nonlocal interrupt_time
        if interrupt_time is not None:
            # An interrupt is already in progress; ignore repeated Ctrl+C
            # rather than calling interrupt() again.
            return
        interrupt_time = time.time()
        print("\n")
        print("=" * 70)
        print("Ctrl+C detected! Interrupting agent...")
        print("=" * 70)

        # Call interrupt() - this immediately cancels any in-flight LLM call
        conversation.interrupt()

    signal.signal(signal.SIGINT, signal_handler)

    # Send a task that requires long reasoning.
    print("Sending a complex reasoning task to the agent...")
    print("(This task is designed to take a while - press Ctrl+C to interrupt)")
    print()

    conversation.send_message(PROMPT)
    print(f"Agent status: {conversation.state.execution_status}")
    print()

    # Run the agent in a background thread so the main thread stays free to
    # receive SIGINT (Python delivers signals to the main thread only).
    def run_agent():
        conversation.run()

    start_time = time.time()
    thread = threading.Thread(target=run_agent)
    thread.start()

    print("Agent is working... (press Ctrl+C to interrupt)")
    print()

    # Join with a timeout in a loop instead of a bare join(): an untimed
    # join() can block signal delivery on some platforms (notably Windows),
    # which would prevent the Ctrl+C handler from ever running.
    while thread.is_alive():
        thread.join(timeout=0.2)

    end_time = time.time()
    total_time = end_time - start_time

    # Report timing.
    print()
    print("=" * 70)
    print("Results")
    print("=" * 70)
    print()
    print(f"Final status: {conversation.state.execution_status}")
    print()

    if interrupt_time:
        interrupt_latency = end_time - interrupt_time
        print(f"Total time from start to stop: {total_time:.2f} seconds")
        print(f"Time from Ctrl+C to full stop: {interrupt_latency:.3f} seconds")
        print()
        print("The agent was interrupted immediately!")
        print("Without interrupt(), you would have had to wait for the full")
        print("LLM response to complete before the agent would stop.")
    else:
        print(f"Total time: {total_time:.2f} seconds")
        print("Agent completed normally (was not interrupted)")

    print()

    # Report cost accumulated on the LLM's metrics.
    cost = llm.metrics.accumulated_cost
    print(f"Accumulated cost: ${cost:.6f}")
    print(f"EXAMPLE_COST: {cost}")


if __name__ == "__main__":
    main()
15 changes: 15 additions & 0 deletions openhands-sdk/openhands/sdk/conversation/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -193,6 +193,21 @@ def reject_pending_actions(
@abstractmethod
def pause(self) -> None: ...

@abstractmethod
def interrupt(self) -> None:
    """Interrupt the agent immediately, cancelling any in-flight LLM calls.

    Unlike pause(), which waits for the current step to complete,
    interrupt() attempts to cancel ongoing LLM calls immediately:

    - Streaming calls: Cancelled at the next chunk boundary (immediate)
    - Non-streaming calls: The async task is cancelled, closing the HTTP connection

    This method is thread-safe and can be called from any thread.
    After interruption, the conversation status is set to PAUSED.

    Implementations surface the cancellation to the running step as
    LLMCancelledError, which should be treated as a clean stop rather
    than an error.
    """
    ...

@abstractmethod
def update_secrets(self, secrets: Mapping[str, SecretValue]) -> None: ...

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@
from openhands.sdk.hooks import HookConfig, HookEventProcessor, create_hook_callback
from openhands.sdk.io import LocalFileStore
from openhands.sdk.llm import LLM, Message, TextContent
from openhands.sdk.llm.exceptions import LLMCancelledError
from openhands.sdk.llm.llm_profile_store import LLMProfileStore
from openhands.sdk.llm.llm_registry import LLMRegistry
from openhands.sdk.logger import get_logger
Expand Down Expand Up @@ -680,6 +681,10 @@ def run(self) -> None:
)
)
break
except LLMCancelledError:
# LLM call was cancelled via interrupt() - this is not an error
# Status is already set to PAUSED by interrupt()
logger.info("Agent step cancelled by interrupt")
except Exception as e:
self._state.execution_status = ConversationExecutionStatus.ERROR

Expand Down Expand Up @@ -759,6 +764,36 @@ def pause(self) -> None:
self._on_event(pause_event)
logger.info("Agent execution pause requested")

def interrupt(self) -> None:
    """Interrupt the agent immediately, cancelling any in-flight LLM calls.

    Unlike pause(), which waits for the current step to complete,
    interrupt() cancels ongoing LLM calls immediately:

    - Streaming calls: Cancelled at the next chunk boundary (immediate)
    - Non-streaming calls: The async task is cancelled, closing the HTTP connection

    This method is thread-safe and can be called from any thread.
    After interruption, the conversation status is set to PAUSED.
    """
    # Deferred import — presumably to avoid an import cycle with the
    # event module; TODO confirm.
    from openhands.sdk.event.user_action import InterruptEvent

    # Cancel all LLMs first (main agent LLM + any in registry) so any
    # in-flight request is aborted before the status flip below.
    self.agent.llm.cancel()
    for llm in self.llm_registry.usage_to_llm.values():
        llm.cancel()

    # Set paused status — only from states where the agent could be active;
    # terminal states (e.g. ERROR) are deliberately left untouched.
    with self._state:
        if self._state.execution_status in [
            ConversationExecutionStatus.IDLE,
            ConversationExecutionStatus.RUNNING,
        ]:
            self._state.execution_status = ConversationExecutionStatus.PAUSED

    # NOTE(review): indentation reconstructed from a flattened diff view —
    # confirm the event emission sits outside the state lock, mirroring pause().
    interrupt_event = InterruptEvent()
    self._on_event(interrupt_event)
    logger.info("Agent execution interrupted")

def update_secrets(self, secrets: Mapping[str, SecretValue]) -> None:
"""Add secrets to the conversation's secret registry.

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -1119,6 +1119,14 @@ def reject_pending_actions(self, reason: str = "User rejected the action") -> No
def pause(self) -> None:
_send_request(self._client, "POST", f"/api/conversations/{self._id}/pause")

def interrupt(self) -> None:
    """Interrupt the conversation immediately, cancelling any in-flight LLM calls.

    Remote conversations delegate to the server: a POST to the
    conversation's interrupt endpoint cancels the current operation
    there and moves the conversation status to paused.
    """
    endpoint = f"/api/conversations/{self._id}/interrupt"
    _send_request(self._client, "POST", endpoint)

def update_secrets(self, secrets: Mapping[str, SecretValue]) -> None:
# Convert SecretValue to strings for JSON serialization
# SecretValue can be str or callable, we need to handle both
Expand Down
3 changes: 2 additions & 1 deletion openhands-sdk/openhands/sdk/event/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,7 +19,7 @@
)
from openhands.sdk.event.token import TokenEvent
from openhands.sdk.event.types import EventID, ToolCallID
from openhands.sdk.event.user_action import PauseEvent
from openhands.sdk.event.user_action import InterruptEvent, PauseEvent


__all__ = [
Expand All @@ -36,6 +36,7 @@
"UserRejectObservation",
"RejectionSource",
"PauseEvent",
"InterruptEvent",
"Condensation",
"CondensationRequest",
"CondensationSummaryEvent",
Expand Down
25 changes: 25 additions & 0 deletions openhands-sdk/openhands/sdk/event/user_action.py
Original file line number Diff line number Diff line change
Expand Up @@ -19,3 +19,28 @@ def visualize(self) -> Text:
def __str__(self) -> str:
"""Plain text string representation for PauseEvent."""
return f"{self.__class__.__name__} ({self.source}): Agent execution paused"


class InterruptEvent(Event):
    """Event recording that agent execution was interrupted.

    Where PauseEvent means execution stops after the in-progress step,
    InterruptEvent means an in-flight LLM call was cancelled, so execution
    stopped immediately rather than at a step boundary.
    """

    source: SourceType = "user"
    reason: str = "User requested interrupt"

    @property
    def visualize(self) -> Text:
        """Return a Rich Text rendering of this interrupt event."""
        rendered = Text()
        rendered.append("Conversation Interrupted", style="bold red")
        # Show the reason only when a caller supplied a non-default one.
        if self.reason != "User requested interrupt":
            rendered.append(f" - {self.reason}", style="dim")
        return rendered

    def __str__(self) -> str:
        """Plain text representation, e.g. for logging."""
        return f"{type(self).__name__} ({self.source}): {self.reason}"
2 changes: 2 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,7 @@
FunctionCallValidationError,
LLMAuthenticationError,
LLMBadRequestError,
LLMCancelledError,
LLMContextWindowExceedError,
LLMContextWindowTooSmallError,
LLMError,
Expand Down Expand Up @@ -38,6 +39,7 @@
"LLMTimeoutError",
"LLMServiceUnavailableError",
"LLMBadRequestError",
"LLMCancelledError",
"UserCancelledError",
"OperationCancelled",
# Helpers
Expand Down
14 changes: 14 additions & 0 deletions openhands-sdk/openhands/sdk/llm/exceptions/types.py
Original file line number Diff line number Diff line change
Expand Up @@ -128,3 +128,17 @@ def __init__(self, message: str = "User cancelled the request") -> None:
class OperationCancelled(Exception):
    """Signals that an in-progress operation was cancelled."""

    def __init__(self, message: str = "Operation was cancelled") -> None:
        # Delegate to Exception so str(exc) and .args behave normally.
        super().__init__(message)


class LLMCancelledError(Exception):
    """Raised when an in-flight LLM call is cancelled by user interrupt.

    `LLM.cancel()` triggers this during an active request: streaming calls
    are cancelled at the next chunk boundary (effectively immediately), while
    non-streaming calls have their async task cancelled, which closes the
    underlying HTTP connection.
    """

    def __init__(self, message: str = "LLM call was cancelled") -> None:
        # Expose the text both via Exception.args (for str()) and as an
        # explicit .message attribute for callers that read it directly.
        self.message = message
        super().__init__(message)
Loading
Loading