Commit 3978c6e

StreamableHttp -- resumability support for servers (#587)
1 parent 9dfc925 commit 3978c6e

File tree

5 files changed: +340 -55 lines changed

examples/servers/simple-streamablehttp/README.md

Lines changed: 19 additions & 1 deletion
@@ -9,6 +9,7 @@ A simple MCP server example demonstrating the StreamableHttp transport, which en
 - Task management with anyio task groups
 - Ability to send multiple notifications over time to the client
 - Proper resource cleanup and lifespan management
+- Resumability support via InMemoryEventStore

 ## Usage

@@ -32,6 +33,23 @@ The server exposes a tool named "start-notification-stream" that accepts three a
 - `count`: Number of notifications to send (e.g., 5)
 - `caller`: Identifier string for the caller

+## Resumability Support
+
+This server includes resumability support through the InMemoryEventStore. This enables clients to:
+
+- Reconnect to the server after a disconnection
+- Resume event streaming from where they left off using the Last-Event-ID header
+
+The server will:
+- Generate unique event IDs for each SSE message
+- Store events in memory for later replay
+- Replay missed events when a client reconnects with a Last-Event-ID header
+
+Note: The InMemoryEventStore is designed for demonstration purposes only. For production use, consider implementing a persistent storage solution.
+
 ## Client

-You can connect to this server using an HTTP client, for now only Typescript SDK has streamable HTTP client examples or you can use (Inspector)[https://github.com/modelcontextprotocol/inspector]
+You can connect to this server using an HTTP client; for now, only the TypeScript SDK has streamable HTTP client examples, or you can use [Inspector](https://github.com/modelcontextprotocol/inspector)
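
For readers following the resumability section above, a hypothetical client-side sketch (not part of the commit): a reconnecting client re-opens the SSE stream and sends the ID of the last event it saw in the Last-Event-ID header, and the server replays what was missed. The endpoint path /mcp, port 3000, and the mcp-session-id header name are assumptions about how this example server is deployed, made only for illustration.

import asyncio

import httpx


async def resume(session_id: str, last_event_id: str) -> None:
    # Re-open the SSE stream, telling the server where we left off.
    headers = {
        "Accept": "text/event-stream",
        "mcp-session-id": session_id,    # session established earlier (assumed header name)
        "Last-Event-ID": last_event_id,  # last SSE id received before the disconnect
    }
    async with httpx.AsyncClient(timeout=None) as client:
        async with client.stream(
            "GET", "http://localhost:3000/mcp", headers=headers
        ) as response:
            async for line in response.aiter_lines():
                # Replayed events arrive first, then the live stream continues.
                print(line)


asyncio.run(resume("your-session-id", "your-last-event-id"))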
examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/event_store.py

Lines changed: 105 additions & 0 deletions
@@ -0,0 +1,105 @@
"""
In-memory event store for demonstrating resumability functionality.

This is a simple implementation intended for examples and testing,
not for production use where a persistent storage solution would be more appropriate.
"""

import logging
from collections import deque
from dataclasses import dataclass
from uuid import uuid4

from mcp.server.streamable_http import (
    EventCallback,
    EventId,
    EventMessage,
    EventStore,
    StreamId,
)
from mcp.types import JSONRPCMessage

logger = logging.getLogger(__name__)


@dataclass
class EventEntry:
    """
    Represents an event entry in the event store.
    """

    event_id: EventId
    stream_id: StreamId
    message: JSONRPCMessage


class InMemoryEventStore(EventStore):
    """
    Simple in-memory implementation of the EventStore interface for resumability.
    This is primarily intended for examples and testing, not for production use
    where a persistent storage solution would be more appropriate.

    This implementation keeps only the last N events per stream for memory efficiency.
    """

    def __init__(self, max_events_per_stream: int = 100):
        """Initialize the event store.

        Args:
            max_events_per_stream: Maximum number of events to keep per stream
        """
        self.max_events_per_stream = max_events_per_stream
        # for maintaining last N events per stream
        self.streams: dict[StreamId, deque[EventEntry]] = {}
        # event_id -> EventEntry for quick lookup
        self.event_index: dict[EventId, EventEntry] = {}

    async def store_event(
        self, stream_id: StreamId, message: JSONRPCMessage
    ) -> EventId:
        """Stores an event with a generated event ID."""
        event_id = str(uuid4())
        event_entry = EventEntry(
            event_id=event_id, stream_id=stream_id, message=message
        )

        # Get or create deque for this stream
        if stream_id not in self.streams:
            self.streams[stream_id] = deque(maxlen=self.max_events_per_stream)

        # If deque is full, the oldest event will be automatically removed
        # We need to remove it from the event_index as well
        if len(self.streams[stream_id]) == self.max_events_per_stream:
            oldest_event = self.streams[stream_id][0]
            self.event_index.pop(oldest_event.event_id, None)

        # Add new event
        self.streams[stream_id].append(event_entry)
        self.event_index[event_id] = event_entry

        return event_id

    async def replay_events_after(
        self,
        last_event_id: EventId,
        send_callback: EventCallback,
    ) -> StreamId | None:
        """Replays events that occurred after the specified event ID."""
        if last_event_id not in self.event_index:
            logger.warning(f"Event ID {last_event_id} not found in store")
            return None

        # Get the stream and find events after the last one
        last_event = self.event_index[last_event_id]
        stream_id = last_event.stream_id
        stream_events = self.streams.get(last_event.stream_id, deque())

        # Events in deque are already in chronological order
        found_last = False
        for event in stream_events:
            if found_last:
                await send_callback(EventMessage(event.message, event.event_id))
            elif event.event_id == last_event_id:
                found_last = True

        return stream_id
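
A minimal usage sketch (not part of the commit) showing how this store behaves. It assumes JSONRPCMessage accepts a plain JSON-RPC notification dict via model_validate and that EventMessage exposes the event_id it is constructed with; the stream ID and payload are illustrative only.

import asyncio

from mcp.server.streamable_http import EventMessage
from mcp.types import JSONRPCMessage

from mcp_simple_streamablehttp.event_store import InMemoryEventStore


async def demo() -> None:
    store = InMemoryEventStore(max_events_per_stream=10)

    # Record three notifications on one stream and remember each event ID.
    event_ids = []
    for seq in range(3):
        message = JSONRPCMessage.model_validate(
            {"jsonrpc": "2.0", "method": "notifications/message", "params": {"seq": seq}}
        )
        event_ids.append(await store.store_event("stream-1", message))

    # A reconnecting client would send its last seen ID via Last-Event-ID;
    # everything stored after that ID is replayed through the callback.
    async def send(event: EventMessage) -> None:
        print("replaying", event.event_id)

    resumed = await store.replay_events_after(event_ids[0], send)
    print("resumed stream:", resumed)  # expected: stream-1


asyncio.run(demo())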

examples/servers/simple-streamablehttp/mcp_simple_streamablehttp/server.py

Lines changed: 21 additions & 1 deletion
@@ -17,12 +17,24 @@
 from starlette.responses import Response
 from starlette.routing import Mount

+from .event_store import InMemoryEventStore
+
 # Configure logging
 logger = logging.getLogger(__name__)

 # Global task group that will be initialized in the lifespan
 task_group = None

+# Event store for resumability
+# The InMemoryEventStore enables resumability support for StreamableHTTP transport.
+# It stores SSE events with unique IDs, allowing clients to:
+# 1. Receive event IDs for each SSE message
+# 2. Resume streams by sending Last-Event-ID in GET requests
+# 3. Replay missed events after reconnection
+# Note: This in-memory implementation is for demonstration ONLY.
+# For production, use a persistent storage solution.
+event_store = InMemoryEventStore()
+

 @contextlib.asynccontextmanager
 async def lifespan(app):
@@ -79,9 +91,14 @@ async def call_tool(

     # Send the specified number of notifications with the given interval
     for i in range(count):
+        # Include more detailed message for resumability demonstration
+        notification_msg = (
+            f"[{i+1}/{count}] Event from '{caller}' - "
+            f"Use Last-Event-ID to resume if disconnected"
+        )
         await ctx.session.send_log_message(
             level="info",
-            data=f"Notification {i+1}/{count} from caller: {caller}",
+            data=notification_msg,
             logger="notification_stream",
             # Associates this notification with the original request
             # Ensures notifications are sent to the correct response stream
@@ -90,6 +107,7 @@ async def call_tool(
             # - nowhere (if GET request isn't supported)
             related_request_id=ctx.request_id,
         )
+        logger.debug(f"Sent notification {i+1}/{count} for caller: {caller}")
         if i < count - 1:  # Don't wait after the last notification
             await anyio.sleep(interval)

@@ -163,8 +181,10 @@ async def handle_streamable_http(scope, receive, send):
         http_transport = StreamableHTTPServerTransport(
             mcp_session_id=new_session_id,
             is_json_response_enabled=json_response,
+            event_store=event_store,  # Enable resumability
         )
         server_instances[http_transport.mcp_session_id] = http_transport
+        logger.info(f"Created new transport with session ID: {new_session_id}")
         async with http_transport.connect() as streams:
             read_stream, write_stream = streams

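
The commit repeatedly notes that InMemoryEventStore is for demonstration only and that production deployments should use persistent storage. As a rough illustration of what that could mean, here is a hypothetical sketch of the same EventStore interface backed by SQLite; the table layout, serialization via model_dump_json/model_validate_json, and the blocking use of sqlite3 inside async methods are assumptions made for brevity, not part of the SDK or this commit.

import sqlite3
from uuid import uuid4

from mcp.server.streamable_http import (
    EventCallback,
    EventId,
    EventMessage,
    EventStore,
    StreamId,
)
from mcp.types import JSONRPCMessage


class SQLiteEventStore(EventStore):
    """Hypothetical persistent variant of the example's in-memory store."""

    def __init__(self, path: str = "events.db"):
        self.conn = sqlite3.connect(path)
        self.conn.execute(
            "CREATE TABLE IF NOT EXISTS events ("
            " seq INTEGER PRIMARY KEY AUTOINCREMENT,"
            " event_id TEXT UNIQUE,"
            " stream_id TEXT,"
            " message TEXT)"
        )

    async def store_event(
        self, stream_id: StreamId, message: JSONRPCMessage
    ) -> EventId:
        # Same contract as InMemoryEventStore.store_event, but durable.
        event_id = str(uuid4())
        self.conn.execute(
            "INSERT INTO events (event_id, stream_id, message) VALUES (?, ?, ?)",
            (event_id, stream_id, message.model_dump_json()),
        )
        self.conn.commit()
        return event_id

    async def replay_events_after(
        self,
        last_event_id: EventId,
        send_callback: EventCallback,
    ) -> StreamId | None:
        row = self.conn.execute(
            "SELECT seq, stream_id FROM events WHERE event_id = ?",
            (last_event_id,),
        ).fetchone()
        if row is None:
            return None
        last_seq, stream_id = row
        # Replay everything recorded after the client's last seen event.
        for event_id, message_json in self.conn.execute(
            "SELECT event_id, message FROM events"
            " WHERE stream_id = ? AND seq > ? ORDER BY seq",
            (stream_id, last_seq),
        ):
            message = JSONRPCMessage.model_validate_json(message_json)
            await send_callback(EventMessage(message, event_id))
        return stream_id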

0 commit comments