tenseleyflow/loader / 916d43e

Browse files

Add direct tests for runtime chat lane

Authored by espadonne
SHA
916d43e475c65b5c3e33b8d0d7f65a3c33e10d3c
Parents
5b9c14f
Tree
1ee956e

1 changed file

StatusFile+-
A tests/test_chat_lane.py 80 0
tests/test_chat_lane.py (added)
@@ -0,0 +1,80 @@
1
+"""Tests for the runtime-owned conversational fast path."""
2
+
3
+from __future__ import annotations
4
+
5
+from pathlib import Path
6
+
7
+import pytest
8
+
9
+from loader.agent.loop import Agent, AgentConfig
10
+from loader.llm.base import StreamChunk
11
+from loader.runtime.chat_lane import CHAT_SYSTEM_PROMPT, ConversationalTurnRunner
12
+from loader.runtime.launcher import build_runtime_launcher
13
+from tests.helpers.runtime_harness import ScriptedBackend
14
+
15
+
16
@pytest.mark.asyncio
async def test_conversational_turn_runner_streams_and_persists_history(
    temp_dir: Path,
) -> None:
    """Chat-lane runner streams chunks, emits a final response event, and
    persists both the user turn and the assistant reply in session history."""
    scripted_chunks = [
        StreamChunk(content="Hello ", is_done=False),
        StreamChunk(content="back.", full_content="Hello back.", is_done=True),
    ]
    agent = Agent(
        backend=ScriptedBackend(streams=[scripted_chunks]),
        config=AgentConfig(auto_context=False),
        project_root=temp_dir,
    )

    captured = []

    async def capture(event) -> None:
        captured.append(event)

    reply = await ConversationalTurnRunner(agent).run("hello there", capture)

    assert reply == "Hello back."
    # One thinking event, one stream event per chunk, then the final response.
    assert [e.type for e in captured] == ["thinking", "stream", "stream", "response"]
    # Both sides of the exchange land in session history, user turn first.
    assert agent.session.messages[-2].content == "hello there"
    assert agent.session.messages[-1].content == "Hello back."
    # The chat lane injects its own system prompt as the first backend message.
    assert agent.backend.invocations[0].messages[0].content == CHAT_SYSTEM_PROMPT
49
+
50
+
51
@pytest.mark.asyncio
async def test_runtime_launcher_runs_conversational_fast_path(
    temp_dir: Path,
) -> None:
    """The runtime launcher's conversational entry point returns the streamed
    reply and emits a matching response event."""
    scripted_chunks = [
        StreamChunk(content="Quick ", is_done=False),
        StreamChunk(content="reply.", full_content="Quick reply.", is_done=True),
    ]
    agent = Agent(
        backend=ScriptedBackend(streams=[scripted_chunks]),
        config=AgentConfig(auto_context=False),
        project_root=temp_dir,
    )
    launcher = build_runtime_launcher(agent)

    captured = []

    async def capture(event) -> None:
        captured.append(event)

    reply = await launcher.run_conversational("thanks", capture)

    assert reply == "Quick reply."
    # At least one emitted event is the final response carrying the full text.
    assert any(
        e.type == "response" and e.content == "Quick reply." for e in captured
    )