Python · 2103 bytes Raw Blame History
1 """Direct unit coverage for REPL streaming helpers."""
2
3 from __future__ import annotations
4
5 from types import SimpleNamespace
6 from unittest.mock import patch
7
8 from dlm.repl.streaming import CaptureStreamer, build_streamer, concatenate_tokens, should_stream
9
10
def test_should_stream_tracks_stdout_tty_state() -> None:
    """should_stream mirrors whatever stdout's isatty() reports."""
    for tty, expected in ((True, True), (False, False)):
        fake_stdout = SimpleNamespace(isatty=lambda tty=tty: tty)
        with patch("sys.stdout", new=fake_stdout):
            assert should_stream() is expected
16
17
def test_should_stream_handles_broken_stdout() -> None:
    """A stdout missing isatty, or whose isatty raises, never enables streaming."""

    class NoIsatty:
        pass

    class ClosedStdout:
        @staticmethod
        def isatty() -> bool:
            raise ValueError("closed")

    for broken_stdout in (NoIsatty(), ClosedStdout()):
        with patch("sys.stdout", new=broken_stdout):
            assert should_stream() is False
31
32
def test_capture_streamer_is_noop_and_keeps_text_buffer() -> None:
    """put() and end() are no-ops: the text buffer stays empty afterwards."""
    capture = CaptureStreamer()
    capture.put(["ignored"])
    capture.end()
    assert capture.text == ""
38
39
def test_build_streamer_returns_capture_streamer_when_disabled() -> None:
    """With stdout streaming disabled, the factory falls back to CaptureStreamer."""
    streamer = build_streamer(object(), stream_to_stdout=False)
    assert isinstance(streamer, CaptureStreamer)
42
43
def test_build_streamer_wraps_transformers_text_streamer() -> None:
    """When streaming is enabled, the factory constructs transformers.TextStreamer
    with skip_prompt and skip_special_tokens both set to True."""
    recorded: list[tuple[object, bool, bool]] = []

    class FakeTextStreamer:
        def __init__(
            self, tokenizer: object, *, skip_prompt: bool, skip_special_tokens: bool
        ) -> None:
            recorded.append((tokenizer, skip_prompt, skip_special_tokens))

    stub_transformers = SimpleNamespace(TextStreamer=FakeTextStreamer)
    fake_tokenizer = object()

    # Inject the stub module so build_streamer's `import transformers` resolves to it.
    with patch.dict("sys.modules", {"transformers": stub_transformers}):
        result = build_streamer(fake_tokenizer, stream_to_stdout=True)

    assert isinstance(result, FakeTextStreamer)
    assert recorded == [(fake_tokenizer, True, True)]
61
62
def test_concatenate_tokens_joins_token_pieces() -> None:
    """Token pieces are concatenated in order with no separator inserted."""
    pieces = ["hello", " ", "world"]
    assert concatenate_tokens(pieces) == "hello world"