tenseleyflow/loader / f729e7f

Browse files

feat: add slash commands for /exit, /model, /help, /clear

- /help or /h - show available commands
- /exit or /q - exit the application
- /clear or /c - clear conversation
- /model or /m - list models or switch (/model llama3.2:3b)
- /models - list available Ollama models
- Shows current model with green dot, others with dim dot
- Updates status line when model changes
- Auto-detects Native vs ReAct mode for new model
Authored by espadonne
SHA
f729e7f8adecb5b2685ab9a1906594c53033c329
Parents
19f8bdf
Tree
1a2df4b

1 changed file

Status | File | +/-
M src/loader/ui/app.py 103 2
src/loader/ui/app.py — modified
@@ -102,6 +102,9 @@ class LoaderApp(App):
102102
             "[dim]Type a message to get started. "
103103
             "Press Ctrl+C to quit, Ctrl+L to clear.[/dim]"
104104
         )
105
+        self._add_message(
106
+            "[dim]Commands: /help, /model, /clear, /exit[/dim]"
107
+        )
105108
 
106109
     def _add_message(self, content: str, classes: str = "") -> None:
107110
         """Add a message to the message area."""
@@ -136,8 +139,13 @@ class LoaderApp(App):
136139
         """Handle user input submission."""
137140
         user_input = message.value
138141
 
139
-        # Handle special commands
140
-        if user_input.lower() == "exit":
142
+        # Handle slash commands
143
+        if user_input.startswith("/"):
144
+            self._handle_command(user_input)
145
+            return
146
+
147
+        # Handle legacy commands (without slash) for backwards compat
148
+        if user_input.lower() in ("exit", "quit"):
141149
             self.exit()
142150
             return
143151
 
@@ -173,6 +181,99 @@ class LoaderApp(App):
173181
             "steering-message"
174182
         )
175183
 
184
+    def _handle_command(self, command: str) -> None:
185
+        """Handle slash commands."""
186
+        parts = command[1:].split(maxsplit=1)  # Remove leading /
187
+        cmd = parts[0].lower() if parts else ""
188
+        args = parts[1] if len(parts) > 1 else ""
189
+
190
+        if cmd in ("exit", "quit", "q"):
191
+            self._add_message("[dim]Goodbye![/dim]")
192
+            self.exit()
193
+
194
+        elif cmd in ("clear", "c"):
195
+            self.action_clear_messages()
196
+
197
+        elif cmd in ("help", "h", "?"):
198
+            self._show_help()
199
+
200
+        elif cmd in ("model", "m"):
201
+            self._handle_model_command(args)
202
+
203
+        elif cmd == "models":
204
+            self._handle_model_command("")  # List models
205
+
206
+        else:
207
+            self._add_message(f"[red]Unknown command: /{cmd}[/red]\nType /help for available commands.")
208
+
209
def _show_help(self) -> None:
    """Show help message with available commands.

    Posts a static Rich-markup cheat sheet of the slash commands and
    keyboard shortcuts to the message area. The text is a single
    pre-formatted literal; column alignment is done with hard spaces.
    """
    help_text = """[bold]Available Commands:[/bold]

[cyan]/help[/cyan], [cyan]/h[/cyan]        Show this help message
[cyan]/exit[/cyan], [cyan]/q[/cyan]        Exit the application
[cyan]/clear[/cyan], [cyan]/c[/cyan]       Clear the conversation
[cyan]/model[/cyan] [dim]<name>[/dim]   Switch to a different model
[cyan]/models[/cyan]         List available models

[bold]Shortcuts:[/bold]
[dim]Ctrl+C[/dim]          Exit
[dim]Ctrl+L[/dim]          Clear conversation"""
    self._add_message(help_text)
224
+    def _handle_model_command(self, args: str) -> None:
225
+        """Handle /model command - switch or list models."""
226
+        if not args:
227
+            # List available models
228
+            self._list_models()
229
+        else:
230
+            # Switch to specified model
231
+            self._switch_model(args.strip())
232
+
233
+    def _list_models(self) -> None:
234
+        """List available Ollama models."""
235
+        import asyncio
236
+
237
+        async def fetch_models():
238
+            if hasattr(self.agent.backend, "list_models"):
239
+                return await self.agent.backend.list_models()
240
+            return []
241
+
242
+        try:
243
+            models = asyncio.get_event_loop().run_until_complete(fetch_models())
244
+            if models:
245
+                lines = ["[bold]Available Models:[/bold]", ""]
246
+                current = self.agent.backend.model if hasattr(self.agent.backend, "model") else ""
247
+                for m in models:
248
+                    name = m.get("name", "")
249
+                    size_mb = m.get("size", 0) / (1024 * 1024)
250
+                    marker = "[green]●[/green]" if name == current else "[dim]○[/dim]"
251
+                    lines.append(f"  {marker} [cyan]{name}[/cyan] [dim]({size_mb:.0f}MB)[/dim]")
252
+                lines.append("")
253
+                lines.append("[dim]Use /model <name> to switch[/dim]")
254
+                self._add_message("\n".join(lines))
255
+            else:
256
+                self._add_message("[yellow]No models found. Is Ollama running?[/yellow]")
257
+        except Exception as e:
258
+            self._add_message(f"[red]Error listing models: {e}[/red]")
259
+
260
+    def _switch_model(self, model_name: str) -> None:
261
+        """Switch to a different model."""
262
+        if hasattr(self.agent.backend, "model"):
263
+            old_model = self.agent.backend.model
264
+            self.agent.backend.model = model_name
265
+            self.model_name = model_name
266
+            # Update status line
267
+            self.query_one(StatusLine).model = model_name
268
+            # Update mode based on new model's capabilities
269
+            if hasattr(self.agent.backend, "supports_native_tools"):
270
+                supports_native = self.agent.backend.supports_native_tools()
271
+                self.mode = "Native" if supports_native else "ReAct"
272
+                self.query_one(StatusLine).mode = self.mode
273
+            self._add_message(f"[green]Switched model:[/green] {old_model} → [bold]{model_name}[/bold]")
274
+        else:
275
+            self._add_message("[red]Model switching not supported for this backend[/red]")
276
+
176277
     async def _request_confirmation(
177278
         self,
178279
         tool_name: str,