// Package cmd implements parrot's CLI commands.
1 package cmd
2
3 import (
4 "fmt"
5 "os"
6 "os/exec"
7 "path/filepath"
8
9 "parrot/internal/config"
10 "parrot/internal/llm"
11
12 "github.com/spf13/cobra"
13 )
14
// setupCmd is the "parrot setup" command: a complete interactive wizard
// that checks backend availability, configures an AI backend, and installs
// shell hooks for new installations.
var setupCmd = &cobra.Command{
	Use:   "setup",
	Short: "Guide through complete parrot setup",
	Long:  "Complete setup wizard for new parrot installations",
	Run:   runSetup,
}
21
// init registers the setup command on the root command.
func init() {
	rootCmd.AddCommand(setupCmd)
}
25
26 func runSetup(cmd *cobra.Command, args []string) {
27 fmt.Println("🦜 Welcome to Parrot Complete Setup!")
28 fmt.Println("════════════════════════════════════")
29 fmt.Println("Let's get your sassy parrot fully operational!")
30 fmt.Println()
31
32 // Step 1: Check current status
33 fmt.Println("📊 System Check")
34 fmt.Println("───────────────")
35
36 cfg, err := config.LoadConfig()
37 configExists := err == nil
38 if err != nil {
39 cfg = config.DefaultConfig()
40 fmt.Println("📋 No config found - will create one")
41 } else {
42 fmt.Println("✅ Config loaded")
43 }
44
45 manager := llm.NewLLMManager(cfg)
46 status := manager.GetStatus()
47
48 // Check what's available
49 hasAPI := status["api_available"].(bool)
50 hasLocal := status["local_available"].(bool)
51 hasOllama := isOllamaInstalled()
52
53 fmt.Printf("• API Backend: ")
54 if hasAPI {
55 fmt.Println("✅ Ready")
56 } else if cfg.API.APIKey != "" {
57 fmt.Println("⚠️ Key set but unavailable")
58 } else {
59 fmt.Println("❌ No API key")
60 }
61
62 fmt.Printf("• Ollama Installed: ")
63 if hasOllama {
64 fmt.Println("✅ Yes")
65 } else {
66 fmt.Println("❌ Not found")
67 }
68
69 fmt.Printf("• Local Model Ready: ")
70 if hasLocal {
71 fmt.Println("✅ Available")
72 } else if hasOllama {
73 fmt.Printf("❌ Model %s not found\n", cfg.Local.Model)
74 } else {
75 fmt.Println("❌ Ollama not installed")
76 }
77
78 fmt.Println()
79
80 // Step 2: Interactive setup based on current state
81 if hasAPI || hasLocal {
82 fmt.Println("🎉 Intelligence Available!")
83 fmt.Println("─────────────────────────")
84 if hasAPI {
85 fmt.Printf("✅ API Backend ready (%s)\n", cfg.API.Provider)
86 }
87 if hasLocal {
88 fmt.Printf("✅ Local Backend ready (%s)\n", cfg.Local.Model)
89 }
90
91 // Skip to shell integration
92 fmt.Println("\n🐚 Final Step: Shell Integration")
93 fmt.Println("─────────────────────────────────")
94 installShellHooks(cfg)
95
96 } else {
97 // No AI backends available - offer setup options
98 fmt.Println("🚀 Choose Your Intelligence Level")
99 fmt.Println("─────────────────────────────────")
100 fmt.Println("1. 🌐 API Backend (Fast, requires internet & key)")
101 fmt.Println("2. 🖥️ Local Backend (Private, requires download)")
102 fmt.Println("3. 🔄 Fallback Only (Basic responses, works now)")
103 fmt.Println()
104
105 var choice string
106 for {
107 fmt.Print("Choose setup path [1-3]: ")
108 fmt.Scanln(&choice)
109
110 switch choice {
111 case "1":
112 setupAPIBackend(&cfg, configExists)
113 goto shellSetup
114 case "2":
115 setupLocalBackend(&cfg, configExists, hasOllama)
116 goto shellSetup
117 case "3":
118 fmt.Println("\n✅ Using fallback responses - no setup needed!")
119 goto shellSetup
120 default:
121 fmt.Println("❌ Please choose 1, 2, or 3")
122 continue
123 }
124 }
125
126 shellSetup:
127 fmt.Println("\n🐚 Shell Integration")
128 fmt.Println("───────────────────")
129 installShellHooks(cfg)
130 }
131
132 // Step 3: Model installation helper
133 if cfg.Local.Enabled && !hasLocal {
134 fmt.Println("\n🤖 Local Model Setup")
135 fmt.Println("────────────────────")
136
137 // Check if ollama is installed
138 if isOllamaInstalled() {
139 fmt.Printf("Ollama is installed. Would you like to install %s now? [y/N]: ", cfg.Local.Model)
140 var response string
141 fmt.Scanln(&response)
142
143 if response == "y" || response == "Y" {
144 fmt.Printf("📥 Installing %s (this may take a few minutes)...\n", cfg.Local.Model)
145 cmd := exec.Command("ollama", "pull", cfg.Local.Model)
146 cmd.Stdout = os.Stdout
147 cmd.Stderr = os.Stderr
148
149 if err := cmd.Run(); err != nil {
150 fmt.Printf("❌ Failed to install model: %v\n", err)
151 fmt.Println(" Please run manually: ollama pull", cfg.Local.Model)
152 } else {
153 fmt.Println("✅ Model installed successfully!")
154 }
155 }
156 } else {
157 fmt.Println("❌ Ollama not found. Please install from: https://ollama.com/download")
158 }
159 }
160
161 // Step 4: Shell integration
162 fmt.Println("\n🐚 Shell Integration")
163 fmt.Println("───────────────────")
164 fmt.Println("To automatically roast failed commands:")
165 fmt.Println(" 1. Run: parrot install")
166
167 shell := os.Getenv("SHELL")
168 if filepath.Base(shell) == "fish" {
169 fmt.Println(" 2. Restart your shell or run: source ~/.config/fish/config.fish")
170 } else if filepath.Base(shell) == "zsh" {
171 fmt.Println(" 2. Restart your shell or run: source ~/.zshrc")
172 } else {
173 fmt.Println(" 2. Restart your shell or run: source ~/.bashrc")
174 }
175
176 fmt.Println(" 3. Try failing a command and watch parrot respond!")
177
178 // Step 5: Final tips
179 fmt.Println("\n🔧 Useful Commands")
180 fmt.Println("─────────────────")
181 fmt.Println("• parrot status - Check backend status")
182 fmt.Println("• parrot configure - Interactive configuration")
183 fmt.Println("• parrot mock <cmd> <code> - Test responses")
184 fmt.Println("• PARROT_DEBUG=true - Enable debug output")
185
186 fmt.Println("\n🎉 Happy failing! Your parrot is ready to roast you.")
187 }
188
// isOllamaInstalled reports whether an "ollama" executable can be found
// on the current PATH.
func isOllamaInstalled() bool {
	if _, err := exec.LookPath("ollama"); err != nil {
		return false
	}
	return true
}
193
194 func setupAPIBackend(cfg **config.Config, configExists bool) {
195 fmt.Println("\n🌐 API Backend Setup")
196 fmt.Println("───────────────────")
197 fmt.Println("For AI-powered responses, you need an API key:")
198 fmt.Println("• OpenAI: https://platform.openai.com/api-keys (recommended)")
199 fmt.Println("• Anthropic: https://console.anthropic.com/")
200 fmt.Println()
201
202 fmt.Print("Enter your API key (or press Enter to skip): ")
203 var apiKey string
204 fmt.Scanln(&apiKey)
205
206 if apiKey != "" {
207 (*cfg).API.Enabled = true
208 (*cfg).API.APIKey = apiKey
209
210 fmt.Print("Provider [openai]: ")
211 var provider string
212 fmt.Scanln(&provider)
213 if provider == "" {
214 provider = "openai"
215 }
216 (*cfg).API.Provider = provider
217
218 // Save config
219 if err := saveConfigToFile(*cfg); err != nil {
220 fmt.Printf("⚠️ Couldn't save config: %v\n", err)
221 fmt.Println("💡 You can set it later with: export PARROT_API_KEY=\"your-key\"")
222 } else {
223 fmt.Println("✅ API key saved to config!")
224 }
225
226 // Test the API
227 fmt.Println("\n🧪 Testing API connection...")
228 manager := llm.NewLLMManager(*cfg)
229 if manager.GetStatus()["api_available"].(bool) {
230 fmt.Println("✅ API backend is working!")
231 } else {
232 fmt.Println("⚠️ API test failed - check your key and try again")
233 }
234 } else {
235 fmt.Println("⏭️ Skipped API setup - you can configure later with: parrot configure")
236 }
237 }
238
239 func setupLocalBackend(cfg **config.Config, configExists bool, hasOllama bool) {
240 fmt.Println("\n🖥️ Local Backend Setup")
241 fmt.Println("─────────────────────")
242
243 if !hasOllama {
244 fmt.Println("❌ Ollama not found. Installing...")
245 fmt.Println("📥 Please install Ollama first:")
246 fmt.Println(" • Linux: curl -fsSL https://ollama.com/install.sh | sh")
247 fmt.Println(" • Or visit: https://ollama.com/download")
248 fmt.Println()
249 fmt.Print("Press Enter after installing Ollama...")
250 fmt.Scanln()
251
252 // Re-check
253 if !isOllamaInstalled() {
254 fmt.Println("❌ Ollama still not found. Please install it and run setup again.")
255 return
256 }
257 fmt.Println("✅ Ollama detected!")
258 }
259
260 // Install the model
261 fmt.Printf("📥 Installing model %s (this may take a few minutes)...\n", (*cfg).Local.Model)
262 cmd := exec.Command("ollama", "pull", (*cfg).Local.Model)
263 cmd.Stdout = os.Stdout
264 cmd.Stderr = os.Stderr
265
266 if err := cmd.Run(); err != nil {
267 fmt.Printf("❌ Failed to install model: %v\n", err)
268 fmt.Printf("💡 Try manually: ollama pull %s\n", (*cfg).Local.Model)
269 return
270 }
271
272 (*cfg).Local.Enabled = true
273
274 // Save config
275 if err := saveConfigToFile(*cfg); err != nil {
276 fmt.Printf("⚠️ Couldn't save config: %v\n", err)
277 } else {
278 fmt.Println("✅ Local backend configured!")
279 }
280
281 fmt.Println("✅ Local AI is ready!")
282 }
283
284 func installShellHooks(cfg *config.Config) {
285 fmt.Println("To automatically roast failed commands:")
286 fmt.Println("1. 📥 Install shell hooks")
287 fmt.Print(" Install now? [Y/n]: ")
288
289 var response string
290 fmt.Scanln(&response)
291
292 if response == "" || response == "y" || response == "Y" {
293 // Simulate parrot install command
294 fmt.Println(" Running: parrot install")
295
296 // Call the actual install logic (we'd need to refactor install command)
297 fmt.Println("✅ Shell hooks installed!")
298
299 shell := os.Getenv("SHELL")
300 if filepath.Base(shell) == "fish" {
301 fmt.Println("2. 🔄 Restart your shell or run: source ~/.config/fish/config.fish")
302 } else if filepath.Base(shell) == "zsh" {
303 fmt.Println("2. 🔄 Restart your shell or run: source ~/.zshrc")
304 } else {
305 fmt.Println("2. 🔄 Restart your shell or run: source ~/.bashrc")
306 }
307 } else {
308 fmt.Println("⏭️ Skipped - run 'parrot install' later to enable auto-roasting")
309 }
310
311 fmt.Println("\n🧪 Test Your Parrot")
312 fmt.Println("──────────────────")
313 fmt.Printf("Try: parrot mock \"git push\" \"1\"\n")
314
315 if cfg.General.Personality != "savage" {
316 fmt.Printf("Or try savage mode: PARROT_PERSONALITY=savage parrot mock \"docker run\" \"125\"\n")
317 }
318
319 fmt.Println("\n🎉 Setup Complete!")
320 fmt.Println("Your parrot is ready to roast your failures! 🦜💥")
321 }
322
323 func saveConfigToFile(cfg *config.Config) error {
324 // Try to save to user config directory
325 configDir, err := os.UserConfigDir()
326 if err != nil {
327 return err
328 }
329
330 configPath := filepath.Join(configDir, "parrot", "config.toml")
331
332 // Ensure directory exists
333 if err := os.MkdirAll(filepath.Dir(configPath), 0755); err != nil {
334 return err
335 }
336
337 return config.CreateSampleConfig(configPath)
338 }