package config
import (
	"fmt"
	"os"
	"path/filepath"
	"strconv"

	"github.com/BurntSushi/toml"
)
10
// Config is the top-level application configuration. It is decoded from a
// TOML file (see LoadConfig) and individual fields may then be overridden
// by PARROT_* environment variables (see loadFromEnv).
type Config struct {
	// API Configuration (Primary backend)
	API APIConfig `toml:"api"`

	// Local LLM Configuration (Secondary backend)
	Local LocalConfig `toml:"local"`

	// General Settings
	General GeneralConfig `toml:"general"`
}
21
// APIConfig configures the hosted-API generation backend (the primary
// backend per the Config field comments). Maps to the [api] TOML table.
type APIConfig struct {
	Enabled  bool   `toml:"enabled"`
	Provider string `toml:"provider"` // "openai", "anthropic", "custom"
	Endpoint string `toml:"endpoint"` // Custom endpoint URL
	APIKey   string `toml:"api_key"`  // API key (empty by default; must be set by the user)
	Model    string `toml:"model"`    // Model name
	Timeout  int    `toml:"timeout"`  // Request timeout in seconds
}
30
// LocalConfig configures the local LLM backend (the secondary backend per
// the Config field comments). Maps to the [local] TOML table.
type LocalConfig struct {
	Enabled  bool   `toml:"enabled"`
	Provider string `toml:"provider"` // "ollama"
	Endpoint string `toml:"endpoint"` // Ollama endpoint
	Model    string `toml:"model"`    // Model name
	Timeout  int    `toml:"timeout"`  // Request timeout in seconds
}
38
// GeneralConfig holds backend-independent behavior and output settings.
// Maps to the [general] TOML table.
type GeneralConfig struct {
	Personality    string `toml:"personality"`     // "savage", "sarcastic", "mild"
	GenerationMode string `toml:"generation_mode"` // "snappy" (fast) or "spicy" (quality)
	FallbackMode   bool   `toml:"fallback_mode"`   // Use hardcoded responses only
	Debug          bool   `toml:"debug"`           // Debug logging
	Colors         bool   `toml:"colors"`          // Enable colored output
	Enhanced       bool   `toml:"enhanced"`        // Enhanced formatting with borders/emphasis
}
47
48 // Default configuration
49 func DefaultConfig() *Config {
50 return &Config{
51 API: APIConfig{
52 Enabled: true,
53 Provider: "openai",
54 Endpoint: "https://api.openai.com/v1",
55 APIKey: "", // Must be set by user
56 Model: "gpt-3.5-turbo",
57 Timeout: 3, // Reduced from 10 to 3 seconds for responsiveness
58 },
59 Local: LocalConfig{
60 Enabled: true,
61 Provider: "ollama",
62 Endpoint: "http://127.0.0.1:11434",
63 Model: "llama3.2:3b",
64 Timeout: 5, // 5 seconds with optimized generation options should be plenty
65 },
66 General: GeneralConfig{
67 Personality: "savage",
68 GenerationMode: "snappy", // Default to fast mode
69 FallbackMode: false,
70 Debug: false,
71 Colors: true,
72 Enhanced: false,
73 },
74 }
75 }
76
// GetConfigPaths returns candidate config file locations in order of
// preference: explicit PARROT_CONFIG override, system-wide file, user
// config directory, home-directory dotfile, then the working directory
// (development convenience). Only the list is built here; existence is
// checked by the caller.
func GetConfigPaths() []string {
	paths := make([]string, 0, 5)

	// 0. Environment-specified config path (highest priority).
	if override := os.Getenv("PARROT_CONFIG"); override != "" {
		paths = append(paths, override)
	}

	// 1. System-wide config (for RPM installs).
	paths = append(paths, "/etc/parrot/config.toml")

	// 2. Per-user config directory (errors mean no such directory; skip).
	if dir, err := os.UserConfigDir(); err == nil {
		paths = append(paths, filepath.Join(dir, "parrot", "config.toml"))
	}

	// 3. Dotfile in the home directory.
	if home, err := os.UserHomeDir(); err == nil {
		paths = append(paths, filepath.Join(home, ".parrot.toml"))
	}

	// 4. Current directory (for development).
	return append(paths, "./parrot.toml")
}
104
105 // Load configuration from first available config file
106 func LoadConfig() (*Config, error) {
107 config := DefaultConfig()
108
109 // Try to load from config files
110 for _, path := range GetConfigPaths() {
111 if _, err := os.Stat(path); err == nil {
112 if err := loadFromFile(config, path); err != nil {
113 return nil, fmt.Errorf("error loading config from %s: %w", path, err)
114 }
115 break
116 }
117 }
118
119 // Override with environment variables
120 loadFromEnv(config)
121
122 return config, nil
123 }
124
125 func loadFromFile(config *Config, path string) error {
126 _, err := toml.DecodeFile(path, config)
127 return err
128 }
129
130 func loadFromEnv(config *Config) {
131 // API configuration from environment
132 if key := os.Getenv("PARROT_API_KEY"); key != "" {
133 config.API.APIKey = key
134 }
135 if endpoint := os.Getenv("PARROT_API_ENDPOINT"); endpoint != "" {
136 config.API.Endpoint = endpoint
137 }
138 if model := os.Getenv("PARROT_API_MODEL"); model != "" {
139 config.API.Model = model
140 }
141
142 // Local configuration from environment
143 if endpoint := os.Getenv("PARROT_OLLAMA_ENDPOINT"); endpoint != "" {
144 config.Local.Endpoint = endpoint
145 }
146 if model := os.Getenv("PARROT_OLLAMA_MODEL"); model != "" {
147 config.Local.Model = model
148 }
149
150 // General configuration
151 if personality := os.Getenv("PARROT_PERSONALITY"); personality != "" {
152 config.General.Personality = personality
153 }
154 if mode := os.Getenv("PARROT_MODE"); mode != "" {
155 config.General.GenerationMode = mode
156 }
157 if os.Getenv("PARROT_FALLBACK_ONLY") == "true" {
158 config.General.FallbackMode = true
159 }
160 if os.Getenv("PARROT_DEBUG") == "true" {
161 config.General.Debug = true
162 }
163 if os.Getenv("PARROT_NO_COLOR") == "true" || os.Getenv("NO_COLOR") != "" {
164 config.General.Colors = false
165 }
166 if os.Getenv("PARROT_ENHANCED") == "true" {
167 config.General.Enhanced = true
168 }
169 }
170
171 // Create a sample config file
172 func CreateSampleConfig(path string) error {
173 config := DefaultConfig()
174
175 // Ensure directory exists
176 if err := os.MkdirAll(filepath.Dir(path), 0755); err != nil {
177 return fmt.Errorf("failed to create config directory: %w", err)
178 }
179
180 file, err := os.Create(path)
181 if err != nil {
182 return fmt.Errorf("failed to create config file: %w", err)
183 }
184 defer file.Close()
185
186 encoder := toml.NewEncoder(file)
187 if err := encoder.Encode(config); err != nil {
188 return fmt.Errorf("failed to encode config: %w", err)
189 }
190
191 return nil
192 }