# LooseCannon Configuration

# Server port (default: 8765)
PORT=8765

# Ollama API URL (default: http://localhost:11434)
OLLAMA_URL=http://localhost:11434

# Ollama model to use (default: llama2)
# You can use any model available in Ollama: llama2, mistral, codellama, etc.
# Run 'ollama list' to see available models
OLLAMA_MODEL=llama2

# Debug mode (shows extra logging)
DEBUG=false