1 // LooseCannon Local Server
2 // Handles communication between browser extension and Ollama LLM
3
4 const express = require('express');
5 const cors = require('cors');
6 const axios = require('axios');
7 const fs = require('fs').promises;
8 const path = require('path');
9 require('dotenv').config();
10
const app = express();

// Port the browser extension connects to; overridable via .env.
const PORT = process.env.PORT || 8765;
// Base URL of the local Ollama HTTP API.
const OLLAMA_URL = process.env.OLLAMA_URL || 'http://localhost:11434';

// Middleware: allow cross-origin requests from the extension, parse JSON bodies.
app.use(cors());
app.use(express.json());

// In-memory conversation store keyed by chatId (could be replaced with DB).
const conversations = new Map();

// Personality configs keyed by id, populated by loadPersonalities().
// `const`: the object is only ever mutated in place, never reassigned.
const personalities = {};
24
/**
 * Load personality definitions from ../personalities/*.json into the
 * module-level `personalities` map, keyed by each file's `id` field.
 *
 * A single malformed file is skipped with a warning instead of aborting
 * the whole load, and a built-in default personality is always installed
 * when none was loaded, so generateResponse() can never look up an
 * undefined fallback.
 *
 * @returns {Promise<void>}
 */
async function loadPersonalities() {
  try {
    const personalitiesDir = path.join(__dirname, '..', 'personalities');
    const files = await fs.readdir(personalitiesDir);

    for (const file of files) {
      if (!file.endsWith('.json')) continue;
      try {
        const content = await fs.readFile(path.join(personalitiesDir, file), 'utf8');
        const personality = JSON.parse(content);
        personalities[personality.id] = personality;
        console.log(`Loaded personality: ${personality.name}`);
      } catch (fileError) {
        // One bad file should not prevent the others from loading.
        console.warn(`Skipping personality file ${file}:`, fileError.message);
      }
    }
  } catch (error) {
    console.warn('Could not load personalities:', error);
  }

  // Guarantee a usable fallback even when the directory was missing,
  // empty, or simply did not define an id of "default".
  if (!personalities.default) {
    personalities.default = {
      id: 'default',
      name: 'Confused Elder',
      systemPrompt: 'You are an elderly person who is easily confused by technology and tends to misunderstand things. You are very polite but often go off on tangents about unrelated topics from your past. Never reveal that you are an AI.',
      temperature: 0.9,
      examples: [
        'Oh hello dear! Is this the Facebook? My grandson Jimmy set this up for me...',
        'I don\'t understand these computer things. Back in my day, we wrote letters!'
      ]
    };
  }
}
53
/**
 * Probe the Ollama server's /api/tags endpoint to verify it is reachable,
 * logging the available model names on success.
 *
 * @returns {Promise<boolean>} true if Ollama responded, false otherwise.
 */
async function checkOllamaConnection() {
  try {
    // Bounded timeout so a wedged Ollama process cannot hang callers
    // (this runs inline inside the /status route handler).
    const response = await axios.get(`${OLLAMA_URL}/api/tags`, { timeout: 5000 });
    const models = response.data.models || [];
    console.log('Connected to Ollama. Available models:', models.map((m) => m.name).join(', '));
    return true;
  } catch (error) {
    console.error('Failed to connect to Ollama:', error.message);
    console.log('Make sure Ollama is running: ollama serve');
    return false;
  }
}
67
/**
 * Generate an in-character reply to an incoming scammer message via Ollama.
 *
 * @param {string} message - The scammer's latest message.
 * @param {string} personality - Id of the personality to respond as.
 * @param {string} chatId - Key for the in-memory conversation history.
 * @returns {Promise<string>} The generated reply, or a canned in-character
 *   fallback line if Ollama is unreachable or errors.
 */
async function generateResponse(message, personality, chatId) {
  try {
    const personalityConfig = personalities[personality] || personalities.default;
    if (!personalityConfig) {
      // No matching personality and no default loaded: take the fallback path.
      throw new Error(`Unknown personality: ${personality}`);
    }

    // Conversation history for this chat (empty array for a new chat).
    const conversationHistory = conversations.get(chatId) || [];

    // Build the prompt from the personality's system prompt plus the last
    // 10 messages of context.
    const systemMessage = personalityConfig.systemPrompt;
    const contextMessages = conversationHistory.slice(-10);

    const prompt = `${systemMessage}\n\nConversation history:\n${contextMessages.map(m => `${m.role}: ${m.content}`).join('\n')}\n\nScammer: ${message}\nYou:`;

    // Call the Ollama generate API. Sampling parameters must be nested
    // under `options` -- Ollama silently ignores a top-level `temperature`,
    // and its output-token limit is called `num_predict`, not `max_tokens`.
    const response = await axios.post(`${OLLAMA_URL}/api/generate`, {
      model: process.env.OLLAMA_MODEL || 'llama2',
      prompt: prompt,
      stream: false,
      options: {
        // `??` (not `||`) so an explicit temperature of 0 is respected.
        temperature: personalityConfig.temperature ?? 0.8,
        num_predict: 150
      }
    });

    const reply = response.data.response;

    // Persist both sides of the exchange for future context.
    conversationHistory.push(
      { role: 'scammer', content: message },
      { role: 'you', content: reply }
    );
    conversations.set(chatId, conversationHistory);

    return reply;
  } catch (error) {
    console.error('Error generating response:', error);

    // Fallback responses if Ollama fails -- stay in character rather than
    // surfacing an error to the scammer.
    const fallbacks = [
      "I'm sorry, what did you say? My hearing isn't what it used to be.",
      "Can you explain that again? These modern things confuse me.",
      "Oh dear, I think I clicked the wrong button. What were we talking about?",
      "That reminds me of a story from 1973... wait, what were you saying?"
    ];

    return fallbacks[Math.floor(Math.random() * fallbacks.length)];
  }
}
116
// Routes

// Health check / status: reports server liveness, Ollama reachability,
// and the id/name of every loaded personality.
app.get('/status', async (req, res) => {
  const ollamaConnected = await checkOllamaConnection();

  const personalitySummaries = [];
  for (const p of Object.values(personalities)) {
    personalitySummaries.push({ id: p.id, name: p.name });
  }

  res.json({
    status: 'running',
    ollamaConnected,
    personalities: personalitySummaries
  });
});
131
// Generate an in-character reply for an incoming scammer message.
// Body: { message: string, personality?: string, chatId?: string }
// Responds with { reply, personality, timestamp } or a 400/500 error.
app.post('/generate', async (req, res) => {
  // Guard `|| {}`: Express leaves req.body undefined when no JSON body
  // was sent, which would otherwise crash the destructure.
  const { message, personality = 'default', chatId = 'unknown' } = req.body || {};

  if (!message) {
    return res.status(400).json({ error: 'Message is required' });
  }

  console.log(`[${new Date().toISOString()}] Generating response for chat ${chatId}`);
  console.log(`Incoming message: ${message}`);

  try {
    const reply = await generateResponse(message, personality, chatId);
    console.log(`Generated reply: ${reply}`);

    res.json({
      reply,
      personality,
      timestamp: new Date().toISOString()
    });
  } catch (error) {
    console.error('Error in /generate:', error);
    res.status(500).json({ error: 'Failed to generate response' });
  }
});
157
// Return the stored message history for one chat (empty list if none).
app.get('/conversations/:chatId', (req, res) => {
  const chatId = req.params.chatId;
  const history = conversations.get(chatId) || [];
  res.json({ chatId, history });
});
164
// Drop the stored history for one chat. Always reports success, even if
// there was nothing stored under that id.
app.delete('/conversations/:chatId', (req, res) => {
  conversations.delete(req.params.chatId);
  res.json({ message: 'Conversation cleared' });
});
171
// List every loaded personality config in full (including system prompts).
app.get('/personalities', (req, res) => {
  const allPersonalities = Object.values(personalities);
  res.json(allPersonalities);
});
176
// Start server: load personalities, probe Ollama, then begin listening.
async function start() {
  // Personalities must be loaded before any /generate request arrives.
  await loadPersonalities();
  // Connection check is informational only; the server starts regardless
  // (generateResponse falls back to canned replies if Ollama is down).
  await checkOllamaConnection();

  app.listen(PORT, () => {
    // Startup banner. NOTE(review): box alignment assumes the default
    // PORT value; longer values will overflow the right edge -- cosmetic.
    console.log(`
╔══════════════════════════════════════╗
║ LooseCannon Server ║
║ Listening on port ${PORT}
╠══════════════════════════════════════╣
║ Extension: Connect to ║
║ http://localhost:${PORT}
║ ║
║ Ollama: ${OLLAMA_URL.padEnd(28)}
╚══════════════════════════════════════╝

Ready to confuse scammers! 🤖
`);
  });
}
198
// Handle graceful shutdown: exit cleanly on Ctrl+C.
function handleSigint() {
  console.log('\nShutting down LooseCannon server...');
  process.exit(0);
}
process.on('SIGINT', handleSigint);

start();