JavaScript · 13815 bytes Raw Blame History
1 // LooseCannon Local Server - Enhanced Version (ES Modules)
2 // Handles communication between browser extension and Ollama LLM
3 // Now with conversation management and multi-platform support
4
5 import express from 'express';
6 import cors from 'cors';
7 import fs from 'fs/promises';
8 import path from 'path';
9 import { fileURLToPath } from 'url';
10 import dotenv from 'dotenv';
11 import { ConversationManager } from './conversation-manager.mjs';
12
13 dotenv.config();
14
15 const __filename = fileURLToPath(import.meta.url);
16 const __dirname = path.dirname(__filename);
17
18 const app = express();
19 const PORT = process.env.PORT || 8765;
20 const OLLAMA_URL = process.env.OLLAMA_URL || 'http://localhost:11434';
21
22 // Initialize conversation manager
23 const conversationManager = new ConversationManager();
24
25 // Middleware
26 app.use(cors());
27 app.use(express.json({ limit: '10mb' }));
28
29 // Load personalities from files
30 let personalities = {};
31
// Load all personality definitions from ../personalities/*.json into the
// module-level `personalities` map, keyed by each file's `id` field.
// Falls back to a single built-in "Confused Elder" personality when the
// directory is missing or unreadable, so the server always has a default.
async function loadPersonalities() {
  try {
    const personalitiesDir = path.join(__dirname, '..', 'personalities');
    const files = await fs.readdir(personalitiesDir);
    const jsonFiles = files.filter((file) => file.endsWith('.json'));

    // The files are independent — read and parse them in parallel instead
    // of awaiting each one sequentially.
    await Promise.all(jsonFiles.map(async (file) => {
      const content = await fs.readFile(path.join(personalitiesDir, file), 'utf8');
      const personality = JSON.parse(content);
      personalities[personality.id] = personality;
      console.log(`Loaded personality: ${personality.name}`);
    }));
  } catch (error) {
    // Best-effort: keep the server usable even without personality files.
    console.warn('Could not load personalities:', error);
    personalities.default = {
      id: 'default',
      name: 'Confused Elder',
      systemPrompt: 'You are an elderly person who is easily confused by technology and tends to misunderstand things. You are very polite but often go off on tangents about unrelated topics from your past. Never reveal that you are an AI.',
      temperature: 0.9
    };
  }
}
55
56 // Fetch wrapper for Ollama API with better error handling
// Thin wrapper around fetch() for the Ollama HTTP API. Prefixes the
// configured OLLAMA_URL, forces a JSON content type (caller headers win),
// and converts non-2xx responses into thrown errors so every caller only
// has to handle a single failure path.
async function fetchOllama(endpoint, options = {}) {
  const url = `${OLLAMA_URL}${endpoint}`;
  const headers = {
    'Content-Type': 'application/json',
    ...options.headers
  };

  try {
    const response = await fetch(url, { ...options, headers });

    if (!response.ok) {
      throw new Error(`Ollama API error: ${response.status}`);
    }

    return await response.json();
  } catch (error) {
    // Log here (closest to the wire) and rethrow for the caller to handle.
    console.error('Ollama fetch error:', error);
    throw error;
  }
}
77
78 // Check Ollama connection
// Probe the Ollama /api/tags endpoint to verify the daemon is reachable,
// logging the available model names on success. Returns a boolean instead
// of throwing so startup and /status can report connectivity cheaply.
async function checkOllamaConnection() {
  try {
    const data = await fetchOllama('/api/tags');
    const names = (data.models || []).map((m) => m.name).join(', ');
    console.log('Connected to Ollama. Available models:', names);
    return true;
  } catch (error) {
    console.error('Failed to connect to Ollama:', error.message);
    console.log('Make sure Ollama is running: ollama serve');
    return false;
  }
}
91
92 // Enhanced response generation with context awareness
// Generate an in-character reply to an incoming message.
//
// Builds a prompt from the selected personality's system prompt (optionally
// sharpened by a context-suggested strategy), the last 10 messages of the
// conversation, and the new message, then asks Ollama for a completion.
// Never throws: on any failure it falls back to caller-provided suggestion
// texts or canned confusion lines.
//
// @param {string} message       - The message we are replying to.
// @param {string} personality   - Personality id; unknown ids use `default`.
// @param {string} chatId        - Conversation key within the platform.
// @param {string} platform      - e.g. 'whatsapp'.
// @param {object} [context]     - Optional summary; `suggestedStrategy` is honored.
// @param {Array}  [suggestions] - Optional fallbacks ({ response } objects).
// @returns {Promise<string>} reply text.
async function generateEnhancedResponse(message, personality, chatId, platform, context, suggestions) {
  try {
    const personalityConfig = personalities[personality] || personalities.default;
    const conversation = conversationManager.getConversation(chatId, platform);

    let systemPrompt = personalityConfig.systemPrompt;

    // Strategy modifiers tune the base persona to the current situation.
    if (context && context.suggestedStrategy) {
      switch (context.suggestedStrategy) {
        case 'maximum_confusion':
          systemPrompt += '\n\nBe EXTREMELY confused and misunderstand everything. Mix up basic concepts.';
          break;
        case 'waste_time':
          systemPrompt += '\n\nAsk lots of clarifying questions. Pretend to not understand simple instructions.';
          break;
        case 'play_poor':
          systemPrompt += '\n\nMention that you have no money and are struggling financially.';
          break;
        case 'ask_questions':
          systemPrompt += '\n\nBe very curious and ask lots of questions about everything they say.';
          break;
      }
    }

    // Only the last 10 messages go into the prompt to keep it small.
    const recentMessages = conversation.messages.slice(-10);
    const historyText = recentMessages.map(m =>
      `${m.sender || 'Them'}: ${m.content}`
    ).join('\n');

    const prompt = `${systemPrompt}

Recent conversation:
${historyText}

They just said: "${message}"

Remember to stay in character. Respond naturally as your character would.

Your response:`;

    const response = await fetchOllama('/api/generate', {
      method: 'POST',
      body: JSON.stringify({
        model: process.env.OLLAMA_MODEL || 'llama2',
        prompt: prompt,
        options: {
          // BUG FIX: Ollama only honors sampling parameters inside
          // `options`; the previous top-level `temperature` field was
          // silently ignored by /api/generate.
          temperature: personalityConfig.temperature || 0.8,
          num_predict: 200,
          top_p: 0.9,
          stop: ["\n\n", "Them:", "They said:"]
        },
        stream: false
      })
    });

    // Guard against an unexpected payload shape before calling string
    // methods, then strip accidental AI self-identification and tidy the
    // double spaces the removal leaves behind.
    let reply = (response.response ?? '').trim();
    reply = reply
      .replace(/As an AI|I'm an AI|I am an AI|artificial intelligence/gi, '')
      .replace(/ {2,}/g, ' ')
      .trim();

    // Occasional tangents make the elder persona more believable.
    if (personality === 'confused-elder' && Math.random() > 0.7) {
      const tangents = [
        ' Wait, this reminds me of something that happened in 1987...',
        ' Oh, my cat is meowing. One second dear.',
        ' Where did I put my glasses?'
      ];
      reply += tangents[Math.floor(Math.random() * tangents.length)];
    }

    return reply;
  } catch (error) {
    console.error('Error generating enhanced response:', error);

    // Prefer the caller's suggestions; otherwise use canned confusion.
    const fallbacks = suggestions && suggestions.length > 0
      ? suggestions.map(s => s.response)
      : [
          "I'm sorry, what did you say? I'm having trouble with this computer.",
          "Can you explain that again? These modern things confuse me.",
          "Oh dear, I think I clicked the wrong button. What were we talking about?"
        ];

    return fallbacks[Math.floor(Math.random() * fallbacks.length)];
  }
}
178
179 // Routes
180
// GET /status — health check used by the extension: server version, live
// Ollama reachability, loaded personalities (ids/names only), and stats.
app.get('/status', async (req, res) => {
  const ollamaConnected = await checkOllamaConnection();
  const personalityList = Object.values(personalities).map(({ id, name }) => ({ id, name }));

  res.json({
    status: 'running',
    version: '0.3.1',
    ollamaConnected,
    personalities: personalityList,
    stats: conversationManager.getStatistics()
  });
});
194
// POST /conversation/add — record a message into a conversation and return
// the refreshed context summary for that chat.
app.post('/conversation/add', (req, res) => {
  const { chatId, platform, message } = req.body;

  // Reject malformed requests explicitly instead of failing downstream.
  if (!chatId || !message) {
    return res.status(400).json({ error: 'chatId and message are required' });
  }

  try {
    conversationManager.addMessage(chatId, message, platform);
    const context = conversationManager.generateContextSummary(chatId, platform);
    res.json(context);
  } catch (error) {
    console.error('Error adding message:', error);
    res.status(500).json({ error: 'Failed to add message' });
  }
});
201
// POST /suggestions — canned response suggestions for a conversation.
app.post('/suggestions', (req, res) => {
  const { chatId, platform } = req.body;
  res.json(conversationManager.getResponseSuggestions(chatId, platform));
});
207
// POST /generate — main endpoint: log the incoming message, produce an
// in-character reply via the LLM, log our reply, and return it together
// with a refreshed context summary.
app.post('/generate', async (req, res) => {
  const {
    message,
    personality = 'default',
    chatId = 'unknown',
    platform = 'whatsapp',
    context,
    suggestions,
    timestamp
  } = req.body;

  if (!message) {
    return res.status(400).json({ error: 'Message is required' });
  }

  console.log(`[${new Date().toISOString()}] Generating response for ${platform}:${chatId}`);

  try {
    conversationManager.addMessage(chatId, {
      content: message,
      sender: 'them',
      type: 'text',
      // BUG FIX: `new Date(undefined)` is an Invalid Date; fall back to
      // "now" when the client did not supply a timestamp.
      timestamp: timestamp ? new Date(timestamp) : new Date()
    }, platform);

    const reply = await generateEnhancedResponse(
      message,
      personality,
      chatId,
      platform,
      context,
      suggestions
    );

    conversationManager.addMessage(chatId, {
      content: reply,
      sender: 'us',
      type: 'text',
      timestamp: new Date()
    }, platform);

    console.log(`Generated reply: ${reply}`);

    res.json({
      reply,
      personality,
      timestamp: new Date().toISOString(),
      context: conversationManager.generateContextSummary(chatId, platform)
    });
  } catch (error) {
    console.error('Error in /generate:', error);
    res.status(500).json({ error: 'Failed to generate response' });
  }
});
262
// POST /conversation/export — serialize one conversation for download.
app.post('/conversation/export', (req, res) => {
  const { chatId, platform = 'whatsapp' } = req.body;

  try {
    res.json(conversationManager.exportConversation(chatId, platform));
  } catch (error) {
    console.error('Error exporting conversation:', error);
    res.status(500).json({ error: 'Failed to export conversation' });
  }
});
274
// GET /statistics — aggregate counters from the conversation manager.
app.get('/statistics', (req, res) => {
  res.json(conversationManager.getStatistics());
});
279
// GET /conversations/:platform/:chatId — full transcript plus derived
// context and state for a single conversation.
app.get('/conversations/:platform/:chatId', (req, res) => {
  const { platform, chatId } = req.params;
  const { messages, context, state } = conversationManager.getConversation(chatId, platform);

  res.json({ chatId, platform, messages, context, state });
});
292
// DELETE /conversations/:platform/:chatId — wipe a conversation's history
// in place; the conversation object itself is kept for future messages.
app.delete('/conversations/:platform/:chatId', (req, res) => {
  const { chatId, platform } = req.params;
  const conv = conversationManager.getConversation(chatId, platform);

  conv.messages = [];
  conv.context.responseCount = 0;

  res.json({ message: 'Conversation cleared' });
});
300
// GET /personalities — every loaded personality, including full prompts.
app.get('/personalities', (req, res) => {
  const all = Object.values(personalities);
  res.json(all);
});
304
// GET /personalities/:id — a single personality, or 404 if unknown.
app.get('/personalities/:id', (req, res) => {
  const personality = personalities[req.params.id];

  if (!personality) {
    return res.status(404).json({ error: 'Personality not found' });
  }

  res.json(personality);
});
315
// POST /personalities — create or overwrite a personality and persist it
// as ../personalities/<id>.json.
app.post('/personalities', async (req, res) => {
  const { id, name, systemPrompt, temperature } = req.body;

  if (!id || !name || !systemPrompt) {
    return res.status(400).json({ error: 'Missing required fields' });
  }

  // SECURITY: `id` becomes a filename. Restrict it to a safe charset so a
  // crafted id (e.g. "../../some/path") cannot write outside the
  // personalities directory.
  if (typeof id !== 'string' || !/^[\w-]+$/.test(id)) {
    return res.status(400).json({ error: 'Invalid personality id' });
  }

  const personality = {
    id,
    name,
    systemPrompt,
    // `??` rather than `||` so an explicit temperature of 0 is respected.
    temperature: temperature ?? 0.8
  };

  try {
    const filePath = path.join(__dirname, '..', 'personalities', `${id}.json`);
    await fs.writeFile(filePath, JSON.stringify(personality, null, 2));
    // Register in memory only after the write succeeds, so the live set
    // never diverges from what is on disk.
    personalities[id] = personality;
    res.json({ success: true, personality });
  } catch (error) {
    console.error('Error saving personality:', error);
    res.status(500).json({ error: 'Failed to save personality' });
  }
});
341
// POST /test-personality — dry-run a systemPrompt/temperature pair against
// a message without persisting it, using a throwaway conversation.
app.post('/test-personality', async (req, res) => {
  const { systemPrompt, temperature, message } = req.body;

  if (!systemPrompt || !message) {
    return res.status(400).json({ error: 'systemPrompt and message are required' });
  }

  // Temporarily register under the reserved 'test' id so
  // generateEnhancedResponse can resolve it.
  personalities.test = {
    systemPrompt,
    temperature: temperature ?? 0.8
  };

  try {
    const reply = await generateEnhancedResponse(
      message,
      'test',
      'test-chat',
      'test',
      {},
      []
    );

    res.json({ response: reply });
  } catch (error) {
    console.error('Error testing personality:', error);
    res.status(500).json({ error: 'Test failed' });
  } finally {
    // BUG FIX: previously the throwaway personality was never removed, so
    // it leaked into GET /personalities and /status after every test call.
    delete personalities.test;
  }
});
368
// POST /emergency-stop — immediately drop every tracked conversation.
app.post('/emergency-stop', (req, res) => {
  console.log('EMERGENCY STOP ACTIVATED');
  conversationManager.conversations.clear();
  res.json({ message: 'All sessions stopped' });
});
374
// GET /analytics — statistics payload with a couple of aliased fields the
// extension dashboard expects. The trailing spread deliberately lets raw
// stats keys win over the aliases when names collide.
app.get('/analytics', (req, res) => {
  const stats = conversationManager.getStatistics();
  const payload = {
    totalMessages: stats.totalMessages,
    scammersDetected: stats.confirmedScammers,
    ...stats
  };
  res.json(payload);
});
383
// GET /conversations/active — lightweight summaries of every conversation
// currently in the 'active' state.
app.get('/conversations/active', (req, res) => {
  const summaries = [];

  for (const conv of conversationManager.conversations.values()) {
    if (conv.state !== 'active') continue;
    summaries.push({
      id: conv.id,
      platform: conv.platform,
      chatId: conv.chatId,
      messageCount: conv.messages.length,
      scammerScore: conv.context.scammerScore,
      duration: Date.now() - new Date(conv.startTime).getTime()
    });
  }

  res.json(summaries);
});
398
// GET /patterns — static placeholder pattern list until real pattern
// learning is wired up.
app.get('/patterns', (req, res) => {
  const examplePatterns = [
    { id: 'urgent', type: 'keyword', occurrences: 42, verified: true },
    { id: 'money_request', type: 'pattern', occurrences: 31, verified: true },
    { id: 'verification', type: 'keyword', occurrences: 28, verified: false }
  ];
  res.json(examplePatterns);
});
407
// POST /patterns/sync — placeholder; always answers with an empty list.
app.post('/patterns/sync', (req, res) => {
  res.json([]);
});
412
// POST /analytics/sync — placeholder; acknowledges and discards the body.
app.post('/analytics/sync', (req, res) => {
  res.json({ success: true });
});
417
// Prune stale conversations once an hour.
const CLEANUP_INTERVAL_MS = 60 * 60 * 1000;
setInterval(() => conversationManager.cleanup(), CLEANUP_INTERVAL_MS);
422
// Start server
// Boot sequence: load personality files, probe Ollama (a failed probe is
// logged but non-fatal — the daemon may come up later), then bind the HTTP
// server and print the startup banner.
async function start() {
  await loadPersonalities();
  await checkOllamaConnection();

  app.listen(PORT, () => {
    // NOTE(review): the box-drawing edges won't align for ports or Ollama
    // URLs of unusual length (only OLLAMA_URL is padded) — cosmetic only.
    console.log(`
╔══════════════════════════════════════╗
║ LooseCannon Server v0.3.1 ║
║ Listening on port ${PORT}
╠══════════════════════════════════════╣
║ Features: ║
║ ✓ Multi-platform support ║
║ ✓ Conversation management ║
║ ✓ Scammer detection ║
║ ✓ Context-aware responses ║
║ ✓ Modern dependencies ║
║ ║
║ Extension: Connect to ║
║ http://localhost:${PORT}
║ ║
║ Ollama: ${OLLAMA_URL.padEnd(28)}
╚══════════════════════════════════════╝

Ready to confuse scammers across all platforms!
`);
  });
}
451
// Handle graceful shutdown
// On Ctrl-C, dump final statistics before exiting. Conversations live only
// in memory, so there is nothing else to flush.
process.on('SIGINT', () => {
  console.log('\n\nShutting down LooseCannon server...');
  console.log('Statistics:', conversationManager.getStatistics());
  process.exit(0);
});
458
// Handle uncaught errors
// NOTE(review): Node's docs recommend exiting after an uncaughtException
// because process state may be inconsistent; this handler deliberately
// keeps the server alive instead — confirm that trade-off is intended.
process.on('uncaughtException', (error) => {
  console.error('Uncaught Exception:', error);
});

// Log (but tolerate) rejected promises nothing awaited.
process.on('unhandledRejection', (reason, promise) => {
  console.error('Unhandled Rejection at:', promise, 'reason:', reason);
});
467
468 start();