init repo
- SHA: f9488921532da9272319530f226999bd7b738cc0
- Tree: 12318ad
- Short SHA: f948892

| Status | File | + | - |
|---|---|---|---|
| A |
.gitignore
|
1 | 0 |
| A |
audio/list.php
|
15 | 0 |
| A |
css/main.css
|
375 | 0 |
| A |
deploy.sh
|
26 | 0 |
| A |
index.html
|
93 | 0 |
| A |
js/app.js
|
347 | 0 |
| A |
js/audio-engine.js
|
460 | 0 |
| A |
js/effects.js
|
251 | 0 |
| A |
js/gesture-detector.js
|
238 | 0 |
| A |
js/worklets/bitcrusher.js
|
65 | 0 |
| A |
js/worklets/spectral-freeze.js
|
112 | 0 |
.gitignoreadded@@ -0,0 +1,1 @@ | |||
# Keep large rendered/recorded audio assets out of version control.
*.wav
audio/list.phpadded@@ -0,0 +1,15 @@ | |||
<?php
// audio/list.php - Optional API endpoint for listing audio files.
// Returns a JSON array of the .wav files in this directory, sorted
// alphabetically; a cleaner alternative to parsing the HTML directory listing.

header('Content-Type: application/json');
// NOTE(review): wildcard CORS exposes the file list to any origin — confirm intended.
header('Access-Control-Allow-Origin: *');

// Get all WAV files in the current directory.
// glob() returns false on error; fall back to an empty array so the
// client always receives a valid JSON array instead of the literal `false`.
$files = glob('*.wav') ?: [];

// Sort alphabetically
sort($files);

// Return as JSON
echo json_encode($files);
css/main.cssadded@@ -0,0 +1,375 @@ | |||
/* css/main.css — styles for the Gesture DSP web app (dark theme).
   Layout: a .container holding the controls bar, then a two-column
   .main-area grid (video left, parameter panel right) that collapses
   to a single column on narrow screens. */

/* Reset and Base Styles */
* {
    box-sizing: border-box;
    margin: 0;
    padding: 0;
}

body {
    font-family: -apple-system, BlinkMacSystemFont, 'Segoe UI', Roboto, 'Helvetica Neue', Arial, sans-serif;
    background: #1a1a1a;
    color: #fff;
    line-height: 1.6;
    padding: 20px;
}

/* Layout */
.container {
    max-width: 1200px;
    margin: 0 auto;
}

header {
    margin-bottom: 30px;
}

h1 {
    font-size: 28px;
    font-weight: 600;
    letter-spacing: -0.5px;
}

/* Controls Section */
.controls {
    background: #2a2a2a;
    padding: 24px;
    border-radius: 12px;
    margin-bottom: 24px;
    box-shadow: 0 2px 8px rgba(0, 0, 0, 0.3);
}

.control-group {
    display: flex;
    gap: 12px;
    flex-wrap: wrap;
    margin-bottom: 16px;
}

.ir-controls {
    display: flex;
    align-items: center;
    gap: 12px;
    margin-bottom: 16px;
}

.ir-controls label {
    font-weight: 500;
}

/* Buttons */
.btn {
    background: #0066ff;
    color: white;
    border: none;
    padding: 10px 20px;
    border-radius: 6px;
    cursor: pointer;
    font-size: 14px;
    font-weight: 500;
    transition: all 0.2s ease;
}

.btn:hover {
    background: #0052cc;
    transform: translateY(-1px);
    box-shadow: 0 2px 8px rgba(0, 102, 255, 0.3);
}

.btn:active {
    transform: translateY(0);
}

.btn:disabled {
    background: #666;
    cursor: not-allowed;
    transform: none;
    box-shadow: none;
}

.btn-primary {
    background: #0066ff;
}

.btn-success {
    background: #00a854;
}

.btn-success:hover {
    background: #008844;
}

.btn-small {
    padding: 6px 12px;
    font-size: 13px;
}

/* Select */
.select {
    background: #333;
    color: white;
    border: 1px solid #555;
    padding: 8px 12px;
    border-radius: 6px;
    font-size: 14px;
    cursor: pointer;
}

.select:focus {
    outline: none;
    border-color: #0066ff;
}

/* Status */
.status {
    font-size: 14px;
    color: #00ff00;
    font-weight: 500;
}

/* Main Area — two-column grid: video feed + fixed-width parameter panel */
.main-area {
    display: grid;
    grid-template-columns: 1fr 320px;
    gap: 24px;
}

/* Video Section */
.video-section {
    background: #2a2a2a;
    border-radius: 12px;
    overflow: hidden;
    box-shadow: 0 2px 8px rgba(0, 0, 0, 0.3);
}

.video-container {
    /* Positioning anchor for the absolutely-positioned overlays below. */
    position: relative;
}

#videoCanvas {
    width: 100%;
    height: auto;
    display: block;
    background: #000;
}

/* Effect Zones — overlay strip across the top of the video canvas;
   one .zone per effect, populated by js/app.js. */
.effect-zones {
    position: absolute;
    top: 0;
    left: 0;
    width: 100%;
    height: 48px;
    display: flex;
    pointer-events: none; /* purely visual; never intercept mouse events */
    background: rgba(0, 0, 0, 0.5);
}

.zone {
    flex: 1;
    border-right: 1px solid rgba(255, 255, 255, 0.1);
    display: flex;
    align-items: center;
    justify-content: center;
    font-size: 12px;
    font-weight: 500;
    color: rgba(255, 255, 255, 0.6);
    text-transform: capitalize;
    transition: all 0.3s ease;
}

.zone:last-child {
    border-right: none;
}

.zone.active {
    background: rgba(0, 102, 255, 0.3);
    color: #fff;
    text-shadow: 0 0 4px rgba(0, 102, 255, 0.5);
}

/* Cue Indicator — hidden banner flashed by js/app.js on cue trigger */
.cue-indicator {
    position: absolute;
    top: 60px;
    left: 50%;
    transform: translateX(-50%);
    background: #ff0000;
    color: white;
    padding: 12px 24px;
    border-radius: 6px;
    font-weight: bold;
    font-size: 16px;
    letter-spacing: 1px;
    box-shadow: 0 4px 12px rgba(255, 0, 0, 0.4);
    display: none; /* shown only while .active */
}

.cue-indicator.active {
    display: block;
    animation: pulse 0.5s ease-in-out infinite;
}

/* translateX(-50%) is repeated inside the keyframes so the pulse
   animation does not overwrite the horizontal centering transform. */
@keyframes pulse {
    0%, 100% {
        opacity: 1;
        transform: translateX(-50%) scale(1);
    }
    50% {
        opacity: 0.8;
        transform: translateX(-50%) scale(1.05);
    }
}

/* Parameters Section */
.params-section {
    background: #2a2a2a;
    padding: 24px;
    border-radius: 12px;
    box-shadow: 0 2px 8px rgba(0, 0, 0, 0.3);
}

.params-section h3 {
    font-size: 18px;
    margin-bottom: 20px;
    font-weight: 500;
}

.params-section h3 span {
    color: #0066ff;
    font-weight: 600;
}

/* Parameter Visualizer */
.param-visualizer {
    margin-bottom: 24px;
}

.param-bar {
    width: 100%;
    height: 200px;
    background: #1a1a1a;
    border-radius: 8px;
    position: relative;
    overflow: hidden;
    box-shadow: inset 0 2px 4px rgba(0, 0, 0, 0.3);
}

.param-fill {
    position: absolute;
    bottom: 0;
    width: 100%;
    background: linear-gradient(to top, #0066ff, #00aaff);
    border-radius: 8px 8px 0 0;
    transition: height 0.1s ease; /* JS animates the level by setting `height` */
    height: 50%;
}

.param-labels {
    display: flex;
    justify-content: space-between;
    margin-top: 8px;
    font-size: 12px;
    color: #666;
}

/* Info Boxes */
.info-box {
    background: #333;
    padding: 16px;
    border-radius: 8px;
    margin-bottom: 16px;
    font-size: 14px;
}

.info-box:last-child {
    margin-bottom: 0;
}

.info-list {
    display: grid;
    grid-template-columns: auto 1fr;
    gap: 8px 16px;
}

.info-list dt {
    font-weight: 600;
    color: #999;
}

.info-list dd {
    color: #fff;
    font-family: 'SF Mono', Monaco, 'Cascadia Code', monospace;
}

/* Instructions — numbered circles drawn with CSS counters instead of
   the default ordered-list markers. */
.instructions h4 {
    font-size: 16px;
    margin-bottom: 12px;
    font-weight: 500;
}

.instructions ol {
    list-style: none;
    counter-reset: step-counter;
}

.instructions li {
    counter-increment: step-counter;
    position: relative;
    padding-left: 32px;
    margin-bottom: 8px;
    line-height: 1.5;
}

.instructions li::before {
    content: counter(step-counter);
    position: absolute;
    left: 0;
    top: 0;
    background: #0066ff;
    color: white;
    width: 24px;
    height: 24px;
    border-radius: 50%;
    display: flex;
    align-items: center;
    justify-content: center;
    font-size: 12px;
    font-weight: bold;
}

/* Responsive */
@media (max-width: 968px) {
    .main-area {
        grid-template-columns: 1fr; /* stack video and params vertically */
    }

    .params-section {
        order: -1; /* show the parameter panel above the video */
    }

    .control-group {
        justify-content: center;
    }
}

@media (max-width: 640px) {
    body {
        padding: 12px;
    }

    h1 {
        font-size: 24px;
    }

    .controls {
        padding: 16px;
    }

    .btn {
        padding: 8px 16px;
        font-size: 13px;
    }
}
deploy.shadded@@ -0,0 +1,26 @@ | |||
#!/usr/bin/env bash
# deploy.sh — stage the project into a timestamped build directory, publish
# it as a new release under /var/www/cue.musicsian.com/releases/, atomically
# flip the `current` symlink, fix SELinux labels, and reload Nginx.
#
# Fix: all variable expansions used in paths are now quoted (word-splitting
# safety), `~` replaced with "$HOME", and the release path is defined once.
set -euo pipefail

PROJECT_DIR="$(cd "$(dirname "$0")" && pwd)"
STAMP="$(date +%Y-%m-%d-%H%M%S)"
OUT="$HOME/builds/$STAMP"
RELEASE="/var/www/cue.musicsian.com/releases/$STAMP"
mkdir -p "$OUT"

echo "▶ Staging files..."
rsync -az --delete --exclude deploy.sh --exclude .git \
  "$PROJECT_DIR"/ "$OUT"/

echo "▶ Publishing release..."
rsync -az --delete "$OUT"/ "$RELEASE"/

echo "▶ Flipping current symlink..."
# -nfs: replace the symlink itself rather than descending into the old target.
sudo ln -nfs "$RELEASE" \
  /var/www/cue.musicsian.com/current

echo "▶ Restoring SELinux labels..."
sudo restorecon -Rv "$RELEASE" >/dev/null

echo "▶ Reloading Nginx..."
sudo systemctl reload nginx

echo "✓ Deployed $STAMP to cue.musicsian.com"
index.htmladded@@ -0,0 +1,93 @@ | |||
<!DOCTYPE html>
<html lang="en">
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Gesture DSP - Web Audio Port</title>
    <link rel="stylesheet" href="css/main.css">
</head>
<body>
    <div class="container">
        <header>
            <h1>🎵 Gesture DSP - Web Audio Port</h1>
        </header>

        <!-- Source selection + transport; every id here is wired up in js/app.js -->
        <section class="controls">
            <div class="control-group">
                <button id="startCamera" class="btn btn-primary">Start Camera</button>
                <select id="audioSelect" class="select">
                    <option value="">Select Audio File...</option>
                    <option value="__demo__">Generated Demo</option>
                    <option disabled>──────────</option>
                </select>
                <button id="loadFile" class="btn">Upload WAV</button>
                <!-- hidden input; clicked programmatically by the Upload WAV button -->
                <input type="file" id="fileInput" accept="audio/wav" hidden>
                <button id="playPause" class="btn btn-success" disabled>Play</button>
            </div>

            <!-- Impulse-response (convolution reverb) selection -->
            <div class="ir-controls">
                <label for="irSelect">Impulse Response:</label>
                <select id="irSelect" class="select">
                    <option value="none">None</option>
                    <option value="hall">Hall</option>
                    <option value="room">Room</option>
                    <option value="plate">Plate</option>
                </select>
                <button id="loadIR" class="btn btn-small">Load Custom IR</button>
                <input type="file" id="irInput" accept="audio/wav" hidden>
            </div>

            <div class="status" id="status">Ready to start...</div>
        </section>

        <main class="main-area">
            <!-- Camera feed with effect-zone strip and cue banner overlaid -->
            <section class="video-section">
                <div class="video-container">
                    <canvas id="videoCanvas" width="640" height="480"></canvas>
                    <div class="effect-zones" id="effectZones"></div>
                    <div class="cue-indicator" id="cueIndicator">CUE TRIGGERED</div>
                </div>
            </section>

            <!-- Live effect/parameter readout, updated per frame by js/app.js -->
            <aside class="params-section">
                <h3>Current Effect: <span id="currentEffect">None</span></h3>

                <div class="param-visualizer">
                    <div class="param-bar">
                        <div class="param-fill" id="paramFill"></div>
                    </div>
                    <div class="param-labels">
                        <span>0%</span>
                        <span>50%</span>
                        <span>100%</span>
                    </div>
                </div>

                <div class="info-box">
                    <dl class="info-list">
                        <dt>Parameter:</dt>
                        <dd id="paramValue">0.50</dd>
                        <dt>Raw Param:</dt>
                        <dd id="rawParamValue">0.50</dd>
                        <dt>Hand Detected:</dt>
                        <dd id="handStatus">No</dd>
                    </dl>
                </div>

                <div class="info-box instructions">
                    <h4>Instructions:</h4>
                    <ol>
                        <li>Hold a neon pink/orange post-it</li>
                        <li>Move horizontally to select effect</li>
                        <li>Hold a teal post-it with other hand</li>
                        <li>Move teal post-it vertically for parameter</li>
                        <li>Keep post-it in same zone for cue trigger</li>
                    </ol>
                </div>
            </aside>
        </main>
    </div>

    <script type="module" src="js/app.js"></script>
</body>
</html>
js/app.jsadded@@ -0,0 +1,347 @@ | |||
| 1 | +// js/app.js | ||
| 2 | +import { AudioEngine } from './audio-engine.js'; | ||
| 3 | +import { GestureDetector } from './gesture-detector.js'; | ||
| 4 | + | ||
/**
 * Application controller for the Gesture DSP web app.
 *
 * Wires the DOM controls to the AudioEngine (playback + effect chain) and
 * the GestureDetector (camera-based colored-marker tracking), and maps
 * detected marker positions onto effect selection (horizontal), parameter
 * control (vertical) and cue triggering (dwelling in one zone).
 */
class GestureDSPApp {
  constructor() {
    this.audioEngine = new AudioEngine();
    this.gestureDetector = new GestureDetector();

    // Rolling window of recently selected effect indices, used to detect
    // a "dwell" (same zone for 10 consecutive detections) as a cue.
    this.effectHistory = [];
    this.lastCueTime = 0;
    this.cueActive = false;

    // setupUI is async (it fetches the audio file list); attach a catch
    // handler so a failed fetch does not become an unhandled rejection.
    this.setupUI().catch((err) => console.error('UI setup failed:', err));
    this.setupEffectZones();
    this.setupGestureCallbacks();
  }

  /**
   * Populate the audio <select> with server-side files. Tries the JSON
   * API endpoint first, then falls back to scraping the directory
   * listing, then to a hard-coded default list.
   */
  async loadAudioFileList() {
    try {
      // First try the API endpoint if available
      const response = await fetch('audio/list.php');
      if (response.ok && response.headers.get('content-type')?.includes('application/json')) {
        const files = await response.json();
        console.log('Audio files from API:', files);
        this.populateAudioSelect(files);
        return;
      }
    } catch (err) {
      console.log('API endpoint error:', err);
    }

    try {
      // Fallback to parsing the web server's directory listing HTML
      const response = await fetch('audio/');
      const text = await response.text();
      console.log('Directory listing response:', text.substring(0, 200));

      const files = this.parseDirectoryListing(text);
      console.log('Parsed audio files:', files);

      if (files.length > 0) {
        this.populateAudioSelect(files);
      } else {
        console.log('No audio files found, using defaults');
        this.addDefaultAudioFiles();
      }
    } catch (err) {
      console.error('Could not load audio file list:', err);
      this.addDefaultAudioFiles();
    }
  }

  /**
   * Append an <option> per .wav filename to the audio select box.
   * @param {string[]} files - candidate filenames; non-.wav entries skipped
   */
  populateAudioSelect(files) {
    const select = document.getElementById('audioSelect');
    files.forEach((filename) => {
      if (filename.toLowerCase().endsWith('.wav')) {
        const option = document.createElement('option');
        option.value = filename;
        option.textContent = this.formatFilename(filename);
        select.appendChild(option);
      }
    });
  }

  /**
   * Prettify a filename for display: strip the .wav extension, turn
   * underscores/hyphens into spaces, and title-case each word.
   * @param {string} filename
   * @returns {string}
   */
  formatFilename(filename) {
    return filename
      .replace(/\.wav$/i, '')
      .replace(/[_-]/g, ' ')
      .replace(/\b\w/g, (l) => l.toUpperCase());
  }

  /**
   * Extract .wav filenames from an Apache/Nginx directory listing, or —
   * if the response body was actually JSON — from a JSON array.
   * @param {string} html - raw response body
   * @returns {string[]} sorted filenames (the JSON branch preserves order)
   */
  parseDirectoryListing(html) {
    const files = [];

    // Anchor tags whose href ends in .wav (autoindex-style listing).
    const linkRegex = /<a\s+href="([^"]+\.wav)"[^>]*>/gi;
    let match;
    while ((match = linkRegex.exec(html)) !== null) {
      const filename = match[1];
      // Skip absolute paths and parent-directory links.
      if (!filename.startsWith('/') && !filename.startsWith('..')) {
        files.push(filename);
      }
    }

    // If no anchors matched, the body may be a JSON array of names.
    if (files.length === 0) {
      try {
        const json = JSON.parse(html);
        if (Array.isArray(json)) {
          return json.filter((f) => f.endsWith('.wav'));
        }
      } catch (e) {
        // Not JSON, ignore
      }
    }

    return files.sort();
  }

  /** Fill the select with a fallback list of common demo files. */
  addDefaultAudioFiles() {
    const defaultFiles = [
      'drums.wav',
      'synth.wav',
      'vocals.wav',
      'guitar.wav',
      'piano.wav'
    ];

    const select = document.getElementById('audioSelect');
    defaultFiles.forEach((filename) => {
      const option = document.createElement('option');
      option.value = filename;
      option.textContent = filename.replace(/\.wav$/i, '');
      select.appendChild(option);
    });
  }

  /**
   * Fetch a WAV from the server's audio/ directory and load it into the
   * audio engine, reporting progress via the status line.
   * (Fix: the interpolations in this method were corrupted to "$(unknown)"
   * in the export; restored to the selected filename.)
   * @param {string} filename - bare filename as listed in the select box
   */
  async loadAudioFromServer(filename) {
    try {
      this.updateStatus(`Loading ${filename}...`);
      // Encode so names containing spaces/#/? survive as a URL path segment.
      const response = await fetch(`audio/${encodeURIComponent(filename)}`);

      if (!response.ok) {
        throw new Error(`HTTP error! status: ${response.status}`);
      }

      const arrayBuffer = await response.arrayBuffer();
      await this.audioEngine.loadArrayBuffer(arrayBuffer);
      this.updateStatus(`Loaded: ${filename}`);
    } catch (err) {
      console.error('Error loading audio file:', err);
      this.updateStatus(`Failed to load ${filename}`);
    }
  }

  /** Wire up every DOM control; called once from the constructor. */
  async setupUI() {
    // Load the server's audio files before users can pick one.
    await this.loadAudioFileList();

    // Camera control
    document.getElementById('startCamera').addEventListener('click', () => this.startCamera());

    // Audio selection
    document.getElementById('audioSelect').addEventListener('change', async (e) => {
      const value = e.target.value;
      if (!value) return;

      if (value === '__demo__') {
        await this.audioEngine.loadDemoAudio();
        this.updateStatus('Demo audio loaded');
      } else {
        await this.loadAudioFromServer(value);
      }
      document.getElementById('playPause').disabled = false;
    });

    // Audio upload (the visible button proxies the hidden file input)
    document.getElementById('loadFile').addEventListener('click', () => {
      document.getElementById('fileInput').click();
    });

    document.getElementById('fileInput').addEventListener('change', async (e) => {
      const file = e.target.files[0];
      if (file) {
        await this.audioEngine.loadFile(file);
        this.updateStatus(`Loaded: ${file.name}`);
        document.getElementById('playPause').disabled = false;

        // Reset select to show custom file loaded
        document.getElementById('audioSelect').value = '';
      }
    });

    document.getElementById('playPause').addEventListener('click', () => {
      this.togglePlayback();
    });

    // IR (impulse response / convolution reverb) controls
    document.getElementById('irSelect').addEventListener('change', (e) => {
      this.audioEngine.loadPresetIR(e.target.value);
      this.updateStatus(`Loaded ${e.target.value} IR`);
    });

    document.getElementById('loadIR').addEventListener('click', () => {
      document.getElementById('irInput').click();
    });

    document.getElementById('irInput').addEventListener('change', async (e) => {
      const file = e.target.files[0];
      if (file) {
        await this.audioEngine.loadIRFile(file);
        this.updateStatus(`Loaded IR: ${file.name}`);
      }
    });
  }

  /** Build one labelled overlay zone per effect across the video strip. */
  setupEffectZones() {
    const zonesContainer = document.getElementById('effectZones');
    zonesContainer.innerHTML = '';

    this.audioEngine.effectNames.forEach((name, index) => {
      const zone = document.createElement('div');
      zone.className = 'zone';
      zone.textContent = name.replace('_', ' ');
      zone.id = `zone-${index}`;
      zonesContainer.appendChild(zone);
    });
  }

  /** Route per-frame hand detections to the effect/parameter handlers. */
  setupGestureCallbacks() {
    this.gestureDetector.onHandsDetected = (redHand, yellowHand) => {
      if (redHand) {
        this.handleEffectSelection(redHand);
      }

      if (yellowHand) {
        this.handleParameterControl(yellowHand);
      } else {
        // Slowly ease the parameter back toward 0.5 when no hand is seen.
        if (this.audioEngine.audioContext && this.audioEngine.effects) {
          const currentParam = this.audioEngine.currentParam;
          this.audioEngine.updateEffectParameter(currentParam * 0.95 + 0.5 * 0.05);
        }
        this.updateParameterDisplay();
      }

      // Update hand status readout
      document.getElementById('handStatus').textContent =
        redHand || yellowHand ? 'Yes' : 'No';
    };
  }

  /**
   * Map the marker's horizontal position to an effect zone, switch the
   * engine to it, and fire a cue when the marker dwells in one zone.
   * @param {{x: number, y: number, width: number, height: number}} handBox
   */
  handleEffectSelection(handBox) {
    const centerX = handBox.x + handBox.width / 2;
    const canvasWidth = this.gestureDetector.canvas.width;
    const zoneWidth = canvasWidth / this.audioEngine.effectNames.length;
    const effectIndex = Math.floor(centerX / zoneWidth);
    const clampedIndex = Math.max(0, Math.min(effectIndex, this.audioEngine.effectNames.length - 1));

    // Update effect history for cue detection
    this.effectHistory.push(clampedIndex);
    if (this.effectHistory.length > 10) {
      this.effectHistory.shift();
    }

    // Cue fires after 10 consecutive detections in one zone, rate-limited
    // to once every 2 seconds.
    if (this.effectHistory.length === 10 &&
        this.effectHistory.every((i) => i === clampedIndex) &&
        Date.now() - this.lastCueTime > 2000) {
      this.triggerCue();
      this.lastCueTime = Date.now();
    }

    // Switch effect
    this.audioEngine.switchToEffect(clampedIndex);

    // Highlight the active zone
    document.querySelectorAll('.zone').forEach((zone, i) => {
      zone.classList.toggle('active', i === clampedIndex);
    });

    document.getElementById('currentEffect').textContent =
      this.audioEngine.effectNames[clampedIndex].replace('_', ' ');
  }

  /**
   * Map the marker's vertical position to a 0..1 parameter. The top 20%
   * and bottom 20% of the frame are dead zones pinned to 1.0 and 0.0.
   * @param {{x: number, y: number, width: number, height: number}} handBox
   */
  handleParameterControl(handBox) {
    const centerY = handBox.y + handBox.height / 2;
    const canvasHeight = this.gestureDetector.canvas.height;
    const paramMinRatio = 0.2;
    const paramMaxRatio = 0.8;
    const minY = paramMinRatio * canvasHeight;
    const maxY = paramMaxRatio * canvasHeight;

    let param;
    if (centerY <= minY) {
      param = 1.0;
    } else if (centerY >= maxY) {
      param = 0.0;
    } else {
      param = 1.0 - (centerY - minY) / (maxY - minY);
    }

    // Only update if the audio engine has been initialized
    if (this.audioEngine.audioContext) {
      this.audioEngine.updateEffectParameter(param);
    }
    this.updateParameterDisplay();
  }

  /** Reflect the engine's current parameter values in the side panel. */
  updateParameterDisplay() {
    // ?? rather than ||: 0 is a legitimate parameter value and must not
    // be silently replaced by the 0.5 default.
    const param = this.audioEngine.currentParam ?? 0.5;
    const rawParam = this.audioEngine.rawParam ?? 0.5;

    document.getElementById('paramValue').textContent = param.toFixed(2);
    document.getElementById('rawParamValue').textContent = rawParam.toFixed(2);
    document.getElementById('paramFill').style.height = `${param * 100}%`;
  }

  /** Flash the on-screen cue banner for one second. */
  triggerCue() {
    this.cueActive = true;
    const indicator = document.getElementById('cueIndicator');
    indicator.classList.add('active');

    setTimeout(() => {
      indicator.classList.remove('active');
      this.cueActive = false;
    }, 1000);
  }

  /** Initialize audio (the click unlocks the AudioContext) then the camera. */
  async startCamera() {
    try {
      // Initialize audio engine first if not already done
      await this.audioEngine.init();

      await this.gestureDetector.start();
      this.updateStatus('Camera active - wear colored gloves!');
      document.getElementById('startCamera').disabled = true;
    } catch (err) {
      console.error('Error starting camera:', err);
      this.updateStatus('Camera access denied');
    }
  }

  /** Toggle playback and keep the Play/Stop button label in sync. */
  togglePlayback() {
    if (this.audioEngine.isPlaying) {
      this.audioEngine.stop();
      document.getElementById('playPause').textContent = 'Play';
    } else {
      this.audioEngine.play();
      document.getElementById('playPause').textContent = 'Stop';
    }
  }

  /** Write a message to the status line. */
  updateStatus(message) {
    document.getElementById('status').textContent = message;
  }
}
| 343 | + | ||
// Initialize the app once the DOM is ready; the instance is exposed as
// window.app so it can be inspected from the browser console.
document.addEventListener('DOMContentLoaded', () => {
  window.app = new GestureDSPApp();
});
js/audio-engine.jsadded@@ -0,0 +1,460 @@ | |||
| 1 | +// js/audio-engine.js | ||
| 2 | +import { Effects } from './effects.js'; | ||
| 3 | + | ||
export class AudioEngine {
  /**
   * Audio playback and effects host.
   *
   * Owns the AudioContext, a looping AudioBufferSourceNode and a serial
   * chain of effects. Exactly one effect is "live" at a time; all others
   * are crossfaded to their bypass path (see switchToEffect()).
   */
  constructor() {
    this.audioContext = null;
    this.source = null;
    this.isPlaying = false;
    this.audioBuffer = null;

    // Gesture-driven control value, one-pole smoothed to avoid zipper noise.
    this.currentEffectIndex = 0;
    this.currentParam = 0.5;
    this.rawParam = 0.5;
    this.smoothedParam = 0.5;
    this.paramSmoothFactor = 0.2; // weight given to the previous smoothed value

    // Chain order; indices here match this.effectNodes after setup.
    this.effectNames = [
      'mid_side',
      'bitcrush',
      'lowpass',
      'highpass',
      'delay',
      'reverb',
      'spectral_freeze',
      'pitch_shift'
    ];

    this.effects = null;
    this.effectNodes = [];
  }

  /**
   * Lazily create the AudioContext, register the worklets and build the
   * effects graph. Safe to call multiple times; only the first call does work.
   */
  async init() {
    if (!this.audioContext) {
      this.audioContext = new (window.AudioContext || window.webkitAudioContext)();

      // Worklet modules must be registered before any AudioWorkletNode
      // is constructed inside Effects.
      await this.loadWorklets();

      // Setup audio graph
      this.setupAudioGraph();
    }
  }

  /**
   * Fetch a worklet source file and register it via a blob URL (works
   * around servers that serve .js with a non-JS MIME type).
   * FIX: the original never called URL.revokeObjectURL, leaking one blob
   * URL per module load; the URL is now released once addModule resolves.
   * @param {string} path - URL of the worklet module source.
   */
  async addWorkletFromUrl(path) {
    const response = await fetch(path);
    const code = await response.text();
    const blob = new Blob([code], { type: 'application/javascript' });
    const url = URL.createObjectURL(blob);
    try {
      await this.audioContext.audioWorklet.addModule(url);
    } finally {
      URL.revokeObjectURL(url);
    }
  }

  /** Load the external worklet files, falling back to inline copies. */
  async loadWorklets() {
    try {
      await this.addWorkletFromUrl('js/worklets/bitcrusher.js');
      await this.addWorkletFromUrl('js/worklets/spectral-freeze.js');
    } catch (err) {
      console.error('Error loading AudioWorklets:', err);
      console.log('Falling back to inline worklets');

      // Fallback: load worklets inline
      await this.loadInlineWorklets();
    }
  }

  /**
   * Register inline copies of both worklet processors. Used when the
   * external files cannot be fetched (e.g. file:// or offline).
   */
  async loadInlineWorklets() {
    // Inline worklet code as fallback
    const bitcrusherProcessor = `
      class BitcrusherProcessor extends AudioWorkletProcessor {
        static get parameterDescriptors() {
          return [{
            name: 'bitDepth',
            defaultValue: 8,
            minValue: 1,
            maxValue: 16,
            automationRate: 'k-rate'
          }];
        }

        process(inputs, outputs, parameters) {
          const input = inputs[0];
          const output = outputs[0];
          const bitDepth = parameters.bitDepth[0];

          const step = 2 / Math.pow(2, bitDepth);

          for (let channel = 0; channel < input.length; channel++) {
            const inputChannel = input[channel];
            const outputChannel = output[channel];

            for (let i = 0; i < inputChannel.length; i++) {
              const sample = inputChannel[i];
              outputChannel[i] = Math.round(sample / step) * step;
            }
          }

          return true;
        }
      }

      registerProcessor('bitcrusher-processor', BitcrusherProcessor);
    `;

    const spectralFreezeProcessor = `
      class SpectralFreezeProcessor extends AudioWorkletProcessor {
        constructor() {
          super();
          this.frozenSpectrum = null;
          this.isActive = false;
          this.port.onmessage = (e) => {
            if (e.data.type === 'setActive') {
              this.isActive = e.data.value;
              if (!this.isActive) {
                this.frozenSpectrum = null;
              }
            }
          };
        }

        static get parameterDescriptors() {
          return [{
            name: 'freeze',
            defaultValue: 0,
            minValue: 0,
            maxValue: 1,
            automationRate: 'k-rate'
          }];
        }

        process(inputs, outputs, parameters) {
          const input = inputs[0];
          const output = outputs[0];
          const freeze = parameters.freeze[0];

          if (freeze > 0.5 && !this.frozenSpectrum && input[0]) {
            this.frozenSpectrum = new Float32Array(input[0].length);
            for (let i = 0; i < input[0].length; i++) {
              this.frozenSpectrum[i] = input[0][i];
            }
          } else if (freeze <= 0.5) {
            this.frozenSpectrum = null;
          }

          for (let channel = 0; channel < input.length; channel++) {
            const inputChannel = input[channel];
            const outputChannel = output[channel];

            if (this.frozenSpectrum && freeze > 0.5) {
              for (let i = 0; i < outputChannel.length; i++) {
                const phase = Math.random() * 2 * Math.PI;
                outputChannel[i] = this.frozenSpectrum[i % this.frozenSpectrum.length] *
                                   Math.cos(phase) * 0.8;
              }
            } else {
              for (let i = 0; i < outputChannel.length; i++) {
                outputChannel[i] = inputChannel ? inputChannel[i] : 0;
              }
            }
          }

          return true;
        }
      }

      registerProcessor('spectral-freeze-processor', SpectralFreezeProcessor);
    `;

    // Register each inline module through a short-lived blob URL
    // (revoked after use — same fix as addWorkletFromUrl).
    const register = async (code) => {
      const blob = new Blob([code], { type: 'application/javascript' });
      const url = URL.createObjectURL(blob);
      try {
        await this.audioContext.audioWorklet.addModule(url);
      } finally {
        URL.revokeObjectURL(url);
      }
    };

    await register(bitcrusherProcessor);
    await register(spectralFreezeProcessor);
  }

  /** Create the input/output gains, the Effects bank and the serial chain. */
  setupAudioGraph() {
    // Create main input/output nodes
    this.inputGain = this.audioContext.createGain();
    this.outputGain = this.audioContext.createGain();

    // Create effects
    this.effects = new Effects(this.audioContext);

    // Create a simple serial effects chain
    this.setupSerialEffectsChain();

    this.outputGain.connect(this.audioContext.destination);
  }

  /**
   * Wire every effect into a serial chain. Each stage has a bypass gain
   * and an effect-input gain; switchToEffect() crossfades between them so
   * exactly one stage processes audio at a time.
   */
  setupSerialEffectsChain() {
    // Create bypass and effect paths for each effect
    this.effectNodes = [];

    let previousNode = this.inputGain;

    this.effectNames.forEach((name, i) => {
      const bypass = this.audioContext.createGain();
      const effectInput = this.audioContext.createGain();
      const mixer = this.audioContext.createGain();

      // Split signal to bypass and effect
      previousNode.connect(bypass);
      previousNode.connect(effectInput);

      // Connect effect
      const effectOutput = this.audioContext.createGain();

      switch (name) {
        case 'mid_side':
          effectInput.connect(this.effects.midSideIn);
          this.effects.midSideOut.connect(effectOutput);
          break;

        case 'bitcrush':
          effectInput.connect(this.effects.bitcrusher);
          this.effects.bitcrusher.connect(effectOutput);
          break;

        case 'lowpass':
          effectInput.connect(this.effects.lowpass);
          this.effects.lowpass.connect(effectOutput);
          break;

        case 'highpass':
          effectInput.connect(this.effects.highpass);
          this.effects.highpass.connect(effectOutput);
          break;

        case 'delay':
          effectInput.connect(this.effects.delay);
          effectInput.connect(this.effects.delayDry);
          this.effects.delayMix.connect(effectOutput);
          this.effects.delayDry.connect(effectOutput);
          break;

        case 'reverb':
          // FIX: the original also connected convolver -> reverbMix
          // directly, which doubled the wet signal and bypassed the
          // reverbFilter path Effects already wires
          // (convolver -> reverbFilter -> reverbMix).
          effectInput.connect(this.effects.convolver);
          effectInput.connect(this.effects.reverbDry);
          this.effects.reverbMix.connect(effectOutput);
          this.effects.reverbDry.connect(effectOutput);
          break;

        case 'spectral_freeze':
          effectInput.connect(this.effects.spectralFreeze);
          this.effects.spectralFreeze.connect(effectOutput);
          break;

        case 'pitch_shift':
          effectInput.connect(effectOutput); // Pass through, handled by playback rate
          break;
      }

      // Mix bypass and effect
      bypass.connect(mixer);
      effectOutput.connect(mixer);

      // Store node info
      this.effectNodes.push({
        name,
        bypass,
        effectInput,
        effectOutput,
        mixer,
        isActive: false
      });

      // Set initial state (all bypassed)
      bypass.gain.value = 1;
      effectInput.gain.value = 0;

      // Chain to next effect
      previousNode = mixer;
    });

    // Connect final node to output
    previousNode.connect(this.outputGain);
  }

  /**
   * Make the effect at `index` live and bypass every other stage.
   * Uses setTargetAtTime for a short (~10 ms) click-free crossfade.
   * @param {number} index - Index into this.effectNames.
   */
  switchToEffect(index) {
    this.effectNodes.forEach((node, i) => {
      if (i === index) {
        // Enable this effect
        node.bypass.gain.setTargetAtTime(0, this.audioContext.currentTime, 0.01);
        node.effectInput.gain.setTargetAtTime(1, this.audioContext.currentTime, 0.01);
        node.isActive = true;
      } else {
        // Bypass this effect
        node.bypass.gain.setTargetAtTime(1, this.audioContext.currentTime, 0.01);
        node.effectInput.gain.setTargetAtTime(0, this.audioContext.currentTime, 0.01);
        node.isActive = false;
      }
    });

    this.currentEffectIndex = index;
  }

  /**
   * Feed a new gesture value into the currently selected effect.
   * The raw value is exponentially smoothed before use.
   * @param {number} value - Raw control value in [0, 1].
   */
  updateEffectParameter(value) {
    // Don't update if not initialized
    if (!this.effects) {
      return;
    }

    this.rawParam = value;

    // One-pole smoothing: keep paramSmoothFactor of the old value.
    this.smoothedParam = this.paramSmoothFactor * this.smoothedParam +
                         (1 - this.paramSmoothFactor) * value;
    this.currentParam = this.smoothedParam;

    // Update the effect
    const effectName = this.effectNames[this.currentEffectIndex];
    this.effects.updateParameter(effectName, this.currentParam);

    // Pitch shift lives on the source's playbackRate, so it is handled
    // here rather than inside Effects.
    if (effectName === 'pitch_shift' && this.source && this.source.playbackRate) {
      // +/- 3 octaves with a soft curve around the centre position.
      const normalized = (this.currentParam - 0.5) * 2; // -1 to 1
      const semitones = normalized * Math.abs(normalized) * 36; // -36 to +36 with curve
      this.source.playbackRate.value = Math.pow(2, semitones / 12);

      // Extra detune wobble for microtonal effects at certain positions.
      if (this.source.detune) {
        this.source.detune.value = Math.sin(this.currentParam * Math.PI * 4) * 50;
      }
    }
  }

  /**
   * Decode raw audio bytes into this.audioBuffer.
   * @param {ArrayBuffer} arrayBuffer - Encoded audio data.
   * @returns {Promise<boolean>} true on success, false on decode failure.
   */
  async loadArrayBuffer(arrayBuffer) {
    await this.init();

    try {
      const audioBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
      this.audioBuffer = audioBuffer;
      return true;
    } catch (err) {
      console.error('Error decoding audio:', err);
      return false;
    }
  }

  /**
   * Decode a user-selected File object.
   * @param {File} file - Audio file chosen by the user.
   * @returns {Promise<boolean>} true on success.
   */
  async loadFile(file) {
    const arrayBuffer = await file.arrayBuffer();
    return this.loadArrayBuffer(arrayBuffer);
  }

  /**
   * Synthesize a 30-second stereo demo loop (bass + melody + drums) so
   * the app is usable without uploading a file.
   */
  async loadDemoAudio() {
    await this.init();

    const sampleRate = this.audioContext.sampleRate;
    const duration = 30;
    const buffer = this.audioContext.createBuffer(2, sampleRate * duration, sampleRate);

    for (let channel = 0; channel < 2; channel++) {
      const channelData = buffer.getChannelData(channel);

      for (let i = 0; i < channelData.length; i++) {
        const t = i / sampleRate;

        // Bass line: steps through 4 semitone offsets twice per second.
        const bassFreq = 110 * Math.pow(2, Math.floor(t * 2) % 4 / 12);
        const bass = Math.sin(2 * Math.PI * bassFreq * t) * 0.3;

        // Melody: 8-step pattern at 4 notes per second.
        const melodyPattern = [0, 3, 5, 7, 8, 7, 5, 3];
        const melodyNote = melodyPattern[Math.floor(t * 4) % 8];
        const melodyFreq = 440 * Math.pow(2, melodyNote / 12);
        const melody = Math.sin(2 * Math.PI * melodyFreq * t) * 0.2;

        // Drums: decaying kick every 0.5 s, noise hi-hat every 0.125 s.
        const kick = (t % 0.5 < 0.05) ? Math.sin(2 * Math.PI * 60 * t) * Math.exp(-t % 0.5 * 20) : 0;
        const hihat = (t % 0.125 < 0.02) ? (Math.random() * 2 - 1) * 0.1 * Math.exp(-t % 0.125 * 50) : 0;

        // Mix with stereo separation (melody panned per channel).
        const pan = channel === 0 ? 0.7 : 1.3;
        channelData[i] = (bass + melody * pan + kick + hihat) * 0.5;
      }
    }

    this.audioBuffer = buffer;
  }

  /**
   * Install a synthetic impulse response for the reverb.
   * FIX: the original dereferenced this.effects / this.audioContext
   * without ensuring init() had run, and an unknown preset produced a
   * NaN buffer length (createBuffer would throw). Unknown presets now
   * fall back to the default IR.
   * @param {string} preset - 'none' | 'room' | 'hall' | 'plate'.
   */
  async loadPresetIR(preset) {
    await this.init();

    // IR tail length in seconds per preset.
    const lengths = { room: 1, hall: 2, plate: 3 };

    if (preset === 'none' || !(preset in lengths)) {
      this.effects.createDefaultIR();
      return;
    }

    const length = this.audioContext.sampleRate * lengths[preset];
    const ir = this.audioContext.createBuffer(2, length, this.audioContext.sampleRate);

    for (let channel = 0; channel < 2; channel++) {
      const channelData = ir.getChannelData(channel);

      for (let i = 0; i < length; i++) {
        // Quadratic decay envelope over the whole tail.
        const decay = Math.pow(1 - i / length, 2);

        switch (preset) {
          case 'room':
            channelData[i] = (Math.random() * 2 - 1) * decay;
            break;
          case 'hall':
            channelData[i] = (Math.random() * 2 - 1) * decay *
              (1 + 0.5 * Math.sin(i / this.audioContext.sampleRate * 100));
            break;
          case 'plate':
            channelData[i] = (Math.random() * 2 - 1) * decay *
              Math.sin(i / this.audioContext.sampleRate * 2000);
            break;
        }
      }
    }

    this.effects.setImpulseResponse(ir);
  }

  /**
   * Decode a user-supplied impulse-response file and install it.
   * @param {File} file - Audio file to use as the reverb IR.
   * @returns {Promise<boolean>} true on success.
   */
  async loadIRFile(file) {
    await this.init();

    try {
      const arrayBuffer = await file.arrayBuffer();
      const irBuffer = await this.audioContext.decodeAudioData(arrayBuffer);
      this.effects.setImpulseResponse(irBuffer);
      return true;
    } catch (err) {
      console.error('Error loading IR:', err);
      return false;
    }
  }

  /**
   * Start looping playback of the loaded buffer.
   * FIX: calling play() while already playing used to stack a second
   * source on top of the first; the old source is now stopped first.
   * Also resumes a context suspended by the browser's autoplay policy.
   */
  play() {
    if (!this.audioBuffer) return;

    if (this.isPlaying) {
      this.stop();
    }

    if (this.audioContext.state === 'suspended') {
      // Fire-and-forget: playback starts as soon as the context resumes.
      this.audioContext.resume();
    }

    this.source = this.audioContext.createBufferSource();
    this.source.buffer = this.audioBuffer;
    this.source.loop = true;
    this.source.connect(this.inputGain);
    this.source.start();

    this.isPlaying = true;
  }

  /** Stop and release the current source, if any. */
  stop() {
    if (this.source) {
      this.source.stop();
      this.source.disconnect();
      this.source = null;
    }

    this.isPlaying = false;
  }
}
js/effects.jsadded@@ -0,0 +1,251 @@ | |||
| 1 | +// js/effects.js | ||
| 2 | + | ||
export class Effects {
  /**
   * Builds and owns every effect node used by the app: mid/side width,
   * bitcrusher (worklet), low/high-pass filters, modulated delay,
   * convolution reverb, spectral freeze (worklet) and a pitch-shift
   * placeholder. updateParameter() maps a single 0..1 control value onto
   * each effect's parameter curve.
   *
   * The worklet modules ('bitcrusher-processor', 'spectral-freeze-processor')
   * must already be registered on the context before construction.
   * @param {AudioContext} audioContext - Context to create nodes on.
   */
  constructor(audioContext) {
    this.context = audioContext;
    this.setupEffects();
  }

  /** Create every effect node and its static wiring. */
  setupEffects() {
    // Mid/Side processing
    this.setupMidSide();

    // Bitcrusher
    this.bitcrusher = new AudioWorkletNode(this.context, 'bitcrusher-processor');

    // Sweepable filters; cutoff and Q are driven by updateParameter().
    this.lowpass = this.context.createBiquadFilter();
    this.lowpass.type = 'lowpass';
    this.lowpass.frequency.value = 1000;

    this.highpass = this.context.createBiquadFilter();
    this.highpass.type = 'highpass';
    this.highpass.frequency.value = 1000;

    // Delay with modulation
    this.setupDelay();

    // LFO modulating the delay time for chorus/flanger colours.
    this.lfo = this.context.createOscillator();
    this.lfoGain = this.context.createGain();
    this.lfoGain.gain.value = 0.002; // Subtle modulation
    this.lfo.frequency.value = 0.5;
    this.lfo.connect(this.lfoGain);
    this.lfoGain.connect(this.delay.delayTime);
    this.lfo.start();

    // Convolution reverb with filter
    this.setupReverb();

    // Low-pass in the wet path darkens the reverb tail.
    this.reverbFilter = this.context.createBiquadFilter();
    this.reverbFilter.type = 'lowpass';
    this.reverbFilter.frequency.value = 5000;
    this.convolver.connect(this.reverbFilter);
    this.reverbFilter.connect(this.reverbMix);

    // Spectral freeze
    this.spectralFreeze = new AudioWorkletNode(this.context, 'spectral-freeze-processor');

    // Pitch shift (placeholder - uses playback rate in AudioEngine)
    this.pitchShift = this.context.createGain();

    // Limiter to tame clipping from the extreme parameter ranges.
    this.limiter = this.context.createDynamicsCompressor();
    this.limiter.threshold.value = -3;
    this.limiter.knee.value = 0;
    this.limiter.ratio.value = 20;
    this.limiter.attack.value = 0.001;
    this.limiter.release.value = 0.1;
  }

  /**
   * Mid/side matrix.
   * Encoder: mid = (L + R) / 2, side = (L - R) / 2.
   * Decoder: L = mid + side, R = mid - side.
   *
   * FIX: the original summed L + R into BOTH midGain and sideGain, so the
   * "side" path carried the mid signal (the width control had no audible
   * effect) and both paths were 6 dB hot. The scalers below implement the
   * actual M/S transform while keeping midGain/sideGain as the external
   * control points used by updateParameter().
   */
  setupMidSide() {
    this.midSideIn = this.context.createChannelSplitter(2);
    this.midSideOut = this.context.createChannelMerger(2);
    this.midGain = this.context.createGain();
    this.sideGain = this.context.createGain();
    this.sideGain.gain.value = 1.0;

    // Encoder scalers.
    const midScale = this.context.createGain();
    midScale.gain.value = 0.5;
    const sideScaleL = this.context.createGain();
    sideScaleL.gain.value = 0.5;
    const sideScaleR = this.context.createGain();
    sideScaleR.gain.value = -0.5; // invert R to form the difference signal

    // mid = (L + R) / 2
    this.midSideIn.connect(midScale, 0);
    this.midSideIn.connect(midScale, 1);
    midScale.connect(this.midGain);

    // side = (L - R) / 2
    this.midSideIn.connect(sideScaleL, 0);
    this.midSideIn.connect(sideScaleR, 1);
    sideScaleL.connect(this.sideGain);
    sideScaleR.connect(this.sideGain);

    // Reconstruct L/R from M/S
    this.midGain.connect(this.midSideOut, 0, 0);
    this.midGain.connect(this.midSideOut, 0, 1);
    this.sideGain.connect(this.midSideOut, 0, 0);

    // Invert phase for right channel side
    const sideInverter = this.context.createGain();
    sideInverter.gain.value = -1;
    this.sideGain.connect(sideInverter);
    sideInverter.connect(this.midSideOut, 0, 1);
  }

  /** Feedback delay with separate wet (delayMix) and dry (delayDry) taps. */
  setupDelay() {
    this.delay = this.context.createDelay(2); // up to 2 s of delay
    this.delay.delayTime.value = 0.3;
    this.delayFeedback = this.context.createGain();
    this.delayFeedback.gain.value = 0.4;
    this.delayMix = this.context.createGain();
    this.delayMix.gain.value = 0.5;
    this.delayDry = this.context.createGain();

    // Feedback loop
    this.delay.connect(this.delayFeedback);
    this.delayFeedback.connect(this.delay);
    this.delay.connect(this.delayMix);
  }

  /** Convolver plus wet/dry gains; starts with a synthetic default IR. */
  setupReverb() {
    this.convolver = this.context.createConvolver();
    this.reverbMix = this.context.createGain();
    this.reverbMix.gain.value = 0.5;
    this.reverbDry = this.context.createGain();

    // Create default impulse response
    this.createDefaultIR();
  }

  /** Install a 2-second exponentially-decaying noise burst as the IR. */
  createDefaultIR() {
    const length = this.context.sampleRate * 2; // 2 seconds
    const impulse = this.context.createBuffer(2, length, this.context.sampleRate);

    for (let channel = 0; channel < 2; channel++) {
      const channelData = impulse.getChannelData(channel);
      for (let i = 0; i < length; i++) {
        channelData[i] = (Math.random() * 2 - 1) * Math.pow(1 - i / length, 2);
      }
    }

    this.convolver.buffer = impulse;
  }

  /**
   * Replace the reverb impulse response.
   * @param {AudioBuffer} buffer - Decoded IR.
   */
  setImpulseResponse(buffer) {
    this.convolver.buffer = buffer;
  }

  /**
   * Look up the primary node for an effect name (output node for
   * multi-node effects such as mid_side and delay).
   * @param {string} name - One of the effect names used by AudioEngine.
   * @returns {AudioNode|undefined}
   */
  getEffect(name) {
    const effects = {
      'mid_side': this.midSideOut,
      'bitcrush': this.bitcrusher,
      'lowpass': this.lowpass,
      'highpass': this.highpass,
      'delay': this.delayMix,
      'reverb': this.reverbMix,
      'spectral_freeze': this.spectralFreeze,
      'pitch_shift': this.pitchShift
    };

    return effects[name];
  }

  /**
   * Wire an effect between two external nodes, handling the effects that
   * need multi-node routing (mid_side, delay, reverb) specially.
   * @param {string} name - Effect name.
   * @param {AudioNode} input - Node feeding the effect.
   * @param {AudioNode} output - Node the effect feeds.
   */
  connectEffect(name, input, output) {
    switch (name) {
      case 'mid_side':
        input.connect(this.midSideIn);
        this.midSideOut.connect(output);
        break;

      case 'delay':
        input.connect(this.delay);
        input.connect(this.delayDry);
        this.delayMix.connect(output);
        this.delayDry.connect(output);
        break;

      case 'reverb':
        input.connect(this.convolver);
        input.connect(this.reverbDry);
        this.convolver.connect(this.reverbMix);
        this.reverbMix.connect(output);
        this.reverbDry.connect(output);
        break;

      default: {
        // Single-node effects route straight through.
        const effect = this.getEffect(name);
        input.connect(effect);
        effect.connect(output);
      }
    }
  }

  /**
   * Map a single 0..1 control value onto the named effect's parameters.
   * Curves are intentionally extreme (see inline notes).
   * @param {string} effectName - Effect to update.
   * @param {number} value - Control value in [0, 1].
   */
  updateParameter(effectName, value) {
    switch (effectName) {
      case 'mid_side':
        // Stereo width from mono-ish to very wide; mid drops as side rises.
        this.sideGain.gain.value = value * 8.0;
        this.midGain.gain.value = 1 - (value * 0.8);
        break;

      case 'bitcrush': {
        // 8-bit down to 1-bit as value rises.
        const bitDepth = Math.max(1, 8 - (value * 7));
        this.bitcrusher.parameters.get('bitDepth').value = bitDepth;
        // TODO: Add sample rate reduction for more lofi effect
        break;
      }

      case 'lowpass': {
        // Exponential sweep 20 Hz -> 10 kHz with aggressive resonance.
        this.lowpass.frequency.value = 20 * Math.pow(500, value);
        this.lowpass.Q.value = 0.5 + (value * value * 30);
        // Gain compensation for the resonance (no-op for lowpass type,
        // kept for compatibility with nodes that expose gain).
        const lpGain = 1 - (value * value * 0.5);
        if (this.lowpass.gain) this.lowpass.gain.value = lpGain;
        break;
      }

      case 'highpass':
        // Exponential sweep 10 Hz -> 10 kHz with aggressive resonance.
        this.highpass.frequency.value = 10 * Math.pow(1000, value);
        this.highpass.Q.value = 0.5 + (value * value * 30);
        break;

      case 'delay': {
        // 1 ms -> 1.5 s (quadratic), feedback capped below self-oscillation.
        const delayTime = 0.001 + (value * value * 1.5);
        this.delay.delayTime.value = delayTime;
        this.delayFeedback.gain.value = Math.min(0.98, value * 1.1);
        this.delayMix.gain.value = value * 1.5; // wet can exceed dry

        // Speed up the LFO at short delays for chorus/flanger effects.
        if (value < 0.3 && this.lfo) {
          this.lfo.frequency.value = 2 + value * 10;
        }
        break;
      }

      case 'reverb':
        // Quadratic wet curve (up to 200% wet), linear dry fade-out.
        this.reverbMix.gain.value = value * value * 2;
        this.reverbDry.gain.value = 1 - value;

        // Darker reverb as it gets wetter.
        if (this.reverbFilter) {
          this.reverbFilter.frequency.value = 200 + (1 - value) * 5000;
        }
        break;

      case 'spectral_freeze':
        // Three zones: off, stutter (toggles with wall-clock time, so it
        // only stutters while the gesture keeps sending updates), full.
        if (value < 0.2) {
          this.spectralFreeze.parameters.get('freeze').value = 0;
        } else if (value < 0.5) {
          this.spectralFreeze.parameters.get('freeze').value =
            Math.sin(Date.now() * 0.01) > 0 ? 1 : 0;
        } else {
          this.spectralFreeze.parameters.get('freeze').value = 1;
        }
        break;

      case 'pitch_shift':
        // Handled in AudioEngine via source.playbackRate / detune.
        break;
    }
  }
}
js/gesture-detector.jsadded@@ -0,0 +1,238 @@ | |||
| 1 | +// js/gesture-detector.js | ||
| 2 | + | ||
| 3 | +export class GestureDetector { | ||
| 4 | + constructor() { | ||
| 5 | + this.video = null; | ||
| 6 | + this.canvas = document.getElementById('videoCanvas'); | ||
| 7 | + this.ctx = this.canvas.getContext('2d'); | ||
| 8 | + this.animationId = null; | ||
| 9 | + | ||
| 10 | + // Callback for detected hands | ||
| 11 | + this.onHandsDetected = null; | ||
| 12 | + | ||
| 13 | + // Detection parameters | ||
| 14 | + this.minHandSize = 5; // Very small | ||
| 15 | + this.maxHandY = 1.0; // No vertical restriction | ||
| 16 | + } | ||
| 17 | + | ||
| 18 | + async start() { | ||
| 19 | + const stream = await navigator.mediaDevices.getUserMedia({ | ||
| 20 | + video: { | ||
| 21 | + width: 640, | ||
| 22 | + height: 480, | ||
| 23 | + facingMode: 'user' | ||
| 24 | + } | ||
| 25 | + }); | ||
| 26 | + | ||
| 27 | + if (!this.video) { | ||
| 28 | + this.video = document.createElement('video'); | ||
| 29 | + this.video.width = 640; | ||
| 30 | + this.video.height = 480; | ||
| 31 | + this.video.autoplay = true; | ||
| 32 | + this.video.playsInline = true; | ||
| 33 | + } | ||
| 34 | + | ||
| 35 | + this.video.srcObject = stream; | ||
| 36 | + | ||
| 37 | + // Wait for video to be ready | ||
| 38 | + await new Promise((resolve) => { | ||
| 39 | + this.video.onloadedmetadata = resolve; | ||
| 40 | + }); | ||
| 41 | + | ||
| 42 | + this.startProcessing(); | ||
| 43 | + } | ||
| 44 | + | ||
| 45 | + stop() { | ||
| 46 | + if (this.animationId) { | ||
| 47 | + cancelAnimationFrame(this.animationId); | ||
| 48 | + this.animationId = null; | ||
| 49 | + } | ||
| 50 | + | ||
| 51 | + if (this.video && this.video.srcObject) { | ||
| 52 | + const tracks = this.video.srcObject.getTracks(); | ||
| 53 | + tracks.forEach(track => track.stop()); | ||
| 54 | + this.video.srcObject = null; | ||
| 55 | + } | ||
| 56 | + } | ||
| 57 | + | ||
| 58 | + startProcessing() { | ||
| 59 | + const process = () => { | ||
| 60 | + if (this.video && this.video.readyState === this.video.HAVE_ENOUGH_DATA) { | ||
| 61 | + // Draw video frame | ||
| 62 | + this.ctx.drawImage(this.video, 0, 0, this.canvas.width, this.canvas.height); | ||
| 63 | + | ||
| 64 | + // Get image data and detect hands | ||
| 65 | + const imageData = this.ctx.getImageData(0, 0, this.canvas.width, this.canvas.height); | ||
| 66 | + const detection = this.detectHands(imageData); | ||
| 67 | + | ||
| 68 | + // Draw detection boxes | ||
| 69 | + if (detection.redHand) { | ||
| 70 | + this.drawBox(detection.redHand, '#000000'); // Black outline for white objects | ||
| 71 | + } | ||
| 72 | + | ||
| 73 | + if (detection.yellowHand) { | ||
| 74 | + this.drawBox(detection.yellowHand, '#40e0d0'); // Turquoise for teal | ||
| 75 | + } | ||
| 76 | + | ||
| 77 | + // Call callback if registered | ||
| 78 | + if (this.onHandsDetected) { | ||
| 79 | + this.onHandsDetected(detection.redHand, detection.yellowHand); | ||
| 80 | + } | ||
| 81 | + } | ||
| 82 | + | ||
| 83 | + this.animationId = requestAnimationFrame(process); | ||
| 84 | + }; | ||
| 85 | + | ||
| 86 | + process(); | ||
| 87 | + } | ||
| 88 | + | ||
| 89 | + drawBox(box, color) { | ||
| 90 | + this.ctx.strokeStyle = color; | ||
| 91 | + this.ctx.lineWidth = 2; | ||
| 92 | + this.ctx.strokeRect(box.x, box.y, box.width, box.height); | ||
| 93 | + | ||
| 94 | + // Draw center point | ||
| 95 | + const centerX = box.x + box.width / 2; | ||
| 96 | + const centerY = box.y + box.height / 2; | ||
| 97 | + this.ctx.fillStyle = color; | ||
| 98 | + this.ctx.beginPath(); | ||
| 99 | + this.ctx.arc(centerX, centerY, 4, 0, 2 * Math.PI); | ||
| 100 | + this.ctx.fill(); | ||
| 101 | + } | ||
| 102 | + | ||
    detectHands(imageData) {
        // Classify every sampled pixel as a "white marker" or "teal marker"
        // and box each cluster. Returns { redHand, yellowHand } — the key
        // names are historical (earlier markers were red/yellow): redHand
        // carries the WHITE bounding box and yellowHand the TEAL one;
        // either may be null. Side effect: draws a debug crosshair and the
        // centre pixel's RGB readout onto this.ctx, and logs pixel counts.
        const data = imageData.data;
        const width = imageData.width;
        const height = imageData.height;

        // Pixel arrays for each color
        const whitePixels = [];
        const tealPixels = [];

        // Sample center pixel for debugging (RGBA stride is 4 bytes/pixel)
        const centerX = Math.floor(width / 2);
        const centerY = Math.floor(height / 2);
        const centerI = (centerY * width + centerX) * 4;
        const centerR = data[centerI];
        const centerG = data[centerI + 1];
        const centerB = data[centerI + 2];

        // Process every 2nd pixel for better performance
        for (let y = 0; y < height; y += 2) {
            for (let x = 0; x < width; x += 2) {
                const i = (y * width + x) * 4;
                const r = data[i];
                const g = data[i + 1];
                const b = data[i + 2];

                // Convert to HSV for better color discrimination
                const hsv = this.rgbToHsv(r/255, g/255, b/255);

                // BRIGHT WHITE detection
                // White should have very low saturation and high brightness
                const isWhite = hsv.s < 0.15 && // Very low saturation (near grayscale)
                               hsv.v > 0.75 && // High brightness
                               r > 190 && g > 190 && b > 190 && // All channels high
                               Math.abs(r - g) < 20 && // Channels close together
                               Math.abs(g - b) < 20 && // Neutral color
                               Math.abs(r - b) < 20; // No color cast

                // Alternative: Very bright neutral colors
                // (catches whites the HSV test misses, e.g. slightly tinted)
                const isBrightNeutral = (r + g + b) > 650 && // Total brightness
                                       Math.max(r, g, b) - Math.min(r, g, b) < 25; // Low variance

                if (isWhite || isBrightNeutral) {
                    whitePixels.push({ x, y });
                }

                // TEAL detection (keeping exactly as is - it works!)
                const isTeal = (hsv.h > 0.45 && hsv.h < 0.55) && // Cyan range
                              hsv.s > 0.3 && // Some saturation
                              hsv.v > 0.3 && // Not too dark
                              (g > r * 1.2 || b > r * 1.2); // Green or blue dominant

                if (isTeal) {
                    tealPixels.push({ x, y });
                }
            }
        }

        // Debug: Draw center crosshair and color info
        // (black outer square + white inner square reads on any background)
        this.ctx.strokeStyle = '#000000';
        this.ctx.lineWidth = 2;
        this.ctx.strokeRect(centerX - 10, centerY - 10, 20, 20);
        this.ctx.strokeStyle = '#ffffff';
        this.ctx.lineWidth = 1;
        this.ctx.strokeRect(centerX - 9, centerY - 9, 18, 18);

        // Show color at center with better contrast
        this.ctx.fillStyle = '#000000';
        this.ctx.fillRect(10, 10, 200, 30);
        this.ctx.fillStyle = '#ffffff';
        this.ctx.font = '12px monospace';
        this.ctx.fillText(`Center RGB: ${centerR},${centerG},${centerB}`, 15, 30);

        // Find bounding boxes (null when the cluster is too small/sparse/large)
        const whiteHand = this.getBoundingBox(whitePixels, height);
        const tealHand = this.getBoundingBox(tealPixels, height);

        // Debug logging
        if (whitePixels.length > 50 || tealPixels.length > 50) {
            console.log('White pixels:', whitePixels.length, 'Teal pixels:', tealPixels.length);
        }

        return {
            redHand: whiteHand,
            yellowHand: tealHand
        };
    }
| 189 | + | ||
| 190 | + rgbToHsv(r, g, b) { | ||
| 191 | + const max = Math.max(r, g, b); | ||
| 192 | + const min = Math.min(r, g, b); | ||
| 193 | + const diff = max - min; | ||
| 194 | + | ||
| 195 | + const v = max; | ||
| 196 | + const s = max === 0 ? 0 : diff / max; | ||
| 197 | + | ||
| 198 | + let h = 0; | ||
| 199 | + if (diff !== 0) { | ||
| 200 | + if (max === r) { | ||
| 201 | + h = ((g - b) / diff + (g < b ? 6 : 0)) / 6; | ||
| 202 | + } else if (max === g) { | ||
| 203 | + h = ((b - r) / diff + 2) / 6; | ||
| 204 | + } else { | ||
| 205 | + h = ((r - g) / diff + 4) / 6; | ||
| 206 | + } | ||
| 207 | + } | ||
| 208 | + | ||
| 209 | + return { h, s, v }; | ||
| 210 | + } | ||
| 211 | + | ||
| 212 | + getBoundingBox(pixels, frameHeight) { | ||
| 213 | + // Higher threshold to avoid noise | ||
| 214 | + if (pixels.length < 100) return null; | ||
| 215 | + | ||
| 216 | + const xs = pixels.map(p => p.x); | ||
| 217 | + const ys = pixels.map(p => p.y); | ||
| 218 | + | ||
| 219 | + const box = { | ||
| 220 | + x: Math.min(...xs), | ||
| 221 | + y: Math.min(...ys), | ||
| 222 | + width: Math.max(...xs) - Math.min(...xs), | ||
| 223 | + height: Math.max(...ys) - Math.min(...ys) | ||
| 224 | + }; | ||
| 225 | + | ||
| 226 | + // Reasonable size requirements | ||
| 227 | + if (box.width < 20 || box.height < 20) { | ||
| 228 | + return null; | ||
| 229 | + } | ||
| 230 | + | ||
| 231 | + // Reject if too large (probably background) | ||
| 232 | + if (box.width > frameHeight * 0.5 || box.height > frameHeight * 0.5) { | ||
| 233 | + return null; | ||
| 234 | + } | ||
| 235 | + | ||
| 236 | + return box; | ||
| 237 | + } | ||
| 238 | +} | ||
js/worklets/bitcrusher.jsadded@@ -0,0 +1,65 @@ | |||
| 1 | +// js/worklets/bitcrusher.js | ||
| 2 | + | ||
/**
 * Lo-fi "bitcrusher" worklet: quantizes samples to a reduced bit depth
 * and holds each sample for several frames to emulate a lower sample rate.
 */
class BitcrusherProcessor extends AudioWorkletProcessor {
    constructor() {
        super();
        // Per-channel sample-and-hold state, grown lazily so any channel
        // count works. (The original shared one phase counter across
        // channels — in stereo the counter advanced twice per frame,
        // desyncing the channels — and hard-coded two lastSample slots.)
        this.phase = [];
        this.lastSample = [];
    }

    static get parameterDescriptors() {
        return [{
            name: 'bitDepth',
            defaultValue: 8,
            minValue: 1,
            maxValue: 16,
            automationRate: 'k-rate'
        }, {
            name: 'sampleRateReduction',
            defaultValue: 1,
            minValue: 1,
            maxValue: 50,
            automationRate: 'k-rate'
        }];
    }

    /**
     * @param {Float32Array[][]} inputs
     * @param {Float32Array[][]} outputs
     * @param {Object<string, Float32Array>} parameters - k-rate params carry one value per quantum
     * @returns {boolean} true to keep the processor alive
     */
    process(inputs, outputs, parameters) {
        const input = inputs[0];
        const output = outputs[0];

        if (!input || !input[0]) {
            return true;
        }

        // k-rate parameters: read index 0 directly. (The original fell
        // back to the whole Float32Array via `[0] || parameters.bitDepth`.)
        const bitDepth = parameters.bitDepth[0];
        const sampleRateReduction = parameters.sampleRateReduction[0] ?? 1;
        // Quantization step for signals in [-1, 1].
        const step = 2 / Math.pow(2, bitDepth);

        for (let channel = 0; channel < input.length; channel++) {
            const inputChannel = input[channel];
            const outputChannel = output[channel];
            if (!outputChannel) continue; // fewer output than input channels

            // Lazily initialize this channel's hold state.
            if (this.lastSample[channel] === undefined) {
                this.lastSample[channel] = 0;
                this.phase[channel] = 0;
            }

            for (let i = 0; i < inputChannel.length; i++) {
                // Sample rate reduction: refresh the held sample only
                // every `sampleRateReduction` frames.
                if (++this.phase[channel] >= sampleRateReduction) {
                    this.phase[channel] = 0;
                    // Quantize the sample to the reduced bit depth.
                    this.lastSample[channel] = Math.round(inputChannel[i] / step) * step;

                    // Add some aliasing artifacts for extra grit at very low depths
                    if (bitDepth < 4) {
                        this.lastSample[channel] *= (1 + Math.random() * 0.1 - 0.05);
                    }
                }

                outputChannel[i] = this.lastSample[channel];
            }
        }

        return true;
    }
}

registerProcessor('bitcrusher-processor', BitcrusherProcessor);
js/worklets/spectral-freeze.jsadded@@ -0,0 +1,112 @@ | |||
| 1 | +// js/worklets/spectral-freeze.js | ||
| 2 | + | ||
/**
 * "Freeze" worklet: continuously records the last `bufferSize` samples;
 * when the `freeze` parameter goes high it loops that snapshot (with a
 * little random amplitude shimmer), crossfading between the live and
 * frozen signal on both transitions.
 *
 * NOTE(review): capture/playback state is shared across channels, so with
 * stereo input every channel after the first re-advances the loop index —
 * effectively a mono freeze. Kept as-is; confirm intent before making the
 * state per-channel.
 */
class SpectralFreezeProcessor extends AudioWorkletProcessor {
    constructor() {
        super();
        this.frozenBuffer = null;    // snapshot being looped, or null
        this.bufferIndex = 0;        // read position in frozenBuffer
        this.fadeIn = 0;             // samples elapsed of the freeze-in fade
        this.fadeOut = 0;            // samples elapsed of the release fade
        this.isTransitioning = false;
        this.isReleasing = false;    // true while the frozen loop fades out

        // Ring buffer continuously recording the live input.
        this.bufferSize = 2048;
        this.captureBuffer = new Float32Array(this.bufferSize);
        this.captureIndex = 0;

        // Crossfade duration in samples
        this.fadeLength = 128;
    }

    static get parameterDescriptors() {
        return [{
            name: 'freeze',
            defaultValue: 0,
            minValue: 0,
            maxValue: 1,
            automationRate: 'k-rate'
        }];
    }

    /**
     * @param {Float32Array[][]} inputs
     * @param {Float32Array[][]} outputs
     * @param {Object<string, Float32Array>} parameters
     * @returns {boolean} true to keep the processor alive
     */
    process(inputs, outputs, parameters) {
        const input = inputs[0];
        const output = outputs[0];

        if (!input || !input[0]) {
            return true;
        }

        // k-rate: one value per render quantum. (The original read
        // `parameters.freeze[0] || parameters.freeze`, which fell back to
        // the whole Float32Array whenever the value was 0.)
        const shouldFreeze = parameters.freeze[0] > 0.5;

        for (let channel = 0; channel < output.length; channel++) {
            const inputChannel = input[channel];
            const outputChannel = output[channel];

            for (let i = 0; i < outputChannel.length; i++) {
                const inputSample = inputChannel ? inputChannel[i] : 0;

                // Keep recording the live signal while not frozen.
                if (!shouldFreeze) {
                    this.captureBuffer[this.captureIndex] = inputSample;
                    this.captureIndex = (this.captureIndex + 1) % this.bufferSize;
                }

                if (shouldFreeze && !this.frozenBuffer) {
                    // Freeze engaged: snapshot the capture buffer once.
                    this.frozenBuffer = new Float32Array(this.captureBuffer);
                    this.bufferIndex = 0;
                    this.isTransitioning = true;
                    this.isReleasing = false;
                    this.fadeIn = 0;
                } else if (!shouldFreeze && this.frozenBuffer && !this.isReleasing) {
                    // Freeze released: start the fade-out exactly once.
                    // (BUG FIX: the original reset fadeOut to 0 on every
                    // sample while frozenBuffer existed, so the fade never
                    // completed and the frozen loop played forever.)
                    this.isReleasing = true;
                    this.isTransitioning = true;
                    this.fadeOut = 0;
                }

                if (this.frozenBuffer && shouldFreeze) {
                    // Loop the frozen snapshot with ±1% amplitude variation.
                    const frozenSample = this.frozenBuffer[this.bufferIndex];
                    const variation = 1 + (Math.random() - 0.5) * 0.02;
                    let outputSample = frozenSample * variation;

                    // Crossfade from the live input into the frozen loop.
                    if (this.isTransitioning && this.fadeIn < this.fadeLength) {
                        const fadeGain = this.fadeIn / this.fadeLength;
                        outputSample = inputSample * (1 - fadeGain) + outputSample * fadeGain;
                        this.fadeIn++;
                        if (this.fadeIn >= this.fadeLength) {
                            this.isTransitioning = false;
                        }
                    }

                    outputChannel[i] = outputSample;
                    this.bufferIndex = (this.bufferIndex + 1) % this.bufferSize;
                } else if (this.frozenBuffer && !shouldFreeze) {
                    // Crossfade from the frozen loop back to the live input.
                    const frozenSample = this.frozenBuffer[this.bufferIndex];
                    const fadeGain = 1 - (this.fadeOut / this.fadeLength);

                    outputChannel[i] = inputSample * (1 - fadeGain) + frozenSample * fadeGain;

                    this.bufferIndex = (this.bufferIndex + 1) % this.bufferSize;
                    this.fadeOut++;

                    if (this.fadeOut >= this.fadeLength) {
                        // Fade complete: release the snapshot.
                        this.frozenBuffer = null;
                        this.isTransitioning = false;
                        this.isReleasing = false;
                    }
                } else {
                    // Pass through
                    outputChannel[i] = inputSample;
                }
            }
        }

        return true;
    }
}

registerProcessor('spectral-freeze-processor', SpectralFreezeProcessor);