// LuaHighlighter — lightweight, line-based Lua syntax highlighter (React component).
import React from "react";
2
3 type TokenType = "keyword" | "string" | "comment" | "number" | "function" | "operator" | "text";
4
5 interface Token {
6 type: TokenType;
7 value: string;
8 }
9
10 const LUA_KEYWORDS = new Set([
11 "and", "break", "do", "else", "elseif", "end", "false", "for",
12 "function", "goto", "if", "in", "local", "nil", "not", "or",
13 "repeat", "return", "then", "true", "until", "while",
14 ]);
15
16 function tokenizeLua(code: string): Token[][] {
17 return code.split("\n").map((line) => {
18 const tokens: Token[] = [];
19 let i = 0;
20
21 while (i < line.length) {
22 const remaining = line.slice(i);
23
24 // Line comment
25 if (remaining.startsWith("--")) {
26 tokens.push({ type: "comment", value: remaining });
27 break;
28 }
29
30 // Whitespace
31 if (/^\s/.test(remaining)) {
32 let end = 0;
33 while (end < remaining.length && /\s/.test(remaining[end])) end++;
34 tokens.push({ type: "text", value: remaining.slice(0, end) });
35 i += end;
36 continue;
37 }
38
39 // String (double-quoted)
40 if (remaining[0] === '"') {
41 let end = 1;
42 while (end < remaining.length && remaining[end] !== '"') {
43 if (remaining[end] === "\\") end++;
44 end++;
45 }
46 if (end < remaining.length) end++;
47 tokens.push({ type: "string", value: remaining.slice(0, end) });
48 i += end;
49 continue;
50 }
51
52 // String (single-quoted)
53 if (remaining[0] === "'") {
54 let end = 1;
55 while (end < remaining.length && remaining[end] !== "'") {
56 if (remaining[end] === "\\") end++;
57 end++;
58 }
59 if (end < remaining.length) end++;
60 tokens.push({ type: "string", value: remaining.slice(0, end) });
61 i += end;
62 continue;
63 }
64
65 // Number
66 if (/^-?\d/.test(remaining) || (remaining[0] === "." && /\d/.test(remaining[1] || ""))) {
67 const m = remaining.match(/^-?(?:0x[0-9a-fA-F]+|\d+(?:\.\d+)?(?:[eE][+-]?\d+)?)/);
68 if (m) {
69 tokens.push({ type: "number", value: m[0] });
70 i += m[0].length;
71 continue;
72 }
73 }
74
75 // Operators
76 if (/^[=~<>+\-*/%^#.,:{}()\[\]]/.test(remaining)) {
77 const m = remaining.match(/^(?:\.\.\.?|[~<>=]=|<<|>>|\/\/|[=~<>+\-*/%^#,.:{}()\[\]])/);
78 if (m) {
79 tokens.push({ type: "operator", value: m[0] });
80 i += m[0].length;
81 continue;
82 }
83 }
84
85 // Word (keyword, function name, or identifier)
86 const wordMatch = remaining.match(/^[a-zA-Z_][a-zA-Z0-9_]*/);
87 if (wordMatch) {
88 const word = wordMatch[0];
89 if (LUA_KEYWORDS.has(word)) {
90 tokens.push({ type: "keyword", value: word });
91 } else {
92 // Check if followed by ( to detect function calls
93 const after = line.slice(i + word.length).trimStart();
94 if (after.startsWith("(") || after.startsWith('"') || after.startsWith("'")) {
95 tokens.push({ type: "function", value: word });
96 } else {
97 tokens.push({ type: "text", value: word });
98 }
99 }
100 i += word.length;
101 continue;
102 }
103
104 tokens.push({ type: "text", value: remaining[0] });
105 i++;
106 }
107
108 return tokens;
109 });
110 }
// Maps each token category to the CSS class applied to its <span>;
// the actual colors/styles live in the stylesheet defining these classes.
const classMap: Record<TokenType, string> = {
  keyword: "lua-keyword",
  string: "lua-string",
  comment: "lua-comment",
  number: "lua-number",
  function: "lua-function",
  operator: "lua-operator",
  text: "lua-text",
};
/** Props for the LuaHighlighter component. */
interface LuaHighlighterProps {
  /** Lua source text to highlight; may contain newlines. */
  code: string;
  /** Extra class name(s) forwarded to the rendered <code> element. */
  className?: string;
}
127 export default function LuaHighlighter({ code, className = "" }: LuaHighlighterProps) {
128 const tokenizedLines = tokenizeLua(code);
129
130 return (
131 <code className={className}>
132 {tokenizedLines.map((tokens, lineIndex) => (
133 <React.Fragment key={lineIndex}>
134 {tokens.map((token, tokenIndex) => (
135 <span key={tokenIndex} className={classMap[token.type]}>
136 {token.value}
137 </span>
138 ))}
139 {lineIndex < tokenizedLines.length - 1 && "\n"}
140 </React.Fragment>
141 ))}
142 </code>
143 );
144 }