TypeScript · 5190 bytes Raw Blame History
1 import React from "react";
2 import {
3 isKeyword,
4 isOption,
5 isPath,
6 isNumber,
7 matchOperator,
8 isPrefixCommand,
9 } from "@/lib/shell-tokens";
10
/**
 * Semantic categories a shell-script token can be classified into.
 * Each value maps 1:1 to a CSS class in `classMap` below.
 */
type TokenType =
  | "command"   // first word of a simple command (e.g. `grep`)
  | "keyword"   // shell reserved word (`if`, `for`, ...) per isKeyword()
  | "option"    // command flag (e.g. `-r`, `--verbose`) per isOption()
  | "string"    // single- or double-quoted literal, quotes included
  | "variable"  // `$name`, `${...}`, `$(...)`, or special `$?`-style params
  | "comment"   // `#` to end of line
  | "operator"  // pipe, redirect, separator, etc. per matchOperator()
  | "number"    // numeric literal per isNumber()
  | "path"      // path-looking word per isPath()
  | "text"      // whitespace or anything unclassified
  | "prompt";   // leading `$ ` marker on an example command line

/** A single highlighted span: its category plus the exact source text. */
interface Token {
  type: TokenType;
  value: string;
}
28
/**
 * Tokenizes one line of shell source (with any `$ ` prompt already stripped)
 * into a flat list of typed spans. The concatenation of all token values
 * reproduces the input line exactly — no text is dropped.
 *
 * Works as a single left-to-right pass. Branch order matters: whitespace,
 * then comment, then quotes, then `$`-forms, then operators, then bare
 * words — earlier branches must win so e.g. `#` inside a string is not
 * treated as a comment start.
 *
 * `expectCommand` tracks whether the next bare word sits in command
 * position (start of line, or right after `|`, `&&`, `;`, etc.).
 */
function tokenizeLine(line: string): Token[] {
  const tokens: Token[] = [];
  let i = 0;
  let expectCommand = true;

  while (i < line.length) {
    const remaining = line.slice(i);

    // Run of whitespace — emitted as plain text so output round-trips.
    if (/^\s/.test(remaining)) {
      let end = 0;
      while (end < remaining.length && /\s/.test(remaining[end])) end++;
      tokens.push({ type: "text", value: remaining.slice(0, end) });
      i += end;
      continue;
    }

    // Comment: `#` consumes the rest of the line.
    if (remaining[0] === "#") {
      tokens.push({ type: "comment", value: remaining });
      break;
    }

    // Single-quoted string: no escapes inside (shell semantics).
    // An unterminated quote consumes to end of line.
    if (remaining[0] === "'") {
      let end = 1;
      while (end < remaining.length && remaining[end] !== "'") end++;
      if (end < remaining.length) end++; // include the closing quote
      tokens.push({ type: "string", value: remaining.slice(0, end) });
      i += end;
      expectCommand = false;
      continue;
    }

    // Double-quoted string: backslash escapes the next character,
    // so `\"` does not terminate the string.
    if (remaining[0] === '"') {
      let end = 1;
      while (end < remaining.length && remaining[end] !== '"') {
        if (remaining[end] === "\\" && end + 1 < remaining.length) end++;
        end++;
      }
      if (end < remaining.length) end++; // include the closing quote
      tokens.push({ type: "string", value: remaining.slice(0, end) });
      i += end;
      expectCommand = false;
      continue;
    }

    // `$`-forms: `${...}` and `$(...)` are matched with brace/paren depth
    // counting (so nesting like `$(dirname $(pwd))` stays one token);
    // `$?`, `$#`, `$@`, `$*`, `$$`, `$!`, `$_`, `$0`-`$9` are two-char
    // special parameters; otherwise a plain `$name` identifier.
    if (remaining[0] === "$") {
      let end = 1;
      if (remaining[1] === "{") {
        let depth = 1;
        end = 2;
        while (end < remaining.length && depth > 0) {
          if (remaining[end] === "{") depth++;
          if (remaining[end] === "}") depth--;
          end++;
        }
      } else if (remaining[1] === "(") {
        let depth = 1;
        end = 2;
        while (end < remaining.length && depth > 0) {
          if (remaining[end] === "(") depth++;
          if (remaining[end] === ")") depth--;
          end++;
        }
      } else if (/[?#@*$!_0-9]/.test(remaining[1] || "")) {
        end = 2;
      } else {
        while (end < remaining.length && /[a-zA-Z0-9_]/.test(remaining[end])) end++;
      }
      tokens.push({ type: "variable", value: remaining.slice(0, end) });
      i += end;
      expectCommand = false;
      continue;
    }

    // Operator (longest match is matchOperator's job). Control operators
    // put the parser back into command position for the next word.
    const op = matchOperator(remaining);
    if (op) {
      tokens.push({ type: "operator", value: op });
      i += op.length;
      if (["|", "&&", "||", ";", "&", "(", "{"].includes(op)) expectCommand = true;
      continue;
    }

    // Bare word: everything up to whitespace, quote, or shell metacharacter.
    const wordMatch = remaining.match(/^[^\s'"$#|&;<>(){}[\]]+/);
    if (wordMatch) {
      const word = wordMatch[0];
      let type: TokenType = "text";

      if (expectCommand) {
        if (isKeyword(word)) {
          type = "keyword";
        } else if (isPrefixCommand(word)) {
          // Prefix commands (presumably `sudo`/`env`-style wrappers — the
          // predicate lives in @/lib/shell-tokens) are left as plain text
          // and keep expectCommand true, so the NEXT word is the command.
          type = "text";
        } else {
          type = "command";
          expectCommand = false;
        }
        // Keywords that introduce a new command context keep it armed.
        // NOTE(review): the `{` / `(` entries here look unreachable — the
        // word regex above excludes both characters; harmless dead entries.
        if (["if", "then", "else", "elif", "do", "while", "until", "for", "case", "in", "{", "("].includes(word)) {
          expectCommand = true;
        }
      } else {
        // Argument position: classify by shape, keywords re-arm command mode.
        if (isOption(word)) type = "option";
        else if (isPath(word)) type = "path";
        else if (isNumber(word)) type = "number";
        else if (isKeyword(word)) {
          type = "keyword";
          expectCommand = true;
        }
      }

      tokens.push({ type, value: word });
      i += word.length;
      continue;
    }

    // Fallback: single unmatched metacharacter (e.g. stray `]`) as text,
    // guaranteeing forward progress.
    tokens.push({ type: "text", value: remaining[0] });
    i++;
  }

  return tokens;
}
148
149 function tokenize(code: string): Token[][] {
150 return code.split("\n").map((line) => {
151 const trimmed = line.trimStart();
152 const leadingWs = line.slice(0, line.length - trimmed.length);
153 const tokens: Token[] = [];
154 if (leadingWs) tokens.push({ type: "text", value: leadingWs });
155 if (trimmed.startsWith("$ ")) {
156 tokens.push({ type: "prompt", value: "$ " });
157 tokens.push(...tokenizeLine(trimmed.slice(2)));
158 } else {
159 tokens.push(...tokenizeLine(trimmed));
160 }
161 return tokens;
162 });
163 }
164
// Maps each token category to its CSS class. Record<TokenType, string>
// makes the compiler enforce that every TokenType variant has an entry.
const classMap: Record<TokenType, string> = {
  command: "sh-command",
  keyword: "sh-keyword",
  option: "sh-option",
  string: "sh-string",
  variable: "sh-variable",
  comment: "sh-comment",
  operator: "sh-operator",
  number: "sh-number",
  path: "sh-path",
  text: "sh-text",
  prompt: "sh-prompt",
};
178
/** Props for {@link ShellHighlighter}. */
interface ShellHighlighterProps {
  /** Shell source to highlight; may span multiple lines. */
  code: string;
  /** Extra class name(s) forwarded to the rendered `<code>` element. */
  className?: string;
}
183
184 export default function ShellHighlighter({ code, className = "" }: ShellHighlighterProps) {
185 const tokenizedLines = tokenize(code);
186
187 return (
188 <code className={className}>
189 {tokenizedLines.map((tokens, lineIndex) => (
190 <React.Fragment key={lineIndex}>
191 {tokens.map((token, tokenIndex) => (
192 <span key={tokenIndex} className={classMap[token.type]}>
193 {token.value}
194 </span>
195 ))}
196 {lineIndex < tokenizedLines.length - 1 && "\n"}
197 </React.Fragment>
198 ))}
199 </code>
200 );
201 }