scripts/CodeMirror/mode/crystal/crystal.js
// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: https://codemirror.net/LICENSE

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
  "use strict";

  CodeMirror.defineMode("crystal", function(config) {
    function wordRegExp(words, end) {
      return new RegExp((end ? "" : "^") + "(?:" + words.join("|") + ")" + (end ? "$" : "\\b"));
    }

    function chain(tokenize, stream, state) {
      state.tokenize.push(tokenize);
      return tokenize(stream, state);
    }

    var operators = /^(?:[-+/%|&^]|\*\*?|[<>]{2})/;
    var conditionalOperators = /^(?:[=!]~|===|<=>|[<>=!]=?|[|&]{2}|~)/;
    var indexingOperators = /^(?:\[\][?=]?)/;
    var anotherOperators = /^(?:\.(?:\.{2})?|->|[?:])/;
    var idents = /^[a-z_\u009F-\uFFFF][a-zA-Z0-9_\u009F-\uFFFF]*/;
    var types = /^[A-Z_\u009F-\uFFFF][a-zA-Z0-9_\u009F-\uFFFF]*/;
    var keywords = wordRegExp([
      "abstract", "alias", "as", "asm", "begin", "break", "case", "class", "def", "do",
      "else", "elsif", "end", "ensure", "enum", "extend", "for", "fun", "if",
      "include", "instance_sizeof", "lib", "macro", "module", "next", "of", "out", "pointerof",
      "private", "protected", "rescue", "return", "require", "select", "sizeof", "struct",
      "super", "then", "type", "typeof", "uninitialized", "union", "unless", "until", "when", "while", "with",
      "yield", "__DIR__", "__END_LINE__", "__FILE__", "__LINE__"
    ]);
    var atomWords = wordRegExp(["true", "false", "nil", "self"]);
    var indentKeywordsArray = [
      "def", "fun", "macro",
      "class", "module", "struct", "lib", "enum", "union",
      "do", "for"
    ];
    var indentKeywords = wordRegExp(indentKeywordsArray);
    var indentExpressionKeywordsArray = ["if", "unless", "case", "while", "until", "begin", "then"];
    var indentExpressionKeywords = wordRegExp(indentExpressionKeywordsArray);
    var dedentKeywordsArray = ["end", "else", "elsif", "rescue", "ensure"];
    var dedentKeywords = wordRegExp(dedentKeywordsArray);
    var dedentPunctualsArray = ["\\)", "\\}", "\\]"];
    var dedentPunctuals = new RegExp("^(?:" + dedentPunctualsArray.join("|") + ")$");
    var nextTokenizer = {
      "def": tokenFollowIdent, "fun": tokenFollowIdent, "macro": tokenMacroDef,
      "class": tokenFollowType, "module": tokenFollowType, "struct": tokenFollowType,
      "lib": tokenFollowType, "enum": tokenFollowType, "union": tokenFollowType
    };
    var matching = {"[": "]", "{": "}", "(": ")", "<": ">"};

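    // tokenBase is the default tokenizer; the specialized tokenizers below are
    // pushed onto state.tokenize for nested constructs and pop themselves off
    // again when their construct ends.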
    function tokenBase(stream, state) {
      if (stream.eatSpace()) {
        return null;
      }

      // Macros
      if (state.lastToken != "\\" && stream.match("{%", false)) {
        return chain(tokenMacro("%", "%"), stream, state);
      }

      if (state.lastToken != "\\" && stream.match("{{", false)) {
        return chain(tokenMacro("{", "}"), stream, state);
      }

      // Comments
      if (stream.peek() == "#") {
        stream.skipToEnd();
        return "comment";
      }

      // Variables and keywords
      var matched;
      if (stream.match(idents)) {
        stream.eat(/[?!]/);

        matched = stream.current();
        if (stream.eat(":")) {
          return "atom";
        } else if (state.lastToken == ".") {
          return "property";
        } else if (keywords.test(matched)) {
          if (indentKeywords.test(matched)) {
            if (!(matched == "fun" && state.blocks.indexOf("lib") >= 0) && !(matched == "def" && state.lastToken == "abstract")) {
              state.blocks.push(matched);
              state.currentIndent += 1;
            }
          } else if ((state.lastStyle == "operator" || !state.lastStyle) && indentExpressionKeywords.test(matched)) {
            state.blocks.push(matched);
            state.currentIndent += 1;
          } else if (matched == "end") {
            state.blocks.pop();
            state.currentIndent -= 1;
          }

          if (nextTokenizer.hasOwnProperty(matched)) {
            state.tokenize.push(nextTokenizer[matched]);
          }

          return "keyword";
        } else if (atomWords.test(matched)) {
          return "atom";
        }

        return "variable";
      }

      // Class variables and instance variables
      // or attributes
      if (stream.eat("@")) {
        if (stream.peek() == "[") {
          return chain(tokenNest("[", "]", "meta"), stream, state);
        }

        stream.eat("@");
        stream.match(idents) || stream.match(types);
        return "variable-2";
      }

      // Constants and types
      if (stream.match(types)) {
        return "tag";
      }

      // Symbols or ':' operator
      if (stream.eat(":")) {
        if (stream.eat("\"")) {
          return chain(tokenQuote("\"", "atom", false), stream, state);
        } else if (stream.match(idents) || stream.match(types) ||
                   stream.match(operators) || stream.match(conditionalOperators) || stream.match(indexingOperators)) {
          return "atom";
        }
        stream.eat(":");
        return "operator";
      }

      // Strings
      if (stream.eat("\"")) {
        return chain(tokenQuote("\"", "string", true), stream, state);
      }

      // Strings or regexps or macro variables or '%' operator
      if (stream.peek() == "%") {
        var style = "string";
        var embed = true;
        var delim;

        if (stream.match("%r")) {
          // Regexps
          style = "string-2";
          delim = stream.next();
        } else if (stream.match("%w")) {
          embed = false;
          delim = stream.next();
        } else if (stream.match("%q")) {
          embed = false;
          delim = stream.next();
        } else {
          if (delim = stream.match(/^%([^\w\s=])/)) {
            delim = delim[1];
          } else if (stream.match(/^%[a-zA-Z0-9_\u009F-\uFFFF]*/)) {
            // Macro variables
            return "meta";
          } else {
            // '%' operator
            return "operator";
          }
        }

        if (matching.hasOwnProperty(delim)) {
          delim = matching[delim];
        }
        return chain(tokenQuote(delim, style, embed), stream, state);
      }

      // Here Docs
      if (matched = stream.match(/^<<-('?)([A-Z]\w*)\1/)) {
        return chain(tokenHereDoc(matched[2], !matched[1]), stream, state);
      }

      // Characters
      if (stream.eat("'")) {
        stream.match(/^(?:[^']|\\(?:[befnrtv0'"]|[0-7]{3}|u(?:[0-9a-fA-F]{4}|\{[0-9a-fA-F]{1,6}\})))/);
        stream.eat("'");
        return "atom";
      }

      // Numbers
      if (stream.eat("0")) {
        if (stream.eat("x")) {
          stream.match(/^[0-9a-fA-F]+/);
        } else if (stream.eat("o")) {
          stream.match(/^[0-7]+/);
        } else if (stream.eat("b")) {
          stream.match(/^[01]+/);
        }
        return "number";
      }

      if (stream.eat(/^\d/)) {
        stream.match(/^\d*(?:\.\d+)?(?:[eE][+-]?\d+)?/);
        return "number";
      }

      // Operators
      if (stream.match(operators)) {
        stream.eat("="); // An operator may be followed by "=", forming a compound assignment.
        return "operator";
      }

      if (stream.match(conditionalOperators) || stream.match(anotherOperators)) {
        return "operator";
      }

      // Parens and braces
      if (matched = stream.match(/[({[]/, false)) {
        matched = matched[0];
        return chain(tokenNest(matched, matching[matched], null), stream, state);
      }

      // Escapes
      if (stream.eat("\\")) {
        stream.next();
        return "meta";
      }

      stream.next();
      return null;
    }

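    // tokenNest tracks one bracketed region: on the opening delimiter it swaps
    // itself for a "started" copy and bumps the indent, and it pops (and
    // dedents) once tokenBase has consumed the matching closing delimiter.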
    function tokenNest(begin, end, style, started) {
      return function (stream, state) {
        if (!started && stream.match(begin)) {
          state.tokenize[state.tokenize.length - 1] = tokenNest(begin, end, style, true);
          state.currentIndent += 1;
          return style;
        }

        var nextStyle = tokenBase(stream, state);
        if (stream.current() === end) {
          state.tokenize.pop();
          state.currentIndent -= 1;
          nextStyle = style;
        }

        return nextStyle;
      };
    }

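    // tokenMacro highlights the {% ... %} and {{ ... }} macro delimiters as
    // "meta" and hands everything between them back to tokenBase.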
    function tokenMacro(begin, end, started) {
      return function (stream, state) {
        if (!started && stream.match("{" + begin)) {
          state.currentIndent += 1;
          state.tokenize[state.tokenize.length - 1] = tokenMacro(begin, end, true);
          return "meta";
        }

        if (stream.match(end + "}")) {
          state.currentIndent -= 1;
          state.tokenize.pop();
          return "meta";
        }

        return tokenBase(stream, state);
      };
    }

    function tokenMacroDef(stream, state) {
      if (stream.eatSpace()) {
        return null;
      }

      var matched;
      if (matched = stream.match(idents)) {
        if (matched == "def") {
          return "keyword";
        }
        stream.eat(/[?!]/);
      }

      state.tokenize.pop();
      return "def";
    }

    function tokenFollowIdent(stream, state) {
      if (stream.eatSpace()) {
        return null;
      }

      if (stream.match(idents)) {
        stream.eat(/[!?]/);
      } else {
        stream.match(operators) || stream.match(conditionalOperators) || stream.match(indexingOperators);
      }
      state.tokenize.pop();
      return "def";
    }

    function tokenFollowType(stream, state) {
      if (stream.eatSpace()) {
        return null;
      }

      stream.match(types);
      state.tokenize.pop();
      return "def";
    }

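    // tokenQuote scans a quoted literal up to the given closing delimiter.
    // Macro interpolations ({{ ... }}, {% ... %}) always get their own
    // tokenizer; backslash escapes and #{...} interpolation only apply when
    // embed is true.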
    function tokenQuote(end, style, embed) {
      return function (stream, state) {
        var escaped = false;

        while (stream.peek()) {
          if (!escaped) {
            if (stream.match("{%", false)) {
              state.tokenize.push(tokenMacro("%", "%"));
              return style;
            }

            if (stream.match("{{", false)) {
              state.tokenize.push(tokenMacro("{", "}"));
              return style;
            }

            if (embed && stream.match("#{", false)) {
              state.tokenize.push(tokenNest("#{", "}", "meta"));
              return style;
            }

            var ch = stream.next();

            if (ch == end) {
              state.tokenize.pop();
              return style;
            }

            escaped = embed && ch == "\\";
          } else {
            stream.next();
            escaped = false;
          }
        }

        return style;
      };
    }

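    // tokenHereDoc consumes a heredoc body until a line that starts (after
    // optional leading whitespace) with the terminator phrase; embed enables
    // escapes and #{...} interpolation, as in tokenQuote.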
    function tokenHereDoc(phrase, embed) {
      return function (stream, state) {
        if (stream.sol()) {
          stream.eatSpace();
          if (stream.match(phrase)) {
            state.tokenize.pop();
            return "string";
          }
        }

        var escaped = false;
        while (stream.peek()) {
          if (!escaped) {
            if (stream.match("{%", false)) {
              state.tokenize.push(tokenMacro("%", "%"));
              return "string";
            }

            if (stream.match("{{", false)) {
              state.tokenize.push(tokenMacro("{", "}"));
              return "string";
            }

            if (embed && stream.match("#{", false)) {
              state.tokenize.push(tokenNest("#{", "}", "meta"));
              return "string";
            }

            escaped = embed && stream.next() == "\\";
          } else {
            stream.next();
            escaped = false;
          }
        }

        return "string";
      };
    }

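    // The mode object: state carries the tokenizer stack, the block nesting
    // depth used for indentation, and the previous token and style.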
    return {
      startState: function () {
        return {
          tokenize: [tokenBase],
          currentIndent: 0,
          lastToken: null,
          lastStyle: null,
          blocks: []
        };
      },

      token: function (stream, state) {
        var style = state.tokenize[state.tokenize.length - 1](stream, state);
        var token = stream.current();

        if (style && style != "comment") {
          state.lastToken = token;
          state.lastStyle = style;
        }

        return style;
      },

      indent: function (state, textAfter) {
        textAfter = textAfter.replace(/^\s*(?:\{%)?\s*|\s*(?:%\})?\s*$/g, "");

        if (dedentKeywords.test(textAfter) || dedentPunctuals.test(textAfter)) {
          return config.indentUnit * (state.currentIndent - 1);
        }

        return config.indentUnit * state.currentIndent;
      },

      fold: "indent",
      electricInput: wordRegExp(dedentPunctualsArray.concat(dedentKeywordsArray), true),
      lineComment: "#"
    };
  });

  CodeMirror.defineMIME("text/x-crystal", "crystal");
});
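
// Usage sketch (illustrative, not part of the mode): assuming codemirror.js and
// this file are both loaded, an editor bound to the MIME type registered above
// could be created as follows. The "code" textarea id is hypothetical.
//
//   var editor = CodeMirror.fromTextArea(document.getElementById("code"), {
//     mode: "text/x-crystal",
//     indentUnit: 2,
//     lineNumbers: true
//   });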