scripts/CodeMirror/mode/sass/sass.js
// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: https://codemirror.net/LICENSE

(function(mod) {
  if (typeof exports == "object" && typeof module == "object") // CommonJS
    mod(require("../../lib/codemirror"), require("../css/css"));
  else if (typeof define == "function" && define.amd) // AMD
    define(["../../lib/codemirror", "../css/css"], mod);
  else // Plain browser env
    mod(CodeMirror);
})(function(CodeMirror) {
"use strict";

CodeMirror.defineMode("sass", function(config) {
  var cssMode = CodeMirror.mimeModes["text/css"];
  var propertyKeywords = cssMode.propertyKeywords || {},
      colorKeywords = cssMode.colorKeywords || {},
      valueKeywords = cssMode.valueKeywords || {},
      fontProperties = cssMode.fontProperties || {};

  function tokenRegexp(words) {
    return new RegExp("^" + words.join("|"));
  }

  var keywords = ["true", "false", "null", "auto"];
  var keywordsRegexp = new RegExp("^" + keywords.join("|"));

  var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-",
                   "\\!=", "/", "\\*", "%", "and", "or", "not", ";", "\\{", "\\}", ":"];
  var opRegexp = tokenRegexp(operators);

  var pseudoElementsRegexp = /^::?[a-zA-Z_][\w\-]*/;

  var word;

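  // True when nothing but (at most) trailing whitespace is left on the line;
  // used to drop back to the selector/property half of the tokenizer.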
  function isEndLine(stream) {
    return !stream.peek() || stream.match(/\s+$/, false);
  }

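  // Tokenizer for the inside of url(...): quoted URLs hand off to a string
  // tokenizer for that quote, unquoted URLs to one terminated by ")", and a
  // closing ")" switches back to tokenBase.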
  function urlTokens(stream, state) {
    var ch = stream.peek();

    if (ch === ")") {
      stream.next();
      state.tokenizer = tokenBase;
      return "operator";
    } else if (ch === "(") {
      stream.next();
      stream.eatSpace();

      return "operator";
    } else if (ch === "'" || ch === '"') {
      state.tokenizer = buildStringTokenizer(stream.next());
      return "string";
    } else {
      state.tokenizer = buildStringTokenizer(")", false);
      return "string";
    }
  }

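  // Builds a tokenizer for a comment opened at `indentation`; line comments
  // run to the end of each line, while multiLine comments also watch for "*/".
  // A new line indented at or below the opening level ends the comment.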
  function comment(indentation, multiLine) {
    return function(stream, state) {
      if (stream.sol() && stream.indentation() <= indentation) {
        state.tokenizer = tokenBase;
        return tokenBase(stream, state);
      }

      if (multiLine && stream.skipTo("*/")) {
        stream.next();
        stream.next();
        state.tokenizer = tokenBase;
      } else {
        stream.skipToEnd();
      }

      return "comment";
    };
  }

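  // Builds a tokenizer for a string delimited by `quote`. When `greedy`
  // (the default) the closing quote is consumed with the string; "#{" inside
  // the string switches to the interpolation tokenizer.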
  function buildStringTokenizer(quote, greedy) {
    if (greedy == null) { greedy = true; }

    function stringTokenizer(stream, state) {
      var nextChar = stream.next();
      var peekChar = stream.peek();
      var previousChar = stream.string.charAt(stream.pos - 2);

      var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));

      if (endingString) {
        if (nextChar !== quote && greedy) { stream.next(); }
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        state.tokenizer = tokenBase;
        return "string";
      } else if (nextChar === "#" && peekChar === "{") {
        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
        stream.next();
        return "operator";
      } else {
        return "string";
      }
    }

    return stringTokenizer;
  }

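  // Tokenizes the body of a #{...} interpolation with tokenBase and restores
  // `currentTokenizer` once the closing "}" is seen.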
  function buildInterpolationTokenizer(currentTokenizer) {
    return function(stream, state) {
      if (stream.peek() === "}") {
        stream.next();
        state.tokenizer = currentTokenizer;
        return "operator";
      } else {
        return tokenBase(stream, state);
      }
    };
  }

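  // Opens one extra indentation scope, at most once per line (guarded by
  // state.indentCount, which tokenLexer resets at the start of each line).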
  function indent(state) {
    if (state.indentCount == 0) {
      state.indentCount++;
      var lastScopeOffset = state.scopes[0].offset;
      var currentOffset = lastScopeOffset + config.indentUnit;
      state.scopes.unshift({ offset: currentOffset });
    }
  }

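  // Closes the innermost scope, but never the outermost one.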
  function dedent(state) {
    if (state.scopes.length == 1) return;

    state.scopes.shift();
  }

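  // Main tokenizer. state.cursorHalf selects the rule set: 0 means we are
  // before the ":" (selectors, property names), 1 means after it (values).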
  function tokenBase(stream, state) {
    var ch = stream.peek();

    // Comment
    if (stream.match("/*")) {
      state.tokenizer = comment(stream.indentation(), true);
      return state.tokenizer(stream, state);
    }
    if (stream.match("//")) {
      state.tokenizer = comment(stream.indentation(), false);
      return state.tokenizer(stream, state);
    }

    // Interpolation
    if (stream.match("#{")) {
      state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }

    // Strings
    if (ch === '"' || ch === "'") {
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }

    if (!state.cursorHalf) { // state.cursorHalf === 0
      // first half i.e. before : for key-value pairs
      // including selectors

      if (ch === "-") {
        if (stream.match(/^-\w+-/)) {
          return "meta";
        }
      }

      if (ch === ".") {
        stream.next();
        if (stream.match(/^[\w-]+/)) {
          indent(state);
          return "qualifier";
        } else if (stream.peek() === "#") {
          indent(state);
          return "tag";
        }
      }

      if (ch === "#") {
        stream.next();
        // ID selectors
        if (stream.match(/^[\w-]+/)) {
          indent(state);
          return "builtin";
        }
        if (stream.peek() === "#") {
          indent(state);
          return "tag";
        }
      }

      // Variables
      if (ch === "$") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        return "variable-2";
      }

      // Numbers
      if (stream.match(/^-?[0-9\.]+/))
        return "number";

      // Units
      if (stream.match(/^(px|em|in)\b/))
        return "unit";

      if (stream.match(keywordsRegexp))
        return "keyword";

      if (stream.match(/^url/) && stream.peek() === "(") {
        state.tokenizer = urlTokens;
        return "atom";
      }

      if (ch === "=") {
        // Match shortcut mixin definition
        if (stream.match(/^=[\w-]+/)) {
          indent(state);
          return "meta";
        }
      }

      if (ch === "+") {
        // Match shortcut mixin definition
        if (stream.match(/^\+[\w-]+/)) {
          return "variable-3";
        }
      }

      if (ch === "@") {
        if (stream.match(/@extend/)) {
          if (!stream.match(/\s*[\w]/))
            dedent(state);
        }
      }

      // Indent Directives
      if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)) {
        indent(state);
        return "def";
      }

      // Other Directives
      if (ch === "@") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        return "def";
      }

      if (stream.eatWhile(/[\w-]/)) {
        if (stream.match(/ *: *[\w-\+\$#!\("']/, false)) {
          word = stream.current().toLowerCase();
          var prop = state.prevProp + "-" + word;
          if (propertyKeywords.hasOwnProperty(prop)) {
            return "property";
          } else if (propertyKeywords.hasOwnProperty(word)) {
            state.prevProp = word;
            return "property";
          } else if (fontProperties.hasOwnProperty(word)) {
            return "property";
          }
          return "tag";
        }
        else if (stream.match(/ *:/, false)) {
          indent(state);
          state.cursorHalf = 1;
          state.prevProp = stream.current().toLowerCase();
          return "property";
        }
        else if (stream.match(/ *,/, false)) {
          return "tag";
        }
        else {
          indent(state);
          return "tag";
        }
      }

      if (ch === ":") {
        if (stream.match(pseudoElementsRegexp)) { // could be a pseudo-element
          return "variable-3";
        }
        stream.next();
        state.cursorHalf = 1;
        return "operator";
      }

    } // cursorHalf === 0 ends here
    else {
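      // second half i.e. after : for property values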

      if (ch === "#") {
        stream.next();
        // Hex numbers
        if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)) {
          if (isEndLine(stream)) {
            state.cursorHalf = 0;
          }
          return "number";
        }
      }

      // Numbers
      if (stream.match(/^-?[0-9\.]+/)) {
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        return "number";
      }

      // Units
      if (stream.match(/^(px|em|in)\b/)) {
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        return "unit";
      }

      if (stream.match(keywordsRegexp)) {
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        return "keyword";
      }

      if (stream.match(/^url/) && stream.peek() === "(") {
        state.tokenizer = urlTokens;
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        return "atom";
      }

      // Variables
      if (ch === "$") {
        stream.next();
        stream.eatWhile(/[\w-]/);
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        return "variable-2";
      }

      // bang character for !important, !default, etc.
      if (ch === "!") {
        stream.next();
        state.cursorHalf = 0;
        return stream.match(/^[\w]+/) ? "keyword" : "operator";
      }

      if (stream.match(opRegexp)) {
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        return "operator";
      }

      // attributes
      if (stream.eatWhile(/[\w-]/)) {
        if (isEndLine(stream)) {
          state.cursorHalf = 0;
        }
        word = stream.current().toLowerCase();
        if (valueKeywords.hasOwnProperty(word)) {
          return "atom";
        } else if (colorKeywords.hasOwnProperty(word)) {
          return "keyword";
        } else if (propertyKeywords.hasOwnProperty(word)) {
          state.prevProp = stream.current().toLowerCase();
          return "property";
        } else {
          return "tag";
        }
      }

      //stream.eatSpace();
      if (isEndLine(stream)) {
        state.cursorHalf = 0;
        return null;
      }

    } // else ends here

    if (stream.match(opRegexp))
      return "operator";

    // If we haven't returned by now, we move 1 character
    // and return an error
    stream.next();
    return null;
  }

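  // Runs the active tokenizer, dedents on "@return" and "}", and prunes any
  // scopes that sit deeper than the indentation of the current token.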
  function tokenLexer(stream, state) {
    if (stream.sol()) state.indentCount = 0;
    var style = state.tokenizer(stream, state);
    var current = stream.current();

    if (current === "@return" || current === "}") {
      dedent(state);
    }

    if (style !== null) {
      var startOfToken = stream.pos - current.length;

      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);

      var newScopes = [];

      for (var i = 0; i < state.scopes.length; i++) {
        var scope = state.scopes[i];

        if (scope.offset <= withCurrentIndent)
          newScopes.push(scope);
      }

      state.scopes = newScopes;
    }

    return style;
  }

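  // The mode object: per-document state plus the token and indent hooks
  // CodeMirror calls.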
  return {
    startState: function() {
      return {
        tokenizer: tokenBase,
        scopes: [{offset: 0, type: "sass"}],
        indentCount: 0,
        cursorHalf: 0, // cursor half tells us if cursor lies after (1)
                       // or before (0) colon (well... more or less)
        definedVars: [],
        definedMixins: []
      };
    },
    token: function(stream, state) {
      var style = tokenLexer(stream, state);

      state.lastToken = { style: style, content: stream.current() };

      return style;
    },

    indent: function(state) {
      return state.scopes[0].offset;
    }
  };
}, "css");

CodeMirror.defineMIME("text/x-sass", "sass");

});