1// CodeMirror, copyright (c) by Marijn Haverbeke and others
2// Distributed under an MIT license: http://codemirror.net/LICENSE
3
4(function(mod) {
5  if (typeof exports == "object" && typeof module == "object") // CommonJS
6    mod(require("../../lib/codemirror"));
7  else if (typeof define == "function" && define.amd) // AMD
8    define(["../../lib/codemirror"], mod);
9  else // Plain browser env
10    mod(CodeMirror);
11})(function(CodeMirror) {
12"use strict";
13
14CodeMirror.defineMode("sass", function(config) {
15  var tokenRegexp = function(words){
16    return new RegExp("^" + words.join("|"));
17  };
18
19  var keywords = ["true", "false", "null", "auto"];
20  var keywordsRegexp = new RegExp("^" + keywords.join("|"));
21
22  var operators = ["\\(", "\\)", "=", ">", "<", "==", ">=", "<=", "\\+", "-", "\\!=", "/", "\\*", "%", "and", "or", "not"];
23  var opRegexp = tokenRegexp(operators);
24
25  var pseudoElementsRegexp = /^::?[\w\-]+/;
26
27  var urlTokens = function(stream, state){
28    var ch = stream.peek();
29
30    if (ch === ")"){
31      stream.next();
32      state.tokenizer = tokenBase;
33      return "operator";
34    }else if (ch === "("){
35      stream.next();
36      stream.eatSpace();
37
38      return "operator";
39    }else if (ch === "'" || ch === '"'){
40      state.tokenizer = buildStringTokenizer(stream.next());
41      return "string";
42    }else{
43      state.tokenizer = buildStringTokenizer(")", false);
44      return "string";
45    }
46  };
47  var multilineComment = function(stream, state) {
48    if (stream.skipTo("*/")){
49      stream.next();
50      stream.next();
51      state.tokenizer = tokenBase;
52    }else {
53      stream.next();
54    }
55
56    return "comment";
57  };
58
59  var buildStringTokenizer = function(quote, greedy){
60    if(greedy == null){ greedy = true; }
61
62    function stringTokenizer(stream, state){
63      var nextChar = stream.next();
64      var peekChar = stream.peek();
65      var previousChar = stream.string.charAt(stream.pos-2);
66
67      var endingString = ((nextChar !== "\\" && peekChar === quote) || (nextChar === quote && previousChar !== "\\"));
68
69      /*
70      console.log("previousChar: " + previousChar);
71      console.log("nextChar: " + nextChar);
72      console.log("peekChar: " + peekChar);
73      console.log("ending: " + endingString);
74      */
75
76      if (endingString){
77        if (nextChar !== quote && greedy) { stream.next(); }
78        state.tokenizer = tokenBase;
79        return "string";
80      }else if (nextChar === "#" && peekChar === "{"){
81        state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
82        stream.next();
83        return "operator";
84      }else {
85        return "string";
86      }
87    }
88
89    return stringTokenizer;
90  };
91
92  var buildInterpolationTokenizer = function(currentTokenizer){
93    return function(stream, state){
94      if (stream.peek() === "}"){
95        stream.next();
96        state.tokenizer = currentTokenizer;
97        return "operator";
98      }else{
99        return tokenBase(stream, state);
100      }
101    };
102  };
103
104  var indent = function(state){
105    if (state.indentCount == 0){
106      state.indentCount++;
107      var lastScopeOffset = state.scopes[0].offset;
108      var currentOffset = lastScopeOffset + config.indentUnit;
109      state.scopes.unshift({ offset:currentOffset });
110    }
111  };
112
113  var dedent = function(state){
114    if (state.scopes.length == 1) { return; }
115
116    state.scopes.shift();
117  };
118
  // Base tokenizer: classifies the token at the current stream position and
  // returns a CodeMirror style string (or null for unrecognized characters).
  // Hands off to specialized tokenizers (comments, strings, url(...),
  // interpolation) by assigning state.tokenizer. Check order matters: each
  // branch may consume stream characters before deciding.
  var tokenBase = function(stream, state) {
    var ch = stream.peek();

    // Single line Comment
    if (stream.match('//')) {
      stream.skipToEnd();
      return "comment";
    }

    // Multiline Comment: delegate, and let the new tokenizer style this token.
    if (stream.match('/*')){
      state.tokenizer = multilineComment;
      return state.tokenizer(stream, state);
    }

    // Interpolation: "#{...}"; the interior is tokenized with tokenBase itself.
    if (stream.match('#{')){
    state.tokenizer = buildInterpolationTokenizer(tokenBase);
      return "operator";
    }

    if (ch === "."){
      stream.next();

      // Match class selectors
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "atom";
      }else if (stream.peek() === "#"){
        // ".#" — presumably an interpolated selector like ".#{...}";
        // the "#" is left for the next token. TODO(review): confirm.
        indent(state);
        return "atom";
      }else{
        return "operator";
      }
    }

    if (ch === "#"){
      stream.next();

      // Hex numbers
      // NOTE(review): unlike the other patterns this regexp is not
      // ^-anchored — presumably safe only because CodeMirror's
      // StringStream.match rejects matches past the current position;
      // confirm against the StringStream API.
      if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)){
        return "number";
      }

      // ID selectors
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "atom";
      }

      // "##" — presumably an interpolated selector; see the "." branch above.
      if (stream.peek() === "#"){
        indent(state);
        return "atom";
      }
    }

    // Numbers
    if (stream.match(/^-?[0-9\.]+/)){
      return "number";
    }

    // Units
    if (stream.match(/^(px|em|in)\b/)){
      return "unit";
    }

    // Value keywords: true/false/null/auto.
    if (stream.match(keywordsRegexp)){
      return "keyword";
    }

    // url(...): switch to the url tokenizer for the parenthesized part.
    if (stream.match(/^url/) && stream.peek() === "("){
      state.tokenizer = urlTokens;
      return "atom";
    }

    // Variables: "$name" — styled differently when followed by ":" (a
    // definition) versus a bare use.
    if (ch === "$"){
      stream.next();
      stream.eatWhile(/[\w-]/);

      if (stream.peek() === ":"){
        stream.next();
        return "variable-2";
      }else{
        return "variable-3";
      }
    }

    // "!word" (e.g. "!important"); a lone "!" is an operator.
    if (ch === "!"){
      stream.next();

      if (stream.match(/^[\w]+/)){
        return "keyword";
      }

      return "operator";
    }

    if (ch === "="){
      stream.next();

      // Match shortcut mixin definition
      if (stream.match(/^[\w-]+/)){
        indent(state);
        return "meta";
      }else {
        return "operator";
      }
    }

    if (ch === "+"){
      stream.next();

      // "+name" — presumably shortcut mixin *inclusion* (Sass "+mixin"),
      // though the original comment called it a definition; confirm.
      if (stream.match(/^[\w-]+/)){
        return "variable-3";
      }else {
        return "operator";
      }
    }

    // Indent Directives: these open a new scope.
    if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)){
      indent(state);
      return "meta";
    }

    // Other Directives
    if (ch === "@"){
      stream.next();
      stream.eatWhile(/[\w-]/);
      return "meta";
    }

    // Strings: hand off to a string tokenizer keyed on the quote character.
    if (ch === '"' || ch === "'"){
      stream.next();
      state.tokenizer = buildStringTokenizer(ch);
      return "string";
    }

    // Pseudo element selectors
    if (ch == ':' && stream.match(pseudoElementsRegexp)){
      return "keyword";
    }

    // atoms
    if (stream.eatWhile(/[\w-&]/)){
      // matches a property definition ("name:"), unless the ":" actually
      // begins a pseudo-element/class (checked without consuming).
      if (stream.peek() === ":" && !stream.match(pseudoElementsRegexp, false))
        return "property";
      else
        return "atom";
    }

    if (stream.match(opRegexp)){
      return "operator";
    }

    // If we haven't returned by now, we move 1 character
    // and return an error
    stream.next();
    return null;
  };
283
284  var tokenLexer = function(stream, state) {
285    if (stream.sol()){
286      state.indentCount = 0;
287    }
288    var style = state.tokenizer(stream, state);
289    var current = stream.current();
290
291    if (current === "@return"){
292      dedent(state);
293    }
294
295    if (style === "atom"){
296      indent(state);
297    }
298
299    if (style !== null){
300      var startOfToken = stream.pos - current.length;
301      var withCurrentIndent = startOfToken + (config.indentUnit * state.indentCount);
302
303      var newScopes = [];
304
305      for (var i = 0; i < state.scopes.length; i++){
306        var scope = state.scopes[i];
307
308        if (scope.offset <= withCurrentIndent){
309          newScopes.push(scope);
310        }
311      }
312
313      state.scopes = newScopes;
314    }
315
316
317    return style;
318  };
319
  return {
    // Fresh state for a new document: start in the base tokenizer with a
    // single root scope at offset 0.
    startState: function() {
      return {
        tokenizer: tokenBase,
        scopes: [{offset: 0, type: 'sass'}],
        definedVars: [],
        definedMixins: []
      };
    },
    token: function(stream, state) {
      var style = tokenLexer(stream, state);

      // Remember the last token on the state; it is not read anywhere in
      // this mode — presumably kept for external consumers. Confirm before
      // removing.
      state.lastToken = { style: style, content: stream.current() };

      return style;
    },

    // Suggested indentation for a new line: the innermost open scope.
    indent: function(state) {
      return state.scopes[0].offset;
    }
  };
341});
342
// Register the standard Sass MIME type for this mode.
CodeMirror.defineMIME("text/x-sass", "sass");
344
345});
346