/**
 * CoffeeScript highlighting mode for CodeMirror.
 *
 * Link to the project's GitHub page:
 * https://github.com/pickhardt/coffeescript-codemirror-mode
 */
CodeMirror.defineMode("coffeescript", function(conf) {
  var ERRORCLASS = "error";

  // Compile a word list into one anchored, word-bounded alternation regexp.
  function wordRegexp(words) {
    return new RegExp("^((" + words.join(")|(") + "))\\b");
  }

  var operators = /^(?:->|=>|\+[+=]?|-[\-=]?|\*[\*=]?|\/[\/=]?|[=!]=|<[><]?=?|>>?=?|%=?|&=?|\|=?|\^=?|\~|!|\?)/;
  var delimiters = /^(?:[()\[\]{},:`=;]|\.\.?\.?)/;
  var identifiers = /^[_A-Za-z$][_A-Za-z$0-9]*/;
  var properties = /^(@|this\.)[_A-Za-z$][_A-Za-z$0-9]*/;

  var wordOperators = wordRegexp(["and", "or", "not",
                                  "is", "isnt", "in",
                                  "instanceof", "typeof"]);
  // Keywords that also open an indented block (matched again in tokenLexer).
  var indentKeywords = ["for", "while", "loop", "if", "unless", "else",
                        "switch", "try", "catch", "finally", "class"];
  var commonKeywords = ["break", "by", "continue", "debugger", "delete",
                        "do", "in", "of", "new", "return", "then",
                        "this", "throw", "when", "until"];

  var keywords = wordRegexp(indentKeywords.concat(commonKeywords));

  // From here on, indentKeywords is the compiled regexp, not the word list.
  indentKeywords = wordRegexp(indentKeywords);

  var stringPrefixes = /^('{3}|\"{3}|['\"])/;
  var regexPrefixes = /^(\/{3}|\/)/;
  var commonConstants = ["Infinity", "NaN", "undefined", "null", "true", "false", "on", "off", "yes", "no"];
  var constants = wordRegexp(commonConstants);

  // Tokenizers

  // Main tokenizer. At start-of-line it translates indentation changes into
  // "indent"/"dedent" pseudo-styles (consumed by tokenLexer); otherwise it
  // matches comments, numbers, strings/regexes, operators, delimiters,
  // constants, keywords, identifiers and @-properties, in that order.
  function tokenBase(stream, state) {
    // Handle scope changes
    if (stream.sol()) {
      if (state.scope.align === null) state.scope.align = false;
      var scopeOffset = state.scope.offset;
      if (stream.eatSpace()) {
        var lineOffset = stream.indentation();
        if (lineOffset > scopeOffset && state.scope.type == "coffee") {
          return "indent";
        } else if (lineOffset < scopeOffset) {
          return "dedent";
        }
        return null;
      } else {
        if (scopeOffset > 0) {
          dedent(stream, state);
        }
      }
    }
    if (stream.eatSpace()) {
      return null;
    }

    var ch = stream.peek();

    // Handle docco title comment (single line)
    if (stream.match("####")) {
      stream.skipToEnd();
      return "comment";
    }

    // Handle multi line comments
    if (stream.match("###")) {
      state.tokenize = longComment;
      return state.tokenize(stream, state);
    }

    // Single line comment
    if (ch === "#") {
      stream.skipToEnd();
      return "comment";
    }

    // Handle number literals
    if (stream.match(/^-?[0-9\.]/, false)) {
      var floatLiteral = false;
      // Floats
      if (stream.match(/^-?\d*\.\d+(e[\+\-]?\d+)?/i)) {
        floatLiteral = true;
      }
      if (stream.match(/^-?\d+\.\d*/)) {
        floatLiteral = true;
      }
      if (stream.match(/^-?\.\d+/)) {
        floatLiteral = true;
      }

      if (floatLiteral) {
        // prevent from getting extra . on 1..
        if (stream.peek() == ".") {
          stream.backUp(1);
        }
        return "number";
      }
      // Integers
      var intLiteral = false;
      // Hex
      if (stream.match(/^-?0x[0-9a-f]+/i)) {
        intLiteral = true;
      }
      // Decimal
      if (stream.match(/^-?[1-9]\d*(e[\+\-]?\d+)?/)) {
        intLiteral = true;
      }
      // Zero by itself with no other piece of number.
      if (stream.match(/^-?0(?![\dx])/i)) {
        intLiteral = true;
      }
      if (intLiteral) {
        return "number";
      }
    }

    // Handle strings
    if (stream.match(stringPrefixes)) {
      state.tokenize = tokenFactory(stream.current(), "string");
      return state.tokenize(stream, state);
    }
    // Handle regex literals
    if (stream.match(regexPrefixes)) {
      if (stream.current() != "/" || stream.match(/^.*\//, false)) { // prevent highlight of division
        state.tokenize = tokenFactory(stream.current(), "string-2");
        return state.tokenize(stream, state);
      } else {
        stream.backUp(1);
      }
    }

    // Handle operators and delimiters
    if (stream.match(operators) || stream.match(wordOperators)) {
      return "operator";
    }
    if (stream.match(delimiters)) {
      return "punctuation";
    }

    if (stream.match(constants)) {
      return "atom";
    }

    if (stream.match(keywords)) {
      return "keyword";
    }

    if (stream.match(identifiers)) {
      return "variable";
    }

    if (stream.match(properties)) {
      return "property";
    }

    // Handle non-detected items
    stream.next();
    return ERRORCLASS;
  }

  // Returns a tokenizer for a string/regex opened by `delimiter`, styled as
  // `outclass`. Honors backslash escapes; a length-1 delimiter means a
  // single-line literal, which at end-of-line either becomes an error
  // (conf.mode.singleLineStringErrors) or is silently terminated.
  function tokenFactory(delimiter, outclass) {
    var singleline = delimiter.length == 1;
    return function(stream, state) {
      while (!stream.eol()) {
        stream.eatWhile(/[^'"\/\\]/);
        if (stream.eat("\\")) {
          stream.next();
          if (singleline && stream.eol()) {
            return outclass;
          }
        } else if (stream.match(delimiter)) {
          state.tokenize = tokenBase;
          return outclass;
        } else {
          stream.eat(/['"\/]/);
        }
      }
      if (singleline) {
        if (conf.mode.singleLineStringErrors) {
          outclass = ERRORCLASS;
        } else {
          state.tokenize = tokenBase;
        }
      }
      return outclass;
    };
  }

  // Consume the interior of a ### ... ### block comment, returning control
  // to tokenBase once the closing ### is found.
  function longComment(stream, state) {
    while (!stream.eol()) {
      stream.eatWhile(/[^#]/);
      if (stream.match("###")) {
        state.tokenize = tokenBase;
        break;
      }
      stream.eatWhile("#");
    }
    return "comment";
  }

  // Push a new scope onto state.scope. `type` defaults to "coffee" (an
  // indentation scope, one indentUnit deeper than the nearest enclosing
  // coffee scope); bracket scopes use the closing bracket character as
  // their type and align to the column just after the opener.
  function indent(stream, state, type) {
    type = type || "coffee";
    var offset = 0, align = false, alignOffset = null;
    for (var scope = state.scope; scope; scope = scope.prev) {
      if (scope.type === "coffee") {
        offset = scope.offset + conf.indentUnit;
        break;
      }
    }
    if (type !== "coffee") {
      align = null;
      alignOffset = stream.column() + stream.current().length;
    } else if (state.scope.align) {
      state.scope.align = false;
    }
    state.scope = {
      offset: offset,
      type: type,
      prev: state.scope,
      align: align,
      alignOffset: alignOffset
    };
  }

  // Pop coffee scopes until one matches the current indentation. Returns
  // true when the indentation matches no enclosing scope offset (an
  // indentation error); false/undefined otherwise.
  function dedent(stream, state) {
    if (!state.scope.prev) return;
    if (state.scope.type === "coffee") {
      var _indent = stream.indentation();
      var matched = false;
      for (var scope = state.scope; scope; scope = scope.prev) {
        if (_indent === scope.offset) {
          matched = true;
          break;
        }
      }
      if (!matched) {
        return true;
      }
      while (state.scope.prev && state.scope.offset !== _indent) {
        state.scope = state.scope.prev;
      }
      return false;
    } else {
      state.scope = state.scope.prev;
      return false;
    }
  }

  // Wraps state.tokenize and converts the resulting token into scope
  // bookkeeping: lambdas, brackets and indent keywords push scopes;
  // closers, "then", dedents and trailing "return" pop them.
  function tokenLexer(stream, state) {
    var style = state.tokenize(stream, state);
    var current = stream.current();

    // Handle "." connected identifiers
    if (current === ".") {
      style = state.tokenize(stream, state);
      current = stream.current();
      if (/^\.[\w$]+$/.test(current)) {
        return "variable";
      } else {
        return ERRORCLASS;
      }
    }

    // Handle scope changes.
    if (current === "return") {
      state.dedent += 1;
    }
    if (((current === "->" || current === "=>") &&
         !state.lambda &&
         !stream.peek())
        || style === "indent") {
      indent(stream, state);
    }
    var delimiter_index = "[({".indexOf(current);
    if (delimiter_index !== -1) {
      // Bracket scopes are keyed by their expected closer character.
      indent(stream, state, "])}".slice(delimiter_index, delimiter_index + 1));
    }
    if (indentKeywords.exec(current)) {
      indent(stream, state);
    }
    if (current == "then") {
      dedent(stream, state);
    }

    if (style === "dedent") {
      if (dedent(stream, state)) {
        return ERRORCLASS;
      }
    }
    delimiter_index = "])}".indexOf(current);
    if (delimiter_index !== -1) {
      while (state.scope.type == "coffee" && state.scope.prev)
        state.scope = state.scope.prev;
      if (state.scope.type == current)
        state.scope = state.scope.prev;
    }
    if (state.dedent > 0 && stream.eol() && state.scope.type == "coffee") {
      if (state.scope.prev) state.scope = state.scope.prev;
      state.dedent -= 1;
    }

    return style;
  }

  var external = {
    startState: function(basecolumn) {
      return {
        tokenize: tokenBase,
        scope: {offset: basecolumn || 0, type: "coffee", prev: null, align: false},
        lastToken: null,
        lambda: false,
        dedent: 0
      };
    },

    token: function(stream, state) {
      var fillAlign = state.scope.align === null && state.scope;
      if (fillAlign && stream.sol()) fillAlign.align = false;

      var style = tokenLexer(stream, state);
      if (fillAlign && style && style != "comment") fillAlign.align = true;

      state.lastToken = {style: style, content: stream.current()};

      // BUGFIX: the lambda flag lives on `state` (see startState), not on
      // the stream. The original tested `stream.lambda`, which is always
      // undefined, so the flag was never cleared at end of line.
      if (stream.eol() && state.lambda) {
        state.lambda = false;
      }

      return style;
    },

    indent: function(state, text) {
      if (state.tokenize != tokenBase) return 0;
      var scope = state.scope;
      var closer = text && "])}".indexOf(text.charAt(0)) > -1;
      // A closing bracket re-indents to the scope it closes, so skip any
      // intervening indentation scopes first.
      if (closer) while (scope.type == "coffee" && scope.prev) scope = scope.prev;
      var closes = closer && scope.type === text.charAt(0);
      if (scope.align)
        return scope.alignOffset - (closes ? 1 : 0);
      else
        return (closes ? scope.prev : scope).offset;
    },

    lineComment: "#",
    fold: "indent"
  };
  return external;
});

CodeMirror.defineMIME("text/x-coffeescript", "coffeescript");