// CodeMirror, copyright (c) by Marijn Haverbeke and others
// Distributed under an MIT license: https://codemirror.net/LICENSE

// CodeMirror mode for indented Sass (.sass) syntax. Delegates keyword
// tables to the CSS mode and tracks indentation-based scopes plus a
// "cursorHalf" flag distinguishing the selector/property side (0) from
// the value side (1) of a line.
(function (mod) {
  if (typeof exports == 'object' && typeof module == 'object')
    // CommonJS
    mod(require('../../lib/codemirror'), require('../css/css'));
  else if (typeof define == 'function' && define.amd)
    // AMD
    define(['../../lib/codemirror', '../css/css'], mod);
  // Plain browser env
  else mod(CodeMirror);
})(function (CodeMirror) {
  'use strict';

  CodeMirror.defineMode(
    'sass',
    function (config) {
      // Reuse the keyword tables registered by the CSS mode so property,
      // color, value and font-property names highlight consistently.
      var cssMode = CodeMirror.mimeModes['text/css'];
      var propertyKeywords = cssMode.propertyKeywords || {},
        colorKeywords = cssMode.colorKeywords || {},
        valueKeywords = cssMode.valueKeywords || {},
        fontProperties = cssMode.fontProperties || {};

      // Build an anchored alternation regexp from a list of (pre-escaped)
      // token strings.
      function tokenRegexp(words) {
        return new RegExp('^' + words.join('|'));
      }

      var keywords = ['true', 'false', 'null', 'auto'];
      var keywordsRegexp = new RegExp('^' + keywords.join('|'));

      var operators = ['\\(', '\\)', '=', '>', '<', '==', '>=', '<=', '\\+', '-', '\\!=', '/', '\\*', '%', 'and', 'or', 'not', ';', '\\{', '\\}', ':'];
      var opRegexp = tokenRegexp(operators);

      var pseudoElementsRegexp = /^::?[a-zA-Z_][\w\-]*/;

      var word;

      // True when the stream is at (or only whitespace remains before)
      // the end of the line; used to reset cursorHalf at line end.
      function isEndLine(stream) {
        return !stream.peek() || stream.match(/\s+$/, false);
      }

      // Tokenizer active inside a url(...) construct; hands back to
      // tokenBase on the closing paren and to a string tokenizer for
      // quoted or unquoted url contents.
      function urlTokens(stream, state) {
        var ch = stream.peek();

        if (ch === ')') {
          stream.next();
          state.tokenizer = tokenBase;
          return 'operator';
        } else if (ch === '(') {
          stream.next();
          stream.eatSpace();
          return 'operator';
        } else if (ch === "'" || ch === '"') {
          state.tokenizer = buildStringTokenizer(stream.next());
          return 'string';
        } else {
          // Unquoted url: treat everything up to ')' as a string, but do
          // not consume the ')' itself (greedy = false).
          state.tokenizer = buildStringTokenizer(')', false);
          return 'string';
        }
      }

      // Tokenizer for a comment started at the given indentation.
      // A line indented at or below that level ends the comment; a
      // multi-line (/* */) comment also ends at '*/'.
      function comment(indentation, multiLine) {
        return function (stream, state) {
          if (stream.sol() && stream.indentation() <= indentation) {
            state.tokenizer = tokenBase;
            return tokenBase(stream, state);
          }

          if (multiLine && stream.skipTo('*/')) {
            // Consume the '*' and '/' of the terminator.
            stream.next();
            stream.next();
            state.tokenizer = tokenBase;
          } else {
            stream.skipToEnd();
          }

          return 'comment';
        };
      }

      // Tokenizer for a string delimited by `quote`. When `greedy`
      // (default) the closing quote is consumed as part of the string;
      // supports #{...} interpolation inside the string.
      function buildStringTokenizer(quote, greedy) {
        if (greedy == null) {
          greedy = true;
        }

        function stringTokenizer(stream, state) {
          var nextChar = stream.next();
          var peekChar = stream.peek();
          var previousChar = stream.string.charAt(stream.pos - 2);

          // The string ends on an unescaped closing quote.
          var endingString = (nextChar !== '\\' && peekChar === quote) || (nextChar === quote && previousChar !== '\\');

          if (endingString) {
            if (nextChar !== quote && greedy) {
              stream.next();
            }
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            state.tokenizer = tokenBase;
            return 'string';
          } else if (nextChar === '#' && peekChar === '{') {
            state.tokenizer = buildInterpolationTokenizer(stringTokenizer);
            stream.next();
            return 'operator';
          } else {
            return 'string';
          }
        }

        return stringTokenizer;
      }

      // Tokenizer for #{...} interpolation; resumes `currentTokenizer`
      // after the closing brace.
      function buildInterpolationTokenizer(currentTokenizer) {
        return function (stream, state) {
          if (stream.peek() === '}') {
            stream.next();
            state.tokenizer = currentTokenizer;
            return 'operator';
          } else {
            return tokenBase(stream, state);
          }
        };
      }

      // Open a new indentation scope (at most once per line; see
      // indentCount reset in tokenLexer).
      function indent(state) {
        if (state.indentCount == 0) {
          state.indentCount++;
          var lastScopeOffset = state.scopes[0].offset;
          var currentOffset = lastScopeOffset + config.indentUnit;
          state.scopes.unshift({ offset: currentOffset });
        }
      }

      // Close the innermost scope, but never pop the root scope.
      function dedent(state) {
        if (state.scopes.length == 1) return;
        state.scopes.shift();
      }

      // Main tokenizer. Dispatches on the character at the stream head
      // and on state.cursorHalf (before/after the ':' of a declaration).
      function tokenBase(stream, state) {
        var ch = stream.peek();

        // Comment
        if (stream.match('/*')) {
          state.tokenizer = comment(stream.indentation(), true);
          return state.tokenizer(stream, state);
        }
        if (stream.match('//')) {
          state.tokenizer = comment(stream.indentation(), false);
          return state.tokenizer(stream, state);
        }

        // Interpolation
        if (stream.match('#{')) {
          state.tokenizer = buildInterpolationTokenizer(tokenBase);
          return 'operator';
        }

        // Strings
        if (ch === '"' || ch === "'") {
          stream.next();
          state.tokenizer = buildStringTokenizer(ch);
          return 'string';
        }

        if (!state.cursorHalf) {
          // state.cursorHalf === 0
          // first half i.e. before : for key-value pairs
          // including selectors

          if (ch === '-') {
            // Vendor prefixes, e.g. -webkit-
            if (stream.match(/^-\w+-/)) {
              return 'meta';
            }
          }

          if (ch === '.') {
            stream.next();
            if (stream.match(/^[\w-]+/)) {
              // Class selector opens a scope.
              indent(state);
              return 'qualifier';
            } else if (stream.peek() === '#') {
              indent(state);
              return 'tag';
            }
          }

          if (ch === '#') {
            stream.next();
            // ID selectors
            if (stream.match(/^[\w-]+/)) {
              indent(state);
              return 'builtin';
            }
            if (stream.peek() === '#') {
              indent(state);
              return 'tag';
            }
          }

          // Variables
          if (ch === '$') {
            stream.next();
            stream.eatWhile(/[\w-]/);
            return 'variable-2';
          }

          // Numbers
          if (stream.match(/^-?[0-9\.]+/)) return 'number';

          // Units
          if (stream.match(/^(px|em|in)\b/)) return 'unit';

          if (stream.match(keywordsRegexp)) return 'keyword';

          if (stream.match(/^url/) && stream.peek() === '(') {
            state.tokenizer = urlTokens;
            return 'atom';
          }

          if (ch === '=') {
            // Match shortcut mixin definition
            if (stream.match(/^=[\w-]+/)) {
              indent(state);
              return 'meta';
            }
          }

          if (ch === '+') {
            // Match shortcut mixin definition
            if (stream.match(/^\+[\w-]+/)) {
              return 'variable-3';
            }
          }

          if (ch === '@') {
            if (stream.match('@extend')) {
              // A bare @extend (no argument) closes a scope.
              if (!stream.match(/\s*[\w]/)) dedent(state);
            }
          }

          // Indent Directives
          if (stream.match(/^@(else if|if|media|else|for|each|while|mixin|function)/)) {
            indent(state);
            return 'def';
          }

          // Other Directives
          if (ch === '@') {
            stream.next();
            stream.eatWhile(/[\w-]/);
            return 'def';
          }

          if (stream.eatWhile(/[\w-]/)) {
            if (stream.match(/ *: *[\w-\+\$#!\("']/, false)) {
              // Word followed by ": value" on the same line — a property
              // declaration. Try compound (prevProp-word), then the word
              // itself, then font shorthand properties.
              word = stream.current().toLowerCase();
              var prop = state.prevProp + '-' + word;
              if (propertyKeywords.hasOwnProperty(prop)) {
                return 'property';
              } else if (propertyKeywords.hasOwnProperty(word)) {
                state.prevProp = word;
                return 'property';
              } else if (fontProperties.hasOwnProperty(word)) {
                return 'property';
              }
              return 'tag';
            } else if (stream.match(/ *:/, false)) {
              // "word:" with the value on following lines — switch to the
              // value half and open a scope.
              indent(state);
              state.cursorHalf = 1;
              state.prevProp = stream.current().toLowerCase();
              return 'property';
            } else if (stream.match(/ *,/, false)) {
              // One selector in a comma-separated list.
              return 'tag';
            } else {
              // Plain element selector opens a scope.
              indent(state);
              return 'tag';
            }
          }

          if (ch === ':') {
            if (stream.match(pseudoElementsRegexp)) {
              // could be a pseudo-element
              return 'variable-3';
            }
            stream.next();
            state.cursorHalf = 1;
            return 'operator';
          }
        } // cursorHalf===0 ends here
        else {
          // state.cursorHalf === 1: value side of a declaration. Each
          // branch resets cursorHalf at end of line.

          if (ch === '#') {
            stream.next();
            // Hex numbers
            if (stream.match(/[0-9a-fA-F]{6}|[0-9a-fA-F]{3}/)) {
              if (isEndLine(stream)) {
                state.cursorHalf = 0;
              }
              return 'number';
            }
          }

          // Numbers
          if (stream.match(/^-?[0-9\.]+/)) {
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            return 'number';
          }

          // Units
          if (stream.match(/^(px|em|in)\b/)) {
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            return 'unit';
          }

          if (stream.match(keywordsRegexp)) {
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            return 'keyword';
          }

          if (stream.match(/^url/) && stream.peek() === '(') {
            state.tokenizer = urlTokens;
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            return 'atom';
          }

          // Variables
          if (ch === '$') {
            stream.next();
            stream.eatWhile(/[\w-]/);
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            return 'variable-2';
          }

          // bang character for !important, !default, etc.
          if (ch === '!') {
            stream.next();
            state.cursorHalf = 0;
            return stream.match(/^[\w]+/) ? 'keyword' : 'operator';
          }

          if (stream.match(opRegexp)) {
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            return 'operator';
          }

          // attributes
          if (stream.eatWhile(/[\w-]/)) {
            if (isEndLine(stream)) {
              state.cursorHalf = 0;
            }
            word = stream.current().toLowerCase();
            if (valueKeywords.hasOwnProperty(word)) {
              return 'atom';
            } else if (colorKeywords.hasOwnProperty(word)) {
              return 'keyword';
            } else if (propertyKeywords.hasOwnProperty(word)) {
              state.prevProp = stream.current().toLowerCase();
              return 'property';
            } else {
              return 'tag';
            }
          }

          //stream.eatSpace();
          if (isEndLine(stream)) {
            state.cursorHalf = 0;
            return null;
          }
        } // else ends here

        if (stream.match(opRegexp)) return 'operator';

        // If we haven't returned by now, we move 1 character
        // and return an error
        stream.next();
        return null;
      }

      // Wraps tokenBase: resets the per-line indent counter, applies
      // dedents for '@return'/'}', and prunes scopes deeper than the
      // current token's effective indentation.
      function tokenLexer(stream, state) {
        if (stream.sol()) state.indentCount = 0;

        var style = state.tokenizer(stream, state);
        var current = stream.current();

        if (current === '@return' || current === '}') {
          dedent(state);
        }

        if (style !== null) {
          var startOfToken = stream.pos - current.length;
          var withCurrentIndent = startOfToken + config.indentUnit * state.indentCount;

          var newScopes = [];
          for (var i = 0; i < state.scopes.length; i++) {
            var scope = state.scopes[i];
            if (scope.offset <= withCurrentIndent) newScopes.push(scope);
          }
          state.scopes = newScopes;
        }

        return style;
      }

      return {
        startState: function () {
          return {
            tokenizer: tokenBase,
            scopes: [{ offset: 0, type: 'sass' }],
            indentCount: 0,
            cursorHalf: 0, // cursor half tells us if cursor lies after (1)
            // or before (0) colon (well... more or less)
            definedVars: [],
            definedMixins: [],
          };
        },
        token: function (stream, state) {
          var style = tokenLexer(stream, state);
          state.lastToken = { style: style, content: stream.current() };
          return style;
        },
        indent: function (state) {
          return state.scopes[0].offset;
        },
        blockCommentStart: '/*',
        blockCommentEnd: '*/',
        lineComment: '//',
        fold: 'indent',
      };
    },
    'css'
  );

  CodeMirror.defineMIME('text/x-sass', 'sass');
});