- // CodeMirror, copyright (c) by Marijn Haverbeke and others
- // Distributed under an MIT license: https://codemirror.net/LICENSE
- ;(function (mod) {
- if (typeof exports == 'object' && typeof module == 'object')
- // CommonJS
- mod(require('../../lib/codemirror'))
- else if (typeof define == 'function' && define.amd)
- // AMD
- define(['../../lib/codemirror'], mod)
- // Plain browser env
- else mod(CodeMirror)
- })(function (CodeMirror) {
- 'use strict'
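- // Build a regexp that matches any one of the given words at the start of the
- // stream, up to a word boundary.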
- function wordRegexp(words) {
- return new RegExp('^((' + words.join(')|(') + '))\\b')
- }
- var wordOperators = wordRegexp(['and', 'or', 'not', 'is'])
- var commonKeywords = [
- 'as',
- 'assert',
- 'break',
- 'class',
- 'continue',
- 'def',
- 'del',
- 'elif',
- 'else',
- 'except',
- 'finally',
- 'for',
- 'from',
- 'global',
- 'if',
- 'import',
- 'lambda',
- 'pass',
- 'raise',
- 'return',
- 'try',
- 'while',
- 'with',
- 'yield',
- 'in',
- ]
- var commonBuiltins = [
- 'abs',
- 'all',
- 'any',
- 'bin',
- 'bool',
- 'bytearray',
- 'callable',
- 'chr',
- 'classmethod',
- 'compile',
- 'complex',
- 'delattr',
- 'dict',
- 'dir',
- 'divmod',
- 'enumerate',
- 'eval',
- 'filter',
- 'float',
- 'format',
- 'frozenset',
- 'getattr',
- 'globals',
- 'hasattr',
- 'hash',
- 'help',
- 'hex',
- 'id',
- 'input',
- 'int',
- 'isinstance',
- 'issubclass',
- 'iter',
- 'len',
- 'list',
- 'locals',
- 'map',
- 'max',
- 'memoryview',
- 'min',
- 'next',
- 'object',
- 'oct',
- 'open',
- 'ord',
- 'pow',
- 'property',
- 'range',
- 'repr',
- 'reversed',
- 'round',
- 'set',
- 'setattr',
- 'slice',
- 'sorted',
- 'staticmethod',
- 'str',
- 'sum',
- 'super',
- 'tuple',
- 'type',
- 'vars',
- 'zip',
- '__import__',
- 'NotImplemented',
- 'Ellipsis',
- '__debug__',
- ]
- CodeMirror.registerHelper('hintWords', 'python', commonKeywords.concat(commonBuiltins))
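- // top() returns the innermost (most recently pushed) scope on the state's scope stack.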
- function top(state) {
- return state.scopes[state.scopes.length - 1]
- }
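- // conf is the editor configuration (indentUnit etc.); parserConf is the mode/MIME
- // configuration (version, extra_keywords, extra_builtins, singleLineStringErrors, ...).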
- CodeMirror.defineMode('python', function (conf, parserConf) {
- var ERRORCLASS = 'error'
- var delimiters = parserConf.delimiters || parserConf.singleDelimiters || /^[\(\)\[\]\{\}@,:`=;\.\\]/
- // (Backwards-compatibility with old, cumbersome config system)
- var operators = [
- parserConf.singleOperators,
- parserConf.doubleOperators,
- parserConf.doubleDelimiters,
- parserConf.tripleDelimiters,
- parserConf.operators || /^([-+*/%\/&|^]=?|[<>=]+|\/\/=?|\*\*=?|!=|[~!@]|\.\.\.)/,
- ]
- for (var i = 0; i < operators.length; i++) if (!operators[i]) operators.splice(i--, 1)
- var hangingIndent = parserConf.hangingIndent || conf.indentUnit
- var myKeywords = commonKeywords,
- myBuiltins = commonBuiltins
- if (parserConf.extra_keywords != undefined) myKeywords = myKeywords.concat(parserConf.extra_keywords)
- if (parserConf.extra_builtins != undefined) myBuiltins = myBuiltins.concat(parserConf.extra_builtins)
- var py3 = !(parserConf.version && Number(parserConf.version) < 3)
- if (py3) {
- // since http://legacy.python.org/dev/peps/pep-0465/ @ is also an operator
- var identifiers = parserConf.identifiers || /^[_A-Za-z\u00A1-\uFFFF][_A-Za-z0-9\u00A1-\uFFFF]*/
- myKeywords = myKeywords.concat(['nonlocal', 'False', 'True', 'None', 'async', 'await'])
- myBuiltins = myBuiltins.concat(['ascii', 'bytes', 'exec', 'print'])
- var stringPrefixes = new RegExp('^(([rbuf]|(br)|(rb)|(fr)|(rf))?(\'{3}|"{3}|[\'"]))', 'i')
- } else {
- var identifiers = parserConf.identifiers || /^[_A-Za-z][_A-Za-z0-9]*/
- myKeywords = myKeywords.concat(['exec', 'print'])
- myBuiltins = myBuiltins.concat([
- 'apply',
- 'basestring',
- 'buffer',
- 'cmp',
- 'coerce',
- 'execfile',
- 'file',
- 'intern',
- 'long',
- 'raw_input',
- 'reduce',
- 'reload',
- 'unichr',
- 'unicode',
- 'xrange',
- 'False',
- 'True',
- 'None',
- ])
- var stringPrefixes = new RegExp('^(([rubf]|(ur)|(br))?(\'{3}|"{3}|[\'"]))', 'i')
- }
- var keywords = wordRegexp(myKeywords)
- var builtins = wordRegexp(myBuiltins)
- // tokenizers
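- // tokenBase tracks indentation at the start of each line, opening or closing
- // 'py' scopes before handing the stream to tokenBaseInner.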
- function tokenBase(stream, state) {
- var sol = stream.sol() && state.lastToken != '\\'
- if (sol) state.indent = stream.indentation()
- // Handle scope changes
- if (sol && top(state).type == 'py') {
- var scopeOffset = top(state).offset
- if (stream.eatSpace()) {
- var lineOffset = stream.indentation()
- if (lineOffset > scopeOffset) pushPyScope(state)
- else if (lineOffset < scopeOffset && dedent(stream, state) && stream.peek() != '#') state.errorToken = true
- return null
- } else {
- var style = tokenBaseInner(stream, state)
- if (scopeOffset > 0 && dedent(stream, state)) style += ' ' + ERRORCLASS
- return style
- }
- }
- return tokenBaseInner(stream, state)
- }
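- // tokenBaseInner classifies a single token: comment, number literal, string
- // prefix, operator, delimiter, keyword, builtin or identifier. inFormat is true
- // while tokenizing inside an f-string replacement field, where '#' does not
- // start a comment and unrecognized characters are not flagged as errors.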
- function tokenBaseInner(stream, state, inFormat) {
- if (stream.eatSpace()) return null
- // Handle Comments
- if (!inFormat && stream.match(/^#.*/)) return 'comment'
- // Handle Number Literals
- if (stream.match(/^[0-9\.]/, false)) {
- var floatLiteral = false
- // Floats
- if (stream.match(/^[\d_]*\.\d+(e[\+\-]?\d+)?/i)) {
- floatLiteral = true
- }
- if (stream.match(/^[\d_]+\.\d*/)) {
- floatLiteral = true
- }
- if (stream.match(/^\.\d+/)) {
- floatLiteral = true
- }
- if (floatLiteral) {
- // Float literals may be "imaginary"
- stream.eat(/J/i)
- return 'number'
- }
- // Integers
- var intLiteral = false
- // Hex
- if (stream.match(/^0x[0-9a-f_]+/i)) intLiteral = true
- // Binary
- if (stream.match(/^0b[01_]+/i)) intLiteral = true
- // Octal
- if (stream.match(/^0o[0-7_]+/i)) intLiteral = true
- // Decimal
- if (stream.match(/^[1-9][\d_]*(e[\+\-]?[\d_]+)?/)) {
- // Decimal literals may be "imaginary"
- stream.eat(/J/i)
- // TODO - Can you have imaginary longs?
- intLiteral = true
- }
- // A zero on its own, not the start of a longer numeric literal.
- if (stream.match(/^0(?![\dx])/i)) intLiteral = true
- if (intLiteral) {
- // Integer literals may be "long"
- stream.eat(/L/i)
- return 'number'
- }
- }
- // Handle Strings
- if (stream.match(stringPrefixes)) {
- var isFmtString = stream.current().toLowerCase().indexOf('f') !== -1
- if (!isFmtString) {
- state.tokenize = tokenStringFactory(stream.current(), state.tokenize)
- return state.tokenize(stream, state)
- } else {
- state.tokenize = formatStringFactory(stream.current(), state.tokenize)
- return state.tokenize(stream, state)
- }
- }
- for (var i = 0; i < operators.length; i++) if (stream.match(operators[i])) return 'operator'
- if (stream.match(delimiters)) return 'punctuation'
- if (state.lastToken == '.' && stream.match(identifiers)) return 'property'
- if (stream.match(keywords) || stream.match(wordOperators)) return 'keyword'
- if (stream.match(builtins)) return 'builtin'
- if (stream.match(/^(self|cls)\b/)) return 'variable-2'
- if (stream.match(identifiers)) {
- if (state.lastToken == 'def' || state.lastToken == 'class') return 'def'
- return 'variable'
- }
- // Handle non-detected items
- stream.next()
- return inFormat ? null : ERRORCLASS
- }
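- // formatStringFactory builds a tokenizer for f-strings: literal text is styled
- // as a string, '{{' and '}}' are escaped braces, and a lone '{' switches to
- // tokenNestedExpr so the embedded expression is highlighted until its matching '}'.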
- function formatStringFactory(delimiter, tokenOuter) {
- while ('rubf'.indexOf(delimiter.charAt(0).toLowerCase()) >= 0) delimiter = delimiter.substr(1)
- var singleline = delimiter.length == 1
- var OUTCLASS = 'string'
- function tokenNestedExpr(depth) {
- return function (stream, state) {
- var inner = tokenBaseInner(stream, state, true)
- if (inner == 'punctuation') {
- if (stream.current() == '{') {
- state.tokenize = tokenNestedExpr(depth + 1)
- } else if (stream.current() == '}') {
- if (depth > 1) state.tokenize = tokenNestedExpr(depth - 1)
- else state.tokenize = tokenString
- }
- }
- return inner
- }
- }
- function tokenString(stream, state) {
- while (!stream.eol()) {
- stream.eatWhile(/[^'"\{\}\\]/)
- if (stream.eat('\\')) {
- stream.next()
- if (singleline && stream.eol()) return OUTCLASS
- } else if (stream.match(delimiter)) {
- state.tokenize = tokenOuter
- return OUTCLASS
- } else if (stream.match('{{')) {
- // '{{' escapes a literal brace inside an f-string; treat it as string content
- return OUTCLASS
- } else if (stream.match('{', false)) {
- // an unescaped '{' starts a replacement field: switch to the nested-expression tokenizer
- state.tokenize = tokenNestedExpr(0)
- if (stream.current()) return OUTCLASS
- else return state.tokenize(stream, state)
- } else if (stream.match('}}')) {
- return OUTCLASS
- } else if (stream.match('}')) {
- // single } in f-string is an error
- return ERRORCLASS
- } else {
- stream.eat(/['"]/)
- }
- }
- if (singleline) {
- if (parserConf.singleLineStringErrors) return ERRORCLASS
- else state.tokenize = tokenOuter
- }
- return OUTCLASS
- }
- tokenString.isString = true
- return tokenString
- }
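- // tokenStringFactory builds a tokenizer for ordinary (non-format) string
- // literals; the prefix letters (r/u/b/f) are stripped from the delimiter so
- // only the quote characters remain.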
- function tokenStringFactory(delimiter, tokenOuter) {
- while ('rubf'.indexOf(delimiter.charAt(0).toLowerCase()) >= 0) delimiter = delimiter.substr(1)
- var singleline = delimiter.length == 1
- var OUTCLASS = 'string'
- function tokenString(stream, state) {
- while (!stream.eol()) {
- stream.eatWhile(/[^'"\\]/)
- if (stream.eat('\\')) {
- stream.next()
- if (singleline && stream.eol()) return OUTCLASS
- } else if (stream.match(delimiter)) {
- state.tokenize = tokenOuter
- return OUTCLASS
- } else {
- stream.eat(/['"]/)
- }
- }
- if (singleline) {
- if (parserConf.singleLineStringErrors) return ERRORCLASS
- else state.tokenize = tokenOuter
- }
- return OUTCLASS
- }
- tokenString.isString = true
- return tokenString
- }
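- // The scope stack mixes indentation scopes (type 'py') and bracket scopes
- // (type ')', ']' or '}'). pushPyScope opens a new indentation scope one
- // indentUnit deeper than the enclosing 'py' scope.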
- function pushPyScope(state) {
- while (top(state).type != 'py') state.scopes.pop()
- state.scopes.push({ offset: top(state).offset + conf.indentUnit, type: 'py', align: null })
- }
- function pushBracketScope(stream, state, type) {
- var align = stream.match(/^[\s\[\{\(]*(?:#|$)/, false) ? null : stream.column() + 1
- state.scopes.push({ offset: state.indent + hangingIndent, type: type, align: align })
- }
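- // dedent pops scopes until one matches the current line's indentation; it
- // returns true (an indentation error) if a bracket scope is hit first or the
- // line does not line up with any open indentation scope.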
- function dedent(stream, state) {
- var indented = stream.indentation()
- while (state.scopes.length > 1 && top(state).offset > indented) {
- if (top(state).type != 'py') return true
- state.scopes.pop()
- }
- return top(state).offset != indented
- }
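- // tokenLexer post-processes each token: it styles decorators, tracks 'lambda'
- // and a trailing ':' that opens a new indentation scope, and pushes or pops
- // bracket scopes for (, [ and {.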
- function tokenLexer(stream, state) {
- if (stream.sol()) {
- state.beginningOfLine = true
- state.dedent = false
- }
- var style = state.tokenize(stream, state)
- var current = stream.current()
- // Handle decorators
- if (state.beginningOfLine && current == '@') return stream.match(identifiers, false) ? 'meta' : py3 ? 'operator' : ERRORCLASS
- if (/\S/.test(current)) state.beginningOfLine = false
- if ((style == 'variable' || style == 'builtin') && state.lastToken == 'meta') style = 'meta'
- // Handle scope changes.
- if (current == 'pass' || current == 'return') state.dedent = true
- if (current == 'lambda') state.lambda = true
- if (current == ':' && !state.lambda && top(state).type == 'py' && stream.match(/^\s*(?:#|$)/, false)) pushPyScope(state)
- if (current.length == 1 && !/string|comment/.test(style)) {
- var delimiter_index = '[({'.indexOf(current)
- if (delimiter_index != -1) pushBracketScope(stream, state, '])}'.slice(delimiter_index, delimiter_index + 1))
- delimiter_index = '])}'.indexOf(current)
- if (delimiter_index != -1) {
- if (top(state).type == current) state.indent = state.scopes.pop().offset - hangingIndent
- else return ERRORCLASS
- }
- }
- if (state.dedent && stream.eol() && top(state).type == 'py' && state.scopes.length > 1) state.scopes.pop()
- return style
- }
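- // The mode object handed back to CodeMirror: startState, token and indent,
- // plus editing hints (electricInput, closeBrackets, lineComment, fold).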
- var external = {
- startState: function (basecolumn) {
- return {
- tokenize: tokenBase,
- scopes: [{ offset: basecolumn || 0, type: 'py', align: null }],
- indent: basecolumn || 0,
- lastToken: null,
- lambda: false,
- dedent: 0,
- }
- },
- token: function (stream, state) {
- var addErr = state.errorToken
- if (addErr) state.errorToken = false
- var style = tokenLexer(stream, state)
- if (style && style != 'comment') state.lastToken = style == 'keyword' || style == 'punctuation' ? stream.current() : style
- if (style == 'punctuation') style = null
- if (stream.eol() && state.lambda) state.lambda = false
- return addErr ? style + ' ' + ERRORCLASS : style
- },
- indent: function (state, textAfter) {
- if (state.tokenize != tokenBase) return state.tokenize.isString ? CodeMirror.Pass : 0
- var scope = top(state)
- var closing = scope.type == textAfter.charAt(0) || (scope.type == 'py' && !state.dedent && /^(else:|elif |except |finally:)/.test(textAfter))
- if (scope.align != null) return scope.align - (closing ? 1 : 0)
- else return scope.offset - (closing ? hangingIndent : 0)
- },
- electricInput: /^\s*([\}\]\)]|else:|elif |except |finally:)$/,
- closeBrackets: { triples: '\'"' },
- lineComment: '#',
- fold: 'indent',
- }
- return external
- })
- CodeMirror.defineMIME('text/x-python', 'python')
- var words = function (str) {
- return str.split(' ')
- }
- CodeMirror.defineMIME('text/x-cython', {
- name: 'python',
- extra_keywords: words('by cdef cimport cpdef ctypedef enum except ' + 'extern gil include nogil property public ' + 'readonly struct union DEF IF ELIF ELSE'),
- })
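- // Example usage (a minimal sketch, not part of the mode itself; the textarea
- // id 'editor' and the option values shown are assumptions):
- //
- //   var editor = CodeMirror.fromTextArea(document.getElementById('editor'), {
- //     mode: { name: 'python', version: 3, singleLineStringErrors: false },
- //     indentUnit: 4,
- //     lineNumbers: true,
- //   })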
- })