Types
TokType = enum
  tkInvalid = "tkInvalid", tkEof = "[EOF]", tkSymbol = "tkSymbol",
  tkAddr = "addr", tkAnd = "and", tkAs = "as", tkAsm = "asm",
  tkBind = "bind", tkBlock = "block", tkBreak = "break", tkCase = "case",
  tkCast = "cast", tkConcept = "concept", tkConst = "const",
  tkContinue = "continue", tkConverter = "converter", tkDefer = "defer",
  tkDiscard = "discard", tkDistinct = "distinct", tkDiv = "div", tkDo = "do",
  tkElif = "elif", tkElse = "else", tkEnd = "end", tkEnum = "enum",
  tkExcept = "except", tkExport = "export", tkFinally = "finally",
  tkFor = "for", tkFrom = "from", tkFunc = "func", tkIf = "if",
  tkImport = "import", tkIn = "in", tkInclude = "include",
  tkInterface = "interface", tkIs = "is", tkIsnot = "isnot",
  tkIterator = "iterator", tkLet = "let", tkMacro = "macro",
  tkMethod = "method", tkMixin = "mixin", tkMod = "mod", tkNil = "nil",
  tkNot = "not", tkNotin = "notin", tkObject = "object", tkOf = "of",
  tkOr = "or", tkOut = "out", tkProc = "proc", tkPtr = "ptr",
  tkRaise = "raise", tkRef = "ref", tkReturn = "return", tkShl = "shl",
  tkShr = "shr", tkStatic = "static", tkTemplate = "template", tkTry = "try",
  tkTuple = "tuple", tkType = "type", tkUsing = "using", tkVar = "var",
  tkWhen = "when", tkWhile = "while", tkXor = "xor", tkYield = "yield",
  tkIntLit = "tkIntLit", tkInt8Lit = "tkInt8Lit", tkInt16Lit = "tkInt16Lit",
  tkInt32Lit = "tkInt32Lit", tkInt64Lit = "tkInt64Lit",
  tkUIntLit = "tkUIntLit", tkUInt8Lit = "tkUInt8Lit",
  tkUInt16Lit = "tkUInt16Lit", tkUInt32Lit = "tkUInt32Lit",
  tkUInt64Lit = "tkUInt64Lit", tkFloatLit = "tkFloatLit",
  tkFloat32Lit = "tkFloat32Lit", tkFloat64Lit = "tkFloat64Lit",
  tkFloat128Lit = "tkFloat128Lit", tkStrLit = "tkStrLit",
  tkRStrLit = "tkRStrLit", tkTripleStrLit = "tkTripleStrLit",
  tkGStrLit = "tkGStrLit", tkGTripleStrLit = "tkGTripleStrLit",
  tkCharLit = "tkCharLit", tkParLe = "(", tkParRi = ")", tkBracketLe = "[",
  tkBracketRi = "]", tkCurlyLe = "{", tkCurlyRi = "}", tkBracketDotLe = "[.",
  tkBracketDotRi = ".]", tkCurlyDotLe = "{.", tkCurlyDotRi = ".}",
  tkParDotLe = "(.", tkParDotRi = ".)", tkComma = ",", tkSemiColon = ";",
  tkColon = ":", tkColonColon = "::", tkEquals = "=", tkDot = ".",
  tkDotDot = "..", tkBracketLeColon = "[:", tkOpr, tkComment, tkAccent = "`",
  tkSpaces, tkInfixOpr, tkPrefixOpr, tkPostfixOpr
TokTypes = set[TokType]
NumericalBase = enum base10, base2, base8, base16
Token = object
  tokType*: TokType
  indent*: int
  ident*: PIdent
  iNumber*: BiggestInt
  fNumber*: BiggestFloat
  base*: NumericalBase
  strongSpaceA*: int8
  strongSpaceB*: int8
  literal*: string
  line*, col*: int
  when defined(nimpretty):
    offsetA*, offsetB*: int
    commentOffsetA*, commentOffsetB*: int
ErrorHandler = proc (conf: ConfigRef; info: TLineInfo; msg: TMsgKind; arg: string)
Lexer = object of TBaseLexer
  fileIdx*: FileIndex
  indentAhead*: int
  currLineIndent*: int
  strongSpaces*, allowTabs*: bool
  errorHandler*: ErrorHandler
  cache*: IdentCache
  when defined(nimsuggest):
    previousToken: TLineInfo
  config*: ConfigRef
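A Lexer reads from a PLLStream and fills in a Token on each call to rawGetTok until tkEof is produced. The following is a minimal sketch of driving it by hand, assuming the compiler modules (lexer, llstream, idents, options, pathutils) are on the import path, for example via the compiler Nimble package; the file name passed to openLexer is only used for position information here.

import compiler/[lexer, llstream, idents, options, pathutils]

var
  conf = newConfigRef()   # default configuration; assumed sufficient for plain lexing
  cache = newIdentCache()
  lex: Lexer
  tok: Token

# Lex a string instead of a file on disk.
openLexer(lex, AbsoluteFile"snippet.nim", llStreamOpen("let x = 1 + 2"), cache, conf)
initToken(tok)
while true:
  rawGetTok(lex, tok)
  if tok.tokType == tkEof: break
  echo $tok, " (line ", tok.line, ", col ", tok.col, ")"
closeLexer(lex)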
Consts
MaxLineLength = 80
numChars: set[char] = {'0'..'9', 'a'..'z', 'A'..'Z'}
SymChars: set[char] = {'a'..'z', 'A'..'Z', '0'..'9', '\x80'..'\xFF'}
SymStartChars: set[char] = {'a'..'z', 'A'..'Z', '\x80'..'\xFF'}
OpChars: set[char] = {'+', '-', '*', '/', '\\', '<', '>', '!', '?', '^', '.', '|', '=', '%', '&', '$', '@', '~', ':'}
tokKeywordLow = tkAddr
tokKeywordHigh = tkYield
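These character sets and the tokKeywordLow..tokKeywordHigh range can be used directly for character and token classification. A small hedged illustration, assuming compiler/lexer is importable:

import compiler/lexer

echo 'a' in SymStartChars                      # true: identifiers may start with a letter
echo '3' in SymStartChars                      # false: ...but not with a digit
echo '3' in SymChars                           # true: digits are fine after the first character
echo '+' in OpChars                            # true: '+' can begin a user-defined operator
echo tkAnd in {tokKeywordLow..tokKeywordHigh}  # true: `and` is a keyword token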
Procs
proc getLineInfo(L: Lexer; tok: Token): TLineInfo {.inline, raises: [], tags: [].}
proc isKeyword(kind: TokType): bool {.raises: [], tags: [].}
proc isNimIdentifier(s: string): bool {.raises: [], tags: [].}
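A brief hedged example of both classification helpers, again assuming compiler/lexer is importable:

import compiler/lexer

echo isKeyword(tkProc)          # true: tkProc lies in tokKeywordLow..tokKeywordHigh
echo isNimIdentifier("fooBar")  # true
echo isNimIdentifier("1abc")    # false: an identifier cannot start with a digit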
proc `$`(tok: Token): string {.raises: [], tags: [].}
proc prettyTok(tok: Token): string {.raises: [], tags: [].}
proc printTok(conf: ConfigRef; tok: Token) {.raises: [Exception, IOError], tags: [RootEffect, WriteIOEffect].}
proc initToken(L: var Token) {.raises: [], tags: [].}
proc openLexer(lex: var Lexer; fileIdx: FileIndex; inputstream: PLLStream; cache: IdentCache; config: ConfigRef) {.raises: [IOError, Exception], tags: [ReadIOEffect, RootEffect].}
proc openLexer(lex: var Lexer; filename: AbsoluteFile; inputstream: PLLStream; cache: IdentCache; config: ConfigRef) {.raises: [IOError, Exception, KeyError], tags: [ReadIOEffect, RootEffect, ReadDirEffect].}
proc closeLexer(lex: var Lexer) {.raises: [], tags: [].}
proc lexMessage(L: Lexer; msg: TMsgKind; arg = "") {.raises: [Exception, ValueError, IOError, ERecoverableError], tags: [RootEffect, WriteIOEffect, ReadIOEffect, ReadEnvEffect].}
proc lexMessageTok(L: Lexer; msg: TMsgKind; tok: Token; arg = "") {.raises: [Exception, ValueError, IOError, ERecoverableError], tags: [RootEffect, WriteIOEffect, ReadIOEffect, ReadEnvEffect].}
proc getPrecedence(tok: Token): int {.raises: [], tags: [].}
- Calculates the precedence of the given token.
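For illustration, a hedged sketch that builds a keyword-operator token by hand and asks for its precedence; real tokens would normally come from rawGetTok as shown earlier.

import compiler/lexer

var tok: Token
initToken(tok)
tok.tokType = tkAnd      # the keyword operator `and`
echo getPrecedence(tok)  # its binary precedence as used by the parser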
proc newlineFollows(L: Lexer): bool {.raises: [], tags: [].}
proc rawGetTok(L: var Lexer; tok: var Token) {.raises: [Exception, ValueError, IOError, ERecoverableError], tags: [RootEffect, WriteIOEffect, ReadIOEffect, ReadEnvEffect].}
proc getIndentWidth(fileIdx: FileIndex; inputstream: PLLStream; cache: IdentCache; config: ConfigRef): int {.raises: [IOError, Exception, ValueError, ERecoverableError], tags: [ReadIOEffect, RootEffect, WriteIOEffect, ReadEnvEffect].}
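A hedged sketch of querying the indentation width of a snippet; fileInfoIdx from compiler/msgs is assumed here to obtain the FileIndex.

import compiler/[lexer, llstream, idents, options, pathutils, msgs]

let
  conf = newConfigRef()
  cache = newIdentCache()
  code = "proc p =\n    echo 1\n"   # body indented with four spaces
  fileIdx = fileInfoIdx(conf, AbsoluteFile"snippet.nim")  # assumed helper from compiler/msgs
echo getIndentWidth(fileIdx, llStreamOpen(code), cache, conf)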
proc getPrecedence(ident: PIdent): int {.raises: [], tags: [].}
- Assumes ident is already a binary operator.
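A hedged sketch for the PIdent overload, looking the operator up through an IdentCache:

import compiler/[lexer, idents]

let cache = newIdentCache()
echo getPrecedence(cache.getIdent("*"))  # multiplicative operators...
echo getPrecedence(cache.getIdent("+"))  # ...bind tighter than additive ones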