Types
TokType = enum
  tkInvalid, tkEof, tkSymbol,
  tkAddr, tkAnd, tkAs, tkAsm, tkBind, tkBlock, tkBreak, tkCase, tkCast,
  tkConcept, tkConst, tkContinue, tkConverter, tkDefer, tkDiscard, tkDistinct,
  tkDiv, tkDo, tkElif, tkElse, tkEnd, tkEnum, tkExcept, tkExport, tkFinally,
  tkFor, tkFrom, tkFunc, tkIf, tkImport, tkIn, tkInclude, tkInterface, tkIs,
  tkIsnot, tkIterator, tkLet, tkMacro, tkMethod, tkMixin, tkMod, tkNil, tkNot,
  tkNotin, tkObject, tkOf, tkOr, tkOut, tkProc, tkPtr, tkRaise, tkRef,
  tkReturn, tkShl, tkShr, tkStatic, tkTemplate, tkTry, tkTuple, tkType,
  tkUsing, tkVar, tkWhen, tkWhile, tkXor, tkYield,
  tkIntLit, tkInt8Lit, tkInt16Lit, tkInt32Lit, tkInt64Lit, tkUIntLit,
  tkUInt8Lit, tkUInt16Lit, tkUInt32Lit, tkUInt64Lit, tkFloatLit, tkFloat32Lit,
  tkFloat64Lit, tkFloat128Lit, tkStrLit, tkRStrLit, tkTripleStrLit, tkGStrLit,
  tkGTripleStrLit, tkCharLit,
  tkParLe, tkParRi, tkBracketLe, tkBracketRi, tkCurlyLe, tkCurlyRi,
  tkBracketDotLe, tkBracketDotRi, tkCurlyDotLe, tkCurlyDotRi, tkParDotLe,
  tkParDotRi, tkComma, tkSemiColon, tkColon, tkColonColon, tkEquals, tkDot,
  tkDotDot, tkBracketLeColon, tkOpr, tkComment, tkAccent, tkSpaces,
  tkInfixOpr, tkPrefixOpr, tkPostfixOpr
TokTypes = set[TokType]
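A TokTypes set is handy for range-based classification of token kinds, e.g. "is this a keyword?" or "is this a literal?". A minimal sketch, assuming the compiler sources are importable; the compiler/lexer import path is an assumption, not part of this module's interface:

  import compiler/lexer

  const
    KeywordTokens: TokTypes = {tokKeywordLow .. tokKeywordHigh}
    LiteralTokens: TokTypes = {tkIntLit .. tkCharLit}

  assert tkWhile in KeywordTokens
  assert tkStrLit in LiteralTokens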
NumericalBase = enum base10, base2, base8, base16
Token = object
  tokType*: TokType
  indent*: int
  ident*: PIdent
  iNumber*: BiggestInt
  fNumber*: BiggestFloat
  base*: NumericalBase
  strongSpaceA*: int8
  strongSpaceB*: int8
  literal*: string
  line*, col*: int
  when defined(nimpretty):
    offsetA*, offsetB*: int
    commentOffsetA*, commentOffsetB*: int
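Which fields of a Token are meaningful depends on tokType: integer literals fill iNumber, float literals fill fNumber, string-like literals fill literal, and symbols, keywords and operators carry a PIdent in ident (its exported s field, defined in compiler/idents, holds the text). A hedged sketch of a hypothetical helper that dispatches on the kind:

  import compiler/[lexer, idents]

  proc describe(tok: Token): string =
    # Hypothetical helper: summarize a token using the fields its kind fills.
    case tok.tokType
    of tkIntLit .. tkInt64Lit, tkUIntLit .. tkUInt64Lit:
      "integer literal " & $tok.iNumber
    of tkFloatLit .. tkFloat128Lit:
      "float literal " & $tok.fNumber
    of tkStrLit .. tkCharLit:
      "string/char literal " & tok.literal
    of tkSymbol, tokKeywordLow .. tokKeywordHigh:
      "identifier or keyword " & tok.ident.s
    else:
      "punctuation or operator " & TokTypeToStr[tok.tokType]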
ErrorHandler = proc (conf: ConfigRef; info: TLineInfo; msg: TMsgKind; arg: string)
Lexer = object of TBaseLexer
  fileIdx*: FileIndex
  indentAhead*: int
  currLineIndent*: int
  strongSpaces*, allowTabs*: bool
  errorHandler*: ErrorHandler
  cache*: IdentCache
  when defined(nimsuggest):
    previousToken: TLineInfo
  config*: ConfigRef
Consts
MaxLineLength = 80
numChars: set[char] = {'0'..'9', 'a'..'z', 'A'..'Z'}
SymChars: set[char] = {'a'..'z', 'A'..'Z', '0'..'9', '\x80'..'\xFF'}
SymStartChars: set[char] = {'a'..'z', 'A'..'Z', '\x80'..'\xFF'}
OpChars: set[char] = {'+', '-', '*', '/', '\\', '<', '>', '!', '?', '^', '.', '|', '=', '%', '&', '$', '@', '~', ':'}
tokKeywordLow = tkAddr
tokKeywordHigh = tkYield
TokTypeToStr: array[TokType, string] = [
  "tkInvalid", "[EOF]", "tkSymbol",
  "addr", "and", "as", "asm", "bind", "block", "break", "case", "cast",
  "concept", "const", "continue", "converter", "defer", "discard", "distinct",
  "div", "do", "elif", "else", "end", "enum", "except", "export", "finally",
  "for", "from", "func", "if", "import", "in", "include", "interface", "is",
  "isnot", "iterator", "let", "macro", "method", "mixin", "mod", "nil", "not",
  "notin", "object", "of", "or", "out", "proc", "ptr", "raise", "ref",
  "return", "shl", "shr", "static", "template", "try", "tuple", "type",
  "using", "var", "when", "while", "xor", "yield",
  "tkIntLit", "tkInt8Lit", "tkInt16Lit", "tkInt32Lit", "tkInt64Lit",
  "tkUIntLit", "tkUInt8Lit", "tkUInt16Lit", "tkUInt32Lit", "tkUInt64Lit",
  "tkFloatLit", "tkFloat32Lit", "tkFloat64Lit", "tkFloat128Lit", "tkStrLit",
  "tkRStrLit", "tkTripleStrLit", "tkGStrLit", "tkGTripleStrLit", "tkCharLit",
  "(", ")", "[", "]", "{", "}", "[.", ".]", "{.", ".}", "(.", ".)",
  ",", ";", ":", "::", "=", ".", "..", "[:",
  "tkOpr", "tkComment", "`", "tkSpaces", "tkInfixOpr", "tkPrefixOpr",
  "tkPostfixOpr"]
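TokTypeToStr maps keyword and punctuation kinds to their literal spelling, while kinds whose text varies per token (symbols, literals, operators) map to their tk* name. For example:

  import compiler/lexer

  assert TokTypeToStr[tkAnd] == "and"
  assert TokTypeToStr[tkBracketDotLe] == "[."
  assert TokTypeToStr[tkIntLit] == "tkIntLit"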
Procs
proc getLineInfo(L: Lexer; tok: Token): TLineInfo {.inline, raises: [], tags: [].}
proc isKeyword(kind: TokType): bool {.raises: [], tags: [].}
proc isNimIdentifier(s: string): bool {.raises: [], tags: [].}
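Both predicates are pure checks: isKeyword tests whether a token kind lies in the keyword range, and isNimIdentifier tests whether a string is lexically a valid Nim identifier. A small sketch (the sample strings are arbitrary):

  import compiler/lexer

  assert isKeyword(tkProc)
  assert not isKeyword(tkSymbol)
  assert isNimIdentifier("fooBar")
  assert not isNimIdentifier("123abc")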
proc `$`(tok: Token): string {.raises: [], tags: [].}
proc prettyTok(tok: Token): string {.raises: [], tags: [].}
proc printTok(conf: ConfigRef; tok: Token) {.raises: [Exception, IOError], tags: [RootEffect, WriteIOEffect].}
proc initToken(L: var Token) {.raises: [], tags: [].}
proc openLexer(lex: var Lexer; fileIdx: FileIndex; inputstream: PLLStream; cache: IdentCache; config: ConfigRef)
  {.raises: [IOError, Exception], tags: [ReadIOEffect, RootEffect].}
proc openLexer(lex: var Lexer; filename: AbsoluteFile; inputstream: PLLStream; cache: IdentCache; config: ConfigRef)
  {.raises: [IOError, Exception, KeyError], tags: [ReadIOEffect, RootEffect, ReadDirEffect].}
proc closeLexer(lex: var Lexer) {.raises: [], tags: [].}
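A typical driver pairs these procs with rawGetTok (documented below): open a lexer over a stream, pull tokens until tkEof, then close it. The following is only a sketch built on assumptions: the import paths and the helpers llStreamOpen, newIdentCache and newConfigRef come from the sibling modules llstream, idents and options, not from this module, and the file path merely labels source positions since the content comes from the stream.

  import compiler/[lexer, llstream, idents, options, pathutils]

  let
    conf = newConfigRef()
    cache = newIdentCache()

  var
    lex: Lexer
    tok: Token

  initToken(tok)
  openLexer(lex, AbsoluteFile"/tmp/snippet.nim",
            llStreamOpen("echo 1 + 2\n"), cache, conf)
  while true:
    rawGetTok(lex, tok)            # fill `tok` with the next token
    if tok.tokType == tkEof: break
    echo $tok                      # print the token's textual form
  closeLexer(lex)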
proc lexMessage(L: Lexer; msg: TMsgKind; arg = "")
  {.raises: [Exception, ValueError, IOError, ERecoverableError],
    tags: [RootEffect, WriteIOEffect, ReadIOEffect, ReadEnvEffect].}
proc lexMessageTok(L: Lexer; msg: TMsgKind; tok: Token; arg = "")
  {.raises: [Exception, ValueError, IOError, ERecoverableError],
    tags: [RootEffect, WriteIOEffect, ReadIOEffect, ReadEnvEffect].}
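Both report a diagnostic through the lexer's errorHandler (or the default message pipeline): lexMessage uses the lexer's current position, lexMessageTok the position stored in a token. A hedged sketch reusing lex and tok from the tokenizing example above; errGenerated is a TMsgKind from compiler/lineinfos and is an assumption here:

  import compiler/lineinfos

  lexMessage(lex, errGenerated, "tabs are not allowed here")
  lexMessageTok(lex, errGenerated, tok, "unexpected token: " & $tok)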
proc getPrecedence(tok: Token): int {.raises: [], tags: [].}
- Calculates the precedence of the given token.
proc newlineFollows(L: Lexer): bool {.raises: [], tags: [].}
proc rawGetTok(L: var Lexer; tok: var Token)
  {.raises: [Exception, ValueError, IOError, ERecoverableError],
    tags: [RootEffect, WriteIOEffect, ReadIOEffect, ReadEnvEffect].}
proc getIndentWidth(fileIdx: FileIndex; inputstream: PLLStream; cache: IdentCache; config: ConfigRef): int
  {.raises: [IOError, Exception, ValueError, ERecoverableError],
    tags: [ReadIOEffect, RootEffect, WriteIOEffect, ReadEnvEffect].}
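getIndentWidth lexes the given stream and returns the indentation width it detects. A sketch under the same assumptions as above; fileInfoIdx is assumed to come from compiler/msgs:

  import compiler/[lexer, llstream, idents, options, msgs, pathutils]

  let
    conf = newConfigRef()
    cache = newIdentCache()
    indentIdx = fileInfoIdx(conf, AbsoluteFile"/tmp/indented.nim")
    width = getIndentWidth(indentIdx, llStreamOpen("if true:\n  echo 1\n"),
                           cache, conf)
  echo "detected indent width: ", width   # 2 for this two-space snippet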
proc getPrecedence(ident: PIdent): int {.raises: [], tags: [].}
- Assumes ident is already a binary operator.
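Both getPrecedence overloads return the binding strength the parser uses for infix operators; larger values bind more tightly. A sketch using the PIdent overload (newIdentCache and getIdent come from compiler/idents and are assumptions here):

  import compiler/[lexer, idents]

  let cache = newIdentCache()
  # `*` binds more tightly than `+`, matching standard arithmetic.
  assert getPrecedence(cache.getIdent("*")) > getPrecedence(cache.getIdent("+"))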