Documentation
Index
Constants
This section is empty.
Variables
var TopLevelTokens = []TokenType{
	SeparatorToken,
	FuncKeywordToken,
	TypeKeywordToken,
	VarKeywordToken,
	ConstKeywordToken,
}
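A minimal sketch of how this slice might be used to check whether a scanned token may begin a top-level declaration. The isTopLevel helper below is illustrative only and not part of the package; it assumes it is written inside the same package, where TokenType and TopLevelTokens are in scope:

// isTopLevel reports whether typ is one of the token types that may
// start a top-level declaration (illustrative helper, not in the package).
func isTopLevel(typ TokenType) bool {
	for _, t := range TopLevelTokens {
		if t == typ {
			return true
		}
	}
	return false
}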
Functions
This section is empty.
Types
type KeywordTokenizer
func (KeywordTokenizer) Tokenize
func (t KeywordTokenizer) Tokenize(txt *core.SourceView) (tkn Token, err error)
type PrefixedTokenizer
PrefixedTokenizer scans words prefixed with the provided prefix string. It scans the word until whitespace is encountered.
func (PrefixedTokenizer) Tokenize
func (t PrefixedTokenizer) Tokenize(txt *core.SourceView) (tkn Token, err error)
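The scan described above can be illustrated with a small, self-contained sketch. This is not the package's implementation (its fields are not shown in this index); scanPrefixedWord, its signature, and the "$" prefix are illustrative inventions:

package main

import (
	"fmt"
	"strings"
	"unicode"
)

// scanPrefixedWord illustrates the behavior described for PrefixedTokenizer:
// if s starts with prefix, return the word up to the first whitespace.
func scanPrefixedWord(s, prefix string) (word string, ok bool) {
	if !strings.HasPrefix(s, prefix) {
		return "", false
	}
	end := strings.IndexFunc(s, unicode.IsSpace)
	if end < 0 {
		end = len(s)
	}
	return s[:end], true
}

func main() {
	fmt.Println(scanPrefixedWord("$label add r1 r2", "$")) // $label true
}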
type SpecificTokenizer
type SpecificTokenizer interface {
	Tokenize(txt *core.SourceView) (Token, error)
}
type Token
type Token struct {
	Type TokenType
	View core.UnmanagedSourceView
}
type Tokenizer
type Tokenizer interface {
	Tokenize(core.SourceView) ([]Token, error)
}
func NewTokenizer
func NewTokenizer() Tokenizer
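A hedged usage sketch, written as if it lived in the package's own test file with fmt and the core package imported. How a core.SourceView is actually constructed is not shown in this index, so core.NewSourceView and the sample source string below are hypothetical placeholders:

func ExampleNewTokenizer() {
	tkz := NewTokenizer()
	// Hypothetical constructor: build the core.SourceView however the
	// core package actually provides.
	view := core.NewSourceView("func add { }")
	tokens, err := tkz.Tokenize(view)
	if err != nil {
		fmt.Println("tokenize failed:", err)
		return
	}
	for _, tok := range tokens {
		fmt.Println(tok.Type)
	}
}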
type WordTokenizer
type WordTokenizer struct {
	Token TokenType
}
func (WordTokenizer) Tokenize
func (t WordTokenizer) Tokenize(txt *core.SourceView) (tkn Token, err error)
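Since Tokenize is declared on a WordTokenizer value with the same signature as SpecificTokenizer's method, the compile-time assertion below should hold. It is shown only as an illustration; FuncKeywordToken is one of the TokenType values listed under TopLevelTokens:

// Illustrative compile-time check that WordTokenizer implements SpecificTokenizer.
var _ SpecificTokenizer = WordTokenizer{Token: FuncKeywordToken}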