lex

package
v0.0.0-...-a5d0cb6
Published: Mar 19, 2025 License: GPL-3.0 Imports: 4 Imported by: 0

Documentation

Index

Constants

This section is empty.

Variables

Functions

This section is empty.

Types

type KeywordTokenizer

type KeywordTokenizer struct {
	Keyword string
	Token   TokenType
}

func (KeywordTokenizer) Tokenize

func (t KeywordTokenizer) Tokenize(txt *core.SourceView) (tkn Token, err error)
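
A minimal construction sketch, written as if inside package lex; mapping the literal keywords "func" and "type" to FuncKeywordToken and TypeKeywordToken is an assumption for illustration, not documented behavior.

var (
	funcKeyword = KeywordTokenizer{Keyword: "func", Token: FuncKeywordToken} // assumed keyword spelling
	typeKeyword = KeywordTokenizer{Keyword: "type", Token: TypeKeywordToken} // assumed keyword spelling
)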

type PrefixedTokenizer

type PrefixedTokenizer struct {
	Prefix string
	Token  TokenType
}

Tokenizer that scans words prefixed with the provided prefix string. The word is scanned until whitespace is encountered.

func (PrefixedTokenizer) Tokenize

func (t PrefixedTokenizer) Tokenize(txt *core.SourceView) (tkn Token, err error)
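
A hedged sketch of how prefix-based tokens might be declared; the prefix characters below are illustrative assumptions, not the syntax this package actually expects.

var (
	registers  = PrefixedTokenizer{Prefix: "%", Token: RegisterToken}  // "%" is an assumed prefix
	globals    = PrefixedTokenizer{Prefix: "@", Token: GlobalToken}    // "@" is an assumed prefix
	immediates = PrefixedTokenizer{Prefix: "#", Token: ImmediateToken} // "#" is an assumed prefix
)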

type SpecificTokenizer

type SpecificTokenizer interface {
	Tokenize(txt *core.SourceView) (Token, error)
}
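
KeywordTokenizer, PrefixedTokenizer, and WordTokenizer each expose the single-token Tokenize(*core.SourceView) method shown on this page, so each satisfies SpecificTokenizer; the compile-time assertions below make that explicit.

var (
	_ SpecificTokenizer = KeywordTokenizer{}
	_ SpecificTokenizer = PrefixedTokenizer{}
	_ SpecificTokenizer = WordTokenizer{}
)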

type Token

type Token struct {
	Type TokenType
	View core.UnmanagedSourceView
}

func (Token) String

func (tkn Token) String(ctx core.SourceContext) string

type TokenType

type TokenType uint8
const (
	RegisterToken TokenType = iota
	TypeToken
	LabelToken
	GlobalToken
	ImmediateToken
	EqualToken
	LeftCurlyBraceToken
	RightCurlyBraceToken
	FuncKeywordToken
	TypeKeywordToken
	VarKeywordToken
	ConstKeywordToken
	PointerToken
	RepeatToken
	OperatorToken
	SeparatorToken
)

func (TokenType) String

func (tkn TokenType) String() string
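
Because the token types are consecutive uint8 constants starting at RegisterToken and ending at SeparatorToken, they can be enumerated. A small sketch (assuming the standard fmt package is imported) that prints each numeric value with its String form:

func printTokenTypes() {
	for tt := RegisterToken; tt <= SeparatorToken; tt++ {
		fmt.Printf("%d %s\n", uint8(tt), tt) // %s uses TokenType's String method
	}
}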

type Tokenizer

type Tokenizer interface {
	Tokenize(core.SourceView) ([]Token, error)
}

func NewTokenizer

func NewTokenizer() Tokenizer
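
A hedged sketch of the intended flow; how the core.SourceView itself is obtained is not shown on this page.

func lexSource(view core.SourceView) ([]Token, error) {
	// NewTokenizer builds the package's full multi-token Tokenizer;
	// Tokenize consumes the whole source view and returns the token
	// stream or a lexing error.
	return NewTokenizer().Tokenize(view)
}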

type WordTokenizer

type WordTokenizer struct {
	Token TokenType
}

func (WordTokenizer) Tokenize

func (t WordTokenizer) Tokenize(txt *core.SourceView) (tkn Token, err error)
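
A hedged composition sketch showing how the concrete word-level tokenizers could be collected into one table; the specific keyword, prefix, and token assignments are illustrative assumptions.

var wordTokenizers = []SpecificTokenizer{
	KeywordTokenizer{Keyword: "const", Token: ConstKeywordToken}, // assumed keyword spelling
	PrefixedTokenizer{Prefix: ".", Token: LabelToken},            // "." prefix is an assumption
	WordTokenizer{Token: OperatorToken},                          // token assignment is illustrative
}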
