// Package common holds common interfaces and shared functions for the ll1 module.
package common

import (
	"fmt"
	"regexp"
	"strings"
)

// Location is a position (name, line, column) in the lexed input.
type Location struct {
	Name *string
	Line int
	Col  int
}

func (p Location) String() string {
	name := ""
	if p.Name != nil {
		name = *p.Name
	}
	return fmt.Sprintf("%s:%d:%d:", name, p.Line, p.Col)
}

// Kind is the kind of token.
type Kind int

const (
	EndKind   Kind = -29000
	SkipKind  Kind = -30000
	ErrorKind Kind = -31000
)

// Action is a callback invoked when a lexeme matches; it may produce
// zero or more tokens from the matches.
type Action func(f Lexer, k Kind, matches ...string) []Token

// Value is the value of a token. It can be a string value, an integer,
// or some other custom value.
type Value interface {
	Value() Value
	String() string
}

// StringValue is a Value backed by a string.
type StringValue string

func (sv StringValue) Value() Value   { return sv }
func (sv StringValue) String() string { return string(sv) }

// ErrorValue is a Value that wraps an error.
type ErrorValue struct {
	Err error
}

func (ev ErrorValue) Value() Value   { return ev }
func (ev ErrorValue) String() string { return ev.Err.Error() }

// IntValue is a Value backed by an int64.
type IntValue int64

func (iv IntValue) Value() Value   { return iv }
func (iv IntValue) String() string { return fmt.Sprintf("%d", iv) }

// FloatValue is a Value backed by a float64.
type FloatValue float64

func (fv FloatValue) Value() Value   { return fv }
func (fv FloatValue) String() string { return fmt.Sprintf("%g", fv) }

// Token is a single lexed token with its location, kind, text and value.
type Token interface {
	Location() Location
	Kind() Kind
	Text() string
	Value() Value
}

// Lexer is the interface a lexer for the ll1 module must implement.
type Lexer interface {
	// Accept will accept a regexp and advance, returning the matches.
	// Returns nil if no matches were found.
	Accept(re *regexp.Regexp) []string
	// Location returns the current lexer Location.
	Location() Location
	// EOF returns whether the lexer is at the end of its input.
	EOF() bool
	// MakeToken creates a token with the current lexer Location and
	// the given kind, with its text formatted from form and args.
	MakeToken(kind Kind, form string, args ...interface{}) Token
	// MakeBuilderToken creates a token with the current lexer Location and
	// the given kind. The text is taken from the lexer's string builder, and
	// that builder is reset.
	MakeBuilderToken(kind Kind) Token
	// Builder returns the lexer's string builder, which can be used to append
	// strings or runes to, and which can be returned and cleared when the
	// token is complete.
	Builder() *strings.Builder
	// Lexeme adds a lexeme to the lexer.
	Lexeme(kind Kind, re, context string, act Action) error
	// LexOnce calls the lexer once, returning the tokens it produced.
	LexOnce() []Token
	// Context returns the current lexer context.
	Context() string
	// PushContext pushes the named context on the lexer context stack.
	PushContext(name string)
	// PopContext pops the current context from the lexer context stack.
	PopContext()
}
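
// The Token interface above is meant to be satisfied by the lexer's own
// concrete token types. The sketch below is illustrative only: ExampleToken
// and its fields are assumptions, not part of this package's actual API,
// and a real implementation may differ.

// ExampleToken is a hypothetical, minimal implementation of the Token
// interface, shown purely as a usage sketch.
type ExampleToken struct {
	location Location
	kind     Kind
	text     string
	value    Value
}

func (t ExampleToken) Location() Location { return t.location }
func (t ExampleToken) Kind() Kind         { return t.kind }
func (t ExampleToken) Text() string       { return t.text }
func (t ExampleToken) Value() Value       { return t.value }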