123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390 |
- {{- /* This template generates a recursive descent parser based on the */ -}}
- {{- /* information about the LL(1) grammar processed by the ll1 tool. */ -}}
- /*
- * {{.OutName}}: Parser for the {{.Grammar.Top.Name}} grammar.
- * Generated by the ll1 tool from {{.InName}} at {{Now}}.
- * Based on template: {{.Templates}}
- * Uses a scanner
- *
- * Available definition keys at template expansion:
- * {{.Definitions}}
- *
- * DO NOT EDIT.
- */
- package {{ .Package }}
- {{ range .Import }}
- import "{{.}}"
- {{ end }}
- import "io"
- import "os"
- import "fmt"
- {{$prefix := .Prefix }}
- {{- $Parser := ( printf "%s%s" $prefix "Parser") -}}
- {{- $ParserError := ( printf "%s%s" $prefix "ParserError") -}}
- {{- $Lexer := ( printf "%s%s" $prefix "Lexer") -}}
- {{- $TokenKind := ( printf "%s%s" $prefix "TokenKind") -}}
- {{- $Position := ( printf "%s%s" $prefix "Position") -}}
- {{- $Token := ( printf "%s%s" $prefix "Token") -}}
- {{- $Value := ( printf "%s%s" $prefix "Value") -}}
// {{$Value}} is the lexical value of a lexer token.
{{if .ValueType }}
{{- /* NOTE(review): .ValueType must be a valid Go type expression; it is
emitted verbatim into the generated source. */ -}}
//line ll1.parser.go.tpl:39
type {{$Value}} = {{.ValueType}}
{{ else }}
// This is based on strings as a default.
//line ll1.parser.go.tpl:43
type {{$Value}} = string
{{ end }}
{{- /* eq must be called as a function: "eq ARG1 ARG2". The old form
"(.LexerType) eq ..." tried to call the string field .LexerType with
arguments, which fails at template execution time. */ -}}
{{if eq .LexerType "scanner.Scanner"}}
// {{$Position}} is a position within a source file. Since the lexer is based on
// text/scanner, we use that package's Position.
//line ll1.parser.go.tpl:51
type {{$Position}} = scanner.Position
{{else}}
// {{$Position}} is a position within a source file.
//line ll1.parser.go.tpl:55
type {{$Position}} struct {
	Filename string // filename, if any
	Offset   int    // byte offset, starting at 0
	Line     int    // line number, starting at 1
	Column   int    // column number, starting at 1 (character count per line)
}
{{end}}
// {{$TokenKind}} is the kind or type of a token.
// This has rune as the underlying type so one-character tokens can be easily
// supported. EOF will be 65535 (I.e, -1 cast to rune). Non-character token
// kinds will start from 65533 down (i.e -3, -4, -5, etc).
//line ll1.parser.go.tpl:69
type {{$TokenKind}} rune
// No{{$TokenKind}} means "no token kind" i.e. no token.
// This is also the parser's "not yet advanced" sentinel (see Require/NextIs).
//line ll1.parser.go.tpl:74
const No{{$TokenKind}} {{$TokenKind}} = {{$TokenKind}}(0)
// {{$TokenKind}}EOF means the end of the input.
//line ll1.parser.go.tpl:77
const {{$TokenKind}}EOF {{$TokenKind}} = {{$TokenKind}}(-1)
// {{$TokenKind}}Error means a parsing or lexing error was encountered.
//line ll1.parser.go.tpl:80
const {{$TokenKind}}Error {{$TokenKind}} = {{$TokenKind}}(-2)
// String converts the token kind to a string representation.
//line ll1.parser.go.tpl:86
func (tk {{$TokenKind}}) String() string {
{{- /* eq must be called as a function: "eq ARG1 ARG2"; the old
"(.LexerType) eq ..." form fails at template execution time. */ -}}
{{if eq .LexerType "scanner.Scanner"}}
	return scanner.TokenString(rune(tk))
{{else}}
	switch (tk) {
	case No{{$TokenKind}}: return "NoToken"
	case {{$TokenKind}}EOF: return "EOF"
{{ range .Grammar.Rules -}}
{{- $ruleName := .Name -}}
{{- if .IsTerminal -}}
{{- $TokenKindName := ( printf "%s%s" $TokenKind $ruleName) -}}
	case {{$TokenKindName}}: return "{{$TokenKindName}}"
{{end}}
{{end}}
	default:
		// Sprintf, not Printf: this method must return a string, and
		// fmt.Printf returns (int, error), which would not even compile.
		return fmt.Sprintf("TokenKind(%d)", int(tk))
	}
{{end}}
}
// {{$Token}} is the result of a single lexical analysis step by the lexer.
// Position, kind and value are embedded, so they are accessible directly on
// the token itself.
//line ll1.parser.go.tpl:109
type {{$Token}} struct {
	{{$Position}} // Position in the source where the token was found.
	{{$TokenKind}} // Type of the token
	{{$Value}} // Value of the token
}
// Make{{$Token}} makes a token with the given position, type and value.
//line ll1.parser.go.tpl:118
func Make{{$Token}}(pos {{$Position}}, typ {{$TokenKind}}, val {{$Value}}) {{$Token}} {
	return {{$Token}}{ pos, typ, val}
}
// {{$Lexer}} performs the lexical analysis of the input.
//line ll1.parser.go.tpl:124
type {{$Lexer}} struct {
	// Embed {{.LexerType}}
	{{.LexerType}}
	// Filename is stamped into every token's position (the embedded
	// lexer's position does not carry the file name by itself).
	Filename string
}
{{- /* eq must be called as a function: "eq ARG1 ARG2"; the old
"(.LexerType) eq ..." form fails at template execution time. */ -}}
{{if eq .LexerType "scanner.Scanner"}}
// New{{$Lexer}}FromReader creates a new lexer for the given parser and input.
// Scanner errors are routed into parser.Panicf.
//line ll1.parser.go.tpl:133
func New{{$Lexer}}FromReader(parser *{{$Parser}}, reader io.Reader, filename string) *{{$Lexer}} {
	lexer := &{{$Lexer}}{}
	lexer.Filename = filename
	lexer.Scanner.Init(reader)
	lexer.Scanner.Mode = scanner.GoTokens
	lexer.Scanner.Error = func (s *scanner.Scanner, msg string) {
		parser.Panicf("%s: scanner error: %s, %s", s.Position, s.TokenText(), msg)
	}
	// XXX: needs to be generated from the identifier rule in the syntax!
	// NOTE(review): uses the unicode package — it must be listed in
	// .Import for the generated file to compile; verify in the ll1 input.
	lexer.Scanner.IsIdentRune = func(ch rune, i int) bool {
		if i == 0 {
			return unicode.IsLetter(ch)
		}
		return unicode.IsLetter(ch) ||
			unicode.IsNumber(ch) ||
			ch == '_' ||
			ch == '-'
	}
	return lexer
}
// Lex scans the next token and returns it with its position, kind and text.
// Surrounding quotes are stripped from char, string and raw string literals.
//line ll1.parser.go.tpl:155
func (lex *{{$Lexer}}) Lex() {{$Token}} {
	scanned := lex.Scanner.Scan()
	pos := lex.Scanner.Position
	pos.Filename = lex.Filename
	value := lex.Scanner.TokenText()
	// Get rid of the quotes
	if scanned == scanner.Char ||
		scanned == scanner.String ||
		scanned == scanner.RawString {
		value = value[1:len(value) - 1]
	}
	token := {{$Token}} {
		{{$TokenKind}}: {{$TokenKind}}(scanned),
		{{$Value}}: value,
		{{$Position}}: pos,
	}
	return token
}
{{else}}
// Please provide the following functions:
//
// * You own lexer creation function with the following signature:
// New{{$Lexer}}FromReader(parser *{{$Parser}}, reader io.Reader, filename string) *{{$Lexer}}
//
// * Your own lexing function with the type
// func (lex *{{$Lexer}}) Lex() {{$Token}}
{{end}}
// {{$Parser}} parses the input and returns a parse tree,
// based on the rules in {{.InName}}
//line ll1.parser.go.tpl:188
type {{$Parser}} struct {
	reader io.Reader
	lexer *{{$Lexer}}
	current {{$Token}} // most recently lexed token; No kind until first Advance
	Errors []{{$ParserError}} // all errors recorded by Errorf/Panicf so far
	Filename string
	Debug io.Writer // destination for Debugf output; nil disables it
}
// New{{$Parser}}FromReader creates a parser reading from reader. If debug is
// true, debug output goes to os.Stderr.
//line ll1.parser.go.tpl:198
func New{{$Parser}}FromReader(reader io.Reader, filename string, debug bool) *{{$Parser}} {
	parser := &{{$Parser}}{}
	parser.lexer = New{{$Lexer}}FromReader(parser, reader, filename)
	parser.Filename = filename

	// No token lexed yet; Require/NextIs treat this as "advance first".
	parser.current.{{$TokenKind}} = No{{$TokenKind}}
	parser.Debug = nil
	if debug {
		parser.Debug = os.Stderr
	}
	return parser
}
// Advances the parser. Returns the current token /after/ advancing.
//line ll1.parser.go.tpl:214
func (p *{{$Parser}}) Advance() {{$Token}} {
	token := p.lexer.Lex()
	// Trailing \n added for consistency with the other Debugf call sites;
	// without it successive debug lines ran together.
	p.Debugf("Lexed token: %v\n", token)
	p.current = token
	return token
}
// {{$ParserError}} is an error encountered during parsing or lexing.
// The parser may panic with this type on errors that would prevent the parser
// from making progress.
//line ll1.parser.go.tpl:225
type {{$ParserError}} struct {
	*{{$Parser}} // Parser that had the error.
	*{{$Token}} // Token at which the error was found
	Chain error // underlying error
}
// Error implements the error interface by delegating to the chained error.
//line ll1.parser.go.tpl:232
func (pe {{$ParserError}}) Error() string {
	// XXX will need to be improved
	// NOTE(review): panics if Chain is nil; Errorf always sets it, but a
	// zero-value ParserError would crash here.
	return pe.Chain.Error()
}
// Errorf records an error at the current token, formatted as per fmt.Errorf,
// appends it to parser.Errors and returns it.
//line ll1.parser.go.tpl:238
func (parser *{{$Parser}}) Errorf(message string, args ...interface{}) {{$ParserError}} {
	err := fmt.Errorf(message, args...)
	pe := {{$ParserError}} {
		{{$Parser}}: parser,
		{{$Token}}: &parser.current,
		Chain: err,
	}
	parser.Errors = append(parser.Errors, pe)
	return pe
}
// Panicf records the error like Errorf and then panics with it. Recover at
// the top level of the parse to collect it.
//line ll1.parser.go.tpl:250
func (parser *{{$Parser}}) Panicf(message string, args ...interface{}) {
	pe := parser.Errorf(message, args...)
	panic(pe)
}
// Debugf writes a formatted debug message to p.Debug, if p.Debug is not nil.
//line ll1.parser.go.tpl:257
func (p *{{$Parser}}) Debugf(message string, args ...interface{}) {
	if p.Debug != nil {
		// args must be spread with ...; passing the slice itself would
		// format it as a single []interface{} argument.
		fmt.Fprintf(p.Debug, message, args...)
	}
}
/* Looks at the current token and advances the lexer if the token is of any of
the token kinds given in kinds. In this case it will return the accepted
token and advance the parser. Otherwise, it will call parser.Panicf.*/
//line ll1.parser.go.tpl:267
func (parser *{{$Parser}}) Require(kinds ...{{$TokenKind}}) {{$Token}} {
	parser.Debugf("Require: %v\n", kinds)
	// Lazily fetch the first token.
	if parser.current.{{$TokenKind}} == No{{$TokenKind}} {
		parser.Advance()
	}

	expected := ""
	sep := ""
	for _, kind := range kinds {
		if kind == parser.current.{{$TokenKind}} {
			accepted := parser.current
			parser.Advance()
			return accepted
		}
		expected = fmt.Sprintf("%s%s%s", expected, sep, kind.String())
		// sep was never updated before, so the expected kinds ran
		// together in the error message; set it after the first entry.
		sep = ", "
	}

	parser.Panicf("error: expected one of the following: %s", expected)
	return {{$Token}}{}
}
// NextIs reports whether the current token's kind is one of kinds, lazily
// advancing the parser first if no token has been lexed yet.
// The receiver must be a pointer: with the previous value receiver, the
// Advance() below consumed a token from the shared lexer but stored it in a
// copy of the parser, so the token was silently lost.
//line ll1.parser.go.tpl:288
func (parser *{{$Parser}}) NextIs(kinds ...{{$TokenKind}}) bool {
	parser.Debugf("NextIs: %v\n", kinds)
	if (parser.current.{{$TokenKind}} == 0) {
		parser.Advance()
	}
	for _, kind := range kinds {
		if kind == parser.current.{{$TokenKind}} {
			return true
		}
	}
	return false
}
{{- /* Per grammar rule, generate: for terminals, a token kind constant and a
lexing stub; for nonterminals, a node struct, an Is predicate and a Parse
function. */ -}}
{{- /* Token kinds count down from -3: -1 is EOF and -2 is the Error kind,
so the previous start value of 2 made the first terminal's kind collide
with the Error kind. This also matches the doc comment on the TokenKind
type ("start from 65533 down (i.e -3, -4, -5, etc)"). */ -}}
{{ $tokenKindValue := 3 }}
{{ range .Grammar.Rules -}}
{{- $ruleName := .Name -}}
{{ if .Template }}
// Expanded from template of rule {{$ruleName}}
{{ .Template }}
{{ end }}
{{- $terminal := .IsTerminal -}}
{{- if $terminal -}}
{{- $TokenKindName := ( printf "%s%s" $TokenKind $ruleName) -}}
//line ll1.parser.go.tpl:313
const {{$TokenKindName}} {{$TokenKind}} = {{$TokenKind}}(-{{$tokenKindValue}})
{{ $tokenKindValue = (iadd $tokenKindValue 1) }}
//line ll1.parser.go.tpl:316
func ( *{{$Lexer}}) Lex{{$TokenKindName}}() ({{$TokenKind}}, error) {
result := {{$TokenKindName}}
return result, nil
}
{{ else }}
{{ $RuleType := ( printf "%s%s" $prefix $ruleName) }}
//line ll1.parser.go.tpl:324
type {{$RuleType}} struct {
{{$Token}}
{{ range .Definition.Sequences -}}
{{- range .Elements -}}
{{- if (IsNonterminal .) -}}
*{{$prefix}}{{ . }}
{{ end -}}
{{- end}}
{{ end -}}
}
//line ll1.parser.go.tpl:335
func (p *{{$Parser}}) Is{{$RuleType}}() (bool) {
return false
// return p.NextIs()
}
//line ll1.parser.go.tpl:340
func (p *{{$Parser}}) Parse{{$RuleType}}() (*{{$RuleType}}, error) {
var err error
result := &{{$RuleType}} { {{$Token}}: p.current }
{{ range .Definition.Sequences -}}
{{- range .Elements -}}
{{- if (IsNonterminal .) -}}
{{- $fn := (printf "%s%s" $prefix .)}}
if p.Is{{$fn}}() {
result.{{$fn}}, err = p.Parse{{$fn}}()
if err != nil {
return result, err
}
}
{{ end -}}
{{- end}}
{{ end -}}
return result, err
}
{{end}}
{{ end }}
|