{{- /* This template generates a recursive descent parser based on the   */ -}}
{{- /* information about the LL(1) grammar processed by the ll1 tool.    */ -}}
/*
 * {{.OutName}}: Parser for the {{.Grammar.Top.Name}} grammar.
 * Generated by the ll1 tool from {{.InName}} at {{Now}}.
 * Based on template: {{.Templates}}
 * Uses a scanner-based lexer if LexerType is scanner.Scanner.
 *
 * Available definition keys at template expansion:
 * {{.Definitions}}
 *
 * DO NOT EDIT.
 */
package {{ .Package }}

{{ range .Import }}
import "{{.}}"
{{ end }}
import "io"
import "os"
import "fmt"
{{if eq .LexerType "scanner.Scanner"}}
import "text/scanner"
import "unicode"
{{end}}

{{$prefix := .Prefix }}
{{- $Parser := ( printf "%s%s" $prefix "Parser") -}}
{{- $ParserError := ( printf "%s%s" $prefix "ParserError") -}}
{{- $Lexer := ( printf "%s%s" $prefix "Lexer") -}}
{{- $TokenKind := ( printf "%s%s" $prefix "TokenKind") -}}
{{- $Position := ( printf "%s%s" $prefix "Position") -}}
{{- $Token := ( printf "%s%s" $prefix "Token") -}}
{{- $Value := ( printf "%s%s" $prefix "Value") -}}

// {{$Value}} is the lexical value of a lexer token.
{{if .ValueType }}
type {{$Value}} = {{.ValueType}}
{{ else }}
// This is based on strings as a default.
type {{$Value}} = string
{{ end }}

{{if eq .LexerType "scanner.Scanner"}}
// {{$Position}} is a position within a source file. Since the lexer is based
// on text/scanner, we use that package's Position.
type {{$Position}} = scanner.Position
{{else}}
// {{$Position}} is a position within a source file.
type {{$Position}} struct {
	Filename string // filename, if any
	Offset   int    // byte offset, starting at 0
	Line     int    // line number, starting at 1
	Column   int    // column number, starting at 1 (character count per line)
}
{{end}}

// {{$TokenKind}} is the kind or type of a token.
// It has rune as the underlying type so that one-character tokens can be
// supported easily. EOF is -1, matching text/scanner. Non-character token
// kinds generated for terminal rules count down from -3 (i.e. -3, -4, -5, ...).
type {{$TokenKind}} rune

// No{{$TokenKind}} means "no token kind", i.e. no token.
const No{{$TokenKind}} {{$TokenKind}} = {{$TokenKind}}(0)

// {{$TokenKind}}EOF means the end of the input.
const {{$TokenKind}}EOF {{$TokenKind}} = {{$TokenKind}}(-1)

// {{$TokenKind}}Error means a parsing or lexing error was encountered.
const {{$TokenKind}}Error {{$TokenKind}} = {{$TokenKind}}(-2)

// String converts the token kind to a string representation.
func (tk {{$TokenKind}}) String() string {
	{{if eq .LexerType "scanner.Scanner"}}
	return scanner.TokenString(rune(tk))
	{{else}}
	switch tk {
	case No{{$TokenKind}}:
		return "NoToken"
	case {{$TokenKind}}EOF:
		return "EOF"
	{{ range .Grammar.Rules -}}
	{{- $ruleName := .Name -}}
	{{- if .IsTerminal -}}
	{{- $TokenKindName := ( printf "%s%s" $TokenKind $ruleName) -}}
	case {{$TokenKindName}}:
		return "{{$TokenKindName}}"
	{{end}}
	{{end}}
	default:
		return fmt.Sprintf("TokenKind(%d)", int(tk))
	}
	{{end}}
}

// {{$Token}} is the result of a single lexical analysis step by the lexer.
type {{$Token}} struct {
	{{$Position}}  // Position in the source where the token was found.
	{{$TokenKind}} // Kind of the token.
	{{$Value}}     // Value of the token.
}

// Make{{$Token}} makes a token with the given position, kind and value.
func Make{{$Token}}(pos {{$Position}}, typ {{$TokenKind}}, val {{$Value}}) {{$Token}} {
	return {{$Token}}{pos, typ, val}
}

// {{$Lexer}} performs the lexical analysis of the input.
type {{$Lexer}} struct {
	// Embed {{.LexerType}}.
	{{.LexerType}}
	Filename string
}

{{if eq .LexerType "scanner.Scanner"}}
// New{{$Lexer}}FromReader creates a new lexer for the given parser and input.
func New{{$Lexer}}FromReader(parser *{{$Parser}}, reader io.Reader, filename string) *{{$Lexer}} {
	lexer := &{{$Lexer}}{}
	lexer.Filename = filename
	lexer.Scanner.Init(reader)
	lexer.Scanner.Mode = scanner.GoTokens
	lexer.Scanner.Error = func(s *scanner.Scanner, msg string) {
		parser.Panicf("%s: scanner error: %s, %s", s.Position, s.TokenText(), msg)
	}
	// XXX: needs to be generated from the identifier rule in the syntax!
	lexer.Scanner.IsIdentRune = func(ch rune, i int) bool {
		if i == 0 {
			return unicode.IsLetter(ch)
		}
		return unicode.IsLetter(ch) || unicode.IsNumber(ch) || ch == '_' || ch == '-'
	}
	return lexer
}

// Lex returns the next token from the input.
func (lex *{{$Lexer}}) Lex() {{$Token}} {
	scanned := lex.Scanner.Scan()
	pos := lex.Scanner.Position
	pos.Filename = lex.Filename
	value := lex.Scanner.TokenText()
	// Get rid of the quotes around characters and strings.
	if scanned == scanner.Char || scanned == scanner.String || scanned == scanner.RawString {
		value = value[1 : len(value)-1]
	}
	token := {{$Token}}{
		{{$TokenKind}}: {{$TokenKind}}(scanned),
		{{$Value}}:     value,
		{{$Position}}:  pos,
	}
	return token
}
{{else}}
// Please provide the following functions:
//
// * Your own lexer creation function with the following signature:
//   New{{$Lexer}}FromReader(parser *{{$Parser}}, reader io.Reader, filename string) *{{$Lexer}}
//
// * Your own lexing function with the type:
//   func (lex *{{$Lexer}}) Lex() {{$Token}}
//
// A commented sketch of such a lexer can be found at the end of the template
// this file was generated from.
{{end}}

// {{$Parser}} parses the input and returns a parse tree,
// based on the rules in {{.InName}}.
type {{$Parser}} struct {
	reader   io.Reader
	lexer    *{{$Lexer}}
	current  {{$Token}}
	Errors   []{{$ParserError}}
	Filename string
	Debug    io.Writer
}

// New{{$Parser}}FromReader creates a new parser for the given input.
func New{{$Parser}}FromReader(reader io.Reader, filename string, debug bool) *{{$Parser}} {
	parser := &{{$Parser}}{}
	parser.reader = reader
	parser.lexer = New{{$Lexer}}FromReader(parser, reader, filename)
	parser.Filename = filename
	parser.current.{{$TokenKind}} = No{{$TokenKind}}
	parser.Debug = nil
	if debug {
		parser.Debug = os.Stderr
	}
	return parser
}

// Advance advances the parser. It returns the current token /after/ advancing.
func (p *{{$Parser}}) Advance() {{$Token}} {
	token := p.lexer.Lex()
	p.Debugf("Lexed token: %v\n", token)
	p.current = token
	return token
}

// {{$ParserError}} is an error encountered during parsing or lexing.
// The parser may panic with this type on errors that would prevent the parser
// from making progress.
type {{$ParserError}} struct {
	*{{$Parser}} // Parser that had the error.
	*{{$Token}}  // Token at which the error was found.
	Chain error  // Underlying error.
}

func (pe {{$ParserError}}) Error() string {
	// XXX will need to be improved
	return pe.Chain.Error()
}

// Errorf records an error for the current token and returns it.
func (parser *{{$Parser}}) Errorf(message string, args ...interface{}) {{$ParserError}} {
	err := fmt.Errorf(message, args...)
	pe := {{$ParserError}}{
		{{$Parser}}: parser,
		{{$Token}}:  &parser.current,
		Chain:       err,
	}
	parser.Errors = append(parser.Errors, pe)
	return pe
}

// Panicf records an error and panics with it.
func (parser *{{$Parser}}) Panicf(message string, args ...interface{}) {
	pe := parser.Errorf(message, args...)
	panic(pe)
}

// Debugf writes a debug message to the debug writer, if one is set.
func (p *{{$Parser}}) Debugf(message string, args ...interface{}) {
	if p.Debug != nil {
		fmt.Fprintf(p.Debug, message, args...)
	}
}

/* Require looks at the current token and advances the lexer if the token is of
any of the token kinds given in kinds. In that case it returns the accepted
token and advances the parser.
Otherwise, it calls parser.Panicf. */
func (parser *{{$Parser}}) Require(kinds ...{{$TokenKind}}) {{$Token}} {
	parser.Debugf("Require: %v\n", kinds)
	if parser.current.{{$TokenKind}} == No{{$TokenKind}} {
		parser.Advance()
	}
	expected := ""
	sep := ""
	for _, kind := range kinds {
		if kind == parser.current.{{$TokenKind}} {
			accepted := parser.current
			parser.Advance()
			return accepted
		}
		expected = fmt.Sprintf("%s%s%s", expected, sep, kind.String())
		sep = ", "
	}
	parser.Panicf("error: expected one of the following: %s", expected)
	return {{$Token}}{}
}

// NextIs reports whether the current token is of one of the given kinds,
// without consuming it.
func (parser *{{$Parser}}) NextIs(kinds ...{{$TokenKind}}) bool {
	parser.Debugf("NextIs: %v\n", kinds)
	if parser.current.{{$TokenKind}} == No{{$TokenKind}} {
		parser.Advance()
	}
	for _, kind := range kinds {
		if kind == parser.current.{{$TokenKind}} {
			return true
		}
	}
	return false
}

{{ $tokenKindValue := 3 }}
{{ range .Grammar.Rules -}}
{{- $ruleName := .Name -}}
{{ if .Template }}
// Expanded from the template of rule {{$ruleName}}
{{ .Template }}
{{ end }}
{{- $terminal := .IsTerminal -}}
{{- if $terminal -}}
{{- $TokenKindName := ( printf "%s%s" $TokenKind $ruleName) -}}
const {{$TokenKindName}} {{$TokenKind}} = {{$TokenKind}}(-{{$tokenKindValue}})
{{ $tokenKindValue = (iadd $tokenKindValue 1) }}
func (*{{$Lexer}}) Lex{{$TokenKindName}}() ({{$TokenKind}}, error) {
	result := {{$TokenKindName}}
	return result, nil
}
{{ else }}
{{ $RuleType := ( printf "%s%s" $prefix $ruleName) }}
type {{$RuleType}} struct {
}

func (*{{$Parser}}) Parse{{$RuleType}}() ({{$RuleType}}, error) {
	result := {{$RuleType}}{}
	return result, nil
}
{{end}}
{{ end }}
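
{{- /* The sketch below is kept inside a template comment, so it is never part
of the generated output. It illustrates, as a hedged example only, the contract
a hand-written lexer has to fulfil when LexerType is not scanner.Scanner. It
assumes an empty Prefix (so the generated names are Lexer, Parser, Token,
TokenKind, Position and MakeToken), a LexerType of *bufio.Reader, a terminal
rule named Identifier, and that the bufio and unicode packages are imported;
real position tracking is omitted for brevity. Adapt the names and the
scanning logic to your own grammar.

func NewLexerFromReader(parser *Parser, reader io.Reader, filename string) *Lexer {
	lexer := &Lexer{}
	lexer.Reader = bufio.NewReader(reader) // the embedded *bufio.Reader
	lexer.Filename = filename
	return lexer
}

func (lex *Lexer) Lex() Token {
	pos := Position{Filename: lex.Filename, Line: 1, Column: 1}
	// Skip leading white space.
	ch, _, err := lex.ReadRune()
	for err == nil && unicode.IsSpace(ch) {
		ch, _, err = lex.ReadRune()
	}
	if err != nil {
		return MakeToken(pos, TokenKindEOF, "")
	}
	// Collect a whitespace-delimited word. One-character tokens could be
	// returned here directly as MakeToken(pos, TokenKind(ch), string(ch)).
	word := []rune{ch}
	for {
		ch, _, err = lex.ReadRune()
		if err != nil || unicode.IsSpace(ch) {
			break
		}
		word = append(word, ch)
	}
	return MakeToken(pos, TokenKindIdentifier, string(word))
}
*/ -}}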