ll1.parser.go.lined.tpl 9.7 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363
{{- /* This template generates a recursive descent parser based on the */ -}}
{{- /* information about the LL(1) grammar processed by the ll1 tool. */ -}}
{{- /* NOTE(review): when .LexerType is "scanner.Scanner", the generated */ -}}
{{- /* code also references the "text/scanner" and "unicode" packages; */ -}}
{{- /* presumably they must be listed in .Import -- confirm with the tool. */ -}}
/*
* {{.OutName}}: Parser for the {{.Grammar.Top.Name}} grammar.
* Generated by the ll1 tool from {{.InName}} at {{Now}}.
* Based on template: {{.Templates}}
* Uses a scanner
*
* Available definition keys at template expansion:
* {{.Definitions}}
*
* DO NOT EDIT.
*/
package {{ .Package }}
{{ range .Import }}
import "{{.}}"
{{ end }}
import "io"
import "os"
import "fmt"
{{$prefix := .Prefix }}
{{- /* Derive the prefixed identifiers used for every generated type below. */ -}}
{{- $Parser := ( printf "%s%s" $prefix "Parser") -}}
{{- $ParserError := ( printf "%s%s" $prefix "ParserError") -}}
{{- $Lexer := ( printf "%s%s" $prefix "Lexer") -}}
{{- $TokenKind := ( printf "%s%s" $prefix "TokenKind") -}}
{{- $Position := ( printf "%s%s" $prefix "Position") -}}
{{- $Token := ( printf "%s%s" $prefix "Token") -}}
{{- $Value := ( printf "%s%s" $prefix "Value") -}}
// {{$Value}} is the lexical value of a lexer token.
{{if .ValueType }}
//line ll1.parser.go.tpl:39
type {{$Value}} = {{.ValueType}}
{{ else }}
// This is based on strings as a default.
//line ll1.parser.go.tpl:43
type {{$Value}} = string
{{ end }}
  38. {{if (.LexerType) eq "scanner.Scanner"}}
  39. // {{$Position}} is a position within a source file. Since the lexer is based on
  40. // text/scanner, we use that package's Position.
  41. //line ll1.parser.go.tpl:51
  42. type {{$Position}} = scanner.Position
  43. {{else}}
  44. // {{$Position}} is a position within a source file.
  45. //line ll1.parser.go.tpl:55
  46. type {{$Position}} struct {
  47. Filename string // filename, if any
  48. Offset int // byte offset, starting at 0
  49. Line int // line number, starting at 1
  50. Column int // column number, starting at 1 (character count per line)
  51. }
  52. {{end}}
// {{$TokenKind}} is the kind or type of a token.
// This has rune as the underlying type so one-character tokens can be easily
// supported. EOF will be 65535 (I.e, -1 cast to rune). Non-character token
// kinds will start from 65533 down (i.e -3, -4, -5, etc).
//line ll1.parser.go.tpl:69
type {{$TokenKind}} rune
// No{{$TokenKind}} means "no token kind" i.e. no token.
// This is also the zero value of {{$TokenKind}}, so an unread token is "none".
//line ll1.parser.go.tpl:74
const No{{$TokenKind}} {{$TokenKind}} = {{$TokenKind}}(0)
// {{$TokenKind}}EOF means the end of the input.
//line ll1.parser.go.tpl:77
const {{$TokenKind}}EOF {{$TokenKind}} = {{$TokenKind}}(-1)
// {{$TokenKind}}Error means a parsing or lexing error was encountered.
//line ll1.parser.go.tpl:80
const {{$TokenKind}}Error {{$TokenKind}} = {{$TokenKind}}(-2)
  68. // Convert token kind to a string representation
  69. //line ll1.parser.go.tpl:86
  70. func (tk {{$TokenKind}}) String() string {
  71. {{if (.LexerType) eq "scanner.Scanner"}}
  72. return scanner.TokenString(rune(tk))
  73. {{else}}
  74. switch (tk) {
  75. case No{{$TokenKind}}: return "NoToken"
  76. case {{$TokenKind}}EOF: return "EOF"
  77. {{ range .Grammar.Rules -}}
  78. {{- $ruleName := .Name -}}
  79. {{- if .IsTerminal -}}
  80. {{- $TokenKindName := ( printf "%s%s" $TokenKind $ruleName) -}}
  81. case {{$TokenKindName}}: return "{{$TokenKindName}}"
  82. {{end}}
  83. {{end}}
  84. default:
  85. return fmt.Printf("TokenKind(%d)", int(tk))
  86. }
  87. {{end}}
  88. }
// {{$Token}} is the result of a single lexical analysis step by the lexer.
// All three fields are embedded, so they are accessed by their type name.
//line ll1.parser.go.tpl:109
type {{$Token}} struct {
{{$Position}} // Position in the source where the token was found.
{{$TokenKind}} // Type of the token
{{$Value}} // Value of the token
}
// Make{{$Token}} makes a token with the given position, type and value.
//line ll1.parser.go.tpl:118
func Make{{$Token}}(pos {{$Position}}, typ {{$TokenKind}}, val {{$Value}}) {{$Token}} {
return {{$Token}}{ pos, typ, val}
}
// {{$Lexer}} performs the lexical analysis of the input.
//line ll1.parser.go.tpl:124
type {{$Lexer}} struct {
// Embed {{.LexerType}} so its methods are promoted onto {{$Lexer}}.
{{.LexerType}}
Filename string // Name of the input, recorded into each token's position.
}
  108. {{if (.LexerType) eq "scanner.Scanner"}}
  109. // New{{$Lexer}}FromReader creates a new lexer for the given parser and input.
  110. //line ll1.parser.go.tpl:133
  111. func New{{$Lexer}}FromReader(parser *{{$Parser}}, reader io.Reader, filename string) *{{$Lexer}} {
  112. lexer := &{{$Lexer}}{}
  113. lexer.Filename = filename
  114. lexer.Scanner.Init(reader)
  115. lexer.Scanner.Mode = scanner.GoTokens
  116. lexer.Scanner.Error = func (s *scanner.Scanner, msg string) {
  117. parser.Panicf("%s: scanner error: %s, %s", s.Position, s.TokenText(), msg)
  118. }
  119. // XXX: needs to be generated from the identifier rule in the syntax!
  120. lexer.Scanner.IsIdentRune = func(ch rune, i int) bool {
  121. if i == 0 {
  122. return unicode.IsLetter(ch)
  123. }
  124. return unicode.IsLetter(ch) ||
  125. unicode.IsNumber(ch) ||
  126. ch == '_' ||
  127. ch == '-'
  128. }
  129. return lexer
  130. }
  131. //line ll1.parser.go.tpl:155
  132. func (lex *{{$Lexer}}) Lex() {{$Token}} {
  133. scanned := lex.Scanner.Scan()
  134. pos := lex.Scanner.Position
  135. pos.Filename = lex.Filename
  136. value := lex.Scanner.TokenText()
  137. // Get rid of the quotes
  138. if scanned == scanner.Char ||
  139. scanned == scanner.String ||
  140. scanned == scanner.RawString {
  141. value = value[1:len(value) - 1]
  142. }
  143. token := {{$Token}} {
  144. {{$TokenKind}}: {{$TokenKind}}(scanned),
  145. {{$Value}}: value,
  146. {{$Position}}: pos,
  147. }
  148. return token
  149. }
  150. {{else}}
  151. // Please provide the following functions:
  152. //
  153. // * You own lexer creation function with the following signature:
  154. // New{{$Lexer}}FromReader(parser *{{$Parser}}, reader io.Reader, filename string) *{{$Lexer}}
  155. //
  156. // * Your own lexing function with the type
  157. // func (lex *{{$Lexer}}) Lex() {{$Token}}
  158. {{end}}
// {{$Parser}} parses the input and returns a parse tree,
// based on the rules in {{.InName}}
//line ll1.parser.go.tpl:188
type {{$Parser}} struct {
reader io.Reader // Input being parsed.
lexer *{{$Lexer}} // Lexer providing the token stream.
current {{$Token}} // Most recently read token; kind No{{$TokenKind}} before the first Advance.
Errors []{{$ParserError}} // All errors recorded by Errorf/Panicf so far.
Filename string // Name of the input, for error reporting.
Debug io.Writer // Destination for Debugf output; nil disables debugging.
}
// New{{$Parser}}FromReader creates a parser for the given input. When debug
// is true, debug messages are written to os.Stderr.
//line ll1.parser.go.tpl:198
func New{{$Parser}}FromReader(reader io.Reader, filename string, debug bool) *{{$Parser}} {
parser := &{{$Parser}}{}
parser.lexer = New{{$Lexer}}FromReader(parser, reader, filename)
parser.Filename = filename
parser.current.{{$TokenKind}} = No{{$TokenKind}}
parser.Debug = nil
if debug {
parser.Debug = os.Stderr
}
return parser
}
  182. // Advances the parser. Returns the current token /after/ advancing.
  183. //line ll1.parser.go.tpl:214
  184. func (p *{{$Parser}}) Advance() {{$Token}} {
  185. token := p.lexer.Lex()
  186. p.Debugf("Lexed token: %v", token)
  187. p.current = token
  188. return token
  189. }
// {{$ParserError}} is an error encountered during parsing or lexing.
// The parser may panic with this type on errors that would prevent the parser
// from making progress.
//line ll1.parser.go.tpl:225
type {{$ParserError}} struct {
*{{$Parser}} // Parser that had the error.
*{{$Token}} // Token at which the error was found
Chain error // underlying error
}
// Error implements the error interface by reporting the underlying error.
// It does not yet include the token position in the message.
//line ll1.parser.go.tpl:232
func (pe {{$ParserError}}) Error() string {
// XXX will need to be improved
return pe.Chain.Error()
}
// Errorf formats an error attached to the current token, appends it to
// parser.Errors, and returns it. It does not panic; see Panicf for that.
//line ll1.parser.go.tpl:238
func (parser *{{$Parser}}) Errorf(message string, args ...interface{}) {{$ParserError}} {
err := fmt.Errorf(message, args...)
pe := {{$ParserError}} {
{{$Parser}}: parser,
{{$Token}}: &parser.current,
Chain: err,
}
parser.Errors = append(parser.Errors, pe)
return pe
}
// Panicf records the error like Errorf and then panics with it, aborting
// the parse. NOTE(review): presumably recovered by the generated top-level
// parse function -- confirm.
//line ll1.parser.go.tpl:250
func (parser *{{$Parser}}) Panicf(message string, args ...interface{}) {
pe := parser.Errorf(message, args...)
panic(pe)
}
  220. //line ll1.parser.go.tpl:257
  221. func (p *{{$Parser}}) Debugf(message string, args ...interface{}) {
  222. if p.Debug != nil {
  223. fmt.Fprintf(p.Debug, message, args)
  224. }
  225. }
  226. /* Looks at the current token and advances the lexer if the token is of any of
  227. the token kinds given in kinds. In this case it will return the accepted
  228. token and advance the parser. Otherwise, it will call parser.Panicf.*/
  229. //line ll1.parser.go.tpl:267
  230. func (parser *{{$Parser}}) Require(kinds ...{{$TokenKind}}) {{$Token}} {
  231. parser.Debugf("Require: %v\n", kinds)
  232. if parser.current.{{$TokenKind}} == {{$TokenKind}}(0) {
  233. parser.Advance()
  234. }
  235. expected := ""
  236. sep := ""
  237. for _, kind := range kinds {
  238. if kind == parser.current.{{$TokenKind}} {
  239. accepted := parser.current
  240. parser.Advance()
  241. return accepted
  242. }
  243. expected = fmt.Sprintf("%s%s%s", expected, sep, kind.String())
  244. }
  245. parser.Panicf("error: expected one of the following: %s", expected)
  246. return {{$Token}}{}
  247. }
  248. //line ll1.parser.go.tpl:288
  249. func (parser {{$Parser}}) NextIs(kinds ...{{$TokenKind}}) bool {
  250. parser.Debugf("NextIs: %v\n", kinds)
  251. if (parser.current.{{$TokenKind}} == 0) {
  252. parser.Advance()
  253. }
  254. for _, kind := range kinds {
  255. if kind == parser.current.{{$TokenKind}} {
  256. return true
  257. }
  258. }
  259. return false
  260. }
  261. {{ $tokenKindValue := 2 }}
  262. {{ range .Grammar.Rules -}}
  263. {{- $ruleName := .Name -}}
  264. {{ if .Template }}
  265. // Expanded from template of rule {{$ruleName}}
  266. {{ .Template }}
  267. {{ end }}
  268. {{- $terminal := .IsTerminal -}}
  269. {{- if $terminal -}}
  270. {{- $TokenKindName := ( printf "%s%s" $TokenKind $ruleName) -}}
  271. //line ll1.parser.go.tpl:313
  272. const {{$TokenKindName}} {{$TokenKind}} = {{$TokenKind}}(-{{$tokenKindValue}})
  273. {{ $tokenKindValue = (iadd $tokenKindValue 1) }}
  274. //line ll1.parser.go.tpl:316
  275. func ( *{{$Lexer}}) Lex{{$TokenKindName}}() ({{$TokenKind}}, error) {
  276. result := {{$TokenKindName}}
  277. return result, nil
  278. }
  279. {{ else }}
  280. {{ $RuleType := ( printf "%s%s" $prefix $ruleName) }}
  281. //line ll1.parser.go.tpl:324
  282. type {{$RuleType}} struct {
  283. }
  284. //line ll1.parser.go.tpl:328
  285. func ( *{{$Parser}}) Parse{{$RuleType}}() ({{$RuleType}}, error) {
  286. result := {{$RuleType}} {}
  287. return result, nil
  288. }
  289. {{end}}
  290. {{ end }}