// raku
/* Raku is an easy-to-use scripting language that can also be used easily interactively.

Syntax (verified LL(1)):

PROGRAM             -> STATEMENTS .
STATEMENTS          -> STATEMENT STATEMENTS | .
STATEMENT           -> EXPRESSION | BLOCK | EMPTY_LINE | comment .
EXPRESSION          -> VALUE PARAMETERS NL .
PARAMETERS_NONEMPTY -> PARAMETER PARAMETERS .
PARAMETERS          -> PARAMETERS_NONEMPTY | .
PARAMETER           -> BLOCK | VALUE .
EMPTY_LINE          -> NL .
BLOCK               -> ob STATEMENTS cb | op STATEMENTS cp | oa STATEMENTS ca .
NL                  -> nl | semicolon .
VALUE               -> string | float | integer | symbol .

Lexer:
*/
  17. package raku
  18. import (
  19. "fmt"
  20. "io"
  21. )
// Value is the textual payload of a token (a lexeme, or an error message
// for TokenError tokens).
type Value string

// TokenType discriminates the kinds of tokens the lexer can emit.
type TokenType int
// Position locates a token within the input stream.
// NOTE(review): nothing in this file updates these fields yet — confirm
// intended semantics (byte vs. rune index) when scanning is implemented.
type Position struct {
	Index  int // offset from the start of the input
	Row    int // line number
	Column int // column within the line
}
// Token types. Only the error and end-of-input sentinels exist so far;
// terminals from the grammar (string, float, integer, symbol, ...) still
// need constants here.
const (
	TokenError TokenType = iota // lexing failed; Value carries the message
	TokenEOF                    // end of input reached
)
// Token is a single lexed unit: its kind, its text, and where it was found.
// All three types are embedded, so their fields and methods are promoted
// (e.g. tok.Row, tok.Index).
type Token struct {
	TokenType
	Value
	Position
}
  38. func (me Token) String() string {
  39. return fmt.Sprintf("Token: %d >%s< %d %d %d.", me.TokenType, string(me.Value), me.Index, me.Row, me.Column)
  40. }
// TokenChannel carries tokens from the lexer to its consumer.
type TokenChannel chan Token

// Lexer tokenizes the contents of Reader, sending tokens on Output.
type Lexer struct {
	Reader  io.Reader
	Current Position // position of the token currently being scanned
	Last    Position // NOTE(review): never updated yet — confirm intended meaning
	Token   Token
	rule    LexerRule
	Output  TokenChannel
	buffer  []byte // entire input, accumulated chunk-by-chunk by ReadReader
}

// LexerRule is one lexing state; it returns the next state to run,
// or nil to stop the state machine.
type LexerRule func(lexer *Lexer) LexerRule
  52. func (lexer *Lexer) Emit(t TokenType, v Value) {
  53. tok := Token{t, v, lexer.Current}
  54. lexer.Output <- tok
  55. }
  56. func (lexer *Lexer) Error(message string, args ...interface{}) {
  57. value := fmt.Sprintf(message, args...)
  58. lexer.Emit(TokenError, Value(value))
  59. }
  60. func LexError(lexer *Lexer) LexerRule {
  61. lexer.Error("Error")
  62. return nil
  63. }
// LexNormal is the entry lexing state. It is currently a stub that
// transitions straight to the error state; the actual tokenizing rules
// are not implemented yet.
func LexNormal(lexer *Lexer) LexerRule {
	return LexError
}
  67. func OpenLexer(reader io.Reader) *Lexer {
  68. lexer := &Lexer{}
  69. lexer.Reader = reader
  70. lexer.Output = make(TokenChannel)
  71. // lexer.buffer = new(byte[1024])
  72. return lexer
  73. }
  74. func (me *Lexer) ReadReader() (bool, error) {
  75. buffer := make([]byte, 1024)
  76. n, err := me.Reader.Read(buffer)
  77. if n > 0 {
  78. me.buffer = append(me.buffer, buffer...)
  79. }
  80. if err == io.EOF {
  81. me.Emit(TokenEOF, "")
  82. return true, nil
  83. } else if err != nil {
  84. me.Error("Error reading from reader: %s", err)
  85. return true, err
  86. }
  87. return false, nil
  88. }
  89. func (me *Lexer) Start() {
  90. more, err := me.ReadReader()
  91. for err == nil && more {
  92. more, err = me.ReadReader()
  93. }
  94. if err != nil {
  95. return
  96. }
  97. rule := LexNormal
  98. for rule != nil {
  99. rule = rule(me)
  100. }
  101. close(me.Output)
  102. }
/*
func (me *Lexer) TryLexing() {
	go func() {
		me.Start()
	}()
	for token := range me.Output {
		fmt.Printf("Token %s\n", token)
	}
}
*/
// Parser will build the syntax tree from the lexer's token stream.
// It currently just embeds a Lexer; parsing is not implemented yet.
type Parser struct {
	Lexer
}
// Environment is presumably a scope for evaluation: a chain of scopes
// linked through Parent (nil at the root). It holds no bindings yet.
type Environment struct {
	Parent *Environment
}
// NOTE(review): this file declares `package raku`, so this main is never
// used as a program entry point; it looks like a leftover placeholder.
func main() {
	fmt.Println("Hello World!")
}