// lexer.go — lexer for the muesli language.
  1. package muesli
  2. import (
  3. _ "bytes"
  4. _ "errors"
  5. "fmt"
  6. _ "io"
  7. _ "reflect"
  8. _ "runtime"
  9. "strings"
  10. _ "unicode"
  11. "io"
  12. "os"
  13. "bufio"
  14. "unicode"
  15. // "gitlab.com/beoran/woe/graphviz"
  16. // _ "gitlab.com/beoran/woe/monolog"
  17. )
// Position is a location in a source file: the file name plus the
// line and column of the rune most recently read (both start at 1,
// per NewLexer).
type Position struct {
	FileName string
	Line     int
	Column   int
}
// Lexer splits a stream of runes into muesli tokens. It embeds the
// Position of the rune last read and the io.RuneScanner it reads from.
type Lexer struct {
	Position
	// Index counts runes consumed from the scanner so far.
	Index int
	// Start is unused in the visible code — presumably the buffer offset
	// where the current token begins; TODO confirm against the rest of
	// the package.
	Start int
	io.RuneScanner
	// buffer accumulates runes for the token in progress. NOTE(review):
	// nothing visible here ever resets it, so MakeToken captures every
	// rune read since the lexer was created — verify intended.
	buffer []rune
	// Current is the rune most recently read by Next (or restored by
	// Previous).
	Current rune
}
/** Token Kind. Uses a rune to easily handle single character tokens. */
type TokenKind rune

// Token kinds. Single-character tokens ($ = { } [ ] ( ) and newline)
// use their own rune as the kind; multi-rune tokens use a mnemonic
// letter (i, f, s, b, w, t) and '!' marks a lexing error.
const (
	TokenKindInteger    = TokenKind('i')
	TokenKindFloat      = TokenKind('f')
	TokenKindString     = TokenKind('s')
	TokenKindBoolean    = TokenKind('b')
	TokenKindWord       = TokenKind('w')
	TokenKindType       = TokenKind('t')
	TokenKindGet        = TokenKind('$')
	TokenKindSet        = TokenKind('=')
	TokenKindOpenBlock  = TokenKind('{')
	TokenKindCloseBlock = TokenKind('}')
	TokenKindOpenList   = TokenKind('[')
	TokenKindCloseList  = TokenKind(']')
	TokenKindOpenParen  = TokenKind('(')
	TokenKindCloseParen = TokenKind(')')
	TokenKindError      = TokenKind('!')
	// TokenKindEOX marks end-of-expression (newline or '.').
	TokenKindEOX = TokenKind('\n')
)
// NewToken constructs a Token of the given kind, carrying value val and
// stamped with position pos.
func NewToken(kind TokenKind, val Value, pos Position) Token {
	return Token{kind, val, pos}
}
  54. func (lexer Lexer) MakeToken(kind TokenKind) Token {
  55. val := StringValue(string(lexer.buffer))
  56. return NewToken(kind, val, lexer.Position)
  57. }
  58. func (lexer * Lexer) Next() (rune, error) {
  59. r, _, err := lexer.RuneScanner.ReadRune()
  60. if err != nil {
  61. return 0, err
  62. }
  63. lexer.Current = r
  64. lexer.buffer = append(lexer.buffer, r)
  65. lexer.Index++
  66. lexer.Position.Column++
  67. if r == '\n' {
  68. lexer.Position.Column = 1
  69. lexer.Position.Line++
  70. }
  71. return lexer.buffer[len(lexer.buffer) - 1], nil
  72. }
  73. func (lexer * Lexer) Previous() error {
  74. err := lexer.RuneScanner.UnreadRune()
  75. if err != nil {
  76. return err
  77. }
  78. lexer.Index--
  79. lexer.Position.Column--
  80. if (len(lexer.buffer) > 0) {
  81. r := lexer.buffer[len(lexer.buffer) - 1];
  82. lexer.buffer = lexer.buffer[0: len(lexer.buffer) - 1];
  83. if r == '\n' {
  84. lexer.Position.Column = 1
  85. lexer.Position.Line++
  86. }
  87. lexer.Current = r
  88. }
  89. return nil
  90. }
  91. func (lexer * Lexer) SkipSpace() (error) {
  92. var r rune
  93. var err error
  94. r = lexer.Current
  95. for unicode.IsSpace(r) {
  96. r, err = lexer.Next()
  97. if err != nil {
  98. return err
  99. }
  100. }
  101. lexer.Previous()
  102. return nil
  103. }
  104. func (lexer * Lexer) LexNumber() (Token, error) {
  105. isFloat := false
  106. var r rune
  107. var err error
  108. r = lexer.Current
  109. for unicode.IsDigit(r) || r == '.' {
  110. if r == '.' {
  111. if isFloat { // double . in floating point is an error
  112. tok := lexer.MakeToken(TokenKindError)
  113. err = fmt.Errorf("Double period . in floating point constant.")
  114. return tok, err
  115. } else {
  116. isFloat = true
  117. }
  118. }
  119. r, err = lexer.Next()
  120. if err != nil {
  121. return lexer.MakeToken(TokenKindError), err
  122. }
  123. }
  124. lexer.Previous()
  125. if isFloat {
  126. return lexer.MakeToken(TokenKindFloat), nil
  127. } else {
  128. return lexer.MakeToken(TokenKindInteger), nil
  129. }
  130. }
  131. func (lexer * Lexer) LexString() (Token, error) {
  132. inEscape := false
  133. var r rune
  134. var err error
  135. r, err = lexer.Next()
  136. if err != nil {
  137. return lexer.MakeToken(TokenKindError), err
  138. }
  139. for r != '"' || inEscape {
  140. if r == '\\' {
  141. // TODO escape parsing, now just a single character after it
  142. if inEscape { // double backslash
  143. } else {
  144. inEscape = true
  145. }
  146. } else {
  147. inEscape = false
  148. }
  149. r, err = lexer.Next()
  150. if err != nil {
  151. return lexer.MakeToken(TokenKindError), err
  152. }
  153. }
  154. return lexer.MakeToken(TokenKindString), nil
  155. }
  156. func (lexer * Lexer) LexLongString() (Token, error) {
  157. var r rune
  158. var err error
  159. r, err = lexer.Next()
  160. if err != nil {
  161. return lexer.MakeToken(TokenKindError), err
  162. }
  163. for r != '`' {
  164. r, err = lexer.Next()
  165. if err != nil {
  166. return lexer.MakeToken(TokenKindError), err
  167. }
  168. }
  169. return lexer.MakeToken(TokenKindString), nil
  170. }
  171. func (lexer * Lexer) LexWord() (Token, error) {
  172. var r rune
  173. var err error
  174. r, err = lexer.Next()
  175. if err != nil {
  176. return lexer.MakeToken(TokenKindError), err
  177. }
  178. for r != '`' {
  179. r, err = lexer.Next()
  180. if err != nil {
  181. return lexer.MakeToken(TokenKindError), err
  182. }
  183. }
  184. return lexer.MakeToken(TokenKindString), nil
  185. }
  186. func (lexer * Lexer) Lex() (Token, error) {
  187. r, err := lexer.Next()
  188. if err != nil {
  189. return lexer.MakeToken(TokenKindError), err
  190. }
  191. if unicode.IsSpace(r) {
  192. lexer.SkipSpace()
  193. }
  194. if unicode.IsDigit(r) {
  195. return lexer.LexNumber()
  196. }
  197. if r == '\n' || r == '.' {
  198. return lexer.MakeToken(TokenKindEOX), nil
  199. }
  200. if r == '"' {
  201. return lexer.LexString()
  202. }
  203. if r == '`' {
  204. return lexer.LexLongString()
  205. }
  206. switch (TokenKind(r)) {
  207. case TokenKindGet : fallthrough
  208. case TokenKindSet : fallthrough
  209. case TokenKindOpenBlock : fallthrough
  210. case TokenKindCloseBlock: fallthrough
  211. case TokenKindOpenList : fallthrough
  212. case TokenKindCloseList : fallthrough
  213. case TokenKindOpenParen : fallthrough
  214. case TokenKindCloseParen:
  215. return lexer.MakeToken(TokenKind(r)), nil
  216. default:
  217. }
  218. if unicode.IsLetter(r) {
  219. return lexer.LexWord()
  220. }
  221. return lexer.MakeToken(TokenKindError), fmt.Errorf("Unknown character")
  222. }
  223. func NewLexer(scanner io.RuneScanner, filename string) Lexer {
  224. lexer := Lexer{}
  225. lexer.RuneScanner = scanner
  226. lexer.Position.FileName = filename
  227. lexer.Position.Column = 1
  228. lexer.Position.Line = 1
  229. return lexer
  230. }
  231. func NewLexerFromInputString(input string) Lexer {
  232. reader := strings.NewReader(input)
  233. return NewLexer(reader, "<input>")
  234. }
  235. func NewLexerFromFileName(filename string) (*Lexer, error) {
  236. read, err := os.Open(filename)
  237. if err != nil {
  238. bread := bufio.NewReader(read)
  239. lex := NewLexer(bread, filename)
  240. return &lex, nil
  241. }
  242. return nil , err
  243. }