// flexer_test.go — tests for the flexer package.
  1. package flexer
  2. import "testing"
  3. import . "src.eruta.nl/beoran/ll1/common"
// Token kinds used by the test grammar. Negative kinds are synthetic
// (generated downward from -1 via iota); single-character kinds reuse
// the character's code point directly.
const (
	tWord = Kind(-1 - iota) // -1: identifier/word token
	tArrow                  // -2: "->" arrow token
	tSpace                  // -3: whitespace run
	tString                 // -4: quoted string (contents, unquoted)
	tPlus = Kind('+')       // literal '+' token
	tEos  = Kind('.')       // end-of-statement '.' (may absorb trailing newlines)
)
  12. func TestFlexer(t *testing.T) {
  13. pos := Location{}
  14. expected := []Token{
  15. MakeToken(pos, tSpace, "\t "),
  16. MakeToken(pos, tWord, "PROGRAM"),
  17. MakeToken(pos, tSpace, " "),
  18. MakeToken(pos, tArrow, "->"),
  19. MakeToken(pos, tSpace, " "),
  20. MakeToken(pos, tWord, "STATEMENT"),
  21. MakeToken(pos, tPlus, "+"),
  22. MakeToken(pos, tSpace, " "),
  23. MakeToken(pos, tEos, ".\n"),
  24. MakeToken(pos, tWord, "say"),
  25. MakeToken(pos, tSpace, " "),
  26. MakeToken(pos, tString, "hello\nworld"),
  27. MakeToken(pos, tEos, "."),
  28. }
  29. f := NewFlexer(`test`, "\t PROGRAM -> STATEMENT+ .\nsay \"hello\\nworld\".")
  30. f.Lexeme(tSpace, `[ \t]+`, "", nil)
  31. f.Lexeme(tWord, `[A-Za-z_]+`, "", nil)
  32. f.Lexeme(tArrow, `\->`, "", nil)
  33. f.Lexeme(tPlus, `\+`, "", nil)
  34. f.Lexeme(tEos, `\.[\n\r]*`, "", nil)
  35. f.Lexeme(SkipKind, `"`, "", ContextAction("string"))
  36. f.Lexeme(tString, `"`, "string", PopAction(tString))
  37. f.Lexeme(SkipKind, `\\[etnru][0-9a-f]*`, "string", EscapeAction('"'))
  38. f.Lexeme(SkipKind, `.`, "string", StoreAction())
  39. toks := LexAll(f)
  40. for i, e := range expected {
  41. tok := toks[i]
  42. t.Logf("toks: %d, %v", i, tok)
  43. ko := tok.Kind()
  44. ke := e.Kind()
  45. if ko != ke {
  46. t.Errorf("error: kind:%d|%d|", ko, ke)
  47. }
  48. to := tok.Text()
  49. te := e.Text()
  50. if to != te {
  51. t.Errorf("error: text:%s|%s|", to, te)
  52. }
  53. }
  54. if !f.EOF() {
  55. t.Errorf("error: should be EOF")
  56. }
  57. }