
Lexer is still flaky; considering switching to a state-machine-based lexer.

Beoran, 5 years ago
commit 5ffa4a6725
11 changed files with 949 additions and 352 deletions
  1. ast.go (+214 -120)
  2. design_muesli.muesli (+3 -9)
  3. doc.go (+6 -0)
  4. lexer.go (+239 -142)
  5. lexer_test.go (+62 -19)
  6. logger.go (+59 -2)
  7. muesli.go (+2 -0)
  8. parser.go (+250 -50)
  9. parser_test.go (+49 -0)
  10. token.go (+8 -4)
  11. value.go (+57 -6)

+ 214 - 120
ast.go

@@ -1,140 +1,234 @@
 // Abstract Syntax tree for the MUESLI interpreter
 package muesli
 
-import "fmt"
-
+import (
+	"fmt"
+	"strings"
+)
+
+type AstKind int
+
+const (
+	AstKindProgram = AstKind(iota)
+	AstKindStatements
+	AstKindStatement
+	AstKindSet
+	AstKindGet
+	AstKindTarget
+	AstKindCommand
+	AstKindArguments
+	AstKindArgument
+	AstKindExpression
+	AstKindBlock
+	AstKindParenthesis
+	AstKindList
+	AstKindCapture
+	AstKindWordValue
+	AstKindWord
+	AstKindType
+	AstKindValue
+	AstKindEnd
+	AstKindError
+)
+
+func (astkind AstKind) String() string {
+	switch astkind {
+	case AstKindProgram:
+		return "AstKindProgram"
+	case AstKindStatements:
+		return "AstKindStatements"
+	case AstKindStatement:
+		return "AstKindStatement"
+	case AstKindSet:
+		return "AstKindSet"
+	case AstKindGet:
+		return "AstKindGet"
+	case AstKindTarget:
+		return "AstKindTarget"
+	case AstKindCommand:
+		return "AstKindCommand"
+	case AstKindArguments:
+		return "AstKindArguments"
+	case AstKindArgument:
+		return "AstKindArgument"
+	case AstKindExpression:
+		return "AstKindExpression"
+	case AstKindBlock:
+		return "AstKindBlock"
+	case AstKindParenthesis:
+		return "AstKindParenthesis"
+	case AstKindList:
+		return "AstKindList"
+	case AstKindCapture:
+		return "AstKindCapture"
+	case AstKindWordValue:
+		return "AstKindWordValue"
+	case AstKindWord:
+		return "AstKindWord"
+	case AstKindType:
+		return "AstKindType"
+	case AstKindValue:
+		return "AstKindValue"
+	case AstKindEnd:
+		return "AstKindEnd"
+	case AstKindError:
+		return "AstKindError"
+	default:
+		return "Unknown AstKind"
+	}
+}
 
 /* AST node kind */
 type Ast struct {
-	Parent * Ast
-    Child  * Ast
-    Before * Ast
-    After  * Ast
+	Parent *Ast
+	Child  *Ast
+	Before *Ast
+	After  *Ast
 	AstKind
 	*Token
 }
 
-
-func NewAst(kind AstKind, parent * Ast, token * Token) *Ast {
-    child := &Ast{parent, nil, nil, nil,  kind, token}
-    return child
+func NewAst(kind AstKind, parent *Ast, token *Token) *Ast {
+	child := &Ast{parent, nil, nil, nil, kind, token}
+	return child
 }
 
-func (ast * Ast) LastSibling() * Ast {
-    res := ast
-    for res != nil && res.After != nil {
-        res = res.After
-    }
-    return res
+func (ast *Ast) LastSibling() *Ast {
+	res := ast
+	for res != nil && res.After != nil {
+		res = res.After
+	}
+	return res
 }
 
-func (ast * Ast) LastChild() * Ast {
-    return ast.Child.LastSibling()
+func (ast *Ast) LastChild() *Ast {
+	return ast.Child.LastSibling()
 }
 
 /* Detaches, i.e. removes this node and all its children from the parent tree. */
-func (ast * Ast) Remove() * Ast {
-    parent := ast.Parent
-    before := ast.Before
-    after  := ast.After
-    if before != nil {
-        before.After = after
-    }
-    if after != nil {
-        after.Before = before
-    }
-    if parent != nil {
-        /* Special case if ast is the first child of it's parent. */
-        if ast == parent.Child {
-            parent.Child = after
-        }
-    }
-    ast.Parent = nil
-    return ast
-}
-
-func (ast * Ast) InsertSibling(sibling * Ast) * Ast {
-    after := ast.After
-    ast.After = sibling
-    sibling.Before = ast
-    sibling.After  = after
-    if after != nil {
-        after.Before = sibling
-    }
-    sibling.Parent = ast.Parent
-    return sibling
-}
-
-func (ast * Ast) AppendSibling(sibling * Ast) * Ast {
-    return ast.LastSibling().InsertSibling(sibling)
-}
-
-func (ast * Ast) AppendChild(child * Ast) * Ast {
-    child.Parent = ast
-    if ast.Child == nil {
-        ast.Child = child
-    } else {
-        ast.Child.AppendSibling(child)
-    }
-    return child
-}
-
-func (ast * Ast) NewSibling(kind AstKind, token * Token) * Ast {
-    sibling := NewAst(kind, ast.Parent, token)
-    return ast.AppendSibling(sibling)
-}
-
-func (ast * Ast) NewChild(kind AstKind, token * Token) * Ast {
-    sibling := NewAst(kind, ast.Parent, token)
-    return ast.AppendChild(sibling)
-}
-
-func (ast * Ast) Walk(walker func(node * Ast) * Ast) * Ast {
-    if found := walker(ast); found != nil {
-        return found
-    }
-    if ast.Child != nil {
-        if found := ast.Child.Walk(walker); found != nil {
-            return found
-        }
-    }
-    if ast.After != nil {
-        if found := ast.After.Walk(walker); found != nil {
-            return found
-        }
-    }
-    return nil
-}
-
-func (ast * Ast) String() string {
-    return fmt.Sprintf("Ast %d %v", ast.AstKind, ast.Token)
-}
-
-func (ast * Ast) Display() {
-    ast.Walk(func(node * Ast) * Ast {
-        fmt.Printf("Tree: %s\n", node)
-        return nil
-    })
-}
-
-func (ast * Ast) Depth() int {
-    var depth int = 0;
-    parent := ast.Parent
-    for parent != nil {
-        depth ++;
-        parent = parent.Parent
-    }
-    return depth
-}
-
-func (ast * Ast) CountChildren() int {
-    var count int = 0;
-    child := ast.Child
-    for child != nil {
-        count ++;
-        child = child.After
-    }
-    return count;
+func (ast *Ast) Remove() *Ast {
+	parent := ast.Parent
+	before := ast.Before
+	after := ast.After
+	if before != nil {
+		before.After = after
+	}
+	if after != nil {
+		after.Before = before
+	}
+	if parent != nil {
+		/* Special case if ast is the first child of its parent. */
+		if ast == parent.Child {
+			parent.Child = after
+		}
+	}
+	ast.Parent = nil
+	return ast
+}
+
+func (ast *Ast) InsertSibling(sibling *Ast) *Ast {
+	after := ast.After
+	ast.After = sibling
+	sibling.Before = ast
+	sibling.After = after
+	if after != nil {
+		after.Before = sibling
+	}
+	sibling.Parent = ast.Parent
+	return sibling
+}
+
+func (ast *Ast) AppendSibling(sibling *Ast) *Ast {
+	return ast.LastSibling().InsertSibling(sibling)
+}
+
+func (ast *Ast) AppendChild(child *Ast) *Ast {
+	child.Parent = ast
+	if ast.Child == nil {
+		ast.Child = child
+	} else {
+		ast.Child.AppendSibling(child)
+	}
+	return child
 }
 
+func (ast *Ast) NewSibling(kind AstKind, token *Token) *Ast {
+	sibling := NewAst(kind, ast.Parent, token)
+	return ast.AppendSibling(sibling)
+}
+
+func (ast *Ast) NewChild(kind AstKind, token *Token) *Ast {
+	sibling := NewAst(kind, ast.Parent, token)
+	return ast.AppendChild(sibling)
+}
 
+func (ast *Ast) Walk(walker func(node *Ast) *Ast) *Ast {
+	if found := walker(ast); found != nil {
+		return found
+	}
+	if ast.Child != nil {
+		if found := ast.Child.Walk(walker); found != nil {
+			return found
+		}
+	}
+	if ast.After != nil {
+		if found := ast.After.Walk(walker); found != nil {
+			return found
+		}
+	}
+	return nil
+}
+
+func (ast *Ast) String() string {
+	if ast.Token == nil {
+		return fmt.Sprintf("Ast %s nil", ast.AstKind.String())
+	}
+	return fmt.Sprintf("Ast %s %v", ast.AstKind.String(), ast.Token.String())
+}
+
+func (ast *Ast) Display() {
+	ast.Walk(func(node *Ast) *Ast {
+		depth := node.Depth()
+		fmt.Printf(strings.Repeat("--", depth))
+		if node != nil {
+			fmt.Printf("Ast: %s\n", node.String())
+		} else {
+			fmt.Printf("Ast: nil node\n")
+		}
+		return nil
+	})
+}
+
+func (ast *Ast) Depth() int {
+	var depth int = 0
+	parent := ast.Parent
+	for parent != nil {
+		depth++
+		parent = parent.Parent
+	}
+	return depth
+}
+
+func (ast *Ast) CountChildren() int {
+	var count int = 0
+	child := ast.Child
+	for child != nil {
+		count++
+		child = child.After
+	}
+	return count
+}
+
+func (ast *Ast) Errors() []*Ast {
+	res := make([]*Ast, 0)
+	if ast == nil {
+		return res
+	}
+	ast.Walk(func(node *Ast) *Ast {
+		if node != nil && node.AstKind == AstKindError {
+			res = append(res, node)
+		}
+		return nil
+	})
+	return res
+}
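
As a usage note (not part of the diff), here is a minimal sketch of how the reworked Ast API could be exercised from inside package muesli, e.g. in a test file. The token kind, value, and test name are made up for illustration, and it is assumed that the Position type used by NewToken elsewhere in this commit accepts its zero value here:

package muesli

import "testing"

func TestAstSketch(test *testing.T) {
	// Build a tiny tree: program -> command -> word value.
	root := NewAst(AstKindProgram, nil, nil)
	tok := NewToken(TokenKindWord, StringValue("puts"), Position{})
	command := root.NewChild(AstKindCommand, &tok)
	command.NewChild(AstKindWordValue, &tok)

	if root.CountChildren() != 1 {
		test.Errorf("expected 1 child, got %d", root.CountChildren())
	}
	if len(root.Errors()) != 0 {
		test.Errorf("unexpected error nodes in sketch tree")
	}
	root.Display() // prints each node, indented by Depth()
}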

+ 3 - 9
design_muesli.muesli

@@ -12,15 +12,9 @@
 this is still comment
 # Muesli consists of newline separated statements, however, a newline 
 # after { or the do keyword is ignored and does not count as a separator.
-/*
-    C style comments are also supported, and unlke C, they DO nest, but the 
-    comment indicator and the end of comment indicator must be the fist element 
-    on the line. Anything on the line of the end-of-comment indicator is also 
-    ignored
-    /* 
-        so this is fine 
-    */ this is ignored too
-*/
+#{ This is a block comment, it nests 
+but the {} pairs must match. 
+}
 
 
 # Empty lines are ignored.

+ 6 - 0
doc.go

@@ -0,0 +1,6 @@
+// muesli project doc.go
+
+/*
+muesli document
+*/
+package muesli

+ 239 - 142
lexer.go

@@ -10,8 +10,8 @@ import (
 	"os"
 	_ "reflect"
 	_ "runtime"
+	"strconv"
 	"strings"
-    "strconv"
 	"unicode"
 	_ "unicode"
 	// "gitlab.com/beoran/woe/graphviz"
@@ -27,6 +27,11 @@ type Lexer struct {
 	io.RuneScanner
 	buffer  []rune
 	Current rune
+	LoggerWrapper
+}
+
+func (lexer *Lexer) SetLogger(logger Logger) {
+	lexer.LoggerWrapper = LoggerWrapper{logger}
 }
 
 func (lexer *Lexer) ClearBuffer() {
@@ -34,70 +39,86 @@ func (lexer *Lexer) ClearBuffer() {
 }
 
 func (lexer *Lexer) MakeIntegerToken() Token {
-    var sbuffer = string(lexer.buffer)
-    i, err := strconv.ParseInt(sbuffer, 0, 64)
-    if err == nil { 
-        lexer.ClearBuffer()
-        return NewToken(TokenKindInteger, IntValue(i), lexer.Position)
-    } else {
-        lexer.ClearBuffer()
-        return lexer.MakeErrorToken(err);
-    }
+	var sbuffer = string(lexer.buffer)
+	i, err := strconv.ParseInt(sbuffer, 0, 64)
+	if err == nil {
+		lexer.ClearBuffer()
+		return NewToken(TokenKindInteger, IntValue(i), lexer.Position)
+	} else {
+		lexer.ClearBuffer()
+		return lexer.MakeErrorToken(err)
+	}
 }
 
 func (lexer *Lexer) MakeFloatToken() Token {
-    var sbuffer = string(lexer.buffer)
-    f, err := strconv.ParseFloat(sbuffer, 64)
-    if err == nil { 
-        lexer.ClearBuffer()
-        return NewToken(TokenKindFloat, FloatValue(f), lexer.Position)
-    } else {
-        lexer.ClearBuffer()
-        return lexer.MakeErrorToken(err);
-    }
+	var sbuffer = string(lexer.buffer)
+	f, err := strconv.ParseFloat(sbuffer, 64)
+	if err == nil {
+		lexer.ClearBuffer()
+		return NewToken(TokenKindFloat, FloatValue(f), lexer.Position)
+	} else {
+		lexer.ClearBuffer()
+		return lexer.MakeErrorToken(err)
+	}
 }
 
 func (lexer *Lexer) MakeBooleanToken(b bool) Token {
-    lexer.ClearBuffer()
-    return NewToken(TokenKindBoolean, BoolValue(b), lexer.Position)
+	lexer.ClearBuffer()
+	return NewToken(TokenKindBoolean, BoolValue(b), lexer.Position)
 }
 
-
 func (lexer *Lexer) MakeStringValueToken(kind TokenKind) Token {
-    var sbuffer = string(lexer.buffer)
-    return NewToken(kind, StringValue(sbuffer), lexer.Position)
-}
-
-func (lexer *Lexer) MakeToken(kind TokenKind) Token {    
-    switch (kind)   { 
-        case TokenKindInteger   : return lexer.MakeIntegerToken() 
-        case TokenKindFloat     : return lexer.MakeFloatToken()
-        case TokenKindString    : fallthrough 
-        case TokenKindSymbol    : fallthrough 
-        case TokenKindType      : fallthrough
-        case TokenKindError     : fallthrough
-        case TokenKindWord      : return lexer.MakeStringValueToken(kind)
-        case TokenKindBoolean   : fallthrough
-        case TokenKindGet       : fallthrough 
-        case TokenKindSet       : fallthrough 
-        case TokenKindOpenBlock : fallthrough 
-        case TokenKindCloseBlock: fallthrough 
-        case TokenKindOpenList  : fallthrough 
-        case TokenKindCloseList : fallthrough 
-        case TokenKindOpenParen : fallthrough 
-        case TokenKindCloseParen: fallthrough 
-        case TokenKindEOX       : fallthrough
-        case TokenKindEOF       : 
-            val := StringValue(string(lexer.buffer))
-            lexer.ClearBuffer()
-            return NewToken(kind, val, lexer.Position)
-        default :
-            return lexer.MakeErrorfToken("Internal error on token type %s", kind)
-    }
+	var sbuffer = string(lexer.buffer)
+	return NewToken(kind, StringValue(sbuffer), lexer.Position)
+}
+
+func (lexer *Lexer) MakeToken(kind TokenKind) Token {
+	switch kind {
+	case TokenKindInteger:
+		return lexer.MakeIntegerToken()
+	case TokenKindFloat:
+		return lexer.MakeFloatToken()
+	case TokenKindString:
+		fallthrough
+	case TokenKindSymbol:
+		fallthrough
+	case TokenKindType:
+		fallthrough
+	case TokenKindError:
+		fallthrough
+	case TokenKindWord:
+		return lexer.MakeStringValueToken(kind)
+	case TokenKindBoolean:
+		fallthrough
+	case TokenKindGet:
+		fallthrough
+	case TokenKindSet:
+		fallthrough
+	case TokenKindOpenBlock:
+		fallthrough
+	case TokenKindCloseBlock:
+		fallthrough
+	case TokenKindOpenList:
+		fallthrough
+	case TokenKindCloseList:
+		fallthrough
+	case TokenKindOpenParen:
+		fallthrough
+	case TokenKindCloseParen:
+		fallthrough
+	case TokenKindEOX:
+		fallthrough
+	case TokenKindEOF:
+		val := StringValue(string(lexer.buffer))
+		lexer.ClearBuffer()
+		return NewToken(kind, val, lexer.Position)
+	default:
+		return lexer.MakeErrorfToken("Internal error on token type %s", kind)
+	}
 }
 
 func (lexer Lexer) MakeErrorToken(err error) Token {
-	return NewToken(TokenKindError, err.Error(), lexer.Position)
+	return NewToken(TokenKindError, ErrorValue{err}, lexer.Position)
 }
 
 func (lexer Lexer) MakeErrorfToken(format string, va ...interface{}) Token {
@@ -106,7 +127,7 @@ func (lexer Lexer) MakeErrorfToken(format string, va ...interface{}) Token {
 }
 
 func (lexer Lexer) MakeEOFToken() Token {
-	return NewToken(TokenKindEOF, "", lexer.Position)
+	return NewToken(TokenKindEOF, &EmptyValue{}, lexer.Position)
 }
 
 func (lexer *Lexer) Peek() (rune, error) {
@@ -208,11 +229,60 @@ func (lexer *Lexer) SkipWhile(predicate func(rune) bool) (bool, error) {
 }
 
 func isSpace(r rune) bool {
-	return r == ' ' || r == '\t'
+	return r == ' ' || r == '\t' || r == '\v' || r == '\r'
+}
+
+func isComment(r rune) bool {
+	return r == '#'
 }
 
 func (lexer *Lexer) SkipSpace() error {
-	_, err := lexer.SkipWhile(isSpace)
+	r, err := lexer.Skip()
+	lexer.LogDebug("Skipping %c.", r)
+	if err != nil {
+		return err
+	}
+	for ; isSpace(r) && err == nil; r, err = lexer.Skip() {
+	}
+	return err
+}
+
+func (lexer *Lexer) SkipBlockComment() error {
+	var err error
+	var r rune
+	lexer.LogDebug("Skipping block comment.")
+	for block := 1; block > 0; {
+		r, err = lexer.Skip()
+		if err != nil {
+			return err
+		}
+		if r == '{' {
+			block++
+		} else if r == '}' {
+			block--
+		}
+		lexer.LogDebug("Skipping block comment: %d", block)
+	}
+	return err
+}
+
+func (lexer *Lexer) SkipComment() error {
+	r, err := lexer.Skip()
+	lexer.LogDebug("Skipping %c.", r)
+	if err != nil {
+		return err
+	}
+	first := true
+	for r, err = lexer.Skip(); r != '\n' && err == nil; r, err = lexer.Skip() {
+		lexer.LogDebug("Skipping loop %c.", r)
+		if first && r == '{' {
+			first = false
+			return lexer.SkipBlockComment()
+		}
+	}
+	if err != nil {
+		return err
+	}
 	return err
 }
 
@@ -265,51 +335,62 @@ func isDoubleQuote(r rune) bool {
 }
 
 func (lexer *Lexer) handleEscapeHexChars(amount int) error {
-    buffer := make([]byte, 0)
-    r, err := lexer.Skip()
-    for index  := 0 ; err == nil && index < amount ;  {
-        if unicode.Is(unicode.ASCII_Hex_Digit, r) {
-            buffer = append(buffer, byte(r))
-        } else {
-            return fmt.Errorf("Not a hexadecimal digit: %c", r)
-        }
-        index++
-        if (index < amount) { 
-            r, err = lexer.Skip()
-        }
-    }
-    if err != nil {
-        return err
-    }
-    i, err := strconv.ParseInt(string(buffer), 16, 32)
-    if err != nil {
-        return err
-    }
-    lexer.appendRune(rune(i))
-    _, err = lexer.Peek()
-    return err
+	buffer := make([]byte, 0)
+	r, err := lexer.Skip()
+	for index := 0; err == nil && index < amount; {
+		if unicode.Is(unicode.ASCII_Hex_Digit, r) {
+			buffer = append(buffer, byte(r))
+		} else {
+			return fmt.Errorf("Not a hexadecimal digit: %c", r)
+		}
+		index++
+		if index < amount {
+			r, err = lexer.Skip()
+		}
+	}
+	if err != nil {
+		return err
+	}
+	i, err := strconv.ParseInt(string(buffer), 16, 32)
+	if err != nil {
+		return err
+	}
+	lexer.appendRune(rune(i))
+	_, err = lexer.Peek()
+	return err
 }
 
-
 func (lexer *Lexer) handleEscape() error {
 	r, err := lexer.Skip()
-    if err != nil {
+	if err != nil {
 		return err
 	}
 	switch r {
-        case 'a':   lexer.appendRune('\a')
-        case 'b':   lexer.appendRune('\b')
-        case 'e':   lexer.appendRune('\033')
-        case 'f':   lexer.appendRune('\f')
-        case 'n':   lexer.appendRune('\n')
-        case 'r':   lexer.appendRune('\r')
-        case 't':   lexer.appendRune('\t')
-        case '\\':  lexer.appendRune('\\')
-        case '"':   lexer.appendRune('"')
-        // case 'o':   fallthrough   // No octals, for now.
-        case 'x':   err = lexer.handleEscapeHexChars(2) 
-        case 'u':   err = lexer.handleEscapeHexChars(4)
-        case 'U':   err = lexer.handleEscapeHexChars(6)
+	case 'a':
+		lexer.appendRune('\a')
+	case 'b':
+		lexer.appendRune('\b')
+	case 'e':
+		lexer.appendRune('\033')
+	case 'f':
+		lexer.appendRune('\f')
+	case 'n':
+		lexer.appendRune('\n')
+	case 'r':
+		lexer.appendRune('\r')
+	case 't':
+		lexer.appendRune('\t')
+	case '\\':
+		lexer.appendRune('\\')
+	case '"':
+		lexer.appendRune('"')
+	// case 'o':   fallthrough   // No octals, for now.
+	case 'x':
+		err = lexer.handleEscapeHexChars(2)
+	case 'u':
+		err = lexer.handleEscapeHexChars(4)
+	case 'U':
+		err = lexer.handleEscapeHexChars(6)
 	default:
 		return fmt.Errorf("Unknown escape sequence character %c: %d", r, r)
 	}
@@ -319,25 +400,25 @@ func (lexer *Lexer) handleEscape() error {
 
 func (lexer *Lexer) LexString() Token {
 	var err error
-    var r rune
+	var r rune
 
 	_, err = lexer.Skip() // Skip first "
 	if err != nil {
 		return lexer.handleError(err)
 	}
-    
-    r, err = lexer.Skip() 
-	for  ; r != '"' && err == nil ; { 
-        if r == '\\' {
+
+	r, err = lexer.Skip()
+	for r != '"' && err == nil {
+		if r == '\\' {
 			err = lexer.handleEscape()
-            if err != nil {
-                return lexer.handleError(err)
-            }
+			if err != nil {
+				return lexer.handleError(err)
+			}
 		} else {
-            lexer.appendRune(r)
-            // still inside the string
+			lexer.appendRune(r)
+			// still inside the string
 		}
-        r, err = lexer.Skip()
+		r, err = lexer.Skip()
 	}
 	if err != nil {
 		return lexer.MakeErrorfToken("when parsing string: %s", err)
@@ -377,7 +458,7 @@ func (lexer *Lexer) LexLongString() Token {
 
 func (lexer *Lexer) LexWord() Token {
 	var err error
-    first := true
+	first := true
 
 	_, err = lexer.Next()
 	if err != nil {
@@ -385,26 +466,29 @@ func (lexer *Lexer) LexWord() Token {
 	}
 
 	_, err = lexer.NextWhile(func(r rune) bool {
-        if first {
-            first = false
-            return unicode.IsLetter(r) || r == '_'
-        } else {
-            return unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_'
-        }
+		if first {
+			first = false
+			return unicode.IsLetter(r) || r == '_'
+		} else {
+			return unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_'
+		}
 	})
-    
-    if err != nil {
+
+	if err != nil {
 		return lexer.handleError(err)
 	}
-    
-    sbuffer := string(lexer.buffer)
-    
-    // handle key words    
-    switch sbuffer {
-        case "true" :  return lexer.MakeBooleanToken(true)
-        case "false":  return lexer.MakeBooleanToken(false)
-        default: 	   return lexer.MakeToken(TokenKindWord)
-    }
+
+	sbuffer := string(lexer.buffer)
+
+	// handle key words
+	switch sbuffer {
+	case "true":
+		return lexer.MakeBooleanToken(true)
+	case "false":
+		return lexer.MakeBooleanToken(false)
+	default:
+		return lexer.MakeToken(TokenKindWord)
+	}
 }
 
 func (lexer *Lexer) LexSymbol() Token {
@@ -416,34 +500,46 @@ func (lexer *Lexer) LexSymbol() Token {
 	}
 
 	_, err = lexer.NextWhile(func(r rune) bool {
-        return !unicode.IsSpace(r)
+		return !unicode.IsSpace(r)
 	})
-    
-    if err != nil {
+
+	if err != nil {
 		return lexer.handleError(err)
 	}
 
 	return lexer.MakeToken(TokenKindSymbol)
 }
 
-
-func (lexer *Lexer) lex() Token {
+func (lexer *Lexer) skipSpaceAndCommentAndPeek() (rune, error) {
 	r, err := lexer.Peek()
-
 	if err != nil {
-		return lexer.handleError(err)
+		return r, err
 	}
-
-	if isSpace(r) {
-		err = lexer.SkipSpace()
+	for isSpace(r) || isComment(r) {
+		if isSpace(r) {
+			err = lexer.SkipSpace()
+		} else if isComment(r) {
+			err = lexer.SkipComment()
+		}
 		if err != nil {
-			return lexer.handleError(err)
+			return r, err
 		}
-		r, err = lexer.Peek()
+		lexer.LogDebug("Peeked again: >%c<", r)
+		r, err = lexer.Peek()
 		if err != nil {
-			return lexer.handleError(err)
+			return r, err
 		}
 	}
+	return r, err
+}
+
+func (lexer *Lexer) lex() Token {
+	r, err := lexer.skipSpaceAndCommentAndPeek()
+	lexer.LogDebug(" After skip: >%c< >%v<\n", r, err)
+
+	if err != nil {
+		return lexer.handleError(err)
+	}
 
 	if unicode.IsDigit(r) || r == '-' {
 		return lexer.LexNumber()
@@ -461,8 +557,8 @@ func (lexer *Lexer) lex() Token {
 	if r == '`' {
 		return lexer.LexLongString()
 	}
-    
-    if r == ':' {
+
+	if r == ':' {
 		return lexer.LexSymbol()
 	}
 
@@ -514,16 +610,17 @@ func (lexer *Lexer) LexAll() []Token {
 	return res
 }
 
-func NewLexer(scanner io.RuneScanner, filename string) Lexer {
-	lexer := Lexer{}
+func NewLexer(scanner io.RuneScanner, filename string) *Lexer {
+	lexer := &Lexer{}
 	lexer.RuneScanner = scanner
 	lexer.Position.FileName = filename
 	lexer.Position.Column = 1
 	lexer.Position.Line = 1
+	lexer.LoggerWrapper = LoggerWrapper{nil}
 	return lexer
 }
 
-func NewLexerFromInputString(input string) Lexer {
+func NewLexerFromString(input string) *Lexer {
 	reader := strings.NewReader(input)
 	return NewLexer(reader, "<input>")
 }
@@ -533,7 +630,7 @@ func NewLexerFromFileName(filename string) (*Lexer, error) {
 	if err != nil {
 		bread := bufio.NewReader(read)
 		lex := NewLexer(bread, filename)
-		return &lex, nil
+		return lex, nil
 	}
 	return nil, err
 }
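
As a usage note (not part of the diff), a minimal sketch of driving the reworked lexer from inside package muesli; the input string is made up, and given the flakiness mentioned in the commit message the exact token stream is not guaranteed:

package muesli

import "fmt"

func sketchLexAll() {
	lexer := NewLexerFromString("puts \"hello\" 1234\n")
	lexer.SetLogger(&testLogger{}) // optional: enables the LogDebug calls added above
	for _, token := range lexer.LexAll() {
		fmt.Println(token.String())
	}
}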

+ 62 - 19
lexer_test.go

@@ -1,12 +1,14 @@
 package muesli
 
 import (
+	"reflect"
+	"runtime"
 	_ "strings"
 	"testing"
 )
 
 func LexText(input string) []Token {
-	lexer := NewLexerFromInputString(input)
+	lexer := NewLexerFromString(input)
 	tokens := lexer.LexAll()
 	return tokens
 }
@@ -26,26 +28,69 @@ func HelperTryLexText(input string, test *testing.T) {
 }
 
 func HelperLexExpect(input string, wantKind TokenKind, wantValue Value, test *testing.T) {
-	lexer := NewLexerFromInputString(input)
+	lexer := NewLexerFromString(input)
+	lexer.SetLogger(&testLogger{})
 	token := lexer.Lex()
-    if (token.TokenKind == wantKind) && (token.Value == wantValue) {
-        test.Logf("Token as expected %v %v", token.TokenKind, token.Value)
-    } else {
-        test.Errorf("Unexpected token kind or value: %v %v >%v< >%v<", 
-            token.TokenKind, wantKind, token.Value, wantValue)
-    }
+	if (token.TokenKind == wantKind) && (token.Value == wantValue) {
+		/* test.Logf("Token as expected %v %v", token.TokenKind, token.Value) */
+	} else {
+		test.Errorf("Unexpected token kind or value: %v %v >%v< >%v<",
+			token.TokenKind, wantKind, token.Value, wantValue)
+	}
 }
 
-func TestLex(test *testing.T) {
-    HelperLexExpect("word\n", TokenKindWord, StringValue("word"), test)
-    HelperLexExpect(":symbol\n", TokenKindSymbol, StringValue("symbol"), test)
-    HelperLexExpect("1234\n", TokenKindInteger, IntValue(1234), test)
-    HelperLexExpect("-3.14\n", TokenKindFloat, FloatValue(-3.14), test)
-    HelperLexExpect(`"Hello \"world" \n`, TokenKindString, StringValue(`Hello "world`), test)
-    HelperLexExpect("true\n", TokenKindBoolean, TrueValue, test)
-    HelperLexExpect("false\n", TokenKindBoolean, FalseValue, test)
+func HelperFunctionName(f interface{}) string {
+	fp := reflect.ValueOf(f).Pointer()
+	info := runtime.FuncForPC(fp)
+	if info != nil {
+		return info.Name()
+	}
+	return "unknown"
 }
 
+func HelperLexTestSkip(input string, want rune, call func(*Lexer) error, test *testing.T) {
+	var r rune
+	lexer := NewLexerFromString(input)
+	lexer.SetLogger(&testLogger{})
+	fn := HelperFunctionName(call)
+	err := call(lexer)
+	if err != nil {
+		test.Errorf("Unexpected error result: %s: %v", fn, err)
+	}
+	r, err = lexer.Peek()
+	if err != nil {
+		test.Errorf("Unexpected error result on peek for %s: %v", fn, err)
+	}
+	if r != want {
+		test.Errorf("Unexpected character peeked for %s: %c: %c", fn, r, want)
+	}
+}
+
+func TestLexParts(test *testing.T) {
+	HelperLexTestSkip(" xyz", 'x', (*Lexer).SkipSpace, test)
+	HelperLexTestSkip("     xyz", 'x', (*Lexer).SkipSpace, test)
+	HelperLexTestSkip(" #  \nx", 'x', (*Lexer).SkipComment, test)
+	HelperLexTestSkip("#{}x", 'x', (*Lexer).SkipComment, test)
+	HelperLexTestSkip("#{{}{{}}}x", 'x', (*Lexer).SkipComment, test)
+	HelperLexTestSkip("    \tword\n", 'w', (*Lexer).SkipComment, test)
+}
+
+func TestLex(test *testing.T) {
+	HelperLexExpect("word\n", TokenKindWord, StringValue("word"), test)
+	HelperLexExpect(":symbol\n", TokenKindSymbol, StringValue("symbol"), test)
+	HelperLexExpect("1234\n", TokenKindInteger, IntValue(1234), test)
+	HelperLexExpect("-3.14\n", TokenKindFloat, FloatValue(-3.14), test)
+	HelperLexExpect(`"Hello \"world" \n`, TokenKindString, StringValue(`Hello "world`), test)
+	HelperLexExpect("true\n", TokenKindBoolean, TrueValue, test)
+	HelperLexExpect("false\n", TokenKindBoolean, FalseValue, test)
+	HelperLexExpect("    \tword\n", TokenKindWord, StringValue("word"), test)
+	/*
+		HelperLexExpect("# comment should be ignored\ntrue\n", TokenKindBoolean, TrueValue, test)
+		HelperLexExpect("  # comment should be ignored\ntrue\n", TokenKindBoolean, TrueValue, test)
+		HelperLexExpect("  # comment should be ignored\n  true\n", TokenKindBoolean, TrueValue, test)
+		HelperLexExpect("#{ comment should be ignored\n this too } true\n", TokenKindBoolean, TrueValue, test)
+	*/
+}
 
 func TestLexing(test *testing.T) {
 	const input = `
@@ -69,7 +114,5 @@ def increment variable by value {
     =variable (add variable $value)
 }
 `
-	test.Log("Hi test!")
-
-	HelperTryLexText(input, test)
+	// HelperTryLexText(input, test)
 }

+ 59 - 2
logger.go

@@ -1,13 +1,21 @@
 package muesli
 
-import "runtime"
+import (
+	"fmt"
+	"runtime"
+	"strings"
+)
 
-/** Logger interface that Muuesli uses.*/
+/** Logger interface that Muesli uses, to allow external logging packages to be used easily. */
 type Logger interface {
 	Log(level string, file string, line int, format string, args ...interface{})
 }
 
 func WriteLog(logger Logger, depth int, level string, format string, args ...interface{}) {
+	if logger == nil {
+		return
+	}
+
 	_, file, line, ok := runtime.Caller(depth)
 
 	if !ok {
@@ -17,3 +25,52 @@ func WriteLog(logger Logger, depth int, level string, format string, args ...int
 
 	logger.Log(level, file, line, format, args...)
 }
+
+type LoggerWrapper struct {
+	Logger
+}
+
+func (lw LoggerWrapper) WriteLog(depth int, level string, format string, args ...interface{}) {
+	WriteLog(lw.Logger, depth, level, format, args...)
+}
+
+func (lw LoggerWrapper) LogDebug(format string, args ...interface{}) {
+	lw.WriteLog(3, "DEBUG:", format, args...)
+}
+
+func (lw LoggerWrapper) LogInfo(format string, args ...interface{}) {
+	lw.WriteLog(3, "INFO:", format, args...)
+}
+
+func (lw LoggerWrapper) LogWarning(format string, args ...interface{}) {
+	lw.WriteLog(3, "WARNING:", format, args...)
+}
+
+func (lw LoggerWrapper) LogError(format string, args ...interface{}) {
+	lw.WriteLog(3, "ERROR:", format, args...)
+}
+
+type testLogger struct {
+	last   string
+	repeat int
+}
+
+func (tl *testLogger) Log(level string, file string, line int, format string, args ...interface{}) {
+	text1 := fmt.Sprintf("%s:%s:%d: ", level, file, line)
+	text2 := fmt.Sprintf(format, args...)
+	if format[len(format)-1] != '\n' {
+		text2 = text2 + "\n"
+	}
+	out := text1 + text2
+	if strings.Compare(out, tl.last) == 0 {
+		tl.repeat++
+	} else {
+		if tl.repeat > 0 {
+			fmt.Printf("%s:%s:%d: Above message repeated %d times.",
+				level, file, line, tl.repeat)
+		}
+		tl.last = out
+		tl.repeat = 0
+		fmt.Printf("%s", out)
+	}
+}
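
As a usage note (not part of the diff), a minimal sketch of an alternative Logger implementation satisfying the interface above; unlike testLogger it writes to standard error and does no duplicate suppression:

package muesli

import (
	"fmt"
	"os"
)

type stderrLogger struct{}

func (stderrLogger) Log(level string, file string, line int, format string, args ...interface{}) {
	// The level strings passed in by LoggerWrapper already end in a colon.
	fmt.Fprintf(os.Stderr, "%s%s:%d: ", level, file, line)
	fmt.Fprintf(os.Stderr, format, args...)
	fmt.Fprintln(os.Stderr)
}

It could be installed with lexer.SetLogger(stderrLogger{}) or parser.SetLogger(stderrLogger{}).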

+ 2 - 0
muesli.go

@@ -0,0 +1,2 @@
+// muesli project muesli.go
+package muesli

+ 250 - 50
parser.go

@@ -15,60 +15,68 @@ import (
 	// _ "gitlab.com/beoran/woe/monolog"
 )
 
-type AstKind int
-
-const (
-	AstKindProgram = AstKind(iota)
-	AstKindStatements
-	AstKindStatement
-	AstKindSet
-	AstKindGet
-	AstKindCommand
-	AstKindArguments
-	AstKindBlock
-	AstKindList
-	AstKindCapture
-	AstKindWordValue
-	AstKindWord
-	AstKindType
-	AstKindValue
-	AstKindEox
-	AstKindError
-)
+/* Grammar:
+
+Desired syntax (verified LL(1) on smlweb.cpsc.ucalgary.ca)
+
+PROGRAM -> STATEMENTS.
+STATEMENTS -> STATEMENT STATEMENTS | .
+STATEMENT -> EXPRESSION eos | BLOCK .
+WORDOPS -> WORDOP WORDOPS | .
+EXPRESSION -> SETTER | GETTER | COMMAND.
+COMMAND -> WORDVALUE PARAMETERS.
+PARAMETERS -> PARAMETER PARAMETERS | .
+PARAMETER -> WORDVALUE | PARENTHESIS | GETTER | ARRAY | BLOCK .
+PARENTHESIS -> '(' EXPRESSION ')' .
+BLOCK -> '{' STATEMENTS '}' .
+WORDVALUE -> word | VALUE.
+VALUE -> string | integer | float | symbol.
+SETTER -> set word PARAMETERS .
+GETTER -> get word .
+
+ *
+ * program -> statements
+ * statements -> statement+
+ * statement -> get / set / command
+ *
+*/
 
 type Parser struct {
-	Lexer Lexer
-	Ast
-	next    *Token
+	Lexer   *Lexer
 	current *Token
+	LoggerWrapper
 }
 
-func (parser *Parser) Peek() *Token {
-	if parser.next == nil {
-		token := parser.Lexer.Lex()
-		parser.next = &token
-	}
-	return parser.next
+func (parser *Parser) SetLogger(logger Logger) {
+	parser.LoggerWrapper = LoggerWrapper{logger}
 }
 
-func (parser *Parser) Next() *Token {
-	next := parser.Peek()
-	parser.current = next
-	parser.next = nil
-	parser.Peek()
-	return parser.current
+func (parser *Parser) Advance() {
+	token := parser.Lexer.Lex()
+	parser.current = &token
+	parser.LogDebug("Next token: %s\n", token.String())
 }
 
-func (parser *Parser) Require(wanted TokenKind, astkind AstKind, parent *Ast) *Ast {
-	token := parser.Next()
-	if token.TokenKind == wanted {
-		return parent.NewChild(astkind, token)
+/* Looks at the current token and, if it is of any of the token kinds given in
+kinds, returns the accepted token and advances the parser. Otherwise the
+parser does not advance and the current token remains the same, except when it
+was still nil, in which case the first token is fetched before matching. */
+func (parser *Parser) Accept(kinds ...TokenKind) *Token {
+	if parser.current == nil {
+		parser.Advance()
 	}
-	return parent.NewChild(AstKindError, token)
+
+	for _, kind := range kinds {
+		if kind == parser.current.TokenKind {
+			accepted := parser.current
+			parser.Advance()
+			return accepted
+		}
+	}
+	return nil
 }
 
-func (parser *Parser) ParseMany(
-	astkind AstKind, parsefunc func(*Parser) *Ast) *Ast {
+func (parser *Parser) ParseMany(astkind AstKind, parsefunc func(*Parser) *Ast) *Ast {
 	ast := NewAst(astkind, nil, nil)
 	for sub := parsefunc(parser); sub != nil && sub.AstKind != AstKindError; sub = parsefunc(parser) {
 		ast.AppendChild(sub)
@@ -77,7 +85,7 @@ func (parser *Parser) ParseMany(
 }
 
 func (parser *Parser) NewAstError(message string) *Ast {
-	sv := StringValue(message)
+	sv := StringValue(message + " at token " + parser.current.String())
 	pos := parser.current.Position
 	tok := NewToken(TokenKindError, sv, pos)
 	return NewAst(AstKindError, nil, &tok)
@@ -92,26 +100,196 @@ func (parser *Parser) ParseAny(astkind AstKind, parsefuncs ...(func(*Parser) *As
 			return ast
 		}
 	}
-	err := parser.NewAstError("Unexpected token")
-	ast.AppendChild(err)
+	return nil
+}
+
+func (parser *Parser) ParseRequireAny(astkind AstKind, parsefuncs ...(func(*Parser) *Ast)) *Ast {
+	ast := parser.ParseAny(astkind, parsefuncs...)
+	if ast == nil {
+		err := parser.NewAstError("Unexpected token")
+		return err
+	}
+	return ast
+}
+
+func (parser *Parser) ParseValue() *Ast {
+	value := parser.Accept(TokenKindInteger, TokenKindString,
+		TokenKindBoolean, TokenKindFloat, TokenKindSymbol)
+	if value == nil {
+		return nil
+	}
+	return NewAst(AstKindValue, nil, value)
+}
+
+func (parser *Parser) ParseWordValue() *Ast {
+	value := parser.Accept(TokenKindInteger, TokenKindString,
+		TokenKindBoolean, TokenKindFloat, TokenKindSymbol, TokenKindWord)
+	if value == nil {
+		return nil
+	}
+	return NewAst(AstKindWordValue, nil, value)
+}
+
+func (parser *Parser) ParseArgument() *Ast {
+	return parser.ParseAny(AstKindArgument,
+		(*Parser).ParseWordValue,
+		(*Parser).ParseGet,
+		(*Parser).ParseSet,
+		(*Parser).ParseParenthesis,
+		(*Parser).ParseList,
+		(*Parser).ParseBlock)
+}
+
+func (parser *Parser) ParseArguments() *Ast {
+	return parser.ParseMany(AstKindArguments, (*Parser).ParseArgument)
+}
+
+func (parser *Parser) ParseList() *Ast {
+	op := parser.Accept(TokenKindOpenList)
+	if op == nil {
+		return nil
+	}
+
+	ast := NewAst(AstKindList, nil, op)
+	args := parser.ParseArguments()
+	if args.AstKind == AstKindError {
+		return args
+	}
+	if cp := parser.Accept(TokenKindCloseList); cp == nil {
+		return parser.NewAstError("expected closing brackets")
+	}
+
+	ast.AppendChild(args)
+	return ast
+}
+
+func (parser *Parser) ParseParenthesis() *Ast {
+	op := parser.Accept(TokenKindOpenParen)
+
+	if op == nil {
+		return nil
+	}
+
+	ast := NewAst(AstKindParenthesis, nil, op)
+	expr := parser.ParseExpression()
+	if expr == nil {
+		return parser.NewAstError("expected expression")
+	}
+	if expr.AstKind == AstKindError {
+		return expr
+	}
+	if cp := parser.Accept(TokenKindCloseParen); cp == nil {
+		return parser.NewAstError("expected closing parenthesis")
+	}
+
+	ast.AppendChild(expr)
+	return ast
+}
+
+func (parser *Parser) ParseBlock() *Ast {
+	op := parser.Accept(TokenKindOpenBlock)
+	if op == nil {
+		return nil
+	}
+
+	ast := NewAst(AstKindBlock, nil, op)
+	stats := parser.ParseStatements()
+	if stats == nil {
+		return parser.NewAstError("expected expression")
+	}
+	if stats.AstKind == AstKindError {
+		return stats
+	}
+	if cp := parser.Accept(TokenKindCloseBlock); cp == nil {
+		return parser.NewAstError("expected closing block")
+	}
+
+	ast.AppendChild(stats)
+	return ast
+}
+
+/* Parses the target of a set or get expression */
+func (parser *Parser) ParseTarget() *Ast {
+	target := parser.Accept(TokenKindWord, TokenKindSymbol)
+	ast := NewAst(AstKindTarget, nil, target)
+
+	if target == nil {
+		paren := parser.ParseParenthesis()
+		if paren == nil {
+			return parser.NewAstError("expected word, symbol or parenthesis")
+		}
+		ast.AppendChild(paren)
+	}
+
 	return ast
 }
 
 func (parser *Parser) ParseSet() *Ast {
-	return nil
+	set := parser.Accept(TokenKindSet)
+	if set == nil {
+		return nil
+	}
+
+	ast := NewAst(AstKindSet, nil, set)
+	target := parser.ParseTarget()
+	ast.AppendChild(target)
+
+	argument := parser.ParseArgument()
+	if argument == nil {
+		return parser.NewAstError("Expected argument to set")
+	}
+	ast.AppendChild(argument)
+	return ast
 }
 
 func (parser *Parser) ParseGet() *Ast {
-	return nil
+	get := parser.Accept(TokenKindGet)
+	if get == nil {
+		return nil
+	}
+	ast := NewAst(AstKindGet, nil, get)
+	target := parser.ParseTarget()
+	ast.AppendChild(target)
+	return ast
 }
 
 func (parser *Parser) ParseCommand() *Ast {
-	return nil
+	word := parser.Accept(TokenKindWord)
+	if word == nil {
+		return nil
+	}
+	arguments := parser.ParseArguments()
+	command := NewAst(AstKindCommand, nil, word)
+	command.AppendChild(arguments)
+	return command
+}
+
+func (parser *Parser) ParseExpression() *Ast {
+	exp := parser.ParseRequireAny(AstKindExpression, (*Parser).ParseSet,
+		(*Parser).ParseGet, (*Parser).ParseCommand)
+	return exp
+}
+
+func (parser *Parser) ParseEmptyStatement() *Ast {
+	eox := parser.Accept(TokenKindEOX)
+	if eox == nil {
+		return nil
+	}
+	ast := NewAst(AstKindEnd, nil, eox)
+	return ast
 }
 
 func (parser *Parser) ParseStatement() *Ast {
-	return parser.ParseAny(AstKindStatement,
-		(*Parser).ParseSet, (*Parser).ParseGet, (*Parser).ParseCommand)
+	ast := parser.ParseAny(AstKindStatement,
+		(*Parser).ParseBlock,
+		(*Parser).ParseSet,
+		(*Parser).ParseGet,
+		(*Parser).ParseCommand,
+		(*Parser).ParseParenthesis)
+	if eox := parser.Accept(TokenKindEOX); eox == nil {
+		return parser.NewAstError("expected end of statement")
+	}
+	return ast
 }
 
 func (parser *Parser) ParseStatements() *Ast {
@@ -121,7 +299,11 @@ func (parser *Parser) ParseStatements() *Ast {
 func (parser *Parser) ParseProgram() *Ast {
 	ast := NewAst(AstKindProgram, nil, nil)
 	stats := parser.ParseStatements()
+	eof := parser.Accept(TokenKindEOF)
+	aeof := NewAst(AstKindEnd, nil, eof)
+	// be lenient with missing EOF for now...
 	ast.AppendChild(stats)
+	ast.AppendChild(aeof)
 	return ast
 }
 
@@ -129,3 +311,21 @@ func (parser *Parser) Parse() *Ast {
 	ast := parser.ParseProgram()
 	return ast
 }
+
+func NewParser(lexer *Lexer) *Parser {
+	parser := &Parser{lexer, nil, LoggerWrapper{nil}}
+	return parser
+}
+
+func NewParserFromString(input string) *Parser {
+	lexer := NewLexerFromString(input)
+	return NewParser(lexer)
+}
+
+func NewParserFromFilename(filename string) (*Parser, error) {
+	lexer, err := NewLexerFromFileName(filename)
+	if err != nil {
+		return nil, err
+	}
+	return NewParser(lexer), nil
+}
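
As a usage note (not part of the diff), a minimal sketch of the new parser entry points from inside package muesli; the program text is made up and, per the commit message, a clean parse is not guaranteed yet:

package muesli

import "fmt"

func sketchParse() {
	parser := NewParserFromString("puts \"hello\"\n")
	ast := parser.Parse()
	if errs := ast.Errors(); len(errs) > 0 {
		for _, e := range errs {
			fmt.Println("parse error:", e.String())
		}
		return
	}
	ast.Display()
}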

+ 49 - 0
parser_test.go

@@ -0,0 +1,49 @@
+package muesli
+
+import (
+	_ "strings"
+	"testing"
+)
+
+func HelperFailOnErrors(ast *Ast, expected int, test *testing.T) {
+	if ast == nil {
+		test.Errorf("Parse failed, %d parse errors expected", expected)
+	}
+	errors := ast.Errors()
+	if len(errors) != expected {
+		test.Log("Parse errors:\n")
+		for _, err := range errors {
+			test.Logf("%s\n", err.String())
+		}
+		test.Errorf("There were %d parse errors, %d expected", len(errors), expected)
+	}
+}
+
+func HelperParseAndFailOnErrors(prog string, expected int,
+	parsefunc func(*Parser) *Ast, test *testing.T) {
+	parser := NewParserFromString(prog)
+	/* parser.SetLogger(testLogger{}) */
+	ast := parsefunc(parser)
+	HelperFailOnErrors(ast, expected, test)
+}
+
+/*
+func TestParser(test *testing.T) {
+	com := `puts "hello"
+`
+	// say ( add 5 10 ) .`
+	parser := NewParserFromString(com)
+	ast := parser.Parse()
+	ast.Display()
+}
+
+func TestParser2(test *testing.T) {
+	com := `puts "hello"
+	say ( add 5 10 ) .`
+	HelperParseAndFailOnErrors(com, 0, (*Parser).Parse, test)
+}
+
+func TestParenthesis(test *testing.T) {
+	HelperParseAndFailOnErrors(`( add 5 10 )`, 0, (*Parser).ParseParenthesis, test)
+}
+*/

+ 8 - 4
token.go

@@ -18,7 +18,7 @@ const (
 	TokenKindInteger    = TokenKind('i')
 	TokenKindFloat      = TokenKind('f')
 	TokenKindString     = TokenKind('s')
-    TokenKindSymbol     = TokenKind('S')
+	TokenKindSymbol     = TokenKind('S')
 	TokenKindBoolean    = TokenKind('b')
 	TokenKindWord       = TokenKind('w')
 	TokenKindType       = TokenKind('t')
@@ -41,7 +41,7 @@ var TokenKindNames map[TokenKind]string = map[TokenKind]string{
 	TokenKindFloat:      "Float",
 	TokenKindSymbol:     "Symbol",
 	TokenKindString:     "String",
-    TokenKindBoolean:    "Boolean",
+	TokenKindBoolean:    "Boolean",
 	TokenKindWord:       "Word",
 	TokenKindType:       "Type",
 	TokenKindGet:        "Get",
@@ -73,13 +73,17 @@ type Token struct {
 }
 
 func (token Token) String() string {
+	if token.Value == nil {
+		return fmt.Sprintf("<%s:nil:%v>", token.TokenKind.String(), token.Position)
+
+	}
 	return fmt.Sprintf("<%s:%q:%v>", token.TokenKind.String(),
-		token.Value, token.Position)
+		token.Value.String(), token.Position)
 }
 
 func (token Token) Error() string {
 	if token.TokenKind == TokenKindError {
-		return token.Value.(string)
+		return token.Value.String()
 	}
 	return "No error"
 }
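
As a usage note (not part of the diff), a minimal sketch of constructing and printing a Token with the updated String and Error methods; the Position type name and its exported FileName, Line and Column fields are assumptions based on how they are used elsewhere in this commit:

package muesli

import "fmt"

func sketchToken() {
	pos := Position{FileName: "<sketch>", Line: 1, Column: 1}
	tok := NewToken(TokenKindInteger, IntValue(7), pos)
	fmt.Println(tok.String()) // something like <Integer:"7":...>
	fmt.Println(tok.Error())  // "No error" for non-error tokens
}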

+ 57 - 6
value.go

@@ -1,11 +1,13 @@
 package muesli
 
+import "fmt"
+
 /* Run time values */
 type Value interface {
+	String() string
 }
 
 type IntValue int64
-
 type FloatValue float64
 
 type StringValue string
@@ -13,21 +15,70 @@ type StringValue string
 type BoolValue bool
 
 const (
-    TrueValue    = BoolValue(true)
-    FalseValue   = BoolValue(false)
+	TrueValue  = BoolValue(true)
+	FalseValue = BoolValue(false)
 )
 
-var     NilValue = Value(nil)
+var NilValue = Value(nil)
 
 type WordValue string
 
 type TypeValue string
 
-type ErrorValue error
+type ErrorValue struct {
+	error
+}
 
-type AnyValue struct {
+type EmptyValue struct {
 }
 
 type ListValue struct {
 	List []Value
 }
+
+func (val IntValue) String() string {
+	return fmt.Sprintf("%d", int64(val))
+}
+
+func (val FloatValue) String() string {
+	return fmt.Sprintf("%f", float64(val))
+}
+
+func (val BoolValue) String() string {
+	if bool(val) {
+		return "true"
+	} else {
+		return "false"
+	}
+}
+
+func (val StringValue) String() string {
+	return string(val)
+}
+
+func (val WordValue) String() string {
+	return string(val)
+}
+
+func (val TypeValue) String() string {
+	return string(val)
+}
+
+func (val ErrorValue) String() string {
+	return fmt.Sprintf("%s", val.Error())
+}
+
+func (val EmptyValue) String() string {
+	return "<empty>"
+}
+
+func (val ListValue) String() string {
+	res := "["
+	sep := ""
+	for _, elt := range val.List {
+		res = res + sep + elt.String()
+		sep = ", "
+	}
+	res += "]"
+	return res
+}
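
As a usage note (not part of the diff), a minimal sketch exercising the new Value.String implementations, including the list formatting defined above:

package muesli

import "fmt"

func sketchValues() {
	list := ListValue{List: []Value{IntValue(1), FloatValue(2.5), StringValue("x"), TrueValue}}
	fmt.Println(list.String()) // [1, 2.500000, x, true]
	fmt.Println(ErrorValue{fmt.Errorf("boom")}.String()) // boom
	fmt.Println(EmptyValue{}.String())                   // <empty>
}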