
Parser for Raku scripting basically works. Also implemented an intrusive tree package to support the AST, and a graphviz package for visualisation of the AST. A rough sketch of how the three fit together follows the changed-files list below.

Beoran, 7 years ago
commit 69df157d89
6 changed files with 1097 additions and 122 deletions
  1. graphviz/graphviz.go (+165, -0)
  2. graphviz/graphviz_test.go (+15, -0)
  3. raku/raku.go (+522, -66)
  4. raku/raku_test.go (+132, -0)
  5. tree/tree.go (+135, -43)
  6. tree/tree_test.go (+128, -13)
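
A minimal sketch of how the three packages combine (assuming the github.com/beoran/woe import paths used in the diffs below; the script text is illustrative, and Dotty() shells out to graphviz's dotty tool, which must be installed):

package main

import (
	"github.com/beoran/woe/raku"
	"github.com/beoran/woe/tree"
)

func main() {
	// NewParserForText starts the lexer in a goroutine and sets up the
	// root AST node; ParseProgram then drives the recursive descent parser.
	parser := raku.NewParserForText("say \"hello world\".\n")
	if parser.ParseProgram() {
		tree.Display(parser.Ast) // walk the AST, printing every node
		parser.Ast.Dotty()       // visualise the AST with dotty(1)
	}
}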

+ 165 - 0
graphviz/graphviz.go

@@ -0,0 +1,165 @@
+// graphviz
+package graphviz
+
+import (
+	"fmt"
+	"io"
+	"io/ioutil"
+	"os"
+	"os/exec"
+	"strings"
+)
+
+var replacer *strings.Replacer
+
+func init() {
+	replacer = strings.NewReplacer("\n", "\\n", "\r", "\\r", "\t", "\\t")
+}
+
+type Attributes map[string]string
+
+func NewAttributes(attributes ...string) Attributes {
+	me := make(Attributes)
+	for i := 1; i < len(attributes); i += 2 {
+		key := attributes[i-1]
+		value := replacer.Replace(attributes[i])
+		me[key] = value
+	}
+	return me
+}
+
+type Node struct {
+	Attributes
+	ID string
+}
+
+func NewNode(id string, attributes ...string) *Node {
+	me := &Node{}
+	me.ID = id
+	me.Attributes = NewAttributes(attributes...)
+	return me
+}
+
+func (me Attributes) WriteTo(out io.Writer) {
+	comma := false
+	if len(me) > 0 {
+		fmt.Fprintf(out, "[")
+		for k, v := range me {
+			if comma {
+				fmt.Fprintf(out, ",")
+			}
+			fmt.Fprintf(out, "%s=\"%s\"", k, v)
+			comma = true
+		}
+		fmt.Fprintf(out, "]")
+	}
+}
+
+func (me Attributes) WriteForGraphTo(out io.Writer) {
+	if len(me) > 0 {
+		for k, v := range me {
+			fmt.Fprintf(out, "%s=\"%s\";\n", k, v)
+		}
+	}
+}
+
+func (me *Node) WriteTo(out io.Writer) {
+	fmt.Fprintf(out, "%s", me.ID)
+	me.Attributes.WriteTo(out)
+	fmt.Fprintf(out, ";\n")
+}
+
+type Edge struct {
+	Attributes
+	From *Node
+	To   *Node
+}
+
+func NewEdge(from, to *Node, attributes ...string) *Edge {
+	me := &Edge{}
+	me.From = from
+	me.To = to
+	me.Attributes = NewAttributes(attributes...)
+	return me
+}
+
+func (me *Edge) WriteTo(out io.Writer) {
+	if (me.From != nil) && (me.To != nil) {
+		fmt.Fprintf(out, "%s -> %s ", me.From.ID, me.To.ID)
+		me.Attributes.WriteTo(out)
+		fmt.Fprintf(out, ";\n")
+	}
+}
+
+type Digraph struct {
+	Attributes
+	nodes []*Node
+	edges []*Edge
+}
+
+func NewDigraph(attributes ...string) *Digraph {
+	me := &Digraph{}
+	me.Attributes = NewAttributes(attributes...)
+	return me
+}
+
+func (me *Digraph) AddNode(id string, attributes ...string) *Node {
+	node := NewNode(id, attributes...)
+	me.nodes = append(me.nodes, node)
+	return node
+}
+
+func (me *Digraph) AddEdge(from, to *Node, attributes ...string) *Edge {
+	edge := NewEdge(from, to, attributes...)
+	me.edges = append(me.edges, edge)
+	return edge
+}
+
+func (me *Digraph) FindNode(id string) *Node {
+	/* XXX stupid linear search for now... */
+	for _, node := range me.nodes {
+		if node.ID == id {
+			return node
+		}
+	}
+	return nil
+}
+
+func (me *Digraph) AddEdgeByName(from, to string, attributes ...string) *Edge {
+	node_from := me.FindNode(from)
+	node_to := me.FindNode(to)
+	return me.AddEdge(node_from, node_to, attributes...)
+}
+
+func (me *Digraph) WriteTo(out io.Writer) {
+	fmt.Fprintf(out, "digraph {\n")
+	me.Attributes.WriteForGraphTo(out)
+	for _, node := range me.nodes {
+		node.WriteTo(out)
+	}
+	for _, edge := range me.edges {
+		edge.WriteTo(out)
+	}
+	fmt.Fprintf(out, "\n}\n")
+}
+
+func (me *Digraph) Dotty() error {
+	file, err := ioutil.TempFile("", "woe_gv_")
+	if err != nil {
+		return err
+	}
+
+	me.WriteTo(file)
+	name := file.Name()
+	file.Close()
+
+	cmd := exec.Command("dotty", name)
+	cmd.Stderr = os.Stderr
+	cmd.Stdout = os.Stdout
+	// Run() already waits for the command to finish, so no extra Wait() is needed.
+	err = cmd.Run()
+	if err != nil {
+		return err
+	}
+	return nil
+}
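
As a sketch of the DOT text the WriteTo methods above emit (node names and attributes here are illustrative; the order of attributes inside [...] varies between runs, since Attributes is a map):

package main

import (
	"os"

	"github.com/beoran/woe/graphviz"
)

func main() {
	g := graphviz.NewDigraph("rankdir", "LR")
	foo := g.AddNode("foo", "label", "FOO")
	bar := g.AddNode("bar")
	g.AddEdge(foo, bar, "color", "gray")
	g.WriteTo(os.Stdout)
	// Prints, modulo attribute ordering:
	//
	// digraph {
	// rankdir="LR";
	// foo[label="FOO"];
	// bar;
	// foo -> bar [color="gray"];
	//
	// }
}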

+ 15 - 0
graphviz/graphviz_test.go

@@ -0,0 +1,15 @@
+package graphviz
+
+import (
+	"testing"
+)
+
+func TestShow(test *testing.T) {
+	g := NewDigraph("bgcolor", "pink")
+	n_foo := g.AddNode("foo", "color", "red", "label", "FOO\nFOO")
+	n_bar := g.AddNode("bar", "color", "green")
+	_ = g.AddNode("baz", "color", "yellow")
+	g.AddEdge(n_foo, n_bar, "color", "gray")
+	_ = g.AddEdgeByName("foo", "baz", "color", "magenta")
+	g.Dotty()
+}

+ 522 - 66
raku/raku.go

@@ -39,10 +39,10 @@ import (
 	"io"
 	"reflect"
 	"runtime"
-	"sort"
 	"strings"
 	"unicode"
 
+	"github.com/beoran/woe/graphviz"
 	"github.com/beoran/woe/monolog"
 	"github.com/beoran/woe/tree"
 )
@@ -57,10 +57,10 @@ type Position struct {
 }
 
 const (
-	TokenEOS          TokenType = TokenType('.')
+	TokenPeriod       TokenType = TokenType('.')
 	TokenComma        TokenType = TokenType(',')
-	TokenSemicolumn   TokenType = TokenType(';')
-	TokenColumn       TokenType = TokenType(':')
+	TokenSemicolon    TokenType = TokenType(';')
+	TokenColon        TokenType = TokenType(':')
 	TokenOpenParen    TokenType = TokenType('(')
 	TokenCloseParen   TokenType = TokenType(')')
 	TokenOpenBrace    TokenType = TokenType('{')
@@ -68,16 +68,23 @@ const (
 	TokenOpenBracket  TokenType = TokenType('[')
 	TokenCloseBracket TokenType = TokenType(']')
 
-	TokenNone     TokenType = 0
-	TokenError    TokenType = -1
-	TokenWord     TokenType = -2
-	TokenEOL      TokenType = -3
-	TokenEOF      TokenType = -4
-	TokenNumber   TokenType = -5
-	TokenOperator TokenType = -6
-	TokenString   TokenType = -7
-	TokenKeyword  TokenType = -8
-	TokenLast     TokenType = -9
+	TokenNone         TokenType = 0
+	TokenError        TokenType = -1
+	TokenWord         TokenType = -2
+	TokenEOL          TokenType = -3
+	TokenEOF          TokenType = -4
+	TokenNumber       TokenType = -5
+	TokenOperator     TokenType = -6
+	TokenString       TokenType = -7
+	TokenSymbol       TokenType = -8
+	TokenFirstKeyword TokenType = -9
+	TokenKeywordA     TokenType = -10
+	TokenKeywordDo    TokenType = -11
+	TokenKeywordEnd   TokenType = -12
+	TokenKeywordThe   TokenType = -13
+	TokenKeywordTo    TokenType = -14
+	TokenLastKeyword  TokenType = -15
+	TokenLast         TokenType = -15
 )
 
 type Token struct {
@@ -86,30 +93,57 @@ type Token struct {
 	Position
 }
 
-var tokenTypeNames []string = []string{
-	"TokenNone", "TokenError", "TokenWord", "TokenEOL", "TokenEOF", "TokenNumber", "TokenOperator", "TokenString", "TokenKeyword",
-}
-
-var keywordList []string = []string{
-	"a", "do", "end", "the", "to",
+var tokenTypeMap map[TokenType]string = map[TokenType]string{
+	TokenNone:       "TokenNone",
+	TokenError:      "TokenError",
+	TokenWord:       "TokenWord",
+	TokenEOL:        "TokenEOL",
+	TokenEOF:        "TokenEOF",
+	TokenNumber:     "TokenNumber",
+	TokenOperator:   "TokenOperator",
+	TokenString:     "TokenString",
+	TokenSymbol:     "TokenSymbol",
+	TokenKeywordA:   "TokenKeywordA",
+	TokenKeywordDo:  "TokenKeywordDo",
+	TokenKeywordEnd: "TokenKeywordEnd",
+	TokenKeywordThe: "TokenKeywordThe",
+	TokenKeywordTo:  "TokenKeywordTo",
+}
+
+var keywordMap map[string]TokenType = map[string]TokenType{
+	"a":   TokenKeywordA,
+	"do":  TokenKeywordDo,
+	"end": TokenKeywordEnd,
+	"the": TokenKeywordThe,
+	"to":  TokenKeywordTo,
+}
+
+var sigilMap map[string]TokenType = map[string]TokenType{
+	"[": TokenOpenBracket,
+	"{": TokenOpenBrace,
+	"(": TokenOpenParen,
+	"]": TokenCloseBracket,
+	"}": TokenCloseBrace,
+	")": TokenCloseParen,
 }
 
 func (me TokenType) String() string {
-	if int(me) > 0 {
-		return fmt.Sprintf("Token %c", rune(me))
-	} else if me > TokenLast {
-		return tokenTypeNames[-int(me)]
+	name, found := tokenTypeMap[me]
+	if found {
+		return name
 	} else {
+		if (me > 0) && (me < 256) {
+			return fmt.Sprintf("TokenChar<%c>", byte(me))
+		}
 		return fmt.Sprintf("Unknown Token %d", int(me))
 	}
-
 }
 
 func (me Token) String() string {
 	return fmt.Sprintf("Token: %s >%s< %d %d %d.", me.TokenType, string(me.Value), me.Index, me.Row, me.Column)
 }
 
-type TokenChannel chan Token
+type TokenChannel chan *Token
 
 type Lexer struct {
 	Reader  io.Reader
@@ -125,7 +159,7 @@ type Lexer struct {
 type LexerRule func(lexer *Lexer) LexerRule
 
 func (me *Lexer) Emit(t TokenType, v Value) {
-	tok := Token{t, v, me.Current}
+	tok := &Token{t, v, me.Current}
 	me.Output <- tok
 }
 
@@ -151,24 +185,39 @@ func (me *Lexer) SkipComment() bool {
 	return true
 }
 
-func IsKeyword(word string) bool {
-	i := sort.SearchStrings(keywordList, word)
-	if i >= len(keywordList) {
-		return false
-	}
-	return word == keywordList[i]
+/* Returns whether or not a keyword was found, and if so, the TokenType
+of the keyword.*/
+func LookupKeyword(word string) (bool, TokenType) {
+	kind, found := keywordMap[word]
+	return found, kind
+}
+
+/* Returns whether or not a special operator or sigil was found, and if so,
+returns the TokenType of the sigil. */
+func LookupSigil(sigil string) (bool, TokenType) {
+	fmt.Printf("LookupSigil: %s\n", sigil)
+	kind, found := sigilMap[sigil]
+	return found, kind
 }
 
 func LexWord(me *Lexer) LexerRule {
 	me.SkipNotIn(" \t\r\n'")
-	if IsKeyword(me.CurrentStringValue()) {
-		me.Found(TokenKeyword)
+
+	iskw, kind := LookupKeyword(me.CurrentStringValue())
+	if iskw {
+		me.Found(kind)
 	} else {
 		me.Found(TokenWord)
 	}
 	return LexNormal
 }
 
+func LexSymbol(me *Lexer) LexerRule {
+	me.SkipNotIn(" \t\r\n'")
+	me.Found(TokenSymbol)
+	return LexNormal
+}
+
 func LexNumber(me *Lexer) LexerRule {
 	me.SkipNotIn(" \tBBBT\r\n")
 	me.Found(TokenNumber)
@@ -203,7 +252,12 @@ func LexEOL(me *Lexer) LexerRule {
 
 func LexOperator(me *Lexer) LexerRule {
 	me.SkipNotIn(" \t\r\n")
-	me.Found(TokenOperator)
+	issig, kind := LookupSigil(me.CurrentStringValue())
+	if issig {
+		me.Found(kind)
+	} else {
+		me.Found(TokenOperator)
+	}
 	return LexNormal
 }
 
@@ -250,6 +304,8 @@ func LexNormal(me *Lexer) LexerRule {
 		return LexWhitespace
 	} else if strings.ContainsRune(".,;:", peek) {
 		return LexPunctuator
+	} else if strings.ContainsRune("$", peek) {
+		return LexSymbol
 	} else if strings.ContainsRune("\r\n", peek) {
 		return LexEOL
 	} else if strings.ContainsRune("+-", peek) {
@@ -287,7 +343,6 @@ func (me *Lexer) ReadReaderOnce() (bool, error) {
 	}
 
 	if err == io.EOF {
-		me.Emit(TokenEOF, "")
 		return true, nil
 	} else if err != nil {
 		me.Error("Error reading from reader: %s", err)
@@ -328,7 +383,7 @@ func (me *Lexer) Next() rune {
 	}
 	me.Current.Index++
 	if me.Current.Index >= len(me.runes) {
-		me.Emit(TokenEOF, "")
+		//me.Emit(TokenEOF, "")
 	}
 	return me.Peek()
 }
@@ -439,7 +494,8 @@ const (
 	AstTypeExpression
 	AstTypeWordExpression
 	AstTypeWordCallop
-	AstTypeWordOperation
+	AstTypeOperation
+	AstTypeOperations
 	AstTypeWordCall
 	AstTypeValueExpression
 	AstTypeValueCallop
@@ -448,14 +504,52 @@ const (
 	AstTypeParameters
 	AstTypeParameter
 	AstTypeBlock
-	AstTypeWordvalue
+	AstTypeWordValue
+	AstTypeWord
 	AstTypeValue
 	AstTypeEox
+	AstTypeOperator
 	AstTypeError
 )
 
+var astTypeMap map[AstType]string = map[AstType]string{
+	AstTypeProgram:            "AstTypeProgram",
+	AstTypeStatements:         "AstTypeStatements",
+	AstTypeStatement:          "AstTypeStatement",
+	AstTypeDefinition:         "AstTypeDefinition",
+	AstTypeWords:              "AstTypeWords",
+	AstTypeExpression:         "AstTypeExpression",
+	AstTypeWordExpression:     "AstTypeWordExpression",
+	AstTypeWordCallop:         "AstTypeWordCallop",
+	AstTypeOperation:          "AstTypeOperation",
+	AstTypeOperations:         "AstTypeOperations",
+	AstTypeWordCall:           "AstTypeWordCall",
+	AstTypeValueExpression:    "AstTypeValueExpression",
+	AstTypeValueCallop:        "AstTypeValueCallop",
+	AstTypeValueCall:          "AstTypeValueCall",
+	AstTypeParametersNonempty: "AstTypeParametersNonempty",
+	AstTypeParameters:         "AstTypeParameters",
+	AstTypeParameter:          "AstTypeParameter",
+	AstTypeBlock:              "AstTypeBlock",
+	AstTypeWordValue:          "AstTypeWordValue",
+	AstTypeWord:               "AstTypeWord",
+	AstTypeValue:              "AstTypeValue",
+	AstTypeEox:                "AstTypeEox",
+	AstTypeOperator:           "AstTypeOperator",
+	AstTypeError:              "AstTypeError",
+}
+
+func (me AstType) String() string {
+	name, found := astTypeMap[me]
+	if found {
+		return name
+	} else {
+		return fmt.Sprintf("Unknown AstType %d", int(me))
+	}
+}
+
 type Ast struct {
-	*tree.Node
+	tree.Node
 	AstType
 	*Token
 }
@@ -464,56 +558,419 @@ func (me *Ast) NewChild(kind AstType, token *Token) *Ast {
 	child := &Ast{}
 	child.AstType = kind
 	child.Token = token
-	child.Node = me.Node.NewChild(child)
+	tree.AppendChild(me, child)
 	return child
 }
 
 func (me *Ast) Walk(walker func(ast *Ast) *Ast) *Ast {
-	node_res := me.Node.Walk(
-		func(node *tree.Node) *tree.Node {
-			ast_res := walker(node.Data.(*Ast))
+	node_res := tree.Walk(me,
+		func(node tree.Noder) tree.Noder {
+			ast_res := walker(node.(*Ast))
 			if ast_res == nil {
 				return nil
 			} else {
-				return ast_res.Node
+				return ast_res
 			}
 		})
-	return node_res.Data.(*Ast)
+	if node_res != nil {
+		return node_res.(*Ast)
+	} else {
+		return nil
+	}
+}
+
+func (me *Ast) Remove() {
+	_ = tree.Remove(me)
 }
 
 func NewAst(kind AstType) *Ast {
 	ast := &Ast{}
-	ast.Node = tree.New(nil, ast)
 	ast.AstType = kind
 	ast.Token = nil
 	return ast
 }
 
+type ParseAction func(parser *Parser) bool
+
+type RuleType int
+
+const (
+	RuleTypeNone = RuleType(iota)
+	RuleTypeAlternate
+	RuleTypeSequence
+)
+
+type Rule struct {
+	tree.Node
+	Name string
+	RuleType
+	ParseAction
+}
+
+func NewRule(name string, ruty RuleType) *Rule {
+	res := &Rule{}
+	res.RuleType = ruty
+	res.Name = name
+	return res
+}
+
+func (me *Rule) NewChild(action ParseAction) *Rule {
+	child := NewRule("foo", RuleTypeNone) /* XXX placeholder name */
+	child.ParseAction = action
+	tree.AppendChild(me, child)
+	return child
+}
+
+func (me *Rule) Walk(walker func(rule *Rule) *Rule) *Rule {
+	node_res := tree.Walk(me,
+		func(node tree.Noder) tree.Noder {
+			rule_res := walker(node.(*Rule))
+			if rule_res == nil {
+				return nil
+			} else {
+				return rule_res
+			}
+		})
+	return node_res.(*Rule)
+}
+
 type Parser struct {
 	*Ast
 	*Lexer
+	now       *Ast
+	lookahead *Token
 }
 
-func (me *Parser) ParseDefinition() {
-	/*
-		ParseWords()
-		ParseBlock()
-	*/
+func (me *Parser) SetupRules() {
+
 }
 
-func (me *Parser) ParseProgram() {
-	me.Ast = NewAst(AstTypeProgram)
-	token := <-me.Lexer.Output
-	switch token.TokenType {
-	case TokenKeyword:
-		if token.Value == "to" {
-			me.ParseDefinition()
-			return
+func (me *Parser) Expect(types ...TokenType) bool {
+	fmt.Print("Expecting: ", types, " from ", me.now.AstType, " have ", me.LookaheadType(), " \n")
+	for _, t := range types {
+		if me.LookaheadType() == t {
+			fmt.Print("Found: ", t, "\n")
+			return true
+		}
+	}
+	fmt.Print("Not found.\n")
+	return false
+}
+
+type Parsable interface {
+	isParsable()
+}
+
+func (me TokenType) isParsable() {
+}
+
+func (me ParseAction) isParsable() {
+}
+
+/* Advance the lexer, but only if there is no lookahead token already available in me.lookahead.
+ */
+func (me *Parser) Advance() *Token {
+	if me.lookahead == nil {
+		me.lookahead = <-me.Lexer.Output
+	}
+	return me.lookahead
+}
+
+func (me *Parser) DropLookahead() {
+	me.lookahead = nil
+}
+
+func (me *Parser) Lookahead() *Token {
+	return me.lookahead
+}
+
+func (me *Parser) LookaheadType() TokenType {
+	if me.lookahead == nil {
+		return TokenError
+	}
+	return me.Lookahead().TokenType
+}
+
+func (me *Parser) Consume(atyp AstType, types ...TokenType) bool {
+	me.Advance()
+	res := me.Expect(types...)
+	if res {
+		me.NewAstChild(atyp)
+		me.DropLookahead()
+	}
+	return res
+}
+
+/*
+func (me *Parser) OneOf(restype AstType, options ...Parsable) bool {
+	res := false
+	for _, v := range options {
+		switch option := v.(type) {
+		case TokenType:
+			res = me.Consume(restype, option)
+		case ParseAction:
+			res = option(me)
 		}
-		fallthrough
+	}
+	return res
+}
+*/
+
+func (me *Parser) ParseEOX() bool {
+	return me.Consume(AstTypeEox, TokenEOL, TokenPeriod)
+}
+
+func (me *Parser) ParseValue() bool {
+	return me.Consume(AstTypeValue, TokenString, TokenNumber, TokenSymbol)
+}
+
+func (me *Parser) ParseWord() bool {
+	return me.Consume(AstTypeWord, TokenWord)
+}
+
+func (me *Parser) ParseWordValue() bool {
+	me.NewAstChildDescend(AstTypeWordValue)
+	res := me.ParseValue() || me.ParseWord()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseParameter() bool {
+	me.NewAstChildDescend(AstTypeParameter)
+	res := me.ParseWordValue() || me.ParseBlock()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseParametersNonempty() bool {
+	res := false
+	for me.ParseParameter() {
+		res = true
+	}
+	return res
+}
+
+func (me *Parser) ParseParameters() bool {
+	me.NewAstChildDescend(AstTypeParameters)
+	_ = me.ParseParametersNonempty()
+	me.AstAscend(true)
+	return true
+}
+
+func (me *Parser) ParseWordCall() bool {
+	me.NewAstChildDescend(AstTypeWordCall)
+	res := me.ParseParameters() && me.ParseEOX()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseOperator() bool {
+	return me.Consume(AstTypeOperator, TokenOperator)
+}
+
+func (me *Parser) ParseOperation() bool {
+	me.NewAstChildDescend(AstTypeOperation)
+	res := me.ParseOperator() && me.ParseParametersNonempty()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseOperations() bool {
+	me.NewAstChildDescend(AstTypeOperations)
+	res := me.ParseOperation()
+	for me.ParseOperation() {
+	}
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseWordCallOp() bool {
+	me.NewAstChildDescend(AstTypeWordCallop)
+	res := me.ParseWordCall() || me.ParseOperations()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseWordExpression() bool {
+	me.NewAstChildDescend(AstTypeWordExpression)
+	res := me.ParseWord() && me.ParseWordCallOp()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseValueCall() bool {
+	me.NewAstChildDescend(AstTypeValueCall)
+	res := me.ParseParameters() && me.ParseEOX()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseValueCallOp() bool {
+	me.NewAstChildDescend(AstTypeValueCallop)
+	res := me.ParseValueCall() || me.ParseOperations()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseValueExpression() bool {
+	me.NewAstChildDescend(AstTypeValueExpression)
+	res := me.ParseValue() && me.ParseValueCallOp()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) NewAstChild(tyty AstType) *Ast {
+	return me.now.NewChild(tyty, me.lookahead)
+}
+
+func (me *Parser) NewAstChildDescend(tyty AstType) {
+	node := me.NewAstChild(tyty)
+	me.now = node
+}
+
+func (me *Parser) AstAscend(keep bool) {
+	if me.now.Parent() != nil {
+		now := me.now
+		me.now = now.Parent().(*Ast)
+		if !keep {
+			now.Remove()
+		}
+	}
+}
+
+func (me TokenType) CloseForOpen() (TokenType, bool) {
+	switch me {
+	case TokenOpenBrace:
+		return TokenCloseBrace, true
+	case TokenOpenBracket:
+		return TokenCloseBracket, true
+	case TokenOpenParen:
+		return TokenCloseParen, true
+	case TokenKeywordDo:
+		return TokenKeywordEnd, true
 	default:
-		me.Ast.NewChild(AstTypeError, &token)
+		return TokenError, false
+	}
+
+}
+
+func (me *Parser) ParseBlock() bool {
+	me.Advance()
+	open := me.LookaheadType()
+	done, ok := open.CloseForOpen()
+	if !ok {
+		/* Not an opening of a block, so no block found. */
+		return false
+	}
+	me.DropLookahead()
+	me.NewAstChildDescend(AstTypeBlock)
+	res := me.ParseStatements()
+	me.AstAscend(res)
+	if res {
+		me.Advance()
+		if me.LookaheadType() != done {
+			return me.ParseError()
+		}
+		me.DropLookahead()
 	}
+	return res
+}
+
+func (me *Parser) ParseWords() bool {
+	me.NewAstChildDescend(AstTypeWords)
+	res := me.ParseWord()
+	for me.ParseWord() {
+	}
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseDefinition() bool {
+	me.Advance()
+	res := me.Consume(AstTypeDefinition, TokenKeywordTo)
+	if !res {
+		return false
+	}
+	res = res && me.ParseWords()
+	if !res {
+		_ = me.ParseError()
+	}
+	res = res && me.ParseBlock()
+	if !res {
+		_ = me.ParseError()
+	}
+	me.AstAscend(true)
+	return res
+}
+
+func (me *Parser) ParseError() bool {
+	me.now.NewChild(AstTypeError, me.lookahead)
+	fmt.Printf("Parse error: at %s\n", me.lookahead)
+	return false
+}
+
+func (me *Parser) ParseExpression() bool {
+	return me.ParseWordExpression() || me.ParseValueExpression()
+}
+
+func (me *Parser) ParseStatement() bool {
+
+	me.NewAstChildDescend(AstTypeStatement)
+	/* First case is for an empty expression/statement. */
+	res := me.ParseEOX() || me.ParseDefinition() || me.ParseExpression() || me.ParseBlock()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseEOF() bool {
+	return me.Consume(AstTypeEox, TokenEOF)
+}
+
+func (me *Parser) ParseStatements() bool {
+	me.NewAstChildDescend(AstTypeStatements)
+	res := me.ParseStatement()
+
+	for me.ParseStatement() {
+	}
+
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseProgram() bool {
+	return me.ParseStatements() && me.ParseEOF()
+}
+
+func NewParserForLexer(lexer *Lexer) *Parser {
+	me := &Parser{}
+	me.Ast = NewAst(AstTypeProgram)
+	me.now = me.Ast
+	me.Lexer = lexer
+	me.Ast.Token = &Token{}
+	go me.Lexer.Start()
+	return me
+}
+
+func NewParserForText(text string) *Parser {
+	lexer := OpenLexer(strings.NewReader(text))
+	return NewParserForLexer(lexer)
+}
+
+func (me *Ast) DotID() string {
+	return fmt.Sprintf("ast_%p", me)
+}
+
+func (me *Ast) Dotty() {
+	g := graphviz.NewDigraph("rankdir", "LR")
+	me.Walk(func(ast *Ast) *Ast {
+		label := ast.AstType.String()
+		if ast.Token != nil {
+			label = label + "\n" + ast.Token.String()
+		}
+		g.AddNode(ast.DotID(), "label", label)
+		if ast.Parent() != nil {
+			g.AddEdgeByName(ast.Parent().(*Ast).DotID(), ast.DotID())
+		}
+		return nil
+	})
+	g.Dotty()
 }
 
 /*
@@ -525,15 +982,14 @@ WORDS -> word WORDS | .
 EXPRESSION -> WORD_EXPRESSION | VALUE_EXPRESSION.
 WORD_EXPRESSION -> word WORD_CALLOP.
 WORD_CALLOP -> WORD_OPERATION | WORD_CALL.
-WORD_OPERATION -> operator PARAMETERS_NONEMPTY EOX.
+OPERATION -> operator PARAMETERS_NONEMPTY EOX.
 WORD_CALL -> PARAMETERS EOX.
 VALUE_EXPRESSION -> value VALUE_CALLOP.
 VALUE_CALLOP -> VALUE_OPERATION | VALUE_CALL.
-VALUE_OPERATION -> operator PARAMETERS_NONEMPTY EOX.
 VALUE_CALL -> EOX.
 PARAMETERS_NONEMPTY -> PARAMETER PARAMETERS.
 PARAMETERS -> PARAMETERS_NONEMPTY | .
-PARAMETER -> BLOCK | WORDVALUE .
+PARAMETER -> BLOCK | WORDVALUE | OPERATION.
 BLOCK -> ob STATEMENTS cb | op STATEMENTS cp | oa STATEMENTS ca | do STATEMENTS end.
 WORDVALUE -> word | VALUE.
 VALUE -> string | number | symbol.
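
The Parse* methods above all follow one lookahead protocol: Advance lazily pulls a single token from the lexer into the parser's one-token buffer, Expect tests the buffered token without consuming it, and Consume appends an AST child for it and clears the buffer. A minimal sketch (the input text is illustrative):

package main

import "github.com/beoran/woe/raku"

func main() {
	parser := raku.NewParserForText("say .\n")
	parser.Advance() // buffer one token from the lexer: TokenWord "say"
	if parser.Expect(raku.TokenWord) {
		// Appends an AstTypeWord child under the current AST node and
		// drops the lookahead, so the next Advance reads a fresh token.
		parser.Consume(raku.AstTypeWord, raku.TokenWord)
	}
}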

+ 132 - 0
raku/raku_test.go

@@ -4,6 +4,9 @@ package raku
 import (
 	"strings"
 	"testing"
+
+	_ "github.com/beoran/woe/monolog"
+	"github.com/beoran/woe/tree"
 )
 
 func HelperTryLexing(me *Lexer, test *testing.T) {
@@ -16,6 +19,13 @@ func HelperTryLexing(me *Lexer, test *testing.T) {
 	}
 }
 
+func Assert(test *testing.T, ok bool, text string) bool {
+	if !ok {
+		test.Error(text)
+	}
+	return ok
+}
+
 func TestLexing(test *testing.T) {
 	const input = `
 say "hello \"world\\"
@@ -32,3 +42,125 @@ end
 	HelperTryLexing(lexer, test)
 	test.Log("Hi test!")
 }
+
+func TestLexing2(test *testing.T) {
+	const input = `say`
+	lexer := OpenLexer(strings.NewReader(input))
+	HelperTryLexing(lexer, test)
+	test.Log("Hi test!")
+}
+
+func TestLexing3(test *testing.T) {
+	const input = `$sym`
+	lexer := OpenLexer(strings.NewReader(input))
+	HelperTryLexing(lexer, test)
+	test.Log("Hi test!")
+}
+
+func TestParseValue(test *testing.T) {
+	const input = `"hello \"world\\"`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseValue(), "Could not parse value")
+	tree.Display(parser.Ast)
+}
+
+func TestParseValue2(test *testing.T) {
+	const input = `2.1`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseValue(), "Could not parse value")
+	tree.Display(parser.Ast)
+}
+
+func TestParseValue3(test *testing.T) {
+	const input = `$sym`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseValue(), "Could not parse value")
+	tree.Display(parser.Ast)
+}
+
+func TestParseEox(test *testing.T) {
+	const input = `
+`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseEOX(), "Could not parse EOX")
+	tree.Display(parser.Ast)
+}
+
+func TestParseEox2(test *testing.T) {
+	const input = `.
+`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseEOX(), "Could not parse EOX")
+	tree.Display(parser.Ast)
+}
+
+func TestParseWord(test *testing.T) {
+	const input = `say`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseWord(), "Could not parse word")
+	tree.Display(parser.Ast)
+}
+
+func TestParseWordExpression(test *testing.T) {
+	const input = `say "hello world" three times
+	`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseWordExpression(), "Could not parse word expression")
+	tree.Display(parser.Ast)
+}
+
+func TestParseWordExpression2(test *testing.T) {
+	const input = `val + 10 * z
+	`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseWordExpression(), "Could not parse word expression with operators")
+	tree.Display(parser.Ast)
+}
+
+func TestParseStatements(test *testing.T) {
+	const input = `val + 10 * z. open door.
+	`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseStatements(), "Could not parse statements with only a parse word expression with operators")
+	tree.Display(parser.Ast)
+}
+
+func TestParseProgram(test *testing.T) {
+	const input = `val + 10 * z. open door.
+	`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseProgram(), "Could not parse program.")
+	tree.Display(parser.Ast)
+}
+
+func TestParseProgram2(test *testing.T) {
+	const input = `to greet someone [
+say "hello" someone
+]
+
+greet bob
+
+if mp < cost do
+	say "Not enough mana!"
+end else do
+	say "Zap!"
+end
+
+`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseProgram(), "Could not parse program.")
+	tree.Display(parser.Ast)
+}
+
+func TestParseBlock(test *testing.T) {
+	// monolog.Setup("raku_test.log", true, false)
+	const input = `[
+say "hello"
+say "world"
+]
+`
+	parser := NewParserForText(input)
+	Assert(test, parser.ParseBlock(), "Could not parse block.")
+	tree.Display(parser.Ast)
+	parser.Ast.Dotty()
+}

+ 135 - 43
tree/tree.go

@@ -1,81 +1,173 @@
 // tree project tree.go
-// a relativey simple recursive tree with an arbitrary amount of children on each level.
+// a relatively simple recursive intrusive tree with an arbitrary number of children on each level.
 package tree
 
+import "fmt"
+
+/* Embed a Node in the struct you want to use these functions with,
+and write an initializer maker func(...interface{}) Noder for use with
+the New* functions. Everything else works through the Noder interface.
+*/
 type Node struct {
-	Child  *Node
-	After  *Node
-	Before *Node
-	Parent *Node
-	Data   interface{}
+	Parent_ Noder
+	Child_  Noder
+	Before_ Noder
+	After_  Noder
+}
+
+type Noder interface {
+	Child() Noder
+	Parent() Noder
+	Before() Noder
+	After() Noder
+	SetChild(Noder) Noder
+	SetParent(Noder) Noder
+	SetBefore(Noder) Noder
+	SetAfter(Noder) Noder
+}
+
+func (me *Node) Child() Noder {
+	return me.Child_
+}
+
+func (me *Node) Parent() Noder {
+	return me.Parent_
+}
+
+func (me *Node) After() Noder {
+	return me.After_
 }
 
-func NewEmpty() *Node {
-	return &Node{}
+func (me *Node) Before() Noder {
+	return me.Before_
 }
 
-func New(parent *Node, data interface{}) *Node {
-	node := NewEmpty()
-	node.Parent = parent
-	node.Data = data
-	return node
+func (me *Node) SetChild(val Noder) Noder {
+	me.Child_ = val
+	return me.Child_
+}
+
+func (me *Node) SetParent(val Noder) Noder {
+	me.Parent_ = val
+	return me.Parent_
+}
+
+func (me *Node) SetAfter(val Noder) Noder {
+	me.After_ = val
+	return me.After_
+}
+
+func (me *Node) SetBefore(val Noder) Noder {
+	me.Before_ = val
+	return me.Before_
+}
+
+func NewNoder(parent Noder, maker func(...interface{}) Noder, args ...interface{}) Noder {
+	child := maker(args...)
+	child.SetParent(parent)
+	return child
 }
 
-func (me *Node) LastSibling() *Node {
-	res := me
-	for res != nil && res.After != nil {
-		res = res.After
+func LastSibling(me Noder) Noder {
+	var res Noder = me
+	for res != nil && res.After() != nil {
+		res = res.After()
 	}
 	return res
 }
 
-func (me *Node) InsertSibling(sibling *Node) *Node {
-	after := me.After
-	me.After = sibling
-	sibling.Before = me
-	sibling.After = after
-	sibling.Parent = me.Parent
+func LastChild(me Noder) Noder {
+	return LastSibling(me.Child())
+}
+
+/* Detaches, i.e. removes, this node and all its children from the parent tree. */
+func Remove(me Noder) Noder {
+	parent := me.Parent()
+	before := me.Before()
+	after := me.After()
+	if before != nil {
+		before.SetAfter(after)
+	}
+	if after != nil {
+		after.SetBefore(before)
+	}
+	if parent != nil {
+		/* Special case if me is the first child of it's parent. */
+		if me == parent.Child() {
+			parent.SetChild(after)
+		}
+	}
+	me.SetParent(nil)
+	return me
+}
+
+func InsertSibling(me, sibling Noder) Noder {
+	after := me.After()
+	me.SetAfter(sibling)
+	sibling.SetBefore(me)
+	sibling.SetAfter(after)
+	if after != nil {
+		after.SetBefore(sibling)
+	}
+	sibling.SetParent(me.Parent())
 	return sibling
 }
 
-func (me *Node) AppendSibling(sibling *Node) *Node {
-	return me.LastSibling().InsertSibling(sibling)
+func AppendSibling(me, sibling Noder) Noder {
+	return InsertSibling(LastSibling(me), sibling)
 }
 
-func (me *Node) AppendChild(child *Node) *Node {
-	child.Parent = me
-	if me.Child == nil {
-		me.Child = child
+func AppendChild(me, child Noder) Noder {
+	child.SetParent(me)
+	if me.Child() == nil {
+		me.SetChild(child)
 	} else {
-		me.Child.AppendSibling(child)
+		AppendSibling(me.Child(), child)
 	}
 	return child
 }
 
-func (me *Node) NewSibling(data interface{}) *Node {
-	node := New(me.Parent, data)
-	return me.AppendSibling(node)
+func NewSibling(me Noder, maker func(...interface{}) Noder, args ...interface{}) Noder {
+	node := NewNoder(me.Parent(), maker, args...)
+	return AppendSibling(me, node)
 }
 
-func (me *Node) NewChild(data interface{}) *Node {
-	node := New(me, data)
-	return me.AppendChild(node)
+func NewChild(me Noder, maker func(...interface{}) Noder, args ...interface{}) Noder {
+	node := NewNoder(me, maker, args...)
+	return AppendChild(me, node)
 }
 
-func (me *Node) Walk(walker func(me *Node) *Node) *Node {
-	node := me
-	if found := walker(node); found != nil {
+func Walk(me Noder, walker func(me Noder) Noder) Noder {
+	if found := walker(me); found != nil {
 		return found
 	}
-	if me.Child != nil {
-		if found := me.Child.Walk(walker); found != nil {
+	if me.Child() != nil {
+		if found := Walk(me.Child(), walker); found != nil {
 			return found
 		}
 	}
-	if me.After != nil {
-		if found := me.After.Walk(walker); found != nil {
+	if me.After() != nil {
+		if found := Walk(me.After(), walker); found != nil {
 			return found
 		}
 	}
 	return nil
 }
+
+func Display(me Noder) {
+	Walk(me, func(node Noder) Noder {
+		fmt.Printf("Tree: %v\n", node)
+		return nil
+	})
+}
+
+/*
+type Walker interface {
+	Walk(walker func(me *Node) *Node) *Node
+}
+
+
+func WalkWalker(walker Walker) {
+
+}
+*/

+ 128 - 13
tree/tree_test.go

@@ -2,49 +2,164 @@
 package tree
 
 import (
+	"fmt"
 	_ "strings"
 	"testing"
 )
 
+type StringTree struct {
+	Node
+	Data string
+}
+
+func NewStringTree(value string) *StringTree {
+	res := &StringTree{}
+	res.Data = value
+	res.SetParent(nil)
+	return res
+}
+
+func InitStringTree(args ...interface{}) Noder {
+	str := args[0].(string)
+	return NewStringTree(str)
+}
+
+func TestSibling(test *testing.T) {
+	n1 := NewStringTree("s1")
+	n2 := NewStringTree("s2")
+	n3 := NewStringTree("s3")
+	AppendSibling(n1, n2)
+	AppendSibling(n1, n3)
+	NewSibling(n1, InitStringTree, "s4")
+	Display(n1)
+}
+
 func TestNode(test *testing.T) {
-	tree := New(nil, "root")
+	tree := NewStringTree("root")
 	s1 := "l1 c1"
 	s2 := "l1 c2"
 	s3 := "l1 c3"
 	s4 := "l2 c1"
-	l1c1 := tree.NewChild(s1)
-	l1c2 := tree.NewChild(s2)
-	l1c3 := tree.NewChild(s3)
-	l2c1 := l1c1.NewChild(s4)
+	l1c1 := NewChild(tree, InitStringTree, s1)
+
+	if tree.Child() != l1c1 {
+		test.Errorf("Child() %v: %v<->%v", tree, tree.Child(), l1c1)
+	}
 
-	if l1c1.Data != s1 {
+	if LastChild(tree) != l1c1 {
+		test.Errorf("LastChild() %v: %v<->%v", tree, LastChild(tree), l1c1)
+	}
+
+	l1c2 := NewChild(tree, InitStringTree, s2)
+
+	if tree.Child() != l1c1 {
+		test.Errorf("Child() %v: %v<->%v", tree, tree.Child(), l1c1)
+	}
+
+	if LastChild(tree) != l1c2 {
+		test.Errorf("LastChild() %v: %v<->%v", tree, LastChild(tree), l1c2)
+	}
+
+	l1c3 := NewChild(tree, InitStringTree, s3)
+
+	if LastChild(tree) != l1c3 {
+		test.Errorf("LastChild() %v: %v<->%v", tree, LastChild(tree), l1c3)
+	}
+
+	l2c1 := NewChild(l1c2, InitStringTree, s4)
+
+	if l1c2.Child() != l2c1 {
+		test.Errorf("Child() %v: %v<->%v", l1c2, l1c2.Child(), l2c1)
+	}
+
+	if LastChild(l1c2) != l2c1 {
+		test.Errorf("LastChild() %v: %v<->%v", l1c2, LastChild(l1c2), l2c1)
+	}
+
+	if l1c1.(*StringTree).Data != s1 {
 		test.Error("Data ")
 	}
 
-	if l1c2.Data != s2 {
+	if l1c2.(*StringTree).Data != s2 {
 		test.Error("Data ")
 	}
 
-	if l1c3.Data != s3 {
+	if l1c3.(*StringTree).Data != s3 {
 		test.Error("Data ")
 	}
 
-	if l2c1.Data != s4 {
+	if l2c1.(*StringTree).Data != s4 {
 		test.Error("Data ")
 	}
 
-	n := tree.Walk(func(node *Node) *Node {
-		if node.Data == s4 {
+	Display(tree)
+
+	if tree.Child() != l1c1 {
+		test.Errorf("Child() %v: %v<->%v", tree, tree.Child(), l1c1)
+	}
+
+	if l1c1.After() != l1c2 {
+		test.Errorf("After() %v<->%v", l1c1.After(), l1c2)
+	}
+
+	if l1c2.After() != l1c3 {
+		test.Error("After()")
+	}
+
+	if l1c2.Child() != l2c1 {
+		test.Error("Child()")
+	}
+
+	n := Walk(tree, func(node Noder) Noder {
+		fmt.Printf("%v\n", node)
+		if node.(*StringTree).Data == s4 {
 			return node
 		}
 		return nil
 	})
 
-	if n.Data != s4 {
+	if n.(*StringTree).Data != s4 {
 		test.Error("Data ")
 	}
 
-	test.Logf("%v", n.Data)
+	test.Logf("%v", n.(*StringTree).Data)
 
 	test.Log("Hi tree!")
 }
+
+type Tn struct {
+	Node
+	data string
+}
+
+func TestNoder(test *testing.T) {
+	test.Log("Hi treenoder!")
+}
+
+func TestDelete(test *testing.T) {
+
+	tree := NewStringTree("root")
+	s1 := "l1 c1"
+	s2 := "l1 c2"
+	s3 := "l1 c3"
+	s4 := "l2 c1"
+	_ = NewChild(tree, InitStringTree, s1)
+	l1c2 := NewChild(tree, InitStringTree, s2)
+	_ = NewChild(tree, InitStringTree, s3)
+	_ = NewChild(l1c2, InitStringTree, s4)
+	Remove(l1c2)
+	Display(tree)
+
+	n := Walk(tree, func(node Noder) Noder {
+		fmt.Printf("%v\n", node)
+		if node.(*StringTree).Data == l1c2.(*StringTree).Data {
+			return node
+		}
+		return nil
+	})
+
+	if n != nil {
+		test.Errorf("Not deleted: %v", n)
+	}
+
+}