Browse Source

Work on Raku. Transition to go-peg parser since that's much easier to use.

Beoran 6 years ago
parent
commit
b12c416007
12 changed files with 2106 additions and 938 deletions
  1. 104 0
      data/var/raku/raku.peg
  2. 48 0
      raku/classifier.go
  3. 2 0
      raku/classifier_test.go
  4. 418 0
      raku/parser.go
  5. 1 0
      raku/parser_test.go
  6. 223 0
      raku/peg.go
  7. 898 796
      raku/raku.go
  8. 136 135
      raku/raku_test.go
  9. 31 0
      raku/scope.go
  10. 1 0
      raku/scope_test.go
  11. 237 3
      raku/tokenizer.go
  12. 7 4
      raku/tokenizer_test.go

+ 104 - 0
data/var/raku/raku.peg

@@ -0,0 +1,104 @@
+    # A Raku program consists of statements, separated by end of expression.
+    # End of expression is a period or a newline not escaped by a preceding \
+    PROGRAM         ←  STATEMENTS
+    STATEMENTS      ←  (STATEMENT EOX)+
+    
+    # A statement is a command, a block or a call, optionally followed by a comment, or a comment alone
+    STATEMENT       ←  (COMMENT / COMMAND COMMENT? / BLOCK COMMENT? / CALL COMMENT?)
+    
+    # A comment starts with "#", "comment", "remark" or "rem"
+    ~COMMENT        ←  LONG_COMMENT / LINE_COMMENT
+    
+    # Reuse the string macro to absorb a long comment
+    LONG_COMMENT    ←  RAWSM("#{", "}#")
+    LINE_COMMENT    ←  ("#" / "comment" / "remark" / "rem") (!([\r\n]+).)*
+    
+    # A command starts with a word, which is the verb, followed by optional arguments
+    COMMAND         ←  VERB ARGUMENTS?
+    ~BLOCK_KEYWORD  ← < ( "do" / "sub" / "block") > WHITESPACE
+    ~END_KEYWORD    ← < ( "so" / "end" / "done" ) > WHITESPACE
+    ~CALL_KEYWORD   ← < ( "be" / "run" / "call" ) > WHITESPACE
+    HAVING_KEYWORD  ← < ( "having" / "given" ) > WHITESPACE
+    BLOCK           ←  BLOCK_KEYWORD BODY END_KEYWORD / "{" BODY "}"    
+    CALL            ←  CALL_KEYWORD BODY END_KEYWORD / "(" BODY ")" 
+    BODY            ←  OPERATION / (EOX? STATEMENTS? STATEMENT?)
+    TARGET          ←  ARGSEP? (EXPRESSION /  BLOCK / CALL)
+    ARGUMENTS       ←  TARGET ARGUMENT*
+    ARGSEP          ←  ',' / ((PREPOSITION / ARTICLE)+) / 
+                        ((HAVING_KEYWORD ARTICLE? WORD ARTICLE?))
+    PREPOSITION     ←  < ( "else"    / "otherwise"   /
+     "aboard"   / "about"       / "above"   / "across"      / "after" / "against" / 
+     "alongside"/ "along"       / "amidst"  / "amidst"      / "among" / "around"  /
+     "as"       / "at"          / "atop"    /  "before"     / "behind"/ "below"   /
+     "beneath"  / "besides"     / "beside"  / "between"     / "beyond"/ "but"     /  
+     "by"       / "circa"       / "despite" / "down"        / "during"/ "except"  /
+     "for"      / "from"        / "inside"  / "into"        / "in"    / "less"    /
+     "like"     / "near"        / "nothwithstanding"        / "off"   / "onto"    /
+     "on"       / "opposite"    / "outside" / "out"         / "over"  / "since"   /
+     "than"     / "through"     / "thru"    / "towards"     / "throughout" / "to" /
+     "underneath"/ "under"      / "unlike"  / "until"       / "upon"       / "upside" /
+     "up"       / "versus"      / "via"     / "within"      / "without"    / "with" ) > WHITESPACE
+    ~ARTICLE        ←  <("a" / "an" / "the")> WHITESPACE
+    ARGUMENT        ← ARGSEP EXPRESSION / ARGSEP? BLOCK / ARGSEP? CALL
+    EXPRESSION      ← ARRAY / STRING / NUMBER / SPECIALS / OPERATION / NAME
+
+    OPERATION       ←  UNARY_OPERATION / BIN_OPERATION
+    OPERAND         ←  UNOPER? (ARRAY/ STRING / NUMBER / SPECIALS / NAME / BLOCK / CALL )
+    BIN_OPERATION   ← (OPERAND (BINOPER OPERAND)+)
+    UNARY_OPERATION ←  (UNOPER OPERAND) 
+    KWUNOPER        ← "not" / "negate" / "complement"
+    KWBINOPER       ←   "greater_or_equal" / "lesser_or_equal" / 
+                    "plus" / "minus" / "times" / "divide" /  "of" / "'s" / "'" / 
+                    "and" / "or" / "equals" / "equal" / "differs" /  "isn't" / 
+                    "is" / "aint" / "greater" / "lesser" / "compare" / 
+                    "xor" / "binand" / "binor" / "rshift" / "lshift"
+    BINOPER         ←  < [-+/*] / "<-" / "->" / 
+                    "&" / "|" /  "&&"/ "||" /  ":" /  "==" / "!=" /  "<=>" / "<<" / ">>" / ">=" / "<=" /  ">" / "<" / "^" /  
+                    KWBINOPER > 
+    UNOPER          ←  < [-!~] / KWUNOPER >
+    
+    CONJUNCTION     ← < ( "and"   / "or" )  > WHITESPACE
+    ~ARRAY_KEYWORD  ← < ( "list" / "array") > WHITESPACE
+    ARRAY           ← ("[" ACONTENTS? "]") / (ARRAY_KEYWORD ACONTENTS? END_KEYWORD)
+    ARRAY_SEP       ← (CONJUNCTION / ",")
+    ACONTENTS       ← (TARGET ARRAY_SEP)* TARGET ARRAY_SEP?
+    SPECIALS        ← "true" / "false" / "nil" / "nothing" / "me" / "my" / 
+                      "parent" / "this" / "it" / "its" 
+    
+    # Strings
+    STRING          ← ESCSTR / RAWSTR
+    # Raw string macro
+    RAWSM(O,C)      ← O < (!(C).)* > C
+    # Escaped string macro.
+    ESCSM(O,C)      ← O < (!(C)STRCHAR)* > C
+    ESCSTR          ← ESCSM("`", "`") / ESCSM('"', '"')
+    RAWSTR          ← RAWSM('<<', '>>') /  RAWSM('«', '»') / RAWSM('‹', '›') / RAWSM('“', '”')
+    STRESC1         ← "\\" [nrtfv\'\\"\[\]\\]
+    STRESC2         ← "\\" [0-3] [0-7] [0-7]
+    STRESC3         ← "\\" [0-7] [0-7]*
+    STRESC4         ← "\\x" [0-9a-fA-F] [0-9a-fA-F]?
+    STRESC5         ← "\\u" [0-9a-fA-F]+
+    STRNOESC        ← (!('\\\\').)
+    STRCHAR         ←   STRESC1 / STRESC2 / STRESC3 / STRESC4 / STRESC5 / STRNOESC    
+    NUMBER          ←  DECIMAL / INTEGER
+    DECIMAL         ← < [-]?[0-9]+[.][0-9]+ >
+    INTEGER         ← < [-]?[0-9]+ >
+    KEYWORD         ←  BLOCK_KEYWORD / END_KEYWORD / CALL_KEYWORD / HAVING_KEYWORD /
+                       PREPOSITION   / ARTICLE     / SPECIALS     / KWUNOPER    /
+                       KWBINOPER 
+    NAME            ← ARTICLE? WORD+ 
+    VERB            ← WORD
+    WORD            ← !KEYWORD < [a-zA-Z_][a-zA-Z0-9_]* >
+    ESC_EOX         ← ([ \t]* "\\" ( "\r\n" / "\r" / "\n"))
+    ~EOX            ← (!ESC_EOX)([ \t]*[\r\n.]+[ \t]*)+
+    ~WHITESPACE     ← < ESC_EOX / [ \t]* >    
+    %whitespace     ←  WHITESPACE    
+    %word           ← [a-zA-Z_][a-zA-Z0-9_]*
+    # ---
+    # Expression parsing option
+    # %expr  = BINOPER  # Rule to apply 'precedence climbing method' to
+    # %binop = L && || and or  
+    # %binop = L == equals equal is 
+    # %binop = L + - plus minus  
+    # %binop = L * / times divide
+    # %binop = L of 's <- -> ' 
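
A small program of the kind this grammar is meant to accept, hand-checked against the rules above (LINE_COMMENT, COMMAND, CALL with a BIN_OPERATION body, BLOCK) but not machine-verified:

    # compute a sum and print it
    set total to (1 plus 2 plus 3)
    print total
    do
        print "done"
    end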

+ 48 - 0
raku/classifier.go

@@ -0,0 +1,48 @@
+package raku
+
+/* A classifier classifies Word tokens by their semantic type. */
+type Classifier interface {
+    /* Classify the given token text. Return the type, and a bool that indicates
+     * whether or not the token was known. */
+    Classify(TokenText) (TokenType, bool)
+    /* Adds a token type mapping to the classifier. Returns whether or
+     * not the addition was acceptable. */
+    Add(text TokenText, typ TokenType) bool
+}
+
+
+type DefaultClassifier struct {
+    text2type map[TokenText]TokenType
+}
+
+/* Add uses a pointer receiver, so the lazily created map survives the
+ * call instead of vanishing with a copy of the receiver. */
+func (clf *DefaultClassifier) Add(text TokenText, typ TokenType) bool {
+    if clf.text2type == nil {
+        clf.text2type = make(map[TokenText]TokenType)
+    }
+    clf.text2type[text] = typ
+    return true
+}
+
+func (clf *DefaultClassifier) AddMany(tokenmap map[string]TokenType) bool {
+    result := true
+    for text, typ := range tokenmap {
+        tokentext := TokenText(text)
+        call := clf.Add(tokentext, typ)
+        result = result && call
+    }
+    return result
+}
+
+func (clf DefaultClassifier) Classify(text TokenText) (TokenType, bool) {
+    typ, ok := clf.text2type[text]
+    return typ, ok
+}
+
+type ClassifierObject interface {
+    Send(message string, arguments ...ClassifierObject) (ClassifierObject, error)
+}
+
+
+
+
+

+ 2 - 0
raku/classifier_test.go

@@ -0,0 +1,2 @@
+package raku
+
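
The test file is empty so far; a minimal in-package test for the classifier could look like this sketch (not part of the commit; TokenText and TokenWord are assumed to come from this commit's tokenizer.go):

    package raku

    import "testing"

    // Sketch: an added mapping should classify back to its type.
    func TestDefaultClassifier(t *testing.T) {
        clf := &DefaultClassifier{}
        if !clf.Add(TokenText("duck"), TokenWord) {
            t.Fatal("Add failed")
        }
        typ, ok := clf.Classify(TokenText("duck"))
        if !ok || typ != TokenWord {
            t.Errorf("Classify: got %v, %v", typ, ok)
        }
    }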

+ 418 - 0
raku/parser.go

@@ -0,0 +1,418 @@
+package raku
+
+import (
+    "fmt"
+    "strings"
+
+    "gitlab.com/beoran/woe/graphviz"
+    "gitlab.com/beoran/woe/monolog"
+    "gitlab.com/beoran/woe/tree"
+)
+
+
+type ParseAction func(parser *ManualParser) bool
+
+type RuleType int
+
+const (
+    RuleTypeNone = RuleType(iota)
+    RuleTypeAlternate
+    RuleTypeSequence
+)
+
+type Rule struct {
+    tree.Node
+    Name string
+    RuleType
+    ParseAction
+}
+
+func NewRule(name string, ruty RuleType) *Rule {
+    res := &Rule{}
+    res.RuleType = ruty
+    res.Name = name
+    return res
+}
+
+func (me *Rule) NewChild(action ParseAction) *Rule {
+    child := NewRule("foo", RuleTypeNone)
+    child.ParseAction = action
+    tree.AppendChild(me, child)
+    return child
+}
+
+func (me *Rule) Walk(walker func(rule *Rule) *Rule) *Rule {
+    node_res := tree.Walk(me,
+        func(node tree.Noder) tree.Noder {
+            rule_res := walker(node.(*Rule))
+            if rule_res == nil {
+                return nil
+            } else {
+                return rule_res
+            }
+        })
+    return node_res.(*Rule)
+}
+
+type ManualParser struct {
+    *Ast
+    *Tokenizer
+    Classifier
+    now           *Ast
+    position       int
+    tokens      []*Token
+    lookahead     *Token
+}
+
+func (me *ManualParser) SetupRules() {
+
+}
+
+func (me *ManualParser) Expect(types ...TokenType) bool {
+    monolog.Debug("Expecting: ", types, " from ", me.now.AstType, " have ", me.LookaheadType(), " \n")
+    for _, t := range types {
+        if me.LookaheadType() == t {
+            monolog.Debug("Found: ", t, "\n")
+            return true
+        }
+    }
+    monolog.Debug("Not found.\n")
+    return false
+}
+
+type Parsable interface {
+    isParsable()
+}
+
+func (me TokenType) isParsable() {
+}
+
+func (me ParseAction) isParsable() {
+}
+
+/* Advance the parser, but only if there is no lookahead token already available in me.lookahead.
+ */
+func (me *ManualParser) Advance() *Token {
+    if me.lookahead == nil && me.position < len(me.tokens) {
+        me.lookahead = me.tokens[me.position]
+        me.position++
+    }
+    return me.lookahead
+}
+
+func (me *ManualParser) DropLookahead() {
+    me.lookahead = nil
+}
+
+func (me *ManualParser) Lookahead() *Token {
+    return me.lookahead
+}
+
+func (me *ManualParser) LookaheadType() TokenType {
+    if me.lookahead == nil {
+        return TokenError
+    }
+    return me.Lookahead().TokenType
+}
+
+func (me *ManualParser) Consume(atyp AstType, types ...TokenType) bool {
+    me.Advance()
+    res := me.Expect(types...)
+    if res {
+        me.NewAstChild(atyp)
+        me.DropLookahead()
+    }
+    return res
+}
+
+func (me *ManualParser) ConsumeWithoutAst(types ...TokenType) bool {
+    me.Advance()
+    res := me.Expect(types...)
+    if res {
+        me.DropLookahead()
+    }
+    return res
+}
+
+/*
+func (me *ManualParser) OneOf(restype AstType, options ...Parsable) bool {
+    res := false
+    for _, v := range options {
+        switch option := v.(type) {
+        case TokenType:
+            res = me.Consume(restype, option)
+        case ParseAction:
+            res = option(me)
+        }
+    }
+    return res
+}
+*/
+
+func (me *ManualParser) ParseEOX() bool {
+    return me.ConsumeWithoutAst(TokenEOL, TokenPeriod)
+}
+
+func (me *ManualParser) ParseValue() bool {
+    return me.Consume(AstTypeValue, TokenString, TokenNumber, TokenSymbol)
+}
+
+func (me *ManualParser) ParseWord() bool {
+    return me.Consume(AstTypeWord, TokenWord, TokenArticle)
+}
+
+func (me *ManualParser) ParseWordValue() bool {
+    me.NewAstChildDescend(AstTypeWordValue)
+    res := me.ParseValue() || me.ParseWord()
+    me.AstAscend(res)
+    return res
+}
+
+func (me *ManualParser) ParseParametersNonempty() bool {
+    res := false
+    for me.ParseParameter() {
+        res = true
+    }
+    return res
+}
+
+func (me *ManualParser) ParseCallArgs() bool {
+    me.NewAstChildDescend(AstTypeCallArgs)
+    res := me.ParseParameters() && me.ParseEOX()
+    me.AstAscend(res)
+    return res
+}
+
+func (me *ManualParser) ParseOperator() bool {
+    return me.Consume(AstTypeOperator, TokenOperator)
+}
+
+func (me *ManualParser) NewAstChild(tyty AstType) *Ast {
+    return me.now.NewChild(tyty, me.lookahead)
+}
+
+func (me *ManualParser) NewAstChildDescend(tyty AstType) {
+    node := me.NewAstChild(tyty)
+    me.now = node
+}
+
+func (me *ManualParser) AstAscend(keep bool) {
+    if me.now.Parent() != nil {
+        now := me.now
+        me.now = now.Parent().(*Ast)
+        if !keep {
+            now.Remove()
+        }
+    }
+}
+
+func (me TokenType) BlockCloseForOpen() (TokenType, bool) {
+    switch me {
+    case TokenOpenBrace:
+        return TokenCloseBrace, true
+    case TokenDo:
+        return TokenEnd, true
+    default:
+        return TokenError, false
+    }
+
+}
+
+func (me TokenType) ParenthesisCloseForOpen() (TokenType, bool) {
+    switch me {
+    case TokenOpenBracket:
+        return TokenCloseBracket, true
+    case TokenOpenParen:
+        return TokenCloseParen, true
+    default:
+        return TokenError, false
+    }
+}
+
+func (me *ManualParser) ParseBlock() bool {
+    me.Advance()
+    open := me.LookaheadType()
+    done, ok := open.BlockCloseForOpen()
+    if !ok {
+        /* Not an opening of a block, so no block found. */
+        return false
+    }
+    me.DropLookahead()
+    me.NewAstChildDescend(AstTypeBlock)
+    res := me.ParseStatements()
+    me.AstAscend(res)
+    if res {
+        me.Advance()
+        if me.LookaheadType() != done {
+            return me.ParseError()
+        }
+        me.DropLookahead()
+    }
+    return res
+}
+
+func (me *ManualParser) ParseParenthesis() bool {
+    me.Advance()
+    open := me.LookaheadType()
+    done, ok := open.ParenthesisCloseForOpen()
+    if !ok {
+        /* Not an opening of a parenthesis, so no parenthesis found. */
+        return false
+    }
+    me.DropLookahead()
+    me.NewAstChildDescend(AstTypeParenthesis)
+    res := me.ParseExpression()
+    me.AstAscend(res)
+    if res {
+        me.Advance()
+        if me.LookaheadType() != done {
+            return me.ParseError()
+        }
+        me.DropLookahead()
+    }
+    return res
+}
+
+func (me *ManualParser) ParseWords() bool {
+    me.NewAstChildDescend(AstTypeWords)
+    res := me.ParseWord()
+    for me.ParseWord() {
+    }
+    me.AstAscend(res)
+    return res
+}
+
+func (me *ManualParser) ParseDefinition() bool {
+    me.Advance()
+    res := me.Consume(AstTypeDefinition, TokenDef)
+    if !res {
+        return false
+    }
+    res = res && (me.ParseWord() || me.ParseOperator())
+    if !res {
+        _ = me.ParseError()
+    }
+    res = res && me.ParseParametersNonempty()
+    if !res {
+        _ = me.ParseError()
+    }
+    me.AstAscend(res)
+    return res
+}
+
+func (me *ManualParser) ParseParameter() bool {
+    me.NewAstChildDescend(AstTypeParameter)
+    res := me.ParseWordValue() || me.ParseOperator() ||
+        me.ParseParenthesis() || me.ParseBlock()
+    me.AstAscend(res)
+    return res
+}
+
+func (me *ManualParser) ParseParameters() bool {
+    for me.ParseParameter() {
+    }
+    return true
+}
+
+func (me *ManualParser) ParseError() bool {
+    me.now.NewChild(AstTypeError, me.lookahead)
+    fmt.Printf("Parse error: at %s\n", me.lookahead)
+    return false
+}
+
+func (me *ManualParser) ParseExpression() bool {
+    return (me.ParseWordValue() || me.ParseOperator()) && me.ParseParameters()
+}
+
+func (me *ManualParser) ParseStatement() bool {
+    me.NewAstChildDescend(AstTypeStatement)
+    /* First case is for an empty expression/statement. */
+    res := me.ParseEOX() ||
+        me.ParseDefinition() ||
+        (me.ParseExpression() && me.ParseEOX()) ||
+        me.ParseBlock()
+
+    me.AstAscend(res)
+    return res
+}
+
+func (me *ManualParser) ParseEOF() bool {
+    return me.Consume(AstTypeEox, TokenEOF)
+}
+
+func (me *ManualParser) ParseStatements() bool {
+    me.NewAstChildDescend(AstTypeStatements)
+    res := me.ParseStatement()
+
+    for me.ParseStatement() {
+    }
+
+    me.AstAscend(res)
+    return res
+}
+
+func (me *ManualParser) ParseProgram() bool {
+    return me.ParseStatements() && me.ParseEOF()
+}
+
+
+func (me *Ast) DotID() string {
+    return fmt.Sprintf("ast_%p", me)
+}
+
+func (me *Ast) ToGraph() *graphviz.Digraph {
+    g := graphviz.NewDigraph("rankdir", "LR")
+    me.Walk(func(ast *Ast) *Ast {
+        label := ast.AstType.String()        
+        if ast.Token != nil {
+            token := ast.Token.ShortString()
+            label = label + "\n" + token
+        }
+        g.AddNode(ast.DotID(), "label", label)
+        if ast.Parent() != nil {
+            g.AddEdgeByName(ast.Parent().(*Ast).DotID(), ast.DotID())
+        }
+        return nil
+    })
+    return g
+}
+
+func (me *Ast) Dotty() {
+    g := me.ToGraph()
+    g.Dotty()
+}
+
+func (me *Ast) ToAscii() {
+    me.Walk(func(ast *Ast) *Ast {
+        depth := tree.Depth(ast)
+        nchild := tree.CountChildren(ast)
+        label := ast.AstType.String()
+        indent := strings.Repeat("--", depth)
+        if ast.Token != nil {
+            token := ast.Token.ShortString()
+            fmt.Printf("%s>%s: %s\n", indent, label, token)
+        } else {
+            fmt.Printf("%s>%s: (%d)\n", indent, label, nchild)
+        }
+        return nil
+    })
+}
+
+/*
+
+PROGRAM -> STATEMENTS.
+STATEMENTS -> STATEMENT STATEMENTS | .
+STATEMENT -> EXPRESSION EOX  | DEFINITION | BLOCK .
+DEFINITION -> define WORDOP WORDOPS BLOCK.
+WORDOPS -> WORDOP WORDOPS | .
+EXPRESSION -> WORDVALUE PARAMETERS.
+PARAMETERS -> PARAMETER PARAMETERS | .
+PARAMETER -> WORDVALUE | PARENTHESIS | BLOCK | operator.
+PARENTHESIS -> '(' EXPRESSION ')' | ot EXPRESSION ct.
+BLOCK -> oe STATEMENTS ce | do STATEMENTS end .
+WORDOP -> word | operator | a | the.
+WORDVALUE -> word | VALUE | a | the.
+VALUE -> string | number | symbol.
+EOX -> eol | period.
+
+*/
+
+
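
The Advance/Expect/Consume trio above implements classic single-token lookahead. A standalone sketch of the same pattern, using simplified stand-in types rather than the ones from package raku:

    package main

    import "fmt"

    type tokType int

    const (
        tokWord tokType = iota
        tokEOL
    )

    type tok struct {
        typ  tokType
        text string
    }

    type parser struct {
        toks      []tok
        pos       int
        lookahead *tok
    }

    // advance fills the lookahead slot only when it is empty.
    func (p *parser) advance() *tok {
        if p.lookahead == nil && p.pos < len(p.toks) {
            p.lookahead = &p.toks[p.pos]
            p.pos++
        }
        return p.lookahead
    }

    // consume accepts the lookahead if it has one of the wanted types;
    // on failure the token stays put for the next alternative to try.
    func (p *parser) consume(types ...tokType) bool {
        p.advance()
        for _, t := range types {
            if p.lookahead != nil && p.lookahead.typ == t {
                p.lookahead = nil
                return true
            }
        }
        return false
    }

    func main() {
        p := &parser{toks: []tok{{tokWord, "print"}, {tokEOL, "\n"}}}
        fmt.Println(p.consume(tokWord), p.consume(tokEOL)) // true true
    }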

+ 1 - 0
raku/parser_test.go

@@ -0,0 +1 @@
+package raku

+ 223 - 0
raku/peg.go

@@ -0,0 +1,223 @@
+package raku
+
+import (
+    "errors"
+    "fmt"
+    "io/ioutil"
+    "os"
+    "github.com/yhirose/go-peg"    
+)
+
+type Parser struct {
+    *peg.Parser
+}
+
+type Result struct {
+    *peg.Ast
+}
+
+func parserCheck(err error) {
+    if perr, ok := err.(*peg.Error); ok {
+        for _, d := range perr.Details {
+            fmt.Println(d)
+        }
+        os.Exit(1)
+    }
+}
+
+var defaultParser *Parser
+
+/** Loads a PEG parser from the given file name. */
+func LoadParser(filename string) (*Parser, error) {
+    result := &Parser{}
+    data, err := ioutil.ReadFile(filename)
+    if err == nil {
+        result.Parser, err = peg.NewParser(string(data))
+        if err == nil {
+            result.EnableAst()            
+        }        
+        return result, err
+    } else {
+        return nil, err
+    }
+}
+
+
+func InitDefaultParser(peg_filename string) error {
+    var err error
+    defaultParser, err = LoadParser(peg_filename)
+    return err
+}
+
+func wrapResult(wrapme peg.Any) *Result {
+    result := &Result{};
+    ast, ok := wrapme.(*peg.Ast)
+    if ok {
+        result.Ast = ast
+        return result
+    } 
+    return nil    
+}
+
+func (parser Parser) Parse(source string) (*Result, error) {
+    ast, err := parser.ParseAndGetValue(source, nil)
+    return wrapResult(ast), err
+}
+
+func (parser Parser) ParseFile(filename string) (*Result, error) {    
+    source, err := ioutil.ReadFile(filename)
+    if err == nil {
+        return parser.Parse(string(source))
+    } else {
+        return nil, err
+    }
+}
+
+func parse(source string) (*Result, error) {
+    if defaultParser == nil {
+        return nil, errors.New("Default parser not initialized!")
+    }
+    return defaultParser.Parse(source)
+}
+
+func parseFile(filename string) (*Result, error) {
+    if defaultParser == nil {
+        return nil, errors.New("Default parser not initialized!")
+    }
+    return defaultParser.ParseFile(filename)
+}
+
+
+type Method func(raku *Raku, message string, target Object, args ...interface{}) (Object, error)
+
+type Command struct {
+    Name string
+    Method
+}
+
+type CommandMap = map[string]Command
+
+type Class interface {
+    Object
+    Parent() Class
+    Selector(name string) Method
+}
+
+type Object interface {
+    Class() Class
+    Send(raku *Raku, message string, args ...interface{}) (Object, error)
+}
+
+var RootClass *DefaultClass = &DefaultClass{}
+
+type DefaultObject struct {
+    class Class
+    *Raku
+    CommandMap
+}
+
+func (object DefaultObject) Selector(name string) Method {
+    command, ok := object.CommandMap[name]
+    if !ok {
+        if object.class == nil {
+            return nil
+        }
+        return object.class.Selector(name)
+    }
+    return command.Method
+}
+
+
+func (object DefaultObject) Class() Class {
+    return RootClass
+}
+
+const METHOD_MISSING = "method_missing"
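+// Send (below) first looks up the message itself, then falls back to
+// this method_missing selector, Ruby-style, before giving up.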
+
+func (object DefaultObject) Send(raku *Raku, message string, args ...interface{}) (Object, error) {
+    method := object.Selector(message)
+    if method != nil {
+        return method(object.Raku, message, object, args...)
+    }
+    method = object.Selector(METHOD_MISSING)
+    if method != nil {
+        return method(object.Raku, message, object, args...)
+    }
+    return nil, fmt.Errorf("cannot send message %s to object %v", message, object)
+}
+
+type DefaultClass struct {
+    parent Class
+    DefaultObject
+}
+
+func (class DefaultClass) Parent() Class {
+    return class.parent
+}
+
+type NilClass struct {
+    DefaultClass
+}
+
+// nilClass is the class of the Nil object.
+var nilClass *NilClass = &NilClass{}
+
+var Nil Object = &DefaultObject{class: nilClass}
+
+var Root Object = &DefaultObject{class: RootClass}
+
+type Boolean struct {
+    DefaultObject
+}
+
+var True *Boolean = &Boolean{DefaultObject{class: RootClass}}
+var False *Boolean = &Boolean{DefaultObject{class: RootClass}}
+
+func (boolean Boolean) Parent() Object {
+    return Nil
+}
+
+func (boolean Boolean) Send(raku *Raku, message string, args ...interface{}) (Object, error) {
+    return Root.Send(raku, message, args...)
+}
+
+
+// Class makes a Method usable as the target Object in its own Send.
+func (method Method) Class() Class {
+    return RootClass
+}
+
+func (method Method) Parent() Object {
+    return Nil
+}
+
+func (method Method) Send(raku *Raku, message string, args ...interface{}) (Object, error) {
+    return method(raku, message, method, args...)
+}
+
+
+
+
+type Raku struct {
+    *Parser
+    CommandMap
+    Root  Object
+    Nil   *NilClass
+    True  *Boolean
+    False *Boolean
+}
+
+func New(peg_filename string) (*Raku, error) {
+    var err error
+    result := &Raku{}
+    result.Parser, err = LoadParser(peg_filename)
+    return result, err
+}
+
+
+func Evaluate(raku *Raku, result Result) {
+    // not implemented yet
+}
+
+
+
+
+
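
How the wrapper above is meant to be driven, as a hedged sketch (not part of this commit; it assumes the grammar file path added above and that the sample line is valid raku.peg input):

    package main

    import (
        "fmt"

        "gitlab.com/beoran/woe/raku"
    )

    func main() {
        // Load the PEG grammar added in this commit.
        parser, err := raku.LoadParser("data/var/raku/raku.peg")
        if err != nil {
            fmt.Println("cannot load grammar:", err)
            return
        }
        // Parse a single command; COMMAND <- VERB ARGUMENTS? in raku.peg.
        result, err := parser.Parse("print \"hello\"\n")
        if err != nil {
            fmt.Println("parse error:", err)
            return
        }
        fmt.Println(result.Ast) // *peg.Ast built by yhirose/go-peg
    }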

+ 898 - 796
raku/raku.go

@@ -2,6 +2,19 @@
 
 /* Raku is an easy to use scripting language that can also be used easily interactively
 
+Desired simplified syntax:
+
+PROGRAM -> STATEMENTS.
+STATEMENTS -> STATEMENT STATEMENTS | .
+STATEMENT -> EXPRESSION eox | BLOCK .
+EXPRESSION -> word PARAMETERS.
+PARAMETERS -> PARAMETER PARAMETERS | .
+PARAMETER -> word | VALUE | PARENTHESIS | BLOCK | operator.
+PARENTHESIS -> bep PARAMETER eop .
+BLOCK -> bob STATEMENTS eob .
+VALUE -> string | long | double | symbol.
+
+
Desired syntax (verified LL(1) on smlweb.cpsc.ucalgary.ca)
 
 PROGRAM -> STATEMENTS.
@@ -9,7 +22,7 @@ STATEMENTS -> STATEMENT STATEMENTS | .
 STATEMENT -> EXPRESSION EOX  | DEFINITION | BLOCK .
 DEFINITION -> define WORDOP WORDOPS BLOCK.
 WORDOPS -> WORDOP WORDOPS | .
-EXPRESSION -> WORDVALUE PARAMETERSS.
+EXPRESSION -> WORDVALUE PARAMETERS.
 PARAMETERS -> PARAMETER PARAMETERS | .
 PARAMETER -> WORDVALUE | PARENTHESIS | BLOCK | operator.
 PARENTHESIS -> '(' EXPRESSION ')' | ot EXPRESSION ct.
@@ -22,1014 +35,1103 @@ EOX -> eol | period.
 Lexer:
 
 
+Yet another syntax, which supports operators, but requires () to use them,
+and [ ]  to indicate expressions inside expressions. 
+
+PROGRAM -> STATEMENTS.
+
+STATEMENTS -> STATEMENT STATEMENTS | .
+
+STATEMENT ->
+  EXPRESSION EOX
+| BLOCK .
+
+EXPRESSION -> 
+  CALL
+| PARENTHESIS
+| RECTANGLE
+.
+
+CALL -> WORD PARAMETERS .
+RECTANGLE -> '[' EXPRESSION ']' | 
+with EXPRESSION end .
+PARENTHESIS -> '(' OPERATION ')' 
+| let OPERATION  end .
+OPERATION -> PARAMETER OPLIST .
+OPLIST -> op OPLIST | .
+OP -> operator PARAMETER .
+PARAMETERS -> PARAMETER PARAMETERS | .
+PARAMETER -> WORDVALUE | BLOCK | 
+PARENTHESIS | RECTANGLE .
+BLOCK -> '{' STATEMENTS '}' | do STATEMENTS end .
+WORDVALUE -> word | VALUE | a | the.
+VALUE -> string | number | symbol.
+EOX -> '\n' .
+
+
+Most simple "lisp but with less parenthesis" syntax:
+
+
+PROGRAM -> STATEMENTS.
+STATEMENTS -> STATEMENT STATEMENTS | .
+STATEMENT -> CALL | EOX | BLOCK .
+BLOCK -> '{' STATEMENTS '}' .
+CALL -> word PARAMETERS EOX .
+PARAMETERS -> PARAMETER PARAMETERS | .
+PARAMETER -> WORDVALUE | BLOCK .
+WORDVALUE -> word | VALUE .
+VALUE -> string | number | symbol.
+EOX -> '\n' .
+
+
+More TCL-ish: allows operators in () and forces evaluation of blocks in [].
+
+PROGRAM -> STATEMENTS.
+STATEMENTS -> STATEMENT STATEMENTS | .
+STATEMENT -> 
+  COMMAND
+| SUBSTITUTION
+| BLOCK
+| EXPRESSION
+| EOX 
+.
+
+BLOCK -> 
+  '{' STATEMENTS '}' 
+| do STATEMENTS end .
+
+SUBSTITUTION ->
+  '[' STATEMENTS ']'
+| evaluate STATEMENTS end .
+
+EXPRESSION -> 
+  '(' EXPRBODY ')'
+| calculate EXPRBODY end .
+
+EXPRBODY -> OPERAND OPERANDS.
+OPERANDS ->  operator OPERANDS | .
+OPERAND -> PARAMETER .
+
+COMMAND -> word PARAMETERS EOX .
+ARGUMENTS -> WORDVALUE ARGUMENTS | .
+PARAMETERS -> PARAMETER PARAMETERS | .
+PARAMETER -> WORDVALUE | SUBSTITUTION | EXPRESSION | BLOCK .
+WORDVALUE -> word | VALUE .
+VALUE -> string | number | symbol | true | false | nothing .
+EOX -> '\n' .
+
+set (door's state) to closed .
+
+
+# Or this grammar, also useful as a generic command parser for
+# AIF or MUx itself, though necessarily more complex.
+PROGRAM -> STATEMENTS.
+STATEMENTS -> STATEMENT STATEMENTS | .
+STATEMENT -> 
+  COMMAND
+| SUBSTITUTION
+| BLOCK
+|  EXPRESSION
+| EOX 
+.
+
+BLOCK -> 
+  '{' STATEMENTS '}' 
+| do STATEMENTS end .
+
+SUBSTITUTION ->
+  '[' STATEMENTS ']'
+| evaluate STATEMENTS end .
+
+EXPRESSION -> 
+  '(' EXPRBODY ')'
+| calculate EXPRBODY end .
+
+EXPRBODY -> OPERAND OPERANDS.
+OPERANDS ->  operator OPERANDS | .
+OPERAND -> PARAMETER .
+
+COMMAND -> word ARGUMENTS EOX .
+ARGUMENTS -> ARGUMENT ARGUMENT_SEP ARGUMENTS | .
+ARGUMENT_SEP -> ',' | preposition | article | . 
+ARGUMENT -> LITERAL | SUBSTITUTION | EXPRESSION | BLOCK .
+WORDLIT -> word | LITERAL .
+LITERAL -> string | number | symbol | true | false | nothing .
+EOX -> '\n' .
+
+
+
+
+
+type Duration (is a) number
+( also could say a Duration (is a) number )
+
+type Effect (is an) integer
+
+constant No Effect is 1
+constant Healing Effect is an Effect which is 1 
+the Damaging Effect is an Effect which is 2 
+( the is another way to say constant / variable )
+
+type Spell (is a) record (which) has  
+    (a) name (which is a/as a) String
+    (a) Duration
+    (an) Effect
+end
+
+
+
+variable cure light is a Spell which has 
+    name is "Cure Light"
+    Duration is Duration 0.0
+    Effect is Healing Effect
+end
+( could have been the cure light is a spell ... )
+
+to cast (a) Spell at (a) Being do
+( ... )
+end 
+
+to cast (a) s which is a Spell at (a) b which is a Being do
+( ... )
+end 
+
+to add n1 which is a Number to n2 which is a Number do
+
+end
+
+
+to add one Number to another Number do
+    one becomes one plus another
+end
+
+to duck do
+    let text be "You duck"
+    one becomes one plus another
+end
+
+
+
+type spellike (is an) interface which has 
+    cast (a) at Being
+end
+
+type Spell aliases spell
+
+
+
+
+
+
+
+
+cast cure light at Ben
+
+
+English single word prepositions :
+
+in
+aboard
+about
+above
+absent
+across
+after
+against
+along
+alongside
+amid
+amidst
+among
+apropos
+apud
+around
+as
+astride
+at
+on
+atop 
+ontop
+bar
+before
+behind
+below
+beneath
+beside
+besides
+between
+beyond
+but
+by
+chez
+circa
+come
+dehors
+despite
+down
+during
+except
+for
+from
+in
+inside
+into
+less
+like
+minus
+near
+nearer
+nearest
+notwithstanding
+of
+off
+on
+onto
+opposite
+out
+outside
+over
+pace
+past
+per
+post
+pre
+pro
+qua
+re
+sans
+save
+short
+since
+than
+through
+thru
+throughout
+to
+toward 
+towards
+under
+underneath
+unlike
+until
+up
+upon
+upside
+versus
+via
+vice
+vis-à-vis
+with
+within
+without
+worth
+
+
+
+
+
 */
 package raku
 
 import (
-	"bytes"
-	"fmt"
-	"io"
-	"reflect"
-	"runtime"
-	"strings"
-	"unicode"
-
-	"gitlab.com/beoran/woe/graphviz"
-	"gitlab.com/beoran/woe/monolog"
-	"gitlab.com/beoran/woe/tree"
+    "bytes"
+    "errors"
+    "fmt"
+    "io"
+    "reflect"
+    "runtime"
+    "strings"
+    "unicode"
+
+    // "gitlab.com/beoran/woe/graphviz"
+    "gitlab.com/beoran/woe/monolog"
+    "gitlab.com/beoran/woe/tree"
 )
 
-type Value string
-type TokenType int64
 
-type Position struct {
-	Index  int
-	Row    int
-	Column int
+type TokenChannel chan *Token
+
+type Lexer struct {
+    Reader  io.Reader
+    Positions []Position
+    Token   Token
+    rule    LexerRule
+    Output  TokenChannel
+    buffer  []byte
+    runes   []rune
 }
 
-const (
-	TokenPeriod       TokenType = TokenType('.')
-	TokenComma        TokenType = TokenType(',')
-	TokenSemicolon    TokenType = TokenType(';')
-	TokenColon        TokenType = TokenType(':')
-	TokenOpenParen    TokenType = TokenType('(')
-	TokenCloseParen   TokenType = TokenType(')')
-	TokenOpenBrace    TokenType = TokenType('{')
-	TokenCloseBrace   TokenType = TokenType('}')
-	TokenOpenBracket  TokenType = TokenType('[')
-	TokenCloseBracket TokenType = TokenType(']')
-
-	TokenNone         TokenType = 0
-	TokenError        TokenType = -1
-	TokenWord         TokenType = -2
-	TokenEOL          TokenType = -3
-	TokenEOF          TokenType = -4
-	TokenNumber       TokenType = -5
-	TokenOperator     TokenType = -6
-	TokenString       TokenType = -7
-	TokenSymbol       TokenType = -8
-	TokenFirstKeyword TokenType = -9
-	TokenKeywordA     TokenType = -10
-	TokenKeywordDo    TokenType = -11
-	TokenKeywordEnd   TokenType = -12
-	TokenKeywordThe   TokenType = -13
-	TokenKeywordDef   TokenType = -14
-	TokenLastKeyword  TokenType = -15
-	TokenLast         TokenType = -15
-)
+type LexerRule func(lexer *Lexer) LexerRule
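+
+// A LexerRule is a state function: it lexes one kind of token and
+// returns the next rule to run, or nil when lexing is finished; see
+// the loop in Start further down.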
 
-type Token struct {
-	TokenType
-	Value
-	Position
-}
-
-var tokenTypeMap map[TokenType]string = map[TokenType]string{
-	TokenNone:       "TokenNone",
-	TokenError:      "TokenError",
-	TokenWord:       "TokenWord",
-	TokenEOL:        "TokenEOL",
-	TokenEOF:        "TokenEOF",
-	TokenNumber:     "TokenNumber",
-	TokenOperator:   "TokenOperator",
-	TokenString:     "TokenString",
-	TokenSymbol:     "TokenSymbol",
-	TokenKeywordA:   "TokenKeywordA",
-	TokenKeywordDo:  "TokenKeywordDo",
-	TokenKeywordEnd: "TokenKeywordEnd",
-	TokenKeywordThe: "TokenKeywordThe",
-	TokenKeywordDef: "TokenKeywordDef",
-}
-
-var keywordMap map[string]TokenType = map[string]TokenType{
-	"a":      TokenKeywordA,
-	"an":     TokenKeywordA,
-	"do":     TokenKeywordDo,
-	"def":    TokenKeywordDef,
-	"define": TokenKeywordDef,
-	"end":    TokenKeywordEnd,
-	"the":    TokenKeywordThe,
-}
-
-var sigilMap map[string]TokenType = map[string]TokenType{
-	"[": TokenOpenBracket,
-	"{": TokenOpenBrace,
-	"(": TokenOpenParen,
-	"]": TokenCloseBracket,
-	"}": TokenCloseBrace,
-	")": TokenCloseParen,
-}
-
-const operator_chars = "&|@'^-*%/+=<>~\\"
-
-func (me TokenType) String() string {
-	name, found := tokenTypeMap[me]
-	if found {
-		return name
-	} else {
-		if (me > 0) && (me < 256) {
-			return fmt.Sprintf("TokenChar<%c>", byte(me))
-		}
-		return fmt.Sprintf("Unknown Token %d", int(me))
-	}
-}
-
-func (me Token) String() string {
-	return fmt.Sprintf("Token: %s >%s< %d %d %d.", me.TokenType, string(me.Value), me.Index, me.Row, me.Column)
+func (me *Lexer) Last() Position {
+    return me.Positions[1]
 }
 
-type TokenChannel chan *Token
+func (me *Lexer) Current() Position {
+    return me.Positions[0]
+}
 
-type Lexer struct {
-	Reader  io.Reader
-	Current Position
-	Last    Position
-	Token   Token
-	rule    LexerRule
-	Output  TokenChannel
-	buffer  []byte
-	runes   []rune
+func (me *Lexer) LastPtr() * Position {
+    return &me.Positions[1]
 }
 
-type LexerRule func(lexer *Lexer) LexerRule
+func (me *Lexer) CurrentPtr() * Position {
+    return &me.Positions[0]
+}
 
-func (me *Lexer) Emit(t TokenType, v Value) {
-	tok := &Token{t, v, me.Current}
-	me.Output <- tok
+
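+// The lexer now keeps a small stack of Positions: index 0 is the
+// current position, index 1 the last accepted one (see Current and
+// Last above); Push/Pop presumably allow backtracking over input.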
+func (me *Lexer) PushPosition(pos Position) {
+    newpos := make([]Position, len(me.Positions) + 1)
+    newpos[0] = pos
+    for i := 1 ; i < len(me.Positions); i++ {
+        newpos[i] = me.Positions[i-1]
+    }
+    me.Positions = newpos
+}
+
+func (me *Lexer) PushCurrentPosition()  {
+    current := me.Current()
+    me.PushPosition(current)
+}
+
+func (me *Lexer) PopPosition() * Position {
+    if (len(me.Positions) <= 2) { 
+        return nil
+    }
+    
+    result := &me.Positions[0];
+    newpos := make([]Position, len(me.Positions) - 1)
+    for i := 1 ; i < len(me.Positions); i++ {
+        newpos[i-1] = me.Positions[i]
+    }    
+    me.Positions = newpos
+    return result
+}
+
+
+func (me *Lexer) Emit(t TokenType, v TokenText) {
+    tok := &Token{t, v, me.Current()}
+    me.Output <- tok
 }
 
 func (me *Lexer) Error(message string, args ...interface{}) {
-	value := fmt.Sprintf(message, args...)
-	monolog.Error("Lex Error: %s", value)
-	me.Emit(TokenError, Value(value))
+    value := fmt.Sprintf(message, args...)
+    monolog.Error("Lex Error: %s", value)
+    me.Emit(TokenError, TokenText(value))
 }
 
 func LexError(me *Lexer) LexerRule {
-	me.Error("Error")
-	return nil
+    me.Error("Error")
+    return nil
 }
 
 func (me *Lexer) SkipComment() bool {
-	if me.Peek() == '#' {
-		if me.Next() == '(' {
-			return me.SkipNotIn(")")
-		} else {
-			return me.SkipNotIn("\r\n")
-		}
-	}
-	return true
+    if me.Peek() == '#' {
+        if me.Next() == '(' {
+            return me.SkipNotIn(")")
+        } else {
+            return me.SkipNotIn("\r\n")
+        }
+    }
+    return true
 }
 
 /* Returns whether or not a keyword was found, and if so, the TokenType
 of the keyword.*/
 func LookupKeyword(word string) (bool, TokenType) {
-	kind, found := keywordMap[word]
-	return found, kind
+    kind, found := keywordMap[word]
+    return found, kind
 }
 
 /* Returns whether or not a special operator or sigil was found, and if so,
returns the TokenType of the sigil.*/
 func LookupSigil(sigil string) (bool, TokenType) {
-	fmt.Printf("LookupSigil: %s\n", sigil)
-	kind, found := sigilMap[sigil]
-	return found, kind
+    fmt.Printf("LookupSigil: %s\n", sigil)
+    kind, found := sigilMap[sigil]
+    return found, kind
 }
 
 func LexSigil(me *Lexer) LexerRule {
-	me.Found(TokenType(me.Peek()))
-	_ = me.Next()
-	me.Advance()
-	return LexNormal
+    me.Found(TokenType(me.Peek()))
+    _ = me.Next()
+    me.Advance()
+    return LexNormal
 }
 
 func LexWord(me *Lexer) LexerRule {
-	me.SkipNotIn(" \t\r\n'({[]})")
+    me.SkipNotIn(" \t\r\n'({[]})")
 
-	iskw, kind := LookupKeyword(me.CurrentStringValue())
-	if iskw {
-		me.Found(kind)
-	} else {
-		me.Found(TokenWord)
-	}
-	return LexNormal
+    iskw, kind := LookupKeyword(me.CurrentStringValue())
+    if iskw {
+        me.Found(kind)
+    } else {
+        me.Found(TokenWord)
+    }
+    return LexNormal
 }
 
 func LexSymbol(me *Lexer) LexerRule {
-	me.SkipNotIn(" \t\r\n'({[]})")
-	me.Found(TokenSymbol)
-	return LexNormal
+    me.SkipNotIn(" \t\r\n'({[]})")
+    me.Found(TokenSymbol)
+    return LexNormal
 }
 
 func LexNumber(me *Lexer) LexerRule {
-	me.SkipNotIn(" \t\r\n'({[]})")
-	me.Found(TokenNumber)
-	return LexNormal
+    me.SkipNotIn(" \t\r\n'({[]})")
+    me.Found(TokenNumber)
+    return LexNormal
 }
 
 func LexWhitespace(me *Lexer) LexerRule {
-	me.SkipWhitespace()
-	me.Advance()
-	return LexNormal
+    me.SkipWhitespace()
+    me.Advance()
+    return LexNormal
 }
 
 func LexComment(me *Lexer) LexerRule {
-	if !me.SkipComment() {
-		me.Error("Unterminated comment")
-		return LexError
-	}
-	me.Advance()
-	return LexNormal
+    if !me.SkipComment() {
+        me.Error("Unterminated comment")
+        return LexError
+    }
+    me.Advance()
+    return LexNormal
 }
 
 func LexPunctuator(me *Lexer) LexerRule {
-	me.Found(TokenType(me.Peek()))
-	me.Advance()
-	return LexNormal
+    me.Found(TokenType(me.Peek()))
+    _ = me.Next()
+    me.Advance()
+    return LexNormal
 }
 
 func LexEOL(me *Lexer) LexerRule {
-	me.SkipIn("\r\n")
-	me.Found(TokenEOL)
-	return LexNormal
+    me.SkipIn("\r\n")
+    me.Found(TokenEOL)
+    return LexNormal
 }
 
 func LexOperator(me *Lexer) LexerRule {
-	me.SkipIn(operator_chars)
-	me.Found(TokenOperator)
-	return LexNormal
+    me.SkipIn(operator_chars)
+    me.Found(TokenOperator)
+    return LexNormal
 }
 
 func lexEscape(me *Lexer) error {
-	_ = me.Next()
-	return nil
+    _ = me.Next()
+    return nil
 }
 
 func LexString(me *Lexer) LexerRule {
-	open := me.Peek()
-	do_escape := open == '"'
-	peek := me.Next()
-	me.Advance()
-	for ; peek != '\000'; peek = me.Next() {
-		if do_escape && peek == '\\' {
-			if err := lexEscape(me); err != nil {
-				return LexError
-			}
-		} else if peek == open {
-			me.Found(TokenString)
-			_ = me.Next()
-			me.Advance()
-			return LexNormal
-		}
-	}
-	me.Error("Unexpected EOF in string.")
-	return nil
+    open := me.Peek()
+    do_escape := open == '"'
+    peek := me.Next()
+    me.Advance()
+    for ; peek != '\000'; peek = me.Next() {
+        if do_escape && peek == '\\' {
+            if err := lexEscape(me); err != nil {
+                return LexError
+            }
+        } else if peek == open {
+            me.Found(TokenString)
+            _ = me.Next()
+            me.Advance()
+            return LexNormal
+        }
+    }
+    me.Error("Unexpected EOF in string.")
+    return nil
 }
 
 func LexNumberOrOperator(me *Lexer) LexerRule {
-	if unicode.IsDigit(me.Next()) {
-		return LexNumber
-	} else {
-		_ = me.Previous()
-		return LexOperator
-	}
+    if unicode.IsDigit(me.Next()) {
+        return LexNumber
+    } else {
+        _ = me.Previous()
+        return LexOperator
+    }
 }
 
 func LexNormal(me *Lexer) LexerRule {
-	peek := me.Peek()
-	if peek == '#' {
-		return LexComment
-	} else if strings.ContainsRune(" \t", peek) {
-		return LexWhitespace
-	} else if strings.ContainsRune(".,;:", peek) {
-		return LexPunctuator
-	} else if strings.ContainsRune("([{}])", peek) {
-		return LexSigil
-	} else if strings.ContainsRune("$", peek) {
-		return LexSymbol
-	} else if strings.ContainsRune("\r\n", peek) {
-		return LexEOL
-	} else if strings.ContainsRune("+-", peek) {
-		return LexNumberOrOperator
-	} else if strings.ContainsRune("\"`", peek) {
-		return LexString
-	} else if peek == '\000' {
-		me.Emit(TokenEOF, "")
-		return nil
-	} else if unicode.IsLetter(peek) {
-		return LexWord
-	} else if unicode.IsDigit(peek) {
-		return LexNumber
-	} else if strings.ContainsRune(operator_chars, peek) {
-		return LexOperator
-	} else {
-		return LexError
-	}
+    peek := me.Peek()
+    if peek == '#' {
+        return LexComment
+    } else if strings.ContainsRune(" \t", peek) {
+        return LexWhitespace
+    } else if strings.ContainsRune(".,;:", peek) {
+        return LexPunctuator
+    } else if strings.ContainsRune("([{}])", peek) {
+        return LexSigil
+    } else if strings.ContainsRune("$", peek) {
+        return LexSymbol
+    } else if strings.ContainsRune("\r\n", peek) {
+        return LexEOL
+    } else if strings.ContainsRune("+-", peek) {
+        return LexNumberOrOperator
+    } else if strings.ContainsRune("\"`", peek) {
+        return LexString
+    } else if peek == '\000' {
+        me.Emit(TokenEOF, "")
+        return nil
+    } else if unicode.IsLetter(peek) {
+        return LexWord
+    } else if unicode.IsDigit(peek) {
+        return LexNumber
+    } else if strings.ContainsRune(operator_chars, peek) {
+        return LexOperator
+    } else {
+        return LexError
+    }
 }
 
 func OpenLexer(reader io.Reader) *Lexer {
-	lexer := &Lexer{}
-	lexer.Reader = reader
-	lexer.Output = make(TokenChannel)
-	// lexer.buffer = new(byte[1024])
-	return lexer
+    lexer := &Lexer{}
+    lexer.Reader    = reader
+    lexer.Output    = make(TokenChannel)
+    lexer.Positions = make([]Position, 2)
+    // lexer.buffer = new(byte[1024])
+    return lexer
 }
 
 func (me *Lexer) ReadReaderOnce() (bool, error) {
-	buffer := make([]byte, 1024)
+    buffer := make([]byte, 1024)
 
-	n, err := me.Reader.Read(buffer)
-	monolog.Debug("read %v %d %v\n", buffer[:n], n, err)
-	if n > 0 {
-		me.buffer = append(me.buffer, buffer[:n]...)
-		monolog.Debug("append  %s", me.buffer)
-	}
+    n, err := me.Reader.Read(buffer)
+    monolog.Debug("read %v %d %v\n", buffer[:n], n, err)
+    if n > 0 {
+        me.buffer = append(me.buffer, buffer[:n]...)
+        monolog.Debug("append  %s", me.buffer)
+    }
 
-	if err == io.EOF {
-		return true, nil
-	} else if err != nil {
-		me.Error("Error reading from reader: %s", err)
-		return true, err
-	}
-	return false, nil
+    if err == io.EOF {
+        return true, nil
+    } else if err != nil {
+        me.Error("Error reading from reader: %s", err)
+        return true, err
+    }
+    return false, nil
 }
 
 func (me *Lexer) ReadReader() error {
-	me.buffer = make([]byte, 0)
-	more, err := me.ReadReaderOnce()
-	for err == nil && more {
-		more, err = me.ReadReaderOnce()
-	}
-	me.runes = bytes.Runes(me.buffer)
+    me.buffer = make([]byte, 0)
+    more, err := me.ReadReaderOnce()
+    for err == nil && more {
+        more, err = me.ReadReaderOnce()
+    }
+    me.runes = bytes.Runes(me.buffer)
 
-	return err
+    return err
 }
 
 func (me *Lexer) Peek() rune {
-	if (me.Current.Index) >= len(me.runes) {
-		return '\000'
-	}
-	return me.runes[me.Current.Index]
+    if (me.Current().Index) >= len(me.runes) {
+        return '\000'
+    }
+    return me.runes[me.Current().Index]
 }
 
 func (me *Lexer) PeekNext() rune {
-	if (me.Current.Index + 1) >= len(me.runes) {
-		return '\000'
-	}
-	return me.runes[me.Current.Index+1]
+    if (me.Current().Index + 1) >= len(me.runes) {
+        return '\000'
+    }
+    return me.runes[me.Current().Index+1]
 }
 
 func (me *Lexer) Next() rune {
-	if me.Peek() == '\n' {
-		me.Current.Column = 0
-		me.Current.Row++
-	}
-	me.Current.Index++
-	if me.Current.Index >= len(me.runes) {
-		//me.Emit(TokenEOF, "")
-	}
-	return me.Peek()
+    if me.Peek() == '\n' {
+        me.CurrentPtr().Column = 0
+        me.CurrentPtr().Row++
+    }
+    me.CurrentPtr().Index++
+    if me.Current().Index >= len(me.runes) {
+        //me.Emit(TokenEOF, "")
+    }
+    return me.Peek()
 }
 
 func (me *Lexer) Previous() rune {
-	if me.Current.Index > 0 {
-		me.Current.Index--
+    if me.Current().Index > 0 {
+        me.CurrentPtr().Index--
 
-		if me.Peek() == '\n' {
-			me.Current.Column = 0
-			me.Current.Row++
-		}
-	}
-	return me.Peek()
+        if me.Peek() == '\n' {
+            me.CurrentPtr().Column = 0
+            me.CurrentPtr().Row++
+        }
+    }
+    return me.Peek()
 }
 
 func (me *Lexer) SkipRune() {
-	_ = me.Next()
+    _ = me.Next()
 }
 
 func (me *Lexer) SkipIn(set string) bool {
-	for strings.ContainsRune(set, me.Next()) {
-		monolog.Debug("SkipIn: %s %c\n", set, me.Peek())
-		if me.Peek() == '\000' {
-			return false
-		}
-	}
-	return true
+    for strings.ContainsRune(set, me.Next()) {
+        monolog.Debug("SkipIn: %s %c\n", set, me.Peek())
+        if me.Peek() == '\000' {
+            return false
+        }
+    }
+    return true
 }
 
 func (me *Lexer) SkipNotIn(set string) bool {
-	_ = me.Next()
-	for !strings.ContainsRune(set, me.Peek()) {
-		if me.Next() == '\000' {
-			return false
-		}
-	}
-	return true
+    _ = me.Next()
+    for !strings.ContainsRune(set, me.Peek()) {
+        if me.Next() == '\000' {
+            return false
+        }
+    }
+    return true
 }
 
 func (me *Lexer) SkipWhile(should_skip func(r rune) bool) bool {
-	for should_skip(me.Peek()) {
-		if me.Next() == '\000' {
-			return false
-		}
-	}
-	return true
+    for should_skip(me.Peek()) {
+        if me.Next() == '\000' {
+            return false
+        }
+    }
+    return true
 }
 
 func (me *Lexer) SkipWhitespace() {
-	me.SkipIn(" \t")
+    me.SkipIn(" \t")
 }
 
 func (me *Lexer) Advance() {
-	me.Last = me.Current
+    (*me.LastPtr()) = me.Current()
 }
 
 func (me *Lexer) Rewind() {
-	me.Current = me.Last
+    (*me.CurrentPtr()) = me.Last()
 }
 
 func (me *Lexer) CurrentRuneValue() []rune {
-	return me.runes[me.Last.Index:me.Current.Index]
+    return me.runes[me.Last().Index:me.Current().Index]
 }
 
 func (me *Lexer) CurrentStringValue() string {
-	return string(me.CurrentRuneValue())
+    return string(me.CurrentRuneValue())
 }
 
 func (me *Lexer) Found(kind TokenType) {
-	me.Emit(kind, Value(me.CurrentStringValue()))
-	me.Advance()
+    me.Emit(kind, TokenText(me.CurrentStringValue()))
+    me.Advance()
 }
 
 func GetFunctionName(fun interface{}) string {
-	return runtime.FuncForPC(reflect.ValueOf(fun).Pointer()).Name()
+    return runtime.FuncForPC(reflect.ValueOf(fun).Pointer()).Name()
 }
 
 func (me *Lexer) Start() {
-	if err := me.ReadReader(); err == nil || err == io.EOF {
-		rule := LexNormal
-		for rule != nil {
-			monolog.Debug("Lexer Rule: %s\n", GetFunctionName(rule))
-			rule = rule(me)
-		}
-	} else {
-		me.Error("Could not read in input buffer: %s", err)
-	}
-	close(me.Output)
+    if err := me.ReadReader(); err == nil || err == io.EOF {
+        rule := LexNormal
+        for rule != nil {
+            monolog.Debug("Lexer Rule: %s\n", GetFunctionName(rule))
+            rule = rule(me)
+        }
+    } else {
+        me.Error("Could not read in input buffer: %s", err)
+    }
+    close(me.Output)
 }
 
 func (me *Lexer) TryLexing() {
-	go me.Start()
+    go me.Start()
 
-	for token := range me.Output {
-		monolog.Info("Token %s", token)
-	}
+    for token := range me.Output {
+        monolog.Info("Token %s", token)
+    }
 }
 
 type AstType int
 
 const (
-	AstTypeProgram = AstType(iota)
-	AstTypeStatements
-	AstTypeStatement
-	AstTypeDefinition
-	AstTypeWords
-	AstTypeExpression
-	AstTypeWordExpression
-	AstTypeWordCallop
-	AstTypeOperation
-	AstTypeOperations
-	AstTypeCallArgs
-	AstTypeValueExpression
-	AstTypeValueCallop
-	AstTypeParametersNonempty
-	AstTypeParameters
-	AstTypeParameter
-	AstTypeBlock
-	AstTypeWordValue
-	AstTypeWord
-	AstTypeValue
-	AstTypeEox
-	AstTypeOperator
-	AstTypeParenthesis
-	AstTypeModifier
-	AstTypeError
+    AstTypeProgram = AstType(iota)
+    AstTypeStatements
+    AstTypeStatement
+    AstTypeDefinition
+    AstTypeWords
+    AstTypeExpression
+    AstTypeWordExpression
+    AstTypeWordCallop
+    AstTypeOperation
+    AstTypeOperations
+    AstTypeCallArgs
+    AstTypeValueExpression
+    AstTypeValueCallop
+    AstTypeParametersNonempty
+    AstTypeParameters
+    AstTypeParameter
+    AstTypeBlock
+    AstTypeWordValue
+    AstTypeWord
+    AstTypeValue
+    AstTypeEox
+    AstTypeOperator
+    AstTypeParenthesis
+    AstTypeModifier
+    AstTypeError
 )
 
 var astTypeMap map[AstType]string = map[AstType]string{
-	AstTypeProgram:            "AstTypeProgram",
-	AstTypeStatements:         "AstTypeStatements",
-	AstTypeStatement:          "AstTypeStatement:",
-	AstTypeDefinition:         "AstTypeDefinition",
-	AstTypeWords:              "AstTypeWords",
-	AstTypeExpression:         "AstTypeExpression",
-	AstTypeWordExpression:     "AstTypeWordExpression",
-	AstTypeWordCallop:         "AstTypeWordCallop",
-	AstTypeOperation:          "AstTypeOperation",
-	AstTypeOperations:         "AstTypeOperations",
-	AstTypeCallArgs:           "AstTypeCallArgs",
-	AstTypeValueExpression:    "AstTypeValueExpression",
-	AstTypeValueCallop:        "AstTypeValueCallop",
-	AstTypeParametersNonempty: "AstTypeParametersNonempty",
-	AstTypeParameters:         "AstTypeParameters",
-	AstTypeParameter:          "AstTypeParameter",
-	AstTypeBlock:              "AstTypeBlock",
-	AstTypeWordValue:          "AstTypeWordValue",
-	AstTypeWord:               "AstTypeWord",
-	AstTypeValue:              "AstTypeValue",
-	AstTypeEox:                "AstTypeEox",
-	AstTypeOperator:           "AstTypeOperator",
-	AstTypeParenthesis:        "AstTypeParenthesis",
-	AstTypeModifier:           "AstTypeModifier",
-	AstTypeError:              "AstTypeError",
+    AstTypeProgram:            "Program",
+    AstTypeStatements:         "Statements",
+    AstTypeStatement:          "Statement",
+    AstTypeDefinition:         "Definition",
+    AstTypeWords:              "Words",
+    AstTypeExpression:         "Expression",
+    AstTypeWordExpression:     "WordExpression",
+    AstTypeWordCallop:         "WordCallop",
+    AstTypeOperation:          "Operation",
+    AstTypeOperations:         "Operations",
+    AstTypeCallArgs:           "CallArgs",
+    AstTypeValueExpression:    "ValueExpression",
+    AstTypeValueCallop:        "ValueCallop",
+    AstTypeParametersNonempty: "ParametersNonempty",
+    AstTypeParameters:         "Parameters",
+    AstTypeParameter:          "Parameter",
+    AstTypeBlock:              "Block",
+    AstTypeWordValue:          "WordValue",
+    AstTypeWord:               "Word",
+    AstTypeValue:              "Value",
+    AstTypeEox:                "Eox",
+    AstTypeOperator:           "Operator",
+    AstTypeParenthesis:        "Parenthesis",
+    AstTypeModifier:           "Modifier",
+    AstTypeError:              "Error",
 }
 
 func (me AstType) String() string {
-	name, found := astTypeMap[me]
-	if found {
-		return name
-	} else {
-		return fmt.Sprintf("Unknown AstType %d", int(me))
-	}
+    name, found := astTypeMap[me]
+    if found {
+        return name
+    } else {
+        return fmt.Sprintf("Unknown AstType %d", int(me))
+    }
 }
 
 type Ast struct {
-	tree.Node
-	AstType
-	*Token
+    tree.Node
+    AstType
+    *Token
+}
+
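+// Run dispatches on the node's AstType to the matching Run* method
+// below; for now these are all stubs that return "Not implemented".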
+func (me *Ast) Run(run *Runtime) (*Value, error) {
+    switch me.AstType {
+    case AstTypeProgram:
+        return me.RunProgram(run)
+    case AstTypeStatements:
+        return me.RunStatements(run)
+    case AstTypeStatement:
+        return me.RunStatement(run)
+    case AstTypeDefinition:
+        return me.RunDefinition(run)
+    case AstTypeWords:
+        return me.RunWords(run)
+    case AstTypeExpression:
+        return me.RunExpression(run)
+    case AstTypeWordExpression:
+        return me.RunWordExpression(run)
+    case AstTypeWordCallop:
+        return me.RunWordCallop(run)
+    case AstTypeOperation:
+        return me.RunOperation(run)
+    case AstTypeOperations:
+        return me.RunOperations(run)
+    case AstTypeCallArgs:
+        return me.RunCallArgs(run)
+    case AstTypeValueExpression:
+        return me.RunValueExpression(run)
+    case AstTypeValueCallop:
+        return me.RunValueCallop(run)
+    case AstTypeParametersNonempty:
+        return me.RunParametersNonempty(run)
+    case AstTypeParameters:
+        return me.RunParameters(run)
+    case AstTypeParameter:
+        return me.RunParameter(run)
+    case AstTypeBlock:
+        return me.RunBlock(run)
+    case AstTypeWordValue:
+        return me.RunWordValue(run)
+    case AstTypeWord:
+        return me.RunWord(run)
+    case AstTypeValue:
+        return me.RunValue(run)
+    case AstTypeEox:
+        return me.RunEox(run)
+    case AstTypeOperator:
+        return me.RunOperator(run)
+    case AstTypeParenthesis:
+        return me.RunParenthesis(run)
+    case AstTypeModifier:
+        return me.RunModifier(run)
+    case AstTypeError:
+        return me.RunError(run)
+    default:
+        return nil, errors.New("Shoudln't happen")
+    }
+}
+
+func (me *Ast) RunProgram(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunStatements(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunStatement(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunDefinition(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunWords(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunExpression(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunWordExpression(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunWordCallop(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunOperation(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunOperations(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunCallArgs(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunValueExpression(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunValueCallop(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunParametersNonempty(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunParameters(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunParameter(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunBlock(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunWordValue(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunWord(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunValue(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunEox(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunOperator(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunParenthesis(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunModifier(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
+}
+func (me *Ast) RunError(run *Runtime) (*Value, error) {
+    return nil, errors.New("Not implemented")
 }
 
 func (me *Ast) NewChild(kind AstType, token *Token) *Ast {
-	child := &Ast{}
-	child.AstType = kind
-	child.Token = token
-	tree.AppendChild(me, child)
-	return child
+    child := &Ast{}
+    child.AstType = kind
+    child.Token = token
+    tree.AppendChild(me, child)
+    return child
 }
 
 func (me *Ast) Walk(walker func(ast *Ast) *Ast) *Ast {
-	node_res := tree.Walk(me,
-		func(node tree.Noder) tree.Noder {
-			ast_res := walker(node.(*Ast))
-			if ast_res == nil {
-				return nil
-			} else {
-				return ast_res
-			}
-		})
-	if node_res != nil {
-		return node_res.(*Ast)
-	} else {
-		return nil
-	}
+    node_res := tree.Walk(me,
+        func(node tree.Noder) tree.Noder {
+            ast_res := walker(node.(*Ast))
+            if ast_res == nil {
+                return nil
+            } else {
+                return ast_res
+            }
+        })
+    if node_res != nil {
+        return node_res.(*Ast)
+    } else {
+        return nil
+    }
 }
 
 func (me *Ast) Remove() {
-	_ = tree.Remove(me)
+    _ = tree.Remove(me)
 }
 
 func NewAst(kind AstType) *Ast {
-	ast := &Ast{}
-	ast.AstType = kind
-	ast.Token = nil
-	return ast
+    ast := &Ast{}
+    ast.AstType = kind
+    ast.Token = nil
+    return ast
 }
 
-type ParseAction func(parser *Parser) bool
 
-type RuleType int
+type DefineType int
 
 const (
-	RuleTypeNone = RuleType(iota)
-	RuleTypeAlternate
-	RuleTypeSequence
+    DefineTypeNone = DefineType(iota)
+    DefineTypeGo
+    DefineTypeUser
+    DefineTypeVar
 )
 
-type Rule struct {
-	tree.Node
-	Name string
-	RuleType
-	ParseAction
-}
-
-func NewRule(name string, ruty RuleType) *Rule {
-	res := &Rule{}
-	res.RuleType = ruty
-	res.Name = name
-	return res
-}
-
-func (me *Rule) NewChild(action ParseAction) *Rule {
-	child := NewRule("foo", RuleTypeNone)
-	tree.AppendChild(me, child)
-	return child
-}
-
-func (me *Rule) Walk(walker func(rule *Rule) *Rule) *Rule {
-	node_res := tree.Walk(me,
-		func(node tree.Noder) tree.Noder {
-			rule_res := walker(node.(*Rule))
-			if rule_res == nil {
-				return nil
-			} else {
-				return rule_res
-			}
-		})
-	return node_res.(*Rule)
-}
-
-type Parser struct {
-	*Ast
-	*Lexer
-	now       *Ast
-	lookahead *Token
-}
-
-func (me *Parser) SetupRules() {
-
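+// Value is the interface implemented by all values the Raku runtime can handle.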
+type Value interface {
 }
 
-func (me *Parser) Expect(types ...TokenType) bool {
-	monolog.Debug("Expecting: ", types, " from ", me.now.AstType, " have ", me.LookaheadType(), " \n")
-	for _, t := range types {
-		if me.LookaheadType() == t {
-			monolog.Debug("Found: ", t, "\n")
-			return true
-		}
-	}
-	monolog.Debug("Not found.\n")
-	return false
-}
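+// Concrete value types, each wrapping a basic Go type.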
+type StringValue string
 
-type Parsable interface {
-	isParsable()
-}
+type SymbolValue string
 
-func (me TokenType) isParsable() {
-}
+type IntegerValue int64
 
-func (me ParseAction) isParsable() {
-}
+type FloatValue float64
 
-/* Advance the lexer but only of there is no lookahead token already available in me.lookahead.
- */
-func (me *Parser) Advance() *Token {
-	if me.lookahead == nil {
-		me.lookahead = <-me.Lexer.Output
-	}
-	return me.lookahead
-}
+type ArrayValue []Value
 
-func (me *Parser) DropLookahead() {
-	me.lookahead = nil
-}
+type MapValue map[string]Value
 
-func (me *Parser) Lookahead() *Token {
-	return me.lookahead
-}
+type BoolValue bool
 
-func (me *Parser) LookaheadType() TokenType {
-	if me.lookahead == nil {
-		return TokenError
-	}
-	return me.Lookahead().TokenType
-}
 
-func (me *Parser) Consume(atyp AstType, types ...TokenType) bool {
-	me.Advance()
-	res := me.Expect(types...)
-	if res {
-		me.NewAstChild(atyp)
-		me.DropLookahead()
-	}
-	return res
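+// Variable binds a name to a value.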
+type Variable struct {
+    Value
+    Name string
 }
 
-func (me *Parser) ConsumeWithoutAst(types ...TokenType) bool {
-	me.Advance()
-	res := me.Expect(types...)
-	if res {
-		me.DropLookahead()
-	}
-	return res
-}
-
-/*
-func (me * Parser) OneOf(restype AstType, options ...Parsable) bool {
-	res := false
-	k, v := range options {
-		switch option := v.Type {
-			case TokenType: res := Consume(restype, option)
-			case ParseAction: res := option(me)
-		}
-	}
-	return res
-}
-*/
-
-func (me *Parser) ParseEOX() bool {
-	return me.ConsumeWithoutAst(TokenEOL, TokenPeriod)
-}
-
-func (me *Parser) ParseValue() bool {
-	return me.Consume(AstTypeValue, TokenString, TokenNumber, TokenSymbol)
-}
-
-func (me *Parser) ParseWord() bool {
-	return me.Consume(AstTypeWord, TokenWord, TokenKeywordA, TokenKeywordThe)
-}
-
-func (me *Parser) ParseWordValue() bool {
-	me.NewAstChildDescend(AstTypeWordValue)
-	res := me.ParseValue() || me.ParseWord()
-	me.AstAscend(res)
-	return res
-}
-
-func (me *Parser) ParseParametersNonempty() bool {
-	res := false
-	for me.ParseParameter() {
-		res = true
-	}
-	return res
-}
-
-func (me *Parser) ParseCallArgs() bool {
-	me.NewAstChildDescend(AstTypeCallArgs)
-	res := me.ParseParameters() && me.ParseEOX()
-	me.AstAscend(res)
-	return res
-}
-
-func (me *Parser) ParseOperator() bool {
-	return me.Consume(AstTypeOperator, TokenOperator)
-}
-
-func (me *Parser) NewAstChild(tyty AstType) *Ast {
-	return me.now.NewChild(tyty, me.lookahead)
-}
-
-func (me *Parser) NewAstChildDescend(tyty AstType) {
-	node := me.NewAstChild(tyty)
-	me.now = node
-}
-
-func (me *Parser) AstAscend(keep bool) {
-	if me.now.Parent() != nil {
-		now := me.now
-		me.now = now.Parent().(*Ast)
-		if !keep {
-			now.Remove()
-		}
-	}
-}
-
-func (me TokenType) BlockCloseForOpen() (TokenType, bool) {
-	switch me {
-	case TokenOpenBrace:
-		return TokenCloseBrace, true
-	case TokenKeywordDo:
-		return TokenKeywordEnd, true
-	default:
-		return TokenError, false
-	}
-
-}
-
-func (me TokenType) ParenthesisCloseForOpen() (TokenType, bool) {
-	switch me {
-	case TokenOpenBracket:
-		return TokenCloseBracket, true
-	case TokenOpenParen:
-		return TokenCloseParen, true
-	default:
-		return TokenError, false
-	}
-
-}
-
-func (me *Parser) ParseBlock() bool {
-	me.Advance()
-	open := me.LookaheadType()
-	done, ok := open.BlockCloseForOpen()
-	if !ok {
-		/* Not an opening of a block, so no block found. */
-		return false
-	}
-	me.DropLookahead()
-	me.NewAstChildDescend(AstTypeBlock)
-	res := me.ParseStatements()
-	me.AstAscend(res)
-	if res {
-		me.Advance()
-		if me.LookaheadType() != done {
-			return me.ParseError()
-		}
-		me.DropLookahead()
-	}
-	return res
-}
-
-func (me *Parser) ParseParenthesis() bool {
-	me.Advance()
-	open := me.LookaheadType()
-	done, ok := open.ParenthesisCloseForOpen()
-	if !ok {
-		/* Not an opening of a parenthesis, so no parenthesis found. */
-		return false
-	}
-	me.DropLookahead()
-	me.NewAstChildDescend(AstTypeParenthesis)
-	res := me.ParseExpression()
-	me.AstAscend(res)
-	if res {
-		me.Advance()
-		if me.LookaheadType() != done {
-			return me.ParseError()
-		}
-		me.DropLookahead()
-	}
-	return res
-}
-
-func (me *Parser) ParseWords() bool {
-	me.NewAstChildDescend(AstTypeWords)
-	res := me.ParseWord()
-	for me.ParseWord() {
-	}
-	me.AstAscend(res)
-	return res
-}
-
-func (me *Parser) ParseDefinition() bool {
-	me.Advance()
-	res := me.Consume(AstTypeDefinition, TokenKeywordDef)
-	if !res {
-		return false
-	}
-	res = res && (me.ParseWord() || me.ParseOperator())
-	if !res {
-		_ = me.ParseError()
-	}
-	res = res && me.ParseParametersNonempty()
-	if !res {
-		_ = me.ParseError()
-	}
-	me.AstAscend(res)
-	return res
-}
-
-func (me *Parser) ParseParameter() bool {
-	me.NewAstChildDescend(AstTypeParameter)
-	res := me.ParseWordValue() || me.ParseOperator() ||
-		me.ParseParenthesis() || me.ParseBlock()
-	me.AstAscend(res)
-	return res
-}
-
-func (me *Parser) ParseParameters() bool {
-	for me.ParseParameter() {
-	}
-	return true
-}
-
-func (me *Parser) ParseError() bool {
-	me.now.NewChild(AstTypeError, me.lookahead)
-	fmt.Printf("Parse error: at %s\n", me.lookahead)
-	return false
-}
-
-func (me *Parser) ParseExpression() bool {
-	return (me.ParseWordValue() || me.ParseOperator()) && me.ParseParameters()
-}
-
-func (me *Parser) ParseStatement() bool {
-	me.NewAstChildDescend(AstTypeStatement)
-	/* First case is for an empty expression/statement. */
-	res := me.ParseEOX() ||
-		me.ParseDefinition() ||
-		(me.ParseExpression() && me.ParseEOX()) ||
-		me.ParseBlock()
-
-	me.AstAscend(res)
-	return res
-}
-
-func (me *Parser) ParseEOF() bool {
-	return me.Consume(AstTypeEox, TokenEOF)
-}
-
-func (me *Parser) ParseStatements() bool {
-	me.NewAstChildDescend(AstTypeStatements)
-	res := me.ParseStatement()
-
-	for me.ParseStatement() {
-	}
-
-	me.AstAscend(res)
-	return res
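+// DefinePattern describes the shape of the arguments that a define accepts.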
+type DefinePattern struct {
+    Parts []string
 }
 
-func (me *Parser) ParseProgram() bool {
-	return me.ParseStatements() && me.ParseEOF()
-}
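+// GoDefineFunc is the signature for defines implemented as Go functions.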
+type GoDefineFunc func(runtime *Runtime, args ...Value) Value
 
-func NewParserForLexer(lexer *Lexer) *Parser {
-	me := &Parser{}
-	me.Ast = NewAst(AstTypeProgram)
-	me.now = me.Ast
-	me.Lexer = lexer
-	me.Ast.Token = &Token{}
-	go me.Lexer.Start()
-	return me
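+// ScriptDefine is a define written in Raku itself, stored as a parsed AST.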
+type ScriptDefine struct {
+    DefineType
+    DefinePattern
+    *Ast
 }
 
-func NewParserForText(text string) *Parser {
-	lexer := OpenLexer(strings.NewReader(text))
-	return NewParserForLexer(lexer)
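+// GoDefine is a define implemented by a built-in Go function.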
+type GoDefine struct {
+    DefineType
+    *DefinePattern
+    GoDefineFunc
 }
 
-func (me *Ast) DotID() string {
-	return fmt.Sprintf("ast_%p", me)
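+// Define is any kind of define that an Environment can store.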
+type Define interface {
 }
 
-func (me *Ast) Dotty() {
-	g := graphviz.NewDigraph("rankdir", "LR")
-	me.Walk(func(ast *Ast) *Ast {
-		label := ast.AstType.String()
-		if ast.Token != nil {
-			label = label + "\n" + ast.Token.String()
-		}
-		g.AddNode(ast.DotID(), "label", label)
-		if ast.Parent() != nil {
-			g.AddEdgeByName(ast.Parent().(*Ast).DotID(), ast.DotID())
-		}
-		return nil
-	})
-	g.Dotty()
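+// Environment holds the defines and variables of a scope, together with a value stack.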
+type Environment struct {
+    Parent    *Environment
+    Defines   map[string]Define
+    Variables map[string]*Variable
+    Stack     []Value
 }
 
-/*
-
-PROGRAM -> STATEMENTS.
-STATEMENTS -> STATEMENT STATEMENTS | .
-STATEMENT -> EXPRESSION EOX  | DEFINITION | BLOCK .
-DEFINITION -> define WORDOP WORDOPS BLOCK.
-WORDOPS -> WORDOP WORDOPS | .
-EXPRESSION -> WORDVALUE PARAMETERSS.
-PARAMETERS -> PARAMETER PARAMETERS | .
-PARAMETER -> WORDVALUE | PARENTHESIS | BLOCK | operator.
-PARENTHESIS -> '(' EXPRESSION ')' | ot EXPRESSION ct.
-BLOCK -> oe STATEMENTS ce | do STATEMENTS end .
-WORDOP -> word | operator | a | the.
-WORDVALUE -> word | VALUE | a | the.
-VALUE -> string | number | symbol.
-EOX -> eol | period.
-
-)
-*/
-
-type DefineType int
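+// Instruction enumerates the low level operations of the planned runtime.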
+type Instruction int
 
 const (
-	DefineTypeNone = DefineType(iota)
-	DefineTypeGo
-	DefineTypeUser
-	DefineTypeVar
+    InstructionNop = Instruction(iota)
+    InstructionCall
+    InstructionPush
+    InstructionPop
 )
 
-type Var interface {
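+// AddDefine registers a define under the given name.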
+func (env *Environment) AddDefine(name string, def Define) {
+    // Create the map lazily so a zero Environment is usable.
+    if env.Defines == nil {
+        env.Defines = make(map[string]Define)
+    }
+    env.Defines[name] = def
+}
 
-type DefinePattern struct {
-	Parts []string
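+// NewGoDefine registers a Go built-in under the given name and argument
+// pattern, for example: env.NewGoDefine("puts", RuntimePuts, "$", "*")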
+func (env *Environment) NewGoDefine(name string, fn GoDefineFunc, pattern ...string) {
+    defpattern := new(DefinePattern)
+    defpattern.Parts = append(defpattern.Parts, pattern...)
+    godefine := &GoDefine{DefineTypeGo, defpattern, fn}
+    env.AddDefine(name, godefine)
 }
 
-type GoDefineFunc func(runtime Runtime, args ...Var) Var
-
-type UserDefine struct {
-	DefinePattern
-	*Ast
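+// Runtime interprets a parsed Raku program, tracking the current AST node.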
+type Runtime struct {
+    Environment
+    start *Ast
+    now   *Ast
 }
 
-type GoDefine struct {
-	DefinePattern
-	*GoDefineFunc
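+// RuntimePuts implements the built-in "puts": it prints all of its arguments.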
+func RuntimePuts(runtime *Runtime, args ...Value) Value {
+    var iargs []interface{}
+    // Range over the values, not the indices, and repack them for fmt.Print.
+    for _, arg := range args {
+        iargs = append(iargs, arg)
+    }
+    fmt.Print(iargs...)
+    return BoolValue(true)
+}
 
-type Define struct {
-	DefineType
-	definition *Ast
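+// Init installs the built-in defines into the runtime's environment.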
+func (run *Runtime) Init() {
+    run.NewGoDefine("puts", RuntimePuts, "$", "*")
 }
 
-type Environment struct {
-	Parent *Environment
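+// Start points the runtime at the AST it will execute.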
+func (run *Runtime) Start(ast *Ast) {
+    run.start = ast
+    run.now = ast
 }
 
-type Runtime struct {
-	Environment
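+// RunOnce performs a single interpretation step. Still a stub.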
+func (run *Runtime) RunOnce() {
+    // run.now.Node
 }
 
 func main() {
-	fmt.Println("Hello World!")
+    fmt.Println("Hello World!")
 }

+ 136 - 135
raku/raku_test.go

@@ -2,185 +2,186 @@
 package raku
 
 import (
-	"strings"
-	"testing"
+    "strings"
+    "testing"
 
-	_ "gitlab.com/beoran/woe/monolog"
-	"gitlab.com/beoran/woe/tree"
+    _ "gitlab.com/beoran/woe/monolog"
+    // "gitlab.com/beoran/woe/tree"
 )
 
 func HelperTryLexing(me *Lexer, test *testing.T) {
-	go me.Start()
-	me.Advance()
-	test.Logf("Lexing started:")
-	test.Logf("Lexer buffer: %v", me.buffer)
+    go me.Start()
+    me.Advance()
+    test.Logf("Lexing started:")
+    test.Logf("Lexer buffer: %v", me.buffer)
 
-	for token := range me.Output {
-		// test.Logf("Token %s", token)
-		_ = token
-	}
+    for token := range me.Output {
+        test.Logf("Token %s", token)
+    }
 }
 
 func LexAll(me *Lexer) []*Token {
-	res := make([]*Token, 0)
-	go me.Start()
+    res := make([]*Token, 0)
+    go me.Start()
 
-	for token := range me.Output {
-		res = append(res, token)
-	}
-	return res
+    for token := range me.Output {
+        res = append(res, token)
+    }
+    return res
 }
 
 func LexText(input string) []*Token {
-	lexer := OpenLexer(strings.NewReader(input))
-	tokens := LexAll(lexer)
-	return tokens
+    lexer := OpenLexer(strings.NewReader(input))
+    tokens := LexAll(lexer)
+    return tokens
 }
 
 func Assert(test *testing.T, ok bool, text string) bool {
-	if !ok {
-		test.Error(text)
-	}
-	return ok
+    if !ok {
+        test.Error(text)
+    }
+    return ok
 }
 
 func TestLexing(test *testing.T) {
-	const input = `
+    const input = `
 say "hello \"world\\"
 
 define open a door do
-	set door's open to true
+    set (door's open) true
+    let door 's open be true 
 end
 
 def increment variable by value do
-	variable = variable + value 
+    ( variable = ( variable + value ) )
 end
 `
-	lexer := OpenLexer(strings.NewReader(input))
-	HelperTryLexing(lexer, test)
-	test.Log("Hi test!")
-}
+    lexer := OpenLexer(strings.NewReader(input))
+    HelperTryLexing(lexer, test)
+    test.Log("Hi test!")
+}
 
 func TestLexing2(test *testing.T) {
-	const input = `say`
-	lexer := OpenLexer(strings.NewReader(input))
-	HelperTryLexing(lexer, test)
-	test.Log("Hi test!")
+    const input = `say`
+    lexer := OpenLexer(strings.NewReader(input))
+    HelperTryLexing(lexer, test)
+    test.Log("Hi test!")
 }
 
 func TestLexing3(test *testing.T) {
-	const input = `$sym`
-	lexer := OpenLexer(strings.NewReader(input))
-	HelperTryLexing(lexer, test)
-	test.Log("Hi test!")
+    const input = `$sym`
+    lexer := OpenLexer(strings.NewReader(input))
+    HelperTryLexing(lexer, test)
+    test.Log("Hi test!")
 }
 
 func TestParseValue(test *testing.T) {
-	const input = `"hello \"world\\"`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseValue(), "Could not parse value")
-	tree.Display(parser.Ast)
+    const input = `"hello \"world\\"`
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseValue(), "Could not parse value")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseValue2(test *testing.T) {
-	const input = `2.1`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseValue(), "Could not parse value")
-	tree.Display(parser.Ast)
+    const input = `2.1`
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseValue(), "Could not parse value")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseValue3(test *testing.T) {
-	const input = `$sym`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseValue(), "Could not parse value")
-	tree.Display(parser.Ast)
+    const input = `$sym`
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseValue(), "Could not parse value")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseEox(test *testing.T) {
-	const input = `
+    const input = `
 `
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseEOX(), "Could not parse EOX")
-	tree.Display(parser.Ast)
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseEOX(), "Could not parse EOX")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseEox2(test *testing.T) {
-	const input = `.
+    const input = `.
 `
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseEOX(), "Could not parse EOX")
-	tree.Display(parser.Ast)
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseEOX(), "Could not parse EOX")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseWord(test *testing.T) {
-	const input = `say`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseWord(), "Could not parse word")
-	tree.Display(parser.Ast)
+    const input = `say`
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseWord(), "Could not parse word")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseWordExpression(test *testing.T) {
-	const input = `say "hello world" three times
-	`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseExpression(), "Could not parse word expression")
-	tree.Display(parser.Ast)
+    const input = `say "hello world" three times
+    `
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseExpression(), "Could not parse word expression")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseWordExpression2(test *testing.T) {
-	const input = `val + 10 * z
-	`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseExpression(), "Could not parse word expression with operators")
-	tree.Display(parser.Ast)
+    const input = `val + 10 * z
+    `
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseExpression(), "Could not parse word expression with operators")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseStatements(test *testing.T) {
-	const input = `val + 10 * z. open door.
-	`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseStatements(), "Could not parse statements with only a parse word expression with operators")
-	tree.Display(parser.Ast)
+    const input = `val + 10 * z. open door.
+    `
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseStatements(), "Could not parse statements with only a parse word expression with operators")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseProgram(test *testing.T) {
-	const input = `val + 10 * z. open door.
-	`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseProgram(), "Could not parse program.")
-	tree.Display(parser.Ast)
+    const input = `val + 10 * z. open door.
+    `
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseProgram(), "Could not parse program.")
+    // tree.Display(parser.Ast)
 }
 
 func TestParseProgram2(test *testing.T) {
-	const input = `define greet a person  do
+    const input = `define greet person  do
 say "hello" someone
 end
 
 greet bob
 greet sally
 if 0 do
-	foo
+    foo
 end else {
-	bar
+    bar
 }
 
 
 if mp < cost do
-	say "Not enough mana!"
+    say "Not enough mana!"
 end else do
-	say "Zap!"
+    say "Zap!"
 end
 
 `
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseProgram(), "Could not parse program.")
-	tree.Display(parser.Ast)
-	parser.Ast.Dotty()
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseProgram(), "Could not parse program.")
+    // tree.Display(parser.Ast)
+    // parser.Ast.ToAscii()
 }
 
 func TestParseblock(test *testing.T) {
-	// monolog.Setup("raku_test.log", true, false)
-	const input = `{
+    // monolog.Setup("raku_test.log", true, false)
+    const input = `{
 say "hello"
 say "world"
 let i be 3 + 4
@@ -189,64 +190,64 @@ let ij be i * j
 return ij
 }
 `
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseBlock(), "Could not parse block.")
-	tree.Display(parser.Ast)
-	// parser.Ast.Dotty()
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseBlock(), "Could not parse block.")
+    // tree.Display(parser.Ast)
+    parser.Ast.ToAscii()
 }
 
 func TestParseProgram3(test *testing.T) {
-	// monolog.Setup("raku_test.log", true, false)
-	const input = `set foo to (3 + 4)
+    // monolog.Setup("raku_test.log", true, false)
+    const input = `set foo to (3 + 4)
 `
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseProgram(), "Could not parse program.")
-	tree.Display(parser.Ast)
-	parser.Ast.Dotty()
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseProgram(), "Could not parse program.")
+    // tree.Display(parser.Ast)
+    // parser.Ast.Dotty()
 }
 
 func TestParseParenthesis(test *testing.T) {
-	// monolog.Setup("raku_test.log", true, false)
-	const input = `(3 + 4 * 5)`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseParenthesis(), "Could not parse parenthesis.")
-	tree.Display(parser.Ast)
-	parser.Ast.Dotty()
+    // monolog.Setup("raku_test.log", true, false)
+    const input = `(3 + 4 * 5)`
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseParenthesis(), "Could not parse parenthesis.")
+    // tree.Display(parser.Ast)
+    parser.Ast.ToAscii()
 }
 
 func TestParseBlock2(test *testing.T) {
-	// monolog.Setup("raku_test.log", true, false)
-	const input = `{ . }`
-	parser := NewParserForText(input)
-	Assert(test, parser.ParseBlock(), "Could not parse block.")
-	tree.Display(parser.Ast)
-	parser.Ast.Dotty()
+    // monolog.Setup("raku_test.log", true, false)
+    const input = `{ . }`
+    parser := NewParserForText(input)
+    Assert(test, parser.ParseBlock(), "Could not parse block.")
+    // tree.Display(parser.Ast)
+    // parser.Ast.ToAscii()
 }
 
 func LexingTest(test *testing.T, input string, expected ...TokenType) {
-	tokens := LexText(input)
-	if len(tokens) != len(expected) {
-		test.Errorf("Amount of tokens does not match expected amount: %d, should be %d", len(tokens), len(expected))
-	}
-	for index := 0; index < len(expected); index++ {
-		want := expected[index]
-		tok := tokens[index]
-		if tok.TokenType != want {
-			test.Errorf("Wrong token type recognized: %v, should be %s", tok, want)
-		}
-	}
+    tokens := LexText(input)
+    if len(tokens) != len(expected) {
+        test.Errorf("Amount of tokens does not match expected amount: %d, should be %d", len(tokens), len(expected))
+    }
+    for index := 0; index < len(expected); index++ {
+        want := expected[index]
+        tok := tokens[index]
+        if tok.TokenType != want {
+            test.Errorf("Wrong token type recognized: %v, should be %s", tok, want)
+        }
+    }
 }
 
 func TestLexingParen(test *testing.T) {
-	LexingTest(test, "(", TokenOpenParen, TokenEOF)
-	LexingTest(test, "((", TokenOpenParen, TokenOpenParen, TokenEOF)
+    LexingTest(test, "(", TokenOpenParen, TokenEOF)
+    LexingTest(test, "((", TokenOpenParen, TokenOpenParen, TokenEOF)
 }
 
 func TestLexingDoEnd(test *testing.T) {
-	LexingTest(test, "do", TokenKeywordDo, TokenEOF)
-	LexingTest(test, "end", TokenKeywordEnd, TokenEOF)
-	LexingTest(test, "do\nend", TokenKeywordDo, TokenEOL, TokenKeywordEnd, TokenEOF)
-	LexingTest(test, ".}", TokenPeriod, TokenCloseBrace, TokenEOF)
-	LexingTest(test, "{.}", TokenOpenBrace, TokenPeriod, TokenCloseBrace, TokenEOF)
+    LexingTest(test, "do", TokenDo, TokenEOF)
+    LexingTest(test, "end", TokenEnd, TokenEOF)
+    LexingTest(test, "do\nend", TokenDo, TokenEOL, TokenEnd, TokenEOF)
+    LexingTest(test, ".}", TokenPeriod, TokenCloseBrace, TokenEOF)
+    LexingTest(test, "{.}", TokenOpenBrace, TokenPeriod, TokenCloseBrace, TokenEOF)
 
 }

+ 31 - 0
raku/scope.go

@@ -0,0 +1,31 @@
+package raku
+
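+// Scope is a Classifier that can be chained to a parent scope.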
+type Scope interface {
+    /* Chain chains this scope to the given parent scope. */
+    Chain(Scope) bool
+    Classifier
+}
+
+
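+// DefaultScope combines the default classifier with an optional parent scope.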
+type DefaultScope struct {
+    DefaultClassifier
+    Parent Scope
+}
+
+
+func (scope *DefaultScope) Chain(parent Scope) bool {
+    scope.Parent = parent
+    return true
+}
+
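+// Classify first tries this scope's own classifier, then the parent scope, if any.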
+func (scope *DefaultScope) Classify(text TokenText) (TokenType, bool) {
+    typ, ok := scope.DefaultClassifier.Classify(text)
+    if !ok && scope.Parent != nil {
+        typ, ok = scope.Parent.Classify(text)
+    }
+    return typ, ok
+}

+ 1 - 0
raku/scope_test.go

@@ -0,0 +1 @@
+package raku

+ 237 - 3
raku/tokenizer.go

@@ -7,6 +7,220 @@ import "reflect"
 import "runtime"
 import  "gitlab.com/beoran/woe/monolog"
 
+const tokenDelimiter = " \t\r\n'({[]}),;.:"
+const operator_chars = "&|@'^-*%/+=<>~"
+
+
+type TokenText string
+type TokenType int64
+
+type Position struct {
+    Index  int
+    Row    int
+    Column int
+}
+
+const (
+    TokenPeriod       TokenType = TokenType('.')
+    TokenComma        TokenType = TokenType(',')
+    TokenSemicolon    TokenType = TokenType(';')
+    TokenColon        TokenType = TokenType(':')
+    TokenOpenParen    TokenType = TokenType('(')
+    TokenCloseParen   TokenType = TokenType(')')
+    TokenOpenBrace    TokenType = TokenType('{')
+    TokenCloseBrace   TokenType = TokenType('}')
+    TokenOpenBracket  TokenType = TokenType('[')
+    TokenCloseBracket TokenType = TokenType(']')
+
+    TokenNone         TokenType = 0
+    TokenError        TokenType = -1
+    TokenWord         TokenType = -2
+    TokenEOL          TokenType = -3
+    TokenEOF          TokenType = -4
+    TokenNumber       TokenType = -5
+    TokenOperator     TokenType = -6
+    TokenString       TokenType = -7
+    TokenSymbol       TokenType = -8
+    TokenFirstKeyword TokenType = -9
+    TokenArticle      TokenType = -10
+    TokenDo           TokenType = -11
+    TokenEnd          TokenType = -12
+    TokenDef          TokenType = -13
+    TokenPreposition  TokenType = -14
+    TokenVerb         TokenType = -15  
+    TokenNoun         TokenType = -16  
+    TokenAdverb       TokenType = -17  
+    TokenAdjective    TokenType = -18  
+    TokenLastKeyword  TokenType = -19
+    TokenLast         TokenType = -19
+)
+
+type Token struct {
+    TokenType
+    TokenText
+    Position
+}
+
+
+var tokenTypeMap map[TokenType]string = map[TokenType]string{
+    TokenNone:       "None",
+    TokenError:      "Error",
+    TokenWord:       "Word",
+    TokenEOL:        "EOL",
+    TokenEOF:        "EOF",
+    TokenNumber:     "Number",
+    TokenOperator:   "Operator",
+    TokenString:     "String",
+    TokenSymbol:     "Symbol",
+    TokenArticle:    "Article",
+    TokenPreposition: "Preposition",
+    TokenDo:         "Do",
+    TokenEnd:        "End",
+    TokenDef:        "Def",
+    TokenVerb:       "Verb",
+    TokenAdjective:  "Adjective",
+    TokenAdverb:     "Adverb",
+    TokenNoun:       "Noun",
+}
+
+var keywordMap map[string]TokenType = map[string]TokenType{
+    "a"                 : TokenArticle,
+    "an"                : TokenArticle,
+    "the"               : TokenArticle,
+    "do"                : TokenDo,
+    "begin"             : TokenDo,
+    "then"              : TokenDo,
+    "has"               : TokenDo,
+    "end"               : TokenEnd,
+    "done"              : TokenEnd,
+    "endif"             : TokenEnd,
+    "def"               : TokenDef,
+    "define"            : TokenDef,    
+    "aboard"            : TokenPreposition,
+    "about"             : TokenPreposition,
+    "above"             : TokenPreposition,
+    "absent"            : TokenPreposition,
+    "across"            : TokenPreposition,
+    "after"             : TokenPreposition,
+    "against"           : TokenPreposition,
+    "along"             : TokenPreposition,
+    "alongside"         : TokenPreposition,
+    "amid"              : TokenPreposition,
+    "amidst"            : TokenPreposition,
+    "among"             : TokenPreposition,
+    "apropos"           : TokenPreposition,
+    "apud"              : TokenPreposition,
+    "around"            : TokenPreposition,
+    "as"                : TokenPreposition,
+    "astride"           : TokenPreposition,
+    "at"                : TokenPreposition,
+    "atop"              : TokenPreposition,
+    "ontop"             : TokenPreposition,
+    "bar"               : TokenPreposition,
+    "before"            : TokenPreposition,
+    "behind"            : TokenPreposition,
+    "below"             : TokenPreposition,
+    "beneath"           : TokenPreposition,
+    "beside"            : TokenPreposition,
+    "besides"           : TokenPreposition,
+    "between"           : TokenPreposition,
+    "beyond"            : TokenPreposition,
+    "but"               : TokenPreposition,
+    "by"                : TokenPreposition,
+    "chez"              : TokenPreposition,
+    "circa"             : TokenPreposition,
+    "come"              : TokenPreposition,
+    "dehors"            : TokenPreposition,
+    "despite"           : TokenPreposition,
+    "down"              : TokenPreposition,
+    "during"            : TokenPreposition,
+    "except"            : TokenPreposition,
+    "for"               : TokenPreposition,
+    "from"              : TokenPreposition,
+    "in"                : TokenPreposition,
+    "inside"            : TokenPreposition,
+    "into"              : TokenPreposition,
+    "less"              : TokenPreposition,
+    "like"              : TokenPreposition,
+    "minus"             : TokenPreposition,
+    "near"              : TokenPreposition,
+    "nearer"            : TokenPreposition,
+    "nearest"           : TokenPreposition,
+    "notwithstanding"   : TokenPreposition,
+    "of"                : TokenPreposition,
+    "off"               : TokenPreposition,
+    "on"                : TokenPreposition,
+    "onto"              : TokenPreposition,
+    "opposite"          : TokenPreposition,
+    "out"               : TokenPreposition,
+    "outside"           : TokenPreposition,
+    "over"              : TokenPreposition,
+    "pace"              : TokenPreposition,
+    "past"              : TokenPreposition,
+    "per"               : TokenPreposition,
+    "post"              : TokenPreposition,
+    "pre"               : TokenPreposition,
+    "pro"               : TokenPreposition,
+    "qua"               : TokenPreposition,
+    "re"                : TokenPreposition,
+    "sans"              : TokenPreposition,
+    "save"              : TokenPreposition,
+    "short"             : TokenPreposition,
+    "since"             : TokenPreposition,
+    "than"              : TokenPreposition,
+    "through"           : TokenPreposition,
+    "thru"              : TokenPreposition,
+    "throughout"        : TokenPreposition,
+    "to"                : TokenPreposition,
+    "toward"            : TokenPreposition,
+    "towards"           : TokenPreposition,
+    "under"             : TokenPreposition,
+    "underneath"        : TokenPreposition,
+    "unlike"            : TokenPreposition,
+    "until"             : TokenPreposition,
+    "up"                : TokenPreposition,
+    "upon"              : TokenPreposition,
+    "upside"            : TokenPreposition,
+    "versus"            : TokenPreposition,
+    "via"               : TokenPreposition,
+    "vice"              : TokenPreposition,
+    "vis-à-vis"         : TokenPreposition,
+    "with"              : TokenPreposition,
+    "within"            : TokenPreposition,
+    "without"           : TokenPreposition,
+    "worth"             : TokenPreposition,    
+}
+
+var sigilMap map[string]TokenType = map[string]TokenType{
+    "[": TokenOpenBracket,
+    "{": TokenOpenBrace,
+    "(": TokenOpenParen,
+    "]": TokenCloseBracket,
+    "}": TokenCloseBrace,
+    ")": TokenCloseParen,
+}
+
+
+func (me TokenType) String() string {
+    name, found := tokenTypeMap[me]
+    if found {
+        return name
+    } else {
+        if (me > 0) && (me < 256) {
+            return fmt.Sprintf("Char<%c>", byte(me))
+        }
+        return fmt.Sprintf("Unknown Token %d", int(me))
+    }
+}
+
+func (me Token) String() string {
+    return fmt.Sprintf("Token: %s >%s< %d %d %d.", me.TokenType, string(me.TokenText), me.Index, me.Row, me.Column)
+}
+
+func (me Token) ShortString() string {
+    return fmt.Sprintf("T: %s >%s<", me.TokenType, string(me.TokenText))
+}
+
 
 /* The tokenizer splits up text in tokens without classifying Word tokens. */
 type Tokenizer struct {
@@ -80,7 +294,6 @@ func TokenizeSigil(tkz *Tokenizer) TokenizerRule {
     return TokenizeNormal
 }
 
-const tokenDelimiter = " \t\r\n'({[]})"
 
 
 func TokenizeWord(tkz *Tokenizer) TokenizerRule {
@@ -131,6 +344,7 @@ func TokenizeEOL(tkz *Tokenizer) TokenizerRule {
 
 func TokenizeOperator(tkz *Tokenizer) TokenizerRule {
     tkz.SkipIn(operator_chars)
+    tkz.SkipCurrentNotIn(tokenDelimiter)
     tkz.Found(TokenOperator)
     return TokenizeNormal
 }
@@ -170,6 +384,17 @@ func TokenizeNumberOrOperator(tkz *Tokenizer) TokenizerRule {
     }
 }
 
+func TokenizeEscapedNewline(tkz *Tokenizer) TokenizerRule {
+    tkz.SkipWhitespace()
+    peek := tkz.Peek()
+    if strings.ContainsRune("\n\r", peek) {
+        tkz.SkipIn("\n\r")
+    } else {
+        tkz.Error("Stray backslash character.")
+    }
+    return TokenizeNormal
+}
+
 func TokenizeNormal(tkz *Tokenizer) TokenizerRule {
     peek := tkz.Peek()
     if peek == '#' {
@@ -188,6 +413,8 @@ func TokenizeNormal(tkz *Tokenizer) TokenizerRule {
         return TokenizeNumberOrOperator
     } else if strings.ContainsRune("\"`", peek) {
         return TokenizeString
+    } else if strings.ContainsRune("\\", peek) {
+        return TokenizeEscapedNewline    
     } else if peek == '\000' {
         tkz.Emit(TokenEOF, "")
         return nil
@@ -252,8 +479,7 @@ func (tkz *Tokenizer) SkipIn(set string) bool {
     return true
 }
 
-func (tkz *Tokenizer) SkipNotIn(set string) bool {
-    _ = tkz.Next()
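+// SkipCurrentNotIn skips runes, starting with the current one, until a rune in set is found.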
+func (tkz *Tokenizer) SkipCurrentNotIn(set string) bool {
     for c := tkz.Peek() ;  !strings.ContainsRune(set,c) ; c = tkz.Next() {
         monolog.Debug("SkipNotIn: %c %s", c, tkz.Current())
         if c == '\000' {
@@ -263,6 +489,14 @@ func (tkz *Tokenizer) SkipNotIn(set string) bool {
     return true
 }
 
+
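+// SkipNotIn skips the next rune and every following rune not in set.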
+func (tkz *Tokenizer) SkipNotIn(set string) bool {
+    _ = tkz.Next()
+    return tkz.SkipCurrentNotIn(set)
+}
+
 func (tkz *Tokenizer) SkipWhile(should_skip func(r rune) bool) bool {
     for should_skip(tkz.Peek()) {
         if tkz.Next() == '\000' {

+ 7 - 4
raku/tokenizer_test.go

@@ -20,18 +20,21 @@ func HelperTryTokenizing(input string, test *testing.T) {
 
 func TestTokenizing1(test *testing.T) {
     const input = `
-say "hello \"world\\"
-
+say "hello \"world\""
+        
 define open a door do
     set (door's open) true
     let door 's open be true 
 end
 
-def increment variable by value do
+def increment variable by value do \
     ( variable = ( variable + value ) )
 end
 
-"
+: foo bar, baz, quux {
+  print foo, bar, baz, quux
+}
+
 `
     HelperTryTokenizing(input, test)
     test.Log("Hi test!")