|
@@ -2,6 +2,19 @@
|
|
|
|
|
|
|
|
|
|
|
|
+Desired simplified syntax:
|
|
|
+
|
|
|
+PROGRAM -> STATEMENTS.
|
|
|
+STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
+STATEMENT -> EXPRESSION eox | BLOCK .
|
|
|
+EXPRESSION -> word PARAMETERS.
|
|
|
+PARAMETERS -> PARAMETER PARAMETERS | .
|
|
|
+PARAMETER -> word | VALUE | PARENTHESIS | BLOCK | operator.
|
|
|
+PARENTHESIS -> bep PARAMETER eop .
|
|
|
+BLOCK -> bob STATEMENTS eob .
|
|
|
+VALUE -> string | long | double | symbol.
|
|
|
+
|
|
|
+
|
|
|
Desired syntax (verified LL(1) on smlweb.cpsc.ucalgary.ca)
|
|
|
|
|
|
PROGRAM -> STATEMENTS.
|
|
@@ -9,7 +22,7 @@ STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
STATEMENT -> EXPRESSION EOX | DEFINITION | BLOCK .
|
|
|
DEFINITION -> define WORDOP WORDOPS BLOCK.
|
|
|
WORDOPS -> WORDOP WORDOPS | .
|
|
|
-EXPRESSION -> WORDVALUE PARAMETERSS.
|
|
|
+EXPRESSION -> WORDVALUE PARAMETERS.
|
|
|
PARAMETERS -> PARAMETER PARAMETERS | .
|
|
|
PARAMETER -> WORDVALUE | PARENTHESIS | BLOCK | operator.
|
|
|
PARENTHESIS -> '(' EXPRESSION ')' | ot EXPRESSION ct.
|
|
@@ -22,1014 +35,1103 @@ EOX -> eol | period.
|
|
|
Lexer:
|
|
|
|
|
|
|
|
|
+Yet another syntax, which supports operators, but requires () to use them,
|
|
|
+and [ ] to indicate expressions inside expressions.
|
|
|
+
|
|
|
+PROGRAM -> STATEMENTS.
|
|
|
+
|
|
|
+STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
+
|
|
|
+STATEMENT ->
|
|
|
+ EXPRESSION EOX
|
|
|
+| BLOCK .
|
|
|
+
|
|
|
+EXPRESSION ->
|
|
|
+ CALL
|
|
|
+| PARENTHESIS
|
|
|
+| RECTANGLE
|
|
|
+.
|
|
|
+
|
|
|
+CALL -> WORD PARAMETERS .
|
|
|
+RECTANGLE -> '[' EXPRESSION ']' |
|
|
|
+with EXPRESSION end .
|
|
|
+PARENTHESIS -> '(' OPERATION ')'
|
|
|
+| let OPERATION end .
|
|
|
+OPERATION -> PARAMETER OPLIST .
|
|
|
+OPLIST -> OP OPLIST | .
|
|
|
+OP -> operator PARAMETER .
|
|
|
+PARAMETERS -> PARAMETER PARAMETERS | .
|
|
|
+PARAMETER -> WORDVALUE | BLOCK |
|
|
|
+PARENTHESIS | RECTANGLE .
|
|
|
+BLOCK -> '{' STATEMENTS '}' | do STATEMENTS end .
|
|
|
+WORDVALUE -> word | VALUE | a | the.
|
|
|
+VALUE -> string | number | symbol.
|
|
|
+EOX -> '\n' .
|
|
|
+
|
|
|
+
|
|
|
+Most simple "lisp but with less parenthesis" syntax:
|
|
|
+
|
|
|
+
|
|
|
+PROGRAM -> STATEMENTS.
|
|
|
+STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
+STATEMENT -> CALL | EOX | BLOCK .
|
|
|
+BLOCK -> '{' STATEMENTS '}' .
|
|
|
+CALL -> word PARAMETERS EOX .
|
|
|
+PARAMETERS -> PARAMETER PARAMETERS | .
|
|
|
+PARAMETER -> WORDVALUE | BLOCK .
|
|
|
+WORDVALUE -> word | VALUE .
|
|
|
+VALUE -> string | number | symbol.
|
|
|
+EOX -> '\n' .
|
|
|
+
|
|
|
+
|
|
|
+More TCL-ish syntax, which allows operators in () and forces evaluation of blocks in [].
|
|
|
+
|
|
|
+PROGRAM -> STATEMENTS.
|
|
|
+STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
+STATEMENT ->
|
|
|
+ COMMAND
|
|
|
+| SUBSTITUTION
|
|
|
+| BLOCK
|
|
|
+| EXPRESSION
|
|
|
+| EOX
|
|
|
+.
|
|
|
+
|
|
|
+BLOCK ->
|
|
|
+ '{' STATEMENTS '}'
|
|
|
+| do STATEMENTS end .
|
|
|
+
|
|
|
+SUBSTITUTION ->
|
|
|
+ '[' STATEMENTS ']'
|
|
|
+| evaluate STATEMENTS end .
|
|
|
+
|
|
|
+EXPRESSION ->
|
|
|
+ '(' EXPRBODY ')'
|
|
|
+| calculate EXPRBODY end .
|
|
|
+
|
|
|
+EXPRBODY -> OPERAND OPERANDS.
|
|
|
+OPERANDS -> operator OPERANDS | .
|
|
|
+OPERAND -> PARAMETER .
|
|
|
+
|
|
|
+COMMAND -> word PARAMETERS EOX .
|
|
|
+ARGUMENTS -> WORDVALUE ARGUMENTS | .
|
|
|
+PARAMETERS -> PARAMETER PARAMETERS | .
|
|
|
+PARAMETER -> WORDVALUE | SUBSTITUTION | EXPRESSION | BLOCK .
|
|
|
+WORDVALUE -> word | VALUE .
|
|
|
+VALUE -> string | number | symbol | true | false | nothing .
|
|
|
+EOX -> '\n' .
|
|
|
+
|
|
|
+set (door's state) to closed .
|
|
|
+
|
|
|
+
|
|
|
+# Type a grammar here:
|
|
|
+PROGRAM -> STATEMENTS.
|
|
|
+STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
+STATEMENT ->
|
|
|
+ COMMAND
|
|
|
+| SUBSTITUTION
|
|
|
+| BLOCK
|
|
|
+| EXPRESSION
|
|
|
+| EOX
|
|
|
+.
|
|
|
+
|
|
|
+BLOCK ->
|
|
|
+ '{' STATEMENTS '}'
|
|
|
+| do STATEMENTS end .
|
|
|
+
|
|
|
+SUBSTITUTION ->
|
|
|
+ '[' STATEMENTS ']'
|
|
|
+| evaluate STATEMENTS end .
|
|
|
+
|
|
|
+EXPRESSION ->
|
|
|
+ '(' EXPRBODY ')'
|
|
|
+| calculate EXPRBODY end .
|
|
|
+
|
|
|
+EXPRBODY -> OPERAND OPERANDS.
|
|
|
+OPERANDS -> operator OPERANDS | .
|
|
|
+OPERAND -> PARAMETER .
|
|
|
+
|
|
|
+COMMAND -> word PARAMETERS EOX .
|
|
|
+ARGUMENTS -> WORDVALUE ARGUMENTS | .
|
|
|
+PARAMETERS -> PARAMETER PARAMETERS | .
|
|
|
+PARAMETER -> WORDVALUE | SUBSTITUTION | EXPRESSION | BLOCK .
|
|
|
+WORDVALUE -> word | VALUE .
|
|
|
+VALUE -> string | number | symbol | true | false | nothing .
|
|
|
+EOX -> '\n' .
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+# Or, this grammar, also useful as a generic command parser for
|
|
|
+# AIF or MUx itself, though necessarily more complex.
|
|
|
+PROGRAM -> STATEMENTS.
|
|
|
+STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
+STATEMENT ->
|
|
|
+ COMMAND
|
|
|
+| SUBSTITUTION
|
|
|
+| BLOCK
|
|
|
+| EXPRESSION
|
|
|
+| EOX
|
|
|
+.
|
|
|
+
|
|
|
+BLOCK ->
|
|
|
+ '{' STATEMENTS '}'
|
|
|
+| do STATEMENTS end .
|
|
|
+
|
|
|
+SUBSTITUTION ->
|
|
|
+ '[' STATEMENTS ']'
|
|
|
+| evaluate STATEMENTS end .
|
|
|
+
|
|
|
+EXPRESSION ->
|
|
|
+ '(' EXPRBODY ')'
|
|
|
+| calculate EXPRBODY end .
|
|
|
+
|
|
|
+EXPRBODY -> OPERAND OPERANDS.
|
|
|
+OPERANDS -> operator OPERANDS | .
|
|
|
+OPERAND -> PARAMETER .
|
|
|
+
|
|
|
+COMMAND -> word ARGUMENTS EOX .
|
|
|
+ARGUMENTS -> ARGUMENT ARGUMENT_SEP ARGUMENTS | .
|
|
|
+ARGUMENT_SEP -> ',' | preposition | article | .
|
|
|
+ARGUMENT -> LITERAL | SUBSTITUTION | EXPRESSION | BLOCK .
|
|
|
+WORDLIT -> word | LITERAL .
|
|
|
+LITERAL -> string | number | symbol | true | false | nothing .
|
|
|
+EOX -> '\n' .
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+type Duration (is a) number
|
|
|
+( also could say a Duration (is a) number )
|
|
|
+
|
|
|
+type Effect (is an) integer
|
|
|
+
|
|
|
+constant No Effect is 1
|
|
|
+constant Healing Effect is an Effect which is 1
|
|
|
+the Damaging Effect is an Effect which is 2
|
|
|
+( the is another way to say constant / variable )
|
|
|
+
|
|
|
+type Spell (is a) record (which) has
|
|
|
+ (a) name (which is a/as a) String
|
|
|
+ (a) Duration
|
|
|
+ (an) Effect
|
|
|
+end
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+variable cure light is a Spell which has
|
|
|
+ name is "Cure Light"
|
|
|
+ Duration is Duration 0.0
|
|
|
+ Effect is Healing Effect
|
|
|
+end
|
|
|
+( could have been the cure light is a spell ... )
|
|
|
+
|
|
|
+to cast (a) Spell at (a) Being do
|
|
|
+( ... )
|
|
|
+end
|
|
|
+
|
|
|
+to cast (a) s which is a Spell at (a) b which is a Being do
|
|
|
+( ... )
|
|
|
+end
|
|
|
+
|
|
|
+to add n1 which is a Number to n2 which is a Number do
|
|
|
+
|
|
|
+end
|
|
|
+
|
|
|
+
|
|
|
+to add one Number to another Number do
|
|
|
+ one becomes one plus another
|
|
|
+end
|
|
|
+
|
|
|
+to duck do
|
|
|
+ let text be "You duck"
|
|
|
+ one becomes one plus another
|
|
|
+end
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+type spellike (is an) interface which has
|
|
|
+ cast (a) at Being
|
|
|
+end
|
|
|
+
|
|
|
+type Spell aliases spell
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+cast cure light at Ben
|
|
|
+
|
|
|
+
|
|
|
+English single-word prepositions:
|
|
|
+
|
|
|
+in
|
|
|
+aboard
|
|
|
+about
|
|
|
+above
|
|
|
+absent
|
|
|
+across
|
|
|
+after
|
|
|
+against
|
|
|
+along
|
|
|
+alongside
|
|
|
+amid
|
|
|
+amidst
|
|
|
+among
|
|
|
+apropos
|
|
|
+apud
|
|
|
+around
|
|
|
+as
|
|
|
+astride
|
|
|
+at
|
|
|
+on
|
|
|
+atop
|
|
|
+ontop
|
|
|
+bar
|
|
|
+before
|
|
|
+behind
|
|
|
+below
|
|
|
+beneath
|
|
|
+beside
|
|
|
+besides
|
|
|
+between
|
|
|
+beyond
|
|
|
+but
|
|
|
+by
|
|
|
+chez
|
|
|
+circa
|
|
|
+come
|
|
|
+dehors
|
|
|
+despite
|
|
|
+down
|
|
|
+during
|
|
|
+except
|
|
|
+for
|
|
|
+from
|
|
|
+in
|
|
|
+inside
|
|
|
+into
|
|
|
+less
|
|
|
+like
|
|
|
+minus
|
|
|
+near
|
|
|
+nearer
|
|
|
+nearest
|
|
|
+notwithstanding
|
|
|
+of
|
|
|
+off
|
|
|
+on
|
|
|
+onto
|
|
|
+opposite
|
|
|
+out
|
|
|
+outside
|
|
|
+over
|
|
|
+pace
|
|
|
+past
|
|
|
+per
|
|
|
+post
|
|
|
+pre
|
|
|
+pro
|
|
|
+qua
|
|
|
+re
|
|
|
+sans
|
|
|
+save
|
|
|
+short
|
|
|
+since
|
|
|
+than
|
|
|
+through
|
|
|
+thru
|
|
|
+throughout
|
|
|
+to
|
|
|
+toward
|
|
|
+towards
|
|
|
+under
|
|
|
+underneath
|
|
|
+unlike
|
|
|
+until
|
|
|
+up
|
|
|
+upon
|
|
|
+upside
|
|
|
+versus
|
|
|
+via
|
|
|
+vice
|
|
|
+vis-à-vis
|
|
|
+with
|
|
|
+within
|
|
|
+without
|
|
|
+worth
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
+
|
|
|
*/
|
|
|
package raku
|
|
|
|
|
|
import (
|
|
|
- "bytes"
|
|
|
- "fmt"
|
|
|
- "io"
|
|
|
- "reflect"
|
|
|
- "runtime"
|
|
|
- "strings"
|
|
|
- "unicode"
|
|
|
-
|
|
|
- "gitlab.com/beoran/woe/graphviz"
|
|
|
- "gitlab.com/beoran/woe/monolog"
|
|
|
- "gitlab.com/beoran/woe/tree"
|
|
|
+ "bytes"
|
|
|
+ "errors"
|
|
|
+ "fmt"
|
|
|
+ "io"
|
|
|
+ "reflect"
|
|
|
+ "runtime"
|
|
|
+ "strings"
|
|
|
+ "unicode"
|
|
|
+
|
|
|
+
|
|
|
+ "gitlab.com/beoran/woe/monolog"
|
|
|
+ "gitlab.com/beoran/woe/tree"
|
|
|
)
|
|
|
|
|
|
-type Value string
|
|
|
-type TokenType int64
|
|
|
|
|
|
-type Position struct {
|
|
|
- Index int
|
|
|
- Row int
|
|
|
- Column int
|
|
|
+type TokenChannel chan *Token
|
|
|
+
|
|
|
+type Lexer struct {
|
|
|
+ Reader io.Reader
|
|
|
+ Positions []Position
|
|
|
+ Token Token
|
|
|
+ rule LexerRule
|
|
|
+ Output TokenChannel
|
|
|
+ buffer []byte
|
|
|
+ runes []rune
|
|
|
}
|
|
|
|
|
|
-const (
|
|
|
- TokenPeriod TokenType = TokenType('.')
|
|
|
- TokenComma TokenType = TokenType(',')
|
|
|
- TokenSemicolon TokenType = TokenType(';')
|
|
|
- TokenColon TokenType = TokenType(':')
|
|
|
- TokenOpenParen TokenType = TokenType('(')
|
|
|
- TokenCloseParen TokenType = TokenType(')')
|
|
|
- TokenOpenBrace TokenType = TokenType('{')
|
|
|
- TokenCloseBrace TokenType = TokenType('}')
|
|
|
- TokenOpenBracket TokenType = TokenType('[')
|
|
|
- TokenCloseBracket TokenType = TokenType(']')
|
|
|
-
|
|
|
- TokenNone TokenType = 0
|
|
|
- TokenError TokenType = -1
|
|
|
- TokenWord TokenType = -2
|
|
|
- TokenEOL TokenType = -3
|
|
|
- TokenEOF TokenType = -4
|
|
|
- TokenNumber TokenType = -5
|
|
|
- TokenOperator TokenType = -6
|
|
|
- TokenString TokenType = -7
|
|
|
- TokenSymbol TokenType = -8
|
|
|
- TokenFirstKeyword TokenType = -9
|
|
|
- TokenKeywordA TokenType = -10
|
|
|
- TokenKeywordDo TokenType = -11
|
|
|
- TokenKeywordEnd TokenType = -12
|
|
|
- TokenKeywordThe TokenType = -13
|
|
|
- TokenKeywordDef TokenType = -14
|
|
|
- TokenLastKeyword TokenType = -15
|
|
|
- TokenLast TokenType = -15
|
|
|
-)
|
|
|
+type LexerRule func(lexer *Lexer) LexerRule
|
|
|
|
|
|
-type Token struct {
|
|
|
- TokenType
|
|
|
- Value
|
|
|
- Position
|
|
|
-}
|
|
|
-
|
|
|
-var tokenTypeMap map[TokenType]string = map[TokenType]string{
|
|
|
- TokenNone: "TokenNone",
|
|
|
- TokenError: "TokenError",
|
|
|
- TokenWord: "TokenWord",
|
|
|
- TokenEOL: "TokenEOL",
|
|
|
- TokenEOF: "TokenEOF",
|
|
|
- TokenNumber: "TokenNumber",
|
|
|
- TokenOperator: "TokenOperator",
|
|
|
- TokenString: "TokenString",
|
|
|
- TokenSymbol: "TokenSymbol",
|
|
|
- TokenKeywordA: "TokenKeywordA",
|
|
|
- TokenKeywordDo: "TokenKeywordDo",
|
|
|
- TokenKeywordEnd: "TokenKeywordEnd",
|
|
|
- TokenKeywordThe: "TokenKeywordThe",
|
|
|
- TokenKeywordDef: "TokenKeywordDef",
|
|
|
-}
|
|
|
-
|
|
|
-var keywordMap map[string]TokenType = map[string]TokenType{
|
|
|
- "a": TokenKeywordA,
|
|
|
- "an": TokenKeywordA,
|
|
|
- "do": TokenKeywordDo,
|
|
|
- "def": TokenKeywordDef,
|
|
|
- "define": TokenKeywordDef,
|
|
|
- "end": TokenKeywordEnd,
|
|
|
- "the": TokenKeywordThe,
|
|
|
-}
|
|
|
-
|
|
|
-var sigilMap map[string]TokenType = map[string]TokenType{
|
|
|
- "[": TokenOpenBracket,
|
|
|
- "{": TokenOpenBrace,
|
|
|
- "(": TokenOpenParen,
|
|
|
- "]": TokenCloseBracket,
|
|
|
- "}": TokenCloseBrace,
|
|
|
- ")": TokenCloseParen,
|
|
|
-}
|
|
|
-
|
|
|
-const operator_chars = "&|@'^-*%/+=<>~\\"
|
|
|
-
|
|
|
-func (me TokenType) String() string {
|
|
|
- name, found := tokenTypeMap[me]
|
|
|
- if found {
|
|
|
- return name
|
|
|
- } else {
|
|
|
- if (me > 0) && (me < 256) {
|
|
|
- return fmt.Sprintf("TokenChar<%c>", byte(me))
|
|
|
- }
|
|
|
- return fmt.Sprintf("Unknown Token %d", int(me))
|
|
|
- }
|
|
|
-}
|
|
|
-
|
|
|
-func (me Token) String() string {
|
|
|
- return fmt.Sprintf("Token: %s >%s< %d %d %d.", me.TokenType, string(me.Value), me.Index, me.Row, me.Column)
|
|
|
+func (me *Lexer) Last() Position {
|
|
|
+ return me.Positions[1]
|
|
|
}
|
|
|
|
|
|
-type TokenChannel chan *Token
|
|
|
+func (me *Lexer) Current() Position {
|
|
|
+ return me.Positions[0]
|
|
|
+}
|
|
|
|
|
|
-type Lexer struct {
|
|
|
- Reader io.Reader
|
|
|
- Current Position
|
|
|
- Last Position
|
|
|
- Token Token
|
|
|
- rule LexerRule
|
|
|
- Output TokenChannel
|
|
|
- buffer []byte
|
|
|
- runes []rune
|
|
|
+func (me *Lexer) LastPtr() * Position {
|
|
|
+ return &me.Positions[1]
|
|
|
}
|
|
|
|
|
|
-type LexerRule func(lexer *Lexer) LexerRule
|
|
|
+func (me *Lexer) CurrentPtr() * Position {
|
|
|
+ return &me.Positions[0]
|
|
|
+}
|
|
|
|
|
|
-func (me *Lexer) Emit(t TokenType, v Value) {
|
|
|
- tok := &Token{t, v, me.Current}
|
|
|
- me.Output <- tok
|
|
|
+
|
|
|
+func (me *Lexer) PushPosition(pos Position) {
|
|
|
+ newpos := make([]Position, len(me.Positions) + 1)
|
|
|
+ newpos[0] = pos
|
|
|
+ for i := 1 ; i < len(me.Positions); i++ {
|
|
|
+ newpos[i] = me.Positions[i-1]
|
|
|
+ }
|
|
|
+ me.Positions = newpos
|
|
|
+}
|
|
|
+
|
|
|
+func (me *Lexer) PushCurrentPosition() {
|
|
|
+ current := me.Current()
|
|
|
+ me.PushPosition(current)
|
|
|
+}
|
|
|
+
|
|
|
+func (me *Lexer) PopPosition() * Position {
|
|
|
+ if (len(me.Positions) <= 2) {
|
|
|
+ return nil
|
|
|
+ }
|
|
|
+
|
|
|
+ result := &me.Positions[0];
|
|
|
+ newpos := make([]Position, len(me.Positions) - 1)
|
|
|
+ for i := 1 ; i < len(me.Positions); i++ {
|
|
|
+ newpos[i-1] = me.Positions[i]
|
|
|
+ }
|
|
|
+ me.Positions = newpos
|
|
|
+ return result
|
|
|
+}
|
|
|
+
|
|
|
+
|
|
|
+func (me *Lexer) Emit(t TokenType, v TokenText) {
|
|
|
+ tok := &Token{t, v, me.Current()}
|
|
|
+ me.Output <- tok
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Error(message string, args ...interface{}) {
|
|
|
- value := fmt.Sprintf(message, args...)
|
|
|
- monolog.Error("Lex Error: %s", value)
|
|
|
- me.Emit(TokenError, Value(value))
|
|
|
+ value := fmt.Sprintf(message, args...)
|
|
|
+ monolog.Error("Lex Error: %s", value)
|
|
|
+ me.Emit(TokenError, TokenText(value))
|
|
|
}
|
|
|
|
|
|
func LexError(me *Lexer) LexerRule {
|
|
|
- me.Error("Error")
|
|
|
- return nil
|
|
|
+ me.Error("Error")
|
|
|
+ return nil
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) SkipComment() bool {
|
|
|
- if me.Peek() == '#' {
|
|
|
- if me.Next() == '(' {
|
|
|
- return me.SkipNotIn(")")
|
|
|
- } else {
|
|
|
- return me.SkipNotIn("\r\n")
|
|
|
- }
|
|
|
- }
|
|
|
- return true
|
|
|
+ if me.Peek() == '#' {
|
|
|
+ if me.Next() == '(' {
|
|
|
+ return me.SkipNotIn(")")
|
|
|
+ } else {
|
|
|
+ return me.SkipNotIn("\r\n")
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return true
|
|
|
}
|
|
|
|
|
|
|
|
|
of the keyword.*/
|
|
|
func LookupKeyword(word string) (bool, TokenType) {
|
|
|
- kind, found := keywordMap[word]
|
|
|
- return found, kind
|
|
|
+ kind, found := keywordMap[word]
|
|
|
+ return found, kind
|
|
|
}
|
|
|
|
|
|
|
|
|
returns the TokenType of the sigil.*/
|
|
|
func LookupSigil(sigil string) (bool, TokenType) {
|
|
|
- fmt.Printf("LookupSigil: %s\n", sigil)
|
|
|
- kind, found := sigilMap[sigil]
|
|
|
- return found, kind
|
|
|
+ fmt.Printf("LookupSigil: %s\n", sigil)
|
|
|
+ kind, found := sigilMap[sigil]
|
|
|
+ return found, kind
|
|
|
}
|
|
|
|
|
|
func LexSigil(me *Lexer) LexerRule {
|
|
|
- me.Found(TokenType(me.Peek()))
|
|
|
- _ = me.Next()
|
|
|
- me.Advance()
|
|
|
- return LexNormal
|
|
|
+ me.Found(TokenType(me.Peek()))
|
|
|
+ _ = me.Next()
|
|
|
+ me.Advance()
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexWord(me *Lexer) LexerRule {
|
|
|
- me.SkipNotIn(" \t\r\n'({[]})")
|
|
|
+ me.SkipNotIn(" \t\r\n'({[]})")
|
|
|
|
|
|
- iskw, kind := LookupKeyword(me.CurrentStringValue())
|
|
|
- if iskw {
|
|
|
- me.Found(kind)
|
|
|
- } else {
|
|
|
- me.Found(TokenWord)
|
|
|
- }
|
|
|
- return LexNormal
|
|
|
+ iskw, kind := LookupKeyword(me.CurrentStringValue())
|
|
|
+ if iskw {
|
|
|
+ me.Found(kind)
|
|
|
+ } else {
|
|
|
+ me.Found(TokenWord)
|
|
|
+ }
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexSymbol(me *Lexer) LexerRule {
|
|
|
- me.SkipNotIn(" \t\r\n'({[]})")
|
|
|
- me.Found(TokenSymbol)
|
|
|
- return LexNormal
|
|
|
+ me.SkipNotIn(" \t\r\n'({[]})")
|
|
|
+ me.Found(TokenSymbol)
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexNumber(me *Lexer) LexerRule {
|
|
|
- me.SkipNotIn(" \t\r\n'({[]})")
|
|
|
- me.Found(TokenNumber)
|
|
|
- return LexNormal
|
|
|
+ me.SkipNotIn(" \t\r\n'({[]})")
|
|
|
+ me.Found(TokenNumber)
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexWhitespace(me *Lexer) LexerRule {
|
|
|
- me.SkipWhitespace()
|
|
|
- me.Advance()
|
|
|
- return LexNormal
|
|
|
+ me.SkipWhitespace()
|
|
|
+ me.Advance()
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexComment(me *Lexer) LexerRule {
|
|
|
- if !me.SkipComment() {
|
|
|
- me.Error("Unterminated comment")
|
|
|
- return LexError
|
|
|
- }
|
|
|
- me.Advance()
|
|
|
- return LexNormal
|
|
|
+ if !me.SkipComment() {
|
|
|
+ me.Error("Unterminated comment")
|
|
|
+ return LexError
|
|
|
+ }
|
|
|
+ me.Advance()
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexPunctuator(me *Lexer) LexerRule {
|
|
|
- me.Found(TokenType(me.Peek()))
|
|
|
- me.Advance()
|
|
|
- return LexNormal
|
|
|
+ me.Found(TokenType(me.Peek()))
|
|
|
+ _ = me.Next()
|
|
|
+ me.Advance()
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexEOL(me *Lexer) LexerRule {
|
|
|
- me.SkipIn("\r\n")
|
|
|
- me.Found(TokenEOL)
|
|
|
- return LexNormal
|
|
|
+ me.SkipIn("\r\n")
|
|
|
+ me.Found(TokenEOL)
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func LexOperator(me *Lexer) LexerRule {
|
|
|
- me.SkipIn(operator_chars)
|
|
|
- me.Found(TokenOperator)
|
|
|
- return LexNormal
|
|
|
+ me.SkipIn(operator_chars)
|
|
|
+ me.Found(TokenOperator)
|
|
|
+ return LexNormal
|
|
|
}
|
|
|
|
|
|
func lexEscape(me *Lexer) error {
|
|
|
- _ = me.Next()
|
|
|
- return nil
|
|
|
+ _ = me.Next()
|
|
|
+ return nil
|
|
|
}
|
|
|
|
|
|
func LexString(me *Lexer) LexerRule {
|
|
|
- open := me.Peek()
|
|
|
- do_escape := open == '"'
|
|
|
- peek := me.Next()
|
|
|
- me.Advance()
|
|
|
- for ; peek != '\000'; peek = me.Next() {
|
|
|
- if do_escape && peek == '\\' {
|
|
|
- if err := lexEscape(me); err != nil {
|
|
|
- return LexError
|
|
|
- }
|
|
|
- } else if peek == open {
|
|
|
- me.Found(TokenString)
|
|
|
- _ = me.Next()
|
|
|
- me.Advance()
|
|
|
- return LexNormal
|
|
|
- }
|
|
|
- }
|
|
|
- me.Error("Unexpected EOF in string.")
|
|
|
- return nil
|
|
|
+ open := me.Peek()
|
|
|
+ do_escape := open == '"'
|
|
|
+ peek := me.Next()
|
|
|
+ me.Advance()
|
|
|
+ for ; peek != '\000'; peek = me.Next() {
|
|
|
+ if do_escape && peek == '\\' {
|
|
|
+ if err := lexEscape(me); err != nil {
|
|
|
+ return LexError
|
|
|
+ }
|
|
|
+ } else if peek == open {
|
|
|
+ me.Found(TokenString)
|
|
|
+ _ = me.Next()
|
|
|
+ me.Advance()
|
|
|
+ return LexNormal
|
|
|
+ }
|
|
|
+ }
|
|
|
+ me.Error("Unexpected EOF in string.")
|
|
|
+ return nil
|
|
|
}
|
|
|
|
|
|
func LexNumberOrOperator(me *Lexer) LexerRule {
|
|
|
- if unicode.IsDigit(me.Next()) {
|
|
|
- return LexNumber
|
|
|
- } else {
|
|
|
- _ = me.Previous()
|
|
|
- return LexOperator
|
|
|
- }
|
|
|
+ if unicode.IsDigit(me.Next()) {
|
|
|
+ return LexNumber
|
|
|
+ } else {
|
|
|
+ _ = me.Previous()
|
|
|
+ return LexOperator
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
func LexNormal(me *Lexer) LexerRule {
|
|
|
- peek := me.Peek()
|
|
|
- if peek == '#' {
|
|
|
- return LexComment
|
|
|
- } else if strings.ContainsRune(" \t", peek) {
|
|
|
- return LexWhitespace
|
|
|
- } else if strings.ContainsRune(".,;:", peek) {
|
|
|
- return LexPunctuator
|
|
|
- } else if strings.ContainsRune("([{}])", peek) {
|
|
|
- return LexSigil
|
|
|
- } else if strings.ContainsRune("$", peek) {
|
|
|
- return LexSymbol
|
|
|
- } else if strings.ContainsRune("\r\n", peek) {
|
|
|
- return LexEOL
|
|
|
- } else if strings.ContainsRune("+-", peek) {
|
|
|
- return LexNumberOrOperator
|
|
|
- } else if strings.ContainsRune("\"`", peek) {
|
|
|
- return LexString
|
|
|
- } else if peek == '\000' {
|
|
|
- me.Emit(TokenEOF, "")
|
|
|
- return nil
|
|
|
- } else if unicode.IsLetter(peek) {
|
|
|
- return LexWord
|
|
|
- } else if unicode.IsDigit(peek) {
|
|
|
- return LexNumber
|
|
|
- } else if strings.ContainsRune(operator_chars, peek) {
|
|
|
- return LexOperator
|
|
|
- } else {
|
|
|
- return LexError
|
|
|
- }
|
|
|
+ peek := me.Peek()
|
|
|
+ if peek == '#' {
|
|
|
+ return LexComment
|
|
|
+ } else if strings.ContainsRune(" \t", peek) {
|
|
|
+ return LexWhitespace
|
|
|
+ } else if strings.ContainsRune(".,;:", peek) {
|
|
|
+ return LexPunctuator
|
|
|
+ } else if strings.ContainsRune("([{}])", peek) {
|
|
|
+ return LexSigil
|
|
|
+ } else if strings.ContainsRune("$", peek) {
|
|
|
+ return LexSymbol
|
|
|
+ } else if strings.ContainsRune("\r\n", peek) {
|
|
|
+ return LexEOL
|
|
|
+ } else if strings.ContainsRune("+-", peek) {
|
|
|
+ return LexNumberOrOperator
|
|
|
+ } else if strings.ContainsRune("\"`", peek) {
|
|
|
+ return LexString
|
|
|
+ } else if peek == '\000' {
|
|
|
+ me.Emit(TokenEOF, "")
|
|
|
+ return nil
|
|
|
+ } else if unicode.IsLetter(peek) {
|
|
|
+ return LexWord
|
|
|
+ } else if unicode.IsDigit(peek) {
|
|
|
+ return LexNumber
|
|
|
+ } else if strings.ContainsRune(operator_chars, peek) {
|
|
|
+ return LexOperator
|
|
|
+ } else {
|
|
|
+ return LexError
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
func OpenLexer(reader io.Reader) *Lexer {
|
|
|
- lexer := &Lexer{}
|
|
|
- lexer.Reader = reader
|
|
|
- lexer.Output = make(TokenChannel)
|
|
|
-
|
|
|
- return lexer
|
|
|
+ lexer := &Lexer{}
|
|
|
+ lexer.Reader = reader
|
|
|
+ lexer.Output = make(TokenChannel)
|
|
|
+ lexer.Positions = make([]Position, 2)
|
|
|
+
|
|
|
+ return lexer
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) ReadReaderOnce() (bool, error) {
|
|
|
- buffer := make([]byte, 1024)
|
|
|
+ buffer := make([]byte, 1024)
|
|
|
|
|
|
- n, err := me.Reader.Read(buffer)
|
|
|
- monolog.Debug("read %v %d %v\n", buffer[:n], n, err)
|
|
|
- if n > 0 {
|
|
|
- me.buffer = append(me.buffer, buffer[:n]...)
|
|
|
- monolog.Debug("append %s", me.buffer)
|
|
|
- }
|
|
|
+ n, err := me.Reader.Read(buffer)
|
|
|
+ monolog.Debug("read %v %d %v\n", buffer[:n], n, err)
|
|
|
+ if n > 0 {
|
|
|
+ me.buffer = append(me.buffer, buffer[:n]...)
|
|
|
+ monolog.Debug("append %s", me.buffer)
|
|
|
+ }
|
|
|
|
|
|
- if err == io.EOF {
|
|
|
- return true, nil
|
|
|
- } else if err != nil {
|
|
|
- me.Error("Error reading from reader: %s", err)
|
|
|
- return true, err
|
|
|
- }
|
|
|
- return false, nil
|
|
|
+ if err == io.EOF {
|
|
|
+ return true, nil
|
|
|
+ } else if err != nil {
|
|
|
+ me.Error("Error reading from reader: %s", err)
|
|
|
+ return true, err
|
|
|
+ }
|
|
|
+ return false, nil
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) ReadReader() error {
|
|
|
- me.buffer = make([]byte, 0)
|
|
|
- more, err := me.ReadReaderOnce()
|
|
|
- for err == nil && more {
|
|
|
- more, err = me.ReadReaderOnce()
|
|
|
- }
|
|
|
- me.runes = bytes.Runes(me.buffer)
|
|
|
+ me.buffer = make([]byte, 0)
|
|
|
+ more, err := me.ReadReaderOnce()
|
|
|
+ for err == nil && more {
|
|
|
+ more, err = me.ReadReaderOnce()
|
|
|
+ }
|
|
|
+ me.runes = bytes.Runes(me.buffer)
|
|
|
|
|
|
- return err
|
|
|
+ return err
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Peek() rune {
|
|
|
- if (me.Current.Index) >= len(me.runes) {
|
|
|
- return '\000'
|
|
|
- }
|
|
|
- return me.runes[me.Current.Index]
|
|
|
+ if (me.Current().Index) >= len(me.runes) {
|
|
|
+ return '\000'
|
|
|
+ }
|
|
|
+ return me.runes[me.Current().Index]
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) PeekNext() rune {
|
|
|
- if (me.Current.Index + 1) >= len(me.runes) {
|
|
|
- return '\000'
|
|
|
- }
|
|
|
- return me.runes[me.Current.Index+1]
|
|
|
+ if (me.Current().Index + 1) >= len(me.runes) {
|
|
|
+ return '\000'
|
|
|
+ }
|
|
|
+ return me.runes[me.Current().Index+1]
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Next() rune {
|
|
|
- if me.Peek() == '\n' {
|
|
|
- me.Current.Column = 0
|
|
|
- me.Current.Row++
|
|
|
- }
|
|
|
- me.Current.Index++
|
|
|
- if me.Current.Index >= len(me.runes) {
|
|
|
-
|
|
|
- }
|
|
|
- return me.Peek()
|
|
|
+ if me.Peek() == '\n' {
|
|
|
+ me.CurrentPtr().Column = 0
|
|
|
+ me.CurrentPtr().Row++
|
|
|
+ }
|
|
|
+ me.CurrentPtr().Index++
|
|
|
+ if me.Current().Index >= len(me.runes) {
|
|
|
+
|
|
|
+ }
|
|
|
+ return me.Peek()
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Previous() rune {
|
|
|
- if me.Current.Index > 0 {
|
|
|
- me.Current.Index--
|
|
|
+ if me.Current().Index > 0 {
|
|
|
+ me.CurrentPtr().Index--
|
|
|
|
|
|
- if me.Peek() == '\n' {
|
|
|
- me.Current.Column = 0
|
|
|
- me.Current.Row++
|
|
|
- }
|
|
|
- }
|
|
|
- return me.Peek()
|
|
|
+ if me.Peek() == '\n' {
|
|
|
+ me.CurrentPtr().Column = 0
|
|
|
+ me.CurrentPtr().Row++
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return me.Peek()
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) SkipRune() {
|
|
|
- _ = me.Next()
|
|
|
+ _ = me.Next()
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) SkipIn(set string) bool {
|
|
|
- for strings.ContainsRune(set, me.Next()) {
|
|
|
- monolog.Debug("SkipIn: %s %c\n", set, me.Peek())
|
|
|
- if me.Peek() == '\000' {
|
|
|
- return false
|
|
|
- }
|
|
|
- }
|
|
|
- return true
|
|
|
+ for strings.ContainsRune(set, me.Next()) {
|
|
|
+ monolog.Debug("SkipIn: %s %c\n", set, me.Peek())
|
|
|
+ if me.Peek() == '\000' {
|
|
|
+ return false
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return true
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) SkipNotIn(set string) bool {
|
|
|
- _ = me.Next()
|
|
|
- for !strings.ContainsRune(set, me.Peek()) {
|
|
|
- if me.Next() == '\000' {
|
|
|
- return false
|
|
|
- }
|
|
|
- }
|
|
|
- return true
|
|
|
+ _ = me.Next()
|
|
|
+ for !strings.ContainsRune(set, me.Peek()) {
|
|
|
+ if me.Next() == '\000' {
|
|
|
+ return false
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return true
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) SkipWhile(should_skip func(r rune) bool) bool {
|
|
|
- for should_skip(me.Peek()) {
|
|
|
- if me.Next() == '\000' {
|
|
|
- return false
|
|
|
- }
|
|
|
- }
|
|
|
- return true
|
|
|
+ for should_skip(me.Peek()) {
|
|
|
+ if me.Next() == '\000' {
|
|
|
+ return false
|
|
|
+ }
|
|
|
+ }
|
|
|
+ return true
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) SkipWhitespace() {
|
|
|
- me.SkipIn(" \t")
|
|
|
+ me.SkipIn(" \t")
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Advance() {
|
|
|
- me.Last = me.Current
|
|
|
+ (*me.LastPtr()) = me.Current()
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Rewind() {
|
|
|
- me.Current = me.Last
|
|
|
+ (*me.CurrentPtr()) = me.Last()
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) CurrentRuneValue() []rune {
|
|
|
- return me.runes[me.Last.Index:me.Current.Index]
|
|
|
+ return me.runes[me.Last().Index:me.Current().Index]
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) CurrentStringValue() string {
|
|
|
- return string(me.CurrentRuneValue())
|
|
|
+ return string(me.CurrentRuneValue())
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Found(kind TokenType) {
|
|
|
- me.Emit(kind, Value(me.CurrentStringValue()))
|
|
|
- me.Advance()
|
|
|
+ me.Emit(kind, TokenText(me.CurrentStringValue()))
|
|
|
+ me.Advance()
|
|
|
}
|
|
|
|
|
|
func GetFunctionName(fun interface{}) string {
|
|
|
- return runtime.FuncForPC(reflect.ValueOf(fun).Pointer()).Name()
|
|
|
+ return runtime.FuncForPC(reflect.ValueOf(fun).Pointer()).Name()
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) Start() {
|
|
|
- if err := me.ReadReader(); err == nil || err == io.EOF {
|
|
|
- rule := LexNormal
|
|
|
- for rule != nil {
|
|
|
- monolog.Debug("Lexer Rule: %s\n", GetFunctionName(rule))
|
|
|
- rule = rule(me)
|
|
|
- }
|
|
|
- } else {
|
|
|
- me.Error("Could not read in input buffer: %s", err)
|
|
|
- }
|
|
|
- close(me.Output)
|
|
|
+ if err := me.ReadReader(); err == nil || err == io.EOF {
|
|
|
+ rule := LexNormal
|
|
|
+ for rule != nil {
|
|
|
+ monolog.Debug("Lexer Rule: %s\n", GetFunctionName(rule))
|
|
|
+ rule = rule(me)
|
|
|
+ }
|
|
|
+ } else {
|
|
|
+ me.Error("Could not read in input buffer: %s", err)
|
|
|
+ }
|
|
|
+ close(me.Output)
|
|
|
}
|
|
|
|
|
|
func (me *Lexer) TryLexing() {
|
|
|
- go me.Start()
|
|
|
+ go me.Start()
|
|
|
|
|
|
- for token := range me.Output {
|
|
|
- monolog.Info("Token %s", token)
|
|
|
- }
|
|
|
+ for token := range me.Output {
|
|
|
+ monolog.Info("Token %s", token)
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
type AstType int
|
|
|
|
|
|
const (
|
|
|
- AstTypeProgram = AstType(iota)
|
|
|
- AstTypeStatements
|
|
|
- AstTypeStatement
|
|
|
- AstTypeDefinition
|
|
|
- AstTypeWords
|
|
|
- AstTypeExpression
|
|
|
- AstTypeWordExpression
|
|
|
- AstTypeWordCallop
|
|
|
- AstTypeOperation
|
|
|
- AstTypeOperations
|
|
|
- AstTypeCallArgs
|
|
|
- AstTypeValueExpression
|
|
|
- AstTypeValueCallop
|
|
|
- AstTypeParametersNonempty
|
|
|
- AstTypeParameters
|
|
|
- AstTypeParameter
|
|
|
- AstTypeBlock
|
|
|
- AstTypeWordValue
|
|
|
- AstTypeWord
|
|
|
- AstTypeValue
|
|
|
- AstTypeEox
|
|
|
- AstTypeOperator
|
|
|
- AstTypeParenthesis
|
|
|
- AstTypeModifier
|
|
|
- AstTypeError
|
|
|
+ AstTypeProgram = AstType(iota)
|
|
|
+ AstTypeStatements
|
|
|
+ AstTypeStatement
|
|
|
+ AstTypeDefinition
|
|
|
+ AstTypeWords
|
|
|
+ AstTypeExpression
|
|
|
+ AstTypeWordExpression
|
|
|
+ AstTypeWordCallop
|
|
|
+ AstTypeOperation
|
|
|
+ AstTypeOperations
|
|
|
+ AstTypeCallArgs
|
|
|
+ AstTypeValueExpression
|
|
|
+ AstTypeValueCallop
|
|
|
+ AstTypeParametersNonempty
|
|
|
+ AstTypeParameters
|
|
|
+ AstTypeParameter
|
|
|
+ AstTypeBlock
|
|
|
+ AstTypeWordValue
|
|
|
+ AstTypeWord
|
|
|
+ AstTypeValue
|
|
|
+ AstTypeEox
|
|
|
+ AstTypeOperator
|
|
|
+ AstTypeParenthesis
|
|
|
+ AstTypeModifier
|
|
|
+ AstTypeError
|
|
|
)
|
|
|
|
|
|
var astTypeMap map[AstType]string = map[AstType]string{
|
|
|
- AstTypeProgram: "AstTypeProgram",
|
|
|
- AstTypeStatements: "AstTypeStatements",
|
|
|
- AstTypeStatement: "AstTypeStatement:",
|
|
|
- AstTypeDefinition: "AstTypeDefinition",
|
|
|
- AstTypeWords: "AstTypeWords",
|
|
|
- AstTypeExpression: "AstTypeExpression",
|
|
|
- AstTypeWordExpression: "AstTypeWordExpression",
|
|
|
- AstTypeWordCallop: "AstTypeWordCallop",
|
|
|
- AstTypeOperation: "AstTypeOperation",
|
|
|
- AstTypeOperations: "AstTypeOperations",
|
|
|
- AstTypeCallArgs: "AstTypeCallArgs",
|
|
|
- AstTypeValueExpression: "AstTypeValueExpression",
|
|
|
- AstTypeValueCallop: "AstTypeValueCallop",
|
|
|
- AstTypeParametersNonempty: "AstTypeParametersNonempty",
|
|
|
- AstTypeParameters: "AstTypeParameters",
|
|
|
- AstTypeParameter: "AstTypeParameter",
|
|
|
- AstTypeBlock: "AstTypeBlock",
|
|
|
- AstTypeWordValue: "AstTypeWordValue",
|
|
|
- AstTypeWord: "AstTypeWord",
|
|
|
- AstTypeValue: "AstTypeValue",
|
|
|
- AstTypeEox: "AstTypeEox",
|
|
|
- AstTypeOperator: "AstTypeOperator",
|
|
|
- AstTypeParenthesis: "AstTypeParenthesis",
|
|
|
- AstTypeModifier: "AstTypeModifier",
|
|
|
- AstTypeError: "AstTypeError",
|
|
|
+ AstTypeProgram: "Program",
|
|
|
+ AstTypeStatements: "Statements",
|
|
|
+ AstTypeStatement: "Statement",
|
|
|
+ AstTypeDefinition: "Definition",
|
|
|
+ AstTypeWords: "Words",
|
|
|
+ AstTypeExpression: "Expression",
|
|
|
+ AstTypeWordExpression: "WordExpression",
|
|
|
+ AstTypeWordCallop: "WordCallop",
|
|
|
+ AstTypeOperation: "Operation",
|
|
|
+ AstTypeOperations: "Operations",
|
|
|
+ AstTypeCallArgs: "CallArgs",
|
|
|
+ AstTypeValueExpression: "ValueExpression",
|
|
|
+ AstTypeValueCallop: "ValueCallop",
|
|
|
+ AstTypeParametersNonempty: "ParametersNonempty",
|
|
|
+ AstTypeParameters: "Parameters",
|
|
|
+ AstTypeParameter: "Parameter",
|
|
|
+ AstTypeBlock: "Block",
|
|
|
+ AstTypeWordValue: "WordValue",
|
|
|
+ AstTypeWord: "Word",
|
|
|
+ AstTypeValue: "Value",
|
|
|
+ AstTypeEox: "Eox",
|
|
|
+ AstTypeOperator: "Operator",
|
|
|
+ AstTypeParenthesis: "Parenthesis",
|
|
|
+ AstTypeModifier: "Modifier",
|
|
|
+ AstTypeError: "Error",
|
|
|
}
|
|
|
|
|
|
func (me AstType) String() string {
|
|
|
- name, found := astTypeMap[me]
|
|
|
- if found {
|
|
|
- return name
|
|
|
- } else {
|
|
|
- return fmt.Sprintf("Unknown AstType %d", int(me))
|
|
|
- }
|
|
|
+ name, found := astTypeMap[me]
|
|
|
+ if found {
|
|
|
+ return name
|
|
|
+ } else {
|
|
|
+ return fmt.Sprintf("Unknown AstType %d", int(me))
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
type Ast struct {
|
|
|
- tree.Node
|
|
|
- AstType
|
|
|
- *Token
|
|
|
+ tree.Node
|
|
|
+ AstType
|
|
|
+ *Token
|
|
|
+}
|
|
|
+
|
|
|
+func (me *Ast) Run(run *Runtime) (*Value, error) {
|
|
|
+ switch me.AstType {
|
|
|
+ case AstTypeProgram:
|
|
|
+ return me.RunProgram(run)
|
|
|
+ case AstTypeStatements:
|
|
|
+ return me.RunStatements(run)
|
|
|
+ case AstTypeStatement:
|
|
|
+ return me.RunStatement(run)
|
|
|
+ case AstTypeDefinition:
|
|
|
+ return me.RunDefinition(run)
|
|
|
+ case AstTypeWords:
|
|
|
+ return me.RunWords(run)
|
|
|
+ case AstTypeExpression:
|
|
|
+ return me.RunExpression(run)
|
|
|
+ case AstTypeWordExpression:
|
|
|
+ return me.RunWordExpression(run)
|
|
|
+ case AstTypeWordCallop:
|
|
|
+ return me.RunWordCallop(run)
|
|
|
+ case AstTypeOperation:
|
|
|
+ return me.RunOperation(run)
|
|
|
+ case AstTypeOperations:
|
|
|
+ return me.RunOperations(run)
|
|
|
+ case AstTypeCallArgs:
|
|
|
+ return me.RunCallArgs(run)
|
|
|
+ case AstTypeValueExpression:
|
|
|
+ return me.RunValueExpression(run)
|
|
|
+ case AstTypeValueCallop:
|
|
|
+ return me.RunValueCallop(run)
|
|
|
+ case AstTypeParametersNonempty:
|
|
|
+ return me.RunParametersNonempty(run)
|
|
|
+ case AstTypeParameters:
|
|
|
+ return me.RunParameters(run)
|
|
|
+ case AstTypeParameter:
|
|
|
+ return me.RunParameter(run)
|
|
|
+ case AstTypeBlock:
|
|
|
+ return me.RunBlock(run)
|
|
|
+ case AstTypeWordValue:
|
|
|
+ return me.RunWordValue(run)
|
|
|
+ case AstTypeWord:
|
|
|
+ return me.RunWord(run)
|
|
|
+ case AstTypeValue:
|
|
|
+ return me.RunValue(run)
|
|
|
+ case AstTypeEox:
|
|
|
+ return me.RunEox(run)
|
|
|
+ case AstTypeOperator:
|
|
|
+ return me.RunOperator(run)
|
|
|
+ case AstTypeParenthesis:
|
|
|
+ return me.RunParenthesis(run)
|
|
|
+ case AstTypeModifier:
|
|
|
+ return me.RunModifier(run)
|
|
|
+ case AstTypeError:
|
|
|
+ return me.RunError(run)
|
|
|
+ default:
|
|
|
+ return nil, errors.New("Shouldn't happen")
|
|
|
+ }
|
|
|
+}
|
|
|
+
|
|
|
+func (me *Ast) RunProgram(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunStatements(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunStatement(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunDefinition(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunWords(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunExpression(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunWordExpression(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunWordCallop(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunOperation(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunOperations(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunCallArgs(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunValueExpression(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunValueCallop(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunParametersNonempty(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunParameters(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunParameter(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunBlock(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunWordValue(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunWord(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunValue(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunEox(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunOperator(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunParenthesis(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunModifier(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
+}
|
|
|
+func (me *Ast) RunError(run *Runtime) (*Value, error) {
|
|
|
+ return nil, errors.New("Not implemented")
|
|
|
}
|
|
|
|
|
|
func (me *Ast) NewChild(kind AstType, token *Token) *Ast {
|
|
|
- child := &Ast{}
|
|
|
- child.AstType = kind
|
|
|
- child.Token = token
|
|
|
- tree.AppendChild(me, child)
|
|
|
- return child
|
|
|
+ child := &Ast{}
|
|
|
+ child.AstType = kind
|
|
|
+ child.Token = token
|
|
|
+ tree.AppendChild(me, child)
|
|
|
+ return child
|
|
|
}
|
|
|
|
|
|
func (me *Ast) Walk(walker func(ast *Ast) *Ast) *Ast {
|
|
|
- node_res := tree.Walk(me,
|
|
|
- func(node tree.Noder) tree.Noder {
|
|
|
- ast_res := walker(node.(*Ast))
|
|
|
- if ast_res == nil {
|
|
|
- return nil
|
|
|
- } else {
|
|
|
- return ast_res
|
|
|
- }
|
|
|
- })
|
|
|
- if node_res != nil {
|
|
|
- return node_res.(*Ast)
|
|
|
- } else {
|
|
|
- return nil
|
|
|
- }
|
|
|
+ node_res := tree.Walk(me,
|
|
|
+ func(node tree.Noder) tree.Noder {
|
|
|
+ ast_res := walker(node.(*Ast))
|
|
|
+ if ast_res == nil {
|
|
|
+ return nil
|
|
|
+ } else {
|
|
|
+ return ast_res
|
|
|
+ }
|
|
|
+ })
|
|
|
+ if node_res != nil {
|
|
|
+ return node_res.(*Ast)
|
|
|
+ } else {
|
|
|
+ return nil
|
|
|
+ }
|
|
|
}
|
|
|
|
|
|
func (me *Ast) Remove() {
|
|
|
- _ = tree.Remove(me)
|
|
|
+ _ = tree.Remove(me)
|
|
|
}
|
|
|
|
|
|
func NewAst(kind AstType) *Ast {
|
|
|
- ast := &Ast{}
|
|
|
- ast.AstType = kind
|
|
|
- ast.Token = nil
|
|
|
- return ast
|
|
|
+ ast := &Ast{}
|
|
|
+ ast.AstType = kind
|
|
|
+ ast.Token = nil
|
|
|
+ return ast
|
|
|
}
|
|
|
|
|
|
-type ParseAction func(parser *Parser) bool
|
|
|
|
|
|
-type RuleType int
|
|
|
+type DefineType int
|
|
|
|
|
|
const (
|
|
|
- RuleTypeNone = RuleType(iota)
|
|
|
- RuleTypeAlternate
|
|
|
- RuleTypeSequence
|
|
|
+ DefineTypeNone = DefineType(iota)
|
|
|
+ DefineTypeGo
|
|
|
+ DefineTypeUser
|
|
|
+ DefineTypeVar
|
|
|
)
|
|
|
|
|
|
-type Rule struct {
|
|
|
- tree.Node
|
|
|
- Name string
|
|
|
- RuleType
|
|
|
- ParseAction
|
|
|
-}
|
|
|
-
|
|
|
-func NewRule(name string, ruty RuleType) *Rule {
|
|
|
- res := &Rule{}
|
|
|
- res.RuleType = ruty
|
|
|
- res.Name = name
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Rule) NewChild(action ParseAction) *Rule {
|
|
|
- child := NewRule("foo", RuleTypeNone)
|
|
|
- tree.AppendChild(me, child)
|
|
|
- return child
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Rule) Walk(walker func(rule *Rule) *Rule) *Rule {
|
|
|
- node_res := tree.Walk(me,
|
|
|
- func(node tree.Noder) tree.Noder {
|
|
|
- rule_res := walker(node.(*Rule))
|
|
|
- if rule_res == nil {
|
|
|
- return nil
|
|
|
- } else {
|
|
|
- return rule_res
|
|
|
- }
|
|
|
- })
|
|
|
- return node_res.(*Rule)
|
|
|
-}
|
|
|
-
|
|
|
-type Parser struct {
|
|
|
- *Ast
|
|
|
- *Lexer
|
|
|
- now *Ast
|
|
|
- lookahead *Token
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) SetupRules() {
|
|
|
-
|
|
|
+type Value interface {
|
|
|
}
|
|
|
|
|
|
-func (me *Parser) Expect(types ...TokenType) bool {
|
|
|
- monolog.Debug("Expecting: ", types, " from ", me.now.AstType, " have ", me.LookaheadType(), " \n")
|
|
|
- for _, t := range types {
|
|
|
- if me.LookaheadType() == t {
|
|
|
- monolog.Debug("Found: ", t, "\n")
|
|
|
- return true
|
|
|
- }
|
|
|
- }
|
|
|
- monolog.Debug("Not found.\n")
|
|
|
- return false
|
|
|
-}
|
|
|
+type StringValue string
|
|
|
|
|
|
-type Parsable interface {
|
|
|
- isParsable()
|
|
|
-}
|
|
|
+type SymbolValue string
|
|
|
|
|
|
-func (me TokenType) isParsable() {
|
|
|
-}
|
|
|
+type IntegerValue int64
|
|
|
|
|
|
-func (me ParseAction) isParsable() {
|
|
|
-}
|
|
|
+type FloatValue float64
|
|
|
|
|
|
-
|
|
|
- */
|
|
|
-func (me *Parser) Advance() *Token {
|
|
|
- if me.lookahead == nil {
|
|
|
- me.lookahead = <-me.Lexer.Output
|
|
|
- }
|
|
|
- return me.lookahead
|
|
|
-}
|
|
|
+type ArrayValue []Value
|
|
|
|
|
|
-func (me *Parser) DropLookahead() {
|
|
|
- me.lookahead = nil
|
|
|
-}
|
|
|
+type MapValue map[string]Value
|
|
|
|
|
|
-func (me *Parser) Lookahead() *Token {
|
|
|
- return me.lookahead
|
|
|
-}
|
|
|
+type BoolValue bool
|
|
|
|
|
|
-func (me *Parser) LookaheadType() TokenType {
|
|
|
- if me.lookahead == nil {
|
|
|
- return TokenError
|
|
|
- }
|
|
|
- return me.Lookahead().TokenType
|
|
|
-}
|
|
|
|
|
|
-func (me *Parser) Consume(atyp AstType, types ...TokenType) bool {
|
|
|
- me.Advance()
|
|
|
- res := me.Expect(types...)
|
|
|
- if res {
|
|
|
- me.NewAstChild(atyp)
|
|
|
- me.DropLookahead()
|
|
|
- }
|
|
|
- return res
|
|
|
+type Variable struct {
|
|
|
+ Value
|
|
|
+ Name string
|
|
|
}
|
|
|
|
|
|
-func (me *Parser) ConsumeWithoutAst(types ...TokenType) bool {
|
|
|
- me.Advance()
|
|
|
- res := me.Expect(types...)
|
|
|
- if res {
|
|
|
- me.DropLookahead()
|
|
|
- }
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-
|
|
|
-func (me * Parser) OneOf(restype AstType, options ...Parsable) bool {
|
|
|
- res := false
|
|
|
- k, v := range options {
|
|
|
- switch option := v.Type {
|
|
|
- case TokenType: res := Consume(restype, option)
|
|
|
- case ParseAction: res := option(me)
|
|
|
- }
|
|
|
- }
|
|
|
- return res
|
|
|
-}
|
|
|
-*/
|
|
|
-
|
|
|
-func (me *Parser) ParseEOX() bool {
|
|
|
- return me.ConsumeWithoutAst(TokenEOL, TokenPeriod)
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseValue() bool {
|
|
|
- return me.Consume(AstTypeValue, TokenString, TokenNumber, TokenSymbol)
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseWord() bool {
|
|
|
- return me.Consume(AstTypeWord, TokenWord, TokenKeywordA, TokenKeywordThe)
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseWordValue() bool {
|
|
|
- me.NewAstChildDescend(AstTypeWordValue)
|
|
|
- res := me.ParseValue() || me.ParseWord()
|
|
|
- me.AstAscend(res)
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseParametersNonempty() bool {
|
|
|
- res := false
|
|
|
- for me.ParseParameter() {
|
|
|
- res = true
|
|
|
- }
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseCallArgs() bool {
|
|
|
- me.NewAstChildDescend(AstTypeCallArgs)
|
|
|
- res := me.ParseParameters() && me.ParseEOX()
|
|
|
- me.AstAscend(res)
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseOperator() bool {
|
|
|
- return me.Consume(AstTypeOperator, TokenOperator)
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) NewAstChild(tyty AstType) *Ast {
|
|
|
- return me.now.NewChild(tyty, me.lookahead)
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) NewAstChildDescend(tyty AstType) {
|
|
|
- node := me.NewAstChild(tyty)
|
|
|
- me.now = node
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) AstAscend(keep bool) {
|
|
|
- if me.now.Parent() != nil {
|
|
|
- now := me.now
|
|
|
- me.now = now.Parent().(*Ast)
|
|
|
- if !keep {
|
|
|
- now.Remove()
|
|
|
- }
|
|
|
- }
|
|
|
-}
|
|
|
-
|
|
|
-func (me TokenType) BlockCloseForOpen() (TokenType, bool) {
|
|
|
- switch me {
|
|
|
- case TokenOpenBrace:
|
|
|
- return TokenCloseBrace, true
|
|
|
- case TokenKeywordDo:
|
|
|
- return TokenKeywordEnd, true
|
|
|
- default:
|
|
|
- return TokenError, false
|
|
|
- }
|
|
|
-
|
|
|
-}
|
|
|
-
|
|
|
-func (me TokenType) ParenthesisCloseForOpen() (TokenType, bool) {
|
|
|
- switch me {
|
|
|
- case TokenOpenBracket:
|
|
|
- return TokenCloseBracket, true
|
|
|
- case TokenOpenParen:
|
|
|
- return TokenCloseParen, true
|
|
|
- default:
|
|
|
- return TokenError, false
|
|
|
- }
|
|
|
-
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseBlock() bool {
|
|
|
- me.Advance()
|
|
|
- open := me.LookaheadType()
|
|
|
- done, ok := open.BlockCloseForOpen()
|
|
|
- if !ok {
|
|
|
-
|
|
|
- return false
|
|
|
- }
|
|
|
- me.DropLookahead()
|
|
|
- me.NewAstChildDescend(AstTypeBlock)
|
|
|
- res := me.ParseStatements()
|
|
|
- me.AstAscend(res)
|
|
|
- if res {
|
|
|
- me.Advance()
|
|
|
- if me.LookaheadType() != done {
|
|
|
- return me.ParseError()
|
|
|
- }
|
|
|
- me.DropLookahead()
|
|
|
- }
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseParenthesis() bool {
|
|
|
- me.Advance()
|
|
|
- open := me.LookaheadType()
|
|
|
- done, ok := open.ParenthesisCloseForOpen()
|
|
|
- if !ok {
|
|
|
-
|
|
|
- return false
|
|
|
- }
|
|
|
- me.DropLookahead()
|
|
|
- me.NewAstChildDescend(AstTypeParenthesis)
|
|
|
- res := me.ParseExpression()
|
|
|
- me.AstAscend(res)
|
|
|
- if res {
|
|
|
- me.Advance()
|
|
|
- if me.LookaheadType() != done {
|
|
|
- return me.ParseError()
|
|
|
- }
|
|
|
- me.DropLookahead()
|
|
|
- }
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseWords() bool {
|
|
|
- me.NewAstChildDescend(AstTypeWords)
|
|
|
- res := me.ParseWord()
|
|
|
- for me.ParseWord() {
|
|
|
- }
|
|
|
- me.AstAscend(res)
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseDefinition() bool {
|
|
|
- me.Advance()
|
|
|
- res := me.Consume(AstTypeDefinition, TokenKeywordDef)
|
|
|
- if !res {
|
|
|
- return false
|
|
|
- }
|
|
|
- res = res && (me.ParseWord() || me.ParseOperator())
|
|
|
- if !res {
|
|
|
- _ = me.ParseError()
|
|
|
- }
|
|
|
- res = res && me.ParseParametersNonempty()
|
|
|
- if !res {
|
|
|
- _ = me.ParseError()
|
|
|
- }
|
|
|
- me.AstAscend(res)
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseParameter() bool {
|
|
|
- me.NewAstChildDescend(AstTypeParameter)
|
|
|
- res := me.ParseWordValue() || me.ParseOperator() ||
|
|
|
- me.ParseParenthesis() || me.ParseBlock()
|
|
|
- me.AstAscend(res)
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseParameters() bool {
|
|
|
- for me.ParseParameter() {
|
|
|
- }
|
|
|
- return true
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseError() bool {
|
|
|
- me.now.NewChild(AstTypeError, me.lookahead)
|
|
|
- fmt.Printf("Parse error: at %s\n", me.lookahead)
|
|
|
- return false
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseExpression() bool {
|
|
|
- return (me.ParseWordValue() || me.ParseOperator()) && me.ParseParameters()
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseStatement() bool {
|
|
|
- me.NewAstChildDescend(AstTypeStatement)
|
|
|
-
|
|
|
- res := me.ParseEOX() ||
|
|
|
- me.ParseDefinition() ||
|
|
|
- (me.ParseExpression() && me.ParseEOX()) ||
|
|
|
- me.ParseBlock()
|
|
|
-
|
|
|
- me.AstAscend(res)
|
|
|
- return res
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseEOF() bool {
|
|
|
- return me.Consume(AstTypeEox, TokenEOF)
|
|
|
-}
|
|
|
-
|
|
|
-func (me *Parser) ParseStatements() bool {
|
|
|
- me.NewAstChildDescend(AstTypeStatements)
|
|
|
- res := me.ParseStatement()
|
|
|
-
|
|
|
- for me.ParseStatement() {
|
|
|
- }
|
|
|
-
|
|
|
- me.AstAscend(res)
|
|
|
- return res
|
|
|
+type DefinePattern struct {
|
|
|
+ Parts []string
|
|
|
}
|
|
|
|
|
|
-func (me *Parser) ParseProgram() bool {
|
|
|
- return me.ParseStatements() && me.ParseEOF()
|
|
|
-}
|
|
|
+type GoDefineFunc func(runtime *Runtime, args ...Value) Value
|
|
|
|
|
|
-func NewParserForLexer(lexer *Lexer) *Parser {
|
|
|
- me := &Parser{}
|
|
|
- me.Ast = NewAst(AstTypeProgram)
|
|
|
- me.now = me.Ast
|
|
|
- me.Lexer = lexer
|
|
|
- me.Ast.Token = &Token{}
|
|
|
- go me.Lexer.Start()
|
|
|
- return me
|
|
|
+type ScriptDefine struct {
|
|
|
+ DefineType
|
|
|
+ DefinePattern
|
|
|
+ *Ast
|
|
|
}
|
|
|
|
|
|
-func NewParserForText(text string) *Parser {
|
|
|
- lexer := OpenLexer(strings.NewReader(text))
|
|
|
- return NewParserForLexer(lexer)
|
|
|
+type GoDefine struct {
|
|
|
+ DefineType
|
|
|
+ *DefinePattern
|
|
|
+ GoDefineFunc
|
|
|
}
|
|
|
|
|
|
-func (me *Ast) DotID() string {
|
|
|
- return fmt.Sprintf("ast_%p", me)
|
|
|
+type Define interface {
|
|
|
}
|
|
|
|
|
|
-func (me *Ast) Dotty() {
|
|
|
- g := graphviz.NewDigraph("rankdir", "LR")
|
|
|
- me.Walk(func(ast *Ast) *Ast {
|
|
|
- label := ast.AstType.String()
|
|
|
- if ast.Token != nil {
|
|
|
- label = label + "\n" + ast.Token.String()
|
|
|
- }
|
|
|
- g.AddNode(ast.DotID(), "label", label)
|
|
|
- if ast.Parent() != nil {
|
|
|
- g.AddEdgeByName(ast.Parent().(*Ast).DotID(), ast.DotID())
|
|
|
- }
|
|
|
- return nil
|
|
|
- })
|
|
|
- g.Dotty()
|
|
|
+type Environment struct {
|
|
|
+ Parent *Environment
|
|
|
+ Defines map[string]Define
|
|
|
+ Variables map[string]*Variable
|
|
|
+ Stack []Value
|
|
|
}
|
|
|
|
|
|
-
|
|
|
-
|
|
|
-PROGRAM -> STATEMENTS.
|
|
|
-STATEMENTS -> STATEMENT STATEMENTS | .
|
|
|
-STATEMENT -> EXPRESSION EOX | DEFINITION | BLOCK .
|
|
|
-DEFINITION -> define WORDOP WORDOPS BLOCK.
|
|
|
-WORDOPS -> WORDOP WORDOPS | .
|
|
|
-EXPRESSION -> WORDVALUE PARAMETERSS.
|
|
|
-PARAMETERS -> PARAMETER PARAMETERS | .
|
|
|
-PARAMETER -> WORDVALUE | PARENTHESIS | BLOCK | operator.
|
|
|
-PARENTHESIS -> '(' EXPRESSION ')' | ot EXPRESSION ct.
|
|
|
-BLOCK -> oe STATEMENTS ce | do STATEMENTS end .
|
|
|
-WORDOP -> word | operator | a | the.
|
|
|
-WORDVALUE -> word | VALUE | a | the.
|
|
|
-VALUE -> string | number | symbol.
|
|
|
-EOX -> eol | period.
|
|
|
-
|
|
|
-)
|
|
|
-*/
|
|
|
-
|
|
|
-type DefineType int
|
|
|
+type Instruction int
|
|
|
|
|
|
const (
|
|
|
- DefineTypeNone = DefineType(iota)
|
|
|
- DefineTypeGo
|
|
|
- DefineTypeUser
|
|
|
- DefineTypeVar
|
|
|
+ InstructionNop = Instruction(iota)
|
|
|
+ InstructionCall
|
|
|
+ InstructionPush
|
|
|
+ InstructionPop
|
|
|
)
|
|
|
|
|
|
-type Var interface {
|
|
|
+func (env *Environment) AddDefine(name string, def Define) {
|
|
|
+ env.Defines[name] = def
|
|
|
}
|
|
|
|
|
|
-type DefinePattern struct {
|
|
|
- Parts []string
|
|
|
+func (env *Environment) NewGoDefine(name string, fn GoDefineFunc, pattern ...string) {
|
|
|
+ defpattern := new(DefinePattern)
|
|
|
+ defpattern.Parts = append(defpattern.Parts, pattern...)
|
|
|
+ godefine := &GoDefine{DefineTypeGo, defpattern, fn}
|
|
|
+ env.AddDefine(name, godefine)
|
|
|
}
|
|
|
|
|
|
-type GoDefineFunc func(runtime Runtime, args ...Var) Var
|
|
|
-
|
|
|
-type UserDefine struct {
|
|
|
- DefinePattern
|
|
|
- *Ast
|
|
|
+type Runtime struct {
|
|
|
+ Environment
|
|
|
+ start *Ast
|
|
|
+ now *Ast
|
|
|
}
|
|
|
|
|
|
-type GoDefine struct {
|
|
|
- DefinePattern
|
|
|
- *GoDefineFunc
|
|
|
+func RuntimePuts(runtime *Runtime, args ...Value) Value {
|
|
|
+ var iargs []interface{}
|
|
|
+ for _, arg := range args {
|
|
|
+ iargs = append(iargs, arg)
|
|
|
+ }
|
|
|
+ fmt.Print(iargs...)
|
|
|
+ return true
|
|
|
}
|
|
|
|
|
|
-type Define struct {
|
|
|
- DefineType
|
|
|
- definition *Ast
|
|
|
+func (run *Runtime) Init() {
|
|
|
+ run.NewGoDefine("puts", RuntimePuts, "$", "*")
|
|
|
}
|
|
|
|
|
|
-type Environment struct {
|
|
|
- Parent *Environment
|
|
|
+func (run *Runtime) Start(ast *Ast) {
|
|
|
+ run.start = ast
|
|
|
+ run.now = ast
|
|
|
}
|
|
|
|
|
|
-type Runtime struct {
|
|
|
- Environment
|
|
|
+func (run *Runtime) RunOnce() {
|
|
|
+
|
|
|
}
|
|
|
|
|
|
func main() {
|
|
|
- fmt.Println("Hello World!")
|
|
|
+ fmt.Println("Hello World!")
|
|
|
}
|