@@ -74,7 +74,7 @@ const (
 	TokenKeywordDo   TokenType = -11
 	TokenKeywordEnd  TokenType = -12
 	TokenKeywordThe  TokenType = -13
-	TokenKeywordTo   TokenType = -14
+	TokenKeywordDef  TokenType = -14
 	TokenLastKeyword TokenType = -15
 	TokenLast        TokenType = -15
 )
@@ -99,16 +99,17 @@ var tokenTypeMap map[TokenType]string = map[TokenType]string{
 	TokenKeywordDo:  "TokenKeywordDo",
 	TokenKeywordEnd: "TokenKeywordEnd",
 	TokenKeywordThe: "TokenKeywordThe",
-	TokenKeywordTo:  "TokenKeywordTo",
+	TokenKeywordDef: "TokenKeywordDef",
 }
 
 var keywordMap map[string]TokenType = map[string]TokenType{
-	"a":   TokenKeywordA,
-	"an":  TokenKeywordA,
-	"do":  TokenKeywordDo,
-	"end": TokenKeywordEnd,
-	"the": TokenKeywordThe,
-	"to":  TokenKeywordTo,
+	"a":      TokenKeywordA,
+	"an":     TokenKeywordA,
+	"do":     TokenKeywordDo,
+	"def":    TokenKeywordDef,
+	"define": TokenKeywordDef,
+	"end":    TokenKeywordEnd,
+	"the":    TokenKeywordThe,
 }
 
 var sigilMap map[string]TokenType = map[string]TokenType{
@@ -193,8 +194,15 @@ func LookupSigil(sigil string) (bool, TokenType) {
 	return found, kind
 }
 
+func LexSigil(me *Lexer) LexerRule {
+	me.Found(TokenType(me.Peek()))
+	_ = me.Next()
+	me.Advance()
+	return LexNormal
+}
+
 func LexWord(me *Lexer) LexerRule {
-	me.SkipNotIn(" \t\r\n'")
+	me.SkipNotIn(" \t\r\n'({[]})")
 	iskw, kind := LookupKeyword(me.CurrentStringValue())
 	if iskw {
@@ -206,13 +214,13 @@ func LexWord(me *Lexer) LexerRule {
 }
 
 func LexSymbol(me *Lexer) LexerRule {
-	me.SkipNotIn(" \t\r\n'")
+	me.SkipNotIn(" \t\r\n'({[]})")
 	me.Found(TokenSymbol)
 	return LexNormal
 }
 
 func LexNumber(me *Lexer) LexerRule {
-	me.SkipNotIn(" \tBBBT\r\n")
+	me.SkipNotIn(" \t\r\n'({[]})")
 	me.Found(TokenNumber)
 	return LexNormal
 }
@@ -233,7 +241,8 @@ func LexComment(me *Lexer) LexerRule {
 }
 
 func LexPunctuator(me *Lexer) LexerRule {
-	me.Found(TokenType(me.Peek()))
+	me.Found(TokenType(me.Next()))
+	me.Advance()
 	return LexNormal
 }
 
@@ -244,7 +253,7 @@ func LexEOL(me *Lexer) LexerRule {
 }
 
 func LexOperator(me *Lexer) LexerRule {
-	me.SkipNotIn(" \t\r\n")
+	me.SkipNotIn(" \t\r\n({[]})")
 	issig, kind := LookupSigil(me.CurrentStringValue())
 	if issig {
 		me.Found(kind)
@@ -297,6 +306,8 @@ func LexNormal(me *Lexer) LexerRule {
 		return LexWhitespace
 	} else if strings.ContainsRune(".,;:", peek) {
 		return LexPunctuator
+	} else if strings.ContainsRune("([{}])", peek) {
+		return LexSigil
 	} else if strings.ContainsRune("$", peek) {
 		return LexSymbol
 	} else if strings.ContainsRune("\r\n", peek) {
@@ -501,6 +512,8 @@ const (
 	AstTypeValue
 	AstTypeEox
 	AstTypeOperator
+	AstTypeParenthesis
+	AstTypeModifier
 	AstTypeError
 )
 
@@ -527,6 +540,8 @@ var astTypeMap map[AstType]string = map[AstType]string{
 	AstTypeValue:    "AstTypeValue",
 	AstTypeEox:      "AstTypeEox",
 	AstTypeOperator: "AstTypeOperator",
+	AstTypeParenthesis: "AstTypeParenthesis",
+	AstTypeModifier:    "AstTypeModifier",
 	AstTypeError:    "AstTypeError",
 }
 
@@ -636,14 +651,14 @@ func (me *Parser) SetupRules() {
 }
 
 func (me *Parser) Expect(types ...TokenType) bool {
-	fmt.Print("Expecting: ", types, " from ", me.now.AstType, " have ", me.LookaheadType(), " \n")
+	monolog.Debug("Expecting: ", types, " from ", me.now.AstType, " have ", me.LookaheadType(), " \n")
 	for _, t := range types {
 		if me.LookaheadType() == t {
-			fmt.Print("Found: ", t, "\n")
+			monolog.Debug("Found: ", t, "\n")
 			return true
 		}
 	}
-	fmt.Print("Not found.\n")
+	monolog.Debug("Not found.\n")
 	return false
 }
 
@@ -765,12 +780,14 @@ func (me *Parser) ParseOperator() bool {
 	return me.Consume(AstTypeOperator, TokenOperator)
 }
 
+/*
 func (me *Parser) ParseOperation() bool {
 	me.NewAstChildDescend(AstTypeOperation)
 	res := me.ParseOperator() && me.ParseParameter()
 	me.AstAscend(res)
 	return res
 }
+*/
 
 func (me *Parser) ParseOperations() bool {
 	me.NewAstChildDescend(AstTypeOperations)
@@ -828,16 +845,24 @@ func (me *Parser) AstAscend(keep bool) {
 	}
 }
 
-func (me TokenType) CloseForOpen() (TokenType, bool) {
+func (me TokenType) BlockCloseForOpen() (TokenType, bool) {
 	switch me {
 	case TokenOpenBrace:
 		return TokenCloseBrace, true
+	case TokenOpenParen:
+		return TokenCloseParen, true
+	default:
+		return TokenError, false
+	}
+
+}
+
+func (me TokenType) ParenthesisCloseForOpen() (TokenType, bool) {
+	switch me {
 	case TokenOpenBracket:
 		return TokenCloseBracket, true
 	case TokenOpenParen:
 		return TokenCloseParen, true
-	case TokenKeywordDo:
-		return TokenKeywordEnd, true
 	default:
 		return TokenError, false
 	}
@@ -847,7 +872,7 @@ func (me TokenType) CloseForOpen() (TokenType, bool) {
 func (me *Parser) ParseBlock() bool {
 	me.Advance()
 	open := me.LookaheadType()
-	done, ok := open.CloseForOpen()
+	done, ok := open.BlockCloseForOpen()
 	if !ok {
 		/* Not an opening of a block, so no block found. */
 		return false
@@ -866,6 +891,28 @@ func (me *Parser) ParseBlock() bool {
 	return res
 }
 
+func (me *Parser) ParseParenthesis() bool {
+	me.Advance()
+	open := me.LookaheadType()
+	done, ok := open.ParenthesisCloseForOpen()
+	if !ok {
+		/* Not an opening of a parenthesis, so no parenthesis found. */
+		return false
+	}
+	me.DropLookahead()
+	me.NewAstChildDescend(AstTypeParenthesis)
+	res := me.ParseExpression()
+	me.AstAscend(res)
+	if res {
+		me.Advance()
+		if me.LookaheadType() != done {
+			return me.ParseError()
+		}
+		me.DropLookahead()
+	}
+	return res
+}
+
 func (me *Parser) ParseWords() bool {
 	me.NewAstChildDescend(AstTypeWords)
 	res := me.ParseWord()
@@ -877,7 +924,7 @@ func (me *Parser) ParseWords() bool {
 
 func (me *Parser) ParseDefinition() bool {
 	me.Advance()
-	res := me.Consume(AstTypeDefinition, TokenKeywordTo)
+	res := me.Consume(AstTypeDefinition, TokenKeywordDef)
 	if !res {
 		return false
 	}
@@ -893,6 +940,27 @@ func (me *Parser) ParseDefinition() bool {
 	return res
 }
 
+func (me *Parser) ParseOperation() bool {
+	me.NewAstChildDescend(AstTypeOperation)
+	res := me.ParseOperator() && me.ParseModifier()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseModifier() bool {
+	me.NewAstChildDescend(AstTypeModifier)
+	res := me.ParseOperation() || me.ParseWordValue() ||
+		me.ParseParenthesis() || me.ParseBlock()
+	me.AstAscend(res)
+	return res
+}
+
+func (me *Parser) ParseModifiers() bool {
+	for me.ParseModifier() {
+	}
+	return true
+}
+
 func (me *Parser) ParseError() bool {
 	me.now.NewChild(AstTypeError, me.lookahead)
 	fmt.Printf("Parse error: at %s\n", me.lookahead)
@@ -900,14 +968,17 @@ func (me *Parser) ParseError() bool {
 }
 
 func (me *Parser) ParseExpression() bool {
-	return me.ParseWordExpression() || me.ParseValueExpression()
+	return me.ParseWordValue() && me.ParseModifiers()
 }
 
 func (me *Parser) ParseStatement() bool {
-
 	me.NewAstChildDescend(AstTypeStatement)
 	/* First case is for an empty expression/statement. */
-	res := me.ParseEOX() || me.ParseDefinition() || me.ParseExpression() || me.ParseBlock()
+	res := me.ParseEOX() ||
+		me.ParseDefinition() ||
+		(me.ParseExpression() && me.ParseEOX()) ||
+		me.ParseBlock()
+
 	me.AstAscend(res)
 	return res
 }
@@ -967,36 +1038,73 @@ func (me *Ast) Dotty() {
 }
 
 /*
-	PROGRAM -> STATEMENTS.
+
+PROGRAM -> STATEMENTS.
 STATEMENTS -> STATEMENT STATEMENTS | .
-STATEMENT -> DEFINITION | EXPRESSION | BLOCK .
-DEFINITION -> to WORDS BLOCK.
+STATEMENT -> EXPRESSION EOX | DEFINITION | BLOCK | EOX .
+DEFINITION -> define WORDS BLOCK.
 WORDS -> word WORDS | .
-EXPRESSION -> WORD_EXPRESSION | VALUE_EXPRESSION.
-WORD_EXPRESSION -> word WORD_CALLOP.
-WORD_CALLOP -> WORD_OPERATION | WORD_CALL.
-OPERATION -> operator PARAMETERS_NONEMPTY EOX.
-WORD_CALL -> PARAMETERS EOX.
-VALUE_EXPRESSION -> value VALUE_CALLOP.
-VALUE_CALLOP -> VALUE_OPERATION | VALUE_CALL.
-VALUE_CALL -> EOX.
-PARAMETERS_NONEMPTY -> PARAMETER PARAMETERS.
-PARAMETERS -> PARAMETERS_NONEMPTY | .
-PARAMETER -> BLOCK | WORDVALUE | OPERATION.
-BLOCK -> ob STATEMENTS cb | op STATEMENTS cp | oa STATEMENTS ca | do STATEMENTS end.
-WORDVALUE -> word | VALUE.
+EXPRESSION -> WORDVALUE MODIFIERS.
+MODIFIERS -> MODIFIER MODIFIERS | .
+OPERATION -> operator MODIFIER .
+MODIFIER -> OPERATION | WORDVALUE | PARENTHESIS | BLOCK.
+PARENTHESIS -> '(' EXPRESSION ')' | ot EXPRESSION ct.
+BLOCK -> oe STATEMENTS ce | do STATEMENTS end .
+WORDVALUE -> word | VALUE | a | the.
 VALUE -> string | number | symbol.
 EOX -> eol | period.
 
-
 AstNodeBlock = AstNodeType(iota)
 )
 */
 
+type DefineType int
+
+const (
+	DefineTypeNone DefineType = iota
+	DefineTypeGo
+	DefineTypeUser
+	DefineTypeVar
+)
+
+type Value interface {
+
+}
+
+type DefinePattern struct {
+	Parts []string
+}
+
+type GoDefineFunc func(runtime Runtime, args ...Value) Value
+
+type UserDefine struct {
+	DefinePattern
+	*Ast
+}
+
+type GoDefine struct {
+	DefinePattern
+	*GoDefineFunc
+}
+
+
+type Define struct {
+	DefineType
+	Ast *definition
+}
+
 type Environment struct {
-	Parent *Environment
+	Parent *Environment
+}
+
+
+type Runtime struct {
+	Environment
 }
 
+
+
+
 func main() {
 	fmt.Println("Hello World!")
 }
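
For reference, a small input sketch that the revised grammar in this patch is intended to accept; this is an illustration only, not test data from the change. The keywords "define", "do", and "end", the period as end-of-statement, the string and number literals, and the parentheses all come from the keyword and sigil maps above, while "greet", "name", "print", and "sum" are made-up words:

	define greet name do
		print "hello" name.
	end

	greet "world".
	print (sum 1 2).

Here "define greet name do ... end" follows DEFINITION -> define WORDS BLOCK with a do/end block, and "(sum 1 2)" is a MODIFIER via the new PARENTHESIS production, so the whole line "print (sum 1 2)." parses as EXPRESSION -> WORDVALUE MODIFIERS followed by EOX.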