// raku
/*
Raku is an easy-to-use scripting language that can also be used interactively.

Desired simplified syntax:

PROGRAM -> STATEMENTS.
STATEMENTS -> STATEMENT STATEMENTS | .
STATEMENT -> EXPRESSION eox | BLOCK .
EXPRESSION -> word PARAMETERS.
PARAMETERS -> PARAMETER PARAMETERS | .
PARAMETER -> word | VALUE | PARENTHESIS | BLOCK | operator.
PARENTHESIS -> bep PARAMETER eop .
BLOCK -> bob STATEMENTS eob .
VALUE -> string | long | double | symbol.

Desired syntax (verified LL(1) on smlweb.cpsc.ucalgary.ca):

PROGRAM -> STATEMENTS.
STATEMENTS -> STATEMENT STATEMENTS | .
STATEMENT -> EXPRESSION EOX | DEFINITION | BLOCK .
DEFINITION -> define WORDOP WORDOPS BLOCK.
WORDOPS -> WORDOP WORDOPS | .
EXPRESSION -> WORDVALUE PARAMETERS.
PARAMETERS -> PARAMETER PARAMETERS | .
PARAMETER -> WORDVALUE | PARENTHESIS | BLOCK | operator.
PARENTHESIS -> '(' EXPRESSION ')' | ot EXPRESSION ct.
BLOCK -> oe STATEMENTS ce | do STATEMENTS end .
WORDOP -> word | operator | a | the.
WORDVALUE -> word | VALUE | a | the.
VALUE -> string | number | symbol.
EOX -> eol | period.

Lexer: Yet another syntax, which supports operators but requires () to use
them, and [ ] to indicate expressions inside expressions.

PROGRAM -> STATEMENTS.
STATEMENTS -> STATEMENT STATEMENTS | .
STATEMENT -> EXPRESSION EOX | BLOCK .
EXPRESSION -> CALL | PARENTHESIS | RECTANGLE .
CALL -> WORD PARAMETERS .
RECTANGLE -> '[' EXPRESSION ']' | with EXPRESSION end .
PARENTHESIS -> '(' OPERATION ')' | let OPERATION end .
OPERATION -> PARAMETER OPLIST .
OPLIST -> op OPLIST | .
OP -> operator PARAMETER .
PARAMETERS -> PARAMETER PARAMETERS | .
PARAMETER -> WORDVALUE | BLOCK | PARENTHESIS | RECTANGLE .
BLOCK -> '{' STATEMENTS '}' | do STATEMENTS end .
WORDVALUE -> word | VALUE | a | the.
VALUE -> string | number | symbol.
EOX -> '\n' .

Most simple "lisp but with less parenthesis" syntax:

PROGRAM -> STATEMENTS.
STATEMENTS -> STATEMENT STATEMENTS | .
STATEMENT -> CALL | EOX | BLOCK .
BLOCK -> '{' STATEMENTS '}' .
CALL -> word PARAMETERS EOX .
PARAMETERS -> PARAMETER PARAMETERS | .
PARAMETER -> WORDVALUE | BLOCK .
WORDVALUE -> word | VALUE .
VALUE -> string | number | symbol.
EOX -> '\n' .

More TCL-ish: allows operators in () and forces evaluation of blocks in [].

PROGRAM -> STATEMENTS.
STATEMENTS -> STATEMENT STATEMENTS | .
STATEMENT -> COMMAND | SUBSTITUTION | BLOCK | EXPRESSION | EOX .
BLOCK -> '{' STATEMENTS '}' | do STATEMENTS end .
SUBSTITUTION -> '[' STATEMENTS ']' | evaluate STATEMENTS end .
EXPRESSION -> '(' EXPRBODY ')' | calculate EXPRBODY end .
EXPRBODY -> OPERAND OPERANDS.
OPERANDS -> operator OPERANDS | .
OPERAND -> PARAMETER .
COMMAND -> word PARAMETERS EOX .
ARGUMENTS -> WORDVALUE ARGUMENTS | .
PARAMETERS -> PARAMETER PARAMETERS | .
PARAMETER -> WORDVALUE | SUBSTITUTION | EXPRESSION | BLOCK .
WORDVALUE -> word | VALUE .
VALUE -> string | number | symbol | true | false | nothing .
EOX -> '\n' .

set (door's state) to closed .
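For illustration, a plain command such as

set door to closed

derives under the grammar above as follows (assuming "set", "door", "to" and
"closed" all lex as word tokens):

STATEMENT -> COMMAND
COMMAND -> word PARAMETERS EOX (word: set)
PARAMETERS -> PARAMETER PARAMETERS (applied three times)
PARAMETER -> WORDVALUE -> word (door, to, closed)
EOX -> '\n'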
# Or, this grammar, also useful as a generic command parser for
# AIF or MUx itself, though necessarily more complex:

PROGRAM -> STATEMENTS.
STATEMENTS -> STATEMENT STATEMENTS | .
STATEMENT -> COMMAND | SUBSTITUTION | BLOCK | EXPRESSION | EOX .
BLOCK -> '{' STATEMENTS '}' | do STATEMENTS end .
SUBSTITUTION -> '[' STATEMENTS ']' | evaluate STATEMENTS end .
EXPRESSION -> '(' EXPRBODY ')' | calculate EXPRBODY end .
EXPRBODY -> OPERAND OPERANDS.
OPERANDS -> operator OPERANDS | .
OPERAND -> PARAMETER .
COMMAND -> word ARGUMENTS EOX .
ARGUMENTS -> ARGUMENT ARGUMENT_SEP ARGUMENTS | .
ARGUMENT_SEP -> ',' | preposition | article | .
ARGUMENT -> LITERAL | SUBSTITUTION | EXPRESSION | BLOCK .
WORDLIT -> word | LITERAL .
LITERAL -> string | number | symbol | true | false | nothing .
EOX -> '\n' .

type Duration (is a) number
( also could say a Duration (is a) number )

type Effect (is an) integer

constant No Effect is 1
constant Healing Effect is an Effect which is 1
the Damaging Effect is an Effect which is 2
( the is another way to say constant / variable )

type Spell (is a) record (which) has
    (a) name (which is a/as a) String
    (a) Duration
    (an) Effect
end

variable cure light is a Spell which has
    name is "Cure Light"
    Duration is Duration 0.0
    Effect is Healing Effect
end
( could have been the cure light is a spell ... )

to cast (a) Spell at (a) Being do
    ( ... )
end

to cast (a) s which is a Spell at (a) b which is a Being do
    ( ... )
end

to add n1 which is a Number to n2 which is a Number do
end

to add one Number to another Number do
    one becomes one plus another
end

to duck do
    let text be "You duck"
    one becomes one plus another
end

type spellike (is an) interface which has
    cast (a) at Being
end

type Spell aliases spell

cast cure light at Ben

English single word prepositions: in aboard about above absent across after
against along alongside amid amidst among apropos apud around as astride at
on atop ontop bar before behind below beneath beside besides between beyond
but by chez circa come dehors despite down during except for from in inside
into less like minus near nearer nearest notwithstanding of off on onto
opposite out outside over pace past per post pre pro qua re sans save short
since than through thru throughout to toward towards under underneath unlike
until up upon upside versus via vice vis-à-vis with within without worth
*/
package raku

import (
    "bytes"
    "errors"
    "fmt"
    "io"
    "reflect"
    "runtime"
    "strings"
    "unicode"

    // "gitlab.com/beoran/woe/graphviz"
    "gitlab.com/beoran/woe/monolog"
    "gitlab.com/beoran/woe/tree"
)

type TokenChannel chan *Token

type Lexer struct {
    Reader    io.Reader
    Positions []Position
    Token     Token
    rule      LexerRule
    Output    TokenChannel
    buffer    []byte
    runes     []rune
}

type LexerRule func(lexer *Lexer) LexerRule

func (me *Lexer) Last() Position {
    return me.Positions[1]
}

func (me *Lexer) Current() Position {
    return me.Positions[0]
}

func (me *Lexer) LastPtr() *Position {
    return &me.Positions[1]
}

func (me *Lexer) CurrentPtr() *Position {
    return &me.Positions[0]
}

// PushPosition saves pos at the front of the position stack.
func (me *Lexer) PushPosition(pos Position) {
    newpos := make([]Position, len(me.Positions)+1)
    newpos[0] = pos
    for i := 1; i < len(newpos); i++ {
        newpos[i] = me.Positions[i-1]
    }
    me.Positions = newpos
}

func (me *Lexer) PushCurrentPosition() {
    current := me.Current()
    me.PushPosition(current)
}

// PopPosition removes and returns the front of the position stack, or
// returns nil once only the two base positions are left.
func (me *Lexer) PopPosition() *Position {
    if len(me.Positions) <= 2 {
        return nil
    }
    result := &me.Positions[0]
    newpos := make([]Position, len(me.Positions)-1)
    for i := 1; i < len(me.Positions); i++ {
        newpos[i-1] = me.Positions[i]
    }
    me.Positions = newpos
    return result
}
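// Position and Token are declared elsewhere in the raku package. From their
// use in this file they presumably look roughly like the sketch below; this
// is an illustrative assumption, not the actual declarations:
//
//    type Position struct {
//        Index  int // offset into Lexer.runes
//        Row    int // line number, advanced when a '\n' is passed
//        Column int // column within the line, reset to 0 on '\n'
//    }
//
//    type Token struct {
//        TokenType
//        TokenText
//        Position
//    }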
func (me *Lexer) Emit(t TokenType, v TokenText) {
    tok := &Token{t, v, me.Current()}
    me.Output <- tok
}

func (me *Lexer) Error(message string, args ...interface{}) {
    value := fmt.Sprintf(message, args...)
    monolog.Error("Lex Error: %s", value)
    me.Emit(TokenError, TokenText(value))
}

func LexError(me *Lexer) LexerRule {
    me.Error("Error")
    return nil
}

func (me *Lexer) SkipComment() bool {
    if me.Peek() == '#' {
        if me.Next() == '(' {
            return me.SkipNotIn(")")
        } else {
            return me.SkipNotIn("\r\n")
        }
    }
    return true
}

/* Returns whether or not a keyword was found, and if so, the TokenType
of the keyword. */
func LookupKeyword(word string) (bool, TokenType) {
    kind, found := keywordMap[word]
    return found, kind
}

/* Returns whether or not a special operator or sigil was found, and if so,
returns the TokenType of the sigil. */
func LookupSigil(sigil string) (bool, TokenType) {
    fmt.Printf("LookupSigil: %s\n", sigil)
    kind, found := sigilMap[sigil]
    return found, kind
}

func LexSigil(me *Lexer) LexerRule {
    me.Found(TokenType(me.Peek()))
    _ = me.Next()
    me.Advance()
    return LexNormal
}

func LexWord(me *Lexer) LexerRule {
    me.SkipNotIn(" \t\r\n'({[]})")
    iskw, kind := LookupKeyword(me.CurrentStringValue())
    if iskw {
        me.Found(kind)
    } else {
        me.Found(TokenWord)
    }
    return LexNormal
}

func LexSymbol(me *Lexer) LexerRule {
    me.SkipNotIn(" \t\r\n'({[]})")
    me.Found(TokenSymbol)
    return LexNormal
}

func LexNumber(me *Lexer) LexerRule {
    me.SkipNotIn(" \t\r\n'({[]})")
    me.Found(TokenNumber)
    return LexNormal
}

func LexWhitespace(me *Lexer) LexerRule {
    me.SkipWhitespace()
    me.Advance()
    return LexNormal
}

func LexComment(me *Lexer) LexerRule {
    if !me.SkipComment() {
        me.Error("Unterminated comment")
        return LexError
    }
    me.Advance()
    return LexNormal
}

func LexPunctuator(me *Lexer) LexerRule {
    me.Found(TokenType(me.Peek()))
    _ = me.Next()
    me.Advance()
    return LexNormal
}

func LexEOL(me *Lexer) LexerRule {
    me.SkipIn("\r\n")
    me.Found(TokenEOL)
    return LexNormal
}

func LexOperator(me *Lexer) LexerRule {
    me.SkipIn(operator_chars)
    me.Found(TokenOperator)
    return LexNormal
}

func lexEscape(me *Lexer) error {
    _ = me.Next()
    return nil
}

func LexString(me *Lexer) LexerRule {
    open := me.Peek()
    do_escape := open == '"'
    peek := me.Next()
    me.Advance()
    for ; peek != '\000'; peek = me.Next() {
        if do_escape && peek == '\\' {
            if err := lexEscape(me); err != nil {
                return LexError
            }
        } else if peek == open {
            me.Found(TokenString)
            _ = me.Next()
            me.Advance()
            return LexNormal
        }
    }
    me.Error("Unexpected EOF in string.")
    return nil
}

func LexNumberOrOperator(me *Lexer) LexerRule {
    if unicode.IsDigit(me.Next()) {
        return LexNumber
    } else {
        _ = me.Previous()
        return LexOperator
    }
}

func LexNormal(me *Lexer) LexerRule {
    peek := me.Peek()
    if peek == '#' {
        return LexComment
    } else if strings.ContainsRune(" \t", peek) {
        return LexWhitespace
    } else if strings.ContainsRune(".,;:", peek) {
        return LexPunctuator
    } else if strings.ContainsRune("([{}])", peek) {
        return LexSigil
    } else if strings.ContainsRune("$", peek) {
        return LexSymbol
    } else if strings.ContainsRune("\r\n", peek) {
        return LexEOL
    } else if strings.ContainsRune("+-", peek) {
        return LexNumberOrOperator
    } else if strings.ContainsRune("\"`", peek) {
        return LexString
    } else if peek == '\000' {
        me.Emit(TokenEOF, "")
        return nil
    } else if unicode.IsLetter(peek) {
        return LexWord
    } else if unicode.IsDigit(peek) {
        return LexNumber
    } else if strings.ContainsRune(operator_chars, peek) {
        return LexOperator
    } else {
        return LexError
    }
}
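// The lexer is organised as a small state machine: each LexerRule above
// inspects the input and returns the next LexerRule to run, or nil to stop,
// and Lexer.Start below simply loops until a rule returns nil. A usage
// sketch (illustrative only; not part of the original file):
func tryLexingDemo() {
    // Lex a short script held in memory; TryLexing logs each token produced.
    lexer := OpenLexer(strings.NewReader("say \"hello\"\nsay again\n"))
    lexer.TryLexing()
}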
func OpenLexer(reader io.Reader) *Lexer {
    lexer := &Lexer{}
    lexer.Reader = reader
    lexer.Output = make(TokenChannel)
    lexer.Positions = make([]Position, 2)
    // lexer.buffer = new(byte[1024])
    return lexer
}

// ReadReaderOnce reads one chunk from the reader. It reports true when
// reading is finished (EOF or error), false when more input may follow.
func (me *Lexer) ReadReaderOnce() (bool, error) {
    buffer := make([]byte, 1024)
    n, err := me.Reader.Read(buffer)
    monolog.Debug("read %v %d %v\n", buffer[:n], n, err)
    if n > 0 {
        me.buffer = append(me.buffer, buffer[:n]...)
        monolog.Debug("append %s", me.buffer)
    }
    if err == io.EOF {
        return true, nil
    } else if err != nil {
        me.Error("Error reading from reader: %s", err)
        return true, err
    }
    return false, nil
}

func (me *Lexer) ReadReader() error {
    me.buffer = make([]byte, 0)
    done, err := me.ReadReaderOnce()
    for err == nil && !done {
        done, err = me.ReadReaderOnce()
    }
    me.runes = bytes.Runes(me.buffer)
    return err
}

func (me *Lexer) Peek() rune {
    if me.Current().Index >= len(me.runes) {
        return '\000'
    }
    return me.runes[me.Current().Index]
}

func (me *Lexer) PeekNext() rune {
    if (me.Current().Index + 1) >= len(me.runes) {
        return '\000'
    }
    return me.runes[me.Current().Index+1]
}

func (me *Lexer) Next() rune {
    if me.Peek() == '\n' {
        me.CurrentPtr().Column = 0
        me.CurrentPtr().Row++
    }
    me.CurrentPtr().Index++
    if me.Current().Index >= len(me.runes) {
        // me.Emit(TokenEOF, "")
    }
    return me.Peek()
}

func (me *Lexer) Previous() rune {
    if me.Current().Index > 0 {
        me.CurrentPtr().Index--
        if me.Peek() == '\n' {
            me.CurrentPtr().Column = 0
            me.CurrentPtr().Row--
        }
    }
    return me.Peek()
}

func (me *Lexer) SkipRune() {
    _ = me.Next()
}

func (me *Lexer) SkipIn(set string) bool {
    for strings.ContainsRune(set, me.Next()) {
        monolog.Debug("SkipIn: %s %c\n", set, me.Peek())
        if me.Peek() == '\000' {
            return false
        }
    }
    return true
}

func (me *Lexer) SkipNotIn(set string) bool {
    _ = me.Next()
    for !strings.ContainsRune(set, me.Peek()) {
        if me.Next() == '\000' {
            return false
        }
    }
    return true
}

func (me *Lexer) SkipWhile(should_skip func(r rune) bool) bool {
    for should_skip(me.Peek()) {
        if me.Next() == '\000' {
            return false
        }
    }
    return true
}

func (me *Lexer) SkipWhitespace() {
    me.SkipIn(" \t")
}

func (me *Lexer) Advance() {
    (*me.LastPtr()) = me.Current()
}

func (me *Lexer) Rewind() {
    (*me.CurrentPtr()) = me.Last()
}

func (me *Lexer) CurrentRuneValue() []rune {
    return me.runes[me.Last().Index:me.Current().Index]
}

func (me *Lexer) CurrentStringValue() string {
    return string(me.CurrentRuneValue())
}

func (me *Lexer) Found(kind TokenType) {
    me.Emit(kind, TokenText(me.CurrentStringValue()))
    me.Advance()
}

func GetFunctionName(fun interface{}) string {
    return runtime.FuncForPC(reflect.ValueOf(fun).Pointer()).Name()
}

func (me *Lexer) Start() {
    if err := me.ReadReader(); err == nil || err == io.EOF {
        rule := LexNormal
        for rule != nil {
            monolog.Debug("Lexer Rule: %s\n", GetFunctionName(rule))
            rule = rule(me)
        }
    } else {
        me.Error("Could not read in input buffer: %s", err)
    }
    close(me.Output)
}

func (me *Lexer) TryLexing() {
    go me.Start()

    for token := range me.Output {
        monolog.Info("Token %s", token)
    }
}

type AstType int

const (
    AstTypeProgram = AstType(iota)
    AstTypeStatements
    AstTypeStatement
    AstTypeDefinition
    AstTypeWords
    AstTypeExpression
    AstTypeWordExpression
    AstTypeWordCallop
    AstTypeOperation
    AstTypeOperations
    AstTypeCallArgs
    AstTypeValueExpression
    AstTypeValueCallop
    AstTypeParametersNonempty
    AstTypeParameters
    AstTypeParameter
    AstTypeBlock
    AstTypeWordValue
    AstTypeWord
    AstTypeValue
    AstTypeEox
    AstTypeOperator
    AstTypeParenthesis
    AstTypeModifier
    AstTypeError
)

var astTypeMap map[AstType]string = map[AstType]string{
    AstTypeProgram:            "Program",
    AstTypeStatements:         "Statements",
    AstTypeStatement:          "Statement",
    AstTypeDefinition:         "Definition",
    AstTypeWords:              "Words",
    AstTypeExpression:         "Expression",
    AstTypeWordExpression:     "WordExpression",
    AstTypeWordCallop:         "WordCallop",
    AstTypeOperation:          "Operation",
    AstTypeOperations:         "Operations",
    AstTypeCallArgs:           "CallArgs",
    AstTypeValueExpression:    "ValueExpression",
    AstTypeValueCallop:        "ValueCallop",
    AstTypeParametersNonempty: "ParametersNonempty",
    AstTypeParameters:         "Parameters",
    AstTypeParameter:          "Parameter",
    AstTypeBlock:              "Block",
    AstTypeWordValue:          "WordValue",
    AstTypeWord:               "Word",
    AstTypeValue:              "Value",
    AstTypeEox:                "Eox",
    AstTypeOperator:           "Operator",
    AstTypeParenthesis:        "Parenthesis",
    AstTypeModifier:           "Modifier",
    AstTypeError:              "Error",
}

func (me AstType) String() string {
    name, found := astTypeMap[me]
    if found {
        return name
    } else {
        return fmt.Sprintf("Unknown AstType %d", int(me))
    }
}

type Ast struct {
    tree.Node
    AstType
    *Token
}

func (me *Ast) Run(run *Runtime) (*Value, error) {
    switch me.AstType {
    case AstTypeProgram:
        return me.RunProgram(run)
    case AstTypeStatements:
        return me.RunStatements(run)
    case AstTypeStatement:
        return me.RunStatement(run)
    case AstTypeDefinition:
        return me.RunDefinition(run)
    case AstTypeWords:
        return me.RunWords(run)
    case AstTypeExpression:
        return me.RunExpression(run)
    case AstTypeWordExpression:
        return me.RunWordExpression(run)
    case AstTypeWordCallop:
        return me.RunWordCallop(run)
    case AstTypeOperation:
        return me.RunOperation(run)
    case AstTypeOperations:
        return me.RunOperations(run)
    case AstTypeCallArgs:
        return me.RunCallArgs(run)
    case AstTypeValueExpression:
        return me.RunValueExpression(run)
    case AstTypeValueCallop:
        return me.RunValueCallop(run)
    case AstTypeParametersNonempty:
        return me.RunParametersNonempty(run)
    case AstTypeParameters:
        return me.RunParameters(run)
    case AstTypeParameter:
        return me.RunParameter(run)
    case AstTypeBlock:
        return me.RunBlock(run)
    case AstTypeWordValue:
        return me.RunWordValue(run)
    case AstTypeWord:
        return me.RunWord(run)
    case AstTypeValue:
        return me.RunValue(run)
    case AstTypeEox:
        return me.RunEox(run)
    case AstTypeOperator:
        return me.RunOperator(run)
    case AstTypeParenthesis:
        return me.RunParenthesis(run)
    case AstTypeModifier:
        return me.RunModifier(run)
    case AstTypeError:
        return me.RunError(run)
    default:
        return nil, errors.New("Shouldn't happen")
    }
}
func (me *Ast) RunProgram(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunStatements(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunStatement(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunDefinition(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunWords(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunExpression(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunWordExpression(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunWordCallop(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunOperation(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunOperations(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunCallArgs(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunValueExpression(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunValueCallop(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunParametersNonempty(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunParameters(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunParameter(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunBlock(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunWordValue(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunWord(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunValue(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunEox(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunOperator(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunParenthesis(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunModifier(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) RunError(run *Runtime) (*Value, error) { return nil, errors.New("Not implemented") }

func (me *Ast) NewChild(kind AstType, token *Token) *Ast {
    child := &Ast{}
    child.AstType = kind
    child.Token = token
    tree.AppendChild(me, child)
    return child
}

func (me *Ast) Walk(walker func(ast *Ast) *Ast) *Ast {
    node_res := tree.Walk(me, func(node tree.Noder) tree.Noder {
        ast_res := walker(node.(*Ast))
        if ast_res == nil {
            return nil
        } else {
            return ast_res
        }
    })
    if node_res != nil {
        return node_res.(*Ast)
    } else {
        return nil
    }
}

func (me *Ast) Remove() {
    _ = tree.Remove(me)
}

func NewAst(kind AstType) *Ast {
    ast := &Ast{}
    ast.AstType = kind
    ast.Token = nil
    return ast
}

type DefineType int

const (
    DefineTypeNone = DefineType(iota)
    DefineTypeGo
    DefineTypeUser
    DefineTypeVar
)

type Variable struct {
    Value
    Name string
}

type DefinePattern struct {
    Parts []string
}

type GoDefineFunc func(runtime *Runtime, args ...Value) Value

type ScriptDefine struct {
    DefineType
    DefinePattern
    *Ast
}

type GoDefine struct {
    DefineType
    *DefinePattern
    GoDefineFunc
}

type Define interface{}

type Environment struct {
    Parent    *Environment
    Defines   map[string]Define
    Variables map[string]*Variable
    Stack     []Value
}

type Instruction int

const (
    InstructionNop = Instruction(iota)
    InstructionCall
    InstructionPush
    InstructionPop
)

func (env *Environment) AddDefine(name string, def Define) {
    env.Defines[name] = def
}

func (env *Environment) NewGoDefine(name string, fn GoDefineFunc, pattern ...string) {
    defpattern := new(DefinePattern)
    defpattern.Parts = append(defpattern.Parts, pattern...)
    godefine := &GoDefine{DefineTypeGo, defpattern, fn}
    env.AddDefine(name, godefine)
}

type Runtime struct {
    Environment
    start *Ast
    now   *Ast
}

func RuntimePuts(runtime *Runtime, args ...Value) Value {
    var iargs []interface{}
    for _, arg := range args {
        iargs = append(iargs, arg)
    }
    fmt.Print(iargs...)
    return true
}

func (run *Runtime) Init() {
    // The defines map must exist before builtins can be registered.
    run.Defines = make(map[string]Define)
    run.NewGoDefine("puts", RuntimePuts, "$", "*")
}

func (run *Runtime) Start(ast *Ast) {
    run.start = ast
    run.now = ast
}

func (run *Runtime) RunOnce() {
    // run.now.Node
}

func main() {
    fmt.Println("Hello World!")
}
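// Usage sketch (illustrative; not part of the original file): build a tiny
// AST by hand with NewAst/NewChild and search it with Walk. This assumes
// tree.Walk visits every node and stops at the first non-nil result, which
// is what the Walk wrapper above suggests.
func astWalkDemo() *Ast {
    program := NewAst(AstTypeProgram)
    statements := program.NewChild(AstTypeStatements, nil)
    statements.NewChild(AstTypeStatement, nil)
    // Return the first Statement node found anywhere under program.
    return program.Walk(func(node *Ast) *Ast {
        if node.AstType == AstTypeStatement {
            return node
        }
        return nil
    })
}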