// ll1lex is a lexer generator based on regular expressions.
package main

import (
	"flag"
	"fmt"
	"os"

	// "text/template"

	"src.eruta.nl/beoran/ll1/ast"
	"src.eruta.nl/beoran/ll1/common"
	"src.eruta.nl/beoran/ll1/flexgen"
	"src.eruta.nl/beoran/ll1/grammar"
	"src.eruta.nl/beoran/ll1/parser"
)

// showUsage prints a short usage summary and the available flags.
func showUsage() {
	fmt.Fprintf(flag.CommandLine.Output(),
		"%s: %s [options] input_file.ll1lex\n", os.Args[0], os.Args[0])
	fmt.Fprintf(flag.CommandLine.Output(),
		"\n[options] may be one of the following:\n\n")
	flag.PrintDefaults()
	fmt.Fprintf(flag.CommandLine.Output(), "\n")
}

const helpText = `
ll1lex is a lexer generator based on regular expressions.

Usage: ll1lex [options] input_file.ll1lex

The [options] are:

	-p name     Name of the package to generate code for.
	-help, -h   Shows the help page.
	-o file     Name of output file to overwrite.
	-v          Be more verbose. Shows the scanned tokens as well.
`

// showHelp prints the extended help text.
func showHelp() {
	fmt.Fprintf(flag.CommandLine.Output(), "\n%s\n", helpText)
}

// Ll1Lex contains the options and variables of the ll1lex program.
type Ll1Lex struct {
	packageName string
	outName     string
	usedName    string
	appendName  string
	help        bool
	verbose     bool
	fout        *os.File
	tokens      []common.Token
	parser      *parser.Parser
	grammar     *grammar.Grammar
}

func main() {
	var err error

	flag.Usage = showUsage
	ll1lex := Ll1Lex{}
	flag.BoolVar(&ll1lex.verbose, "v", false, "Be more verbose. Shows the scanned tokens as well.")
	flag.StringVar(&ll1lex.outName, "o", "", "Name of output `file` to overwrite.")
	flag.StringVar(&ll1lex.packageName, "p", "lexer", "Name of the `package` to generate code for.")
	flag.BoolVar(&ll1lex.help, "h", false, "Shows the help page.")
	flag.BoolVar(&ll1lex.help, "help", false, "Shows the help page.")
	flag.Parse()

	if ll1lex.help {
		showUsage()
		showHelp()
		os.Exit(1)
	}
	if len(flag.Args()) < 1 {
		showUsage()
		os.Exit(1)
	}
	ll1lexName := flag.Arg(0)

	// Scan the lexer description into tokens.
	tokens, err := flexgen.LexFileName(ll1lexName)
	if err != nil {
		fmt.Fprintf(os.Stderr, "%v\n", err)
		os.Exit(1)
	}
	if ll1lex.verbose {
		for _, tok := range tokens {
			fmt.Fprintf(os.Stderr, "%s\n", tok)
		}
	}

	// Parse the scanned tokens.
	ll1lex.parser = flexgen.MakeFlexerParser()
	if ll1lex.verbose {
		fmt.Fprintf(os.Stderr, "Parsing\n")
	}
	err = ll1lex.parser.Parse(tokens)
	if err != nil {
		fmt.Fprintf(os.Stderr, "%s\n", err)
		for _, e := range ll1lex.parser.Errors {
			fmt.Fprintf(os.Stderr, "error: %v\n", e)
		}
		os.Exit(2)
	}

	// Determine the output file: standard output unless -o was given.
	ll1lex.usedName = ll1lex.outName
	if ll1lex.outName == "" {
		ll1lex.fout = os.Stdout
	} else {
		ll1lex.fout, err = os.Create(ll1lex.outName)
		if err != nil {
			fmt.Fprintf(os.Stderr, "Could not open output file %s: %s\n", ll1lex.outName, err)
			os.Exit(1)
		}
		defer ll1lex.fout.Close()
	}

	// Dump the parse result and report how many tokens were consumed.
	if ll1lex.verbose {
		fmt.Fprintf(os.Stderr, "Dumping\n")
	}
	i := ll1lex.parser.Index
	l := len(tokens)
	fmt.Fprintf(ll1lex.fout, "%s\nTokens:%d/%d\n", ast.Dump(ll1lex.parser.Result), i, l)
	if i < l {
		fmt.Fprintf(os.Stderr, "%s\n", tokens[i])
	}
}
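
// Example invocation (a minimal sketch; the file and package names below are
// only illustrative and are not part of this repository):
//
//	ll1lex -p calclexer -o calclexer.go -v calc.ll1lex
//
// This scans and parses calc.ll1lex, prints the scanned tokens to standard
// error because of -v, and writes the dumped parse result to calclexer.go.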