lexer.go 16 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777
  1. package muesli
  2. import (
  3. "bufio"
  4. _ "bytes"
  5. _ "errors"
  6. "fmt"
  7. "io"
  8. _ "io"
  9. "os"
  10. _ "reflect"
  11. _ "runtime"
  12. "strconv"
  13. "strings"
  14. "unicode"
  15. _ "unicode"
  16. // "gitlab.com/beoran/woe/graphviz"
  17. // _ "gitlab.com/beoran/woe/monolog"
  18. )
/* A Lexer splits scanned input into tokens.
 */
type Lexer struct {
	Position                           // current location (file name, line, column) in the input
	Index          int                 // count of runes consumed so far
	Start          int                 // start index of the token in progress
	io.RuneScanner                     // underlying input source
	buffer         []rune              // runes accumulated for the current token
	Current        rune                // most recently read rune
	Keywords       map[string]*Keyword // registered keywords, consulted by LexWordOrType
	LoggerWrapper                      // optional logger for debug output
}
  31. func (lexer *Lexer) SetLogger(logger Logger) {
  32. lexer.LoggerWrapper = LoggerWrapper{logger}
  33. }
  34. func (lexer *Lexer) ClearBuffer() {
  35. lexer.buffer = make([]rune, 0)
  36. }
  37. func (lexer *Lexer) MakeIntegerToken() Token {
  38. var sbuffer = string(lexer.buffer)
  39. i, err := strconv.ParseInt(sbuffer, 0, 64)
  40. if err == nil {
  41. lexer.ClearBuffer()
  42. return NewToken(TokenKindInteger, IntValue(i), lexer.Position)
  43. } else {
  44. lexer.ClearBuffer()
  45. return lexer.MakeErrorToken(err)
  46. }
  47. }
  48. func (lexer *Lexer) MakeTokenFromKeyword(kw * Keyword) Token {
  49. lexer.ClearBuffer()
  50. return NewToken(kw.TokenKind, kw.Value, lexer.Position)
  51. }
  52. func (lexer *Lexer) MakeFloatToken() Token {
  53. var sbuffer = string(lexer.buffer)
  54. f, err := strconv.ParseFloat(sbuffer, 64)
  55. if err == nil {
  56. lexer.ClearBuffer()
  57. return NewToken(TokenKindFloat, FloatValue(f), lexer.Position)
  58. } else {
  59. lexer.ClearBuffer()
  60. return lexer.MakeErrorToken(err)
  61. }
  62. }
  63. func (lexer *Lexer) MakeBooleanToken(b bool) Token {
  64. lexer.ClearBuffer()
  65. if b {
  66. return NewToken(TokenKindBoolean, TrueValue, lexer.Position)
  67. } else {
  68. return NewToken(TokenKindBoolean, FalseValue, lexer.Position)
  69. }
  70. }
  71. func (lexer *Lexer) MakeNilToken() Token {
  72. lexer.ClearBuffer()
  73. return NewToken(TokenKindNil, NilValue, lexer.Position)
  74. }
  75. func (lexer *Lexer) MakeBuiltinToken() Token {
  76. var sbuffer = string(lexer.buffer)
  77. lexer.ClearBuffer()
  78. if sbuffer == "true" {
  79. lexer.ClearBuffer()
  80. return NewToken(TokenKindBoolean, TrueValue, lexer.Position)
  81. } else if sbuffer == "false" {
  82. return NewToken(TokenKindBoolean, FalseValue, lexer.Position)
  83. } else if sbuffer == "nil" {
  84. return NewToken(TokenKindNil, NilValue, lexer.Position)
  85. } else {
  86. return lexer.MakeErrorfToken("Not a builtin: %s", sbuffer)
  87. }
  88. }
  89. func (lexer *Lexer) MakeStringValueToken(kind TokenKind) Token {
  90. var sbuffer = string(lexer.buffer)
  91. return NewToken(kind, StringValue(sbuffer), lexer.Position)
  92. }
  93. func (lexer *Lexer) MakeTypeValueToken(kind TokenKind) Token {
  94. var sbuffer = string(lexer.buffer)
  95. return NewToken(kind, TypeValue(sbuffer), lexer.Position)
  96. }
  97. func (lexer *Lexer) MakeErrorValueToken(kind TokenKind) Token {
  98. var sbuffer = string(lexer.buffer)
  99. return NewToken(kind, NewErrorValuef("%s", sbuffer), lexer.Position)
  100. }
  101. func (lexer *Lexer) MakeWordValueToken(kind TokenKind) Token {
  102. var sbuffer = string(lexer.buffer)
  103. return NewToken(kind, WordValue(sbuffer), lexer.Position)
  104. }
  105. func (lexer *Lexer) MakeToken(kind TokenKind) Token {
  106. switch kind {
  107. case TokenKindInteger:
  108. return lexer.MakeIntegerToken()
  109. case TokenKindFloat:
  110. return lexer.MakeFloatToken()
  111. case TokenKindString:
  112. return lexer.MakeStringValueToken(kind)
  113. case TokenKindSymbol:
  114. return lexer.MakeWordValueToken(kind)
  115. case TokenKindType:
  116. return lexer.MakeTypeValueToken(kind)
  117. case TokenKindError:
  118. return lexer.MakeErrorValueToken(kind)
  119. case TokenKindWord:
  120. return lexer.MakeWordValueToken(kind)
  121. case TokenKindNil:
  122. fallthrough
  123. case TokenKindBoolean:
  124. return lexer.MakeBuiltinToken()
  125. case TokenKindGet:
  126. fallthrough
  127. case TokenKindSet:
  128. fallthrough
  129. case TokenKindOpenBlock:
  130. fallthrough
  131. case TokenKindCloseBlock:
  132. fallthrough
  133. case TokenKindOpenList:
  134. fallthrough
  135. case TokenKindCloseList:
  136. fallthrough
  137. case TokenKindOpenParen:
  138. fallthrough
  139. case TokenKindCloseParen:
  140. fallthrough
  141. case TokenKindEOX:
  142. fallthrough
  143. case TokenKindEOF:
  144. val := StringValue(string(lexer.buffer))
  145. lexer.ClearBuffer()
  146. return NewToken(kind, val, lexer.Position)
  147. default:
  148. return lexer.MakeErrorfToken("Internal error on token type %s", kind)
  149. }
  150. }
  151. func (lexer Lexer) MakeErrorToken(err error) Token {
  152. return NewToken(TokenKindError, ErrorValue{err}, lexer.Position)
  153. }
  154. func (lexer Lexer) MakeErrorfToken(format string, va ...interface{}) Token {
  155. err := fmt.Errorf(format, va...)
  156. return lexer.MakeErrorToken(err)
  157. }
  158. func (lexer Lexer) MakeEOFToken() Token {
  159. return NewToken(TokenKindEOF, &EmptyValue{}, lexer.Position)
  160. }
  161. func (lexer *Lexer) Peek() (rune, error) {
  162. r, _, err := lexer.RuneScanner.ReadRune()
  163. err2 := lexer.RuneScanner.UnreadRune()
  164. if err == nil {
  165. err = err2
  166. }
  167. return r, err
  168. }
/* Advances the lexer's position based on the rune r read. */
func (lexer *Lexer) advance(r rune) {
	lexer.Current = r
	lexer.Index++
	lexer.Position.Column++
	if r == '\n' {
		// A newline resets the column and starts a new line.
		// NOTE(review): Column is incremented before this reset fires,
		// so the first rune of every line (and of the file, which
		// starts at Column 1) is reported at column 2 — confirm
		// whether positions are meant to be 1-based per rune.
		lexer.Position.Column = 1
		lexer.Position.Line++
	}
}
  179. /* Append a rune to the lexer's buffer. */
  180. func (lexer *Lexer) appendRune(r rune) {
  181. lexer.buffer = append(lexer.buffer, r)
  182. }
  183. /* Advances the lexer's input buffer but does not store the rune read,
  184. * but just returns it. */
  185. func (lexer *Lexer) Skip() (rune, error) {
  186. r, _, err := lexer.RuneScanner.ReadRune()
  187. if err != nil {
  188. return 0, err
  189. }
  190. lexer.advance(r)
  191. return r, nil
  192. }
  193. /* Actually reads the next rune from the lexer's input source and stores
  194. * them in the lexer's token buffer.
  195. * Shorthand for r, err := lexer.Skip() ; lexer.appendRune(r) */
  196. func (lexer *Lexer) Next() (rune, error) {
  197. r, err := lexer.Skip()
  198. if err == nil {
  199. lexer.appendRune(r)
  200. }
  201. return r, nil
  202. }
  203. func (lexer *Lexer) DoIf(predicate func(rune) bool,
  204. todo func(*Lexer) (rune, error)) (bool, error) {
  205. r, err := lexer.Peek()
  206. if err != nil {
  207. return false, err
  208. }
  209. if predicate(r) {
  210. r, err = todo(lexer)
  211. if err != nil {
  212. return true, err
  213. }
  214. return true, nil
  215. }
  216. return false, nil
  217. }
// NextIf consumes and buffers the next rune if predicate accepts it.
func (lexer *Lexer) NextIf(predicate func(rune) bool) (bool, error) {
	return lexer.DoIf(predicate, (*Lexer).Next)
}

// SkipIf consumes the next rune without buffering it if predicate
// accepts it.
func (lexer *Lexer) SkipIf(predicate func(rune) bool) (bool, error) {
	return lexer.DoIf(predicate, (*Lexer).Skip)
}
  224. func (lexer *Lexer) NextWhile(predicate func(rune) bool) (bool, error) {
  225. result := true
  226. ok, err := lexer.NextIf(predicate)
  227. result = result || ok
  228. for ; ok && (err == nil); ok, err = lexer.NextIf(predicate) {
  229. result = result || ok
  230. }
  231. return result, err
  232. }
  233. func (lexer *Lexer) SkipWhile(predicate func(rune) bool) (bool, error) {
  234. result := true
  235. ok, err := lexer.SkipIf(predicate)
  236. result = result || ok
  237. for ; ok && (err == nil); ok, err = lexer.SkipIf(predicate) {
  238. result = result || ok
  239. }
  240. return result, err
  241. }
  242. func isEOX(r rune) bool {
  243. return r == '\n' || r == '.'
  244. }
  245. func isSpace(r rune) bool {
  246. return r == ' ' || r == '\t' || r == '\v' || r == '\r'
  247. }
  248. func isSpaceOrEOX(r rune) bool {
  249. return r == ' ' || r == '\t' || r == '\v' || r == '\r' || r == '\n' || r == '.'
  250. }
  251. func isComment(r rune) bool {
  252. return r == '#'
  253. }
  254. func (lexer *Lexer) SkipSpace() error {
  255. _, err := lexer.SkipWhile(isSpace)
  256. return err
  257. }
  258. func (lexer *Lexer) SkipBlockComment() error {
  259. var err error
  260. var r rune
  261. lexer.LogDebug("Skipping block comment.")
  262. for block := 1; block > 0 && err == nil; {
  263. _, err = lexer.Skip()
  264. if err != nil {
  265. return err
  266. }
  267. r, err = lexer.Peek()
  268. if r == '{' {
  269. block++
  270. } else if r == '}' {
  271. block--
  272. }
  273. lexer.LogDebug("Skipping block comment: %d", block)
  274. }
  275. _, err = lexer.Skip()
  276. return err
  277. }
// SkipComment skips a comment introduced by '#'. A '{' directly after
// the '#' starts a nested block comment; otherwise the comment runs to
// the end of the line, and the terminating newline is consumed too.
func (lexer *Lexer) SkipComment() error {
	// Consume the '#' that triggered this call.
	r, err := lexer.Skip()
	lexer.LogDebug("Skipping %c.", r)
	if err != nil {
		return err
	}
	// NOTE(review): an error from this Peek is not checked here; it is
	// only observed via the loop condition below.
	r, err = lexer.Peek()
	if r == '{' {
		// "#{" introduces a block comment.
		return lexer.SkipBlockComment()
	}
	// Line comment: discard everything up to the newline.
	for r != '\n' && err == nil {
		lexer.LogDebug("Skipping comment %c.", r)
		_, err = lexer.Skip()
		if err != nil {
			return err
		}
		r, err = lexer.Peek()
	}
	if err != nil {
		return err
	}
	// Consume the newline that ended the comment.
	_, err = lexer.Skip()
	return err
}
  302. /* Handles errors including EOF by either returning an error token or an
  303. * EOF token.
  304. */
  305. func (lexer *Lexer) handleError(err error) Token {
  306. if err == io.EOF {
  307. return lexer.MakeEOFToken()
  308. } else {
  309. return lexer.MakeErrorToken(err)
  310. }
  311. }
  312. func (lexer *Lexer) LexNumber() Token {
  313. isFloat := false
  314. // skip any first - or +
  315. _, err := lexer.NextIf(func(r rune) bool {
  316. return r == '-' || r == '+'
  317. })
  318. _, err = lexer.NextWhile(func(r rune) bool {
  319. if unicode.IsDigit(r) {
  320. return true
  321. } else if r == '.' {
  322. if isFloat {
  323. return false // double point in floating point
  324. } else {
  325. isFloat = true
  326. return true
  327. }
  328. } else {
  329. return false
  330. }
  331. })
  332. if err != nil {
  333. return lexer.MakeErrorfToken("when parsing number: %s", err)
  334. }
  335. if isFloat {
  336. return lexer.MakeToken(TokenKindFloat)
  337. } else {
  338. return lexer.MakeToken(TokenKindInteger)
  339. }
  340. }
// isDoubleQuote reports whether r is the '"' string delimiter.
func isDoubleQuote(r rune) bool {
	return r == '"'
}
// handleEscapeHexChars reads exactly amount hexadecimal digits from
// the input, decodes them as a single code point, and appends that
// rune to the token buffer. Used for the \xNN, \uNNNN and \UNNNNNN
// escapes. Returns an error on a non-hex digit, a read failure, or an
// unparseable value.
func (lexer *Lexer) handleEscapeHexChars(amount int) error {
	buffer := make([]byte, 0)
	r, err := lexer.Skip()
	for index := 0; err == nil && index < amount; {
		if unicode.Is(unicode.ASCII_Hex_Digit, r) {
			buffer = append(buffer, byte(r))
		} else {
			return fmt.Errorf("Not a hexadecimal digit: %c", r)
		}
		index++
		// Only read another rune if more digits are still required.
		if index < amount {
			r, err = lexer.Skip()
		}
	}
	if err != nil {
		return err
	}
	// Decode the collected digits as one code point.
	i, err := strconv.ParseInt(string(buffer), 16, 32)
	if err != nil {
		return err
	}
	lexer.appendRune(rune(i))
	// NOTE(review): this trailing Peek makes an EOF directly after the
	// escape surface here rather than at the caller's next read —
	// confirm that failing the escape in that case is intended.
	_, err = lexer.Peek()
	return err
}
  369. func (lexer *Lexer) handleEscape() error {
  370. r, err := lexer.Skip()
  371. if err != nil {
  372. return err
  373. }
  374. switch r {
  375. case 'a':
  376. lexer.appendRune('\a')
  377. case 'b':
  378. lexer.appendRune('\b')
  379. case 'e':
  380. lexer.appendRune('\033')
  381. case 'f':
  382. lexer.appendRune('\f')
  383. case 'n':
  384. lexer.appendRune('\n')
  385. case 'r':
  386. lexer.appendRune('\r')
  387. case 't':
  388. lexer.appendRune('\t')
  389. case '\\':
  390. lexer.appendRune('\\')
  391. case '"':
  392. lexer.appendRune('"')
  393. // case 'o': fallthrough // No octals, for now.
  394. case 'x':
  395. err = lexer.handleEscapeHexChars(2)
  396. case 'u':
  397. err = lexer.handleEscapeHexChars(4)
  398. case 'U':
  399. err = lexer.handleEscapeHexChars(6)
  400. default:
  401. return fmt.Errorf("Unknown escape sequence character %c: %d", r, r)
  402. }
  403. return err
  404. }
  405. func (lexer *Lexer) LexString() Token {
  406. var err error
  407. var r rune
  408. _, err = lexer.Skip() // Skip first "
  409. if err != nil {
  410. return lexer.handleError(err)
  411. }
  412. r, err = lexer.Skip()
  413. for r != '"' && err == nil {
  414. if r == '\\' {
  415. err = lexer.handleEscape()
  416. if err != nil {
  417. return lexer.handleError(err)
  418. }
  419. } else {
  420. lexer.appendRune(r)
  421. // still inside the string
  422. }
  423. r, err = lexer.Skip()
  424. }
  425. if err != nil {
  426. return lexer.MakeErrorfToken("when parsing string: %s", err)
  427. }
  428. if err != nil {
  429. return lexer.handleError(err)
  430. }
  431. return lexer.MakeToken(TokenKindString)
  432. }
  433. func (lexer *Lexer) LexLongString() Token {
  434. var err error
  435. _, err = lexer.Skip()
  436. if err != nil {
  437. return lexer.handleError(err)
  438. }
  439. _, err = lexer.NextWhile(func(r rune) bool {
  440. return r != '`'
  441. })
  442. if err != nil {
  443. return lexer.MakeErrorfToken("when parsing long string: %s", err)
  444. }
  445. _, err = lexer.Skip()
  446. if err != nil {
  447. return lexer.handleError(err)
  448. }
  449. return lexer.MakeToken(TokenKindString)
  450. }
  451. func (lexer *Lexer) LexWordOrType(kind TokenKind) Token {
  452. var err error
  453. first := true
  454. _, err = lexer.Next()
  455. if err != nil {
  456. return lexer.handleError(err)
  457. }
  458. _, err = lexer.NextWhile(func(r rune) bool {
  459. if first {
  460. first = false
  461. return unicode.IsLetter(r) || r == '_'
  462. } else {
  463. return unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_'
  464. }
  465. })
  466. if err != nil {
  467. return lexer.handleError(err)
  468. }
  469. sbuffer := string(lexer.buffer)
  470. // handle keywords
  471. if kw, ok := lexer.Keywords[sbuffer] ; ok {
  472. return lexer.MakeTokenFromKeyword(kw)
  473. }
  474. return lexer.MakeToken(kind)
  475. }
// LexWord lexes a lower-case word token.
func (lexer *Lexer) LexWord() Token {
	return lexer.LexWordOrType(TokenKindWord)
}

// LexType lexes a capitalized type-name token.
func (lexer *Lexer) LexType() Token {
	return lexer.LexWordOrType(TokenKindType)
}
  482. func (lexer *Lexer) LexSymbol() Token {
  483. var err error
  484. _, err = lexer.Skip()
  485. if err != nil {
  486. return lexer.handleError(err)
  487. }
  488. _, err = lexer.NextWhile(func(r rune) bool {
  489. return !isSpaceOrEOX(r)
  490. })
  491. if err != nil {
  492. return lexer.handleError(err)
  493. }
  494. return lexer.MakeToken(TokenKindSymbol)
  495. }
  496. func (lexer *Lexer) LexBuiltin() Token {
  497. var err error
  498. _, err = lexer.Skip()
  499. if err != nil {
  500. return lexer.handleError(err)
  501. }
  502. _, err = lexer.NextWhile(func(r rune) bool {
  503. return !isSpaceOrEOX(r)
  504. })
  505. if err != nil {
  506. return lexer.handleError(err)
  507. }
  508. return lexer.MakeBuiltinToken()
  509. }
  510. func (lexer *Lexer) skipSpaceAndCommentAndPeek() (rune, error) {
  511. r, err := lexer.Peek()
  512. if err != nil {
  513. return r, err
  514. }
  515. i := 0
  516. for isSpace(r) || isComment(r) {
  517. if isSpace(r) {
  518. err = lexer.SkipSpace()
  519. } else if isComment(r) {
  520. err = lexer.SkipComment()
  521. }
  522. if err != nil {
  523. return r, err
  524. }
  525. i++
  526. r, err = lexer.Peek()
  527. lexer.LogDebug("Peeked again: >%c< %v %v %d", r, isSpace(r), isComment(r), i)
  528. if err != nil {
  529. return r, err
  530. }
  531. }
  532. return r, err
  533. }
  534. func (lexer *Lexer) LexEOX() Token {
  535. lexer.Next()
  536. _, err := lexer.skipSpaceAndCommentAndPeek()
  537. if err != nil {
  538. return lexer.handleError(err)
  539. }
  540. _, err = lexer.NextWhile(func(r rune) bool {
  541. return isSpaceOrEOX(r) || r == '\n' || r == '.'
  542. })
  543. if err != nil {
  544. return lexer.handleError(err)
  545. }
  546. return lexer.MakeToken(TokenKindEOX)
  547. }
  548. func (lexer *Lexer) lex() Token {
  549. r, err := lexer.skipSpaceAndCommentAndPeek()
  550. lexer.LogDebug(" After skip: >%c< >%v<\n", r, err)
  551. if err != nil {
  552. return lexer.handleError(err)
  553. }
  554. if unicode.IsDigit(r) || r == '-' || r == '+' {
  555. return lexer.LexNumber()
  556. }
  557. if r == '\n' || r == '.' {
  558. return lexer.LexEOX()
  559. }
  560. if r == '"' {
  561. return lexer.LexString()
  562. }
  563. if r == '`' {
  564. return lexer.LexLongString()
  565. }
  566. if r == '!' {
  567. return lexer.LexBuiltin()
  568. }
  569. if r == ':' {
  570. return lexer.LexSymbol()
  571. }
  572. switch TokenKind(r) {
  573. case TokenKindGet:
  574. fallthrough
  575. case TokenKindSet:
  576. fallthrough
  577. case TokenKindOpenBlock:
  578. fallthrough
  579. case TokenKindCloseBlock:
  580. fallthrough
  581. case TokenKindOpenList:
  582. fallthrough
  583. case TokenKindCloseList:
  584. fallthrough
  585. case TokenKindOpenParen:
  586. fallthrough
  587. case TokenKindCloseParen:
  588. lexer.Next()
  589. return lexer.MakeToken(TokenKind(r))
  590. default:
  591. }
  592. if unicode.IsLetter(r) {
  593. if unicode.IsUpper(r) {
  594. return lexer.LexType()
  595. } else {
  596. return lexer.LexWord()
  597. }
  598. }
  599. // EOF character
  600. if r == 0x7f {
  601. return lexer.MakeEOFToken()
  602. }
  603. return lexer.MakeErrorfToken("Unknown character: %c", r)
  604. }
  605. func (lexer *Lexer) Lex() Token {
  606. res := lexer.lex()
  607. lexer.ClearBuffer() // ensure buffer is cleared after lexing, always.
  608. return res
  609. }
  610. func (lexer *Lexer) LexAll() []Token {
  611. var token Token
  612. res := make([]Token, 0)
  613. for token = lexer.Lex(); !token.IsLast(); token = lexer.Lex() {
  614. res = append(res, token)
  615. }
  616. res = append(res, token)
  617. return res
  618. }
  619. func NewLexer(scanner io.RuneScanner, filename string) *Lexer {
  620. lexer := &Lexer{}
  621. lexer.RuneScanner = scanner
  622. lexer.Position.FileName = filename
  623. lexer.Position.Column = 1
  624. lexer.Position.Line = 1
  625. lexer.LoggerWrapper = LoggerWrapper{nil}
  626. lexer.Keywords = make(map[string]*Keyword)
  627. return lexer
  628. }
  629. func (lexer * Lexer) Report() {
  630. if lexer == nil {
  631. fmt.Printf("Lexer: is nil\n")
  632. } else {
  633. fmt.Printf("Lexer: %s:%d:%d\n",
  634. lexer.Position.FileName,
  635. lexer.Position.Column,
  636. lexer.Position.Line)
  637. }
  638. }
  639. func (lexer *Lexer) AddKeyword(kw * Keyword) *Keyword {
  640. if kw != nil {
  641. lexer.Keywords[kw.Name] = kw
  642. }
  643. return kw
  644. }
  645. func (lexer *Lexer) NewKeyword(name string, kind TokenKind, value Value) *Keyword {
  646. kw := &Keyword{Name: name, TokenKind: kind, Value: value}
  647. return lexer.AddKeyword(kw)
  648. }
  649. func NewLexerFromString(input string) *Lexer {
  650. reader := strings.NewReader(input)
  651. return NewLexer(reader, "<input>")
  652. }
  653. func NewLexerFromFilename(filename string) (*Lexer, error) {
  654. read, err := os.Open(filename)
  655. if err == nil {
  656. bread := bufio.NewReader(read)
  657. lex := NewLexer(bread, filename)
  658. return lex, nil
  659. }
  660. return nil, err
  661. }