lexer.go 16 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785
  1. package muesli
  2. import (
  3. "bufio"
  4. _ "bytes"
  5. _ "errors"
  6. "fmt"
  7. "io"
  8. _ "io"
  9. "os"
  10. _ "reflect"
  11. _ "runtime"
  12. "strconv"
  13. "strings"
  14. "unicode"
  15. _ "unicode"
  16. // "gitlab.com/beoran/woe/graphviz"
  17. // _ "gitlab.com/beoran/woe/monolog"
  18. )
/* Muesli has no key words by default, but they can be defined if desired
 * for ease of use. A key word is replaced by a token during lexing. */
// Keyword maps a literal name in the input to the token kind and value
// it should lex to.
type Keyword struct {
	Name      string // Name is the literal text matched in the input.
	TokenKind        // TokenKind is the kind of token this keyword produces.
	Value            // Value is the payload value of the produced token.
}
/* A Lexer splits scanned input into tokens.
 */
type Lexer struct {
	Position            // Current position in the source (file name, line, column).
	Index int           // Count of runes consumed so far.
	Start int           // NOTE(review): appears unused in this file — confirm.
	io.RuneScanner      // Underlying input source; must support unreading one rune for Peek.
	buffer []rune       // Accumulates the runes of the token currently being lexed.
	Current rune        // Most recently consumed rune.
	Keywords map[string]*Keyword // Registered keywords, looked up by literal name.
	LoggerWrapper       // Debug logging; may wrap a nil Logger.
}
  38. func (lexer *Lexer) SetLogger(logger Logger) {
  39. lexer.LoggerWrapper = LoggerWrapper{logger}
  40. }
  41. func (lexer *Lexer) ClearBuffer() {
  42. lexer.buffer = make([]rune, 0)
  43. }
  44. func (lexer *Lexer) MakeIntegerToken() Token {
  45. var sbuffer = string(lexer.buffer)
  46. i, err := strconv.ParseInt(sbuffer, 0, 64)
  47. if err == nil {
  48. lexer.ClearBuffer()
  49. return NewToken(TokenKindInteger, IntValue(i), lexer.Position)
  50. } else {
  51. lexer.ClearBuffer()
  52. return lexer.MakeErrorToken(err)
  53. }
  54. }
  55. func (lexer *Lexer) MakeTokenFromKeyword(kw * Keyword) Token {
  56. lexer.ClearBuffer()
  57. return NewToken(kw.TokenKind, kw.Value, lexer.Position)
  58. }
  59. func (lexer *Lexer) MakeFloatToken() Token {
  60. var sbuffer = string(lexer.buffer)
  61. f, err := strconv.ParseFloat(sbuffer, 64)
  62. if err == nil {
  63. lexer.ClearBuffer()
  64. return NewToken(TokenKindFloat, FloatValue(f), lexer.Position)
  65. } else {
  66. lexer.ClearBuffer()
  67. return lexer.MakeErrorToken(err)
  68. }
  69. }
  70. func (lexer *Lexer) MakeBooleanToken(b bool) Token {
  71. lexer.ClearBuffer()
  72. if b {
  73. return NewToken(TokenKindBoolean, TrueValue, lexer.Position)
  74. } else {
  75. return NewToken(TokenKindBoolean, FalseValue, lexer.Position)
  76. }
  77. }
  78. func (lexer *Lexer) MakeNilToken() Token {
  79. lexer.ClearBuffer()
  80. return NewToken(TokenKindNil, NilValue, lexer.Position)
  81. }
  82. func (lexer *Lexer) MakeBuiltinToken() Token {
  83. var sbuffer = string(lexer.buffer)
  84. lexer.ClearBuffer()
  85. if sbuffer == "true" {
  86. lexer.ClearBuffer()
  87. return NewToken(TokenKindBoolean, TrueValue, lexer.Position)
  88. } else if sbuffer == "false" {
  89. return NewToken(TokenKindBoolean, FalseValue, lexer.Position)
  90. } else if sbuffer == "nil" {
  91. return NewToken(TokenKindNil, NilValue, lexer.Position)
  92. } else {
  93. return lexer.MakeErrorfToken("Not a builtin: %s", sbuffer)
  94. }
  95. }
  96. func (lexer *Lexer) MakeStringValueToken(kind TokenKind) Token {
  97. var sbuffer = string(lexer.buffer)
  98. return NewToken(kind, StringValue(sbuffer), lexer.Position)
  99. }
  100. func (lexer *Lexer) MakeTypeValueToken(kind TokenKind) Token {
  101. var sbuffer = string(lexer.buffer)
  102. return NewToken(kind, TypeValue(sbuffer), lexer.Position)
  103. }
  104. func (lexer *Lexer) MakeErrorValueToken(kind TokenKind) Token {
  105. var sbuffer = string(lexer.buffer)
  106. return NewToken(kind, NewErrorValuef("%s", sbuffer), lexer.Position)
  107. }
  108. func (lexer *Lexer) MakeWordValueToken(kind TokenKind) Token {
  109. var sbuffer = string(lexer.buffer)
  110. return NewToken(kind, WordValue(sbuffer), lexer.Position)
  111. }
  112. func (lexer *Lexer) MakeToken(kind TokenKind) Token {
  113. switch kind {
  114. case TokenKindInteger:
  115. return lexer.MakeIntegerToken()
  116. case TokenKindFloat:
  117. return lexer.MakeFloatToken()
  118. case TokenKindString:
  119. return lexer.MakeStringValueToken(kind)
  120. case TokenKindSymbol:
  121. return lexer.MakeWordValueToken(kind)
  122. case TokenKindType:
  123. return lexer.MakeTypeValueToken(kind)
  124. case TokenKindError:
  125. return lexer.MakeErrorValueToken(kind)
  126. case TokenKindWord:
  127. return lexer.MakeWordValueToken(kind)
  128. case TokenKindNil:
  129. fallthrough
  130. case TokenKindBoolean:
  131. return lexer.MakeBuiltinToken()
  132. case TokenKindGet:
  133. fallthrough
  134. case TokenKindSet:
  135. fallthrough
  136. case TokenKindOpenBlock:
  137. fallthrough
  138. case TokenKindCloseBlock:
  139. fallthrough
  140. case TokenKindOpenList:
  141. fallthrough
  142. case TokenKindCloseList:
  143. fallthrough
  144. case TokenKindOpenParen:
  145. fallthrough
  146. case TokenKindCloseParen:
  147. fallthrough
  148. case TokenKindEOX:
  149. fallthrough
  150. case TokenKindEOF:
  151. val := StringValue(string(lexer.buffer))
  152. lexer.ClearBuffer()
  153. return NewToken(kind, val, lexer.Position)
  154. default:
  155. return lexer.MakeErrorfToken("Internal error on token type %s", kind)
  156. }
  157. }
  158. func (lexer Lexer) MakeErrorToken(err error) Token {
  159. return NewToken(TokenKindError, ErrorValue{err}, lexer.Position)
  160. }
  161. func (lexer Lexer) MakeErrorfToken(format string, va ...interface{}) Token {
  162. err := fmt.Errorf(format, va...)
  163. return lexer.MakeErrorToken(err)
  164. }
  165. func (lexer Lexer) MakeEOFToken() Token {
  166. return NewToken(TokenKindEOF, &EmptyValue{}, lexer.Position)
  167. }
  168. func (lexer *Lexer) Peek() (rune, error) {
  169. r, _, err := lexer.RuneScanner.ReadRune()
  170. err2 := lexer.RuneScanner.UnreadRune()
  171. if err == nil {
  172. err = err2
  173. }
  174. return r, err
  175. }
  176. /* Advances the lexer's position based on the rune r read. */
  177. func (lexer *Lexer) advance(r rune) {
  178. lexer.Current = r
  179. lexer.Index++
  180. lexer.Position.Column++
  181. if r == '\n' {
  182. lexer.Position.Column = 1
  183. lexer.Position.Line++
  184. }
  185. }
/* appendRune appends a rune to the lexer's token buffer. */
func (lexer *Lexer) appendRune(r rune) {
	lexer.buffer = append(lexer.buffer, r)
}
  190. /* Advances the lexer's input buffer but does not store the rune read,
  191. * but just returns it. */
  192. func (lexer *Lexer) Skip() (rune, error) {
  193. r, _, err := lexer.RuneScanner.ReadRune()
  194. if err != nil {
  195. return 0, err
  196. }
  197. lexer.advance(r)
  198. return r, nil
  199. }
  200. /* Actually reads the next rune from the lexer's input source and stores
  201. * them in the lexer's token buffer.
  202. * Shorthand for r, err := lexer.Skip() ; lexer.appendRune(r) */
  203. func (lexer *Lexer) Next() (rune, error) {
  204. r, err := lexer.Skip()
  205. if err == nil {
  206. lexer.appendRune(r)
  207. }
  208. return r, nil
  209. }
  210. func (lexer *Lexer) DoIf(predicate func(rune) bool,
  211. todo func(*Lexer) (rune, error)) (bool, error) {
  212. r, err := lexer.Peek()
  213. if err != nil {
  214. return false, err
  215. }
  216. if predicate(r) {
  217. r, err = todo(lexer)
  218. if err != nil {
  219. return true, err
  220. }
  221. return true, nil
  222. }
  223. return false, nil
  224. }
// NextIf consumes the next rune into the token buffer if predicate
// accepts it; reports whether a rune was consumed.
func (lexer *Lexer) NextIf(predicate func(rune) bool) (bool, error) {
	return lexer.DoIf(predicate, (*Lexer).Next)
}
// SkipIf consumes the next rune, discarding it, if predicate accepts
// it; reports whether a rune was consumed.
func (lexer *Lexer) SkipIf(predicate func(rune) bool) (bool, error) {
	return lexer.DoIf(predicate, (*Lexer).Skip)
}
  231. func (lexer *Lexer) NextWhile(predicate func(rune) bool) (bool, error) {
  232. result := true
  233. ok, err := lexer.NextIf(predicate)
  234. result = result || ok
  235. for ; ok && (err == nil); ok, err = lexer.NextIf(predicate) {
  236. result = result || ok
  237. }
  238. return result, err
  239. }
  240. func (lexer *Lexer) SkipWhile(predicate func(rune) bool) (bool, error) {
  241. result := true
  242. ok, err := lexer.SkipIf(predicate)
  243. result = result || ok
  244. for ; ok && (err == nil); ok, err = lexer.SkipIf(predicate) {
  245. result = result || ok
  246. }
  247. return result, err
  248. }
  249. func isEOX(r rune) bool {
  250. return r == '\n' || r == '.'
  251. }
  252. func isSpace(r rune) bool {
  253. return r == ' ' || r == '\t' || r == '\v' || r == '\r'
  254. }
  255. func isSpaceOrEOX(r rune) bool {
  256. return r == ' ' || r == '\t' || r == '\v' || r == '\r' || r == '\n' || r == '.'
  257. }
  258. func isComment(r rune) bool {
  259. return r == '#'
  260. }
  261. func (lexer *Lexer) SkipSpace() error {
  262. _, err := lexer.SkipWhile(isSpace)
  263. return err
  264. }
/* SkipBlockComment skips a '{'-delimited block comment, tracking
 * nesting depth so embedded '{' ... '}' pairs are skipped as a unit.
 * Called from SkipComment after it peeks a '{' following '#'.
 * NOTE(review): the error from Peek inside the loop is not checked
 * before r is inspected; termination relies on the loop condition on
 * the next iteration — confirm that EOF inside a block comment is
 * handled as intended. */
func (lexer *Lexer) SkipBlockComment() error {
	var err error
	var r rune
	lexer.LogDebug("Skipping block comment.")
	// block is the current nesting depth; we start inside one comment.
	for block := 1; block > 0 && err == nil; {
		_, err = lexer.Skip()
		if err != nil {
			return err
		}
		r, err = lexer.Peek()
		if r == '{' {
			block++
		} else if r == '}' {
			block--
		}
		lexer.LogDebug("Skipping block comment: %d", block)
	}
	// Consume the final closing '}'.
	_, err = lexer.Skip()
	return err
}
/* SkipComment skips a '#' comment. If the rune following '#' is '{',
 * the comment is a nestable block comment delegated to
 * SkipBlockComment; otherwise everything up to and including the next
 * newline is discarded.
 * NOTE(review): the error from the first Peek is not checked before r
 * is compared to '{' — confirm EOF directly after '#' behaves as
 * intended. */
func (lexer *Lexer) SkipComment() error {
	r, err := lexer.Skip()
	lexer.LogDebug("Skipping %c.", r)
	if err != nil {
		return err
	}
	r, err = lexer.Peek()
	if r == '{' {
		return lexer.SkipBlockComment()
	}
	// Line comment: discard until the newline.
	for r != '\n' && err == nil {
		lexer.LogDebug("Skipping comment %c.", r)
		_, err = lexer.Skip()
		if err != nil {
			return err
		}
		r, err = lexer.Peek()
	}
	if err != nil {
		return err
	}
	// Consume the terminating newline as well.
	_, err = lexer.Skip()
	return err
}
  309. /* Handles errors including EOF by either returning an error token or an
  310. * EOF token.
  311. */
  312. func (lexer *Lexer) handleError(err error) Token {
  313. if err == io.EOF {
  314. return lexer.MakeEOFToken()
  315. } else {
  316. return lexer.MakeErrorToken(err)
  317. }
  318. }
  319. func (lexer *Lexer) LexNumber() Token {
  320. isFloat := false
  321. // skip any first - or +
  322. _, err := lexer.NextIf(func(r rune) bool {
  323. return r == '-' || r == '+'
  324. })
  325. _, err = lexer.NextWhile(func(r rune) bool {
  326. if unicode.IsDigit(r) {
  327. return true
  328. } else if r == '.' {
  329. if isFloat {
  330. return false // double point in floating point
  331. } else {
  332. isFloat = true
  333. return true
  334. }
  335. } else {
  336. return false
  337. }
  338. })
  339. if err != nil {
  340. return lexer.MakeErrorfToken("when parsing number: %s", err)
  341. }
  342. if isFloat {
  343. return lexer.MakeToken(TokenKindFloat)
  344. } else {
  345. return lexer.MakeToken(TokenKindInteger)
  346. }
  347. }
  348. func isDoubleQuote(r rune) bool {
  349. return r == '"'
  350. }
/* handleEscapeHexChars reads exactly amount hexadecimal digits,
 * decodes them as a code point and appends the resulting rune to the
 * token buffer. Used by handleEscape for \xNN, \uNNNN and \UNNNNNN. */
func (lexer *Lexer) handleEscapeHexChars(amount int) error {
	buffer := make([]byte, 0)
	r, err := lexer.Skip()
	for index := 0; err == nil && index < amount; {
		if unicode.Is(unicode.ASCII_Hex_Digit, r) {
			buffer = append(buffer, byte(r))
		} else {
			return fmt.Errorf("Not a hexadecimal digit: %c", r)
		}
		index++
		// Only read another rune while more digits are still required,
		// so the rune after the escape is not consumed.
		if index < amount {
			r, err = lexer.Skip()
		}
	}
	if err != nil {
		return err
	}
	// At most 6 hex digits are requested, which fits in 32 bits.
	i, err := strconv.ParseInt(string(buffer), 16, 32)
	if err != nil {
		return err
	}
	lexer.appendRune(rune(i))
	// NOTE(review): this trailing Peek propagates a lookahead error
	// (e.g. EOF immediately after the escape) to the caller — confirm
	// this is intended and not leftover debugging.
	_, err = lexer.Peek()
	return err
}
  376. func (lexer *Lexer) handleEscape() error {
  377. r, err := lexer.Skip()
  378. if err != nil {
  379. return err
  380. }
  381. switch r {
  382. case 'a':
  383. lexer.appendRune('\a')
  384. case 'b':
  385. lexer.appendRune('\b')
  386. case 'e':
  387. lexer.appendRune('\033')
  388. case 'f':
  389. lexer.appendRune('\f')
  390. case 'n':
  391. lexer.appendRune('\n')
  392. case 'r':
  393. lexer.appendRune('\r')
  394. case 't':
  395. lexer.appendRune('\t')
  396. case '\\':
  397. lexer.appendRune('\\')
  398. case '"':
  399. lexer.appendRune('"')
  400. // case 'o': fallthrough // No octals, for now.
  401. case 'x':
  402. err = lexer.handleEscapeHexChars(2)
  403. case 'u':
  404. err = lexer.handleEscapeHexChars(4)
  405. case 'U':
  406. err = lexer.handleEscapeHexChars(6)
  407. default:
  408. return fmt.Errorf("Unknown escape sequence character %c: %d", r, r)
  409. }
  410. return err
  411. }
  412. func (lexer *Lexer) LexString() Token {
  413. var err error
  414. var r rune
  415. _, err = lexer.Skip() // Skip first "
  416. if err != nil {
  417. return lexer.handleError(err)
  418. }
  419. r, err = lexer.Skip()
  420. for r != '"' && err == nil {
  421. if r == '\\' {
  422. err = lexer.handleEscape()
  423. if err != nil {
  424. return lexer.handleError(err)
  425. }
  426. } else {
  427. lexer.appendRune(r)
  428. // still inside the string
  429. }
  430. r, err = lexer.Skip()
  431. }
  432. if err != nil {
  433. return lexer.MakeErrorfToken("when parsing string: %s", err)
  434. }
  435. if err != nil {
  436. return lexer.handleError(err)
  437. }
  438. return lexer.MakeToken(TokenKindString)
  439. }
  440. func (lexer *Lexer) LexLongString() Token {
  441. var err error
  442. _, err = lexer.Skip()
  443. if err != nil {
  444. return lexer.handleError(err)
  445. }
  446. _, err = lexer.NextWhile(func(r rune) bool {
  447. return r != '`'
  448. })
  449. if err != nil {
  450. return lexer.MakeErrorfToken("when parsing long string: %s", err)
  451. }
  452. _, err = lexer.Skip()
  453. if err != nil {
  454. return lexer.handleError(err)
  455. }
  456. return lexer.MakeToken(TokenKindString)
  457. }
  458. func (lexer *Lexer) LexWordOrType(kind TokenKind) Token {
  459. var err error
  460. first := true
  461. _, err = lexer.Next()
  462. if err != nil {
  463. return lexer.handleError(err)
  464. }
  465. _, err = lexer.NextWhile(func(r rune) bool {
  466. if first {
  467. first = false
  468. return unicode.IsLetter(r) || r == '_'
  469. } else {
  470. return unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_'
  471. }
  472. })
  473. if err != nil {
  474. return lexer.handleError(err)
  475. }
  476. sbuffer := string(lexer.buffer)
  477. // handle keywords
  478. if kw, ok := lexer.Keywords[sbuffer] ; ok {
  479. return lexer.MakeTokenFromKeyword(kw)
  480. }
  481. return lexer.MakeToken(kind)
  482. }
// LexWord lexes an identifier starting with a non-uppercase letter as
// a word token (see the dispatch in lex).
func (lexer *Lexer) LexWord() Token {
	return lexer.LexWordOrType(TokenKindWord)
}
// LexType lexes an identifier starting with an uppercase letter as a
// type token (see the dispatch in lex).
func (lexer *Lexer) LexType() Token {
	return lexer.LexWordOrType(TokenKindType)
}
  489. func (lexer *Lexer) LexSymbol() Token {
  490. var err error
  491. _, err = lexer.Skip()
  492. if err != nil {
  493. return lexer.handleError(err)
  494. }
  495. _, err = lexer.NextWhile(func(r rune) bool {
  496. return !isSpaceOrEOX(r)
  497. })
  498. if err != nil {
  499. return lexer.handleError(err)
  500. }
  501. return lexer.MakeToken(TokenKindSymbol)
  502. }
  503. func (lexer *Lexer) LexBuiltin() Token {
  504. var err error
  505. _, err = lexer.Skip()
  506. if err != nil {
  507. return lexer.handleError(err)
  508. }
  509. _, err = lexer.NextWhile(func(r rune) bool {
  510. return !isSpaceOrEOX(r)
  511. })
  512. if err != nil {
  513. return lexer.handleError(err)
  514. }
  515. return lexer.MakeBuiltinToken()
  516. }
/* skipSpaceAndCommentAndPeek skips any interleaving of horizontal
 * whitespace and comments, then returns the first meaningful rune
 * without consuming it. */
func (lexer *Lexer) skipSpaceAndCommentAndPeek() (rune, error) {
	r, err := lexer.Peek()
	if err != nil {
		return r, err
	}
	i := 0 // iteration counter, used only in the debug log below
	for isSpace(r) || isComment(r) {
		if isSpace(r) {
			err = lexer.SkipSpace()
		} else if isComment(r) {
			err = lexer.SkipComment()
		}
		if err != nil {
			return r, err
		}
		i++
		r, err = lexer.Peek()
		lexer.LogDebug("Peeked again: >%c< %v %v %d", r, isSpace(r), isComment(r), i)
		if err != nil {
			return r, err
		}
	}
	return r, err
}
  541. func (lexer *Lexer) LexEOX() Token {
  542. lexer.Next()
  543. _, err := lexer.skipSpaceAndCommentAndPeek()
  544. if err != nil {
  545. return lexer.handleError(err)
  546. }
  547. _, err = lexer.NextWhile(func(r rune) bool {
  548. return isSpaceOrEOX(r) || r == '\n' || r == '.'
  549. })
  550. if err != nil {
  551. return lexer.handleError(err)
  552. }
  553. return lexer.MakeToken(TokenKindEOX)
  554. }
/* lex skips leading whitespace and comments, then dispatches on the
 * first meaningful rune to the specialized Lex* helpers and returns
 * the next token. */
func (lexer *Lexer) lex() Token {
	r, err := lexer.skipSpaceAndCommentAndPeek()
	lexer.LogDebug(" After skip: >%c< >%v<\n", r, err)
	if err != nil {
		return lexer.handleError(err)
	}
	// Numbers: a digit or a leading sign.
	if unicode.IsDigit(r) || r == '-' || r == '+' {
		return lexer.LexNumber()
	}
	// End of expression: newline or period.
	if r == '\n' || r == '.' {
		return lexer.LexEOX()
	}
	if r == '"' {
		return lexer.LexString()
	}
	if r == '`' {
		return lexer.LexLongString()
	}
	// '!' introduces a builtin literal (true, false, nil).
	if r == '!' {
		return lexer.LexBuiltin()
	}
	// ':' introduces a symbol.
	if r == ':' {
		return lexer.LexSymbol()
	}
	// Single-rune punctuation: the token kind constant doubles as the
	// rune itself, so the rune converts directly to its kind.
	switch TokenKind(r) {
	case TokenKindGet:
		fallthrough
	case TokenKindSet:
		fallthrough
	case TokenKindOpenBlock:
		fallthrough
	case TokenKindCloseBlock:
		fallthrough
	case TokenKindOpenList:
		fallthrough
	case TokenKindCloseList:
		fallthrough
	case TokenKindOpenParen:
		fallthrough
	case TokenKindCloseParen:
		lexer.Next()
		return lexer.MakeToken(TokenKind(r))
	default:
	}
	// Identifiers: uppercase-initial ones are types, others words.
	if unicode.IsLetter(r) {
		if unicode.IsUpper(r) {
			return lexer.LexType()
		} else {
			return lexer.LexWord()
		}
	}
	// EOF character (DEL, 0x7f) explicitly ends the input.
	if r == 0x7f {
		return lexer.MakeEOFToken()
	}
	return lexer.MakeErrorfToken("Unknown character: %c", r)
}
  612. func (lexer *Lexer) Lex() Token {
  613. res := lexer.lex()
  614. lexer.ClearBuffer() // ensure buffer is cleared after lexing, always.
  615. return res
  616. }
  617. func (lexer *Lexer) LexAll() []Token {
  618. var token Token
  619. res := make([]Token, 0)
  620. for token = lexer.Lex(); !token.IsLast(); token = lexer.Lex() {
  621. res = append(res, token)
  622. }
  623. res = append(res, token)
  624. return res
  625. }
  626. func NewLexer(scanner io.RuneScanner, filename string) *Lexer {
  627. lexer := &Lexer{}
  628. lexer.RuneScanner = scanner
  629. lexer.Position.FileName = filename
  630. lexer.Position.Column = 1
  631. lexer.Position.Line = 1
  632. lexer.LoggerWrapper = LoggerWrapper{nil}
  633. lexer.Keywords = make(map[string]*Keyword)
  634. return lexer
  635. }
  636. func (lexer * Lexer) Report() {
  637. if lexer == nil {
  638. fmt.Printf("Lexer: is nil\n")
  639. } else {
  640. fmt.Printf("Lexer: %s:%d:%d\n",
  641. lexer.Position.FileName,
  642. lexer.Position.Column,
  643. lexer.Position.Line)
  644. }
  645. }
  646. func (lexer *Lexer) AddKeyword(kw * Keyword) *Keyword {
  647. if kw != nil {
  648. lexer.Keywords[kw.Name] = kw
  649. }
  650. return kw
  651. }
  652. func (lexer *Lexer) NewKeyword(name string, kind TokenKind, value Value) *Keyword {
  653. kw := &Keyword{Name: name, TokenKind: kind, Value: value}
  654. return lexer.AddKeyword(kw)
  655. }
  656. func NewLexerFromString(input string) *Lexer {
  657. reader := strings.NewReader(input)
  658. return NewLexer(reader, "<input>")
  659. }
  660. func NewLexerFromFilename(filename string) (*Lexer, error) {
  661. read, err := os.Open(filename)
  662. if err == nil {
  663. bread := bufio.NewReader(read)
  664. lex := NewLexer(bread, filename)
  665. return lex, nil
  666. }
  667. return nil, err
  668. }