Commit e8e55955 authored by Matthew Holt

Report error when loading the lexer

parent 8b8afd72
@@ -19,9 +19,10 @@ type Dispenser struct {
 
 // NewDispenser returns a Dispenser, ready to use for parsing the given input.
 func NewDispenser(filename string, input io.Reader) Dispenser {
+	tokens, _ := allTokens(input) // ignoring error because nothing to do with it
 	return Dispenser{
 		filename: filename,
-		tokens:   allTokens(input),
+		tokens:   tokens,
 		cursor:   -1,
 	}
 }
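For context on the hunk above: allTokens can now fail, but NewDispenser keeps its error-free signature, so the lexing error is discarded explicitly with the blank identifier. A minimal, hypothetical sketch of that pattern, with stand-in names (lexAll, newDispenser) rather than the package's actual code:

// Hypothetical illustration of the trade-off in NewDispenser above: a
// fallible helper inside a constructor whose signature stays error-free.
package main

import (
	"fmt"
	"strings"
)

// lexAll stands in for allTokens: it may fail while reading its input.
func lexAll(input string) ([]string, error) {
	if input == "" {
		return nil, fmt.Errorf("nothing to lex")
	}
	return strings.Fields(input), nil
}

type dispenser struct {
	tokens []string
	cursor int
}

// newDispenser mirrors the shape of NewDispenser: the error from lexAll is
// deliberately dropped because the constructor has no way to report it.
func newDispenser(input string) dispenser {
	tokens, _ := lexAll(input) // ignoring error, as in the commit
	return dispenser{tokens: tokens, cursor: -1}
}

func main() {
	d := newDispenser("a b c")
	fmt.Println(len(d.tokens)) // 3
}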
@@ -33,7 +33,10 @@ func (l *lexer) load(input io.Reader) error {
 
 	// discard byte order mark, if present
 	firstCh, _, err := l.reader.ReadRune()
-	if err == nil && firstCh != 0xFEFF {
+	if err != nil {
+		return err
+	}
+	if firstCh != 0xFEFF {
 		err := l.reader.UnreadRune()
 		if err != nil {
 			return err
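The hunk above changes lexer.load so that a failed ReadRune is reported instead of being folded into the byte-order-mark check. A self-contained sketch of the same control flow, using an assumed helper name (skipBOM) rather than the lexer's real code:

// Sketch: discard a leading U+FEFF if present, surfacing any read error.
package main

import (
	"bufio"
	"fmt"
	"io"
	"strings"
)

// skipBOM mirrors the post-commit flow: return the read error immediately,
// and only push the rune back when it is not a byte order mark.
func skipBOM(input io.Reader) (*bufio.Reader, error) {
	reader := bufio.NewReader(input)
	firstCh, _, err := reader.ReadRune()
	if err != nil {
		return nil, err // previously this error was silently lost
	}
	if firstCh != 0xFEFF {
		if err := reader.UnreadRune(); err != nil {
			return nil, err
		}
	}
	return reader, nil
}

func main() {
	r, err := skipBOM(strings.NewReader("\uFEFFhello"))
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	rest, _ := io.ReadAll(r)
	fmt.Println(string(rest)) // hello
}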
@@ -22,13 +22,17 @@ func Parse(filename string, input io.Reader, validDirectives []string) ([]Server
 // allTokens lexes the entire input, but does not parse it.
 // It returns all the tokens from the input, unstructured
 // and in order.
-func allTokens(input io.Reader) (tokens []Token) {
+func allTokens(input io.Reader) ([]Token, error) {
 	l := new(lexer)
-	l.load(input)
+	err := l.load(input)
+	if err != nil {
+		return nil, err
+	}
+	var tokens []Token
 	for l.next() {
 		tokens = append(tokens, l.token)
 	}
-	return
+	return tokens, nil
 }
 
 type parser struct {
@@ -294,7 +298,10 @@ func (p *parser) doSingleImport(importFile string) ([]Token, error) {
 		return nil, p.Errf("Could not import %s: is a directory", importFile)
 	}
 
-	importedTokens := allTokens(file)
+	importedTokens, err := allTokens(file)
+	if err != nil {
+		return nil, p.Errf("Could not read tokens while importing %s: %v", importFile, err)
+	}
 
 	// Tack the filename onto these tokens so errors show the imported file's name
 	filename := filepath.Base(importFile)
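The two hunks above give allTokens an error return and make doSingleImport wrap a lexing failure with the imported file's name. A hedged sketch of that error flow; lexTokens, importTokens, and extra.conf are stand-in names for illustration, not code or files from the repository:

// Sketch: a lexing helper that returns ([]Token, error), and an import path
// that adds the file name before propagating a failure.
package main

import (
	"fmt"
	"io"
	"strings"
)

type Token struct{ Text string }

// lexTokens stands in for allTokens: lex everything or report why not.
func lexTokens(input io.Reader) ([]Token, error) {
	data, err := io.ReadAll(input)
	if err != nil {
		return nil, err
	}
	var tokens []Token
	for _, f := range strings.Fields(string(data)) {
		tokens = append(tokens, Token{Text: f})
	}
	return tokens, nil
}

// importTokens mirrors doSingleImport's handling: a lexing failure gains the
// imported file's name so the resulting message points at the right place.
func importTokens(importFile string, file io.Reader) ([]Token, error) {
	tokens, err := lexTokens(file)
	if err != nil {
		return nil, fmt.Errorf("could not read tokens while importing %s: %v", importFile, err)
	}
	return tokens, nil
}

func main() {
	tokens, err := importTokens("extra.conf", strings.NewReader("gzip\nlog stdout"))
	fmt.Println(len(tokens), err) // 3 <nil>
}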
@@ -9,8 +9,11 @@ import (
 func TestAllTokens(t *testing.T) {
 	input := strings.NewReader("a b c\nd e")
 	expected := []string{"a", "b", "c", "d", "e"}
-	tokens := allTokens(input)
+	tokens, err := allTokens(input)
+	if err != nil {
+		t.Fatalf("Expected no error, got %v", err)
+	}
 
 	if len(tokens) != len(expected) {
 		t.Fatalf("Expected %d tokens, got %d", len(expected), len(tokens))
 	}
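The updated test still only covers the happy path plus the new err check. A hypothetical companion test, not part of this commit, that exercises the error path with a reader that always fails; it assumes it lives next to parse_test.go so the unexported allTokens is reachable:

package caddyfile // assumed package name, for illustration only

import (
	"errors"
	"testing"
)

// errReader stands in for an unreadable Caddyfile source: every read fails.
type errReader struct{}

func (errReader) Read(p []byte) (int, error) {
	return 0, errors.New("read failure")
}

// TestAllTokensReportsLoadError is a hypothetical test for the error path
// introduced above: a failing reader should surface as a returned error.
func TestAllTokensReportsLoadError(t *testing.T) {
	tokens, err := allTokens(errReader{})
	if err == nil {
		t.Fatal("Expected an error from a failing reader, got nil")
	}
	if len(tokens) != 0 {
		t.Fatalf("Expected no tokens on error, got %d", len(tokens))
	}
}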