Mirror of https://github.com/coredns/coredns.git
First commit
core/parse/dispenser.go (new file)
@@ -0,0 +1,251 @@
package parse

import (
	"errors"
	"fmt"
	"io"
	"strings"
)

// Dispenser is a type that dispenses tokens, similarly to a lexer,
// except that it can do so with some notion of structure and has
// some really convenient methods.
type Dispenser struct {
	filename string
	tokens   []token
	cursor   int
	nesting  int
}

// NewDispenser returns a Dispenser, ready to use for parsing the given input.
func NewDispenser(filename string, input io.Reader) Dispenser {
	return Dispenser{
		filename: filename,
		tokens:   allTokens(input),
		cursor:   -1,
	}
}

// NewDispenserTokens returns a Dispenser filled with the given tokens.
func NewDispenserTokens(filename string, tokens []token) Dispenser {
	return Dispenser{
		filename: filename,
		tokens:   tokens,
		cursor:   -1,
	}
}

// Next loads the next token. Returns true if a token
// was loaded; false otherwise. If false, all tokens
// have been consumed.
func (d *Dispenser) Next() bool {
	if d.cursor < len(d.tokens)-1 {
		d.cursor++
		return true
	}
	return false
}

// NextArg loads the next token if it is on the same
// line. Returns true if a token was loaded; false
// otherwise. If false, all tokens on the line have
// been consumed. It handles imported tokens correctly.
func (d *Dispenser) NextArg() bool {
	if d.cursor < 0 {
		d.cursor++
		return true
	}
	if d.cursor >= len(d.tokens) {
		return false
	}
	if d.cursor < len(d.tokens)-1 &&
		d.tokens[d.cursor].file == d.tokens[d.cursor+1].file &&
		d.tokens[d.cursor].line+d.numLineBreaks(d.cursor) == d.tokens[d.cursor+1].line {
		d.cursor++
		return true
	}
	return false
}

// NextLine loads the next token only if it is not on the same
// line as the current token, and returns true if a token was
// loaded; false otherwise. If false, there is not another token
// or it is on the same line. It handles imported tokens correctly.
func (d *Dispenser) NextLine() bool {
	if d.cursor < 0 {
		d.cursor++
		return true
	}
	if d.cursor >= len(d.tokens) {
		return false
	}
	if d.cursor < len(d.tokens)-1 &&
		(d.tokens[d.cursor].file != d.tokens[d.cursor+1].file ||
			d.tokens[d.cursor].line+d.numLineBreaks(d.cursor) < d.tokens[d.cursor+1].line) {
		d.cursor++
		return true
	}
	return false
}

// NextBlock can be used as the condition of a for loop
// to load the next token as long as it opens a block or
// is already in a block. It returns true if a token was
// loaded, or false when the block's closing curly brace
// was loaded and thus the block ended. Nested blocks are
// not supported.
func (d *Dispenser) NextBlock() bool {
	if d.nesting > 0 {
		d.Next()
		if d.Val() == "}" {
			d.nesting--
			return false
		}
		return true
	}
	if !d.NextArg() { // block must open on same line
		return false
	}
	if d.Val() != "{" {
		d.cursor-- // roll back if not opening brace
		return false
	}
	d.Next()
	if d.Val() == "}" {
		// Open and then closed right away
		return false
	}
	d.nesting++
	return true
}

// IncrNest adds a level of nesting to the dispenser.
func (d *Dispenser) IncrNest() {
	d.nesting++
	return
}

// Val gets the text of the current token. If there is no token
// loaded, it returns empty string.
func (d *Dispenser) Val() string {
	if d.cursor < 0 || d.cursor >= len(d.tokens) {
		return ""
	}
	return d.tokens[d.cursor].text
}

// Line gets the line number of the current token. If there is no token
// loaded, it returns 0.
func (d *Dispenser) Line() int {
	if d.cursor < 0 || d.cursor >= len(d.tokens) {
		return 0
	}
	return d.tokens[d.cursor].line
}

// File gets the filename of the current token. If there is no token loaded,
// it returns the filename originally given when parsing started.
func (d *Dispenser) File() string {
	if d.cursor < 0 || d.cursor >= len(d.tokens) {
		return d.filename
	}
	if tokenFilename := d.tokens[d.cursor].file; tokenFilename != "" {
		return tokenFilename
	}
	return d.filename
}

// Args is a convenience function that loads the next arguments
// (tokens on the same line) into an arbitrary number of strings
// pointed to in targets. If there are fewer tokens available
// than string pointers, the remaining strings will not be changed
// and false will be returned. If there were enough tokens available
// to fill the arguments, then true will be returned.
func (d *Dispenser) Args(targets ...*string) bool {
	enough := true
	for i := 0; i < len(targets); i++ {
		if !d.NextArg() {
			enough = false
			break
		}
		*targets[i] = d.Val()
	}
	return enough
}

// RemainingArgs loads any more arguments (tokens on the same line)
// into a slice and returns them. Open curly brace tokens also indicate
// the end of arguments, and the curly brace is not included in
// the return value nor is it loaded.
func (d *Dispenser) RemainingArgs() []string {
	var args []string

	for d.NextArg() {
		if d.Val() == "{" {
			d.cursor--
			break
		}
		args = append(args, d.Val())
	}

	return args
}

// ArgErr returns an argument error, meaning that another
// argument was expected but not found. In other words,
// a line break or open curly brace was encountered instead of
// an argument.
func (d *Dispenser) ArgErr() error {
	if d.Val() == "{" {
		return d.Err("Unexpected token '{', expecting argument")
	}
	return d.Errf("Wrong argument count or unexpected line ending after '%s'", d.Val())
}

// SyntaxErr creates a generic syntax error which explains what was
// found and what was expected.
func (d *Dispenser) SyntaxErr(expected string) error {
	msg := fmt.Sprintf("%s:%d - Syntax error: Unexpected token '%s', expecting '%s'", d.File(), d.Line(), d.Val(), expected)
	return errors.New(msg)
}

// EOFErr returns an error indicating that the dispenser reached
// the end of the input when searching for the next token.
func (d *Dispenser) EOFErr() error {
	return d.Errf("Unexpected EOF")
}

// Err generates a custom parse error with a message of msg.
func (d *Dispenser) Err(msg string) error {
	msg = fmt.Sprintf("%s:%d - Parse error: %s", d.File(), d.Line(), msg)
	return errors.New(msg)
}

// Errf is like Err, but for formatted error messages
func (d *Dispenser) Errf(format string, args ...interface{}) error {
	return d.Err(fmt.Sprintf(format, args...))
}

// numLineBreaks counts how many line breaks are in the token
// value given by the token index tknIdx. It returns 0 if the
// token does not exist or there are no line breaks.
func (d *Dispenser) numLineBreaks(tknIdx int) int {
	if tknIdx < 0 || tknIdx >= len(d.tokens) {
		return 0
	}
	return strings.Count(d.tokens[tknIdx].text, "\n")
}

// isNewLine determines whether the current token is on a different
// line (higher line number) than the previous token. It handles imported
// tokens correctly. If there isn't a previous token, it returns true.
func (d *Dispenser) isNewLine() bool {
	if d.cursor < 1 {
		return true
	}
	if d.cursor > len(d.tokens)-1 {
		return false
	}
	return d.tokens[d.cursor-1].file != d.tokens[d.cursor].file ||
		d.tokens[d.cursor-1].line+d.numLineBreaks(d.cursor-1) < d.tokens[d.cursor].line
}
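The Dispenser above is what directive setup code is expected to work against: Next, NextArg and NextBlock walk the token stream, while Val, ArgErr and Errf report on the current token. As a rough, hedged sketch (not part of this commit; the directive name dir1 and the option names are invented), a setup function might consume its tokens like this:

// Hypothetical sketch only: parse a directive of the form
//
//	dir1 arg1 {
//	    option value
//	}
//
// using the Dispenser defined above.
func parseDir1(d Dispenser) (map[string]string, error) {
	opts := make(map[string]string)
	for d.Next() { // advance onto the directive name
		if !d.NextArg() {
			return nil, d.ArgErr() // dir1 requires at least one argument
		}
		opts["arg"] = d.Val()
		for d.NextBlock() { // walk the optional { ... } block, one token at a time
			key := d.Val()
			if !d.NextArg() {
				return nil, d.ArgErr()
			}
			opts[key] = d.Val()
		}
	}
	return opts, nil
}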
core/parse/dispenser_test.go (new file)
@@ -0,0 +1,292 @@
package parse

import (
	"reflect"
	"strings"
	"testing"
)

func TestDispenser_Val_Next(t *testing.T) {
	input := `host:port
			  dir1 arg1
			  dir2 arg2 arg3
			  dir3`
	d := NewDispenser("Testfile", strings.NewReader(input))

	if val := d.Val(); val != "" {
		t.Fatalf("Val(): Should return empty string when no token loaded; got '%s'", val)
	}

	assertNext := func(shouldLoad bool, expectedCursor int, expectedVal string) {
		if loaded := d.Next(); loaded != shouldLoad {
			t.Errorf("Next(): Expected %v but got %v instead (val '%s')", shouldLoad, loaded, d.Val())
		}
		if d.cursor != expectedCursor {
			t.Errorf("Expected cursor to be %d, but was %d", expectedCursor, d.cursor)
		}
		if d.nesting != 0 {
			t.Errorf("Nesting should be 0, was %d instead", d.nesting)
		}
		if val := d.Val(); val != expectedVal {
			t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
		}
	}

	assertNext(true, 0, "host:port")
	assertNext(true, 1, "dir1")
	assertNext(true, 2, "arg1")
	assertNext(true, 3, "dir2")
	assertNext(true, 4, "arg2")
	assertNext(true, 5, "arg3")
	assertNext(true, 6, "dir3")
	// Note: This next test simply asserts existing behavior.
	// If desired, we may wish to empty the token value after
	// reading past the EOF. Open an issue if you want this change.
	assertNext(false, 6, "dir3")
}

func TestDispenser_NextArg(t *testing.T) {
	input := `dir1 arg1
			  dir2 arg2 arg3
			  dir3`
	d := NewDispenser("Testfile", strings.NewReader(input))

	assertNext := func(shouldLoad bool, expectedVal string, expectedCursor int) {
		if d.Next() != shouldLoad {
			t.Errorf("Next(): Should load token but got false instead (val: '%s')", d.Val())
		}
		if d.cursor != expectedCursor {
			t.Errorf("Next(): Expected cursor to be at %d, but it was %d", expectedCursor, d.cursor)
		}
		if val := d.Val(); val != expectedVal {
			t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
		}
	}

	assertNextArg := func(expectedVal string, loadAnother bool, expectedCursor int) {
		if d.NextArg() != true {
			t.Error("NextArg(): Should load next argument but got false instead")
		}
		if d.cursor != expectedCursor {
			t.Errorf("NextArg(): Expected cursor to be at %d, but it was %d", expectedCursor, d.cursor)
		}
		if val := d.Val(); val != expectedVal {
			t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
		}
		if !loadAnother {
			if d.NextArg() != false {
				t.Fatalf("NextArg(): Should NOT load another argument, but got true instead (val: '%s')", d.Val())
			}
			if d.cursor != expectedCursor {
				t.Errorf("NextArg(): Expected cursor to remain at %d, but it was %d", expectedCursor, d.cursor)
			}
		}
	}

	assertNext(true, "dir1", 0)
	assertNextArg("arg1", false, 1)
	assertNext(true, "dir2", 2)
	assertNextArg("arg2", true, 3)
	assertNextArg("arg3", false, 4)
	assertNext(true, "dir3", 5)
	assertNext(false, "dir3", 5)
}

func TestDispenser_NextLine(t *testing.T) {
	input := `host:port
			  dir1 arg1
			  dir2 arg2 arg3`
	d := NewDispenser("Testfile", strings.NewReader(input))

	assertNextLine := func(shouldLoad bool, expectedVal string, expectedCursor int) {
		if d.NextLine() != shouldLoad {
			t.Errorf("NextLine(): Should load token but got false instead (val: '%s')", d.Val())
		}
		if d.cursor != expectedCursor {
			t.Errorf("NextLine(): Expected cursor to be %d, instead was %d", expectedCursor, d.cursor)
		}
		if val := d.Val(); val != expectedVal {
			t.Errorf("Val(): Expected '%s' but got '%s'", expectedVal, val)
		}
	}

	assertNextLine(true, "host:port", 0)
	assertNextLine(true, "dir1", 1)
	assertNextLine(false, "dir1", 1)
	d.Next() // arg1
	assertNextLine(true, "dir2", 3)
	assertNextLine(false, "dir2", 3)
	d.Next() // arg2
	assertNextLine(false, "arg2", 4)
	d.Next() // arg3
	assertNextLine(false, "arg3", 5)
}

func TestDispenser_NextBlock(t *testing.T) {
	input := `foobar1 {
				sub1 arg1
				sub2
			  }
			  foobar2 {
			  }`
	d := NewDispenser("Testfile", strings.NewReader(input))

	assertNextBlock := func(shouldLoad bool, expectedCursor, expectedNesting int) {
		if loaded := d.NextBlock(); loaded != shouldLoad {
			t.Errorf("NextBlock(): Should return %v but got %v", shouldLoad, loaded)
		}
		if d.cursor != expectedCursor {
			t.Errorf("NextBlock(): Expected cursor to be %d, was %d", expectedCursor, d.cursor)
		}
		if d.nesting != expectedNesting {
			t.Errorf("NextBlock(): Nesting should be %d, not %d", expectedNesting, d.nesting)
		}
	}

	assertNextBlock(false, -1, 0)
	d.Next() // foobar1
	assertNextBlock(true, 2, 1)
	assertNextBlock(true, 3, 1)
	assertNextBlock(true, 4, 1)
	assertNextBlock(false, 5, 0)
	d.Next() // foobar2
	assertNextBlock(false, 8, 0) // empty block is as if it didn't exist
}

func TestDispenser_Args(t *testing.T) {
	var s1, s2, s3 string
	input := `dir1 arg1 arg2 arg3
			  dir2 arg4 arg5
			  dir3 arg6 arg7
			  dir4`
	d := NewDispenser("Testfile", strings.NewReader(input))

	d.Next() // dir1

	// As many strings as arguments
	if all := d.Args(&s1, &s2, &s3); !all {
		t.Error("Args(): Expected true, got false")
	}
	if s1 != "arg1" {
		t.Errorf("Args(): Expected s1 to be 'arg1', got '%s'", s1)
	}
	if s2 != "arg2" {
		t.Errorf("Args(): Expected s2 to be 'arg2', got '%s'", s2)
	}
	if s3 != "arg3" {
		t.Errorf("Args(): Expected s3 to be 'arg3', got '%s'", s3)
	}

	d.Next() // dir2

	// More strings than arguments
	if all := d.Args(&s1, &s2, &s3); all {
		t.Error("Args(): Expected false, got true")
	}
	if s1 != "arg4" {
		t.Errorf("Args(): Expected s1 to be 'arg4', got '%s'", s1)
	}
	if s2 != "arg5" {
		t.Errorf("Args(): Expected s2 to be 'arg5', got '%s'", s2)
	}
	if s3 != "arg3" {
		t.Errorf("Args(): Expected s3 to be unchanged ('arg3'), instead got '%s'", s3)
	}

	// (quick cursor check just for kicks and giggles)
	if d.cursor != 6 {
		t.Errorf("Cursor should be 6, but is %d", d.cursor)
	}

	d.Next() // dir3

	// More arguments than strings
	if all := d.Args(&s1); !all {
		t.Error("Args(): Expected true, got false")
	}
	if s1 != "arg6" {
		t.Errorf("Args(): Expected s1 to be 'arg6', got '%s'", s1)
	}

	d.Next() // dir4

	// No arguments or strings
	if all := d.Args(); !all {
		t.Error("Args(): Expected true, got false")
	}

	// No arguments but at least one string
	if all := d.Args(&s1); all {
		t.Error("Args(): Expected false, got true")
	}
}

func TestDispenser_RemainingArgs(t *testing.T) {
	input := `dir1 arg1 arg2 arg3
			  dir2 arg4 arg5
			  dir3 arg6 { arg7
			  dir4`
	d := NewDispenser("Testfile", strings.NewReader(input))

	d.Next() // dir1

	args := d.RemainingArgs()
	if expected := []string{"arg1", "arg2", "arg3"}; !reflect.DeepEqual(args, expected) {
		t.Errorf("RemainingArgs(): Expected %v, got %v", expected, args)
	}

	d.Next() // dir2

	args = d.RemainingArgs()
	if expected := []string{"arg4", "arg5"}; !reflect.DeepEqual(args, expected) {
		t.Errorf("RemainingArgs(): Expected %v, got %v", expected, args)
	}

	d.Next() // dir3

	args = d.RemainingArgs()
	if expected := []string{"arg6"}; !reflect.DeepEqual(args, expected) {
		t.Errorf("RemainingArgs(): Expected %v, got %v", expected, args)
	}

	d.Next() // {
	d.Next() // arg7
	d.Next() // dir4

	args = d.RemainingArgs()
	if len(args) != 0 {
		t.Errorf("RemainingArgs(): Expected %v, got %v", []string{}, args)
	}
}

func TestDispenser_ArgErr_Err(t *testing.T) {
	input := `dir1 {
			  }
			  dir2 arg1 arg2`
	d := NewDispenser("Testfile", strings.NewReader(input))

	d.cursor = 1 // {

	if err := d.ArgErr(); err == nil || !strings.Contains(err.Error(), "{") {
		t.Errorf("ArgErr(): Expected an error message with { in it, but got '%v'", err)
	}

	d.cursor = 5 // arg2

	if err := d.ArgErr(); err == nil || !strings.Contains(err.Error(), "arg2") {
		t.Errorf("ArgErr(): Expected an error message with 'arg2' in it; got '%v'", err)
	}

	err := d.Err("foobar")
	if err == nil {
		t.Fatalf("Err(): Expected an error, got nil")
	}

	if !strings.Contains(err.Error(), "Testfile:3") {
		t.Errorf("Expected error message with filename:line in it; got '%v'", err)
	}

	if !strings.Contains(err.Error(), "foobar") {
		t.Errorf("Expected error message with custom message in it ('foobar'); got '%v'", err)
	}
}
core/parse/import_glob0.txt (new file)
@@ -0,0 +1,6 @@
glob0.host0 {
	dir2 arg1
}

glob0.host1 {
}
core/parse/import_glob1.txt (new file)
@@ -0,0 +1,4 @@
glob1.host0 {
	dir1
	dir2 arg1
}
core/parse/import_glob2.txt (new file)
@@ -0,0 +1,3 @@
glob2.host0 {
	dir2 arg1
}
core/parse/import_test1.txt (new file)
@@ -0,0 +1,2 @@
dir2 arg1 arg2
dir3
core/parse/import_test2.txt (new file)
@@ -0,0 +1,4 @@
host1 {
	dir1
	dir2 arg1
}
core/parse/lexer.go (new file)
@@ -0,0 +1,122 @@
package parse

import (
	"bufio"
	"io"
	"unicode"
)

type (
	// lexer is a utility which can get values, token by
	// token, from a Reader. A token is a word, and tokens
	// are separated by whitespace. A word can be enclosed
	// in quotes if it contains whitespace.
	lexer struct {
		reader *bufio.Reader
		token  token
		line   int
	}

	// token represents a single parsable unit.
	token struct {
		file string
		line int
		text string
	}
)

// load prepares the lexer to scan an input for tokens.
func (l *lexer) load(input io.Reader) error {
	l.reader = bufio.NewReader(input)
	l.line = 1
	return nil
}

// next loads the next token into the lexer.
// A token is delimited by whitespace, unless
// the token starts with a quote character (")
// in which case the token goes until the closing
// quote (the enclosing quotes are not included).
// Inside quoted strings, quotes may be escaped
// with a preceding \ character. No other chars
// may be escaped. The rest of the line is skipped
// if a "#" character is read in. Returns true if
// a token was loaded; false otherwise.
func (l *lexer) next() bool {
	var val []rune
	var comment, quoted, escaped bool

	makeToken := func() bool {
		l.token.text = string(val)
		return true
	}

	for {
		ch, _, err := l.reader.ReadRune()
		if err != nil {
			if len(val) > 0 {
				return makeToken()
			}
			if err == io.EOF {
				return false
			}
			panic(err)
		}

		if quoted {
			if !escaped {
				if ch == '\\' {
					escaped = true
					continue
				} else if ch == '"' {
					quoted = false
					return makeToken()
				}
			}
			if ch == '\n' {
				l.line++
			}
			if escaped {
				// only escape quotes
				if ch != '"' {
					val = append(val, '\\')
				}
			}
			val = append(val, ch)
			escaped = false
			continue
		}

		if unicode.IsSpace(ch) {
			if ch == '\r' {
				continue
			}
			if ch == '\n' {
				l.line++
				comment = false
			}
			if len(val) > 0 {
				return makeToken()
			}
			continue
		}

		if ch == '#' {
			comment = true
		}

		if comment {
			continue
		}

		if len(val) == 0 {
			l.token = token{line: l.line}
			if ch == '"' {
				quoted = true
				continue
			}
		}

		val = append(val, ch)
	}
}
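Driving this lexer by hand is just load followed by repeated next calls, collecting l.token each time; that is essentially what the tokenize helper in the tests below does. A minimal sketch (assuming the strings package is imported; the helper name lexAll is invented):

// Sketch only: collect every token the lexer produces for a small input.
func lexAll(input string) []token {
	var tokens []token
	l := new(lexer)
	l.load(strings.NewReader(input))
	for l.next() {
		tokens = append(tokens, l.token) // each token carries its line number and text
	}
	return tokens
}

// For the input `host:123 { directive }` this yields four tokens,
// all on line 1: "host:123", "{", "directive", "}".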
core/parse/lexer_test.go (new file)
@@ -0,0 +1,165 @@
package parse

import (
	"strings"
	"testing"
)

type lexerTestCase struct {
	input    string
	expected []token
}

func TestLexer(t *testing.T) {
	testCases := []lexerTestCase{
		{
			input: `host:123`,
			expected: []token{
				{line: 1, text: "host:123"},
			},
		},
		{
			input: `host:123

					directive`,
			expected: []token{
				{line: 1, text: "host:123"},
				{line: 3, text: "directive"},
			},
		},
		{
			input: `host:123 {
						directive
					}`,
			expected: []token{
				{line: 1, text: "host:123"},
				{line: 1, text: "{"},
				{line: 2, text: "directive"},
				{line: 3, text: "}"},
			},
		},
		{
			input: `host:123 { directive }`,
			expected: []token{
				{line: 1, text: "host:123"},
				{line: 1, text: "{"},
				{line: 1, text: "directive"},
				{line: 1, text: "}"},
			},
		},
		{
			input: `host:123 {
						#comment
						directive
						# comment
						foobar # another comment
					}`,
			expected: []token{
				{line: 1, text: "host:123"},
				{line: 1, text: "{"},
				{line: 3, text: "directive"},
				{line: 5, text: "foobar"},
				{line: 6, text: "}"},
			},
		},
		{
			input: `a "quoted value" b
					foobar`,
			expected: []token{
				{line: 1, text: "a"},
				{line: 1, text: "quoted value"},
				{line: 1, text: "b"},
				{line: 2, text: "foobar"},
			},
		},
		{
			input: `A "quoted \"value\" inside" B`,
			expected: []token{
				{line: 1, text: "A"},
				{line: 1, text: `quoted "value" inside`},
				{line: 1, text: "B"},
			},
		},
		{
			input: `"don't\escape"`,
			expected: []token{
				{line: 1, text: `don't\escape`},
			},
		},
		{
			input: `"don't\\escape"`,
			expected: []token{
				{line: 1, text: `don't\\escape`},
			},
		},
		{
			input: `A "quoted value with line
					break inside" {
						foobar
					}`,
			expected: []token{
				{line: 1, text: "A"},
				{line: 1, text: "quoted value with line\n\t\t\t\t\tbreak inside"},
				{line: 2, text: "{"},
				{line: 3, text: "foobar"},
				{line: 4, text: "}"},
			},
		},
		{
			input: `"C:\php\php-cgi.exe"`,
			expected: []token{
				{line: 1, text: `C:\php\php-cgi.exe`},
			},
		},
		{
			input: `empty "" string`,
			expected: []token{
				{line: 1, text: `empty`},
				{line: 1, text: ``},
				{line: 1, text: `string`},
			},
		},
		{
			input: "skip those\r\nCR characters",
			expected: []token{
				{line: 1, text: "skip"},
				{line: 1, text: "those"},
				{line: 2, text: "CR"},
				{line: 2, text: "characters"},
			},
		},
	}

	for i, testCase := range testCases {
		actual := tokenize(testCase.input)
		lexerCompare(t, i, testCase.expected, actual)
	}
}

func tokenize(input string) (tokens []token) {
	l := lexer{}
	l.load(strings.NewReader(input))
	for l.next() {
		tokens = append(tokens, l.token)
	}
	return
}

func lexerCompare(t *testing.T, n int, expected, actual []token) {
	if len(expected) != len(actual) {
		t.Errorf("Test case %d: expected %d token(s) but got %d", n, len(expected), len(actual))
	}

	for i := 0; i < len(actual) && i < len(expected); i++ {
		if actual[i].line != expected[i].line {
			t.Errorf("Test case %d token %d ('%s'): expected line %d but was line %d",
				n, i, expected[i].text, expected[i].line, actual[i].line)
			break
		}
		if actual[i].text != expected[i].text {
			t.Errorf("Test case %d token %d: expected text '%s' but was '%s'",
				n, i, expected[i].text, actual[i].text)
			break
		}
	}
}
core/parse/parse.go (new file)
@@ -0,0 +1,32 @@
// Package parse provides facilities for parsing configuration files.
package parse

import "io"

// ServerBlocks parses the input just enough to organize tokens,
// in order, by server block. No further parsing is performed.
// If checkDirectives is true, only directives present in
// ValidDirectives are allowed; anything else is a parse error.
// Server blocks are returned in the order in which they appear.
func ServerBlocks(filename string, input io.Reader, checkDirectives bool) ([]ServerBlock, error) {
	p := parser{Dispenser: NewDispenser(filename, input)}
	p.checkDirectives = checkDirectives
	blocks, err := p.parseAll()
	return blocks, err
}

// allTokens lexes the entire input, but does not parse it.
// It returns all the tokens from the input, unstructured
// and in order.
func allTokens(input io.Reader) (tokens []token) {
	l := new(lexer)
	l.load(input)
	for l.next() {
		tokens = append(tokens, l.token)
	}
	return
}

// ValidDirectives is a set of directives that are valid (unordered). Populated
// by the config package's init function.
var ValidDirectives = make(map[string]struct{})
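ServerBlocks is the entry point the rest of the server is expected to call. A minimal, hedged sketch of how it might be used (not part of this commit; the "log" directive, the config contents, and the fmt/strings imports are assumptions made for the example, and ValidDirectives is normally populated by the config package):

// Sketch only: parse a small in-memory config into server blocks.
func exampleServerBlocks() error {
	ValidDirectives["log"] = struct{}{} // pretend "log" is a registered directive

	input := strings.NewReader("example.org:53 {\n\tlog\n}\n")
	blocks, err := ServerBlocks("Corefile", input, true)
	if err != nil {
		return err
	}
	for _, b := range blocks {
		// each block carries its addresses and its tokens grouped by directive
		fmt.Println(b.HostList(), len(b.Tokens))
	}
	return nil
}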
core/parse/parse_test.go (new file)
@@ -0,0 +1,22 @@
package parse

import (
	"strings"
	"testing"
)

func TestAllTokens(t *testing.T) {
	input := strings.NewReader("a b c\nd e")
	expected := []string{"a", "b", "c", "d", "e"}
	tokens := allTokens(input)

	if len(tokens) != len(expected) {
		t.Fatalf("Expected %d tokens, got %d", len(expected), len(tokens))
	}

	for i, val := range expected {
		if tokens[i].text != val {
			t.Errorf("Token %d should be '%s' but was '%s'", i, val, tokens[i].text)
		}
	}
}
core/parse/parsing.go (new file)
@@ -0,0 +1,379 @@
package parse

import (
	"net"
	"os"
	"path/filepath"
	"strings"

	"github.com/miekg/dns"
)

type parser struct {
	Dispenser
	block           ServerBlock // current server block being parsed
	eof             bool        // if we encounter a valid EOF in a hard place
	checkDirectives bool        // if true, directives must be known
}

func (p *parser) parseAll() ([]ServerBlock, error) {
	var blocks []ServerBlock

	for p.Next() {
		err := p.parseOne()
		if err != nil {
			return blocks, err
		}
		if len(p.block.Addresses) > 0 {
			blocks = append(blocks, p.block)
		}
	}

	return blocks, nil
}

func (p *parser) parseOne() error {
	p.block = ServerBlock{Tokens: make(map[string][]token)}

	err := p.begin()
	if err != nil {
		return err
	}

	return nil
}

func (p *parser) begin() error {
	if len(p.tokens) == 0 {
		return nil
	}

	err := p.addresses()
	if err != nil {
		return err
	}

	if p.eof {
		// this happens if the Caddyfile consists of only
		// a line of addresses and nothing else
		return nil
	}

	err = p.blockContents()
	if err != nil {
		return err
	}

	return nil
}

func (p *parser) addresses() error {
	var expectingAnother bool

	for {
		tkn := replaceEnvVars(p.Val())

		// special case: import directive replaces tokens during parse-time
		if tkn == "import" && p.isNewLine() {
			err := p.doImport()
			if err != nil {
				return err
			}
			continue
		}

		// Open brace definitely indicates end of addresses
		if tkn == "{" {
			if expectingAnother {
				return p.Errf("Expected another address but had '%s' - check for extra comma", tkn)
			}
			break
		}

		if tkn != "" { // empty token possible if user typed "" in Caddyfile
			// Trailing comma indicates another address will follow, which
			// may possibly be on the next line
			if tkn[len(tkn)-1] == ',' {
				tkn = tkn[:len(tkn)-1]
				expectingAnother = true
			} else {
				expectingAnother = false // but we may still see another one on this line
			}

			// Parse and save this address
			addr, err := standardAddress(tkn)
			if err != nil {
				return err
			}
			p.block.Addresses = append(p.block.Addresses, addr)
		}

		// Advance token and possibly break out of loop or return error
		hasNext := p.Next()
		if expectingAnother && !hasNext {
			return p.EOFErr()
		}
		if !hasNext {
			p.eof = true
			break // EOF
		}
		if !expectingAnother && p.isNewLine() {
			break
		}
	}

	return nil
}

func (p *parser) blockContents() error {
	errOpenCurlyBrace := p.openCurlyBrace()
	if errOpenCurlyBrace != nil {
		// single-server configs don't need curly braces
		p.cursor--
	}

	err := p.directives()
	if err != nil {
		return err
	}

	// Only look for close curly brace if there was an opening
	if errOpenCurlyBrace == nil {
		err = p.closeCurlyBrace()
		if err != nil {
			return err
		}
	}

	return nil
}

// directives parses through all the lines for directives
// and it expects the next token to be the first
// directive. It goes until EOF or closing curly brace
// which ends the server block.
func (p *parser) directives() error {
	for p.Next() {
		// end of server block
		if p.Val() == "}" {
			break
		}

		// special case: import directive replaces tokens during parse-time
		if p.Val() == "import" {
			err := p.doImport()
			if err != nil {
				return err
			}
			p.cursor-- // cursor is advanced when we continue, so roll back one more
			continue
		}

		// normal case: parse a directive on this line
		if err := p.directive(); err != nil {
			return err
		}
	}
	return nil
}

// doImport swaps out the import directive and its argument
// (a total of 2 tokens) with the tokens in the specified file
// or globbing pattern. When the function returns, the cursor
// is on the token before where the import directive was. In
// other words, call Next() to access the first token that was
// imported.
func (p *parser) doImport() error {
	// syntax check
	if !p.NextArg() {
		return p.ArgErr()
	}
	importPattern := p.Val()
	if p.NextArg() {
		return p.Err("Import takes only one argument (glob pattern or file)")
	}

	// do glob
	matches, err := filepath.Glob(importPattern)
	if err != nil {
		return p.Errf("Failed to use import pattern %s: %v", importPattern, err)
	}
	if len(matches) == 0 {
		return p.Errf("No files matching import pattern %s", importPattern)
	}

	// splice out the import directive and its argument (2 tokens total)
	tokensBefore := p.tokens[:p.cursor-1]
	tokensAfter := p.tokens[p.cursor+1:]

	// collect all the imported tokens
	var importedTokens []token
	for _, importFile := range matches {
		newTokens, err := p.doSingleImport(importFile)
		if err != nil {
			return err
		}
		importedTokens = append(importedTokens, newTokens...)
	}

	// splice the imported tokens in the place of the import statement
	// and rewind cursor so Next() will land on first imported token
	p.tokens = append(tokensBefore, append(importedTokens, tokensAfter...)...)
	p.cursor--

	return nil
}

// doSingleImport lexes the individual file at importFile and returns
// its tokens or an error, if any.
func (p *parser) doSingleImport(importFile string) ([]token, error) {
	file, err := os.Open(importFile)
	if err != nil {
		return nil, p.Errf("Could not import %s: %v", importFile, err)
	}
	defer file.Close()
	importedTokens := allTokens(file)

	// Tack the filename onto these tokens so errors show the imported file's name
	filename := filepath.Base(importFile)
	for i := 0; i < len(importedTokens); i++ {
		importedTokens[i].file = filename
	}

	return importedTokens, nil
}

// directive collects tokens until the directive's scope
// closes (either end of line or end of curly brace block).
// It expects the currently-loaded token to be a directive
// (or } that ends a server block). The collected tokens
// are loaded into the current server block for later use
// by directive setup functions.
func (p *parser) directive() error {
	dir := p.Val()
	nesting := 0

	if p.checkDirectives {
		if _, ok := ValidDirectives[dir]; !ok {
			return p.Errf("Unknown directive '%s'", dir)
		}
	}

	// The directive itself is appended as a relevant token
	p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])

	for p.Next() {
		if p.Val() == "{" {
			nesting++
		} else if p.isNewLine() && nesting == 0 {
			p.cursor-- // read too far
			break
		} else if p.Val() == "}" && nesting > 0 {
			nesting--
		} else if p.Val() == "}" && nesting == 0 {
			return p.Err("Unexpected '}' because no matching opening brace")
		}
		p.tokens[p.cursor].text = replaceEnvVars(p.tokens[p.cursor].text)
		p.block.Tokens[dir] = append(p.block.Tokens[dir], p.tokens[p.cursor])
	}

	if nesting > 0 {
		return p.EOFErr()
	}
	return nil
}

// openCurlyBrace expects the current token to be an
// opening curly brace. This acts like an assertion
// because it returns an error if the token is not
// an opening curly brace. It does NOT advance the token.
func (p *parser) openCurlyBrace() error {
	if p.Val() != "{" {
		return p.SyntaxErr("{")
	}
	return nil
}

// closeCurlyBrace expects the current token to be
// a closing curly brace. This acts like an assertion
// because it returns an error if the token is not
// a closing curly brace. It does NOT advance the token.
func (p *parser) closeCurlyBrace() error {
	if p.Val() != "}" {
		return p.SyntaxErr("}")
	}
	return nil
}

// standardAddress parses an address string into a structured format with separate
// host and port portions, as well as the original input string.
func standardAddress(str string) (address, error) {
	var err error

	// first check for scheme and strip it off
	input := str

	// separate host and port
	host, port, err := net.SplitHostPort(str)
	if err != nil {
		host, port, err = net.SplitHostPort(str + ":")
		// no error check here; return err at end of function
	}

	// see if we can set port based off scheme
	if port == "" {
		port = "53"
	}

	return address{Original: input, Host: strings.ToLower(dns.Fqdn(host)), Port: port}, err
}

// replaceEnvVars replaces environment variables that appear in the token
// and understands both the $UNIX and %WINDOWS% syntaxes.
func replaceEnvVars(s string) string {
	s = replaceEnvReferences(s, "{%", "%}")
	s = replaceEnvReferences(s, "{$", "}")
	return s
}

// replaceEnvReferences performs the actual replacement of env variables
// in s, given the placeholder start and placeholder end strings.
func replaceEnvReferences(s, refStart, refEnd string) string {
	index := strings.Index(s, refStart)
	for index != -1 {
		endIndex := strings.Index(s, refEnd)
		if endIndex != -1 {
			ref := s[index : endIndex+len(refEnd)]
			s = strings.Replace(s, ref, os.Getenv(ref[len(refStart):len(ref)-len(refEnd)]), -1)
		} else {
			return s
		}
		index = strings.Index(s, refStart)
	}
	return s
}

type (
	// ServerBlock associates tokens with a list of addresses
	// and groups tokens by directive name.
	ServerBlock struct {
		Addresses []address
		Tokens    map[string][]token
	}

	address struct {
		Original, Host, Port string
	}
)

// HostList converts the list of addresses that are
// associated with this server block into a slice of
// strings, where each address is as it was originally
// read from the input.
func (sb ServerBlock) HostList() []string {
	sbHosts := make([]string, len(sb.Addresses))
	for j, addr := range sb.Addresses {
		sbHosts[j] = addr.Original
	}
	return sbHosts
}
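replaceEnvVars gives config tokens access to the environment through two placeholder styles, {$VAR} (unix-flavored) and {%VAR%} (windows-flavored). A hedged sketch of the behavior (the variable name UPSTREAM is invented and fmt is assumed to be imported; TestEnvironmentReplacement below exercises the same path through the parser):

// Sketch only: both placeholder styles expand to the value of the
// environment variable named between the markers.
func exampleReplaceEnvVars() {
	os.Setenv("UPSTREAM", "10.0.0.1") // invented variable for illustration
	fmt.Println(replaceEnvVars("proxy . {$UPSTREAM}"))  // prints "proxy . 10.0.0.1"
	fmt.Println(replaceEnvVars("proxy . {%UPSTREAM%}")) // prints "proxy . 10.0.0.1"
}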
core/parse/parsing_test.go (new file)
@@ -0,0 +1,477 @@
package parse

import (
	"os"
	"strings"
	"testing"
)

func TestStandardAddress(t *testing.T) {
	for i, test := range []struct {
		input              string
		scheme, host, port string
		shouldErr          bool
	}{
		{`localhost`, "", "localhost", "", false},
		{`localhost:1234`, "", "localhost", "1234", false},
		{`localhost:`, "", "localhost", "", false},
		{`0.0.0.0`, "", "0.0.0.0", "", false},
		{`127.0.0.1:1234`, "", "127.0.0.1", "1234", false},
		{`:1234`, "", "", "1234", false},
		{`[::1]`, "", "::1", "", false},
		{`[::1]:1234`, "", "::1", "1234", false},
		{`:`, "", "", "", false},
		{`localhost:http`, "http", "localhost", "80", false},
		{`localhost:https`, "https", "localhost", "443", false},
		{`:http`, "http", "", "80", false},
		{`:https`, "https", "", "443", false},
		{`http://localhost:https`, "", "", "", true}, // conflict
		{`http://localhost:http`, "", "", "", true},  // repeated scheme
		{`http://localhost:443`, "", "", "", true},   // not conventional
		{`https://localhost:80`, "", "", "", true},   // not conventional
		{`http://localhost`, "http", "localhost", "80", false},
		{`https://localhost`, "https", "localhost", "443", false},
		{`http://127.0.0.1`, "http", "127.0.0.1", "80", false},
		{`https://127.0.0.1`, "https", "127.0.0.1", "443", false},
		{`http://[::1]`, "http", "::1", "80", false},
		{`http://localhost:1234`, "http", "localhost", "1234", false},
		{`https://127.0.0.1:1234`, "https", "127.0.0.1", "1234", false},
		{`http://[::1]:1234`, "http", "::1", "1234", false},
		{``, "", "", "", false},
		{`::1`, "", "::1", "", true},
		{`localhost::`, "", "localhost::", "", true},
		{`#$%@`, "", "#$%@", "", true},
	} {
		actual, err := standardAddress(test.input)

		if err != nil && !test.shouldErr {
			t.Errorf("Test %d (%s): Expected no error, but had error: %v", i, test.input, err)
		}
		if err == nil && test.shouldErr {
			t.Errorf("Test %d (%s): Expected error, but had none", i, test.input)
		}

		if actual.Scheme != test.scheme {
			t.Errorf("Test %d (%s): Expected scheme '%s', got '%s'", i, test.input, test.scheme, actual.Scheme)
		}
		if actual.Host != test.host {
			t.Errorf("Test %d (%s): Expected host '%s', got '%s'", i, test.input, test.host, actual.Host)
		}
		if actual.Port != test.port {
			t.Errorf("Test %d (%s): Expected port '%s', got '%s'", i, test.input, test.port, actual.Port)
		}
	}
}

func TestParseOneAndImport(t *testing.T) {
	setupParseTests()

	testParseOne := func(input string) (ServerBlock, error) {
		p := testParser(input)
		p.Next() // parseOne doesn't call Next() to start, so we must
		err := p.parseOne()
		return p.block, err
	}

	for i, test := range []struct {
		input     string
		shouldErr bool
		addresses []address
		tokens    map[string]int // map of directive name to number of tokens expected
	}{
		{`localhost`, false, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{}},

		{`localhost
		  dir1`, false, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{
			"dir1": 1,
		}},

		{`localhost:1234
		  dir1 foo bar`, false, []address{
			{"localhost:1234", "", "localhost", "1234"},
		}, map[string]int{
			"dir1": 3,
		}},

		{`localhost {
		    dir1
		  }`, false, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{
			"dir1": 1,
		}},

		{`localhost:1234 {
		    dir1 foo bar
		    dir2
		  }`, false, []address{
			{"localhost:1234", "", "localhost", "1234"},
		}, map[string]int{
			"dir1": 3,
			"dir2": 1,
		}},

		{`http://localhost https://localhost
		  dir1 foo bar`, false, []address{
			{"http://localhost", "http", "localhost", "80"},
			{"https://localhost", "https", "localhost", "443"},
		}, map[string]int{
			"dir1": 3,
		}},

		{`http://localhost https://localhost {
		    dir1 foo bar
		  }`, false, []address{
			{"http://localhost", "http", "localhost", "80"},
			{"https://localhost", "https", "localhost", "443"},
		}, map[string]int{
			"dir1": 3,
		}},

		{`http://localhost, https://localhost {
		    dir1 foo bar
		  }`, false, []address{
			{"http://localhost", "http", "localhost", "80"},
			{"https://localhost", "https", "localhost", "443"},
		}, map[string]int{
			"dir1": 3,
		}},

		{`http://localhost, {
		  }`, true, []address{
			{"http://localhost", "http", "localhost", "80"},
		}, map[string]int{}},

		{`host1:80, http://host2.com
		  dir1 foo bar
		  dir2 baz`, false, []address{
			{"host1:80", "", "host1", "80"},
			{"http://host2.com", "http", "host2.com", "80"},
		}, map[string]int{
			"dir1": 3,
			"dir2": 2,
		}},

		{`http://host1.com,
		  http://host2.com,
		  https://host3.com`, false, []address{
			{"http://host1.com", "http", "host1.com", "80"},
			{"http://host2.com", "http", "host2.com", "80"},
			{"https://host3.com", "https", "host3.com", "443"},
		}, map[string]int{}},

		{`http://host1.com:1234, https://host2.com
		  dir1 foo {
		    bar baz
		  }
		  dir2`, false, []address{
			{"http://host1.com:1234", "http", "host1.com", "1234"},
			{"https://host2.com", "https", "host2.com", "443"},
		}, map[string]int{
			"dir1": 6,
			"dir2": 1,
		}},

		{`127.0.0.1
		  dir1 {
		    bar baz
		  }
		  dir2 {
		    foo bar
		  }`, false, []address{
			{"127.0.0.1", "", "127.0.0.1", ""},
		}, map[string]int{
			"dir1": 5,
			"dir2": 5,
		}},

		{`127.0.0.1
		  unknown_directive`, true, []address{
			{"127.0.0.1", "", "127.0.0.1", ""},
		}, map[string]int{}},

		{`localhost
		  dir1 {
		    foo`, true, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{
			"dir1": 3,
		}},

		{`localhost
		  dir1 {
		  }`, false, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{
			"dir1": 3,
		}},

		{`localhost
		  dir1 {
		  } }`, true, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{
			"dir1": 3,
		}},

		{`localhost
		  dir1 {
		    nested {
		      foo
		    }
		  }
		  dir2 foo bar`, false, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{
			"dir1": 7,
			"dir2": 3,
		}},

		{``, false, []address{}, map[string]int{}},

		{`localhost
		  dir1 arg1
		  import import_test1.txt`, false, []address{
			{"localhost", "", "localhost", ""},
		}, map[string]int{
			"dir1": 2,
			"dir2": 3,
			"dir3": 1,
		}},

		{`import import_test2.txt`, false, []address{
			{"host1", "", "host1", ""},
		}, map[string]int{
			"dir1": 1,
			"dir2": 2,
		}},

		{`import import_test1.txt import_test2.txt`, true, []address{}, map[string]int{}},

		{`import not_found.txt`, true, []address{}, map[string]int{}},

		{`""`, false, []address{}, map[string]int{}},

		{``, false, []address{}, map[string]int{}},
	} {
		result, err := testParseOne(test.input)

		if test.shouldErr && err == nil {
			t.Errorf("Test %d: Expected an error, but didn't get one", i)
		}
		if !test.shouldErr && err != nil {
			t.Errorf("Test %d: Expected no error, but got: %v", i, err)
		}

		if len(result.Addresses) != len(test.addresses) {
			t.Errorf("Test %d: Expected %d addresses, got %d",
				i, len(test.addresses), len(result.Addresses))
			continue
		}
		for j, addr := range result.Addresses {
			if addr.Host != test.addresses[j].Host {
				t.Errorf("Test %d, address %d: Expected host to be '%s', but was '%s'",
					i, j, test.addresses[j].Host, addr.Host)
			}
			if addr.Port != test.addresses[j].Port {
				t.Errorf("Test %d, address %d: Expected port to be '%s', but was '%s'",
					i, j, test.addresses[j].Port, addr.Port)
			}
		}

		if len(result.Tokens) != len(test.tokens) {
			t.Errorf("Test %d: Expected %d directives, had %d",
				i, len(test.tokens), len(result.Tokens))
			continue
		}
		for directive, tokens := range result.Tokens {
			if len(tokens) != test.tokens[directive] {
				t.Errorf("Test %d, directive '%s': Expected %d tokens, counted %d",
					i, directive, test.tokens[directive], len(tokens))
				continue
			}
		}
	}
}

func TestParseAll(t *testing.T) {
	setupParseTests()

	for i, test := range []struct {
		input     string
		shouldErr bool
		addresses [][]address // addresses per server block, in order
	}{
		{`localhost`, false, [][]address{
			{{"localhost", "", "localhost", ""}},
		}},

		{`localhost:1234`, false, [][]address{
			{{"localhost:1234", "", "localhost", "1234"}},
		}},

		{`localhost:1234 {
		  }
		  localhost:2015 {
		  }`, false, [][]address{
			{{"localhost:1234", "", "localhost", "1234"}},
			{{"localhost:2015", "", "localhost", "2015"}},
		}},

		{`localhost:1234, http://host2`, false, [][]address{
			{{"localhost:1234", "", "localhost", "1234"}, {"http://host2", "http", "host2", "80"}},
		}},

		{`localhost:1234, http://host2,`, true, [][]address{}},

		{`http://host1.com, http://host2.com {
		  }
		  https://host3.com, https://host4.com {
		  }`, false, [][]address{
			{{"http://host1.com", "http", "host1.com", "80"}, {"http://host2.com", "http", "host2.com", "80"}},
			{{"https://host3.com", "https", "host3.com", "443"}, {"https://host4.com", "https", "host4.com", "443"}},
		}},

		{`import import_glob*.txt`, false, [][]address{
			{{"glob0.host0", "", "glob0.host0", ""}},
			{{"glob0.host1", "", "glob0.host1", ""}},
			{{"glob1.host0", "", "glob1.host0", ""}},
			{{"glob2.host0", "", "glob2.host0", ""}},
		}},
	} {
		p := testParser(test.input)
		blocks, err := p.parseAll()

		if test.shouldErr && err == nil {
			t.Errorf("Test %d: Expected an error, but didn't get one", i)
		}
		if !test.shouldErr && err != nil {
			t.Errorf("Test %d: Expected no error, but got: %v", i, err)
		}

		if len(blocks) != len(test.addresses) {
			t.Errorf("Test %d: Expected %d server blocks, got %d",
				i, len(test.addresses), len(blocks))
			continue
		}
		for j, block := range blocks {
			if len(block.Addresses) != len(test.addresses[j]) {
				t.Errorf("Test %d: Expected %d addresses in block %d, got %d",
					i, len(test.addresses[j]), j, len(block.Addresses))
				continue
			}
			for k, addr := range block.Addresses {
				if addr.Host != test.addresses[j][k].Host {
					t.Errorf("Test %d, block %d, address %d: Expected host to be '%s', but was '%s'",
						i, j, k, test.addresses[j][k].Host, addr.Host)
				}
				if addr.Port != test.addresses[j][k].Port {
					t.Errorf("Test %d, block %d, address %d: Expected port to be '%s', but was '%s'",
						i, j, k, test.addresses[j][k].Port, addr.Port)
				}
			}
		}
	}
}

func TestEnvironmentReplacement(t *testing.T) {
	setupParseTests()

	os.Setenv("PORT", "8080")
	os.Setenv("ADDRESS", "servername.com")
	os.Setenv("FOOBAR", "foobar")

	// basic test; unix-style env vars
	p := testParser(`{$ADDRESS}`)
	blocks, _ := p.parseAll()
	if actual, expected := blocks[0].Addresses[0].Host, "servername.com"; expected != actual {
		t.Errorf("Expected host to be '%s' but was '%s'", expected, actual)
	}

	// multiple vars per token
	p = testParser(`{$ADDRESS}:{$PORT}`)
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Addresses[0].Host, "servername.com"; expected != actual {
		t.Errorf("Expected host to be '%s' but was '%s'", expected, actual)
	}
	if actual, expected := blocks[0].Addresses[0].Port, "8080"; expected != actual {
		t.Errorf("Expected port to be '%s' but was '%s'", expected, actual)
	}

	// windows-style var and unix style in same token
	p = testParser(`{%ADDRESS%}:{$PORT}`)
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Addresses[0].Host, "servername.com"; expected != actual {
		t.Errorf("Expected host to be '%s' but was '%s'", expected, actual)
	}
	if actual, expected := blocks[0].Addresses[0].Port, "8080"; expected != actual {
		t.Errorf("Expected port to be '%s' but was '%s'", expected, actual)
	}

	// reverse order
	p = testParser(`{$ADDRESS}:{%PORT%}`)
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Addresses[0].Host, "servername.com"; expected != actual {
		t.Errorf("Expected host to be '%s' but was '%s'", expected, actual)
	}
	if actual, expected := blocks[0].Addresses[0].Port, "8080"; expected != actual {
		t.Errorf("Expected port to be '%s' but was '%s'", expected, actual)
	}

	// env var in server block body as argument
	p = testParser(":{%PORT%}\ndir1 {$FOOBAR}")
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Addresses[0].Port, "8080"; expected != actual {
		t.Errorf("Expected port to be '%s' but was '%s'", expected, actual)
	}
	if actual, expected := blocks[0].Tokens["dir1"][1].text, "foobar"; expected != actual {
		t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
	}

	// combined windows env vars in argument
	p = testParser(":{%PORT%}\ndir1 {%ADDRESS%}/{%FOOBAR%}")
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Tokens["dir1"][1].text, "servername.com/foobar"; expected != actual {
		t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
	}

	// malformed env var (windows)
	p = testParser(":1234\ndir1 {%ADDRESS}")
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Tokens["dir1"][1].text, "{%ADDRESS}"; expected != actual {
		t.Errorf("Expected host to be '%s' but was '%s'", expected, actual)
	}

	// malformed (non-existent) env var (unix)
	p = testParser(`:{$PORT$}`)
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Addresses[0].Port, ""; expected != actual {
		t.Errorf("Expected port to be '%s' but was '%s'", expected, actual)
	}

	// in quoted field
	p = testParser(":1234\ndir1 \"Test {$FOOBAR} test\"")
	blocks, _ = p.parseAll()
	if actual, expected := blocks[0].Tokens["dir1"][1].text, "Test foobar test"; expected != actual {
		t.Errorf("Expected argument to be '%s' but was '%s'", expected, actual)
	}
}

func setupParseTests() {
	// Set up some bogus directives for testing
	ValidDirectives = map[string]struct{}{
		"dir1": {},
		"dir2": {},
		"dir3": {},
	}
}

func testParser(input string) parser {
	buf := strings.NewReader(input)
	p := parser{Dispenser: NewDispenser("Test", buf), checkDirectives: true}
	return p
}