new testing strat: file-based tests

master
Jordan Orelli 10 years ago
parent 1f06100987
commit c43d431a97

@@ -1,116 +1,59 @@
 package main
 
 import (
+	"bytes"
+	"fmt"
+	"io/ioutil"
+	"os"
+	"path/filepath"
+	"strconv"
+	"strings"
 	"testing"
 )
 
-var primitivesTests = []struct {
-	in  string
-	out []token
-}{
-	{`"x"`, []token{{t_string, "x"}}},
-	{`"yes"`, []token{{t_string, "yes"}}},
-	{`"this one has spaces"`, []token{{t_string, "this one has spaces"}}},
-	{`"this one has \"quotes\" in it"`, []token{{t_string, `this one has "quotes" in it`}}},
-	{"`this one is delimited by backticks`", []token{{t_string, "this one is delimited by backticks"}}},
-	{` "this one has white space on either end" `, []token{{t_string, "this one has white space on either end"}}},
-	{`name`, []token{{t_name, "name"}}},
-	{`name_with_underscore`, []token{{t_name, "name_with_underscore"}}},
-	{` name_surrounded_by_whitespace `, []token{{t_name, "name_surrounded_by_whitespace"}}},
-	{`name1`, []token{{t_name, "name1"}}},
-	{`camelName`, []token{{t_name, "camelName"}}},
-	{`Type`, []token{{t_type, "Type"}}},
-	{`CamelType`, []token{{t_type, "CamelType"}}},
-	{`Type_1_2`, []token{{t_type, "Type_1_2"}}},
-	{`:`, []token{{t_object_separator, ":"}}},
-	{` : `, []token{{t_object_separator, ":"}}},
-	{`"x" "y"`, []token{{t_string, "x"}, {t_string, "y"}}},
-	{`x: "sam"`, []token{
-		{t_name, "x"},
-		{t_object_separator, ":"},
-		{t_string, "sam"},
-	}},
-	{`# this is a comment`, []token{{t_comment, " this is a comment"}}},
-	{`
-# comment line one
-# comment line two
-`, []token{{t_comment, " comment line one"}, {t_comment, " comment line two"}}},
-	{`[]`, []token{{t_list_start, "["}, {t_list_end, "]"}}},
-	{`["item"]`, []token{{t_list_start, "["}, {t_string, "item"}, {t_list_end, "]"}}},
-	{`{}`, []token{{t_object_start, "{"}, {t_object_end, "}"}}},
-	{`{first_name: "jordan" last_name: "orelli"}`, []token{
-		{t_object_start, "{"},
-		{t_name, "first_name"},
-		{t_object_separator, ":"},
-		{t_string, "jordan"},
-		{t_name, "last_name"},
-		{t_object_separator, ":"},
-		{t_string, "orelli"},
-		{t_object_end, "}"},
-	}},
-	{`{
-		first_name: "jordan"
-		last_name: "orelli"
-	}`, []token{
-		{t_object_start, "{"},
-		{t_name, "first_name"},
-		{t_object_separator, ":"},
-		{t_string, "jordan"},
-		{t_name, "last_name"},
-		{t_object_separator, ":"},
-		{t_string, "orelli"},
-		{t_object_end, "}"},
-	}},
-	{`0`, []token{{t_real_number, "0"}}},
-	{`-0`, []token{{t_real_number, "-0"}}},
-	{`+0`, []token{{t_real_number, "+0"}}},
-	{`+125`, []token{{t_real_number, "+125"}}},
-	{`-125`, []token{{t_real_number, "-125"}}},
-	{`.0`, []token{{t_real_number, ".0"}}},
-	{`15`, []token{{t_real_number, "15"}}},
-	{`0x0`, []token{{t_real_number, "0x0"}}},
-	{`0xa`, []token{{t_real_number, "0xa"}}},
-	{`0xc0dea5cf`, []token{{t_real_number, "0xc0dea5cf"}}},
-	{`12.345`, []token{{t_real_number, "12.345"}}},
-	{`12.345 name`, []token{{t_real_number, "12.345"}, {t_name, "name"}}},
-	{`[12.345]`, []token{
-		{t_list_start, "["},
-		{t_real_number, "12.345"},
-		{t_list_end, "]"},
-	}},
-	{`[1 2 3]`, []token{
-		{t_list_start, "["},
-		{t_real_number, "1"},
-		{t_real_number, "2"},
-		{t_real_number, "3"},
-		{t_list_end, "]"},
-	}},
-	{`1i`, []token{{t_imaginary_number, "1i"}}},
-	// a complex number generates two lexemes; one for its real component,
-	// and one for its imaginary component.
-	{`1+2i`, []token{{t_real_number, "1"}, {t_imaginary_number, "+2i"}}},
-	{`1e9`, []token{{t_real_number, "1e9"}}},
-	{`1e+9`, []token{{t_real_number, "1e+9"}}},
-	{`1E-9`, []token{{t_real_number, "1E-9"}}},
-}
-
-func TestLexPrimities(t *testing.T) {
-	for _, test := range primitivesTests {
-		tokens, err := fullTokens(lexString(test.in))
-		if err != nil {
-			t.Error(err)
-			continue
-		}
-		// tokens = tokens[:len(tokens)-1]
-		if len(tokens) != len(test.out) {
-			t.Errorf("expected %d token, saw %d: %v", len(test.out), len(tokens), tokens)
-			continue
-		}
-		for i := range tokens {
-			if tokens[i] != test.out[i] {
-				t.Errorf("token %d is %v, expected %v", i, tokens[i], test.out[i])
-			}
-		}
-		t.Logf("OK: %s", test.in)
-	}
-}
+func runTest(t *testing.T, basepath, inpath, outpath string) {
+	in, err := os.Open(inpath)
+	if err != nil {
+		t.Errorf("unable to open input file %s: %s", inpath, err)
+		return
+	}
+	defer in.Close()
+
+	expected, err := ioutil.ReadFile(outpath)
+	if err != nil {
+		t.Errorf("unable to read expected output for %s: %s", outpath, err)
+		return
+	}
+
+	r_inpath := filepath.Base(inpath)
+	n, err := strconv.ParseInt(strings.TrimSuffix(r_inpath, ".in"), 0, 64)
+	if err != nil {
+		t.Errorf("unable to get test number for path %s: %s", inpath, err)
+		return
+	}
+
+	var buf bytes.Buffer
+	c := lex(in)
+	for t := range c {
+		fmt.Fprintln(&buf, t)
+	}
+
+	if !bytes.Equal(buf.Bytes(), expected) {
+		t.Logf("test %d: in: %s out: %s", n, inpath, outpath)
+		t.Errorf("lex output does not match expected result for test %d", n)
+		t.Logf("expected output:\n%s", expected)
+		t.Logf("received output:\n%s", buf.Bytes())
+	}
+}
+
+func TestLex(t *testing.T) {
+	files, err := filepath.Glob("tests/lex/*.in")
+	if err != nil {
+		t.Errorf("unable to find test files: %s", err)
+		return
+	}
+
+	for _, fname := range files {
+		runTest(t, "tests/lex/", fname, strings.Replace(fname, "in", "out", -1))
+	}
+}

@@ -0,0 +1 @@
+# a comment

@@ -0,0 +1 @@
+{t_comment a comment}

@@ -0,0 +1,7 @@
+-1
+1
++1
+-0
+0
++0
+

@@ -0,0 +1,6 @@
+{t_real_number -1}
+{t_real_number 1}
+{t_real_number +1}
+{t_real_number -0}
+{t_real_number 0}
+{t_real_number +0}
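
With this change, each lexer test case is a pair of fixture files under tests/lex: N.in holds the input text and N.out holds the expected token stream, one token per line in the same format fmt.Fprintln produces for a token. TestLex globs tests/lex/*.in and runs every pair it finds, so adding a case means adding two files. Below is a minimal sketch of how a new .out fixture could be generated with the matching formatting; genFixture is a hypothetical helper, not part of this commit, and it assumes the repository's package main with its lex function and the "fmt"/"os" imports already used in the test above.

// genFixture is a hypothetical helper (not part of this commit). It lexes a
// tests/lex/N.in file and prints one token per line, mirroring runTest's
// fmt.Fprintln formatting, so the output can be saved as tests/lex/N.out.
func genFixture(inpath string) error {
	in, err := os.Open(inpath)
	if err != nil {
		return err
	}
	defer in.Close()

	// same formatting the test compares against: one token per line
	for t := range lex(in) {
		fmt.Fprintln(os.Stdout, t)
	}
	return nil
}

Once the pair is in place, go test picks it up automatically through the tests/lex/*.in glob in TestLex.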