taking depth out of the lexer

I don't know why that was ever there to begin with
master
Jordan Orelli 12 years ago
parent 8f150e038b
commit 8c4285ec87

@@ -45,10 +45,9 @@ type stateFn func(*lexer) (stateFn, error)
 type lexer struct {
     io.RuneReader
     buf []rune
     cur rune
-    depth int
     out chan token
 }

 // clears the current lexem buffer and emits a token of the given type.
@@ -96,7 +95,6 @@ func debugPrint(s string) {
 func lexOpenParen(l *lexer) (stateFn, error) {
     debugPrint("-->lexOpenParen")
     l.out <- token{"(", openParenToken}
-    l.depth++
     switch l.cur {
     case ' ', '\t', '\n', '\r':
         return lexWhitespace, nil
@@ -233,7 +231,6 @@ func lexSymbol(l *lexer) (stateFn, error) {
 func lexCloseParen(l *lexer) (stateFn, error) {
     debugPrint("-->lexCloseParen")
     l.out <- token{")", closeParenToken}
-    l.depth--
     switch l.cur {
     case ' ', '\t', '\n', '\r':
         return lexWhitespace, nil
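Note: the l.depth++ / l.depth-- bookkeeping dropped in the two hunks above (together with the unbalanced-parenthesis report that the final hunk below removes from lex) can be reproduced by whatever consumes the token channel instead. A minimal sketch, assumed to live in the same package as the lexer; the field name t.kind is an assumption, since this commit never shows the token struct's declaration:

// checkBalance is a hypothetical consumer-side replacement for the lexer's
// old depth counter: it drains the token channel and reports whether the
// parentheses it saw were balanced. t.kind is an assumed field name.
func checkBalance(c chan token) error {
    depth := 0
    for t := range c {
        switch t.kind {
        case openParenToken:
            depth++
        case closeParenToken:
            depth--
            if depth < 0 {
                return fmt.Errorf("unexpected close paren")
            }
        }
    }
    if depth != 0 {
        return fmt.Errorf("unbalanced parenthesis")
    }
    return nil
}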
@@ -260,7 +257,7 @@ func lexComment(l *lexer) (stateFn, error) {
 // new tokens.
 func lex(input io.RuneReader, c chan token) {
     defer close(c)
-    l := &lexer{input, nil, ' ', 0, c}
+    l := &lexer{input, nil, ' ', c}
     var err error

     f := stateFn(lexWhitespace)
@@ -277,9 +274,6 @@ func lex(input io.RuneReader, c chan token) {
     if err != io.EOF {
         fmt.Println(err)
     }
-    if l.depth != 0 {
-        fmt.Println("error: unbalanced parenthesis")
-    }
 }

 func lexs(input string, c chan token) {
     lex(strings.NewReader(input), c)
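For reference, driving the lexer is unchanged by this commit: lex still closes the channel when the input is exhausted, so a plain range over it terminates. A hedged usage sketch in the same package, printing with %v because the token fields are not shown in this diff:

// dumpTokens is a hypothetical helper showing how lexs is driven: lexing runs
// in its own goroutine and the range ends once lex's deferred close(c) fires.
func dumpTokens(input string) {
    c := make(chan token)
    go lexs(input, c)
    for t := range c {
        fmt.Printf("%v\n", t)
    }
}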
