zclsyntax: "peeker" helper
This will make it easier for the parser to walk through the token sequence with one token of lookahead, maintaining state of where we've got to and also allowing us to switch into and out of the newline-sensitive mode as we transition to and from newline-sensitive constructs.
This commit is contained in:
parent
1a6dfffc4a
commit
23cb83ef3e
67
zcl/zclsyntax/peeker.go
Normal file
67
zcl/zclsyntax/peeker.go
Normal file
@ -0,0 +1,67 @@
|
||||
package zclsyntax
|
||||
|
||||
// peeker wraps a token sequence with a movable read position, giving the
// parser one token of lookahead (Peek) plus the ability to toggle whether
// newline tokens are significant as it moves into and out of
// newline-sensitive constructs.
type peeker struct {
	// Tokens is the full token sequence being traversed.
	Tokens Tokens

	// NextIndex is the index of the next token to return. Once it points
	// past the end of Tokens, the trailing token (EOF) is returned forever.
	NextIndex int

	// IncludeComments controls whether TokenComment tokens are returned
	// to the caller or silently skipped.
	IncludeComments bool

	// IncludeNewlinesStack records, per nested parsing context, whether
	// TokenNewline tokens are significant; the top of the stack is the
	// active setting. Managed via PushIncludeNewlines/PopIncludeNewlines.
	IncludeNewlinesStack []bool
}
|
||||
|
||||
func newPeeker(tokens Tokens, includeComments bool) *peeker {
|
||||
return &peeker{
|
||||
Tokens: tokens,
|
||||
IncludeComments: includeComments,
|
||||
|
||||
IncludeNewlinesStack: []bool{true},
|
||||
}
|
||||
}
|
||||
|
||||
func (p *peeker) Peek() Token {
|
||||
ret, _ := p.nextToken()
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *peeker) Read() Token {
|
||||
ret, nextIdx := p.nextToken()
|
||||
p.NextIndex = nextIdx
|
||||
return ret
|
||||
}
|
||||
|
||||
func (p *peeker) nextToken() (Token, int) {
|
||||
for i := p.NextIndex; i < len(p.Tokens); i++ {
|
||||
tok := p.Tokens[i]
|
||||
switch tok.Type {
|
||||
case TokenComment:
|
||||
if !p.IncludeComments {
|
||||
continue
|
||||
}
|
||||
case TokenNewline:
|
||||
if !p.includingNewlines() {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
return tok, i + 1
|
||||
}
|
||||
|
||||
// if we fall out here then we'll return the EOF token, and leave
|
||||
// our index pointed off the end of the array so we'll keep
|
||||
// returning EOF in future too.
|
||||
return p.Tokens[len(p.Tokens)-1], len(p.Tokens)
|
||||
}
|
||||
|
||||
func (p *peeker) includingNewlines() bool {
|
||||
return p.IncludeNewlinesStack[len(p.IncludeNewlinesStack)-1]
|
||||
}
|
||||
|
||||
// PushIncludeNewlines enters a new context in which newline tokens are
// significant (true) or skipped (false). Each call must be balanced by a
// later PopIncludeNewlines to restore the previous setting.
func (p *peeker) PushIncludeNewlines(include bool) {
	p.IncludeNewlinesStack = append(p.IncludeNewlinesStack, include)
}
|
||||
|
||||
func (p *peeker) PopIncludeNewlines() bool {
|
||||
stack := p.IncludeNewlinesStack
|
||||
remain, ret := stack[:len(stack)-1], stack[len(stack)-1]
|
||||
p.IncludeNewlinesStack = remain
|
||||
return ret
|
||||
}
|
158
zcl/zclsyntax/peeker_test.go
Normal file
158
zcl/zclsyntax/peeker_test.go
Normal file
@ -0,0 +1,158 @@
|
||||
package zclsyntax
|
||||
|
||||
import (
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestPeeker(t *testing.T) {
|
||||
tokens := Tokens{
|
||||
{
|
||||
Type: TokenIdent,
|
||||
},
|
||||
{
|
||||
Type: TokenComment,
|
||||
},
|
||||
{
|
||||
Type: TokenIdent,
|
||||
},
|
||||
{
|
||||
Type: TokenComment,
|
||||
},
|
||||
{
|
||||
Type: TokenIdent,
|
||||
},
|
||||
{
|
||||
Type: TokenNewline,
|
||||
},
|
||||
{
|
||||
Type: TokenIdent,
|
||||
},
|
||||
{
|
||||
Type: TokenNewline,
|
||||
},
|
||||
{
|
||||
Type: TokenIdent,
|
||||
},
|
||||
{
|
||||
Type: TokenNewline,
|
||||
},
|
||||
{
|
||||
Type: TokenEOF,
|
||||
},
|
||||
}
|
||||
|
||||
{
|
||||
peeker := newPeeker(tokens, true)
|
||||
|
||||
wantTypes := []TokenType{
|
||||
TokenIdent,
|
||||
TokenComment,
|
||||
TokenIdent,
|
||||
TokenComment,
|
||||
TokenIdent,
|
||||
TokenNewline,
|
||||
TokenIdent,
|
||||
TokenNewline,
|
||||
TokenIdent,
|
||||
TokenNewline,
|
||||
TokenEOF,
|
||||
}
|
||||
var gotTypes []TokenType
|
||||
|
||||
for {
|
||||
peeked := peeker.Peek()
|
||||
read := peeker.Read()
|
||||
if peeked.Type != read.Type {
|
||||
t.Errorf("mismatched Peek %s and Read %s", peeked, read)
|
||||
}
|
||||
|
||||
gotTypes = append(gotTypes, read.Type)
|
||||
|
||||
if read.Type == TokenEOF {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(gotTypes, wantTypes) {
|
||||
t.Errorf("wrong types\ngot: %#v\nwant: %#v", gotTypes, wantTypes)
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
peeker := newPeeker(tokens, false)
|
||||
|
||||
wantTypes := []TokenType{
|
||||
TokenIdent,
|
||||
TokenIdent,
|
||||
TokenIdent,
|
||||
TokenNewline,
|
||||
TokenIdent,
|
||||
TokenNewline,
|
||||
TokenIdent,
|
||||
TokenNewline,
|
||||
TokenEOF,
|
||||
}
|
||||
var gotTypes []TokenType
|
||||
|
||||
for {
|
||||
peeked := peeker.Peek()
|
||||
read := peeker.Read()
|
||||
if peeked.Type != read.Type {
|
||||
t.Errorf("mismatched Peek %s and Read %s", peeked, read)
|
||||
}
|
||||
|
||||
gotTypes = append(gotTypes, read.Type)
|
||||
|
||||
if read.Type == TokenEOF {
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(gotTypes, wantTypes) {
|
||||
t.Errorf("wrong types\ngot: %#v\nwant: %#v", gotTypes, wantTypes)
|
||||
}
|
||||
}
|
||||
|
||||
{
|
||||
peeker := newPeeker(tokens, false)
|
||||
|
||||
peeker.PushIncludeNewlines(false)
|
||||
|
||||
wantTypes := []TokenType{
|
||||
TokenIdent,
|
||||
TokenIdent,
|
||||
TokenIdent,
|
||||
TokenIdent,
|
||||
TokenIdent,
|
||||
TokenNewline, // we'll pop off the PushIncludeNewlines before we get here
|
||||
TokenEOF,
|
||||
}
|
||||
var gotTypes []TokenType
|
||||
|
||||
idx := 0
|
||||
for {
|
||||
peeked := peeker.Peek()
|
||||
read := peeker.Read()
|
||||
if peeked.Type != read.Type {
|
||||
t.Errorf("mismatched Peek %s and Read %s", peeked, read)
|
||||
}
|
||||
|
||||
gotTypes = append(gotTypes, read.Type)
|
||||
|
||||
if read.Type == TokenEOF {
|
||||
break
|
||||
}
|
||||
|
||||
if idx == 4 {
|
||||
peeker.PopIncludeNewlines()
|
||||
}
|
||||
|
||||
idx++
|
||||
}
|
||||
|
||||
if !reflect.DeepEqual(gotTypes, wantTypes) {
|
||||
t.Errorf("wrong types\ngot: %#v\nwant: %#v", gotTypes, wantTypes)
|
||||
}
|
||||
}
|
||||
}
|
Loading…
Reference in New Issue
Block a user