token: rename TokenType to Type
commit 32f4e84345
parent 9ee8cdff12
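
The rename is purely mechanical: every reference to token.TokenType becomes token.Type, with no change in behavior. As a rough sketch of what this looks like from calling code (the package name, import path, and helper below are illustrative assumptions, not part of this commit):

// Illustrative sketch only; the import path and helper name are assumed.
package example

import "github.com/fatih/hcl/token" // import path assumed

// typeOf is a hypothetical helper. Before this commit its result would have
// been declared as token.TokenType; after it, the same code reads token.Type.
func typeOf(t token.Token) token.Type {
    return t.Type
}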
@@ -31,21 +31,21 @@ func TestParseType(t *testing.T) {
 
 func TestObjectKey(t *testing.T) {
     keys := []struct {
-        exp []token.TokenType
+        exp []token.Type
         src string
     }{
-        {[]token.TokenType{token.IDENT}, `foo {}`},
-        {[]token.TokenType{token.IDENT}, `foo = {}`},
-        {[]token.TokenType{token.IDENT}, `foo = bar`},
-        {[]token.TokenType{token.IDENT}, `foo = 123`},
-        {[]token.TokenType{token.IDENT}, `foo = "${var.bar}`},
-        {[]token.TokenType{token.STRING}, `"foo" {}`},
-        {[]token.TokenType{token.STRING}, `"foo" = {}`},
-        {[]token.TokenType{token.STRING}, `"foo" = "${var.bar}`},
-        {[]token.TokenType{token.IDENT, token.IDENT}, `foo bar {}`},
-        {[]token.TokenType{token.IDENT, token.STRING}, `foo "bar" {}`},
-        {[]token.TokenType{token.STRING, token.IDENT}, `"foo" bar {}`},
-        {[]token.TokenType{token.IDENT, token.IDENT, token.IDENT}, `foo bar baz {}`},
+        {[]token.Type{token.IDENT}, `foo {}`},
+        {[]token.Type{token.IDENT}, `foo = {}`},
+        {[]token.Type{token.IDENT}, `foo = bar`},
+        {[]token.Type{token.IDENT}, `foo = 123`},
+        {[]token.Type{token.IDENT}, `foo = "${var.bar}`},
+        {[]token.Type{token.STRING}, `"foo" {}`},
+        {[]token.Type{token.STRING}, `"foo" = {}`},
+        {[]token.Type{token.STRING}, `"foo" = "${var.bar}`},
+        {[]token.Type{token.IDENT, token.IDENT}, `foo bar {}`},
+        {[]token.Type{token.IDENT, token.STRING}, `foo "bar" {}`},
+        {[]token.Type{token.STRING, token.IDENT}, `"foo" bar {}`},
+        {[]token.Type{token.IDENT, token.IDENT, token.IDENT}, `foo bar baz {}`},
     }
 
     for _, k := range keys {

@@ -55,7 +55,7 @@ func TestObjectKey(t *testing.T) {
             t.Fatal(err)
         }
 
-        tokens := []token.TokenType{}
+        tokens := []token.Type{}
         for _, o := range keys {
             tokens = append(tokens, o.Token.Type)
         }

@@ -127,7 +127,7 @@ func (s *Scanner) Scan() token.Token {
         ch = s.next()
     }
 
-    var tok token.TokenType
+    var tok token.Type
 
     // token text markings
     s.tokStart = s.srcPos.Offset - s.lastCharLen

@@ -246,7 +246,7 @@ func (s *Scanner) scanComment(ch rune) {
 }
 
 // scanNumber scans a HCL number definition starting with the given rune
-func (s *Scanner) scanNumber(ch rune) token.TokenType {
+func (s *Scanner) scanNumber(ch rune) token.Type {
     if ch == '0' {
         // check for hexadecimal, octal or float
         ch = s.next()

@@ -11,7 +11,7 @@ import (
 var f100 = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
 
 type tokenPair struct {
-    tok  token.TokenType
+    tok  token.Type
     text string
 }
 

@@ -272,7 +272,7 @@ func TestRealExample(t *testing.T) {
 }`
 
     literals := []struct {
-        tokenType token.TokenType
+        tokenType token.Type
         literal   string
     }{
         {token.COMMENT, `// This comes from Terraform, as a test`},

@@ -367,7 +367,7 @@ func TestError(t *testing.T) {
     testError(t, `/*/`, "1:4", "comment not terminated", token.COMMENT)
 }
 
-func testError(t *testing.T, src, pos, msg string, tok token.TokenType) {
+func testError(t *testing.T, src, pos, msg string, tok token.Type) {
     s := New([]byte(src))
 
     errorCalled := false

@@ -7,17 +7,17 @@ import (
 
 // Token defines a single HCL token which can be obtained via the Scanner
 type Token struct {
-    Type TokenType
+    Type Type
     Pos  Pos
     Text string
 }
 
-// TokenType is the set of lexical tokens of the HCL (HashiCorp Configuration Language)
-type TokenType int
+// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language)
+type Type int
 
 const (
     // Special tokens
-    ILLEGAL TokenType = iota
+    ILLEGAL Type = iota
     EOF
     COMMENT
 

@@ -72,9 +72,9 @@ var tokens = [...]string{
 }
 
 // String returns the string corresponding to the token tok.
-func (t TokenType) String() string {
+func (t Type) String() string {
     s := ""
-    if 0 <= t && t < TokenType(len(tokens)) {
+    if 0 <= t && t < Type(len(tokens)) {
         s = tokens[t]
     }
     if s == "" {

@@ -85,15 +85,15 @@ func (t TokenType) String() string {
 
 // IsIdentifier returns true for tokens corresponding to identifiers and basic
 // type literals; it returns false otherwise.
-func (t TokenType) IsIdentifier() bool { return identifier_beg < t && t < identifier_end }
+func (t Type) IsIdentifier() bool { return identifier_beg < t && t < identifier_end }
 
 // IsLiteral returns true for tokens corresponding to basic type literals; it
 // returns false otherwise.
-func (t TokenType) IsLiteral() bool { return literal_beg < t && t < literal_end }
+func (t Type) IsLiteral() bool { return literal_beg < t && t < literal_end }
 
 // IsOperator returns true for tokens corresponding to operators and
 // delimiters; it returns false otherwise.
-func (t TokenType) IsOperator() bool { return operator_beg < t && t < operator_end }
+func (t Type) IsOperator() bool { return operator_beg < t && t < operator_end }
 
 // String returns the token's literal text. Note that this is only
 // applicable for certain token types, such as token.IDENT,

@@ -2,9 +2,9 @@ package token
 
 import "testing"
 
-func TestTokenTypeString(t *testing.T) {
+func TestTypeString(t *testing.T) {
     var tokens = []struct {
-        tt  TokenType
+        tt  Type
         str string
     }{
         {ILLEGAL, "ILLEGAL"},
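
Taken together, the package's surface is unchanged apart from the name: Scan still returns a token.Token, its Type field now has type token.Type, and String, IsIdentifier, IsLiteral, and IsOperator carry over as methods on the renamed type. A rough usage sketch against the renamed API (import paths, the main package, and the sample input are assumptions, not taken from this commit):

// Rough usage sketch; import paths and the sample input are assumed.
package main

import (
    "fmt"

    "github.com/fatih/hcl/scanner" // path assumed
    "github.com/fatih/hcl/token"   // path assumed
)

func main() {
    s := scanner.New([]byte(`foo = "bar"`))
    for {
        tok := s.Scan()
        if tok.Type == token.EOF {
            break
        }
        // tok.Type is a token.Type after the rename; String() and the
        // Is* predicates behave exactly as they did on TokenType.
        fmt.Printf("%-8s %q identifier=%v\n", tok.Type, tok.Text, tok.Type.IsIdentifier())
    }
}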