From 32f4e84345b4866aa663b0f98e19fd1235881beb Mon Sep 17 00:00:00 2001
From: Fatih Arslan
Date: Fri, 16 Oct 2015 23:23:23 +0300
Subject: [PATCH] token: rename TokenType to Type

---
 parser/parser_test.go   | 28 ++++++++++++++--------------
 scanner/scanner.go      |  4 ++--
 scanner/scanner_test.go |  6 +++---
 token/token.go          | 18 +++++++++---------
 token/token_test.go     |  4 ++--
 5 files changed, 30 insertions(+), 30 deletions(-)

diff --git a/parser/parser_test.go b/parser/parser_test.go
index 4af18ab..31216a2 100644
--- a/parser/parser_test.go
+++ b/parser/parser_test.go
@@ -31,21 +31,21 @@ func TestParseType(t *testing.T) {
 
 func TestObjectKey(t *testing.T) {
 	keys := []struct {
-		exp []token.TokenType
+		exp []token.Type
 		src string
 	}{
-		{[]token.TokenType{token.IDENT}, `foo {}`},
-		{[]token.TokenType{token.IDENT}, `foo = {}`},
-		{[]token.TokenType{token.IDENT}, `foo = bar`},
-		{[]token.TokenType{token.IDENT}, `foo = 123`},
-		{[]token.TokenType{token.IDENT}, `foo = "${var.bar}`},
-		{[]token.TokenType{token.STRING}, `"foo" {}`},
-		{[]token.TokenType{token.STRING}, `"foo" = {}`},
-		{[]token.TokenType{token.STRING}, `"foo" = "${var.bar}`},
-		{[]token.TokenType{token.IDENT, token.IDENT}, `foo bar {}`},
-		{[]token.TokenType{token.IDENT, token.STRING}, `foo "bar" {}`},
-		{[]token.TokenType{token.STRING, token.IDENT}, `"foo" bar {}`},
-		{[]token.TokenType{token.IDENT, token.IDENT, token.IDENT}, `foo bar baz {}`},
+		{[]token.Type{token.IDENT}, `foo {}`},
+		{[]token.Type{token.IDENT}, `foo = {}`},
+		{[]token.Type{token.IDENT}, `foo = bar`},
+		{[]token.Type{token.IDENT}, `foo = 123`},
+		{[]token.Type{token.IDENT}, `foo = "${var.bar}`},
+		{[]token.Type{token.STRING}, `"foo" {}`},
+		{[]token.Type{token.STRING}, `"foo" = {}`},
+		{[]token.Type{token.STRING}, `"foo" = "${var.bar}`},
+		{[]token.Type{token.IDENT, token.IDENT}, `foo bar {}`},
+		{[]token.Type{token.IDENT, token.STRING}, `foo "bar" {}`},
+		{[]token.Type{token.STRING, token.IDENT}, `"foo" bar {}`},
+		{[]token.Type{token.IDENT, token.IDENT, token.IDENT}, `foo bar baz {}`},
 	}
 
 	for _, k := range keys {
@@ -55,7 +55,7 @@ func TestObjectKey(t *testing.T) {
 			t.Fatal(err)
 		}
 
-		tokens := []token.TokenType{}
+		tokens := []token.Type{}
 		for _, o := range keys {
 			tokens = append(tokens, o.Token.Type)
 		}
diff --git a/scanner/scanner.go b/scanner/scanner.go
index f622b18..c600014 100644
--- a/scanner/scanner.go
+++ b/scanner/scanner.go
@@ -127,7 +127,7 @@ func (s *Scanner) Scan() token.Token {
 		ch = s.next()
 	}
 
-	var tok token.TokenType
+	var tok token.Type
 
 	// token text markings
 	s.tokStart = s.srcPos.Offset - s.lastCharLen
@@ -246,7 +246,7 @@ func (s *Scanner) scanComment(ch rune) {
 }
 
 // scanNumber scans a HCL number definition starting with the given rune
-func (s *Scanner) scanNumber(ch rune) token.TokenType {
+func (s *Scanner) scanNumber(ch rune) token.Type {
 	if ch == '0' {
 		// check for hexadecimal, octal or float
 		ch = s.next()
diff --git a/scanner/scanner_test.go b/scanner/scanner_test.go
index f9b2f5e..56ae526 100644
--- a/scanner/scanner_test.go
+++ b/scanner/scanner_test.go
@@ -11,7 +11,7 @@ import (
 var f100 = "ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff"
 
 type tokenPair struct {
-	tok  token.TokenType
+	tok  token.Type
 	text string
 }
 
@@ -272,7 +272,7 @@ func TestRealExample(t *testing.T) {
 	}`
 
 	literals := []struct {
-		tokenType token.TokenType
+		tokenType token.Type
 		literal   string
 	}{
 		{token.COMMENT, `// This comes from Terraform, as a test`},
@@ -367,7 +367,7 @@ func TestError(t *testing.T) {
 	testError(t, `/*/`, "1:4", "comment not terminated", token.COMMENT)
 }
 
-func testError(t *testing.T, src, pos, msg string, tok token.TokenType) {
+func testError(t *testing.T, src, pos, msg string, tok token.Type) {
 	s := New([]byte(src))
 
 	errorCalled := false
diff --git a/token/token.go b/token/token.go
index ada0d86..7c51f69 100644
--- a/token/token.go
+++ b/token/token.go
@@ -7,17 +7,17 @@ import (
 
 // Token defines a single HCL token which can be obtained via the Scanner
 type Token struct {
-	Type TokenType
+	Type Type
 	Pos  Pos
 	Text string
 }
 
-// TokenType is the set of lexical tokens of the HCL (HashiCorp Configuration Language)
-type TokenType int
+// Type is the set of lexical tokens of the HCL (HashiCorp Configuration Language)
+type Type int
 
 const (
 	// Special tokens
-	ILLEGAL TokenType = iota
+	ILLEGAL Type = iota
 	EOF
 	COMMENT
 
@@ -72,9 +72,9 @@ var tokens = [...]string{
 }
 
 // String returns the string corresponding to the token tok.
-func (t TokenType) String() string {
+func (t Type) String() string {
 	s := ""
-	if 0 <= t && t < TokenType(len(tokens)) {
+	if 0 <= t && t < Type(len(tokens)) {
 		s = tokens[t]
 	}
 	if s == "" {
@@ -85,15 +85,15 @@
 
 // IsIdentifier returns true for tokens corresponding to identifiers and basic
 // type literals; it returns false otherwise.
-func (t TokenType) IsIdentifier() bool { return identifier_beg < t && t < identifier_end }
+func (t Type) IsIdentifier() bool { return identifier_beg < t && t < identifier_end }
 
 // IsLiteral returns true for tokens corresponding to basic type literals; it
 // returns false otherwise.
-func (t TokenType) IsLiteral() bool { return literal_beg < t && t < literal_end }
+func (t Type) IsLiteral() bool { return literal_beg < t && t < literal_end }
 
 // IsOperator returns true for tokens corresponding to operators and
 // delimiters; it returns false otherwise.
-func (t TokenType) IsOperator() bool { return operator_beg < t && t < operator_end }
+func (t Type) IsOperator() bool { return operator_beg < t && t < operator_end }
 
 // String returns the token's literal text. Note that this is only
 // applicable for certain token types, such as token.IDENT,
diff --git a/token/token_test.go b/token/token_test.go
index 534c1d0..796945c 100644
--- a/token/token_test.go
+++ b/token/token_test.go
@@ -2,9 +2,9 @@ package token
 
 import "testing"
 
-func TestTokenTypeString(t *testing.T) {
+func TestTypeString(t *testing.T) {
 	var tokens = []struct {
-		tt  TokenType
+		tt  Type
 		str string
 	}{
 		{ILLEGAL, "ILLEGAL"},