hcl/json: allow more flexible use of arrays when describing bodies
Previously we allowed arrays only at the "leaf" of the nested objects describing a block and its labels. That is not sufficient, because it makes it impossible to preserve the relative ordering of a sequence of blocks that have different block types or labels.

The spec now allows an array of objects to be used in place of a single object wherever the value represents either an HCL body or a set of labels on a nested block. This relaxation does not apply to JSON objects interpreted as expressions, nor to bodies interpreted in dynamic attributes mode, since those scenarios have no requirement to preserve attribute ordering or to support duplicate property names.

This new model imposes additional constraints on the underlying JSON parser used to interpret JSON HCL: it must now retain the relative ordering of object keys and accept multiple definitions of the same key. No such requirement is imposed on _producers_, which are free to use the allowance for arrays of objects to express ordering and duplicate keys even when their JSON-producing library cannot.

Since we now require a specialized parser anyway, we also require that it represent numbers at full precision, whereas before we made some allowance for implementations that do not support this.
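As a concrete illustration (a minimal sketch, not copied from the changed files), the relaxed spec admits a document like the following. It reuses the hypothetical `foo` block type with two labels from the spec examples touched in this commit, plus an invented sibling block type `other` and placeholder attribute values, purely to show interleaving. The root body is an array of objects, so the three block definitions keep their relative order, and the two `foo` definitions can coexist even when the producing library cannot emit duplicate keys in one object:

```json
[
  {
    "foo": {
      "bar": {
        "baz": {
          "child_attr": "one"
        }
      }
    }
  },
  {
    "other": {
      "child_attr": "two"
    }
  },
  {
    "foo": {
      "bar": {
        "boz": {
          "child_attr": "three"
        }
      }
    }
  }
]
```

A parser meeting the new requirements visits each array element and its properties in order, so the blocks decode as `foo` (bar, baz), `other`, `foo` (bar, boz) — an interleaving that a single JSON object could not express without either dropping a duplicate `foo` key or losing the ordering.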
Parent: 77dc2cba20
Commit: eea3a14a71
@@ -61,6 +61,11 @@ func TestTerraformLike(t *testing.T) {
 Required: true,
 Type: cty.String,
 },
+"tags": &hcldec.AttrSpec{
+Name: "tags",
+Required: false,
+Type: cty.Map(cty.String),
+},
 }
 securityGroupDecode := &hcldec.ObjectSpec{
 "ingress": &hcldec.BlockListSpec{
@@ -235,6 +240,10 @@ func TestTerraformLike(t *testing.T) {
 wantCfg := cty.ObjectVal(map[string]cty.Value{
 "instance_type": cty.StringVal("z3.weedy"),
 "image_id": cty.StringVal("image-1234"),
+"tags": cty.MapVal(map[string]cty.Value{
+"Name": cty.StringVal("foo"),
+"Environment": cty.StringVal("prod"),
+}),
 })
 if !cfg.RawEquals(wantCfg) {
 t.Errorf("wrong config\ngot: %#v\nwant: %#v", cfg, wantCfg)
@@ -280,6 +289,11 @@ resource "happycloud_instance" "test" {
 instance_type = "z3.weedy"
 image_id = var.image_id
 
+tags = {
+"Name" = "foo"
+"${"Environment"}" = "prod"
+}
+
 depends_on = [
 happycloud_security_group.public,
 ]
@@ -318,6 +332,10 @@ const terraformLikeJSON = `
 "test": {
 "instance_type": "z3.weedy",
 "image_id": "${var.image_id}",
+"tags": {
+"Name": "foo",
+"${\"Environment\"}": "prod"
+},
 "depends_on": [
 "happycloud_security_group.public"
 ]
@@ -12,7 +12,7 @@ type node interface {
 }
 
 type objectVal struct {
-Attrs map[string]*objectAttr
+Attrs []*objectAttr
 SrcRange hcl.Range // range of the entire object, brace-to-brace
 OpenRange hcl.Range // range of the opening brace
 CloseRange hcl.Range // range of the closing brace
@@ -1,11 +1,12 @@
 package json
 
 import (
+"fmt"
 "strings"
 )
 
 type navigation struct {
-root *objectVal
+root node
 }
 
 // Implementation of hcled.ContextString
@@ -21,21 +22,49 @@ func (n navigation) ContextString(offset int) string {
 steps[i], steps[len(steps)-1-i] = steps[len(steps)-1-i], steps[i]
 }
 
-return strings.Join(steps, ".")
+ret := strings.Join(steps, "")
+if len(ret) > 0 && ret[0] == '.' {
+ret = ret[1:]
+}
+return ret
 }
 
-func navigationStepsRev(obj *objectVal, offset int) []string {
-// Do any of our properties have an object that contains the target
-// offset?
-for k, attr := range obj.Attrs {
-ov, ok := attr.Value.(*objectVal)
-if !ok {
-continue
-}
-
-if ov.SrcRange.ContainsOffset(offset) {
-return append(navigationStepsRev(ov, offset), k)
+func navigationStepsRev(v node, offset int) []string {
+switch tv := v.(type) {
+case *objectVal:
+// Do any of our properties have an object that contains the target
+// offset?
+for _, attr := range tv.Attrs {
+k := attr.Name
+av := attr.Value
+
+switch av.(type) {
+case *objectVal, *arrayVal:
+// okay
+default:
+continue
+}
+
+if av.Range().ContainsOffset(offset) {
+return append(navigationStepsRev(av, offset), "."+k)
+}
+}
+case *arrayVal:
+// Do any of our elements contain the target offset?
+for i, elem := range tv.Values {
+
+switch elem.(type) {
+case *objectVal, *arrayVal:
+// okay
+default:
+continue
+}
+
+if elem.Range().ContainsOffset(offset) {
+return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i))
+}
 }
 }
 
 return nil
 }
@@ -1,6 +1,7 @@
 package json
 
 import (
+"fmt"
 "strconv"
 "testing"
 )
@@ -13,14 +14,20 @@ func TestNavigationContextString(t *testing.T) {
 "null_resource": {
 "baz": {
 "id": "foo"
-}
+},
+"boz": [
+{
+"ov": { }
+}
+]
 }
 }
 }
 `
 file, diags := Parse([]byte(src), "test.json")
 if len(diags) != 0 {
-t.Errorf("Unexpected diagnostics: %#v", diags)
+fmt.Printf("offset %d\n", diags[0].Subject.Start.Byte)
+t.Errorf("Unexpected diagnostics: %s", diags)
 }
 if file == nil {
 t.Fatalf("Got nil file")
@@ -36,6 +43,7 @@ func TestNavigationContextString(t *testing.T) {
 {36, `resource`},
 {60, `resource.null_resource`},
 {89, `resource.null_resource.baz`},
+{141, `resource.null_resource.boz`},
 }
 
 for _, test := range tests {
@@ -103,7 +103,7 @@ func parseObject(p *peeker) (node, hcl.Diagnostics) {
 var diags hcl.Diagnostics
 
 open := p.Read()
-attrs := map[string]*objectAttr{}
+attrs := []*objectAttr{}
 
 // recover is used to shift the peeker to what seems to be the end of
 // our object, so that when we encounter an error we leave the peeker
@@ -191,24 +191,11 @@ Token:
 return nil, diags
 }
 
-if existing := attrs[key]; existing != nil {
-// Generate a diagnostic for the duplicate key, but continue parsing
-// anyway since this is a semantic error we can recover from.
-diags = diags.Append(&hcl.Diagnostic{
-Severity: hcl.DiagError,
-Summary: "Duplicate JSON object property",
-Detail: fmt.Sprintf(
-"An property named %q was previously introduced at %s",
-key, existing.NameRange.String(),
-),
-Subject: &keyStrNode.SrcRange,
-})
-}
-attrs[key] = &objectAttr{
+attrs = append(attrs, &objectAttr{
 Name: key,
 Value: valNode,
 NameRange: keyStrNode.SrcRange,
-}
+})
 
 switch p.Peek().Type {
 case tokenComma:
@@ -2,10 +2,9 @@ package json
 
 import (
 "math/big"
-"reflect"
 "testing"
 
-"github.com/davecgh/go-spew/spew"
+"github.com/go-test/deep"
 "github.com/hashicorp/hcl2/hcl"
 )
 
@@ -222,8 +221,8 @@ func TestParse(t *testing.T) {
 {
 `{"hello": true}`,
 &objectVal{
-Attrs: map[string]*objectAttr{
-"hello": {
+Attrs: []*objectAttr{
+{
 Name: "hello",
 Value: &booleanVal{
 Value: true,
@@ -256,8 +255,8 @@ func TestParse(t *testing.T) {
 {
 `{"hello": true, "bye": false}`,
 &objectVal{
-Attrs: map[string]*objectAttr{
-"hello": {
+Attrs: []*objectAttr{
+{
 Name: "hello",
 Value: &booleanVal{
 Value: true,
@@ -271,7 +270,7 @@ func TestParse(t *testing.T) {
 End: hcl.Pos{Line: 1, Column: 9, Byte: 8},
 },
 },
-"bye": {
+{
 Name: "bye",
 Value: &booleanVal{
 Value: false,
@@ -304,7 +303,7 @@ func TestParse(t *testing.T) {
 {
 `{}`,
 &objectVal{
-Attrs: map[string]*objectAttr{},
+Attrs: []*objectAttr{},
 SrcRange: hcl.Range{
 Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
 End: hcl.Pos{Line: 1, Column: 3, Byte: 2},
@@ -355,8 +354,22 @@ func TestParse(t *testing.T) {
 {
 `{"hello": true, "hello": true}`,
 &objectVal{
-Attrs: map[string]*objectAttr{
-"hello": {
+Attrs: []*objectAttr{
+{
+Name: "hello",
+Value: &booleanVal{
+Value: true,
+SrcRange: hcl.Range{
+Start: hcl.Pos{Line: 1, Column: 11, Byte: 10},
+End: hcl.Pos{Line: 1, Column: 15, Byte: 14},
+},
+},
+NameRange: hcl.Range{
+Start: hcl.Pos{Line: 1, Column: 2, Byte: 1},
+End: hcl.Pos{Line: 1, Column: 9, Byte: 8},
+},
+},
+{
 Name: "hello",
 Value: &booleanVal{
 Value: true,
@@ -384,7 +397,7 @@ func TestParse(t *testing.T) {
 End: hcl.Pos{Line: 1, Column: 31, Byte: 30},
 },
 },
-1,
+0,
 },
 {
 `{"hello": true, "hello": true, "hello", true}`,
@@ -392,7 +405,7 @@ func TestParse(t *testing.T) {
 Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
 End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
 }},
-2,
+1, // comma used where colon is expected
 },
 {
 `{"hello", "world"}`,
@@ -590,11 +603,10 @@ func TestParse(t *testing.T) {
 }
 }
 
-if !reflect.DeepEqual(got, test.Want) {
-t.Errorf(
-"wrong result\ninput: %s\ngot: %s\nwant: %s",
-test.Input, spew.Sdump(got), spew.Sdump(test.Want),
-)
+if diff := deep.Equal(got, test.Want); diff != nil {
+for _, problem := range diff {
+t.Error(problem)
+}
 }
 })
 }
@@ -19,19 +19,22 @@
 // the subset of data that was able to be parsed, which may be none.
 func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
 rootNode, diags := parseFileContent(src, filename)
-if _, ok := rootNode.(*objectVal); !ok {
+switch rootNode.(type) {
+case *objectVal, *arrayVal:
+// okay
+default:
 diags = diags.Append(&hcl.Diagnostic{
 Severity: hcl.DiagError,
 Summary: "Root value must be object",
-Detail: "The root value in a JSON-based configuration must be a JSON object.",
+Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.",
 Subject: rootNode.StartRange().Ptr(),
 })
-// Put in a placeholder objectVal just so the caller always gets
-// a valid file, even if it appears empty. This is useful for callers
-// that are doing static analysis of possibly-erroneous source code,
-// which will try to process the returned file even if we return
-// diagnostics of severity error. This way, they'll get a file that
-// has an empty body rather than a body that panics when probed.
+// Since we've already produced an error message for this being
+// invalid, we'll return an empty placeholder here so that trying to
+// extract content from our root body won't produce a redundant
+// error saying the same thing again in more general terms.
 fakePos := hcl.Pos{
 Byte: 0,
 Line: 1,
@@ -43,17 +46,18 @@ func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
 End: fakePos,
 }
 rootNode = &objectVal{
-Attrs: map[string]*objectAttr{},
+Attrs: []*objectAttr{},
 SrcRange: fakeRange,
 OpenRange: fakeRange,
 }
 }
 
 file := &hcl.File{
 Body: &body{
-obj: rootNode.(*objectVal),
+val: rootNode,
 },
 Bytes: src,
-Nav: navigation{rootNode.(*objectVal)},
+Nav: navigation{rootNode},
 }
 return file, diags
 }
@@ -19,7 +19,7 @@ func TestParse_nonObject(t *testing.T) {
 if file.Body == nil {
 t.Fatalf("got nil Body; want actual body")
 }
-if file.Body.(*body).obj == nil {
+if file.Body.(*body).val == nil {
 t.Errorf("got nil Body object; want placeholder object")
 }
 }
hcl/json/spec.md
@@ -13,19 +13,36 @@ grammar as-is, and merely defines a specific methodology for interpreting
 JSON constructs into HCL structural elements and expressions.
 
 This mapping is defined such that valid JSON-serialized HCL input can be
-produced using standard JSON implementations in various programming languages.
+_produced_ using standard JSON implementations in various programming languages.
 _Parsing_ such JSON has some additional constraints not beyond what is normally
-supported by JSON parsers, though adaptations are defined to allow processing
-with an off-the-shelf JSON parser with certain caveats, described in later
-sections.
+supported by JSON parsers, so a specialized parser may be required that
+is able to:
+
+* Preserve the relative ordering of properties defined in an object.
+* Preserve multiple definitions of the same property name.
+* Preserve numeric values to the precision required by the number type
+in [the HCL syntax-agnostic information model](../spec.md).
+* Retain source location information for parsed tokens/constructs in order
+to produce good error messages.
 
 ## Structural Elements
 
-The HCL language-agnostic information model defines a _body_ as an abstract
-container for attribute definitions and child blocks. A body is represented
-in JSON as a JSON _object_.
+[The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an
+abstract container for attribute definitions and child blocks. A body is
+represented in JSON as either a single JSON object or a JSON array of objects.
 
-As defined in the language-agnostic model, body processing is done in terms
+Body processing is in terms of JSON object properties, visited in the order
+they appear in the input. Where a body is represented by a single JSON object,
+the properties of that object are visited in order. Where a body is
+represented by a JSON array, each of its elements are visited in order and
+each element has its properties visited in order. If any element of the array
+is not a JSON object then the input is erroneous.
+
+When a body is being processed in the _dynamic attributes_ mode, the allowance
+of a JSON array in the previous paragraph does not apply and instead a single
+JSON object is always required.
+
+As defined in the language-agnostic model, body processing is in terms
 of a schema which provides context for interpreting the body's content. For
 JSON bodies, the schema is crucial to allow differentiation of attribute
 definitions and block definitions, both of which are represented via object
@@ -61,14 +78,16 @@ the following provides a definition for that attribute:
 
 ### Blocks
 
-Where the given schema describes a block with a given type name, the object
-property with the matching name — if present — serves as a definition of
-zero or more blocks of that type.
+Where the given schema describes a block with a given type name, each object
+property with the matching name serves as a definition of zero or more blocks
+of that type.
 
 Processing of child blocks is in terms of nested JSON objects and arrays.
-If the schema defines one or more _labels_ for the block type, a nested
-object is required for each labelling level, with the object keys serving as
-the label values at that level.
+If the schema defines one or more _labels_ for the block type, a nested JSON
+object or JSON array of objects is required for each labelling level. These
+are flattened to a single ordered sequence of object properties using the
+same algorithm as for body content as defined above. Each object property
+serves as a label value at the corresponding level.
 
 After any labelling levels, the next nested value is either a JSON object
 representing a single block body, or a JSON array of JSON objects that each
@@ -111,7 +130,8 @@ of zero blocks, though generators should prefer to omit the property entirely
 in this scenario.
 
 Given a schema that calls for a block type named "foo" with _two_ labels, the
-extra label levels must be represented as objects as in the following examples:
+extra label levels must be represented as objects or arrays of objects as in
+the following examples:
 
 ```json
 {
@@ -132,6 +152,7 @@ extra label levels must be represented as objects as in the following examples:
 }
 }
 ```
+
 ```json
 {
 "foo": {
@@ -157,10 +178,70 @@ extra label levels must be represented as objects as in the following examples:
 }
 ```
 
-Where multiple definitions are included for the same type and labels, the
-JSON array is always the value of the property representing the final label,
-and contains objects representing block bodies. It is not valid to use an array
-at any other point in the block definition structure.
+```json
+{
+"foo": [
+{
+"bar": {
+"baz": {
+"child_attr": "baz"
+},
+"boz": {
+"child_attr": "baz"
+}
+},
+},
+{
+"bar": {
+"baz": [
+{
+"child_attr": "baz"
+},
+{
+"child_attr": "boz"
+}
+]
+}
+}
+]
+}
+```
+
+```json
+{
+"foo": {
+"bar": {
+"baz": {
+"child_attr": "baz"
+},
+"boz": {
+"child_attr": "baz"
+}
+},
+"bar": {
+"baz": [
+{
+"child_attr": "baz"
+},
+{
+"child_attr": "boz"
+}
+]
+}
+}
+}
+```
+
+Arrays can be introduced at either the label definition or block body
+definition levels to define multiple definitions of the same block type
+or labels while preserving order.
+
+A JSON HCL parser _must_ support duplicate definitions of the same property
+name within a single object, preserving all of them and the relative ordering
+between them. The array-based forms are also required so that JSON HCL
+configurations can be produced with JSON producing libraries that are not
+able to preserve property definition order and multiple definitions of
+the same property.
 
 ## Expressions
 
@@ -174,17 +255,24 @@ When interpreted as an expression, a JSON object represents a value of a HCL
 object type.
 
 Each property of the JSON object represents an attribute of the HCL object type.
-The object type is constructed by enumerating the JSON object properties,
-creating for each an attribute whose name exactly matches the property name,
-and whose type is the result of recursively applying the expression mapping
-rules.
+The property name string given in the JSON input is interpreted as a string
+expression as described below, and its result is converted to string as defined
+by the syntax-agnostic information model. If such a conversion is not possible,
+an error is produced and evaluation fails.
 
 An instance of the constructed object type is then created, whose values
 are interpreted by again recursively applying the mapping rules defined in
-this section.
+this section to each of the property values.
 
+If any evaluated property name strings produce null values, an error is
+produced and evaluation fails. If any produce _unknown_ values, the _entire
+object's_ result is an unknown value of the dynamic pseudo-type, signalling
+that the type of the object cannot be determined.
+
 It is an error to define the same property name multiple times within a single
-JSON object interpreted as an expression.
+JSON object interpreted as an expression. In full expression mode, this
+constraint applies to the name expression results after conversion to string,
+rather than the raw string that may contain interpolation expressions.
 
 ### Arrays
 
@@ -205,18 +293,25 @@ section.
 
 When interpreted as an expression, a JSON number represents a HCL number value.
 
-HCL numbers are arbitrary-precision decimal values, so an ideal implementation
-of this specification will translate exactly the value given to a number of
-corresponding precision.
+HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must
+be able to translate exactly the value given to a number of corresponding
+precision, within the constraints set by the HCL syntax-agnostic information
+model.
 
-In practice, off-the-shelf JSON parsers often do not support customizing the
+In practice, off-the-shelf JSON serializers often do not support customizing the
 processing of numbers, and instead force processing as 32-bit or 64-bit
-floating point values with a potential loss of precision. It is permissable
-for a HCL JSON parser to pass on such limitations _if and only if_ the
-available precision and other constraints are defined in its documentation.
-Calling applications each have differing precision requirements, so calling
-applications are free to select an implementation with more limited precision
-capabilities should high precision not be required for that application.
+floating point values.
+
+A _producer_ of JSON HCL that uses such a serializer can provide numeric values
+as JSON strings where they have precision too great for representation in the
+serializer's chosen numeric type in situations where the result will be
+converted to number (using the standard conversion rules) by a calling
+application.
+
+Alternatively, for expressions that are evaluated in full expression mode an
+embedded template interpolation can be used to faithfully represent a number,
+such as `"${1e150}"`, which will then be evaluated by the underlying HCL native
+syntax expression evaluator.
 
 ### Boolean Values
 
@@ -6,12 +6,13 @@ import (
 "github.com/hashicorp/hcl2/hcl"
 "github.com/hashicorp/hcl2/hcl/hclsyntax"
 "github.com/zclconf/go-cty/cty"
+"github.com/zclconf/go-cty/cty/convert"
 )
 
 // body is the implementation of "Body" used for files processed with the JSON
 // parser.
 type body struct {
-obj *objectVal
+val node
 
 // If non-nil, the keys of this map cause the corresponding attributes to
 // be treated as non-existing. This is used when Body.PartialContent is
@@ -43,7 +44,11 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
 nameSuggestions = append(nameSuggestions, blockS.Type)
 }
 
-for k, attr := range b.obj.Attrs {
+jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
+diags = append(diags, attrDiags...)
+
+for _, attr := range jsonAttrs {
+k := attr.Name
 if k == "//" {
 // Ignore "//" keys in objects representing bodies, to allow
 // their use as comments.
@@ -51,16 +56,15 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
 }
 
 if _, ok := hiddenAttrs[k]; !ok {
-var fixItHint string
 suggestion := nameSuggestion(k, nameSuggestions)
 if suggestion != "" {
-fixItHint = fmt.Sprintf(" Did you mean %q?", suggestion)
+suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
 }
 
 diags = append(diags, &hcl.Diagnostic{
 Severity: hcl.DiagError,
 Summary: "Extraneous JSON object property",
-Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, fixItHint),
+Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, suggestion),
 Subject: &attr.NameRange,
 Context: attr.Range().Ptr(),
 })
@@ -71,16 +75,17 @@ func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Bod
 }
 
 func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
-obj := b.obj
-jsonAttrs := obj.Attrs
+var diags hcl.Diagnostics
+
+jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
+diags = append(diags, attrDiags...)
+
 usedNames := map[string]struct{}{}
 if b.hiddenAttrs != nil {
 for k := range b.hiddenAttrs {
 usedNames[k] = struct{}{}
 }
 }
-var diags hcl.Diagnostics
 
 content := &hcl.BodyContent{
 Attributes: map[string]*hcl.Attribute{},
@@ -89,43 +94,70 @@ func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Bod
 MissingItemRange: b.MissingItemRange(),
 }
 
+// Create some more convenient data structures for our work below.
+attrSchemas := map[string]hcl.AttributeSchema{}
+blockSchemas := map[string]hcl.BlockHeaderSchema{}
 for _, attrS := range schema.Attributes {
-jsonAttr, exists := jsonAttrs[attrS.Name]
-_, used := usedNames[attrS.Name]
-if used || !exists {
-if attrS.Required {
-diags = diags.Append(&hcl.Diagnostic{
-Severity: hcl.DiagError,
-Summary: "Missing required attribute",
-Detail: fmt.Sprintf("The attribute %q is required, so a JSON object property must be present with this name.", attrS.Name),
-Subject: &obj.OpenRange,
-})
-}
-continue
-}
-content.Attributes[attrS.Name] = &hcl.Attribute{
-Name: attrS.Name,
-Expr: &expression{src: jsonAttr.Value},
-Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
-NameRange: jsonAttr.NameRange,
-}
-usedNames[attrS.Name] = struct{}{}
+attrSchemas[attrS.Name] = attrS
+}
+for _, blockS := range schema.Blocks {
+blockSchemas[blockS.Type] = blockS
 }
 
-for _, blockS := range schema.Blocks {
-jsonAttr, exists := jsonAttrs[blockS.Type]
-_, used := usedNames[blockS.Type]
-if used || !exists {
-usedNames[blockS.Type] = struct{}{}
+for _, jsonAttr := range jsonAttrs {
+attrName := jsonAttr.Name
+if _, used := b.hiddenAttrs[attrName]; used {
 continue
 }
-v := jsonAttr.Value
-diags = append(diags, b.unpackBlock(v, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)...)
-usedNames[blockS.Type] = struct{}{}
+
+if attrS, defined := attrSchemas[attrName]; defined {
+if existing, exists := content.Attributes[attrName]; exists {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Duplicate attribute definition",
+Detail: fmt.Sprintf("The attribute %q was already defined at %s.", attrName, existing.Range),
+Subject: &jsonAttr.NameRange,
+Context: jsonAttr.Range().Ptr(),
+})
+continue
+}
+
+content.Attributes[attrS.Name] = &hcl.Attribute{
+Name: attrS.Name,
+Expr: &expression{src: jsonAttr.Value},
+Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
+NameRange: jsonAttr.NameRange,
+}
+usedNames[attrName] = struct{}{}
+
+} else if blockS, defined := blockSchemas[attrName]; defined {
+bv := jsonAttr.Value
+blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)
+diags = append(diags, blockDiags...)
+usedNames[attrName] = struct{}{}
+}
+
+// We ignore anything that isn't defined because that's the
+// PartialContent contract. The Content method will catch leftovers.
+}
+
+// Make sure we got all the required attributes.
+for _, attrS := range schema.Attributes {
+if !attrS.Required {
+continue
+}
+if _, defined := content.Attributes[attrS.Name]; !defined {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Missing required attribute",
+Detail: fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
+Subject: b.MissingItemRange().Ptr(),
+})
+}
 }
 
 unusedBody := &body{
-obj: b.obj,
+val: b.val,
 hiddenAttrs: usedNames,
 }
 
@@ -135,8 +167,22 @@ func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Bod
 // JustAttributes for JSON bodies interprets all properties of the wrapped
 // JSON object as attributes and returns them.
 func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
+var diags hcl.Diagnostics
 attrs := make(map[string]*hcl.Attribute)
-for name, jsonAttr := range b.obj.Attrs {
+
+obj, ok := b.val.(*objectVal)
+if !ok {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Incorrect JSON value type",
+Detail: "A JSON object is required here, defining the attributes for this block.",
+Subject: b.val.StartRange().Ptr(),
+})
+return attrs, diags
+}
+
+for _, jsonAttr := range obj.Attrs {
+name := jsonAttr.Name
 if name == "//" {
 // Ignore "//" keys in objects representing bodies, to allow
 // their use as comments.
@@ -156,27 +202,29 @@ func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
 
 // No diagnostics possible here, since the parser already took care of
 // finding duplicates and every JSON value can be a valid attribute value.
-return attrs, nil
+return attrs, diags
 }
 
 func (b *body) MissingItemRange() hcl.Range {
-return b.obj.CloseRange
+switch tv := b.val.(type) {
+case *objectVal:
+return tv.CloseRange
+case *arrayVal:
+return tv.OpenRange
+default:
+// Should not happen in correct operation, but might show up if the
+// input is invalid and we are producing partial results.
+return tv.StartRange()
+}
 }
 
 func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
 if len(labelsLeft) > 0 {
 labelName := labelsLeft[0]
-ov, ok := v.(*objectVal)
-if !ok {
-diags = diags.Append(&hcl.Diagnostic{
-Severity: hcl.DiagError,
-Summary: "Incorrect JSON value type",
-Detail: fmt.Sprintf("A JSON object is required, whose keys represent the %s block's %s.", typeName, labelName),
-Subject: v.StartRange().Ptr(),
-})
-return
-}
-if len(ov.Attrs) == 0 {
+jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName)
+diags = append(diags, attrDiags...)
+
+if len(jsonAttrs) == 0 {
 diags = diags.Append(&hcl.Diagnostic{
 Severity: hcl.DiagError,
 Summary: "Missing block label",
@@ -187,7 +235,8 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
 }
 labelsUsed := append(labelsUsed, "")
 labelRanges := append(labelRanges, hcl.Range{})
-for pk, p := range ov.Attrs {
+for _, p := range jsonAttrs {
+pk := p.Name
 labelsUsed[len(labelsUsed)-1] = pk
 labelRanges[len(labelRanges)-1] = p.NameRange
 diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
@@ -212,7 +261,7 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
 Type: typeName,
 Labels: labels,
 Body: &body{
-obj: tv,
+val: tv,
 },
 
 DefRange: tv.OpenRange,
@@ -222,22 +271,11 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
 case *arrayVal:
 // Multiple instances of the block
 for _, av := range tv.Values {
-ov, ok := av.(*objectVal)
-if !ok {
-diags = diags.Append(&hcl.Diagnostic{
-Severity: hcl.DiagError,
-Summary: "Incorrect JSON value type",
-Detail: fmt.Sprintf("A JSON object is required, representing the contents of a %q block.", typeName),
-Subject: v.StartRange().Ptr(),
-})
-continue
-}
-
 *blocks = append(*blocks, &hcl.Block{
 Type: typeName,
 Labels: labels,
 Body: &body{
-obj: ov,
+val: av, // might be mistyped; we'll find out when content is requested for this body
 },
 
 DefRange: tv.OpenRange,
@@ -256,6 +294,74 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
 return
 }
 
+// collectDeepAttrs takes either a single object or an array of objects and
+// flattens it into a list of object attributes, collecting attributes from
+// all of the objects in a given array.
+//
+// Ordering is preserved, so a list of objects that each have one property
+// will result in those properties being returned in the same order as the
+// objects appeared in the array.
+//
+// This is appropriate for use only for objects representing bodies or labels
+// within a block.
+//
+// The labelName argument, if non-null, is used to tailor returned error
+// messages to refer to block labels rather than attributes and child blocks.
+// It has no other effect.
+func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) {
+var diags hcl.Diagnostics
+var attrs []*objectAttr
+
+switch tv := v.(type) {
+
+case *objectVal:
+attrs = append(attrs, tv.Attrs...)
+
+case *arrayVal:
+for _, ev := range tv.Values {
+switch tev := ev.(type) {
+case *objectVal:
+attrs = append(attrs, tev.Attrs...)
+default:
+if labelName != nil {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Incorrect JSON value type",
+Detail: fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName),
+Subject: ev.StartRange().Ptr(),
+})
+} else {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Incorrect JSON value type",
+Detail: "A JSON object is required here, to define attributes and child blocks.",
+Subject: ev.StartRange().Ptr(),
+})
+}
+}
+}
+
+default:
+if labelName != nil {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Incorrect JSON value type",
+Detail: fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName),
+Subject: v.StartRange().Ptr(),
+})
+} else {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Incorrect JSON value type",
+Detail: "Either a JSON object or JSON array of objects is required here, to define attributes and child blocks.",
+Subject: v.StartRange().Ptr(),
+})
+}
+}
+
+return attrs, diags
+}
+
 func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
 switch v := e.src.(type) {
 case *stringVal:
@@ -301,12 +407,75 @@ func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
 }
 return cty.TupleVal(vals), nil
 case *objectVal:
+var diags hcl.Diagnostics
 attrs := map[string]cty.Value{}
-for name, jsonAttr := range v.Attrs {
-val, _ := (&expression{src: jsonAttr.Value}).Value(ctx)
-attrs[name] = val
+attrRanges := map[string]hcl.Range{}
+known := true
+for _, jsonAttr := range v.Attrs {
+// In this one context we allow keys to contain interpolation
+// experessions too, assuming we're evaluating in interpolation
+// mode. This achieves parity with the native syntax where
+// object expressions can have dynamic keys, while block contents
+// may not.
+name, nameDiags := (&expression{src: &stringVal{
+Value: jsonAttr.Name,
+SrcRange: jsonAttr.NameRange,
+}}).Value(ctx)
+val, valDiags := (&expression{src: jsonAttr.Value}).Value(ctx)
+diags = append(diags, nameDiags...)
+diags = append(diags, valDiags...)
+
+var err error
+name, err = convert.Convert(name, cty.String)
+if err != nil {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Invalid object key expression",
+Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err),
+Subject: &jsonAttr.NameRange,
+})
+continue
+}
+if name.IsNull() {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Invalid object key expression",
+Detail: "Cannot use null value as an object key.",
+Subject: &jsonAttr.NameRange,
+})
+continue
+}
+if !name.IsKnown() {
+// This is a bit of a weird case, since our usual rules require
+// us to tolerate unknowns and just represent the result as
+// best we can but if we don't know the key then we can't
+// know the type of our object at all, and thus we must turn
+// the whole thing into cty.DynamicVal. This is consistent with
+// how this situation is handled in the native syntax.
+// We'll keep iterating so we can collect other errors in
+// subsequent attributes.
+known = false
+continue
+}
+nameStr := name.AsString()
+if _, defined := attrs[nameStr]; defined {
+diags = append(diags, &hcl.Diagnostic{
+Severity: hcl.DiagError,
+Summary: "Duplicate object attribute",
+Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]),
+Subject: &jsonAttr.NameRange,
+})
+continue
+}
+attrs[nameStr] = val
+attrRanges[nameStr] = jsonAttr.NameRange
 }
-return cty.ObjectVal(attrs), nil
+if !known {
+// We encountered an unknown key somewhere along the way, so
+// we can't know what our type will eventually be.
+return cty.DynamicVal, diags
+}
+return cty.ObjectVal(attrs), diags
 default:
 // Default to DynamicVal so that ASTs containing invalid nodes can
 // still be partially-evaluated.
@@ -6,6 +6,7 @@ import (
 "testing"
 
 "github.com/davecgh/go-spew/spew"
+"github.com/go-test/deep"
 "github.com/hashicorp/hcl2/hcl"
 )
 
@@ -29,6 +30,45 @@ func TestBodyPartialContent(t *testing.T) {
 },
 0,
 },
+{
+`[]`,
+&hcl.BodySchema{},
+&hcl.BodyContent{
+Attributes: map[string]*hcl.Attribute{},
+MissingItemRange: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
+End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
+},
+},
+0,
+},
+{
+`[{}]`,
+&hcl.BodySchema{},
+&hcl.BodyContent{
+Attributes: map[string]*hcl.Attribute{},
+MissingItemRange: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
+End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
+},
+},
+0,
+},
+{
+`[[]]`,
+&hcl.BodySchema{},
+&hcl.BodyContent{
+Attributes: map[string]*hcl.Attribute{},
+MissingItemRange: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
+End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
+},
+},
+1, // elements of root array must be objects
+},
 {
 `{"//": "comment that should be ignored"}`,
 &hcl.BodySchema{},
@@ -42,6 +82,19 @@ func TestBodyPartialContent(t *testing.T) {
 },
 0,
 },
+{
+`{"//": "comment that should be ignored", "//": "another comment"}`,
+&hcl.BodySchema{},
+&hcl.BodyContent{
+Attributes: map[string]*hcl.Attribute{},
+MissingItemRange: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{Line: 1, Column: 65, Byte: 64},
+End: hcl.Pos{Line: 1, Column: 66, Byte: 65},
+},
+},
+0,
+},
 {
 `{"name":"Ermintrude"}`,
 &hcl.BodySchema{
@@ -109,6 +162,73 @@ func TestBodyPartialContent(t *testing.T) {
 },
 0,
 },
+{
+`[{"name":"Ermintrude"}]`,
+&hcl.BodySchema{
+Attributes: []hcl.AttributeSchema{
+{
+Name: "name",
+},
+},
+},
+&hcl.BodyContent{
+Attributes: map[string]*hcl.Attribute{
+"name": &hcl.Attribute{
+Name: "name",
+Expr: &expression{
+src: &stringVal{
+Value: "Ermintrude",
+SrcRange: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{
+Byte: 9,
+Line: 1,
+Column: 10,
+},
+End: hcl.Pos{
+Byte: 21,
+Line: 1,
+Column: 22,
+},
+},
+},
+},
+Range: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{
+Byte: 2,
+Line: 1,
+Column: 3,
+},
+End: hcl.Pos{
+Byte: 21,
+Line: 1,
+Column: 22,
+},
+},
+NameRange: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{
+Byte: 2,
+Line: 1,
+Column: 3,
+},
+End: hcl.Pos{
+Byte: 8,
+Line: 1,
+Column: 9,
+},
+},
+},
+},
+MissingItemRange: hcl.Range{
+Filename: "test.json",
+Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
+End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
+},
+},
+0,
+},
 {
 `{"name":"Ermintrude"}`,
 &hcl.BodySchema{
@@ -197,8 +317,8 @@ func TestBodyPartialContent(t *testing.T) {
 Type: "resource",
 Labels: []string{},
 Body: &body{
-obj: &objectVal{
-Attrs: map[string]*objectAttr{},
+val: &objectVal{
+Attrs: []*objectAttr{},
 SrcRange: hcl.Range{
 Filename: "test.json",
 Start: hcl.Pos{
@@ -294,8 +414,8 @@ func TestBodyPartialContent(t *testing.T) {
 Type: "resource",
 Labels: []string{},
 Body: &body{
-obj: &objectVal{
-Attrs: map[string]*objectAttr{},
+val: &objectVal{
+Attrs: []*objectAttr{},
 SrcRange: hcl.Range{
 Filename: "test.json",
 Start: hcl.Pos{
@@ -370,8 +490,8 @@ func TestBodyPartialContent(t *testing.T) {
 Type: "resource",
 Labels: []string{},
 Body: &body{
-obj: &objectVal{
-Attrs: map[string]*objectAttr{},
+val: &objectVal{
+Attrs: []*objectAttr{},
 SrcRange: hcl.Range{
 Filename: "test.json",
 Start: hcl.Pos{
@@ -468,8 +588,8 @@ func TestBodyPartialContent(t *testing.T) {
 Type: "resource",
 Labels: []string{"foo_instance", "bar"},
 Body: &body{
-obj: &objectVal{
-Attrs: map[string]*objectAttr{},
+val: &objectVal{
+Attrs: []*objectAttr{},
 SrcRange: hcl.Range{
 Filename: "test.json",
 Start: hcl.Pos{
@@ -576,6 +696,234 @@ func TestBodyPartialContent(t *testing.T) {
 			},
 			0,
 		},
+		{
+			`{"resource":{"foo_instance":[{"bar":{}}, {"bar":{}}]}}`,
+			&hcl.BodySchema{
+				Blocks: []hcl.BlockHeaderSchema{
+					{
+						Type: "resource",
+						LabelNames: []string{"type", "name"},
+					},
+				},
+			},
+			&hcl.BodyContent{
+				Attributes: map[string]*hcl.Attribute{},
+				Blocks: hcl.Blocks{
+					{
+						Type: "resource",
+						Labels: []string{"foo_instance", "bar"},
+						Body: &body{
+							val: &objectVal{
+								Attrs: []*objectAttr{},
+								SrcRange: hcl.Range{
+									Filename: "test.json",
+									Start: hcl.Pos{
+										Byte: 36,
+										Line: 1,
+										Column: 37,
+									},
+									End: hcl.Pos{
+										Byte: 38,
+										Line: 1,
+										Column: 39,
+									},
+								},
+								OpenRange: hcl.Range{
+									Filename: "test.json",
+									Start: hcl.Pos{
+										Byte: 36,
+										Line: 1,
+										Column: 37,
+									},
+									End: hcl.Pos{
+										Byte: 37,
+										Line: 1,
+										Column: 38,
+									},
+								},
+								CloseRange: hcl.Range{
+									Filename: "test.json",
+									Start: hcl.Pos{
+										Byte: 37,
+										Line: 1,
+										Column: 38,
+									},
+									End: hcl.Pos{
+										Byte: 38,
+										Line: 1,
+										Column: 39,
+									},
+								},
+							},
+						},
+
+						DefRange: hcl.Range{
+							Filename: "test.json",
+							Start: hcl.Pos{
+								Byte: 36,
+								Line: 1,
+								Column: 37,
+							},
+							End: hcl.Pos{
+								Byte: 37,
+								Line: 1,
+								Column: 38,
+							},
+						},
+						TypeRange: hcl.Range{
+							Filename: "test.json",
+							Start: hcl.Pos{
+								Byte: 1,
+								Line: 1,
+								Column: 2,
+							},
+							End: hcl.Pos{
+								Byte: 11,
+								Line: 1,
+								Column: 12,
+							},
+						},
+						LabelRanges: []hcl.Range{
+							{
+								Filename: "test.json",
+								Start: hcl.Pos{
+									Byte: 13,
+									Line: 1,
+									Column: 14,
+								},
+								End: hcl.Pos{
+									Byte: 27,
+									Line: 1,
+									Column: 28,
+								},
+							},
+							{
+								Filename: "test.json",
+								Start: hcl.Pos{
+									Byte: 30,
+									Line: 1,
+									Column: 31,
+								},
+								End: hcl.Pos{
+									Byte: 35,
+									Line: 1,
+									Column: 36,
+								},
+							},
+						},
+					},
+					{
+						Type: "resource",
+						Labels: []string{"foo_instance", "bar"},
+						Body: &body{
+							val: &objectVal{
+								Attrs: []*objectAttr{},
+								SrcRange: hcl.Range{
+									Filename: "test.json",
+									Start: hcl.Pos{
+										Byte: 36,
+										Line: 1,
+										Column: 37,
+									},
+									End: hcl.Pos{
+										Byte: 38,
+										Line: 1,
+										Column: 39,
+									},
+								},
+								OpenRange: hcl.Range{
+									Filename: "test.json",
+									Start: hcl.Pos{
+										Byte: 36,
+										Line: 1,
+										Column: 37,
+									},
+									End: hcl.Pos{
+										Byte: 37,
+										Line: 1,
+										Column: 38,
+									},
+								},
+								CloseRange: hcl.Range{
+									Filename: "test.json",
+									Start: hcl.Pos{
+										Byte: 37,
+										Line: 1,
+										Column: 38,
+									},
+									End: hcl.Pos{
+										Byte: 38,
+										Line: 1,
+										Column: 39,
+									},
+								},
+							},
+						},
+
+						DefRange: hcl.Range{
+							Filename: "test.json",
+							Start: hcl.Pos{
+								Byte: 48,
+								Line: 1,
+								Column: 49,
+							},
+							End: hcl.Pos{
+								Byte: 49,
+								Line: 1,
+								Column: 50,
+							},
+						},
+						TypeRange: hcl.Range{
+							Filename: "test.json",
+							Start: hcl.Pos{
+								Byte: 1,
+								Line: 1,
+								Column: 2,
+							},
+							End: hcl.Pos{
+								Byte: 11,
+								Line: 1,
+								Column: 12,
+							},
+						},
+						LabelRanges: []hcl.Range{
+							{
+								Filename: "test.json",
+								Start: hcl.Pos{
+									Byte: 13,
+									Line: 1,
+									Column: 14,
+								},
+								End: hcl.Pos{
+									Byte: 27,
+									Line: 1,
+									Column: 28,
+								},
+							},
+							{
+								Filename: "test.json",
+								Start: hcl.Pos{
+									Byte: 42,
+									Line: 1,
+									Column: 43,
+								},
+								End: hcl.Pos{
+									Byte: 47,
+									Line: 1,
+									Column: 48,
+								},
+							},
+						},
+					},
+				},
+				MissingItemRange: hcl.Range{
+					Filename: "test.json",
+					Start: hcl.Pos{Line: 1, Column: 54, Byte: 53},
+					End: hcl.Pos{Line: 1, Column: 55, Byte: 54},
+				},
+			},
+			0,
+		},
 		{
 			`{"name":"Ermintrude"}`,
 			&hcl.BodySchema{
@@ -593,7 +941,74 @@ func TestBodyPartialContent(t *testing.T) {
 					End: hcl.Pos{Line: 1, Column: 22, Byte: 21},
 				},
 			},
-			1,
+			1, // name is supposed to be a block
+		},
+		{
+			`[{"name":"Ermintrude"},{"name":"Ermintrude"}]`,
+			&hcl.BodySchema{
+				Attributes: []hcl.AttributeSchema{
+					{
+						Name: "name",
+					},
+				},
+			},
+			&hcl.BodyContent{
+				Attributes: map[string]*hcl.Attribute{
+					"name": {
+						Name: "name",
+						Expr: &expression{
+							src: &stringVal{
+								Value: "Ermintrude",
+								SrcRange: hcl.Range{
+									Filename: "test.json",
+									Start: hcl.Pos{
+										Byte: 8,
+										Line: 1,
+										Column: 9,
+									},
+									End: hcl.Pos{
+										Byte: 20,
+										Line: 1,
+										Column: 21,
+									},
+								},
+							},
+						},
+						Range: hcl.Range{
+							Filename: "test.json",
+							Start: hcl.Pos{
+								Byte: 2,
+								Line: 1,
+								Column: 3,
+							},
+							End: hcl.Pos{
+								Byte: 21,
+								Line: 1,
+								Column: 22,
+							},
+						},
+						NameRange: hcl.Range{
+							Filename: "test.json",
+							Start: hcl.Pos{
+								Byte: 2,
+								Line: 1,
+								Column: 3,
+							},
+							End: hcl.Pos{
+								Byte: 8,
+								Line: 1,
+								Column: 9,
+							},
+						},
+					},
+				},
+				MissingItemRange: hcl.Range{
+					Filename: "test.json",
+					Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
+					End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
+				},
+			},
+			1, // "name" attribute is defined twice
 		},
 	}
 
@@ -611,8 +1026,8 @@ func TestBodyPartialContent(t *testing.T) {
 				}
 			}
 
-			if !reflect.DeepEqual(got, test.want) {
-				t.Errorf("wrong result\ngot: %s\nwant: %s", spew.Sdump(got), spew.Sdump(test.want))
+			for _, problem := range deep.Equal(got, test.want) {
+				t.Error(problem)
 			}
 		})
 	}
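The new test cases above only state the expected decoder output. As a rough illustration of the behaviour they exercise, the standalone sketch below feeds the same array-of-objects document through the public parsing API and enumerates the resulting blocks. It is illustrative only and assumes the present-day import paths github.com/hashicorp/hcl/v2 and github.com/hashicorp/hcl/v2/json (at the time of this change the package lived under hashicorp/hcl2), so adjust the module path to the revision you are building against.

package main

import (
	"fmt"

	"github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/json"
)

func main() {
	// Same input shape as the new test case: a JSON array of objects stands
	// in for the body under the "foo_instance" label, producing two "bar"
	// blocks with identical type and labels.
	src := `{"resource":{"foo_instance":[{"bar":{}}, {"bar":{}}]}}`

	f, diags := json.Parse([]byte(src), "test.json")
	if diags.HasErrors() {
		panic(diags.Error())
	}

	schema := &hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "resource", LabelNames: []string{"type", "name"}},
		},
	}

	content, _, moreDiags := f.Body.PartialContent(schema)
	if moreDiags.HasErrors() {
		panic(moreDiags.Error())
	}

	for _, block := range content.Blocks {
		// Prints "resource [foo_instance bar]" twice, in source order.
		fmt.Println(block.Type, block.Labels)
	}
}

The two expected entries in hcl.Blocks correspond to the two elements of the JSON array, preserved in their original order.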