hcl/json: allow more flexible use of arrays when describing bodies

Previously we allowed arrays only at the "leaf" of a set of objects
describing a block and its labels. This was not sufficient because it
made it impossible to preserve the relative ordering of a sequence of
blocks that have different block types or labels.
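
For example (hypothetical block type names, not taken from this change),
a body whose author wrote blocks in the order a, b, a has no faithful
leaf-array representation: both "a" blocks must share a single property,
so their position relative to "b" is lost.

```json
{
  "a": [
    {"child_attr": "first"},
    {"child_attr": "third"}
  ],
  "b": [
    {"child_attr": "second"}
  ]
}
```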

The spec now allows arrays of objects to be used in place of single
objects when that value is representing either an HCL body or a set of
labels on a nested block. This relaxation does not apply to JSON objects
interpreted as expressions or bodies interpreted in dynamic attributes
mode, since there is no requirement to preserve attribute ordering or
support duplicate property names in those scenarios.
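
As a sketch of the new form (same hypothetical names), the body itself
may now be an array of objects, so the a, b, a ordering survives even
when produced by a serializer that cannot emit ordered or duplicate
object properties:

```json
[
  {"a": {"child_attr": "first"}},
  {"b": {"child_attr": "second"}},
  {"a": {"child_attr": "third"}}
]
```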

This new model imposes additional constraints on the underlying JSON
parser used to interpret JSON HCL: it must now be able to retain the
relative ordering of object keys and accept multiple definitions of the
same key. This requirement is not imposed on _producers_, which remain
free to use the new arrays-of-objects forms to express ordering and
duplicate keys even when their JSON-producing libraries cannot represent
those distinctions directly.
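
The parser in this repository is hand-written (see the json parser
changes below), but as a rough sketch of what "retain ordering and
duplicates" means, here is one way a consumer could do it with Go's
standard token-level decoder; the attr type and decodeObject helper are
illustrative only:

```go
package main

import (
	"encoding/json"
	"fmt"
	"strings"
)

// attr is one key/value pair from a JSON object, kept in source order.
// Duplicate names are appended rather than overwritten.
type attr struct {
	Name  string
	Value interface{}
}

// decodeObject reads the members of a JSON object whose opening '{' has
// already been consumed. Nested objects are not given the same treatment
// here and collapse into ordinary (unordered) maps.
func decodeObject(dec *json.Decoder) ([]attr, error) {
	var attrs []attr
	for dec.More() {
		keyTok, err := dec.Token()
		if err != nil {
			return nil, err
		}
		key, ok := keyTok.(string)
		if !ok {
			return nil, fmt.Errorf("expected object key, got %v", keyTok)
		}
		var val interface{}
		if err := dec.Decode(&val); err != nil {
			return nil, err
		}
		attrs = append(attrs, attr{Name: key, Value: val})
	}
	if _, err := dec.Token(); err != nil { // consume the closing '}'
		return nil, err
	}
	return attrs, nil
}

func main() {
	dec := json.NewDecoder(strings.NewReader(`{"a": 1, "b": 2, "a": 3}`))
	if _, err := dec.Token(); err != nil { // consume the opening '{'
		panic(err)
	}
	attrs, err := decodeObject(dec)
	if err != nil {
		panic(err)
	}
	fmt.Println(attrs) // [{a 1} {b 2} {a 3}]: order and duplicates kept
}
```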

Since we are now requiring a specialized parser anyway, we also require
that it be able to represent numbers at full precision, whereas previously
we made some allowances for implementations that could not support this.
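
On the precision point, a minimal sketch (not code from this commit) of
how a Go consumer can avoid float64 rounding by combining UseNumber with
math/big; the 512-bit precision is an arbitrary choice for illustration:

```go
package main

import (
	"encoding/json"
	"fmt"
	"math/big"
	"strings"
)

func main() {
	src := `3.14159265358979323846264338327950288419716939937510`
	dec := json.NewDecoder(strings.NewReader(src))
	dec.UseNumber() // numbers decode to json.Number instead of float64

	var v interface{}
	if err := dec.Decode(&v); err != nil {
		panic(err)
	}
	n := v.(json.Number)

	// Parse the untouched decimal text at high precision.
	f, _, err := big.ParseFloat(n.String(), 10, 512, big.ToNearestEven)
	if err != nil {
		panic(err)
	}
	fmt.Println(f.Text('g', 50)) // many more digits than float64 preserves
}
```
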
Martin Atkins 2018-02-17 10:26:58 -08:00
parent 77dc2cba20
commit eea3a14a71
11 changed files with 912 additions and 175 deletions

@ -61,6 +61,11 @@ func TestTerraformLike(t *testing.T) {
Required: true,
Type: cty.String,
},
"tags": &hcldec.AttrSpec{
Name: "tags",
Required: false,
Type: cty.Map(cty.String),
},
}
securityGroupDecode := &hcldec.ObjectSpec{
"ingress": &hcldec.BlockListSpec{
@ -235,6 +240,10 @@ func TestTerraformLike(t *testing.T) {
wantCfg := cty.ObjectVal(map[string]cty.Value{
"instance_type": cty.StringVal("z3.weedy"),
"image_id": cty.StringVal("image-1234"),
"tags": cty.MapVal(map[string]cty.Value{
"Name": cty.StringVal("foo"),
"Environment": cty.StringVal("prod"),
}),
})
if !cfg.RawEquals(wantCfg) {
t.Errorf("wrong config\ngot: %#v\nwant: %#v", cfg, wantCfg)
@ -280,6 +289,11 @@ resource "happycloud_instance" "test" {
instance_type = "z3.weedy"
image_id = var.image_id
tags = {
"Name" = "foo"
"${"Environment"}" = "prod"
}
depends_on = [
happycloud_security_group.public,
]
@ -318,6 +332,10 @@ const terraformLikeJSON = `
"test": {
"instance_type": "z3.weedy",
"image_id": "${var.image_id}",
"tags": {
"Name": "foo",
"${\"Environment\"}": "prod"
},
"depends_on": [
"happycloud_security_group.public"
]

@ -12,7 +12,7 @@ type node interface {
}
type objectVal struct {
Attrs map[string]*objectAttr
Attrs []*objectAttr
SrcRange hcl.Range // range of the entire object, brace-to-brace
OpenRange hcl.Range // range of the opening brace
CloseRange hcl.Range // range of the closing brace

@ -1,11 +1,12 @@
package json
import (
"fmt"
"strings"
)
type navigation struct {
root *objectVal
root node
}
// Implementation of hcled.ContextString
@ -21,21 +22,49 @@ func (n navigation) ContextString(offset int) string {
steps[i], steps[len(steps)-1-i] = steps[len(steps)-1-i], steps[i]
}
return strings.Join(steps, ".")
ret := strings.Join(steps, "")
if len(ret) > 0 && ret[0] == '.' {
ret = ret[1:]
}
return ret
}
func navigationStepsRev(obj *objectVal, offset int) []string {
// Do any of our properties have an object that contains the target
// offset?
for k, attr := range obj.Attrs {
ov, ok := attr.Value.(*objectVal)
if !ok {
continue
}
func navigationStepsRev(v node, offset int) []string {
switch tv := v.(type) {
case *objectVal:
// Do any of our properties have an object that contains the target
// offset?
for _, attr := range tv.Attrs {
k := attr.Name
av := attr.Value
if ov.SrcRange.ContainsOffset(offset) {
return append(navigationStepsRev(ov, offset), k)
switch av.(type) {
case *objectVal, *arrayVal:
// okay
default:
continue
}
if av.Range().ContainsOffset(offset) {
return append(navigationStepsRev(av, offset), "."+k)
}
}
case *arrayVal:
// Do any of our elements contain the target offset?
for i, elem := range tv.Values {
switch elem.(type) {
case *objectVal, *arrayVal:
// okay
default:
continue
}
if elem.Range().ContainsOffset(offset) {
return append(navigationStepsRev(elem, offset), fmt.Sprintf("[%d]", i))
}
}
}
return nil
}

@ -1,6 +1,7 @@
package json
import (
"fmt"
"strconv"
"testing"
)
@ -13,14 +14,20 @@ func TestNavigationContextString(t *testing.T) {
"null_resource": {
"baz": {
"id": "foo"
}
},
"boz": [
{
"ov": { }
}
]
}
}
}
`
file, diags := Parse([]byte(src), "test.json")
if len(diags) != 0 {
t.Errorf("Unexpected diagnostics: %#v", diags)
fmt.Printf("offset %d\n", diags[0].Subject.Start.Byte)
t.Errorf("Unexpected diagnostics: %s", diags)
}
if file == nil {
t.Fatalf("Got nil file")
@ -36,6 +43,7 @@ func TestNavigationContextString(t *testing.T) {
{36, `resource`},
{60, `resource.null_resource`},
{89, `resource.null_resource.baz`},
{141, `resource.null_resource.boz`},
}
for _, test := range tests {

@ -103,7 +103,7 @@ func parseObject(p *peeker) (node, hcl.Diagnostics) {
var diags hcl.Diagnostics
open := p.Read()
attrs := map[string]*objectAttr{}
attrs := []*objectAttr{}
// recover is used to shift the peeker to what seems to be the end of
// our object, so that when we encounter an error we leave the peeker
@ -191,24 +191,11 @@ Token:
return nil, diags
}
if existing := attrs[key]; existing != nil {
// Generate a diagnostic for the duplicate key, but continue parsing
// anyway since this is a semantic error we can recover from.
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Duplicate JSON object property",
Detail: fmt.Sprintf(
"An property named %q was previously introduced at %s",
key, existing.NameRange.String(),
),
Subject: &keyStrNode.SrcRange,
})
}
attrs[key] = &objectAttr{
attrs = append(attrs, &objectAttr{
Name: key,
Value: valNode,
NameRange: keyStrNode.SrcRange,
}
})
switch p.Peek().Type {
case tokenComma:

@ -2,10 +2,9 @@ package json
import (
"math/big"
"reflect"
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/go-test/deep"
"github.com/hashicorp/hcl2/hcl"
)
@ -222,8 +221,8 @@ func TestParse(t *testing.T) {
{
`{"hello": true}`,
&objectVal{
Attrs: map[string]*objectAttr{
"hello": {
Attrs: []*objectAttr{
{
Name: "hello",
Value: &booleanVal{
Value: true,
@ -256,8 +255,8 @@ func TestParse(t *testing.T) {
{
`{"hello": true, "bye": false}`,
&objectVal{
Attrs: map[string]*objectAttr{
"hello": {
Attrs: []*objectAttr{
{
Name: "hello",
Value: &booleanVal{
Value: true,
@ -271,7 +270,7 @@ func TestParse(t *testing.T) {
End: hcl.Pos{Line: 1, Column: 9, Byte: 8},
},
},
"bye": {
{
Name: "bye",
Value: &booleanVal{
Value: false,
@ -304,7 +303,7 @@ func TestParse(t *testing.T) {
{
`{}`,
&objectVal{
Attrs: map[string]*objectAttr{},
Attrs: []*objectAttr{},
SrcRange: hcl.Range{
Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
End: hcl.Pos{Line: 1, Column: 3, Byte: 2},
@ -355,8 +354,22 @@ func TestParse(t *testing.T) {
{
`{"hello": true, "hello": true}`,
&objectVal{
Attrs: map[string]*objectAttr{
"hello": {
Attrs: []*objectAttr{
{
Name: "hello",
Value: &booleanVal{
Value: true,
SrcRange: hcl.Range{
Start: hcl.Pos{Line: 1, Column: 11, Byte: 10},
End: hcl.Pos{Line: 1, Column: 15, Byte: 14},
},
},
NameRange: hcl.Range{
Start: hcl.Pos{Line: 1, Column: 2, Byte: 1},
End: hcl.Pos{Line: 1, Column: 9, Byte: 8},
},
},
{
Name: "hello",
Value: &booleanVal{
Value: true,
@ -384,7 +397,7 @@ func TestParse(t *testing.T) {
End: hcl.Pos{Line: 1, Column: 31, Byte: 30},
},
},
1,
0,
},
{
`{"hello": true, "hello": true, "hello", true}`,
@ -392,7 +405,7 @@ func TestParse(t *testing.T) {
Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
}},
2,
1, // comma used where colon is expected
},
{
`{"hello", "world"}`,
@ -590,11 +603,10 @@ func TestParse(t *testing.T) {
}
}
if !reflect.DeepEqual(got, test.Want) {
t.Errorf(
"wrong result\ninput: %s\ngot: %s\nwant: %s",
test.Input, spew.Sdump(got), spew.Sdump(test.Want),
)
if diff := deep.Equal(got, test.Want); diff != nil {
for _, problem := range diff {
t.Error(problem)
}
}
})
}

@ -19,19 +19,22 @@ import (
// the subset of data that was able to be parsed, which may be none.
func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
rootNode, diags := parseFileContent(src, filename)
if _, ok := rootNode.(*objectVal); !ok {
switch rootNode.(type) {
case *objectVal, *arrayVal:
// okay
default:
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Root value must be object",
Detail: "The root value in a JSON-based configuration must be a JSON object.",
Detail: "The root value in a JSON-based configuration must be either a JSON object or a JSON array of objects.",
Subject: rootNode.StartRange().Ptr(),
})
// Put in a placeholder objectVal just so the caller always gets
// a valid file, even if it appears empty. This is useful for callers
// that are doing static analysis of possibly-erroneous source code,
// which will try to process the returned file even if we return
// diagnostics of severity error. This way, they'll get a file that
// has an empty body rather than a body that panics when probed.
// Since we've already produced an error message for this being
// invalid, we'll return an empty placeholder here so that trying to
// extract content from our root body won't produce a redundant
// error saying the same thing again in more general terms.
fakePos := hcl.Pos{
Byte: 0,
Line: 1,
@ -43,17 +46,18 @@ func Parse(src []byte, filename string) (*hcl.File, hcl.Diagnostics) {
End: fakePos,
}
rootNode = &objectVal{
Attrs: map[string]*objectAttr{},
Attrs: []*objectAttr{},
SrcRange: fakeRange,
OpenRange: fakeRange,
}
}
file := &hcl.File{
Body: &body{
obj: rootNode.(*objectVal),
val: rootNode,
},
Bytes: src,
Nav: navigation{rootNode.(*objectVal)},
Nav: navigation{rootNode},
}
return file, diags
}

@ -19,7 +19,7 @@ func TestParse_nonObject(t *testing.T) {
if file.Body == nil {
t.Fatalf("got nil Body; want actual body")
}
if file.Body.(*body).obj == nil {
if file.Body.(*body).val == nil {
t.Errorf("got nil Body object; want placeholder object")
}
}

@ -13,19 +13,36 @@ grammar as-is, and merely defines a specific methodology for interpreting
JSON constructs into HCL structural elements and expressions.
This mapping is defined such that valid JSON-serialized HCL input can be
produced using standard JSON implementations in various programming languages.
_produced_ using standard JSON implementations in various programming languages.
_Parsing_ such JSON has some additional constraints beyond what is normally
supported by JSON parsers, though adaptations are defined to allow processing
with an off-the-shelf JSON parser with certain caveats, described in later
sections.
supported by JSON parsers, so a specialized parser may be required that
is able to:
* Preserve the relative ordering of properties defined in an object.
* Preserve multiple definitions of the same property name.
* Preserve numeric values to the precision required by the number type
in [the HCL syntax-agnostic information model](../spec.md).
* Retain source location information for parsed tokens/constructs in order
to produce good error messages.
## Structural Elements
The HCL language-agnostic information model defines a _body_ as an abstract
container for attribute definitions and child blocks. A body is represented
in JSON as a JSON _object_.
[The HCL syntax-agnostic information model](../spec.md) defines a _body_ as an
abstract container for attribute definitions and child blocks. A body is
represented in JSON as either a single JSON object or a JSON array of objects.
As defined in the language-agnostic model, body processing is done in terms
Body processing is in terms of JSON object properties, visited in the order
they appear in the input. Where a body is represented by a single JSON object,
the properties of that object are visited in order. Where a body is
represented by a JSON array, each of its elements are visited in order and
each element has its properties visited in order. If any element of the array
is not a JSON object then the input is erroneous.
When a body is being processed in the _dynamic attributes_ mode, the allowance
of a JSON array in the previous paragraph does not apply and instead a single
JSON object is always required.
As defined in the language-agnostic model, body processing is in terms
of a schema which provides context for interpreting the body's content. For
JSON bodies, the schema is crucial to allow differentiation of attribute
definitions and block definitions, both of which are represented via object
@ -61,14 +78,16 @@ the following provides a definition for that attribute:
### Blocks
Where the given schema describes a block with a given type name, the object
property with the matching name — if present — serves as a definition of
zero or more blocks of that type.
Where the given schema describes a block with a given type name, each object
property with the matching name serves as a definition of zero or more blocks
of that type.
Processing of child blocks is in terms of nested JSON objects and arrays.
If the schema defines one or more _labels_ for the block type, a nested
object is required for each labelling level, with the object keys serving as
the label values at that level.
If the schema defines one or more _labels_ for the block type, a nested JSON
object or JSON array of objects is required for each labelling level. These
are flattened to a single ordered sequence of object properties using the
same algorithm as for body content as defined above. Each object property
serves as a label value at the corresponding level.
After any labelling levels, the next nested value is either a JSON object
representing a single block body, or a JSON array of JSON objects that each
@ -111,7 +130,8 @@ of zero blocks, though generators should prefer to omit the property entirely
in this scenario.
Given a schema that calls for a block type named "foo" with _two_ labels, the
extra label levels must be represented as objects as in the following examples:
extra label levels must be represented as objects or arrays of objects as in
the following examples:
```json
{
@ -132,6 +152,7 @@ extra label levels must be represented as objects as in the following examples:
}
}
```
```json
{
"foo": {
@ -157,10 +178,70 @@ extra label levels must be represented as objects as in the following examples:
}
```
Where multiple definitions are included for the same type and labels, the
JSON array is always the value of the property representing the final label,
and contains objects representing block bodies. It is not valid to use an array
at any other point in the block definition structure.
```json
{
"foo": [
{
"bar": {
"baz": {
"child_attr": "baz"
},
"boz": {
"child_attr": "baz"
}
},
},
{
"bar": {
"baz": [
{
"child_attr": "baz"
},
{
"child_attr": "boz"
}
]
}
}
]
}
```
```json
{
"foo": {
"bar": {
"baz": {
"child_attr": "baz"
},
"boz": {
"child_attr": "baz"
}
},
"bar": {
"baz": [
{
"child_attr": "baz"
},
{
"child_attr": "boz"
}
]
}
}
}
```
Arrays can be introduced at either the label definition or block body
definition levels to define multiple definitions of the same block type
or labels while preserving order.
A JSON HCL parser _must_ support duplicate definitions of the same property
name within a single object, preserving all of them and the relative ordering
between them. The array-based forms are also required so that JSON HCL
configurations can be produced with JSON producing libraries that are not
able to preserve property definition order and multiple definitions of
the same property.
## Expressions
@ -174,17 +255,24 @@ When interpreted as an expression, a JSON object represents a value of a HCL
object type.
Each property of the JSON object represents an attribute of the HCL object type.
The object type is constructed by enumerating the JSON object properties,
creating for each an attribute whose name exactly matches the property name,
and whose type is the result of recursively applying the expression mapping
rules.
The property name string given in the JSON input is interpreted as a string
expression as described below, and its result is converted to string as defined
by the syntax-agnostic information model. If such a conversion is not possible,
an error is produced and evaluation fails.
An instance of the constructed object type is then created, whose values
are interpreted by again recursively applying the mapping rules defined in
this section.
this section to each of the property values.
If any evaluated property name strings produce null values, an error is
produced and evaluation fails. If any produce _unknown_ values, the _entire
object's_ result is an unknown value of the dynamic pseudo-type, signalling
that the type of the object cannot be determined.
It is an error to define the same property name multiple times within a single
JSON object interpreted as an expression.
JSON object interpreted as an expression. In full expression mode, this
constraint applies to the name expression results after conversion to string,
rather than the raw string that may contain interpolation expressions.
### Arrays
@ -205,18 +293,25 @@ section.
When interpreted as an expression, a JSON number represents a HCL number value.
HCL numbers are arbitrary-precision decimal values, so an ideal implementation
of this specification will translate exactly the value given to a number of
corresponding precision.
HCL numbers are arbitrary-precision decimal values, so a JSON HCL parser must
be able to translate exactly the value given to a number of corresponding
precision, within the constraints set by the HCL syntax-agnostic information
model.
In practice, off-the-shelf JSON parsers often do not support customizing the
In practice, off-the-shelf JSON serializers often do not support customizing the
processing of numbers, and instead force processing as 32-bit or 64-bit
floating point values with a potential loss of precision. It is permissable
for a HCL JSON parser to pass on such limitations _if and only if_ the
available precision and other constraints are defined in its documentation.
Calling applications each have differing precision requirements, so calling
applications are free to select an implementation with more limited precision
capabilities should high precision not be required for that application.
floating point values.
A _producer_ of JSON HCL that uses such a serializer can provide numeric values
as JSON strings where they have precision too great for representation in the
serializer's chosen numeric type in situations where the result will be
converted to number (using the standard conversion rules) by a calling
application.
Alternatively, for expressions that are evaluated in full expression mode an
embedded template interpolation can be used to faithfully represent a number,
such as `"${1e150}"`, which will then be evaluated by the underlying HCL native
syntax expression evaluator.
### Boolean Values

@ -6,12 +6,13 @@ import (
"github.com/hashicorp/hcl2/hcl"
"github.com/hashicorp/hcl2/hcl/hclsyntax"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/convert"
)
// body is the implementation of "Body" used for files processed with the JSON
// parser.
type body struct {
obj *objectVal
val node
// If non-nil, the keys of this map cause the corresponding attributes to
// be treated as non-existing. This is used when Body.PartialContent is
@ -43,7 +44,11 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
nameSuggestions = append(nameSuggestions, blockS.Type)
}
for k, attr := range b.obj.Attrs {
jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
diags = append(diags, attrDiags...)
for _, attr := range jsonAttrs {
k := attr.Name
if k == "//" {
// Ignore "//" keys in objects representing bodies, to allow
// their use as comments.
@ -51,16 +56,15 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
}
if _, ok := hiddenAttrs[k]; !ok {
var fixItHint string
suggestion := nameSuggestion(k, nameSuggestions)
if suggestion != "" {
fixItHint = fmt.Sprintf(" Did you mean %q?", suggestion)
suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
}
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Extraneous JSON object property",
Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, fixItHint),
Detail: fmt.Sprintf("No attribute or block type is named %q.%s", k, suggestion),
Subject: &attr.NameRange,
Context: attr.Range().Ptr(),
})
@ -71,16 +75,17 @@ func (b *body) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostic
}
func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) {
var diags hcl.Diagnostics
jsonAttrs, attrDiags := b.collectDeepAttrs(b.val, nil)
diags = append(diags, attrDiags...)
obj := b.obj
jsonAttrs := obj.Attrs
usedNames := map[string]struct{}{}
if b.hiddenAttrs != nil {
for k := range b.hiddenAttrs {
usedNames[k] = struct{}{}
}
}
var diags hcl.Diagnostics
content := &hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{},
@ -89,43 +94,70 @@ func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Bod
MissingItemRange: b.MissingItemRange(),
}
// Create some more convenient data structures for our work below.
attrSchemas := map[string]hcl.AttributeSchema{}
blockSchemas := map[string]hcl.BlockHeaderSchema{}
for _, attrS := range schema.Attributes {
jsonAttr, exists := jsonAttrs[attrS.Name]
_, used := usedNames[attrS.Name]
if used || !exists {
if attrS.Required {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing required attribute",
Detail: fmt.Sprintf("The attribute %q is required, so a JSON object property must be present with this name.", attrS.Name),
Subject: &obj.OpenRange,
})
}
continue
}
content.Attributes[attrS.Name] = &hcl.Attribute{
Name: attrS.Name,
Expr: &expression{src: jsonAttr.Value},
Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
NameRange: jsonAttr.NameRange,
}
usedNames[attrS.Name] = struct{}{}
attrSchemas[attrS.Name] = attrS
}
for _, blockS := range schema.Blocks {
blockSchemas[blockS.Type] = blockS
}
for _, blockS := range schema.Blocks {
jsonAttr, exists := jsonAttrs[blockS.Type]
_, used := usedNames[blockS.Type]
if used || !exists {
usedNames[blockS.Type] = struct{}{}
for _, jsonAttr := range jsonAttrs {
attrName := jsonAttr.Name
if _, used := b.hiddenAttrs[attrName]; used {
continue
}
v := jsonAttr.Value
diags = append(diags, b.unpackBlock(v, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)...)
usedNames[blockS.Type] = struct{}{}
if attrS, defined := attrSchemas[attrName]; defined {
if existing, exists := content.Attributes[attrName]; exists {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Duplicate attribute definition",
Detail: fmt.Sprintf("The attribute %q was already defined at %s.", attrName, existing.Range),
Subject: &jsonAttr.NameRange,
Context: jsonAttr.Range().Ptr(),
})
continue
}
content.Attributes[attrS.Name] = &hcl.Attribute{
Name: attrS.Name,
Expr: &expression{src: jsonAttr.Value},
Range: hcl.RangeBetween(jsonAttr.NameRange, jsonAttr.Value.Range()),
NameRange: jsonAttr.NameRange,
}
usedNames[attrName] = struct{}{}
} else if blockS, defined := blockSchemas[attrName]; defined {
bv := jsonAttr.Value
blockDiags := b.unpackBlock(bv, blockS.Type, &jsonAttr.NameRange, blockS.LabelNames, nil, nil, &content.Blocks)
diags = append(diags, blockDiags...)
usedNames[attrName] = struct{}{}
}
// We ignore anything that isn't defined because that's the
// PartialContent contract. The Content method will catch leftovers.
}
// Make sure we got all the required attributes.
for _, attrS := range schema.Attributes {
if !attrS.Required {
continue
}
if _, defined := content.Attributes[attrS.Name]; !defined {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing required attribute",
Detail: fmt.Sprintf("The attribute %q is required, but no definition was found.", attrS.Name),
Subject: b.MissingItemRange().Ptr(),
})
}
}
unusedBody := &body{
obj: b.obj,
val: b.val,
hiddenAttrs: usedNames,
}
@ -135,8 +167,22 @@ func (b *body) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Bod
// JustAttributes for JSON bodies interprets all properties of the wrapped
// JSON object as attributes and returns them.
func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
var diags hcl.Diagnostics
attrs := make(map[string]*hcl.Attribute)
for name, jsonAttr := range b.obj.Attrs {
obj, ok := b.val.(*objectVal)
if !ok {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: "A JSON object is required here, defining the attributes for this block.",
Subject: b.val.StartRange().Ptr(),
})
return attrs, diags
}
for _, jsonAttr := range obj.Attrs {
name := jsonAttr.Name
if name == "//" {
// Ignore "//" keys in objects representing bodies, to allow
// their use as comments.
@ -156,27 +202,29 @@ func (b *body) JustAttributes() (hcl.Attributes, hcl.Diagnostics) {
// No diagnostics possible here, since the parser already took care of
// finding duplicates and every JSON value can be a valid attribute value.
return attrs, nil
return attrs, diags
}
func (b *body) MissingItemRange() hcl.Range {
return b.obj.CloseRange
switch tv := b.val.(type) {
case *objectVal:
return tv.CloseRange
case *arrayVal:
return tv.OpenRange
default:
// Should not happen in correct operation, but might show up if the
// input is invalid and we are producing partial results.
return tv.StartRange()
}
}
func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labelsLeft []string, labelsUsed []string, labelRanges []hcl.Range, blocks *hcl.Blocks) (diags hcl.Diagnostics) {
if len(labelsLeft) > 0 {
labelName := labelsLeft[0]
ov, ok := v.(*objectVal)
if !ok {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: fmt.Sprintf("A JSON object is required, whose keys represent the %s block's %s.", typeName, labelName),
Subject: v.StartRange().Ptr(),
})
return
}
if len(ov.Attrs) == 0 {
jsonAttrs, attrDiags := b.collectDeepAttrs(v, &labelName)
diags = append(diags, attrDiags...)
if len(jsonAttrs) == 0 {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Missing block label",
@ -187,7 +235,8 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
}
labelsUsed := append(labelsUsed, "")
labelRanges := append(labelRanges, hcl.Range{})
for pk, p := range ov.Attrs {
for _, p := range jsonAttrs {
pk := p.Name
labelsUsed[len(labelsUsed)-1] = pk
labelRanges[len(labelRanges)-1] = p.NameRange
diags = append(diags, b.unpackBlock(p.Value, typeName, typeRange, labelsLeft[1:], labelsUsed, labelRanges, blocks)...)
@ -212,7 +261,7 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
Type: typeName,
Labels: labels,
Body: &body{
obj: tv,
val: tv,
},
DefRange: tv.OpenRange,
@ -222,22 +271,11 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
case *arrayVal:
// Multiple instances of the block
for _, av := range tv.Values {
ov, ok := av.(*objectVal)
if !ok {
diags = diags.Append(&hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: fmt.Sprintf("A JSON object is required, representing the contents of a %q block.", typeName),
Subject: v.StartRange().Ptr(),
})
continue
}
*blocks = append(*blocks, &hcl.Block{
Type: typeName,
Labels: labels,
Body: &body{
obj: ov,
val: av, // might be mistyped; we'll find out when content is requested for this body
},
DefRange: tv.OpenRange,
@ -256,6 +294,74 @@ func (b *body) unpackBlock(v node, typeName string, typeRange *hcl.Range, labels
return
}
// collectDeepAttrs takes either a single object or an array of objects and
// flattens it into a list of object attributes, collecting attributes from
// all of the objects in a given array.
//
// Ordering is preserved, so a list of objects that each have one property
// will result in those properties being returned in the same order as the
// objects appeared in the array.
//
// This is appropriate for use only for objects representing bodies or labels
// within a block.
//
// The labelName argument, if non-null, is used to tailor returned error
// messages to refer to block labels rather than attributes and child blocks.
// It has no other effect.
func (b *body) collectDeepAttrs(v node, labelName *string) ([]*objectAttr, hcl.Diagnostics) {
var diags hcl.Diagnostics
var attrs []*objectAttr
switch tv := v.(type) {
case *objectVal:
attrs = append(attrs, tv.Attrs...)
case *arrayVal:
for _, ev := range tv.Values {
switch tev := ev.(type) {
case *objectVal:
attrs = append(attrs, tev.Attrs...)
default:
if labelName != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: fmt.Sprintf("A JSON object is required here, to specify %s labels for this block.", *labelName),
Subject: ev.StartRange().Ptr(),
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: "A JSON object is required here, to define attributes and child blocks.",
Subject: ev.StartRange().Ptr(),
})
}
}
}
default:
if labelName != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: fmt.Sprintf("Either a JSON object or JSON array of objects is required here, to specify %s labels for this block.", *labelName),
Subject: v.StartRange().Ptr(),
})
} else {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Incorrect JSON value type",
Detail: "Either a JSON object or JSON array of objects is required here, to define attributes and child blocks.",
Subject: v.StartRange().Ptr(),
})
}
}
return attrs, diags
}
func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
switch v := e.src.(type) {
case *stringVal:
@ -301,12 +407,75 @@ func (e *expression) Value(ctx *hcl.EvalContext) (cty.Value, hcl.Diagnostics) {
}
return cty.TupleVal(vals), nil
case *objectVal:
var diags hcl.Diagnostics
attrs := map[string]cty.Value{}
for name, jsonAttr := range v.Attrs {
val, _ := (&expression{src: jsonAttr.Value}).Value(ctx)
attrs[name] = val
attrRanges := map[string]hcl.Range{}
known := true
for _, jsonAttr := range v.Attrs {
// In this one context we allow keys to contain interpolation
// expressions too, assuming we're evaluating in interpolation
// mode. This achieves parity with the native syntax where
// object expressions can have dynamic keys, while block contents
// may not.
name, nameDiags := (&expression{src: &stringVal{
Value: jsonAttr.Name,
SrcRange: jsonAttr.NameRange,
}}).Value(ctx)
val, valDiags := (&expression{src: jsonAttr.Value}).Value(ctx)
diags = append(diags, nameDiags...)
diags = append(diags, valDiags...)
var err error
name, err = convert.Convert(name, cty.String)
if err != nil {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid object key expression",
Detail: fmt.Sprintf("Cannot use this expression as an object key: %s.", err),
Subject: &jsonAttr.NameRange,
})
continue
}
if name.IsNull() {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Invalid object key expression",
Detail: "Cannot use null value as an object key.",
Subject: &jsonAttr.NameRange,
})
continue
}
if !name.IsKnown() {
// This is a bit of a weird case, since our usual rules require
// us to tolerate unknowns and just represent the result as
// best we can but if we don't know the key then we can't
// know the type of our object at all, and thus we must turn
// the whole thing into cty.DynamicVal. This is consistent with
// how this situation is handled in the native syntax.
// We'll keep iterating so we can collect other errors in
// subsequent attributes.
known = false
continue
}
nameStr := name.AsString()
if _, defined := attrs[nameStr]; defined {
diags = append(diags, &hcl.Diagnostic{
Severity: hcl.DiagError,
Summary: "Duplicate object attribute",
Detail: fmt.Sprintf("An attribute named %q was already defined at %s.", nameStr, attrRanges[nameStr]),
Subject: &jsonAttr.NameRange,
})
continue
}
attrs[nameStr] = val
attrRanges[nameStr] = jsonAttr.NameRange
}
return cty.ObjectVal(attrs), nil
if !known {
// We encountered an unknown key somewhere along the way, so
// we can't know what our type will eventually be.
return cty.DynamicVal, diags
}
return cty.ObjectVal(attrs), diags
default:
// Default to DynamicVal so that ASTs containing invalid nodes can
// still be partially-evaluated.

@ -6,6 +6,7 @@ import (
"testing"
"github.com/davecgh/go-spew/spew"
"github.com/go-test/deep"
"github.com/hashicorp/hcl2/hcl"
)
@ -29,6 +30,45 @@ func TestBodyPartialContent(t *testing.T) {
},
0,
},
{
`[]`,
&hcl.BodySchema{},
&hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{},
MissingItemRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
},
},
0,
},
{
`[{}]`,
&hcl.BodySchema{},
&hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{},
MissingItemRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
},
},
0,
},
{
`[[]]`,
&hcl.BodySchema{},
&hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{},
MissingItemRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
},
},
1, // elements of root array must be objects
},
{
`{"//": "comment that should be ignored"}`,
&hcl.BodySchema{},
@ -42,6 +82,19 @@ func TestBodyPartialContent(t *testing.T) {
},
0,
},
{
`{"//": "comment that should be ignored", "//": "another comment"}`,
&hcl.BodySchema{},
&hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{},
MissingItemRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{Line: 1, Column: 65, Byte: 64},
End: hcl.Pos{Line: 1, Column: 66, Byte: 65},
},
},
0,
},
{
`{"name":"Ermintrude"}`,
&hcl.BodySchema{
@ -109,6 +162,73 @@ func TestBodyPartialContent(t *testing.T) {
},
0,
},
{
`[{"name":"Ermintrude"}]`,
&hcl.BodySchema{
Attributes: []hcl.AttributeSchema{
{
Name: "name",
},
},
},
&hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{
"name": &hcl.Attribute{
Name: "name",
Expr: &expression{
src: &stringVal{
Value: "Ermintrude",
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 9,
Line: 1,
Column: 10,
},
End: hcl.Pos{
Byte: 21,
Line: 1,
Column: 22,
},
},
},
},
Range: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 2,
Line: 1,
Column: 3,
},
End: hcl.Pos{
Byte: 21,
Line: 1,
Column: 22,
},
},
NameRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 2,
Line: 1,
Column: 3,
},
End: hcl.Pos{
Byte: 8,
Line: 1,
Column: 9,
},
},
},
},
MissingItemRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
},
},
0,
},
{
`{"name":"Ermintrude"}`,
&hcl.BodySchema{
@ -197,8 +317,8 @@ func TestBodyPartialContent(t *testing.T) {
Type: "resource",
Labels: []string{},
Body: &body{
obj: &objectVal{
Attrs: map[string]*objectAttr{},
val: &objectVal{
Attrs: []*objectAttr{},
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
@ -294,8 +414,8 @@ func TestBodyPartialContent(t *testing.T) {
Type: "resource",
Labels: []string{},
Body: &body{
obj: &objectVal{
Attrs: map[string]*objectAttr{},
val: &objectVal{
Attrs: []*objectAttr{},
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
@ -370,8 +490,8 @@ func TestBodyPartialContent(t *testing.T) {
Type: "resource",
Labels: []string{},
Body: &body{
obj: &objectVal{
Attrs: map[string]*objectAttr{},
val: &objectVal{
Attrs: []*objectAttr{},
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
@ -468,8 +588,8 @@ func TestBodyPartialContent(t *testing.T) {
Type: "resource",
Labels: []string{"foo_instance", "bar"},
Body: &body{
obj: &objectVal{
Attrs: map[string]*objectAttr{},
val: &objectVal{
Attrs: []*objectAttr{},
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
@ -576,6 +696,234 @@ func TestBodyPartialContent(t *testing.T) {
},
0,
},
{
`{"resource":{"foo_instance":[{"bar":{}}, {"bar":{}}]}}`,
&hcl.BodySchema{
Blocks: []hcl.BlockHeaderSchema{
{
Type: "resource",
LabelNames: []string{"type", "name"},
},
},
},
&hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{},
Blocks: hcl.Blocks{
{
Type: "resource",
Labels: []string{"foo_instance", "bar"},
Body: &body{
val: &objectVal{
Attrs: []*objectAttr{},
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 36,
Line: 1,
Column: 37,
},
End: hcl.Pos{
Byte: 38,
Line: 1,
Column: 39,
},
},
OpenRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 36,
Line: 1,
Column: 37,
},
End: hcl.Pos{
Byte: 37,
Line: 1,
Column: 38,
},
},
CloseRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 37,
Line: 1,
Column: 38,
},
End: hcl.Pos{
Byte: 38,
Line: 1,
Column: 39,
},
},
},
},
DefRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 36,
Line: 1,
Column: 37,
},
End: hcl.Pos{
Byte: 37,
Line: 1,
Column: 38,
},
},
TypeRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 1,
Line: 1,
Column: 2,
},
End: hcl.Pos{
Byte: 11,
Line: 1,
Column: 12,
},
},
LabelRanges: []hcl.Range{
{
Filename: "test.json",
Start: hcl.Pos{
Byte: 13,
Line: 1,
Column: 14,
},
End: hcl.Pos{
Byte: 27,
Line: 1,
Column: 28,
},
},
{
Filename: "test.json",
Start: hcl.Pos{
Byte: 30,
Line: 1,
Column: 31,
},
End: hcl.Pos{
Byte: 35,
Line: 1,
Column: 36,
},
},
},
},
{
Type: "resource",
Labels: []string{"foo_instance", "bar"},
Body: &body{
val: &objectVal{
Attrs: []*objectAttr{},
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 36,
Line: 1,
Column: 37,
},
End: hcl.Pos{
Byte: 38,
Line: 1,
Column: 39,
},
},
OpenRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 36,
Line: 1,
Column: 37,
},
End: hcl.Pos{
Byte: 37,
Line: 1,
Column: 38,
},
},
CloseRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 37,
Line: 1,
Column: 38,
},
End: hcl.Pos{
Byte: 38,
Line: 1,
Column: 39,
},
},
},
},
DefRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 48,
Line: 1,
Column: 49,
},
End: hcl.Pos{
Byte: 49,
Line: 1,
Column: 50,
},
},
TypeRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 1,
Line: 1,
Column: 2,
},
End: hcl.Pos{
Byte: 11,
Line: 1,
Column: 12,
},
},
LabelRanges: []hcl.Range{
{
Filename: "test.json",
Start: hcl.Pos{
Byte: 13,
Line: 1,
Column: 14,
},
End: hcl.Pos{
Byte: 27,
Line: 1,
Column: 28,
},
},
{
Filename: "test.json",
Start: hcl.Pos{
Byte: 42,
Line: 1,
Column: 43,
},
End: hcl.Pos{
Byte: 47,
Line: 1,
Column: 48,
},
},
},
},
},
MissingItemRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{Line: 1, Column: 54, Byte: 53},
End: hcl.Pos{Line: 1, Column: 55, Byte: 54},
},
},
0,
},
{
`{"name":"Ermintrude"}`,
&hcl.BodySchema{
@ -593,7 +941,74 @@ func TestBodyPartialContent(t *testing.T) {
End: hcl.Pos{Line: 1, Column: 22, Byte: 21},
},
},
1,
1, // name is supposed to be a block
},
{
`[{"name":"Ermintrude"},{"name":"Ermintrude"}]`,
&hcl.BodySchema{
Attributes: []hcl.AttributeSchema{
{
Name: "name",
},
},
},
&hcl.BodyContent{
Attributes: map[string]*hcl.Attribute{
"name": {
Name: "name",
Expr: &expression{
src: &stringVal{
Value: "Ermintrude",
SrcRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 8,
Line: 1,
Column: 9,
},
End: hcl.Pos{
Byte: 20,
Line: 1,
Column: 21,
},
},
},
},
Range: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 2,
Line: 1,
Column: 3,
},
End: hcl.Pos{
Byte: 21,
Line: 1,
Column: 22,
},
},
NameRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{
Byte: 2,
Line: 1,
Column: 3,
},
End: hcl.Pos{
Byte: 8,
Line: 1,
Column: 9,
},
},
},
},
MissingItemRange: hcl.Range{
Filename: "test.json",
Start: hcl.Pos{Line: 1, Column: 1, Byte: 0},
End: hcl.Pos{Line: 1, Column: 2, Byte: 1},
},
},
1, // "name" attribute is defined twice
},
}
@ -611,8 +1026,8 @@ func TestBodyPartialContent(t *testing.T) {
}
}
if !reflect.DeepEqual(got, test.want) {
t.Errorf("wrong result\ngot: %s\nwant: %s", spew.Sdump(got), spew.Sdump(test.want))
for _, problem := range deep.Equal(got, test.want) {
t.Error(problem)
}
})
}