2017-09-11 23:40:37 +00:00
|
|
|
package hclsyntax
|
2017-06-17 16:05:15 +00:00
|
|
|
|
|
|
|
import (
|
|
|
|
"fmt"
|
|
|
|
"strings"
|
|
|
|
"unicode"
|
|
|
|
|
2020-03-07 01:17:12 +00:00
|
|
|
"github.com/apparentlymart/go-textseg/v12/textseg"
|
2019-09-09 23:08:19 +00:00
|
|
|
"github.com/hashicorp/hcl/v2"
|
2017-06-17 16:05:15 +00:00
|
|
|
"github.com/zclconf/go-cty/cty"
|
|
|
|
)
|
|
|
|
|
2017-09-11 23:40:37 +00:00
|
|
|
func (p *parser) ParseTemplate() (Expression, hcl.Diagnostics) {
|
2018-12-14 01:22:41 +00:00
|
|
|
return p.parseTemplate(TokenEOF, false)
|
2017-06-17 16:05:15 +00:00
|
|
|
}
|
|
|
|
|
2018-12-14 01:22:41 +00:00
|
|
|
func (p *parser) parseTemplate(end TokenType, flushHeredoc bool) (Expression, hcl.Diagnostics) {
|
|
|
|
exprs, passthru, rng, diags := p.parseTemplateInner(end, flushHeredoc)
|
2017-06-17 16:05:15 +00:00
|
|
|
|
2017-06-18 14:44:57 +00:00
|
|
|
if passthru {
|
|
|
|
if len(exprs) != 1 {
|
|
|
|
panic("passthru set with len(exprs) != 1")
|
|
|
|
}
|
|
|
|
return &TemplateWrapExpr{
|
|
|
|
Wrapped: exprs[0],
|
|
|
|
SrcRange: rng,
|
|
|
|
}, diags
|
|
|
|
}
|
2017-06-17 16:05:15 +00:00
|
|
|
|
2017-06-18 14:44:57 +00:00
|
|
|
return &TemplateExpr{
|
|
|
|
Parts: exprs,
|
2017-06-17 16:05:15 +00:00
|
|
|
SrcRange: rng,
|
|
|
|
}, diags
|
|
|
|
}
|
|
|
|
|
2018-12-14 01:22:41 +00:00
|
|
|
func (p *parser) parseTemplateInner(end TokenType, flushHeredoc bool) ([]Expression, bool, hcl.Range, hcl.Diagnostics) {
|
2017-06-17 16:05:15 +00:00
|
|
|
parts, diags := p.parseTemplateParts(end)
|
2018-12-14 01:22:41 +00:00
|
|
|
if flushHeredoc {
|
|
|
|
flushHeredocTemplateParts(parts) // Trim off leading spaces on lines per the flush heredoc spec
|
|
|
|
}
|
2017-06-17 16:05:15 +00:00
|
|
|
tp := templateParser{
|
|
|
|
Tokens: parts.Tokens,
|
|
|
|
SrcRange: parts.SrcRange,
|
|
|
|
}
|
|
|
|
exprs, exprsDiags := tp.parseRoot()
|
|
|
|
diags = append(diags, exprsDiags...)
|
|
|
|
|
2017-06-18 14:44:57 +00:00
|
|
|
passthru := false
|
2017-06-17 16:05:15 +00:00
|
|
|
if len(parts.Tokens) == 2 { // one real token and one synthetic "end" token
|
|
|
|
if _, isInterp := parts.Tokens[0].(*templateInterpToken); isInterp {
|
2017-06-18 14:44:57 +00:00
|
|
|
passthru = true
|
2017-06-17 16:05:15 +00:00
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-06-18 14:44:57 +00:00
|
|
|
return exprs, passthru, parts.SrcRange, diags
|
2017-06-17 16:05:15 +00:00
|
|
|
}
|
|
|
|
|
|
|
|
// templateParser implements the second phase of template parsing: it walks a
// flat sequence of templateToken values (produced by parseTemplateParts) and
// transforms it into an expression tree.
type templateParser struct {
	Tokens   []templateToken // flat token sequence; always terminated by a *templateEndToken
	SrcRange hcl.Range       // source range covering the whole template

	pos int // index into Tokens of the next token to consume
}
|
|
|
|
|
2017-09-11 23:40:37 +00:00
|
|
|
func (p *templateParser) parseRoot() ([]Expression, hcl.Diagnostics) {
|
2017-06-17 16:05:15 +00:00
|
|
|
var exprs []Expression
|
2017-09-11 23:40:37 +00:00
|
|
|
var diags hcl.Diagnostics
|
2017-06-17 16:05:15 +00:00
|
|
|
|
|
|
|
for {
|
|
|
|
next := p.Peek()
|
|
|
|
if _, isEnd := next.(*templateEndToken); isEnd {
|
|
|
|
break
|
|
|
|
}
|
|
|
|
|
|
|
|
expr, exprDiags := p.parseExpr()
|
|
|
|
diags = append(diags, exprDiags...)
|
|
|
|
exprs = append(exprs, expr)
|
|
|
|
}
|
|
|
|
|
|
|
|
return exprs, diags
|
|
|
|
}
|
|
|
|
|
2017-09-11 23:40:37 +00:00
|
|
|
func (p *templateParser) parseExpr() (Expression, hcl.Diagnostics) {
|
2017-06-17 17:07:59 +00:00
|
|
|
next := p.Peek()
|
2017-06-17 16:05:15 +00:00
|
|
|
switch tok := next.(type) {
|
|
|
|
|
|
|
|
case *templateLiteralToken:
|
2017-06-17 17:07:59 +00:00
|
|
|
p.Read() // eat literal
|
2017-06-17 16:05:15 +00:00
|
|
|
return &LiteralValueExpr{
|
|
|
|
Val: cty.StringVal(tok.Val),
|
|
|
|
SrcRange: tok.SrcRange,
|
|
|
|
}, nil
|
|
|
|
|
|
|
|
case *templateInterpToken:
|
2017-06-17 17:07:59 +00:00
|
|
|
p.Read() // eat interp
|
2017-06-17 16:05:15 +00:00
|
|
|
return tok.Expr, nil
|
|
|
|
|
|
|
|
case *templateIfToken:
|
2017-06-17 17:07:59 +00:00
|
|
|
return p.parseIf()
|
2017-06-17 16:05:15 +00:00
|
|
|
|
|
|
|
case *templateForToken:
|
2017-06-18 14:44:57 +00:00
|
|
|
return p.parseFor()
|
2017-06-17 16:05:15 +00:00
|
|
|
|
|
|
|
case *templateEndToken:
|
2017-06-17 17:07:59 +00:00
|
|
|
p.Read() // eat erroneous token
|
2017-09-11 23:40:37 +00:00
|
|
|
return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
|
2017-06-17 16:05:15 +00:00
|
|
|
{
|
|
|
|
// This is a particularly unhelpful diagnostic, so callers
|
|
|
|
// should attempt to pre-empt it and produce a more helpful
|
|
|
|
// diagnostic that is context-aware.
|
2017-09-11 23:40:37 +00:00
|
|
|
Severity: hcl.DiagError,
|
2017-06-17 16:05:15 +00:00
|
|
|
Summary: "Unexpected end of template",
|
|
|
|
Detail: "The control directives within this template are unbalanced.",
|
|
|
|
Subject: &tok.SrcRange,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
case *templateEndCtrlToken:
|
2017-06-17 17:07:59 +00:00
|
|
|
p.Read() // eat erroneous token
|
2017-09-11 23:40:37 +00:00
|
|
|
return errPlaceholderExpr(tok.SrcRange), hcl.Diagnostics{
|
2017-06-17 16:05:15 +00:00
|
|
|
{
|
2017-09-11 23:40:37 +00:00
|
|
|
Severity: hcl.DiagError,
|
2017-06-17 16:05:15 +00:00
|
|
|
Summary: fmt.Sprintf("Unexpected %s directive", tok.Name()),
|
|
|
|
Detail: "The control directives within this template are unbalanced.",
|
|
|
|
Subject: &tok.SrcRange,
|
|
|
|
},
|
|
|
|
}
|
|
|
|
|
|
|
|
default:
|
|
|
|
// should never happen, because above should be exhaustive
|
|
|
|
panic(fmt.Sprintf("unhandled template token type %T", next))
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
2017-09-11 23:40:37 +00:00
|
|
|
// parseIf parses an if/else/endif control structure whose opening token is
// the current token (which must be a *templateIfToken), returning a
// ConditionalExpr whose true and false results are templates built from the
// tokens between the directives.
func (p *templateParser) parseIf() (Expression, hcl.Diagnostics) {
	open := p.Read()
	openIf, isIf := open.(*templateIfToken)
	if !isIf {
		// should never happen if caller is behaving
		panic("parseIf called with peeker not pointing at if token")
	}

	var ifExprs, elseExprs []Expression
	var diags hcl.Diagnostics
	var endifRange hcl.Range

	// currentExprs points at whichever branch we're currently appending to;
	// it flips from ifExprs to elseExprs when an else directive is seen.
	currentExprs := &ifExprs
Token:
	for {
		next := p.Peek()
		if end, isEnd := next.(*templateEndToken); isEnd {
			// The template ran out before we found an endif.
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unexpected end of template",
				Detail: fmt.Sprintf(
					"The if directive at %s is missing its corresponding endif directive.",
					openIf.SrcRange,
				),
				Subject: &end.SrcRange,
			})
			return errPlaceholderExpr(end.SrcRange), diags
		}
		if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
			p.Read() // eat end directive

			switch end.Type {

			case templateElse:
				if currentExprs == &ifExprs {
					// Switch to collecting the else branch.
					currentExprs = &elseExprs
					continue Token
				}

				// A second else for the same if is an error; fall through to
				// the shared error-placeholder return below.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unexpected else directive",
					Detail: fmt.Sprintf(
						"Already in the else clause for the if started at %s.",
						openIf.SrcRange,
					),
					Subject: &end.SrcRange,
				})

			case templateEndIf:
				endifRange = end.SrcRange
				break Token

			default:
				// e.g. an endfor where an endif was expected.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Unexpected %s directive", end.Name()),
					Detail: fmt.Sprintf(
						"Expecting an endif directive for the if started at %s.",
						openIf.SrcRange,
					),
					Subject: &end.SrcRange,
				})
			}

			return errPlaceholderExpr(end.SrcRange), diags
		}

		// Any other token is body content for the current branch.
		expr, exprDiags := p.parseExpr()
		diags = append(diags, exprDiags...)
		*currentExprs = append(*currentExprs, expr)
	}

	// An empty branch renders as an empty string, anchored at a
	// zero-length range so diagnostics still point somewhere sensible.
	if len(ifExprs) == 0 {
		ifExprs = append(ifExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: openIf.SrcRange.Filename,
				Start:    openIf.SrcRange.End,
				End:      openIf.SrcRange.End,
			},
		})
	}
	if len(elseExprs) == 0 {
		elseExprs = append(elseExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: endifRange.Filename,
				Start:    endifRange.Start,
				End:      endifRange.Start,
			},
		})
	}

	trueExpr := &TemplateExpr{
		Parts:    ifExprs,
		SrcRange: hcl.RangeBetween(ifExprs[0].Range(), ifExprs[len(ifExprs)-1].Range()),
	}
	falseExpr := &TemplateExpr{
		Parts:    elseExprs,
		SrcRange: hcl.RangeBetween(elseExprs[0].Range(), elseExprs[len(elseExprs)-1].Range()),
	}

	return &ConditionalExpr{
		Condition:   openIf.CondExpr,
		TrueResult:  trueExpr,
		FalseResult: falseExpr,

		SrcRange: hcl.RangeBetween(openIf.SrcRange, endifRange),
	}, diags
}
|
|
|
|
|
2017-09-11 23:40:37 +00:00
|
|
|
// parseFor parses a for/endfor control structure whose opening token is the
// current token (which must be a *templateForToken). The result is a
// TemplateJoinExpr wrapping a ForExpr, so each iteration's template result
// is concatenated into a single string.
func (p *templateParser) parseFor() (Expression, hcl.Diagnostics) {
	open := p.Read()
	openFor, isFor := open.(*templateForToken)
	if !isFor {
		// should never happen if caller is behaving
		panic("parseFor called with peeker not pointing at for token")
	}

	var contentExprs []Expression
	var diags hcl.Diagnostics
	var endforRange hcl.Range

Token:
	for {
		next := p.Peek()
		if end, isEnd := next.(*templateEndToken); isEnd {
			// The template ran out before we found an endfor.
			diags = append(diags, &hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  "Unexpected end of template",
				Detail: fmt.Sprintf(
					"The for directive at %s is missing its corresponding endfor directive.",
					openFor.SrcRange,
				),
				Subject: &end.SrcRange,
			})
			return errPlaceholderExpr(end.SrcRange), diags
		}
		if end, isCtrlEnd := next.(*templateEndCtrlToken); isCtrlEnd {
			p.Read() // eat end directive

			switch end.Type {

			case templateElse:
				// for has no else clause; report it and fall through to the
				// shared error-placeholder return below.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unexpected else directive",
					Detail:   "An else clause is not expected for a for directive.",
					Subject:  &end.SrcRange,
				})

			case templateEndFor:
				endforRange = end.SrcRange
				break Token

			default:
				// e.g. an endif where an endfor was expected.
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  fmt.Sprintf("Unexpected %s directive", end.Name()),
					Detail: fmt.Sprintf(
						"Expecting an endfor directive corresponding to the for directive at %s.",
						openFor.SrcRange,
					),
					Subject: &end.SrcRange,
				})
			}

			return errPlaceholderExpr(end.SrcRange), diags
		}

		// Any other token is loop body content.
		expr, exprDiags := p.parseExpr()
		diags = append(diags, exprDiags...)
		contentExprs = append(contentExprs, expr)
	}

	// An empty body renders as an empty string per iteration, anchored at a
	// zero-length range just after the opening directive.
	if len(contentExprs) == 0 {
		contentExprs = append(contentExprs, &LiteralValueExpr{
			Val: cty.StringVal(""),
			SrcRange: hcl.Range{
				Filename: openFor.SrcRange.Filename,
				Start:    openFor.SrcRange.End,
				End:      openFor.SrcRange.End,
			},
		})
	}

	contentExpr := &TemplateExpr{
		Parts:    contentExprs,
		SrcRange: hcl.RangeBetween(contentExprs[0].Range(), contentExprs[len(contentExprs)-1].Range()),
	}

	forExpr := &ForExpr{
		KeyVar: openFor.KeyVar,
		ValVar: openFor.ValVar,

		CollExpr: openFor.CollExpr,
		ValExpr:  contentExpr,

		SrcRange:   hcl.RangeBetween(openFor.SrcRange, endforRange),
		OpenRange:  openFor.SrcRange,
		CloseRange: endforRange,
	}

	return &TemplateJoinExpr{
		Tuple: forExpr,
	}, diags
}
|
|
|
|
|
2017-06-17 16:05:15 +00:00
|
|
|
func (p *templateParser) Peek() templateToken {
|
|
|
|
return p.Tokens[p.pos]
|
|
|
|
}
|
|
|
|
|
|
|
|
func (p *templateParser) Read() templateToken {
|
|
|
|
ret := p.Peek()
|
|
|
|
if _, end := ret.(*templateEndToken); !end {
|
|
|
|
p.pos++
|
|
|
|
}
|
|
|
|
return ret
|
|
|
|
}
|
|
|
|
|
|
|
|
// parseTemplateParts produces a flat sequence of "template tokens", which are
// either literal values (with any "trimming" already applied), interpolation
// sequences, or control flow markers.
//
// A further pass is required on the result to turn it into an AST.
func (p *parser) parseTemplateParts(end TokenType) (*templateParts, hcl.Diagnostics) {
	var parts []templateToken
	var diags hcl.Diagnostics

	startRange := p.NextRange()
	// ltrimNext/nextCanTrimPrev carry the ~ trimming markers across loop
	// iterations: ltrimNext asks the next literal to trim its leading
	// whitespace, and nextCanTrimPrev records that the previous token was a
	// literal whose trailing whitespace may be trimmed by a following ${~/%{~.
	ltrimNext := false
	nextCanTrimPrev := false
	var endRange hcl.Range

Token:
	for {
		next := p.Read()
		if next.Type == end {
			// all done!
			endRange = next.Range
			break
		}

		// Latch the trimming flags for this iteration and reset them; each
		// case below re-arms them as appropriate.
		ltrim := ltrimNext
		ltrimNext = false
		canTrimPrev := nextCanTrimPrev
		nextCanTrimPrev = false

		switch next.Type {
		case TokenStringLit, TokenQuotedLit:
			str, strDiags := ParseStringLiteralToken(next)
			diags = append(diags, strDiags...)

			if ltrim {
				str = strings.TrimLeftFunc(str, unicode.IsSpace)
			}

			parts = append(parts, &templateLiteralToken{
				Val:      str,
				SrcRange: next.Range,
			})
			nextCanTrimPrev = true

		case TokenTemplateInterp:
			// if the opener is ${~ then we want to eat any trailing whitespace
			// in the preceding literal token, assuming it is indeed a literal
			// token.
			if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
				prevExpr := parts[len(parts)-1]
				if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
					lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
				}
			}

			// Newlines are insignificant inside an interpolation sequence.
			p.PushIncludeNewlines(false)
			expr, exprDiags := p.ParseExpression()
			diags = append(diags, exprDiags...)
			close := p.Peek()
			if close.Type != TokenTemplateSeqEnd {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Extra characters after interpolation expression",
						Detail:   "Expected a closing brace to end the interpolation expression, but found extra characters.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(startRange, close.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
			} else {
				p.Read() // eat closing brace

				// If the closer is ~} then we want to eat any leading
				// whitespace on the next token, if it turns out to be a
				// literal token.
				if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
					ltrimNext = true
				}
			}
			p.PopIncludeNewlines()
			parts = append(parts, &templateInterpToken{
				Expr:     expr,
				SrcRange: hcl.RangeBetween(next.Range, close.Range),
			})

		case TokenTemplateControl:
			// if the opener is %{~ then we want to eat any trailing whitespace
			// in the preceding literal token, assuming it is indeed a literal
			// token.
			if canTrimPrev && len(next.Bytes) == 3 && next.Bytes[2] == '~' && len(parts) > 0 {
				prevExpr := parts[len(parts)-1]
				if lexpr, ok := prevExpr.(*templateLiteralToken); ok {
					lexpr.Val = strings.TrimRightFunc(lexpr.Val, unicode.IsSpace)
				}
			}
			p.PushIncludeNewlines(false)

			kw := p.Peek()
			if kw.Type != TokenIdent {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid template directive",
						Detail:   "A template directive keyword (\"if\", \"for\", etc) is expected at the beginning of a %{ sequence.",
						Subject:  &kw.Range,
						Context:  hcl.RangeBetween(next.Range, kw.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
				p.PopIncludeNewlines()
				continue Token
			}
			p.Read() // eat keyword token

			switch {

			case ifKeyword.TokenMatches(kw):
				condExpr, exprDiags := p.ParseExpression()
				diags = append(diags, exprDiags...)
				parts = append(parts, &templateIfToken{
					CondExpr: condExpr,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case elseKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateElse,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case endifKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateEndIf,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case forKeyword.TokenMatches(kw):
				var keyName, valName string
				if p.Peek().Type != TokenIdent {
					if !p.recovery {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' directive",
							Detail:   "For directive requires variable name after 'for'.",
							Subject:  p.Peek().Range.Ptr(),
						})
					}
					p.recover(TokenTemplateSeqEnd)
					p.PopIncludeNewlines()
					continue Token
				}

				valName = string(p.Read().Bytes)

				if p.Peek().Type == TokenComma {
					// What we just read was actually the key, then.
					keyName = valName
					p.Read() // eat comma

					if p.Peek().Type != TokenIdent {
						if !p.recovery {
							diags = append(diags, &hcl.Diagnostic{
								Severity: hcl.DiagError,
								Summary:  "Invalid 'for' directive",
								Detail:   "For directive requires value variable name after comma.",
								Subject:  p.Peek().Range.Ptr(),
							})
						}
						p.recover(TokenTemplateSeqEnd)
						p.PopIncludeNewlines()
						continue Token
					}

					valName = string(p.Read().Bytes)
				}

				if !inKeyword.TokenMatches(p.Peek()) {
					if !p.recovery {
						diags = append(diags, &hcl.Diagnostic{
							Severity: hcl.DiagError,
							Summary:  "Invalid 'for' directive",
							Detail:   "For directive requires 'in' keyword after names.",
							Subject:  p.Peek().Range.Ptr(),
						})
					}
					p.recover(TokenTemplateSeqEnd)
					p.PopIncludeNewlines()
					continue Token
				}
				p.Read() // eat 'in' keyword

				collExpr, collDiags := p.ParseExpression()
				diags = append(diags, collDiags...)
				parts = append(parts, &templateForToken{
					KeyVar:   keyName,
					ValVar:   valName,
					CollExpr: collExpr,

					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			case endforKeyword.TokenMatches(kw):
				parts = append(parts, &templateEndCtrlToken{
					Type:     templateEndFor,
					SrcRange: hcl.RangeBetween(next.Range, p.NextRange()),
				})

			default:
				if !p.recovery {
					suggestions := []string{"if", "for", "else", "endif", "endfor"}
					given := string(kw.Bytes)
					suggestion := nameSuggestion(given, suggestions)
					if suggestion != "" {
						suggestion = fmt.Sprintf(" Did you mean %q?", suggestion)
					}

					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  "Invalid template control keyword",
						Detail:   fmt.Sprintf("%q is not a valid template control keyword.%s", given, suggestion),
						Subject:  &kw.Range,
						Context:  hcl.RangeBetween(next.Range, kw.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
				p.PopIncludeNewlines()
				continue Token

			}

			// All of the directive cases above (except the error paths that
			// continue Token) fall through here to consume the closing brace.
			close := p.Peek()
			if close.Type != TokenTemplateSeqEnd {
				if !p.recovery {
					diags = append(diags, &hcl.Diagnostic{
						Severity: hcl.DiagError,
						Summary:  fmt.Sprintf("Extra characters in %s marker", kw.Bytes),
						Detail:   "Expected a closing brace to end the sequence, but found extra characters.",
						Subject:  &close.Range,
						Context:  hcl.RangeBetween(startRange, close.Range).Ptr(),
					})
				}
				p.recover(TokenTemplateSeqEnd)
			} else {
				p.Read() // eat closing brace

				// If the closer is ~} then we want to eat any leading
				// whitespace on the next token, if it turns out to be a
				// literal token.
				if len(close.Bytes) == 2 && close.Bytes[0] == '~' {
					ltrimNext = true
				}
			}
			p.PopIncludeNewlines()

		default:
			if !p.recovery {
				diags = append(diags, &hcl.Diagnostic{
					Severity: hcl.DiagError,
					Summary:  "Unterminated template string",
					Detail:   "No closing marker was found for the string.",
					Subject:  &next.Range,
					Context:  hcl.RangeBetween(startRange, next.Range).Ptr(),
				})
			}
			final := p.recover(end)
			endRange = final.Range
			break Token
		}
	}

	if len(parts) == 0 {
		// If a sequence has no content, we'll treat it as if it had an
		// empty string in it because that's what the user probably means
		// if they write "" in configuration.
		parts = append(parts, &templateLiteralToken{
			Val: "",
			SrcRange: hcl.Range{
				// Range is the zero-character span immediately after the
				// opening quote.
				Filename: startRange.Filename,
				Start:    startRange.End,
				End:      startRange.End,
			},
		})
	}

	// Always end with an end token, so the parser can produce diagnostics
	// about unclosed items with proper position information.
	parts = append(parts, &templateEndToken{
		SrcRange: endRange,
	})

	ret := &templateParts{
		Tokens:   parts,
		SrcRange: hcl.RangeBetween(startRange, endRange),
	}

	return ret, diags
}
|
|
|
|
|
2018-12-14 01:22:41 +00:00
|
|
|
// flushHeredocTemplateParts modifies in-place the line-leading literal strings
// to apply the flush heredoc processing rule: find the line with the smallest
// number of whitespace characters as prefix and then trim that number of
// characters from all of the lines.
//
// This rule is applied to static tokens rather than to the rendered result,
// so interpolating a string with leading whitespace cannot affect the chosen
// prefix length.
func flushHeredocTemplateParts(parts *templateParts) {
	if len(parts.Tokens) == 0 {
		// Nothing to do
		return
	}

	const maxInt = int((^uint(0)) >> 1)

	// First pass: find the minimum leading-whitespace count (measured in
	// grapheme clusters) over all line-starting tokens, collecting the
	// literal tokens that will need trimming.
	minSpaces := maxInt
	newline := true
	var adjust []*templateLiteralToken
	for _, ttok := range parts.Tokens {
		if newline {
			newline = false
			// A non-literal token at the start of a line leaves spaces at
			// its zero value, forcing minSpaces to 0 below.
			var spaces int
			if lit, ok := ttok.(*templateLiteralToken); ok {
				orig := lit.Val
				trimmed := strings.TrimLeftFunc(orig, unicode.IsSpace)
				// If a token is entirely spaces and ends with a newline
				// then it's a "blank line" and thus not considered for
				// space-prefix-counting purposes.
				if len(trimmed) == 0 && strings.HasSuffix(orig, "\n") {
					spaces = maxInt
				} else {
					spaceBytes := len(lit.Val) - len(trimmed)
					spaces, _ = textseg.TokenCount([]byte(orig[:spaceBytes]), textseg.ScanGraphemeClusters)
					adjust = append(adjust, lit)
				}
			} else if _, ok := ttok.(*templateEndToken); ok {
				break // don't process the end token since it never has spaces before it
			}
			if spaces < minSpaces {
				minSpaces = spaces
			}
		}
		if lit, ok := ttok.(*templateLiteralToken); ok {
			if strings.HasSuffix(lit.Val, "\n") {
				newline = true // The following token, if any, begins a new line
			}
		}
	}

	// Second pass: remove exactly minSpaces grapheme clusters from the front
	// of each collected literal and shift its source range accordingly.
	for _, lit := range adjust {
		// Since we want to count space _characters_ rather than space _bytes_,
		// we can't just do a straightforward slice operation here and instead
		// need to hunt for the split point with a scanner.
		valBytes := []byte(lit.Val)
		spaceByteCount := 0
		for i := 0; i < minSpaces; i++ {
			adv, _, _ := textseg.ScanGraphemeClusters(valBytes, true)
			spaceByteCount += adv
			valBytes = valBytes[adv:]
		}
		lit.Val = lit.Val[spaceByteCount:]
		lit.SrcRange.Start.Column += minSpaces
		lit.SrcRange.Start.Byte += spaceByteCount
	}
}
|
|
|
|
|
2017-06-17 16:05:15 +00:00
|
|
|
// templateParts is the result of raising a raw token stream into template
// tokens: the flat token sequence plus the source range it covers.
type templateParts struct {
	Tokens   []templateToken
	SrcRange hcl.Range
}
|
|
|
|
|
|
|
|
// templateToken is a higher-level token that represents a single atom within
// the template language. Our template parsing first raises the raw token
// stream to a sequence of templateToken, and then transforms the result into
// an expression tree.
//
// The interface is satisfied only by types in this file, via the embedded
// isTemplateToken marker type.
type templateToken interface {
	templateToken() templateToken
}
|
|
|
|
|
|
|
|
// templateLiteralToken is a run of literal text, with any ~ trimming
// already applied to Val.
type templateLiteralToken struct {
	Val      string
	SrcRange hcl.Range
	isTemplateToken
}
|
|
|
|
|
|
|
|
// templateInterpToken is an interpolation sequence (${ ... }) whose inner
// expression has already been parsed.
type templateInterpToken struct {
	Expr     Expression
	SrcRange hcl.Range
	isTemplateToken
}
|
|
|
|
|
|
|
|
// templateIfToken marks the opening of an if directive, carrying its
// already-parsed condition expression.
type templateIfToken struct {
	CondExpr Expression
	SrcRange hcl.Range
	isTemplateToken
}
|
|
|
|
|
|
|
|
// templateForToken marks the opening of a for directive, carrying its
// variable names and already-parsed collection expression.
type templateForToken struct {
	KeyVar   string // empty if ignoring key
	ValVar   string
	CollExpr Expression
	SrcRange hcl.Range
	isTemplateToken
}
|
|
|
|
|
|
|
|
// templateEndCtrlType distinguishes the directives that close or subdivide a
// control structure: endif, else, and endfor.
type templateEndCtrlType int

const (
	templateEndIf templateEndCtrlType = iota
	templateElse
	templateEndFor
)
|
|
|
|
|
|
|
|
// templateEndCtrlToken is an else, endif, or endfor directive, distinguished
// by its Type field.
type templateEndCtrlToken struct {
	Type     templateEndCtrlType
	SrcRange hcl.Range
	isTemplateToken
}
|
|
|
|
|
|
|
|
func (t *templateEndCtrlToken) Name() string {
|
|
|
|
switch t.Type {
|
|
|
|
case templateEndIf:
|
|
|
|
return "endif"
|
|
|
|
case templateElse:
|
|
|
|
return "else"
|
|
|
|
case templateEndFor:
|
|
|
|
return "endfor"
|
|
|
|
default:
|
|
|
|
// should never happen
|
|
|
|
panic("invalid templateEndCtrlType")
|
|
|
|
}
|
|
|
|
}
|
|
|
|
|
|
|
|
// templateEndToken is the synthetic marker appended after the last real
// token, so the second-phase parser can report unclosed constructs with
// proper position information.
type templateEndToken struct {
	SrcRange hcl.Range
	isTemplateToken
}
|
|
|
|
|
|
|
|
// isTemplateToken is a zero-size type embedded in each concrete token type
// above so that they all satisfy the templateToken interface.
type isTemplateToken [0]int

// templateToken implements the templateToken marker method.
func (t isTemplateToken) templateToken() templateToken {
	return t
}
|