Updates hashicorp/hcl and hashicorp/hil.

This required a small mod to core Consul code to cope with an interface
change.
James Phillips 2016-08-09 16:53:09 -07:00
parent 8d03a2c948
commit 99ab3390c2
35 changed files with 1301 additions and 232 deletions


@ -167,15 +167,15 @@ func (ct *CompiledTemplate) Render(name string) (*structs.PreparedQuery, error)
return nil
}
hv, ht, err := hil.Eval(tree, config)
res, err := hil.Eval(tree, config)
if err != nil {
return fmt.Errorf("Bad evaluation for '%s' in Service%s: %s", v.String(), path, err)
}
if ht != ast.TypeString {
return fmt.Errorf("Expected Service%s field to be a string, got %s", path, ht)
if res.Type != hil.TypeString {
return fmt.Errorf("Expected Service%s field to be a string, got %s", path, res.Type)
}
v.SetString(hv.(string))
v.SetString(res.Value.(string))
return nil
}
if err := walk(&query.Service, eval); err != nil {
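For context, the interface change referenced in the commit message is visible in this hunk: hil.Eval no longer returns a (value, type, error) triple but a single EvaluationResult plus an error, and callers inspect res.Type and res.Value. Below is a minimal sketch of the new call pattern outside Consul; it assumes hil.Parse as the string-to-AST entry point and an illustrative GlobalScope/VarMap setup, neither of which appears in this diff.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	// Parse an interpolation string into an AST node (hil.Parse is assumed
	// to be the entry point for this; it is not part of the diff above).
	tree, err := hil.Parse("hello ${var.who}")
	if err != nil {
		log.Fatal(err)
	}

	// Supply the referenced variable through the evaluation config.
	config := &hil.EvalConfig{
		GlobalScope: &ast.BasicScope{
			VarMap: map[string]ast.Variable{
				"var.who": {Type: ast.TypeString, Value: "world"},
			},
		},
	}

	// New-style Eval: one EvaluationResult instead of (value, type, error).
	res, err := hil.Eval(tree, config)
	if err != nil {
		log.Fatal(err)
	}
	if res.Type == hil.TypeString {
		fmt.Println(res.Value.(string)) // hello world
	}
}
```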


@ -1,7 +0,0 @@
y.output
# ignore intellij files
.idea
*.iml
*.ipr
*.iws


@ -1,3 +0,0 @@
sudo: false
language: go
go: 1.5


@ -29,7 +29,7 @@ and some people wanted machine-friendly languages.
JSON fits a nice balance in this, but is fairly verbose and most
importantly doesn't support comments. With YAML, we found that beginners
had a really hard time determining what the actual structure was, and
ended up guessing more than not whether to use a hyphen, colon, etc.
ended up guessing more often than not whether to use a hyphen, colon, etc.
in order to represent some configuration key.
Full programming languages such as Ruby enable complex behavior
@ -64,6 +64,16 @@ of the syntax and grammar is listed here.
* Strings are double-quoted and can contain any UTF-8 characters.
Example: `"Hello, World"`
* Multi-line strings start with `<<EOF` at the end of a line, and end
with `EOF` on its own line ([here documents](https://en.wikipedia.org/wiki/Here_document)).
Any text may be used in place of `EOF`. Example:
```
<<FOO
hello
world
FOO
```
* Numbers are assumed to be base 10. If you prefix a number with 0x,
it is treated as a hexadecimal. If it is prefixed with 0, it is
treated as an octal. Numbers can be in scientific notation: "1e10".
@ -71,9 +81,20 @@ of the syntax and grammar is listed here.
* Boolean values: `true`, `false`
* Arrays can be made by wrapping it in `[]`. Example:
`["foo", "bar", 42]`. Arrays can contain primitives
and other arrays, but cannot contain objects. Objects must
use the block syntax shown below.
`["foo", "bar", 42]`. Arrays can contain primitives,
other arrays, and objects. As an alternative, lists
of objects can be created with repeated blocks, using
this structure:
```hcl
service {
key = "value"
}
service {
key = "value"
}
```
Objects and nested objects are created using the structure shown below:
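For reference, here is a hedged Go sketch of how the repeated `service` blocks shown above decode with this package's Decode function; the Config/Service struct layout and field tags are illustrative, not part of the diff.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

// Config and Service are illustrative; the `hcl:"service"` tag collects the
// repeated blocks into a slice.
type Config struct {
	Services []Service `hcl:"service"`
}

type Service struct {
	Key string `hcl:"key"`
}

func main() {
	input := `
service {
  key = "value"
}
service {
  key = "other"
}
`
	var cfg Config
	if err := hcl.Decode(&cfg, input); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", cfg.Services) // [{Key:value} {Key:other}]
}
```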

vendor/github.com/hashicorp/hcl/appveyor.yml (generated, vendored, new file: 16 lines)

@ -0,0 +1,16 @@
version: "build-{branch}-{build}"
image: Visual Studio 2015
clone_folder: c:\gopath\src\github.com\hashicorp\hcl
environment:
GOPATH: c:\gopath
init:
- git config --global core.autocrlf true
install:
- cmd: >-
echo %Path%
go version
go env
build_script:
- cmd: go test -v ./...


@ -21,6 +21,17 @@ var (
nodeType reflect.Type = findNodeType()
)
// Unmarshal accepts a byte slice as input and writes the
// data to the value pointed to by v.
func Unmarshal(bs []byte, v interface{}) error {
root, err := parse(bs)
if err != nil {
return err
}
return DecodeObject(v, root)
}
// Decode reads the given input and decodes it into the structure
// given by `out`.
func Decode(out interface{}, in string) error {
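As a usage note, the Unmarshal helper added above mirrors the encoding/json convention of taking raw bytes plus a pointer. A brief sketch with an illustrative Config struct:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

// Config is illustrative; any struct with hcl tags works.
type Config struct {
	Name string `hcl:"name"`
	Port int    `hcl:"port"`
}

func main() {
	src := []byte(`
name = "web"
port = 8080
`)
	var cfg Config
	// Unmarshal parses the bytes (HCL or JSON) and decodes into cfg,
	// the same as Parse followed by DecodeObject.
	if err := hcl.Unmarshal(src, &cfg); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", cfg) // {Name:web Port:8080}
}
```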
@ -326,6 +337,14 @@ func (d *decoder) decodeMap(name string, node ast.Node, result reflect.Value) er
continue
}
// github.com/hashicorp/terraform/issue/5740
if len(item.Keys) == 0 {
return &parser.PosError{
Pos: node.Pos(),
Err: fmt.Errorf("%s: map must have string keys", name),
}
}
// Get the key we're dealing with, which is the first item
keyStr := item.Keys[0].Token.Value().(string)
@ -466,6 +485,14 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
node = ot.List
}
// Handle the special case where the object itself is a literal. Previously
// the yacc parser would always ensure top-level elements were arrays. The new
// parser does not make the same guarantees, thus we need to convert any
// top-level literal elements into a list.
if _, ok := node.(*ast.LiteralType); ok {
node = &ast.ObjectList{Items: []*ast.ObjectItem{item}}
}
list, ok := node.(*ast.ObjectList)
if !ok {
return &parser.PosError{
@ -490,6 +517,12 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
structType := structVal.Type()
for i := 0; i < structType.NumField(); i++ {
fieldType := structType.Field(i)
tagParts := strings.Split(fieldType.Tag.Get(tagName), ",")
// Ignore fields with tag name "-"
if tagParts[0] == "-" {
continue
}
if fieldType.Anonymous {
fieldKind := fieldType.Type.Kind()
@ -504,7 +537,6 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
// We have an embedded field. We "squash" the fields down
// if specified in the tag.
squash := false
tagParts := strings.Split(fieldType.Tag.Get(tagName), ",")
for _, tag := range tagParts[1:] {
if tag == "squash" {
squash = true


@ -133,6 +133,12 @@ type ObjectItem struct {
}
func (o *ObjectItem) Pos() token.Pos {
// I'm not entirely sure what causes this, but removing this causes
// a test failure. We should investigate at some point.
if len(o.Keys) == 0 {
return token.Pos{}
}
return o.Keys[0].Pos()
}


@ -5,6 +5,7 @@ package parser
import (
"errors"
"fmt"
"strings"
"github.com/hashicorp/hcl/hcl/ast"
"github.com/hashicorp/hcl/hcl/scanner"
@ -78,6 +79,13 @@ func (p *Parser) objectList() (*ast.ObjectList, error) {
}
node.Add(n)
// object lists can be optionally comma-delimited e.g. when a list of maps
// is being expressed, so a comma is allowed here - it's simply consumed
tok := p.scan()
if tok.Type != token.COMMA {
p.unscan()
}
}
return node, nil
}
@ -122,6 +130,24 @@ func (p *Parser) objectItem() (*ast.ObjectItem, error) {
defer un(trace(p, "ParseObjectItem"))
keys, err := p.objectKey()
if len(keys) > 0 && err == errEofToken {
// We ignore eof token here since it is an error if we didn't
// receive a value (but we did receive a key) for the item.
err = nil
}
if len(keys) > 0 && err != nil && p.tok.Type == token.RBRACE {
// This is a strange boolean statement, but what it means is:
// We have keys with no value, and we're likely in an object
// (since RBrace ends an object). For this, we set err to nil so
// we continue and get the error below of having the wrong value
// type.
err = nil
// Reset the token type so we don't think it completed fine. See
// objectType which uses p.tok.Type to check if we're done with
// the object.
p.tok.Type = token.EOF
}
if err != nil {
return nil, err
}
@ -147,6 +173,15 @@ func (p *Parser) objectItem() (*ast.ObjectItem, error) {
if err != nil {
return nil, err
}
default:
keyStr := make([]string, 0, len(keys))
for _, k := range keys {
keyStr = append(keyStr, k.Token.Text)
}
return nil, fmt.Errorf(
"key '%s' expected start of object ('{') or assignment ('=')",
strings.Join(keyStr, " "))
}
// do a look-ahead for line comment
@ -168,7 +203,11 @@ func (p *Parser) objectKey() ([]*ast.ObjectKey, error) {
tok := p.scan()
switch tok.Type {
case token.EOF:
return nil, errEofToken
// It is very important to also return the keys here as well as
// the error. This is because we need to be able to tell if we
// did parse keys prior to finding the EOF, or if we just found
// a bare EOF.
return keys, errEofToken
case token.ASSIGN:
// assignment or object only, but not nested objects. this is not
// allowed: `foo bar = {}`
@ -188,15 +227,26 @@ func (p *Parser) objectKey() ([]*ast.ObjectKey, error) {
return keys, nil
case token.LBRACE:
var err error
// If we have no keys, then it is a syntax error. i.e. {{}} is not
// allowed.
if len(keys) == 0 {
err = &PosError{
Pos: p.tok.Pos,
Err: fmt.Errorf("expected: IDENT | STRING got: %s", p.tok.Type),
}
}
// object
return keys, nil
return keys, err
case token.IDENT, token.STRING:
keyCount++
keys = append(keys, &ast.ObjectKey{Token: p.tok})
case token.ILLEGAL:
fmt.Println("illegal")
default:
return nil, &PosError{
return keys, &PosError{
Pos: p.tok.Pos,
Err: fmt.Errorf("expected: IDENT | STRING | ASSIGN | LBRACE got: %s", p.tok.Type),
}
@ -246,6 +296,11 @@ func (p *Parser) objectType() (*ast.ObjectType, error) {
return nil, err
}
// If there is no error, we should be at a RBRACE to end the object
if p.tok.Type != token.RBRACE {
return nil, fmt.Errorf("object expected closing RBRACE got: %s", p.tok.Type)
}
o.List = l
o.Rbrace = p.tok.Pos // advanced via parseObjectList
return o, nil
@ -263,15 +318,20 @@ func (p *Parser) listType() (*ast.ListType, error) {
needComma := false
for {
tok := p.scan()
switch tok.Type {
case token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC:
if needComma {
switch tok.Type {
case token.COMMA, token.RBRACK:
default:
return nil, &PosError{
Pos: tok.Pos,
Err: fmt.Errorf("unexpected token: %s. Expecting %s", tok.Type, token.COMMA),
Err: fmt.Errorf(
"error parsing list, expected comma or list end, got: %s",
tok.Type),
}
}
}
switch tok.Type {
case token.NUMBER, token.FLOAT, token.STRING, token.HEREDOC:
node, err := p.literalType()
if err != nil {
return nil, err
@ -283,7 +343,7 @@ func (p *Parser) listType() (*ast.ListType, error) {
// get next list item or we are at the end
// do a look-ahead for line comment
p.scan()
if p.lineComment != nil {
if p.lineComment != nil && len(l.List) > 0 {
lit, ok := l.List[len(l.List)-1].(*ast.LiteralType)
if ok {
lit.LineComment = p.lineComment
@ -295,6 +355,18 @@ func (p *Parser) listType() (*ast.ListType, error) {
needComma = false
continue
case token.LBRACE:
// Looks like a nested object, so parse it out
node, err := p.objectType()
if err != nil {
return nil, &PosError{
Pos: tok.Pos,
Err: fmt.Errorf(
"error while trying to parse object within list: %s", err),
}
}
l.Add(node)
needComma = true
case token.BOOL:
// TODO(arslan) should we support? not supported by HCL yet
case token.LBRACK:


@ -6,6 +6,7 @@ import (
"bytes"
"fmt"
"os"
"regexp"
"unicode"
"unicode/utf8"
@ -376,7 +377,7 @@ func (s *Scanner) scanExponent(ch rune) rune {
return ch
}
// scanHeredoc scans a heredoc string.
// scanHeredoc scans a heredoc string
func (s *Scanner) scanHeredoc() {
// Scan the second '<' in example: '<<EOF'
if s.next() != '<' {
@ -389,6 +390,12 @@ func (s *Scanner) scanHeredoc() {
// Scan the identifier
ch := s.next()
// Indented heredoc syntax
if ch == '-' {
ch = s.next()
}
for isLetter(ch) || isDigit(ch) {
ch = s.next()
}
@ -414,6 +421,17 @@ func (s *Scanner) scanHeredoc() {
// Read the identifier
identBytes := s.src[offs : s.srcPos.Offset-s.lastCharLen]
if len(identBytes) == 0 {
s.err("zero-length heredoc anchor")
return
}
var identRegexp *regexp.Regexp
if identBytes[0] == '-' {
identRegexp = regexp.MustCompile(fmt.Sprintf(`[[:space:]]*%s\z`, identBytes[1:]))
} else {
identRegexp = regexp.MustCompile(fmt.Sprintf(`[[:space:]]*%s\z`, identBytes))
}
// Read the actual string value
lineStart := s.srcPos.Offset
@ -422,12 +440,11 @@ func (s *Scanner) scanHeredoc() {
// Special newline handling.
if ch == '\n' {
// Math is fast, so we first compare the byte counts to
// see if we have a chance of seeing the same identifier. If those
// match, then we compare the string values directly.
// Math is fast, so we first compare the byte counts to see if we have a chance
// of seeing the same identifier - if the length is less than the number of bytes
// in the identifier, this cannot be a valid terminator.
lineBytesLen := s.srcPos.Offset - s.lastCharLen - lineStart
if lineBytesLen == len(identBytes) &&
bytes.Equal(identBytes, s.src[lineStart:s.srcPos.Offset-s.lastCharLen]) {
if lineBytesLen >= len(identBytes) && identRegexp.Match(s.src[lineStart:s.srcPos.Offset-s.lastCharLen]) {
break
}
@ -452,7 +469,7 @@ func (s *Scanner) scanString() {
// read character after quote
ch := s.next()
if ch == '\n' || ch < 0 || ch == eof {
if ch < 0 || ch == eof {
s.err("literal not terminated")
return
}
@ -508,16 +525,27 @@ func (s *Scanner) scanEscape() rune {
// scanDigits scans a rune with the given base for n times. For example an
// octal notation \184 would yield in scanDigits(ch, 8, 3)
func (s *Scanner) scanDigits(ch rune, base, n int) rune {
start := n
for n > 0 && digitVal(ch) < base {
ch = s.next()
if ch == eof {
// If we see an EOF, we halt any more scanning of digits
// immediately.
break
}
n--
}
if n > 0 {
s.err("illegal char escape")
}
// we scanned all digits, put the last non digit char back
if n != start {
// we scanned all digits, put the last non digit char back,
// only if we read anything at all
s.unread()
}
return ch
}


@ -27,9 +27,6 @@ func Unquote(s string) (t string, err error) {
if quote != '"' {
return "", ErrSyntax
}
if contains(s, '\n') {
return "", ErrSyntax
}
// Is it trivial? Avoid allocation.
if !contains(s, '\\') && !contains(s, quote) && !contains(s, '$') {
@ -49,7 +46,7 @@ func Unquote(s string) (t string, err error) {
for len(s) > 0 {
// If we're starting a '${}' then let it through un-unquoted.
// Specifically: we don't unquote any characters within the `${}`
// section, except for escaped quotes, which we handle specifically.
// section, except for escaped backslashes, which we handle specifically.
if s[0] == '$' && len(s) > 1 && s[1] == '{' {
buf = append(buf, '$', '{')
s = s[2:]
@ -64,10 +61,12 @@ func Unquote(s string) (t string, err error) {
s = s[size:]
// We special case escaped double quotes in interpolations, converting
// them to straight double quotes.
// We special case escaped backslashes in interpolations, converting
// them to their unescaped equivalents.
if r == '\\' {
if q, _ := utf8.DecodeRuneInString(s); q == '"' {
q, _ := utf8.DecodeRuneInString(s)
switch q {
case '\\':
continue
}
}


@ -142,13 +142,7 @@ func (t Token) Value() interface{} {
case IDENT:
return t.Text
case HEREDOC:
// We need to find the end of the marker
idx := strings.IndexByte(t.Text, '\n')
if idx == -1 {
panic("heredoc doesn't contain newline")
}
return string(t.Text[idx+1 : len(t.Text)-idx+1])
return unindentHeredoc(t.Text)
case STRING:
// Determine the Unquote method to use. If it came from JSON,
// then we need to use the built-in unquote since we have to
@ -158,6 +152,11 @@ func (t Token) Value() interface{} {
f = strconv.Unquote
}
// This case occurs if json null is used
if t.Text == "" {
return ""
}
v, err := f(t.Text)
if err != nil {
panic(fmt.Sprintf("unquote %s err: %s", t.Text, err))
@ -168,3 +167,53 @@ func (t Token) Value() interface{} {
panic(fmt.Sprintf("unimplemented Value for type: %s", t.Type))
}
}
// unindentHeredoc returns the string content of a HEREDOC if it is started with <<
// and the content of a HEREDOC with the hanging indent removed if it is started with
// a <<-, and the terminating line is at least as indented as the least indented line.
func unindentHeredoc(heredoc string) string {
// We need to find the end of the marker
idx := strings.IndexByte(heredoc, '\n')
if idx == -1 {
panic("heredoc doesn't contain newline")
}
unindent := heredoc[2] == '-'
// We can optimize if the heredoc isn't marked for indentation
if !unindent {
return string(heredoc[idx+1 : len(heredoc)-idx+1])
}
// We need to unindent each line based on the indentation level of the marker
lines := strings.Split(string(heredoc[idx+1:len(heredoc)-idx+2]), "\n")
whitespacePrefix := lines[len(lines)-1]
isIndented := true
for _, v := range lines {
if strings.HasPrefix(v, whitespacePrefix) {
continue
}
isIndented = false
break
}
// If all lines are not at least as indented as the terminating mark, return the
// heredoc as is, but trim the leading space from the marker on the final line.
if !isIndented {
return strings.TrimRight(string(heredoc[idx+1:len(heredoc)-idx+1]), " \t")
}
unindentedLines := make([]string, len(lines))
for k, v := range lines {
if k == len(lines)-1 {
unindentedLines[k] = ""
break
}
unindentedLines[k] = strings.TrimPrefix(v, whitespacePrefix)
}
return strings.Join(unindentedLines, "\n")
}
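Taken together, the scanner and token changes above add the indented heredoc form (`<<-`). A hedged sketch of how it looks from the decoding side, with an illustrative struct:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

type Doc struct {
	Text string `hcl:"text"`
}

func main() {
	// The <<- marker enables the unindentHeredoc path above: the common
	// leading indentation is stripped when the terminator is at least as
	// indented as the least indented line.
	input := `
text = <<-EOF
    hello
    world
    EOF
`
	var d Doc
	if err := hcl.Decode(&d, input); err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%q\n", d.Text) // heredoc body with the shared indent removed
}
```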


@ -128,6 +128,12 @@ func (p *Parser) objectKey() ([]*ast.ObjectKey, error) {
Token: p.tok.HCLToken(),
})
case token.COLON:
// If we have a zero keycount it means that we never got
// an object key, i.e. `{ :`. This is a syntax error.
if keyCount == 0 {
return nil, fmt.Errorf("expected: STRING got: %s", p.tok.Type)
}
// Done
return keys, nil
case token.ILLEGAL:


@ -2,6 +2,7 @@ package hcl
import (
"unicode"
"unicode/utf8"
)
type lexModeValue byte
@ -14,17 +15,23 @@ const (
// lexMode returns whether we're going to be parsing in JSON
// mode or HCL mode.
func lexMode(v string) lexModeValue {
for _, r := range v {
func lexMode(v []byte) lexModeValue {
var (
r rune
w int
offset int
)
for {
r, w = utf8.DecodeRune(v[offset:])
offset += w
if unicode.IsSpace(r) {
continue
}
if r == '{' {
return lexModeJson
} else {
return lexModeHcl
}
break
}
return lexModeHcl
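The rewritten lexMode above simply skips leading whitespace and branches on whether the first meaningful rune is `{`. A standalone sketch of the same detection idea (this is not the package's unexported function, just an illustration):

```go
package main

import (
	"fmt"
	"unicode"
	"unicode/utf8"
)

// isProbablyJSON mirrors the detection idea used by lexMode: skip leading
// whitespace and treat input starting with '{' as JSON, everything else as HCL.
func isProbablyJSON(v []byte) bool {
	for len(v) > 0 {
		r, w := utf8.DecodeRune(v)
		v = v[w:]
		if unicode.IsSpace(r) {
			continue
		}
		return r == '{'
	}
	return false
}

func main() {
	fmt.Println(isProbablyJSON([]byte(`  {"a": 1}`))) // true
	fmt.Println(isProbablyJSON([]byte(`a = 1`)))      // false
}
```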


@ -8,16 +8,32 @@ import (
jsonParser "github.com/hashicorp/hcl/json/parser"
)
// Parse parses the given input and returns the root object.
// ParseBytes accepts as input byte slice and returns ast tree.
//
// The input format can be either HCL or JSON.
func Parse(input string) (*ast.File, error) {
switch lexMode(input) {
// Input can be either JSON or HCL
func ParseBytes(in []byte) (*ast.File, error) {
return parse(in)
}
// ParseString accepts input as a string and returns ast tree.
func ParseString(input string) (*ast.File, error) {
return parse([]byte(input))
}
func parse(in []byte) (*ast.File, error) {
switch lexMode(in) {
case lexModeHcl:
return hclParser.Parse([]byte(input))
return hclParser.Parse(in)
case lexModeJson:
return jsonParser.Parse([]byte(input))
return jsonParser.Parse(in)
}
return nil, fmt.Errorf("unknown config format")
}
// Parse parses the given input and returns the root object.
//
// The input format can be either HCL or JSON.
func Parse(input string) (*ast.File, error) {
return parse([]byte(input))
}
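The new ParseBytes/ParseString entry points expose the AST directly; paired with DecodeObject they are the two-step form of what Decode and Unmarshal do at once. A brief sketch, with an illustrative Config struct:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hcl"
)

type Config struct {
	Name string `hcl:"name"`
}

func main() {
	src := []byte(`name = "example"`)

	// Parse to an AST first (format is auto-detected as HCL or JSON) ...
	file, err := hcl.ParseBytes(src)
	if err != nil {
		log.Fatal(err)
	}

	// ... then decode the AST into a Go value.
	var cfg Config
	if err := hcl.DecodeObject(&cfg, file); err != nil {
		log.Fatal(err)
	}
	fmt.Println(cfg.Name) // example
}
```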


@ -1,3 +0,0 @@
.DS_Store
.idea
*.iml


@ -1,3 +0,0 @@
sudo: false
language: go
go: 1.5

vendor/github.com/hashicorp/hil/LICENSE (generated, vendored, new file: 353 lines)

@ -0,0 +1,353 @@
Mozilla Public License, version 2.0
1. Definitions
1.1. “Contributor”
means each individual or legal entity that creates, contributes to the
creation of, or owns Covered Software.
1.2. “Contributor Version”
means the combination of the Contributions of others (if any) used by a
Contributor and that particular Contributor’s Contribution.
1.3. “Contribution”
means Covered Software of a particular Contributor.
1.4. “Covered Software”
means Source Code Form to which the initial Contributor has attached the
notice in Exhibit A, the Executable Form of such Source Code Form, and
Modifications of such Source Code Form, in each case including portions
thereof.
1.5. “Incompatible With Secondary Licenses”
means
a. that the initial Contributor has attached the notice described in
Exhibit B to the Covered Software; or
b. that the Covered Software was made available under the terms of version
1.1 or earlier of the License, but not also under the terms of a
Secondary License.
1.6. “Executable Form”
means any form of the work other than Source Code Form.
1.7. “Larger Work”
means a work that combines Covered Software with other material, in a separate
file or files, that is not Covered Software.
1.8. “License”
means this document.
1.9. “Licensable”
means having the right to grant, to the maximum extent possible, whether at the
time of the initial grant or subsequently, any and all of the rights conveyed by
this License.
1.10. “Modifications”
means any of the following:
a. any file in Source Code Form that results from an addition to, deletion
from, or modification of the contents of Covered Software; or
b. any new file in Source Code Form that contains any Covered Software.
1.11. “Patent Claims” of a Contributor
means any patent claim(s), including without limitation, method, process,
and apparatus claims, in any patent Licensable by such Contributor that
would be infringed, but for the grant of the License, by the making,
using, selling, offering for sale, having made, import, or transfer of
either its Contributions or its Contributor Version.
1.12. “Secondary License”
means either the GNU General Public License, Version 2.0, the GNU Lesser
General Public License, Version 2.1, the GNU Affero General Public
License, Version 3.0, or any later versions of those licenses.
1.13. “Source Code Form”
means the form of the work preferred for making modifications.
1.14. “You” (or “Your”)
means an individual or a legal entity exercising rights under this
License. For legal entities, “You” includes any entity that controls, is
controlled by, or is under common control with You. For purposes of this
definition, “control” means (a) the power, direct or indirect, to cause
the direction or management of such entity, whether by contract or
otherwise, or (b) ownership of more than fifty percent (50%) of the
outstanding shares or beneficial ownership of such entity.
2. License Grants and Conditions
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
a. under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or as
part of a Larger Work; and
b. under Patent Claims of such Contributor to make, use, sell, offer for
sale, have made, import, and otherwise transfer either its Contributions
or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution become
effective for each Contribution on the date the Contributor first distributes
such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under this
License. No additional rights or licenses will be implied from the distribution
or licensing of Covered Software under this License. Notwithstanding Section
2.1(b) above, no patent license is granted by a Contributor:
a. for any code that a Contributor has removed from Covered Software; or
b. for infringements caused by: (i) Your and any other third party’s
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
c. under Patent Claims infringed by Covered Software in the absence of its
Contributions.
This License does not grant any rights in the trademarks, service marks, or
logos of any Contributor (except as may be necessary to comply with the
notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this License
(see Section 10.2) or under the terms of a Secondary License (if permitted
under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its Contributions
are its original creation(s) or it has sufficient rights to grant the
rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under applicable
copyright doctrines of fair use, fair dealing, or other equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
Section 2.1.
3. Responsibilities
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under the
terms of this License. You must inform recipients that the Source Code Form
of the Covered Software is governed by the terms of this License, and how
they can obtain a copy of this License. You may not attempt to alter or
restrict the recipients’ rights in the Source Code Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
a. such Covered Software must also be made available in Source Code Form,
as described in Section 3.1, and You must inform recipients of the
Executable Form how they can obtain a copy of such Source Code Form by
reasonable means in a timely manner, at a charge no more than the cost
of distribution to the recipient; and
b. You may distribute such Executable Form under the terms of this License,
or sublicense it under different terms, provided that the license for
the Executable Form does not attempt to limit or alter the recipients’
rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for the
Covered Software. If the Larger Work is a combination of Covered Software
with a work governed by one or more Secondary Licenses, and the Covered
Software is not Incompatible With Secondary Licenses, this License permits
You to additionally distribute such Covered Software under the terms of
such Secondary License(s), so that the recipient of the Larger Work may, at
their option, further distribute the Covered Software under the terms of
either this License or such Secondary License(s).
3.4. Notices
You may not remove or alter the substance of any license notices (including
copyright notices, patent notices, disclaimers of warranty, or limitations
of liability) contained within the Source Code Form of the Covered
Software, except that You may alter any license notices to the extent
required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on behalf
of any Contributor. You must make it absolutely clear that any such
warranty, support, indemnity, or liability obligation is offered by You
alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
If it is impossible for You to comply with any of the terms of this License
with respect to some or all of the Covered Software due to statute, judicial
order, or regulation then You must: (a) comply with the terms of this License
to the maximum extent possible; and (b) describe the limitations and the code
they affect. Such description must be placed in a text file included with all
distributions of the Covered Software under this License. Except to the
extent prohibited by statute or regulation, such description must be
sufficiently detailed for a recipient of ordinary skill to be able to
understand it.
5. Termination
5.1. The rights granted under this License will terminate automatically if You
fail to comply with any of its terms. However, if You become compliant,
then the rights granted under this License from a particular Contributor
are reinstated (a) provisionally, unless and until such Contributor
explicitly and finally terminates Your grants, and (b) on an ongoing basis,
if such Contributor fails to notify You of the non-compliance by some
reasonable means prior to 60 days after You have come back into compliance.
Moreover, Your grants from a particular Contributor are reinstated on an
ongoing basis if such Contributor notifies You of the non-compliance by
some reasonable means, this is the first time You have received notice of
non-compliance with this License from such Contributor, and You become
compliant prior to 30 days after Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions, counter-claims,
and cross-claims) alleging that a Contributor Version directly or
indirectly infringes any patent, then the rights granted to You by any and
all Contributors for the Covered Software under Section 2.1 of this License
shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
license agreements (excluding distributors and resellers) which have been
validly granted by You or Your distributors under this License prior to
termination shall survive termination.
6. Disclaimer of Warranty
Covered Software is provided under this License on an “as is” basis, without
warranty of any kind, either expressed, implied, or statutory, including,
without limitation, warranties that the Covered Software is free of defects,
merchantable, fit for a particular purpose or non-infringing. The entire
risk as to the quality and performance of the Covered Software is with You.
Should any Covered Software prove defective in any respect, You (not any
Contributor) assume the cost of any necessary servicing, repair, or
correction. This disclaimer of warranty constitutes an essential part of this
License. No use of any Covered Software is authorized under this License
except under this disclaimer.
7. Limitation of Liability
Under no circumstances and under no legal theory, whether tort (including
negligence), contract, or otherwise, shall any Contributor, or anyone who
distributes Covered Software as permitted above, be liable to You for any
direct, indirect, special, incidental, or consequential damages of any
character including, without limitation, damages for lost profits, loss of
goodwill, work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses, even if such party shall have been
informed of the possibility of such damages. This limitation of liability
shall not apply to liability for death or personal injury resulting from such
party’s negligence to the extent applicable law prohibits such limitation.
Some jurisdictions do not allow the exclusion or limitation of incidental or
consequential damages, so this exclusion and limitation may not apply to You.
8. Litigation
Any litigation relating to this License may be brought only in the courts of
a jurisdiction where the defendant maintains its principal place of business
and such litigation shall be governed by laws of that jurisdiction, without
reference to its conflict-of-law provisions. Nothing in this Section shall
prevent a party’s ability to bring cross-claims or counter-claims.
9. Miscellaneous
This License represents the complete agreement concerning the subject matter
hereof. If any provision of this License is held to be unenforceable, such
provision shall be reformed only to the extent necessary to make it
enforceable. Any law or regulation which provides that the language of a
contract shall be construed against the drafter shall not be used to construe
this License against a Contributor.
10. Versions of the License
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version of
the License under which You originally received the Covered Software, or
under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a modified
version of this License if you rename the license and remove any
references to the name of the license steward (except to note that such
modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
This Source Code Form is subject to the
terms of the Mozilla Public License, v.
2.0. If a copy of the MPL was not
distributed with this file, You can
obtain one at
http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular file, then
You may include the notice in a location (such as a LICENSE file in a relevant
directory) where a recipient would be likely to look for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - “Incompatible With Secondary Licenses” Notice
This Source Code Form is “Incompatible
With Secondary Licenses”, as defined by
the Mozilla Public License, v. 2.0.


@ -72,7 +72,7 @@ docs, we'll assume you're within `${}`.
`add(1, var.foo)` or even nested function calls:
`add(1, get("some value"))`.
* Witin strings, further interpolations can be opened with `${}`.
* Within strings, further interpolations can be opened with `${}`.
Example: `"Hello ${nested}"`. A full example including the
original `${}` (remember this list assumes we're inside of one
already) could be: `foo ${func("hello ${var.foo}")}`.
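Below is a hedged, self-contained sketch of the nested-interpolation behaviour described above. It assumes hil.Parse and an EvalConfig scope (not shown in this diff); the `upper` function is registered purely for illustration.

```go
package main

import (
	"fmt"
	"log"
	"strings"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	// Scope with one variable and one illustrative function named "upper".
	config := &hil.EvalConfig{
		GlobalScope: &ast.BasicScope{
			VarMap: map[string]ast.Variable{
				"var.foo": {Type: ast.TypeString, Value: "world"},
			},
			FuncMap: map[string]ast.Function{
				"upper": {
					ArgTypes:   []ast.Type{ast.TypeString},
					ReturnType: ast.TypeString,
					Callback: func(args []interface{}) (interface{}, error) {
						return strings.ToUpper(args[0].(string)), nil
					},
				},
			},
		},
	}

	// A nested interpolation inside a function argument, as described above.
	tree, err := hil.Parse(`foo ${upper("hello ${var.foo}")}`)
	if err != nil {
		log.Fatal(err)
	}
	res, err := hil.Eval(tree, config)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(res.Value) // foo HELLO WORLD
}
```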

vendor/github.com/hashicorp/hil/appveyor.yml (generated, vendored, new file: 18 lines)

@ -0,0 +1,18 @@
version: "build-{branch}-{build}"
image: Visual Studio 2015
clone_folder: c:\gopath\src\github.com\hashicorp\hil
environment:
GOPATH: c:\gopath
init:
- git config --global core.autocrlf true
install:
- cmd: >-
echo %Path%
go version
go env
go get -d -v -t ./...
build_script:
- cmd: go test -v ./...


@ -53,4 +53,26 @@ const (
TypeInt
TypeFloat
TypeList
TypeMap
)
func (t Type) Printable() string {
switch t {
case TypeInvalid:
return "invalid type"
case TypeAny:
return "any type"
case TypeString:
return "type string"
case TypeInt:
return "type int"
case TypeFloat:
return "type float"
case TypeList:
return "type list"
case TypeMap:
return "type map"
default:
return "unknown type"
}
}


@ -1,42 +0,0 @@
package ast
import (
"bytes"
"fmt"
)
// Concat represents a node where the result of two or more expressions are
// concatenated. The result of all expressions must be a string.
type Concat struct {
Exprs []Node
Posx Pos
}
func (n *Concat) Accept(v Visitor) Node {
for i, expr := range n.Exprs {
n.Exprs[i] = expr.Accept(v)
}
return v(n)
}
func (n *Concat) Pos() Pos {
return n.Posx
}
func (n *Concat) GoString() string {
return fmt.Sprintf("*%#v", *n)
}
func (n *Concat) String() string {
var b bytes.Buffer
for _, expr := range n.Exprs {
b.WriteString(fmt.Sprintf("%s", expr))
}
return b.String()
}
func (n *Concat) Type(Scope) (Type, error) {
return TypeString, nil
}


@ -34,33 +34,39 @@ func (n *Index) Type(s Scope) (Type, error) {
if !ok {
return TypeInvalid, fmt.Errorf("unknown variable accessed: %s", variableAccess.Name)
}
if variable.Type != TypeList {
switch variable.Type {
case TypeList:
return n.typeList(variable, variableAccess.Name)
case TypeMap:
return n.typeMap(variable, variableAccess.Name)
default:
return TypeInvalid, fmt.Errorf("invalid index operation into non-indexable type: %s", variable.Type)
}
}
func (n *Index) typeList(variable Variable, variableName string) (Type, error) {
// We assume type checking has already determined that this is a list
list := variable.Value.([]Variable)
// Ensure that the types of the list elements are homogenous
listTypes := make(map[Type]struct{})
for _, v := range list {
if _, ok := listTypes[v.Type]; ok {
continue
}
listTypes[v.Type] = struct{}{}
return VariableListElementTypesAreHomogenous(variableName, list)
}
if len(listTypes) != 1 {
typesFound := make([]string, len(listTypes))
func (n *Index) typeMap(variable Variable, variableName string) (Type, error) {
// We assume type checking has already determined that this is a map
vmap := variable.Value.(map[string]Variable)
return VariableMapValueTypesAreHomogenous(variableName, vmap)
}
func reportTypes(typesFound map[Type]struct{}) string {
stringTypes := make([]string, len(typesFound))
i := 0
for k, _ := range listTypes {
typesFound[0] = k.String()
for k, _ := range typesFound {
stringTypes[0] = k.String()
i++
}
types := strings.Join(typesFound, ", ")
return TypeInvalid, fmt.Errorf("list %q does not have homogenous types. found %s", variableAccess.Name, types)
}
return list[0].Type, nil
return strings.Join(stringTypes, ", ")
}
func (n *Index) GoString() string {

vendor/github.com/hashicorp/hil/ast/output.go (generated, vendored, new file: 78 lines)

@ -0,0 +1,78 @@
package ast
import (
"bytes"
"fmt"
)
// Output represents the root node of all interpolation evaluations. If the
// output only has one expression which is either a TypeList or TypeMap, the
// Output can be type-asserted to []interface{} or map[string]interface{}
// respectively. Otherwise the Output evaluates as a string, and concatenates
// the evaluation of each expression.
type Output struct {
Exprs []Node
Posx Pos
}
func (n *Output) Accept(v Visitor) Node {
for i, expr := range n.Exprs {
n.Exprs[i] = expr.Accept(v)
}
return v(n)
}
func (n *Output) Pos() Pos {
return n.Posx
}
func (n *Output) GoString() string {
return fmt.Sprintf("*%#v", *n)
}
func (n *Output) String() string {
var b bytes.Buffer
for _, expr := range n.Exprs {
b.WriteString(fmt.Sprintf("%s", expr))
}
return b.String()
}
func (n *Output) Type(s Scope) (Type, error) {
// Special case no expressions for backward compatibility
if len(n.Exprs) == 0 {
return TypeString, nil
}
// Special case a single expression of types list or map
if len(n.Exprs) == 1 {
exprType, err := n.Exprs[0].Type(s)
if err != nil {
return TypeInvalid, err
}
switch exprType {
case TypeList:
return TypeList, nil
case TypeMap:
return TypeMap, nil
}
}
// Otherwise ensure all our expressions are strings
for index, expr := range n.Exprs {
exprType, err := expr.Type(s)
if err != nil {
return TypeInvalid, err
}
// We only look for things we know we can't coerce with an implicit conversion func
if exprType == TypeList || exprType == TypeMap {
return TypeInvalid, fmt.Errorf(
"multi-expression HIL outputs may only have string inputs: %d is type %s",
index, exprType)
}
}
return TypeString, nil
}


@ -11,6 +11,7 @@ const (
_Type_name_3 = "TypeInt"
_Type_name_4 = "TypeFloat"
_Type_name_5 = "TypeList"
_Type_name_6 = "TypeMap"
)
var (
@ -20,6 +21,7 @@ var (
_Type_index_3 = [...]uint8{0, 7}
_Type_index_4 = [...]uint8{0, 9}
_Type_index_5 = [...]uint8{0, 8}
_Type_index_6 = [...]uint8{0, 7}
)
func (i Type) String() string {
@ -36,6 +38,8 @@ func (i Type) String() string {
return _Type_name_4
case i == 32:
return _Type_name_5
case i == 64:
return _Type_name_6
default:
return fmt.Sprintf("Type(%d)", i)
}


@ -0,0 +1,45 @@
package ast
import "fmt"
func VariableListElementTypesAreHomogenous(variableName string, list []Variable) (Type, error) {
listTypes := make(map[Type]struct{})
for _, v := range list {
if _, ok := listTypes[v.Type]; ok {
continue
}
listTypes[v.Type] = struct{}{}
}
if len(listTypes) != 1 && len(list) != 0 {
return TypeInvalid, fmt.Errorf("list %q does not have homogenous types. found %s", variableName, reportTypes(listTypes))
}
if len(list) > 0 {
return list[0].Type, nil
}
return TypeInvalid, fmt.Errorf("list %q does not have any elements so cannot determine type.", variableName)
}
func VariableMapValueTypesAreHomogenous(variableName string, vmap map[string]Variable) (Type, error) {
valueTypes := make(map[Type]struct{})
for _, v := range vmap {
if _, ok := valueTypes[v.Type]; ok {
continue
}
valueTypes[v.Type] = struct{}{}
}
if len(valueTypes) != 1 && len(vmap) != 0 {
return TypeInvalid, fmt.Errorf("map %q does not have homogenous value types. found %s", variableName, reportTypes(valueTypes))
}
// For loop here is an easy way to get a single key, we return immediately.
for _, v := range vmap {
return v.Type, nil
}
// This means the map is empty
return TypeInvalid, fmt.Errorf("map %q does not have any elements so cannot determine type.", variableName)
}
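These helpers are exported from the hil ast package, so the homogeneity check can also be exercised directly. A small sketch (the "example" variable name is arbitrary):

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hil/ast"
)

func main() {
	list := []ast.Variable{
		{Type: ast.TypeString, Value: "a"},
		{Type: ast.TypeString, Value: "b"},
	}

	// Homogenous list: the shared element type is returned.
	t, err := ast.VariableListElementTypesAreHomogenous("example", list)
	fmt.Println(t, err) // TypeString <nil>

	// Mixed list: an error describes the types that were found.
	mixed := append(list, ast.Variable{Type: ast.TypeInt, Value: 1})
	_, err = ast.VariableListElementTypesAreHomogenous("example", mixed)
	fmt.Println(err)
}
```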


@ -22,6 +22,7 @@ func registerBuiltins(scope *ast.BasicScope) *ast.BasicScope {
scope.FuncMap["__builtin_IntToFloat"] = builtinIntToFloat()
scope.FuncMap["__builtin_IntToString"] = builtinIntToString()
scope.FuncMap["__builtin_StringToInt"] = builtinStringToInt()
scope.FuncMap["__builtin_StringToFloat"] = builtinStringToFloat()
// Math operations
scope.FuncMap["__builtin_IntMath"] = builtinIntMath()
@ -142,3 +143,18 @@ func builtinStringToInt() ast.Function {
},
}
}
func builtinStringToFloat() ast.Function {
return ast.Function{
ArgTypes: []ast.Type{ast.TypeString},
ReturnType: ast.TypeFloat,
Callback: func(args []interface{}) (interface{}, error) {
v, err := strconv.ParseFloat(args[0].(string), 64)
if err != nil {
return nil, err
}
return v, nil
},
}
}


@ -35,7 +35,7 @@ func (c *IdentifierCheck) visit(raw ast.Node) ast.Node {
c.visitCall(n)
case *ast.VariableAccess:
c.visitVariableAccess(n)
case *ast.Concat:
case *ast.Output:
// Ignore
case *ast.LiteralNode:
// Ignore


@ -64,8 +64,8 @@ func (v *TypeCheck) visit(raw ast.Node) ast.Node {
case *ast.Index:
tc := &typeCheckIndex{n}
result, err = tc.TypeCheck(v)
case *ast.Concat:
tc := &typeCheckConcat{n}
case *ast.Output:
tc := &typeCheckOutput{n}
result, err = tc.TypeCheck(v)
case *ast.LiteralNode:
tc := &typeCheckLiteral{n}
@ -199,7 +199,7 @@ func (tc *typeCheckCall) TypeCheck(v *TypeCheck) (ast.Node, error) {
return nil, fmt.Errorf(
"%s: argument %d should be %s, got %s",
tc.n.Func, i+1, expected, args[i])
tc.n.Func, i+1, expected.Printable(), args[i].Printable())
}
}
@ -219,7 +219,7 @@ func (tc *typeCheckCall) TypeCheck(v *TypeCheck) (ast.Node, error) {
return nil, fmt.Errorf(
"%s: argument %d should be %s, got %s",
tc.n.Func, realI,
function.VariadicType, t)
function.VariadicType.Printable(), t.Printable())
}
}
}
@ -230,18 +230,30 @@ func (tc *typeCheckCall) TypeCheck(v *TypeCheck) (ast.Node, error) {
return tc.n, nil
}
type typeCheckConcat struct {
n *ast.Concat
type typeCheckOutput struct {
n *ast.Output
}
func (tc *typeCheckConcat) TypeCheck(v *TypeCheck) (ast.Node, error) {
func (tc *typeCheckOutput) TypeCheck(v *TypeCheck) (ast.Node, error) {
n := tc.n
types := make([]ast.Type, len(n.Exprs))
for i, _ := range n.Exprs {
types[len(n.Exprs)-1-i] = v.StackPop()
}
// All concat args must be strings, so validate that
// If there is only one argument and it is a list, we evaluate to a list
if len(types) == 1 && types[0] == ast.TypeList {
v.StackPush(ast.TypeList)
return n, nil
}
// If there is only one argument and it is a map, we evaluate to a map
if len(types) == 1 && types[0] == ast.TypeMap {
v.StackPush(ast.TypeMap)
return n, nil
}
// Otherwise, all concat args must be strings, so validate that
for i, t := range types {
if t != ast.TypeString {
cn := v.ImplicitConversion(t, ast.TypeString, n.Exprs[i])
@ -251,7 +263,7 @@ func (tc *typeCheckConcat) TypeCheck(v *TypeCheck) (ast.Node, error) {
}
return nil, fmt.Errorf(
"argument %d must be a string", i+1)
"output of an HIL expression must be a string, or a single list (argument %d is %s)", i+1, t)
}
}
@ -293,15 +305,6 @@ type typeCheckIndex struct {
}
func (tc *typeCheckIndex) TypeCheck(v *TypeCheck) (ast.Node, error) {
value, err := tc.n.Key.Type(v.Scope)
if err != nil {
return nil, err
}
if value != ast.TypeInt {
return nil, fmt.Errorf("key of an index must be an int, was %s", value)
}
// Ensure we have a VariableAccess as the target
varAccessNode, ok := tc.n.Target.(*ast.VariableAccess)
if !ok {
@ -313,28 +316,40 @@ func (tc *typeCheckIndex) TypeCheck(v *TypeCheck) (ast.Node, error) {
if !ok {
return nil, fmt.Errorf("unknown variable accessed: %s", varAccessNode.Name)
}
if variable.Type != ast.TypeList {
keyType, err := tc.n.Key.Type(v.Scope)
if err != nil {
return nil, err
}
switch variable.Type {
case ast.TypeList:
if keyType != ast.TypeInt {
return nil, fmt.Errorf("key of an index must be an int, was %s", keyType)
}
valType, err := ast.VariableListElementTypesAreHomogenous(varAccessNode.Name, variable.Value.([]ast.Variable))
if err != nil {
return tc.n, err
}
v.StackPush(valType)
return tc.n, nil
case ast.TypeMap:
if keyType != ast.TypeString {
return nil, fmt.Errorf("key of an index must be a string, was %s", keyType)
}
valType, err := ast.VariableMapValueTypesAreHomogenous(varAccessNode.Name, variable.Value.(map[string]ast.Variable))
if err != nil {
return tc.n, err
}
v.StackPush(valType)
return tc.n, nil
default:
return nil, fmt.Errorf("invalid index operation into non-indexable type: %s", variable.Type)
}
list := variable.Value.([]ast.Variable)
// Ensure that the types of the list elements are homogenous
listTypes := make(map[ast.Type]struct{})
for _, v := range list {
if _, ok := listTypes[v.Type]; ok {
continue
}
listTypes[v.Type] = struct{}{}
}
if len(listTypes) != 1 {
return nil, fmt.Errorf("list %q does not have homogenous types (%s)", varAccessNode.Name)
}
// This is the type since the list is homogenous in type
v.StackPush(list[0].Type)
return tc.n, nil
}
func (v *TypeCheck) ImplicitConversion(

vendor/github.com/hashicorp/hil/convert.go (generated, vendored, new file: 148 lines)

@ -0,0 +1,148 @@
package hil
import (
"fmt"
"reflect"
"github.com/hashicorp/hil/ast"
"github.com/mitchellh/mapstructure"
)
var hilMapstructureDecodeHookSlice []interface{}
var hilMapstructureDecodeHookStringSlice []string
var hilMapstructureDecodeHookMap map[string]interface{}
// hilMapstructureWeakDecode behaves in the same way as mapstructure.WeakDecode
// but has a DecodeHook which defeats the backward compatibility mode of mapstructure
// which WeakDecodes []interface{}{} into an empty map[string]interface{}. This
// allows us to use WeakDecode (desirable), but not fail on empty lists.
func hilMapstructureWeakDecode(m interface{}, rawVal interface{}) error {
config := &mapstructure.DecoderConfig{
DecodeHook: func(source reflect.Type, target reflect.Type, val interface{}) (interface{}, error) {
sliceType := reflect.TypeOf(hilMapstructureDecodeHookSlice)
stringSliceType := reflect.TypeOf(hilMapstructureDecodeHookStringSlice)
mapType := reflect.TypeOf(hilMapstructureDecodeHookMap)
if (source == sliceType || source == stringSliceType) && target == mapType {
return nil, fmt.Errorf("Cannot convert %s into a %s", source, target)
}
return val, nil
},
WeaklyTypedInput: true,
Result: rawVal,
}
decoder, err := mapstructure.NewDecoder(config)
if err != nil {
return err
}
return decoder.Decode(m)
}
func InterfaceToVariable(input interface{}) (ast.Variable, error) {
if inputVariable, ok := input.(ast.Variable); ok {
return inputVariable, nil
}
var stringVal string
if err := hilMapstructureWeakDecode(input, &stringVal); err == nil {
return ast.Variable{
Type: ast.TypeString,
Value: stringVal,
}, nil
}
var mapVal map[string]interface{}
if err := hilMapstructureWeakDecode(input, &mapVal); err == nil {
elements := make(map[string]ast.Variable)
for i, element := range mapVal {
varElement, err := InterfaceToVariable(element)
if err != nil {
return ast.Variable{}, err
}
elements[i] = varElement
}
return ast.Variable{
Type: ast.TypeMap,
Value: elements,
}, nil
}
var sliceVal []interface{}
if err := hilMapstructureWeakDecode(input, &sliceVal); err == nil {
elements := make([]ast.Variable, len(sliceVal))
for i, element := range sliceVal {
varElement, err := InterfaceToVariable(element)
if err != nil {
return ast.Variable{}, err
}
elements[i] = varElement
}
return ast.Variable{
Type: ast.TypeList,
Value: elements,
}, nil
}
return ast.Variable{}, fmt.Errorf("value for conversion must be a string, interface{} or map[string]interface: got %T", input)
}
func VariableToInterface(input ast.Variable) (interface{}, error) {
if input.Type == ast.TypeString {
if inputStr, ok := input.Value.(string); ok {
return inputStr, nil
} else {
return nil, fmt.Errorf("ast.Variable with type string has value which is not a string")
}
}
if input.Type == ast.TypeList {
inputList, ok := input.Value.([]ast.Variable)
if !ok {
return nil, fmt.Errorf("ast.Variable with type list has value which is not a []ast.Variable")
}
result := make([]interface{}, 0)
if len(inputList) == 0 {
return result, nil
}
for _, element := range inputList {
if convertedElement, err := VariableToInterface(element); err == nil {
result = append(result, convertedElement)
} else {
return nil, err
}
}
return result, nil
}
if input.Type == ast.TypeMap {
inputMap, ok := input.Value.(map[string]ast.Variable)
if !ok {
return nil, fmt.Errorf("ast.Variable with type map has value which is not a map[string]ast.Variable")
}
result := make(map[string]interface{}, 0)
if len(inputMap) == 0 {
return result, nil
}
for key, value := range inputMap {
if convertedValue, err := VariableToInterface(value); err == nil {
result[key] = convertedValue
} else {
return nil, err
}
}
return result, nil
}
return nil, fmt.Errorf("unknown input type: %s", input.Type)
}
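The conversion functions above round-trip between native Go values and ast.Variable. A short sketch of both directions:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	// Native Go value -> ast.Variable (maps become TypeMap, slices TypeList).
	v, err := hil.InterfaceToVariable(map[string]interface{}{
		"region": "us-east-1",
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.Type) // TypeMap

	// And back: ast.Variable -> plain Go value.
	native, err := hil.VariableToInterface(v)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(native.(map[string]interface{})["region"]) // us-east-1
}
```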


@ -23,9 +23,76 @@ type EvalConfig struct {
// semantic check on an AST tree. This will be called with the root node.
type SemanticChecker func(ast.Node) error
// EvalType represents the type of the output returned from a HIL
// evaluation.
type EvalType uint32
const (
TypeInvalid EvalType = 0
TypeString EvalType = 1 << iota
TypeList
TypeMap
)
//go:generate stringer -type=EvalType
// EvaluationResult is a struct returned from the hil.Eval function,
// representing the result of an interpolation. Results are returned in their
// "natural" Go structure rather than in terms of the HIL AST. For the types
// currently implemented, this means that the Value field can be interpreted as
// the following Go types:
// TypeInvalid: undefined
// TypeString: string
// TypeList: []interface{}
// TypeMap: map[string]interface{}
type EvaluationResult struct {
Type EvalType
Value interface{}
}
// InvalidResult is a structure representing the result of a HIL interpolation
// which has invalid syntax, missing variables, or some other type of error.
// The error is described out of band in the accompanying error return value.
var InvalidResult = EvaluationResult{Type: TypeInvalid, Value: nil}
func Eval(root ast.Node, config *EvalConfig) (EvaluationResult, error) {
output, outputType, err := internalEval(root, config)
if err != nil {
return InvalidResult, err
}
switch outputType {
case ast.TypeList:
val, err := VariableToInterface(ast.Variable{
Type: ast.TypeList,
Value: output,
})
return EvaluationResult{
Type: TypeList,
Value: val,
}, err
case ast.TypeMap:
val, err := VariableToInterface(ast.Variable{
Type: ast.TypeMap,
Value: output,
})
return EvaluationResult{
Type: TypeMap,
Value: val,
}, err
case ast.TypeString:
return EvaluationResult{
Type: TypeString,
Value: output,
}, nil
default:
return InvalidResult, fmt.Errorf("unknown type %s as interpolation output", outputType)
}
}
// Eval evaluates the given AST tree and returns its output value, the type
// of the output, and any error that occurred.
func Eval(root ast.Node, config *EvalConfig) (interface{}, ast.Type, error) {
func internalEval(root ast.Node, config *EvalConfig) (interface{}, ast.Type, error) {
// Copy the scope so we can add our builtins
if config == nil {
config = new(EvalConfig)
@ -42,6 +109,7 @@ func Eval(root ast.Node, config *EvalConfig) (interface{}, ast.Type, error) {
},
ast.TypeString: {
ast.TypeInt: "__builtin_StringToInt",
ast.TypeFloat: "__builtin_StringToFloat",
},
}
@ -144,8 +212,8 @@ func evalNode(raw ast.Node) (EvalNode, error) {
return &evalIndex{n}, nil
case *ast.Call:
return &evalCall{n}, nil
case *ast.Concat:
return &evalConcat{n}, nil
case *ast.Output:
return &evalOutput{n}, nil
case *ast.LiteralNode:
return &evalLiteralNode{n}, nil
case *ast.VariableAccess:
@ -199,16 +267,35 @@ func (v *evalIndex) Eval(scope ast.Scope, stack *ast.Stack) (interface{}, ast.Ty
if err != nil {
return nil, ast.TypeInvalid, err
}
key, keyType, err := evalKey.Eval(scope, stack)
// Last sanity check
if targetType != ast.TypeList {
return nil, ast.TypeInvalid, fmt.Errorf("target for indexing must be ast.TypeList, is %s", targetType)
if err != nil {
return nil, ast.TypeInvalid, err
}
variableName := v.Index.Target.(*ast.VariableAccess).Name
switch targetType {
case ast.TypeList:
if keyType != ast.TypeInt {
return nil, ast.TypeInvalid, fmt.Errorf("key for indexing must be ast.TypeInt, is %s", keyType)
return nil, ast.TypeInvalid, fmt.Errorf("key for indexing list %q must be an int, is %s", variableName, keyType)
}
return v.evalListIndex(variableName, target, key)
case ast.TypeMap:
if keyType != ast.TypeString {
return nil, ast.TypeInvalid, fmt.Errorf("key for indexing map %q must be a string, is %s", variableName, keyType)
}
return v.evalMapIndex(variableName, target, key)
default:
return nil, ast.TypeInvalid, fmt.Errorf("target %q for indexing must be ast.TypeList or ast.TypeMap, is %s", variableName, targetType)
}
}
func (v *evalIndex) evalListIndex(variableName string, target interface{}, key interface{}) (interface{}, ast.Type, error) {
// We assume type checking was already done and we can assume that target
// is a list and key is an int
list, ok := target.([]ast.Variable)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf("cannot cast target to []Variable")
@ -224,7 +311,7 @@ func (v *evalIndex) Eval(scope ast.Scope, stack *ast.Stack) (interface{}, ast.Ty
}
if keyInt < 0 || len(list) < keyInt+1 {
return nil, ast.TypeInvalid, fmt.Errorf("index %d out of range (max %d)", keyInt, len(list))
return nil, ast.TypeInvalid, fmt.Errorf("index %d out of range for list %s (max %d)", keyInt, variableName, len(list))
}
returnVal := list[keyInt].Value
@ -233,9 +320,34 @@ func (v *evalIndex) Eval(scope ast.Scope, stack *ast.Stack) (interface{}, ast.Ty
return returnVal, returnType, nil
}
type evalConcat struct{ *ast.Concat }
func (v *evalIndex) evalMapIndex(variableName string, target interface{}, key interface{}) (interface{}, ast.Type, error) {
// We assume type checking was already done and we can assume that target
// is a map and key is a string
vmap, ok := target.(map[string]ast.Variable)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf("cannot cast target to map[string]Variable")
}
func (v *evalConcat) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
keyString, ok := key.(string)
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf("cannot cast key to string")
}
if len(vmap) == 0 {
return nil, ast.TypeInvalid, fmt.Errorf("map is empty")
}
value, ok := vmap[keyString]
if !ok {
return nil, ast.TypeInvalid, fmt.Errorf("key %q does not exist in map %s", keyString, variableName)
}
return value.Value, value.Type, nil
}
type evalOutput struct{ *ast.Output }
func (v *evalOutput) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type, error) {
// The expressions should all be on the stack in reverse
// order. So pop them off, reverse their order, and concatenate.
nodes := make([]*ast.LiteralNode, 0, len(v.Exprs))
@ -243,6 +355,15 @@ func (v *evalConcat) Eval(s ast.Scope, stack *ast.Stack) (interface{}, ast.Type,
nodes = append(nodes, stack.Pop().(*ast.LiteralNode))
}
// Special case the single list and map
if len(nodes) == 1 && nodes[0].Typex == ast.TypeList {
return nodes[0].Value, ast.TypeList, nil
}
if len(nodes) == 1 && nodes[0].Typex == ast.TypeMap {
return nodes[0].Value, ast.TypeMap, nil
}
// Otherwise concatenate the strings
var buf bytes.Buffer
for i := len(nodes) - 1; i >= 0; i-- {
buf.WriteString(nodes[i].Value.(string))
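Combined with the EvaluationResult plumbing earlier in this file, a bare list interpolation now comes back as a native Go slice rather than being forced through string concatenation. A hedged sketch (again assuming hil.Parse and a BasicScope-backed EvalConfig, which are outside this diff):

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/hil"
	"github.com/hashicorp/hil/ast"
)

func main() {
	// A bare list interpolation evaluates to a list, not a string.
	tree, err := hil.Parse("${var.ports}")
	if err != nil {
		log.Fatal(err)
	}

	config := &hil.EvalConfig{
		GlobalScope: &ast.BasicScope{
			VarMap: map[string]ast.Variable{
				"var.ports": {
					Type: ast.TypeList,
					Value: []ast.Variable{
						{Type: ast.TypeString, Value: "80"},
						{Type: ast.TypeString, Value: "443"},
					},
				},
			},
		},
	}

	res, err := hil.Eval(tree, config)
	if err != nil {
		log.Fatal(err)
	}
	if res.Type == hil.TypeList {
		fmt.Println(res.Value.([]interface{})) // [80 443]
	}
}
```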

vendor/github.com/hashicorp/hil/evaltype_string.go (generated, vendored, new file: 34 lines)

@ -0,0 +1,34 @@
// Code generated by "stringer -type=EvalType"; DO NOT EDIT
package hil
import "fmt"
const (
_EvalType_name_0 = "TypeInvalid"
_EvalType_name_1 = "TypeString"
_EvalType_name_2 = "TypeList"
_EvalType_name_3 = "TypeMap"
)
var (
_EvalType_index_0 = [...]uint8{0, 11}
_EvalType_index_1 = [...]uint8{0, 10}
_EvalType_index_2 = [...]uint8{0, 8}
_EvalType_index_3 = [...]uint8{0, 7}
)
func (i EvalType) String() string {
switch {
case i == 0:
return _EvalType_name_0
case i == 2:
return _EvalType_name_1
case i == 4:
return _EvalType_name_2
case i == 8:
return _EvalType_name_3
default:
return fmt.Sprintf("EvalType(%d)", i)
}
}


@ -44,17 +44,17 @@ top:
{
parserResult = $1
// We want to make sure that the top value is always a Concat
// so that the return value is always a string type from an
// We want to make sure that the top value is always an Output
// so that the return value is always a string, list or map from an
// interpolation.
//
// The logic for checking for a LiteralNode is a little annoying
// because functionally the AST is the same, but we do that because
// it makes for an easy literal check later (to check if a string
// has any interpolations).
if _, ok := $1.(*ast.Concat); !ok {
if _, ok := $1.(*ast.Output); !ok {
if n, ok := $1.(*ast.LiteralNode); !ok || n.Typex != ast.TypeString {
parserResult = &ast.Concat{
parserResult = &ast.Output{
Exprs: []ast.Node{$1},
Posx: $1.Pos(),
}
@@ -70,13 +70,13 @@ literalModeTop:
| literalModeTop literalModeValue
{
var result []ast.Node
if c, ok := $1.(*ast.Concat); ok {
if c, ok := $1.(*ast.Output); ok {
result = append(c.Exprs, $2)
} else {
result = []ast.Node{$1, $2}
}
$$ = &ast.Concat{
$$ = &ast.Output{
Exprs: result,
Posx: result[0].Pos(),
}
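
The literal check referred to in the comment above is what downstream callers use to tell a plain string from one that needs interpolation: input without `${...}` stays a bare `*ast.LiteralNode`, anything else is wrapped in `*ast.Output`. A minimal sketch, with a hypothetical `hasInterpolation` helper:

```go
package main

import (
  "fmt"
  "log"

  "github.com/hashicorp/hil"
  "github.com/hashicorp/hil/ast"
)

// hasInterpolation reports whether a template contains any interpolation,
// relying on the top rule above leaving pure literals unwrapped.
func hasInterpolation(s string) bool {
  root, err := hil.Parse(s)
  if err != nil {
    log.Fatal(err)
  }
  _, isLiteral := root.(*ast.LiteralNode)
  return !isLiteral
}

func main() {
  fmt.Println(hasInterpolation("just a plain string")) // false
  fmt.Println(hasInterpolation("hello ${name}"))       // true
}
```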

vendor/github.com/hashicorp/hil/transform_fixed.go vendored
View file

@@ -14,7 +14,7 @@ func FixedValueTransform(root ast.Node, Value *ast.LiteralNode) ast.Node {
// We visit the nodes in top-down order
result := root
switch n := result.(type) {
case *ast.Concat:
case *ast.Output:
for i, v := range n.Exprs {
n.Exprs[i] = FixedValueTransform(v, Value)
}
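
For context, `FixedValueTransform` walks the tree top-down, recursing into `Output` nodes as shown here; the rest of the function (not in this hunk) keeps literal nodes and replaces everything else with the supplied fixed literal. A hypothetical usage sketch, with the template and the "TMP" placeholder as assumptions:

```go
package main

import (
  "fmt"
  "log"

  "github.com/hashicorp/hil"
  "github.com/hashicorp/hil/ast"
)

func main() {
  root, err := hil.Parse("name-${foo}-${bar}")
  if err != nil {
    log.Fatal(err)
  }

  // Swap every non-literal node for a fixed placeholder literal.
  fixed := hil.FixedValueTransform(root, &ast.LiteralNode{
    Value: "TMP",
    Typex: ast.TypeString,
  })

  // The transformed tree now evaluates without any variables in scope.
  res, err := hil.Eval(fixed, &hil.EvalConfig{})
  if err != nil {
    log.Fatal(err)
  }
  fmt.Println(res.Value) // name-TMP-TMP
}
```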

49
vendor/github.com/hashicorp/hil/y.go generated vendored
View file

@@ -55,7 +55,7 @@ var parserStatenames = [...]string{}
const parserEofCode = 1
const parserErrCode = 2
const parserMaxDepth = 200
const parserInitialStackSize = 16
//line lang.y:196
@@ -157,18 +157,17 @@ type parserParser interface {
}
type parserParserImpl struct {
lookahead func() int
lval parserSymType
stack [parserInitialStackSize]parserSymType
char int
}
func (p *parserParserImpl) Lookahead() int {
return p.lookahead()
return p.char
}
func parserNewParser() parserParser {
p := &parserParserImpl{
lookahead: func() int { return -1 },
}
return p
return &parserParserImpl{}
}
const parserFlag = -1000
@@ -296,22 +295,20 @@ func parserParse(parserlex parserLexer) int {
func (parserrcvr *parserParserImpl) Parse(parserlex parserLexer) int {
var parsern int
var parserlval parserSymType
var parserVAL parserSymType
var parserDollar []parserSymType
_ = parserDollar // silence set and not used
parserS := make([]parserSymType, parserMaxDepth)
parserS := parserrcvr.stack[:]
Nerrs := 0 /* number of errors */
Errflag := 0 /* error recovery flag */
parserstate := 0
parserchar := -1
parsertoken := -1 // parserchar translated into internal numbering
parserrcvr.lookahead = func() int { return parserchar }
parserrcvr.char = -1
parsertoken := -1 // parserrcvr.char translated into internal numbering
defer func() {
// Make sure we report no lookahead when not parsing.
parserstate = -1
parserchar = -1
parserrcvr.char = -1
parsertoken = -1
}()
parserp := -1
@@ -343,8 +340,8 @@ parsernewstate:
if parsern <= parserFlag {
goto parserdefault /* simple state */
}
if parserchar < 0 {
parserchar, parsertoken = parserlex1(parserlex, &parserlval)
if parserrcvr.char < 0 {
parserrcvr.char, parsertoken = parserlex1(parserlex, &parserrcvr.lval)
}
parsern += parsertoken
if parsern < 0 || parsern >= parserLast {
@@ -352,9 +349,9 @@ parsernewstate:
}
parsern = parserAct[parsern]
if parserChk[parsern] == parsertoken { /* valid shift */
parserchar = -1
parserrcvr.char = -1
parsertoken = -1
parserVAL = parserlval
parserVAL = parserrcvr.lval
parserstate = parsern
if Errflag > 0 {
Errflag--
@@ -366,8 +363,8 @@ parserdefault:
/* default state action */
parsern = parserDef[parserstate]
if parsern == -2 {
if parserchar < 0 {
parserchar, parsertoken = parserlex1(parserlex, &parserlval)
if parserrcvr.char < 0 {
parserrcvr.char, parsertoken = parserlex1(parserlex, &parserrcvr.lval)
}
/* look through exception table */
@@ -430,7 +427,7 @@ parserdefault:
if parsertoken == parserEofCode {
goto ret1
}
parserchar = -1
parserrcvr.char = -1
parsertoken = -1
goto parsernewstate /* try again in the same state */
}
@@ -487,17 +484,17 @@ parserdefault:
{
parserResult = parserDollar[1].node
// We want to make sure that the top value is always a Concat
// so that the return value is always a string type from an
// We want to make sure that the top value is always an Output
// so that the return value is always a string, list, or map from an
// interpolation.
//
// The logic for checking for a LiteralNode is a little annoying
// because functionally the AST is the same, but we do that because
// it makes for an easy literal check later (to check if a string
// has any interpolations).
if _, ok := parserDollar[1].node.(*ast.Concat); !ok {
if _, ok := parserDollar[1].node.(*ast.Output); !ok {
if n, ok := parserDollar[1].node.(*ast.LiteralNode); !ok || n.Typex != ast.TypeString {
parserResult = &ast.Concat{
parserResult = &ast.Output{
Exprs: []ast.Node{parserDollar[1].node},
Posx: parserDollar[1].node.Pos(),
}
@@ -515,13 +512,13 @@ parserdefault:
//line lang.y:71
{
var result []ast.Node
if c, ok := parserDollar[1].node.(*ast.Concat); ok {
if c, ok := parserDollar[1].node.(*ast.Output); ok {
result = append(c.Exprs, parserDollar[2].node)
} else {
result = []ast.Node{parserDollar[1].node, parserDollar[2].node}
}
parserVAL.node = &ast.Concat{
parserVAL.node = &ast.Output{
Exprs: result,
Posx: result[0].Pos(),
}

48
vendor/vendor.json vendored
View file

@@ -252,52 +252,70 @@
"revisionTime": "2016-02-07T21:47:19Z"
},
{
"checksumSHA1": "ydHBPi04mEh+Tir+2JkpSIMckcw=",
"path": "github.com/hashicorp/hcl",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "IxyvRpCFeoJBGl2obLKJV7RCGjg=",
"path": "github.com/hashicorp/hcl/hcl/ast",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "l2oQxBsZRwn6eZjf+whXr8c9+8c=",
"path": "github.com/hashicorp/hcl/hcl/parser",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "vjhDQVlgHhdxml1V8/cj0vOe+j8=",
"path": "github.com/hashicorp/hcl/hcl/scanner",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "JlZmnzqdmFFyb1+2afLyR3BOE/8=",
"path": "github.com/hashicorp/hcl/hcl/strconv",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "c6yprzj06ASwCo18TtbbNNBHljA=",
"path": "github.com/hashicorp/hcl/hcl/token",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "jQ45CCc1ed/nlV7bbSnx6z72q1M=",
"path": "github.com/hashicorp/hcl/json/parser",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "S1e0F9ZKSnqgOLfjDTYazRL28tA=",
"path": "github.com/hashicorp/hcl/json/scanner",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "fNlXQCQEnb+B3k5UDL/r15xtSJY=",
"path": "github.com/hashicorp/hcl/json/token",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
},
{
"path": "github.com/hashicorp/hcl/testhelper",
"revision": "578dd9746824a54637686b51a41bad457a56bcef"
"revision": "d8c773c4cba11b11539e3d45f93daeaa5dcf1fa1",
"revisionTime": "2016-07-11T23:17:52Z"
},
{
"checksumSHA1": "kqCMCHy2b+RBMKC+ER+OPqp8C3E=",
"path": "github.com/hashicorp/hil",
"revision": "0457360d54ca4d081a769eaa1617e0462153fd70"
"revision": "1e86c6b523c55d1fa6c6e930ce80b548664c95c2",
"revisionTime": "2016-07-11T23:18:37Z"
},
{
"checksumSHA1": "UICubs001+Q4MsUf9zl2vcMzWQQ=",
"path": "github.com/hashicorp/hil/ast",
"revision": "0457360d54ca4d081a769eaa1617e0462153fd70"
"revision": "1e86c6b523c55d1fa6c6e930ce80b548664c95c2",
"revisionTime": "2016-07-11T23:18:37Z"
},
{
"path": "github.com/hashicorp/logutils",