Merge pull request #9936 from hashicorp/b-hcl2-task-env
Allow expressing `meta` and `env` blocks as map attributes as well.

`env` and `meta` should support arbitrary keys and values, yet HCL2 restricts block attribute keys to valid identifiers. For example, block attribute identifiers may not contain dots (`.`), which are frequently used in meta fields and occasionally in environment variable names. By adding attribute syntax support, we maintain backward compatibility and relax the key restrictions. This change attempts to parse `env`/`meta` both as an attribute and as a block.

Additionally, the change allows better expressivity for env/meta values, using functions and for expressions. For example, one can reuse a set of environment variables for multiple tasks through a local `common_envs` value:

```hcl
env = merge(local.common_envs, { "more_env_key" = "..." })
```

Note that the map/attribute compatibility workaround is currently a pattern we recommend for driver config blocks: https://www.nomadproject.io/docs/job-specification/hcl2#blocks . :( Sadly, that document isn't accurate, as only `meta` appearing inside driver config was handled in 1.0.1.

Closes https://github.com/hashicorp/nomad/issues/9606
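For illustration, here is a fuller sketch of how the attribute form could look in a jobspec; the `common_envs` local and all key/value names below are made up for the example:

```hcl
locals {
  # hypothetical set of environment variables shared by several tasks
  common_envs = {
    "LOG_LEVEL"     = "info"
    "statsd.prefix" = "myapp" # dotted keys work in attribute form
  }
}

job "example" {
  group "web" {
    task "server" {
      driver = "docker"
      config {}

      # attribute form: the right-hand side may be any expression
      env = merge(local.common_envs, { "EXTRA_FLAG" = "1" })

      # meta works the same way, including for expressions
      meta = { for r in ["us-east-1", "us-west-2"] : "region.${r}" => "enabled" }
    }
  }
}
```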
This commit is contained in: commit 4693d8f65c
@@ -298,7 +298,7 @@ func TestJobGetter_LocalFile_InvalidHCL2(t *testing.T) {
		{
			"invalid HCL2",
			`job "example" {
-			  meta = { "a" = "b" }
+			  meta { "key.with.dot" = "b" }
			}`,
			true,
		},
@@ -248,12 +248,19 @@ func decodeConstraint(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {

func decodeTaskGroup(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
	tg := val.(*api.TaskGroup)

	var diags hcl.Diagnostics

	metaAttr, body, moreDiags := decodeAsAttribute(body, ctx, "meta")
	diags = append(diags, moreDiags...)

	tgExtra := struct {
		Vault *api.Vault `hcl:"vault,block"`
	}{}

	extra, _ := gohcl.ImpliedBodySchema(tgExtra)
-	content, tgBody, diags := body.PartialContent(extra)
+	content, tgBody, moreDiags := body.PartialContent(extra)
	diags = append(diags, moreDiags...)
	if len(diags) != 0 {
		return diags
	}
@@ -270,6 +277,10 @@ func decodeTaskGroup(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
	d.RegisterBlockDecoder(reflect.TypeOf(api.Task{}), decodeTask)
	diags = d.DecodeBody(tgBody, ctx, tg)

	if metaAttr != nil {
		tg.Meta = metaAttr
	}

	if tgExtra.Vault != nil {
		for _, t := range tg.Tasks {
			if t.Vault == nil {
@@ -291,20 +302,102 @@ func decodeTaskGroup(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
func decodeTask(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics {
	// special case scaling policy
	t := val.(*api.Task)
-	b, remain, diags := body.PartialContent(&hcl.BodySchema{

	var diags hcl.Diagnostics

	// special case env and meta
	envAttr, body, moreDiags := decodeAsAttribute(body, ctx, "env")
	diags = append(diags, moreDiags...)
	metaAttr, body, moreDiags := decodeAsAttribute(body, ctx, "meta")
	diags = append(diags, moreDiags...)

+	b, remain, moreDiags := body.PartialContent(&hcl.BodySchema{
		Blocks: []hcl.BlockHeaderSchema{
			{Type: "scaling", LabelNames: []string{"name"}},
		},
	})

	diags = append(diags, moreDiags...)
	diags = append(diags, decodeTaskScalingPolicies(b.Blocks, ctx, t)...)

	decoder := newHCLDecoder()
	diags = append(diags, decoder.DecodeBody(remain, ctx, val)...)

	if envAttr != nil {
		t.Env = envAttr
	}
	if metaAttr != nil {
		t.Meta = metaAttr
	}

	return diags
}

// decodeAsAttribute decodes the named field as an attribute assignment if found.
//
// Nomad jobs contain attributes (e.g. `env`, `meta`) that are meant to contain arbitrary
// keys. HCLv1 allowed both block syntax (the preferred and documented one) as well as attribute
// assignment syntax:
//
// ```hcl
// # block assignment
// env {
//   ENV = "production"
// }
//
// # as attribute
// env = { ENV: "production" }
// ```
//
// HCLv2 block syntax, though, restricts valid input: block attribute keys must be valid
// identifiers and may not contain dots.
// Thus, we support both syntaxes so as not to restrict users.
//
// This function attempts to read the named field as an attribute, and returns the
// found map, the remaining body, and diagnostics. If the named field is found
// with block syntax, it returns a nil map, and the caller falls back to reading
// it with block syntax.
func decodeAsAttribute(body hcl.Body, ctx *hcl.EvalContext, name string) (map[string]string, hcl.Body, hcl.Diagnostics) {
	b, remain, diags := body.PartialContent(&hcl.BodySchema{
		Attributes: []hcl.AttributeSchema{
			{Name: name, Required: false},
		},
	})

	if diags.HasErrors() || b.Attributes[name] == nil {
		// ignore errors here to avoid duplicates; true errors will be
		// reported in the fallback path
		return nil, body, nil
	}

	attr := b.Attributes[name]

	if attr != nil {
		// check if there is another block
		bb, _, _ := remain.PartialContent(&hcl.BodySchema{
			Blocks: []hcl.BlockHeaderSchema{{Type: name}},
		})
		if len(bb.Blocks) != 0 {
			diags = diags.Append(&hcl.Diagnostic{
				Severity: hcl.DiagError,
				Summary:  fmt.Sprintf("Duplicate %v block", name),
				Detail: fmt.Sprintf("%v may not be defined more than once. Another definition is defined at %s.",
					name, attr.Range.String()),
				Subject: &bb.Blocks[0].DefRange,
			})
			return nil, remain, diags
		}
	}

	envExpr := attr.Expr

	result := map[string]string{}
	diags = append(diags, hclDecoder.DecodeExpression(envExpr, ctx, &result)...)

	return result, remain, diags
}

func decodeTaskScalingPolicies(blocks hcl.Blocks, ctx *hcl.EvalContext, task *api.Task) hcl.Diagnostics {
	if len(blocks) == 0 {
		return nil
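In jobspec terms, the decoder above accepts either spelling of `env` (and `meta`), but not both in the same task. A sketch, with made-up keys and values:

```hcl
job "example" {
  group "g" {
    task "block-form" {
      driver = "docker"
      config {}

      # block form: keys must be valid HCL2 identifiers
      env {
        PORT = "8080"
      }
    }

    task "attribute-form" {
      driver = "docker"
      config {}

      # attribute form: arbitrary keys, including dots, are accepted
      env = { "consul.token" = "..." }
    }
  }
}
```

Declaring `env` both as a block and as an attribute within a single task is rejected with the "Duplicate env block" diagnostic produced above.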
@@ -3,6 +3,7 @@ package jobspec2
import (
	"io/ioutil"
	"os"
	"strings"
	"testing"

	"github.com/hashicorp/nomad/api"
@@ -194,6 +195,7 @@ job "example" {

	require.Equal(t, meta, out.Meta)
}

func TestParse_Locals(t *testing.T) {
	hcl := `
variables {
@@ -629,3 +631,194 @@ job "job-webserver" {
		})
	}
}

func TestParse_TaskEnvs(t *testing.T) {
	cases := []struct {
		name       string
		envSnippet string
		expected   map[string]string
	}{
		{
			"none",
			``,
			nil,
		},
		{
			"block",
			`
			env {
				key = "value"
			} `,
			map[string]string{"key": "value"},
		},
		{
			"attribute",
			`
			env = {
				"key.dot" = "val1"
				key_unquoted_without_dot = "val2"
			} `,
			map[string]string{"key.dot": "val1", "key_unquoted_without_dot": "val2"},
		},
		{
			"attribute_colons",
			`env = {
				"key.dot" : "val1"
				key_unquoted_without_dot : "val2"
			} `,
			map[string]string{"key.dot": "val1", "key_unquoted_without_dot": "val2"},
		},
		{
			"attribute_empty",
			`env = {}`,
			map[string]string{},
		},
		{
			"attribute_expression",
			`env = {for k in ["a", "b"]: k => "val-${k}" }`,
			map[string]string{"a": "val-a", "b": "val-b"},
		},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			hcl := `
job "example" {
  group "group" {
    task "task" {
      driver = "docker"
      config {}

      ` + c.envSnippet + `
    }
  }
}`

			out, err := ParseWithConfig(&ParseConfig{
				Path: "input.hcl",
				Body: []byte(hcl),
			})
			require.NoError(t, err)

			require.Equal(t, c.expected, out.TaskGroups[0].Tasks[0].Env)
		})
	}
}

func TestParse_TaskEnvs_Multiple(t *testing.T) {
	hcl := `
job "example" {
  group "group" {
    task "task" {
      driver = "docker"
      config {}

      env = {"a": "b"}
      env {
        c = "d"
      }
    }
  }
}`

	_, err := ParseWithConfig(&ParseConfig{
		Path: "input.hcl",
		Body: []byte(hcl),
	})
	require.Error(t, err)
	require.Contains(t, err.Error(), "Duplicate env block")
}

func Test_TaskEnvs_Invalid(t *testing.T) {
	cases := []struct {
		name        string
		envSnippet  string
		expectedErr string
	}{
		{
			"attr: invalid expression",
			`env = { key = local.undefined_local }`,
			`does not have an attribute named "undefined_local"`,
		},
		{
			"block: invalid block expression",
			`env {
				for k in ["a", "b"]: k => k
			}`,
			"Invalid block definition",
		},
		{
			"attr: not make sense",
			`env = [ "a" ]`,
			"Unsuitable value: map of string required",
		},
	}

	for _, c := range cases {
		t.Run(c.name, func(t *testing.T) {
			hcl := `
job "example" {
  group "group" {
    task "task" {
      driver = "docker"
      config {}

      ` + c.envSnippet + `
    }
  }
}`
			_, err := ParseWithConfig(&ParseConfig{
				Path: "input.hcl",
				Body: []byte(hcl),
			})
			require.Error(t, err)
			require.Contains(t, err.Error(), c.expectedErr)
		})
	}
}

func TestParse_Meta_Alternatives(t *testing.T) {
	hcl := ` job "example" {
  group "group" {
    task "task" {
      driver = "config"
      config {}

      meta {
        source = "task"
      }
    }

    meta {
      source = "group"

    }
  }

  meta {
    source = "job"
  }
}
`

	asBlock, err := ParseWithConfig(&ParseConfig{
		Path: "input.hcl",
		Body: []byte(hcl),
	})
	require.NoError(t, err)

	hclAsAttr := strings.ReplaceAll(hcl, "meta {", "meta = {")
	require.Equal(t, 3, strings.Count(hclAsAttr, "meta = {"))

	asAttr, err := ParseWithConfig(&ParseConfig{
		Path: "input.hcl",
		Body: []byte(hclAsAttr),
	})
	require.NoError(t, err)

	require.Equal(t, asBlock, asAttr)
	require.Equal(t, map[string]string{"source": "job"}, asBlock.Meta)
	require.Equal(t, map[string]string{"source": "group"}, asBlock.TaskGroups[0].Meta)
	require.Equal(t, map[string]string{"source": "task"}, asBlock.TaskGroups[0].Tasks[0].Meta)
}
@@ -261,6 +261,9 @@ func (c *jobConfig) decodeJob(content *hcl.BodyContent, ctx *hcl.EvalContext) hcl.Diagnostics {

		c.JobID = b.Labels[0]

		metaAttr, body, mdiags := decodeAsAttribute(body, ctx, "meta")
		diags = append(diags, mdiags...)

		extra, remain, mdiags := body.PartialContent(&hcl.BodySchema{
			Blocks: []hcl.BlockHeaderSchema{
				{Type: "vault"},
@@ -271,6 +274,10 @@ func (c *jobConfig) decodeJob(content *hcl.BodyContent, ctx *hcl.EvalContext) hcl.Diagnostics {
		diags = append(diags, mdiags...)
		diags = append(diags, c.decodeTopLevelExtras(extra, ctx)...)
		diags = append(diags, hclDecoder.DecodeBody(remain, ctx, c.Job)...)

		if metaAttr != nil {
			c.Job.Meta = metaAttr
		}
	}

	if found == nil {