open-nomad/client/taskenv/env_test.go
Mahmood Ali 4e5d867644 client: stop using alloc.TaskResources
Now that alloc.Canonicalize() is called in all alloc sources in the
client (i.e. on state restore and RPC fetching), we no longer need to
check alloc.TaskResources.

alloc.AllocatedResources is always non-nil through alloc runner.
Though, early on, we check for alloc validity, so NewTaskRunner and
TaskEnv must still check.  `TestClient_AddAllocError` test validates
that behavior.
2020-01-09 09:25:07 -05:00

854 lines
25 KiB
Go

package taskenv
import (
"fmt"
"os"
"reflect"
"sort"
"strings"
"testing"
"github.com/hashicorp/hcl2/gohcl"
"github.com/hashicorp/hcl2/hcl"
"github.com/hashicorp/hcl2/hcl/hclsyntax"
"github.com/hashicorp/nomad/nomad/mock"
"github.com/hashicorp/nomad/nomad/structs"
"github.com/hashicorp/nomad/plugins/drivers"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
const (
// Node values that tests can rely on
metaKey = "instance"
metaVal = "t2-micro"
attrKey = "arch"
attrVal = "amd64"
nodeName = "test node"
nodeClass = "test class"
// Environment variable values that tests can rely on
envOneKey = "NOMAD_IP"
envOneVal = "127.0.0.1"
envTwoKey = "NOMAD_PORT_WEB"
envTwoVal = ":80"
)
var (
// portMap for use in tests as its set after Builder creation
portMap = map[string]int{
"https": 443,
}
)
// testEnvBuilder returns a Builder seeded with the deterministic node
// attributes, node meta, and task env vars declared in the test constants
// above, so individual tests can rely on known keys and values.
func testEnvBuilder() *Builder {
	node := mock.Node()
	node.Attributes = map[string]string{attrKey: attrVal}
	node.Meta = map[string]string{metaKey: metaVal}
	node.Name = nodeName
	node.NodeClass = nodeClass

	task := mock.Job().TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		envOneKey: envOneVal,
		envTwoKey: envTwoVal,
	}

	return NewBuilder(node, mock.Alloc(), task, "global")
}
// TestEnvironment_ParseAndReplace_Env asserts task env var references are
// interpolated, including when embedded in quoted text or concatenated.
func TestEnvironment_ParseAndReplace_Env(t *testing.T) {
	builder := testEnvBuilder()

	input := []string{
		fmt.Sprintf(`"${%v}"!`, envOneKey),
		fmt.Sprintf("${%s}${%s}", envOneKey, envTwoKey),
	}
	exp := []string{
		fmt.Sprintf(`"%s"!`, envOneVal),
		fmt.Sprintf("%s%s", envOneVal, envTwoVal),
	}

	act := builder.Build().ParseAndReplace(input)
	if !reflect.DeepEqual(act, exp) {
		t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
	}
}
func TestEnvironment_ParseAndReplace_Meta(t *testing.T) {
input := []string{fmt.Sprintf("${%v%v}", nodeMetaPrefix, metaKey)}
exp := []string{metaVal}
env := testEnvBuilder()
act := env.Build().ParseAndReplace(input)
if !reflect.DeepEqual(act, exp) {
t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
}
}
func TestEnvironment_ParseAndReplace_Attr(t *testing.T) {
input := []string{fmt.Sprintf("${%v%v}", nodeAttributePrefix, attrKey)}
exp := []string{attrVal}
env := testEnvBuilder()
act := env.Build().ParseAndReplace(input)
if !reflect.DeepEqual(act, exp) {
t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
}
}
func TestEnvironment_ParseAndReplace_Node(t *testing.T) {
input := []string{fmt.Sprintf("${%v}", nodeNameKey), fmt.Sprintf("${%v}", nodeClassKey)}
exp := []string{nodeName, nodeClass}
env := testEnvBuilder()
act := env.Build().ParseAndReplace(input)
if !reflect.DeepEqual(act, exp) {
t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
}
}
func TestEnvironment_ParseAndReplace_Mixed(t *testing.T) {
input := []string{
fmt.Sprintf("${%v}${%v%v}", nodeNameKey, nodeAttributePrefix, attrKey),
fmt.Sprintf("${%v}${%v%v}", nodeClassKey, nodeMetaPrefix, metaKey),
fmt.Sprintf("${%v}${%v}", envTwoKey, nodeClassKey),
}
exp := []string{
fmt.Sprintf("%v%v", nodeName, attrVal),
fmt.Sprintf("%v%v", nodeClass, metaVal),
fmt.Sprintf("%v%v", envTwoVal, nodeClass),
}
env := testEnvBuilder()
act := env.Build().ParseAndReplace(input)
if !reflect.DeepEqual(act, exp) {
t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
}
}
// TestEnvironment_ReplaceEnv_Mixed asserts ReplaceEnv interpolates a single
// string containing both a node-name and a node-attribute reference.
func TestEnvironment_ReplaceEnv_Mixed(t *testing.T) {
	input := fmt.Sprintf("${%v}${%v%v}", nodeNameKey, nodeAttributePrefix, attrKey)
	exp := fmt.Sprintf("%v%v", nodeName, attrVal)
	env := testEnvBuilder()
	act := env.Build().ReplaceEnv(input)
	if act != exp {
		// Fixed: the message previously said "ParseAndReplace" but this test
		// exercises ReplaceEnv.
		t.Fatalf("ReplaceEnv(%v) returned %#v; want %#v", input, act, exp)
	}
}
// TestEnvironment_AsList asserts List() produces the complete expected set of
// KEY=value environment entries -- network addrs/ports for this task and a
// sibling task, resource limits, and job/alloc/meta variables -- from an
// alloc that uses AllocatedResources.
func TestEnvironment_AsList(t *testing.T) {
	n := mock.Node()
	n.Meta = map[string]string{
		"metaKey": "metaVal",
	}
	a := mock.Alloc()
	// Give the "web" task a network with one reserved and one dynamic port.
	a.AllocatedResources.Tasks["web"].Networks[0] = &structs.NetworkResource{
		Device:        "eth0",
		IP:            "127.0.0.1",
		ReservedPorts: []structs.Port{{Label: "https", Value: 8080}},
		MBits:         50,
		DynamicPorts:  []structs.Port{{Label: "http", Value: 80}},
	}
	// Add a sibling "ssh" task so cross-task NOMAD_*_ssh_* vars are emitted.
	a.AllocatedResources.Tasks["ssh"] = &structs.AllocatedTaskResources{
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				MBits:  50,
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
			},
		},
	}
	// Non-default namespace should surface in NOMAD_NAMESPACE below.
	a.Namespace = "not-default"
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"taskEnvKey": "taskEnvVal",
	}
	// The driver port map remaps the "https" container port to 443, so
	// NOMAD_PORT_https=443 while NOMAD_HOST_PORT_https stays 8080.
	env := NewBuilder(n, a, task, "global").SetDriverNetwork(
		&drivers.DriverNetwork{PortMap: map[string]int{"https": 443}},
	)

	act := env.Build().List()
	exp := []string{
		"taskEnvKey=taskEnvVal",
		"NOMAD_ADDR_http=127.0.0.1:80",
		"NOMAD_PORT_http=80",
		"NOMAD_IP_http=127.0.0.1",
		"NOMAD_ADDR_https=127.0.0.1:8080",
		"NOMAD_PORT_https=443",
		"NOMAD_IP_https=127.0.0.1",
		"NOMAD_HOST_PORT_http=80",
		"NOMAD_HOST_PORT_https=8080",
		"NOMAD_TASK_NAME=web",
		"NOMAD_GROUP_NAME=web",
		"NOMAD_ADDR_ssh_other=192.168.0.100:1234",
		"NOMAD_ADDR_ssh_ssh=192.168.0.100:22",
		"NOMAD_IP_ssh_other=192.168.0.100",
		"NOMAD_IP_ssh_ssh=192.168.0.100",
		"NOMAD_PORT_ssh_other=1234",
		"NOMAD_PORT_ssh_ssh=22",
		"NOMAD_CPU_LIMIT=500",
		"NOMAD_DC=dc1",
		"NOMAD_NAMESPACE=not-default",
		"NOMAD_REGION=global",
		"NOMAD_MEMORY_LIMIT=256",
		"NOMAD_META_ELB_CHECK_INTERVAL=30s",
		"NOMAD_META_ELB_CHECK_MIN=3",
		"NOMAD_META_ELB_CHECK_TYPE=http",
		"NOMAD_META_FOO=bar",
		"NOMAD_META_OWNER=armon",
		"NOMAD_META_elb_check_interval=30s",
		"NOMAD_META_elb_check_min=3",
		"NOMAD_META_elb_check_type=http",
		"NOMAD_META_foo=bar",
		"NOMAD_META_owner=armon",
		"NOMAD_JOB_NAME=my-job",
		fmt.Sprintf("NOMAD_ALLOC_ID=%s", a.ID),
		"NOMAD_ALLOC_INDEX=0",
	}
	// Sort both sides so the comparison is order-independent.
	sort.Strings(act)
	sort.Strings(exp)
	require.Equal(t, exp, act)
}
// COMPAT(0.11): Remove in 0.11
//
// TestEnvironment_AsList_Old asserts List() produces the same env vars as the
// modern path when the alloc only carries legacy alloc.Resources and
// alloc.TaskResources (Nomad <= 0.8 shape) and AllocatedResources is nil,
// relying on Canonicalize() to upgrade the alloc.
func TestEnvironment_AsList_Old(t *testing.T) {
	n := mock.Node()
	n.Meta = map[string]string{
		"metaKey": "metaVal",
	}
	a := mock.Alloc()
	// Strip the modern resource struct to force the legacy code path.
	a.AllocatedResources = nil
	a.Resources = &structs.Resources{
		CPU:      500,
		MemoryMB: 256,
		DiskMB:   150,
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
				MBits:        50,
				DynamicPorts: []structs.Port{{Label: "http", Value: 2000}},
			},
		},
	}
	// Legacy per-task resources for the task under test ("web")...
	a.TaskResources = map[string]*structs.Resources{
		"web": {
			CPU:      500,
			MemoryMB: 256,
			Networks: []*structs.NetworkResource{
				{
					Device:        "eth0",
					IP:            "127.0.0.1",
					ReservedPorts: []structs.Port{{Label: "https", Value: 8080}},
					MBits:         50,
					DynamicPorts:  []structs.Port{{Label: "http", Value: 80}},
				},
			},
		},
	}
	// ...and a sibling "ssh" task to exercise cross-task vars.
	a.TaskResources["ssh"] = &structs.Resources{
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				MBits:  50,
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
			},
		},
	}

	// simulate canonicalization on restore or fetch
	a.Canonicalize()

	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"taskEnvKey": "taskEnvVal",
	}
	task.Resources.Networks = []*structs.NetworkResource{
		// Nomad 0.8 didn't fully populate the fields in task Resource Networks
		{
			IP:            "",
			ReservedPorts: []structs.Port{{Label: "https"}},
			DynamicPorts:  []structs.Port{{Label: "http"}},
		},
	}
	// Driver port map remaps "https" to container port 443.
	env := NewBuilder(n, a, task, "global").SetDriverNetwork(
		&drivers.DriverNetwork{PortMap: map[string]int{"https": 443}},
	)

	act := env.Build().List()
	exp := []string{
		"taskEnvKey=taskEnvVal",
		"NOMAD_ADDR_http=127.0.0.1:80",
		"NOMAD_PORT_http=80",
		"NOMAD_IP_http=127.0.0.1",
		"NOMAD_ADDR_https=127.0.0.1:8080",
		"NOMAD_PORT_https=443",
		"NOMAD_IP_https=127.0.0.1",
		"NOMAD_HOST_PORT_http=80",
		"NOMAD_HOST_PORT_https=8080",
		"NOMAD_TASK_NAME=web",
		"NOMAD_GROUP_NAME=web",
		"NOMAD_ADDR_ssh_other=192.168.0.100:1234",
		"NOMAD_ADDR_ssh_ssh=192.168.0.100:22",
		"NOMAD_IP_ssh_other=192.168.0.100",
		"NOMAD_IP_ssh_ssh=192.168.0.100",
		"NOMAD_PORT_ssh_other=1234",
		"NOMAD_PORT_ssh_ssh=22",
		"NOMAD_CPU_LIMIT=500",
		"NOMAD_DC=dc1",
		"NOMAD_NAMESPACE=default",
		"NOMAD_REGION=global",
		"NOMAD_MEMORY_LIMIT=256",
		"NOMAD_META_ELB_CHECK_INTERVAL=30s",
		"NOMAD_META_ELB_CHECK_MIN=3",
		"NOMAD_META_ELB_CHECK_TYPE=http",
		"NOMAD_META_FOO=bar",
		"NOMAD_META_OWNER=armon",
		"NOMAD_META_elb_check_interval=30s",
		"NOMAD_META_elb_check_min=3",
		"NOMAD_META_elb_check_type=http",
		"NOMAD_META_foo=bar",
		"NOMAD_META_owner=armon",
		"NOMAD_JOB_NAME=my-job",
		fmt.Sprintf("NOMAD_ALLOC_ID=%s", a.ID),
		"NOMAD_ALLOC_INDEX=0",
	}
	// Sort both sides so the comparison is order-independent.
	sort.Strings(act)
	sort.Strings(exp)
	require.Equal(t, exp, act)
}
// TestEnvironment_AllValues asserts AllValues() exposes the full set of
// interpolable values -- node attrs/meta, task env, NOMAD_* vars, group
// network ports, and the 0.9-style nested node.*/env[...] forms -- and that
// keys which cannot be nested (leading/trailing/double dots) are reported as
// errors rather than silently dropped. Each expected key is verified by
// actually interpolating it through an HCL2 expression.
func TestEnvironment_AllValues(t *testing.T) {
	t.Parallel()

	n := mock.Node()
	n.Meta = map[string]string{
		"metaKey":           "metaVal",
		"nested.meta.key":   "a",
		"invalid...metakey": "b",
	}

	a := mock.ConnectAlloc()
	// Give the "web" task a network with one reserved and one dynamic port.
	a.AllocatedResources.Tasks["web"].Networks[0] = &structs.NetworkResource{
		Device:        "eth0",
		IP:            "127.0.0.1",
		ReservedPorts: []structs.Port{{Label: "https", Value: 8080}},
		MBits:         50,
		DynamicPorts:  []structs.Port{{Label: "http", Value: 80}},
	}
	// Add a sibling "ssh" task so cross-task NOMAD_*_ssh_* vars are emitted.
	a.AllocatedResources.Tasks["ssh"] = &structs.AllocatedTaskResources{
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				MBits:  50,
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
			},
		},
	}

	sharedNet := a.AllocatedResources.Shared.Networks[0]

	// Add group network port with only a host port.
	sharedNet.DynamicPorts = append(sharedNet.DynamicPorts, structs.Port{
		Label: "hostonly",
		Value: 9998,
	})

	// Add group network reserved port with a To value.
	sharedNet.ReservedPorts = append(sharedNet.ReservedPorts, structs.Port{
		Label: "static",
		Value: 9997,
		To:    97,
	})

	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"taskEnvKey":        "taskEnvVal",
		"nested.task.key":   "x",
		"invalid...taskkey": "y",
		".a":                "a",
		"b.":                "b",
		".":                 "c",
	}
	env := NewBuilder(n, a, task, "global").SetDriverNetwork(
		&drivers.DriverNetwork{PortMap: map[string]int{"https": 443}},
	)

	values, errs, err := env.Build().AllValues()
	require.NoError(t, err)

	// Assert the keys we couldn't nest were reported
	require.Len(t, errs, 5)
	require.Contains(t, errs, "invalid...taskkey")
	require.Contains(t, errs, "meta.invalid...metakey")
	require.Contains(t, errs, ".a")
	require.Contains(t, errs, "b.")
	require.Contains(t, errs, ".")

	exp := map[string]string{
		// Node
		"node.unique.id":   n.ID,
		"node.region":      "global",
		"node.datacenter":  n.Datacenter,
		"node.unique.name": n.Name,
		"node.class":       n.NodeClass,
		"meta.metaKey":     "metaVal",
		"attr.arch":        "x86",
		"attr.driver.exec": "1",
		"attr.driver.mock_driver": "1",
		"attr.kernel.name":        "linux",
		"attr.nomad.version":      "0.5.0",

		// 0.9 style meta and attr
		"node.meta.metaKey":            "metaVal",
		"node.attr.arch":               "x86",
		"node.attr.driver.exec":        "1",
		"node.attr.driver.mock_driver": "1",
		"node.attr.kernel.name":        "linux",
		"node.attr.nomad.version":      "0.5.0",

		// Env
		"taskEnvKey":                    "taskEnvVal",
		"NOMAD_ADDR_http":               "127.0.0.1:80",
		"NOMAD_PORT_http":               "80",
		"NOMAD_IP_http":                 "127.0.0.1",
		"NOMAD_ADDR_https":              "127.0.0.1:8080",
		"NOMAD_PORT_https":              "443",
		"NOMAD_IP_https":                "127.0.0.1",
		"NOMAD_HOST_PORT_http":          "80",
		"NOMAD_HOST_PORT_https":         "8080",
		"NOMAD_TASK_NAME":               "web",
		"NOMAD_GROUP_NAME":              "web",
		"NOMAD_ADDR_ssh_other":          "192.168.0.100:1234",
		"NOMAD_ADDR_ssh_ssh":            "192.168.0.100:22",
		"NOMAD_IP_ssh_other":            "192.168.0.100",
		"NOMAD_IP_ssh_ssh":              "192.168.0.100",
		"NOMAD_PORT_ssh_other":          "1234",
		"NOMAD_PORT_ssh_ssh":            "22",
		"NOMAD_CPU_LIMIT":               "500",
		"NOMAD_DC":                      "dc1",
		"NOMAD_NAMESPACE":               "default",
		"NOMAD_REGION":                  "global",
		"NOMAD_MEMORY_LIMIT":            "256",
		"NOMAD_META_ELB_CHECK_INTERVAL": "30s",
		"NOMAD_META_ELB_CHECK_MIN":      "3",
		"NOMAD_META_ELB_CHECK_TYPE":     "http",
		"NOMAD_META_FOO":                "bar",
		"NOMAD_META_OWNER":              "armon",
		"NOMAD_META_elb_check_interval": "30s",
		"NOMAD_META_elb_check_min":      "3",
		"NOMAD_META_elb_check_type":     "http",
		"NOMAD_META_foo":                "bar",
		"NOMAD_META_owner":              "armon",
		"NOMAD_JOB_NAME":                "my-job",
		"NOMAD_ALLOC_ID":                a.ID,
		"NOMAD_ALLOC_INDEX":             "0",
		"NOMAD_PORT_connect_proxy_testconnect":      "9999",
		"NOMAD_HOST_PORT_connect_proxy_testconnect": "9999",
		"NOMAD_PORT_hostonly":                       "9998",
		"NOMAD_HOST_PORT_hostonly":                  "9998",
		"NOMAD_PORT_static":                         "97",
		"NOMAD_HOST_PORT_static":                    "9997",

		// 0.9 style env map
		`env["taskEnvKey"]`:        "taskEnvVal",
		`env["NOMAD_ADDR_http"]`:   "127.0.0.1:80",
		`env["nested.task.key"]`:   "x",
		`env["invalid...taskkey"]`: "y",
		`env[".a"]`:                "a",
		`env["b."]`:                "b",
		`env["."]`:                 "c",
	}

	evalCtx := &hcl.EvalContext{
		Variables: values,
	}

	for k, expectedVal := range exp {
		t.Run(k, func(t *testing.T) {
			// Parse HCL containing the test key
			hclStr := fmt.Sprintf(`"${%s}"`, k)
			expr, diag := hclsyntax.ParseExpression([]byte(hclStr), "test.hcl", hcl.Pos{})
			require.Empty(t, diag)

			// Decode with the TaskEnv values
			out := ""
			diag = gohcl.DecodeExpression(expr, evalCtx, &out)
			require.Empty(t, diag)

			// Fixed: require.Equal takes (expected, actual); the arguments
			// were previously swapped, mislabeling failure output.
			require.Equal(t, expectedVal, out)
		})
	}
}
// TestEnvironment_VaultToken asserts the Vault token and namespace are only
// exposed as VAULT_TOKEN / VAULT_NAMESPACE env vars when SetVaultToken is
// called with env=true.
func TestEnvironment_VaultToken(t *testing.T) {
	n := mock.Node()
	a := mock.Alloc()
	env := NewBuilder(n, a, a.Job.TaskGroups[0].Tasks[0], "global")
	env.SetVaultToken("123", "vault-namespace", false)

	// env=false: neither token nor namespace may leak into the environment.
	{
		act := env.Build().All()
		if act[VaultToken] != "" {
			t.Fatalf("Unexpected environment variables: %s=%q", VaultToken, act[VaultToken])
		}
		if act[VaultNamespace] != "" {
			t.Fatalf("Unexpected environment variables: %s=%q", VaultNamespace, act[VaultNamespace])
		}
	}

	// env=true with an empty namespace: token is set, namespace is omitted.
	{
		act := env.SetVaultToken("123", "", true).Build().List()
		exp := "VAULT_TOKEN=123"
		found := false
		foundNs := false
		for _, entry := range act {
			if entry == exp {
				found = true
			}
			if strings.HasPrefix(entry, "VAULT_NAMESPACE=") {
				foundNs = true
			}
		}
		if !found {
			t.Fatalf("did not find %q in:\n%s", exp, strings.Join(act, "\n"))
		}
		if foundNs {
			t.Fatalf("found unwanted VAULT_NAMESPACE in:\n%s", strings.Join(act, "\n"))
		}
	}

	// env=true with a namespace: both token and namespace are set.
	{
		act := env.SetVaultToken("123", "vault-namespace", true).Build().List()
		exp := "VAULT_TOKEN=123"
		expNs := "VAULT_NAMESPACE=vault-namespace"
		found := false
		foundNs := false
		for _, entry := range act {
			if entry == exp {
				found = true
			}
			if entry == expNs {
				foundNs = true
			}
		}
		if !found {
			t.Fatalf("did not find %q in:\n%s", exp, strings.Join(act, "\n"))
		}
		if !foundNs {
			t.Fatalf("did not find %q in:\n%s", expNs, strings.Join(act, "\n"))
		}
	}
}
// TestEnvironment_Envvars asserts task-defined env vars are passed through
// verbatim into the built environment.
func TestEnvironment_Envvars(t *testing.T) {
	wantEnv := map[string]string{"foo": "baz", "bar": "bang"}
	node := mock.Node()
	alloc := mock.Alloc()
	task := alloc.Job.TaskGroups[0].Tasks[0]
	task.Env = wantEnv

	net := &drivers.DriverNetwork{PortMap: portMap}
	built := NewBuilder(node, alloc, task, "global").SetDriverNetwork(net).Build().All()

	for key, want := range wantEnv {
		got, ok := built[key]
		if !ok {
			t.Fatalf("missing %q in %#v", key, built)
		}
		if want != got {
			t.Fatalf("expected %s=%q but found %q", key, want, got)
		}
	}
}
// TestEnvironment_HookVars asserts hook env vars are LWW and deletes of later
// writes allow earlier hook's values to be visible.
func TestEnvironment_HookVars(t *testing.T) {
	n := mock.Node()
	a := mock.Alloc()
	builder := NewBuilder(n, a, a.Job.TaskGroups[0].Tasks[0], "global")

	// Add vars from two hooks and assert the second one wins on
	// conflicting keys.
	builder.SetHookEnv("hookA", map[string]string{
		"foo": "bar",
		"baz": "quux",
	})
	builder.SetHookEnv("hookB", map[string]string{
		"foo":   "123",
		"hookB": "wins",
	})

	{
		out := builder.Build().All()
		// hookB overwrote "foo"; hookA's non-conflicting "baz" survives.
		assert.Equal(t, "123", out["foo"])
		assert.Equal(t, "quux", out["baz"])
		assert.Equal(t, "wins", out["hookB"])
	}

	// Asserting overwriting hook vars allows the first hooks original
	// value to be used.
	builder.SetHookEnv("hookB", nil)
	{
		out := builder.Build().All()
		// With hookB cleared, hookA's original "foo" is visible again and
		// hookB's unique key is gone entirely.
		assert.Equal(t, "bar", out["foo"])
		assert.Equal(t, "quux", out["baz"])
		assert.NotContains(t, out, "hookB")
	}
}
// TestEnvironment_DeviceHookVars asserts device hook env vars are accessible
// separately.
func TestEnvironment_DeviceHookVars(t *testing.T) {
	require := require.New(t)
	n := mock.Node()
	a := mock.Alloc()
	builder := NewBuilder(n, a, a.Job.TaskGroups[0].Tasks[0], "global")

	// Set vars from a regular hook and from a device hook; only the device
	// hook's vars should appear in DeviceEnv(), while both appear in Map().
	builder.SetHookEnv("hookA", map[string]string{
		"foo": "bar",
		"baz": "quux",
	})
	builder.SetDeviceHookEnv("devices", map[string]string{
		"hook": "wins",
	})

	b := builder.Build()
	deviceEnv := b.DeviceEnv()
	// DeviceEnv contains only the device hook's single var.
	require.Len(deviceEnv, 1)
	require.Contains(deviceEnv, "hook")

	// The full map still includes the regular hook's vars.
	all := b.Map()
	require.Contains(all, "foo")
}
// TestEnvironment_Interpolate asserts node class and node attribute
// references inside task env values are interpolated into the final list.
func TestEnvironment_Interpolate(t *testing.T) {
	node := mock.Node()
	node.Attributes["arch"] = "x86"
	node.NodeClass = "test class"

	alloc := mock.Alloc()
	task := alloc.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{"test": "${node.class}", "test2": "${attr.arch}"}

	env := NewBuilder(node, alloc, task, "global").Build()

	exp := []string{
		fmt.Sprintf("test=%s", node.NodeClass),
		fmt.Sprintf("test2=%s", node.Attributes["arch"]),
	}
	found1, found2 := false, false
	for _, entry := range env.List() {
		if entry == exp[0] {
			found1 = true
		}
		if entry == exp[1] {
			found2 = true
		}
	}
	if !found1 || !found2 {
		t.Fatalf("expected to find %q and %q but got:\n%s",
			exp[0], exp[1], strings.Join(env.List(), "\n"))
	}
}
// TestEnvironment_AppendHostEnvvars asserts host environment variables are
// appended to the task environment except for those named in the filter list
// passed to SetHostEnvvars.
func TestEnvironment_AppendHostEnvvars(t *testing.T) {
	host := os.Environ()
	if len(host) < 2 {
		t.Skip("No host environment variables. Can't test")
	}
	// Filter out the first host var and assert it does not appear.
	skip := strings.Split(host[0], "=")[0]
	env := testEnvBuilder().
		SetHostEnvvars([]string{skip}).
		Build()

	act := env.Map()
	if len(act) < 1 {
		t.Fatalf("Host environment variables not properly set")
	}
	if _, ok := act[skip]; ok {
		t.Fatalf("Didn't filter environment variable %q", skip)
	}
}
// TestEnvironment_DashesInTaskName asserts task env var keys containing
// dashes are preserved as-is and do not clobber underscore-normalized
// NOMAD_-prefixed keys.
// NOTE(review): the original comment claimed this tests dashes in *port
// labels*, but the assertions below exercise dashes in env var keys.
// See: https://github.com/hashicorp/nomad/issues/2405
func TestEnvironment_DashesInTaskName(t *testing.T) {
	a := mock.Alloc()
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"test-one-two":       "three-four",
		"NOMAD_test_one_two": "three-five",
	}
	envMap := NewBuilder(mock.Node(), a, task, "global").Build().Map()

	if envMap["test-one-two"] != "three-four" {
		t.Fatalf("Expected test-one-two=three-four in TaskEnv; found:\n%#v", envMap)
	}
	if envMap["NOMAD_test_one_two"] != "three-five" {
		t.Fatalf("Expected NOMAD_test_one_two=three-five in TaskEnv; found:\n%#v", envMap)
	}
}
// TestEnvironment_UpdateTask asserts env vars and task meta are updated when a
// task is updated.
func TestEnvironment_UpdateTask(t *testing.T) {
	a := mock.Alloc()
	a.Job.TaskGroups[0].Meta = map[string]string{"tgmeta": "tgmetaval"}
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Name = "orig"
	task.Env = map[string]string{"env": "envval"}
	task.Meta = map[string]string{"taskmeta": "taskmetaval"}
	builder := NewBuilder(mock.Node(), a, task, "global")

	// Verify the original task's name, meta, env, and group meta are present.
	origMap := builder.Build().Map()
	if origMap["NOMAD_TASK_NAME"] != "orig" {
		t.Errorf("Expected NOMAD_TASK_NAME=orig but found %q", origMap["NOMAD_TASK_NAME"])
	}
	if origMap["NOMAD_META_taskmeta"] != "taskmetaval" {
		t.Errorf("Expected NOMAD_META_taskmeta=taskmetaval but found %q", origMap["NOMAD_META_taskmeta"])
	}
	if origMap["env"] != "envval" {
		// Fixed failure message: previously read "env=envva".
		t.Errorf("Expected env=envval but found %q", origMap["env"])
	}
	if origMap["NOMAD_META_tgmeta"] != "tgmetaval" {
		t.Errorf("Expected NOMAD_META_tgmeta=tgmetaval but found %q", origMap["NOMAD_META_tgmeta"])
	}

	// Replace name, meta, and env wholesale, then rebuild via UpdateTask.
	a.Job.TaskGroups[0].Meta = map[string]string{"tgmeta2": "tgmetaval2"}
	task.Name = "new"
	task.Env = map[string]string{"env2": "envval2"}
	task.Meta = map[string]string{"taskmeta2": "taskmetaval2"}

	newMap := builder.UpdateTask(a, task).Build().Map()
	if newMap["NOMAD_TASK_NAME"] != "new" {
		t.Errorf("Expected NOMAD_TASK_NAME=new but found %q", newMap["NOMAD_TASK_NAME"])
	}
	// Fixed failure messages below: they previously referenced the
	// pre-update keys/values (taskmeta/env/tgmeta) instead of the updated
	// ones actually being asserted.
	if newMap["NOMAD_META_taskmeta2"] != "taskmetaval2" {
		t.Errorf("Expected NOMAD_META_taskmeta2=taskmetaval2 but found %q", newMap["NOMAD_META_taskmeta2"])
	}
	if newMap["env2"] != "envval2" {
		t.Errorf("Expected env2=envval2 but found %q", newMap["env2"])
	}
	if newMap["NOMAD_META_tgmeta2"] != "tgmetaval2" {
		t.Errorf("Expected NOMAD_META_tgmeta2=tgmetaval2 but found %q", newMap["NOMAD_META_tgmeta2"])
	}
	// The old task meta must no longer be present after the update.
	if v, ok := newMap["NOMAD_META_taskmeta"]; ok {
		t.Errorf("Expected NOMAD_META_taskmeta to be unset but found: %q", v)
	}
}
// TestEnvironment_InterpolateEmptyOptionalMeta asserts that in a parameterized
// job, if an optional meta field is not set, it will get interpolated as an
// empty string.
func TestEnvironment_InterpolateEmptyOptionalMeta(t *testing.T) {
require := require.New(t)
a := mock.Alloc()
a.Job.ParameterizedJob = &structs.ParameterizedJobConfig{
MetaOptional: []string{"metaopt1", "metaopt2"},
}
a.Job.Dispatched = true
task := a.Job.TaskGroups[0].Tasks[0]
task.Meta = map[string]string{"metaopt1": "metaopt1val"}
env := NewBuilder(mock.Node(), a, task, "global").Build()
require.Equal("metaopt1val", env.ReplaceEnv("${NOMAD_META_metaopt1}"))
require.Empty(env.ReplaceEnv("${NOMAD_META_metaopt2}"))
}
// TestEnvironment_Upstreams asserts that group.service.upstreams entries are
// added to the environment.
func TestEnvironment_Upstreams(t *testing.T) {
	t.Parallel()

	// Add some upstreams to the mock alloc
	a := mock.Alloc()
	tg := a.Job.LookupTaskGroup(a.TaskGroup)
	tg.Services = []*structs.Service{
		// Services without Connect should be ignored
		{
			Name: "ignoreme",
		},
		// All upstreams from a service should be added
		{
			Name: "remote_service",
			Connect: &structs.ConsulConnect{
				SidecarService: &structs.ConsulSidecarService{
					Proxy: &structs.ConsulProxy{
						Upstreams: []structs.ConsulUpstream{
							{
								DestinationName: "foo-bar",
								LocalBindPort:   1234,
							},
							{
								DestinationName: "bar",
								LocalBindPort:   5678,
							},
						},
					},
				},
			},
		},
	}

	// Ensure the upstreams can be interpolated
	// (dashed destination names interpolate via their underscored form).
	tg.Tasks[0].Env = map[string]string{
		"foo": "${NOMAD_UPSTREAM_ADDR_foo_bar}",
		"bar": "${NOMAD_UPSTREAM_PORT_foo-bar}",
	}

	env := NewBuilder(mock.Node(), a, tg.Tasks[0], "global").Build().Map()

	// Dashes in destination names become underscores in the env var names.
	require.Equal(t, "127.0.0.1:1234", env["NOMAD_UPSTREAM_ADDR_foo_bar"])
	require.Equal(t, "127.0.0.1", env["NOMAD_UPSTREAM_IP_foo_bar"])
	require.Equal(t, "1234", env["NOMAD_UPSTREAM_PORT_foo_bar"])
	require.Equal(t, "127.0.0.1:5678", env["NOMAD_UPSTREAM_ADDR_bar"])
	require.Equal(t, "127.0.0.1", env["NOMAD_UPSTREAM_IP_bar"])
	require.Equal(t, "5678", env["NOMAD_UPSTREAM_PORT_bar"])
	require.Equal(t, "127.0.0.1:1234", env["foo"])
	require.Equal(t, "1234", env["bar"])
}
// TestEnvironment_SetPortMapEnvs asserts SetPortMapEnvs overwrites existing
// NOMAD_PORT_* entries with the driver port map values, adds entries for new
// labels, and leaves unrelated keys untouched.
func TestEnvironment_SetPortMapEnvs(t *testing.T) {
	initial := map[string]string{
		"foo":            "bar",
		"NOMAD_PORT_ssh": "2342",
	}
	portMapping := map[string]int{
		"ssh":  22,
		"http": 80,
	}

	got := SetPortMapEnvs(initial, portMapping)

	want := map[string]string{
		"foo":             "bar",
		"NOMAD_PORT_ssh":  "22",
		"NOMAD_PORT_http": "80",
	}
	require.Equal(t, want, got)
}
// TestEnvironment_TasklessBuilder asserts NewBuilder tolerates a nil task
// without panicking and still interpolates job- and group-level meta.
func TestEnvironment_TasklessBuilder(t *testing.T) {
	node := mock.Node()
	alloc := mock.Alloc()
	alloc.Job.Meta["jobt"] = "foo"
	alloc.Job.TaskGroups[0].Meta["groupt"] = "bar"
	require := require.New(t)
	var taskEnv *TaskEnv
	// Building with a nil task must not panic.
	require.NotPanics(func() {
		taskEnv = NewBuilder(node, alloc, nil, "global").SetAllocDir("/tmp/alloc").Build()
	})

	// Job and group meta are still available for interpolation.
	require.Equal("foo", taskEnv.ReplaceEnv("${NOMAD_META_jobt}"))
	require.Equal("bar", taskEnv.ReplaceEnv("${NOMAD_META_groupt}"))
}