package taskenv

import (
	"fmt"
	"os"
	"reflect"
	"sort"
	"strings"
	"testing"

	hcl "github.com/hashicorp/hcl/v2"
	"github.com/hashicorp/hcl/v2/gohcl"
	"github.com/hashicorp/hcl/v2/hclsyntax"
	"github.com/hashicorp/nomad/helper/uuid"
	"github.com/hashicorp/nomad/nomad/mock"
	"github.com/hashicorp/nomad/nomad/structs"
	"github.com/hashicorp/nomad/plugins/drivers"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

const (
	// Node values that tests can rely on
	metaKey   = "instance"
	metaVal   = "t2-micro"
	attrKey   = "arch"
	attrVal   = "amd64"
	nodeName  = "test node"
	nodeClass = "test class"

	// Environment variable values that tests can rely on
	envOneKey = "NOMAD_IP"
	envOneVal = "127.0.0.1"
	envTwoKey = "NOMAD_PORT_WEB"
	envTwoVal = ":80"
)

var (
	// portMap is for use in tests as it's set after Builder creation
	portMap = map[string]int{
		"https": 443,
	}
)

func testEnvBuilder() *Builder {
	n := mock.Node()
	n.Attributes = map[string]string{
		attrKey: attrVal,
	}
	n.Meta = map[string]string{
		metaKey: metaVal,
	}
	n.Name = nodeName
	n.NodeClass = nodeClass

	task := mock.Job().TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		envOneKey: envOneVal,
		envTwoKey: envTwoVal,
	}
	return NewBuilder(n, mock.Alloc(), task, "global")
}
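
// testEnvBuilder and the tests below all follow the same fluent pattern:
// configure a Builder via its setters, then call Build() to snapshot the
// resulting TaskEnv. A minimal sketch of that flow (values illustrative):
//
//	env := NewBuilder(mock.Node(), mock.Alloc(), task, "global").
//		SetDriverNetwork(&drivers.DriverNetwork{PortMap: portMap}).
//		Build()
//	addr := env.ReplaceEnv("${NOMAD_ADDR_https}")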

func TestEnvironment_ParseAndReplace_Env(t *testing.T) {
	env := testEnvBuilder()

	input := []string{fmt.Sprintf(`"${%v}"!`, envOneKey), fmt.Sprintf("${%s}${%s}", envOneKey, envTwoKey)}
	act := env.Build().ParseAndReplace(input)
	exp := []string{fmt.Sprintf(`"%s"!`, envOneVal), fmt.Sprintf("%s%s", envOneVal, envTwoVal)}

	if !reflect.DeepEqual(act, exp) {
		t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
	}
}

func TestEnvironment_ParseAndReplace_Meta(t *testing.T) {
	input := []string{fmt.Sprintf("${%v%v}", nodeMetaPrefix, metaKey)}
	exp := []string{metaVal}
	env := testEnvBuilder()
	act := env.Build().ParseAndReplace(input)

	if !reflect.DeepEqual(act, exp) {
		t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
	}
}

func TestEnvironment_ParseAndReplace_Attr(t *testing.T) {
	input := []string{fmt.Sprintf("${%v%v}", nodeAttributePrefix, attrKey)}
	exp := []string{attrVal}
	env := testEnvBuilder()
	act := env.Build().ParseAndReplace(input)

	if !reflect.DeepEqual(act, exp) {
		t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
	}
}

func TestEnvironment_ParseAndReplace_Node(t *testing.T) {
	input := []string{fmt.Sprintf("${%v}", nodeNameKey), fmt.Sprintf("${%v}", nodeClassKey)}
	exp := []string{nodeName, nodeClass}
	env := testEnvBuilder()
	act := env.Build().ParseAndReplace(input)

	if !reflect.DeepEqual(act, exp) {
		t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
	}
}

func TestEnvironment_ParseAndReplace_Mixed(t *testing.T) {
	input := []string{
		fmt.Sprintf("${%v}${%v%v}", nodeNameKey, nodeAttributePrefix, attrKey),
		fmt.Sprintf("${%v}${%v%v}", nodeClassKey, nodeMetaPrefix, metaKey),
		fmt.Sprintf("${%v}${%v}", envTwoKey, nodeClassKey),
	}
	exp := []string{
		fmt.Sprintf("%v%v", nodeName, attrVal),
		fmt.Sprintf("%v%v", nodeClass, metaVal),
		fmt.Sprintf("%v%v", envTwoVal, nodeClass),
	}
	env := testEnvBuilder()
	act := env.Build().ParseAndReplace(input)

	if !reflect.DeepEqual(act, exp) {
		t.Fatalf("ParseAndReplace(%v) returned %#v; want %#v", input, act, exp)
	}
}

func TestEnvironment_ReplaceEnv_Mixed(t *testing.T) {
	input := fmt.Sprintf("${%v}${%v%v}", nodeNameKey, nodeAttributePrefix, attrKey)
	exp := fmt.Sprintf("%v%v", nodeName, attrVal)
	env := testEnvBuilder()
	act := env.Build().ReplaceEnv(input)

	if act != exp {
		t.Fatalf("ReplaceEnv(%v) returned %#v; want %#v", input, act, exp)
	}
}

func TestEnvironment_AsList(t *testing.T) {
	n := mock.Node()
	n.Meta = map[string]string{
		"metaKey": "metaVal",
	}
	a := mock.Alloc()
	a.Job.ParentID = fmt.Sprintf("mock-parent-service-%s", uuid.Generate())
	a.AllocatedResources.Tasks["web"].Networks[0] = &structs.NetworkResource{
		Device:        "eth0",
		IP:            "127.0.0.1",
		ReservedPorts: []structs.Port{{Label: "https", Value: 8080}},
		MBits:         50,
		DynamicPorts:  []structs.Port{{Label: "http", Value: 80}},
	}
	a.AllocatedResources.Tasks["ssh"] = &structs.AllocatedTaskResources{
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				MBits:  50,
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
			},
		},
	}
	a.Namespace = "not-default"
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"taskEnvKey": "taskEnvVal",
	}
	env := NewBuilder(n, a, task, "global").SetDriverNetwork(
		&drivers.DriverNetwork{PortMap: map[string]int{"https": 443}},
	)

	act := env.Build().List()
	exp := []string{
		"taskEnvKey=taskEnvVal",
		"NOMAD_ADDR_http=127.0.0.1:80",
		"NOMAD_PORT_http=80",
		"NOMAD_IP_http=127.0.0.1",
		"NOMAD_ADDR_https=127.0.0.1:8080",
		"NOMAD_PORT_https=443",
		"NOMAD_IP_https=127.0.0.1",
		"NOMAD_HOST_PORT_http=80",
		"NOMAD_HOST_PORT_https=8080",
		"NOMAD_TASK_NAME=web",
		"NOMAD_GROUP_NAME=web",
		"NOMAD_ADDR_ssh_other=192.168.0.100:1234",
		"NOMAD_ADDR_ssh_ssh=192.168.0.100:22",
		"NOMAD_IP_ssh_other=192.168.0.100",
		"NOMAD_IP_ssh_ssh=192.168.0.100",
		"NOMAD_PORT_ssh_other=1234",
		"NOMAD_PORT_ssh_ssh=22",
		"NOMAD_CPU_LIMIT=500",
		"NOMAD_DC=dc1",
		"NOMAD_NAMESPACE=not-default",
		"NOMAD_REGION=global",
		"NOMAD_MEMORY_LIMIT=256",
		"NOMAD_META_ELB_CHECK_INTERVAL=30s",
		"NOMAD_META_ELB_CHECK_MIN=3",
		"NOMAD_META_ELB_CHECK_TYPE=http",
		"NOMAD_META_FOO=bar",
		"NOMAD_META_OWNER=armon",
		"NOMAD_META_elb_check_interval=30s",
		"NOMAD_META_elb_check_min=3",
		"NOMAD_META_elb_check_type=http",
		"NOMAD_META_foo=bar",
		"NOMAD_META_owner=armon",
		fmt.Sprintf("NOMAD_JOB_ID=%s", a.Job.ID),
		"NOMAD_JOB_NAME=my-job",
		fmt.Sprintf("NOMAD_JOB_PARENT_ID=%s", a.Job.ParentID),
		fmt.Sprintf("NOMAD_ALLOC_ID=%s", a.ID),
		"NOMAD_ALLOC_INDEX=0",
	}
	sort.Strings(act)
	sort.Strings(exp)
	require.Equal(t, exp, act)
}
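
// The expected list above exercises the port naming convention: the local
// task's ports get NOMAD_{IP,PORT,ADDR,HOST_PORT}_<label> variables, sibling
// tasks' ports are additionally qualified by task name (e.g.
// NOMAD_ADDR_ssh_ssh), and with a driver port map NOMAD_PORT_* holds the
// mapped port (443) while NOMAD_HOST_PORT_* keeps the host port (8080).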

// COMPAT(0.11): Remove in 0.11
// TestEnvironment_AsList_Old asserts environment variables are built correctly
// for allocations created by Nomad 0.8 servers, where networking details must
// be read from alloc.TaskResources rather than the sparsely populated
// task.Resources (https://github.com/hashicorp/nomad/issues/5587).
func TestEnvironment_AsList_Old(t *testing.T) {
	n := mock.Node()
	n.Meta = map[string]string{
		"metaKey": "metaVal",
	}
	a := mock.Alloc()
	a.AllocatedResources = nil
	a.Resources = &structs.Resources{
		CPU:      500,
		MemoryMB: 256,
		DiskMB:   150,
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
				MBits: 50,
				DynamicPorts: []structs.Port{{Label: "http", Value: 2000}},
			},
		},
	}
	a.TaskResources = map[string]*structs.Resources{
		"web": {
			CPU:      500,
			MemoryMB: 256,
			Networks: []*structs.NetworkResource{
				{
					Device: "eth0",
					IP:            "127.0.0.1",
					ReservedPorts: []structs.Port{{Label: "https", Value: 8080}},
					MBits:         50,
					DynamicPorts: []structs.Port{{Label: "http", Value: 80}},
				},
			},
		},
	}
	a.TaskResources["ssh"] = &structs.Resources{
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				MBits:  50,
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
			},
		},
	}

	// simulate canonicalization on restore or fetch
	a.Canonicalize()

	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"taskEnvKey": "taskEnvVal",
	}
	task.Resources.Networks = []*structs.NetworkResource{
		// Nomad 0.8 didn't fully populate the fields in task Resource Networks
		{
			IP:            "",
			ReservedPorts: []structs.Port{{Label: "https"}},
			DynamicPorts:  []structs.Port{{Label: "http"}},
		},
	}
	env := NewBuilder(n, a, task, "global").SetDriverNetwork(
		&drivers.DriverNetwork{PortMap: map[string]int{"https": 443}},
	)

	act := env.Build().List()
	exp := []string{
		"taskEnvKey=taskEnvVal",
		"NOMAD_ADDR_http=127.0.0.1:80",
		"NOMAD_PORT_http=80",
		"NOMAD_IP_http=127.0.0.1",
		"NOMAD_ADDR_https=127.0.0.1:8080",
		"NOMAD_PORT_https=443",
		"NOMAD_IP_https=127.0.0.1",
		"NOMAD_HOST_PORT_http=80",
		"NOMAD_HOST_PORT_https=8080",
		"NOMAD_TASK_NAME=web",
		"NOMAD_GROUP_NAME=web",
		"NOMAD_ADDR_ssh_other=192.168.0.100:1234",
		"NOMAD_ADDR_ssh_ssh=192.168.0.100:22",
		"NOMAD_IP_ssh_other=192.168.0.100",
		"NOMAD_IP_ssh_ssh=192.168.0.100",
		"NOMAD_PORT_ssh_other=1234",
		"NOMAD_PORT_ssh_ssh=22",
		"NOMAD_CPU_LIMIT=500",
		"NOMAD_DC=dc1",
		"NOMAD_NAMESPACE=default",
		"NOMAD_REGION=global",
		"NOMAD_MEMORY_LIMIT=256",
		"NOMAD_META_ELB_CHECK_INTERVAL=30s",
		"NOMAD_META_ELB_CHECK_MIN=3",
		"NOMAD_META_ELB_CHECK_TYPE=http",
		"NOMAD_META_FOO=bar",
		"NOMAD_META_OWNER=armon",
		"NOMAD_META_elb_check_interval=30s",
		"NOMAD_META_elb_check_min=3",
		"NOMAD_META_elb_check_type=http",
		"NOMAD_META_foo=bar",
		"NOMAD_META_owner=armon",
		fmt.Sprintf("NOMAD_JOB_ID=%s", a.Job.ID),
		"NOMAD_JOB_NAME=my-job",
		fmt.Sprintf("NOMAD_ALLOC_ID=%s", a.ID),
		"NOMAD_ALLOC_INDEX=0",
	}
	sort.Strings(act)
	sort.Strings(exp)
	require.Equal(t, exp, act)
}

func TestEnvironment_AllValues(t *testing.T) {
	t.Parallel()

	n := mock.Node()
	n.Meta = map[string]string{
		"metaKey":           "metaVal",
		"nested.meta.key":   "a",
		"invalid...metakey": "b",
	}
	a := mock.ConnectAlloc()
	a.Job.ParentID = fmt.Sprintf("mock-parent-service-%s", uuid.Generate())
	a.AllocatedResources.Tasks["web"].Networks[0] = &structs.NetworkResource{
		Device:        "eth0",
		IP:            "127.0.0.1",
		ReservedPorts: []structs.Port{{Label: "https", Value: 8080}},
		MBits:         50,
		DynamicPorts:  []structs.Port{{Label: "http", Value: 80}},
	}
	a.AllocatedResources.Tasks["ssh"] = &structs.AllocatedTaskResources{
		Networks: []*structs.NetworkResource{
			{
				Device: "eth0",
				IP:     "192.168.0.100",
				MBits:  50,
				ReservedPorts: []structs.Port{
					{Label: "ssh", Value: 22},
					{Label: "other", Value: 1234},
				},
			},
		},
	}

	a.AllocatedResources.Shared.Ports = structs.AllocatedPorts{
		{
			Label:  "admin",
			Value:  32000,
			To:     9000,
			HostIP: "127.0.0.1",
		},
	}

	sharedNet := a.AllocatedResources.Shared.Networks[0]

	// Add group network port with only a host port.
	sharedNet.DynamicPorts = append(sharedNet.DynamicPorts, structs.Port{
		Label: "hostonly",
		Value: 9998,
	})

	// Add group network reserved port with a To value.
	sharedNet.ReservedPorts = append(sharedNet.ReservedPorts, structs.Port{
		Label: "static",
		Value: 9997,
		To:    97,
	})

	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"taskEnvKey":        "taskEnvVal",
		"nested.task.key":   "x",
		"invalid...taskkey": "y",
		".a":                "a",
		"b.":                "b",
		".":                 "c",
	}
	env := NewBuilder(n, a, task, "global").SetDriverNetwork(
		&drivers.DriverNetwork{PortMap: map[string]int{"https": 443}},
	)

	values, errs, err := env.Build().AllValues()
	require.NoError(t, err)

	// Assert the keys we couldn't nest were reported
	require.Len(t, errs, 5)
	require.Contains(t, errs, "invalid...taskkey")
	require.Contains(t, errs, "meta.invalid...metakey")
	require.Contains(t, errs, ".a")
	require.Contains(t, errs, "b.")
	require.Contains(t, errs, ".")

	exp := map[string]string{
		// Node
		"node.unique.id":          n.ID,
		"node.region":             "global",
		"node.datacenter":         n.Datacenter,
		"node.unique.name":        n.Name,
		"node.class":              n.NodeClass,
		"meta.metaKey":            "metaVal",
		"attr.arch":               "x86",
		"attr.driver.exec":        "1",
		"attr.driver.mock_driver": "1",
		"attr.kernel.name":        "linux",
		"attr.nomad.version":      "0.5.0",

		// 0.9 style meta and attr
		"node.meta.metaKey":            "metaVal",
		"node.attr.arch":               "x86",
		"node.attr.driver.exec":        "1",
		"node.attr.driver.mock_driver": "1",
		"node.attr.kernel.name":        "linux",
		"node.attr.nomad.version":      "0.5.0",

		// Env
		"taskEnvKey":                    "taskEnvVal",
		"NOMAD_ADDR_http":               "127.0.0.1:80",
		"NOMAD_PORT_http":               "80",
		"NOMAD_IP_http":                 "127.0.0.1",
		"NOMAD_ADDR_https":              "127.0.0.1:8080",
		"NOMAD_PORT_https":              "443",
		"NOMAD_IP_https":                "127.0.0.1",
		"NOMAD_HOST_PORT_http":          "80",
		"NOMAD_HOST_PORT_https":         "8080",
		"NOMAD_TASK_NAME":               "web",
		"NOMAD_GROUP_NAME":              "web",
		"NOMAD_ADDR_ssh_other":          "192.168.0.100:1234",
		"NOMAD_ADDR_ssh_ssh":            "192.168.0.100:22",
		"NOMAD_IP_ssh_other":            "192.168.0.100",
		"NOMAD_IP_ssh_ssh":              "192.168.0.100",
		"NOMAD_PORT_ssh_other":          "1234",
		"NOMAD_PORT_ssh_ssh":            "22",
		"NOMAD_CPU_LIMIT":               "500",
		"NOMAD_DC":                      "dc1",
		"NOMAD_NAMESPACE":               "default",
		"NOMAD_REGION":                  "global",
		"NOMAD_MEMORY_LIMIT":            "256",
		"NOMAD_META_ELB_CHECK_INTERVAL": "30s",
		"NOMAD_META_ELB_CHECK_MIN":      "3",
		"NOMAD_META_ELB_CHECK_TYPE":     "http",
		"NOMAD_META_FOO":                "bar",
		"NOMAD_META_OWNER":              "armon",
		"NOMAD_META_elb_check_interval": "30s",
		"NOMAD_META_elb_check_min":      "3",
		"NOMAD_META_elb_check_type":     "http",
		"NOMAD_META_foo":                "bar",
		"NOMAD_META_owner":              "armon",
		"NOMAD_JOB_ID":                  a.Job.ID,
		"NOMAD_JOB_NAME":                "my-job",
		"NOMAD_JOB_PARENT_ID":           a.Job.ParentID,
		"NOMAD_ALLOC_ID":                a.ID,
		"NOMAD_ALLOC_INDEX":             "0",
		"NOMAD_PORT_connect_proxy_testconnect":      "9999",
		"NOMAD_HOST_PORT_connect_proxy_testconnect": "9999",
		"NOMAD_PORT_hostonly":                       "9998",
		"NOMAD_HOST_PORT_hostonly":                  "9998",
		"NOMAD_PORT_static":                         "97",
		"NOMAD_HOST_PORT_static":                    "9997",
		"NOMAD_ADDR_admin":                          "127.0.0.1:32000",
		"NOMAD_HOST_ADDR_admin":                     "127.0.0.1:32000",
		"NOMAD_IP_admin":                            "127.0.0.1",
		"NOMAD_HOST_IP_admin":                       "127.0.0.1",
		"NOMAD_PORT_admin":                          "9000",
		"NOMAD_ALLOC_PORT_admin":                    "9000",
		"NOMAD_HOST_PORT_admin":                     "32000",

		// 0.9 style env map
		`env["taskEnvKey"]`:        "taskEnvVal",
		`env["NOMAD_ADDR_http"]`:   "127.0.0.1:80",
		`env["nested.task.key"]`:   "x",
		`env["invalid...taskkey"]`: "y",
		`env[".a"]`:                "a",
		`env["b."]`:                "b",
		`env["."]`:                 "c",
	}

	evalCtx := &hcl.EvalContext{
		Variables: values,
	}

	for k, expectedVal := range exp {
		t.Run(k, func(t *testing.T) {
			// Parse HCL containing the test key
			hclStr := fmt.Sprintf(`"${%s}"`, k)
			expr, diag := hclsyntax.ParseExpression([]byte(hclStr), "test.hcl", hcl.Pos{})
			require.Empty(t, diag)

			// Decode with the TaskEnv values
			out := ""
			diag = gohcl.DecodeExpression(expr, evalCtx, &out)
			require.Empty(t, diag)
			require.Equal(t, out, expectedVal)
		})
	}
}
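
// Note on AllValues above: keys that cannot be nested as HCL object paths
// (e.g. "invalid...taskkey", ".a", "b.", ".") are reported in errs, yet they
// remain reachable through the flat env["..."] index map that AllValues also
// exposes, which is what the `env["..."]` expectations exercise.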

func TestEnvironment_VaultToken(t *testing.T) {
	n := mock.Node()
	a := mock.Alloc()
	env := NewBuilder(n, a, a.Job.TaskGroups[0].Tasks[0], "global")
	env.SetVaultToken("123", "vault-namespace", false)

	{
		act := env.Build().All()
		if act[VaultToken] != "" {
			t.Fatalf("Unexpected environment variables: %s=%q", VaultToken, act[VaultToken])
		}
		if act[VaultNamespace] != "" {
			t.Fatalf("Unexpected environment variables: %s=%q", VaultNamespace, act[VaultNamespace])
		}
	}

	{
		act := env.SetVaultToken("123", "", true).Build().List()
		exp := "VAULT_TOKEN=123"
		found := false
		foundNs := false
		for _, entry := range act {
			if entry == exp {
				found = true
			}
			if strings.HasPrefix(entry, "VAULT_NAMESPACE=") {
				foundNs = true
			}
		}
		if !found {
			t.Fatalf("did not find %q in:\n%s", exp, strings.Join(act, "\n"))
		}
		if foundNs {
			t.Fatalf("found unwanted VAULT_NAMESPACE in:\n%s", strings.Join(act, "\n"))
		}
	}

	{
		act := env.SetVaultToken("123", "vault-namespace", true).Build().List()
		exp := "VAULT_TOKEN=123"
		expNs := "VAULT_NAMESPACE=vault-namespace"
		found := false
		foundNs := false
		for _, entry := range act {
			if entry == exp {
				found = true
			}
			if entry == expNs {
				foundNs = true
			}
		}
		if !found {
			t.Fatalf("did not find %q in:\n%s", exp, strings.Join(act, "\n"))
		}
		if !foundNs {
			t.Fatalf("did not find %q in:\n%s", expNs, strings.Join(act, "\n"))
		}
	}
}
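
// SetVaultToken's final argument controls whether the token is exposed to the
// task's environment: the three blocks above cover a hidden token, an
// exported token without a namespace (no VAULT_NAMESPACE emitted), and an
// exported token with a namespace (VAULT_NAMESPACE set).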

func TestEnvironment_Envvars(t *testing.T) {
	envMap := map[string]string{"foo": "baz", "bar": "bang"}
	n := mock.Node()
	a := mock.Alloc()
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = envMap
	net := &drivers.DriverNetwork{PortMap: portMap}
	act := NewBuilder(n, a, task, "global").SetDriverNetwork(net).Build().All()
	for k, v := range envMap {
		actV, ok := act[k]
		if !ok {
			t.Fatalf("missing %q in %#v", k, act)
		}
		if v != actV {
			t.Fatalf("expected %s=%q but found %q", k, v, actV)
		}
	}
}

// TestEnvironment_HookVars asserts hook env vars are last-write-wins, and
// that deleting a later hook's vars makes an earlier hook's values visible
// again.
func TestEnvironment_HookVars(t *testing.T) {
	n := mock.Node()
	a := mock.Alloc()
	builder := NewBuilder(n, a, a.Job.TaskGroups[0].Tasks[0], "global")

	// Add vars from two hooks and assert the second one wins on
	// conflicting keys.
	builder.SetHookEnv("hookA", map[string]string{
		"foo": "bar",
		"baz": "quux",
	})
	builder.SetHookEnv("hookB", map[string]string{
		"foo":   "123",
		"hookB": "wins",
	})

	{
		out := builder.Build().All()
		assert.Equal(t, "123", out["foo"])
		assert.Equal(t, "quux", out["baz"])
		assert.Equal(t, "wins", out["hookB"])
	}

	// Assert that overwriting a hook's vars (here with nil) allows the first
	// hook's original value to be used.
	builder.SetHookEnv("hookB", nil)
	{
		out := builder.Build().All()
		assert.Equal(t, "bar", out["foo"])
		assert.Equal(t, "quux", out["baz"])
		assert.NotContains(t, out, "hookB")
	}
}

// TestEnvironment_DeviceHookVars asserts device hook env vars are accessible
// separately.
func TestEnvironment_DeviceHookVars(t *testing.T) {
	require := require.New(t)
	n := mock.Node()
	a := mock.Alloc()
	builder := NewBuilder(n, a, a.Job.TaskGroups[0].Tasks[0], "global")

	// Add vars from a regular hook and a device hook.
	builder.SetHookEnv("hookA", map[string]string{
		"foo": "bar",
		"baz": "quux",
	})
	builder.SetDeviceHookEnv("devices", map[string]string{
		"hook": "wins",
	})

	b := builder.Build()
	deviceEnv := b.DeviceEnv()
	require.Len(deviceEnv, 1)
	require.Contains(deviceEnv, "hook")

	all := b.Map()
	require.Contains(all, "foo")
}
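
// DeviceEnv returns only vars registered via SetDeviceHookEnv (hence its
// single "hook" key above even though hookA set two vars), while the full
// Map still includes the regular hook's "foo".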

func TestEnvironment_Interpolate(t *testing.T) {
	n := mock.Node()
	n.Attributes["arch"] = "x86"
	n.NodeClass = "test class"
	a := mock.Alloc()
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{"test": "${node.class}", "test2": "${attr.arch}"}
	env := NewBuilder(n, a, task, "global").Build()

	exp := []string{fmt.Sprintf("test=%s", n.NodeClass), fmt.Sprintf("test2=%s", n.Attributes["arch"])}
	found1, found2 := false, false
	for _, entry := range env.List() {
		switch entry {
		case exp[0]:
			found1 = true
		case exp[1]:
			found2 = true
		}
	}
	if !found1 || !found2 {
		t.Fatalf("expected to find %q and %q but got:\n%s",
			exp[0], exp[1], strings.Join(env.List(), "\n"))
	}
}

func TestEnvironment_AppendHostEnvvars(t *testing.T) {
	host := os.Environ()
	if len(host) < 2 {
		t.Skip("No host environment variables. Can't test")
	}
	skip := strings.Split(host[0], "=")[0]
	env := testEnvBuilder().
		SetHostEnvvars([]string{skip}).
		Build()

	act := env.Map()
	if len(act) < 1 {
		t.Fatalf("Host environment variables not properly set")
	}
	if _, ok := act[skip]; ok {
		t.Fatalf("Didn't filter environment variable %q", skip)
	}
}

// TestEnvironment_DashesInTaskName asserts dashes in env var names are
// preserved and do not collide with their underscore-converted NOMAD_
// counterparts.
// See: https://github.com/hashicorp/nomad/issues/2405
func TestEnvironment_DashesInTaskName(t *testing.T) {
	a := mock.Alloc()
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Env = map[string]string{
		"test-one-two":       "three-four",
		"NOMAD_test_one_two": "three-five",
	}
	envMap := NewBuilder(mock.Node(), a, task, "global").Build().Map()

	if envMap["test-one-two"] != "three-four" {
		t.Fatalf("Expected test-one-two=three-four in TaskEnv; found:\n%#v", envMap)
	}
	if envMap["NOMAD_test_one_two"] != "three-five" {
		t.Fatalf("Expected NOMAD_test_one_two=three-five in TaskEnv; found:\n%#v", envMap)
	}
}

// TestEnvironment_UpdateTask asserts env vars and task meta are updated when a
// task is updated.
func TestEnvironment_UpdateTask(t *testing.T) {
	a := mock.Alloc()
	a.Job.TaskGroups[0].Meta = map[string]string{"tgmeta": "tgmetaval"}
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Name = "orig"
	task.Env = map[string]string{"env": "envval"}
	task.Meta = map[string]string{"taskmeta": "taskmetaval"}
	builder := NewBuilder(mock.Node(), a, task, "global")

	origMap := builder.Build().Map()
	if origMap["NOMAD_TASK_NAME"] != "orig" {
		t.Errorf("Expected NOMAD_TASK_NAME=orig but found %q", origMap["NOMAD_TASK_NAME"])
	}
	if origMap["NOMAD_META_taskmeta"] != "taskmetaval" {
		t.Errorf("Expected NOMAD_META_taskmeta=taskmetaval but found %q", origMap["NOMAD_META_taskmeta"])
	}
	if origMap["env"] != "envval" {
		t.Errorf("Expected env=envval but found %q", origMap["env"])
	}
	if origMap["NOMAD_META_tgmeta"] != "tgmetaval" {
		t.Errorf("Expected NOMAD_META_tgmeta=tgmetaval but found %q", origMap["NOMAD_META_tgmeta"])
	}

	a.Job.TaskGroups[0].Meta = map[string]string{"tgmeta2": "tgmetaval2"}
	task.Name = "new"
	task.Env = map[string]string{"env2": "envval2"}
	task.Meta = map[string]string{"taskmeta2": "taskmetaval2"}

	newMap := builder.UpdateTask(a, task).Build().Map()
	if newMap["NOMAD_TASK_NAME"] != "new" {
		t.Errorf("Expected NOMAD_TASK_NAME=new but found %q", newMap["NOMAD_TASK_NAME"])
	}
	if newMap["NOMAD_META_taskmeta2"] != "taskmetaval2" {
		t.Errorf("Expected NOMAD_META_taskmeta2=taskmetaval2 but found %q", newMap["NOMAD_META_taskmeta2"])
	}
	if newMap["env2"] != "envval2" {
		t.Errorf("Expected env2=envval2 but found %q", newMap["env2"])
	}
	if newMap["NOMAD_META_tgmeta2"] != "tgmetaval2" {
		t.Errorf("Expected NOMAD_META_tgmeta2=tgmetaval2 but found %q", newMap["NOMAD_META_tgmeta2"])
	}
	if v, ok := newMap["NOMAD_META_taskmeta"]; ok {
		t.Errorf("Expected NOMAD_META_taskmeta to be unset but found: %q", v)
	}
}

// TestEnvironment_InterpolateEmptyOptionalMeta asserts that in a parameterized
// job, if an optional meta field is not set, it will get interpolated as an
// empty string.
func TestEnvironment_InterpolateEmptyOptionalMeta(t *testing.T) {
	require := require.New(t)
	a := mock.Alloc()
	a.Job.ParameterizedJob = &structs.ParameterizedJobConfig{
		MetaOptional: []string{"metaopt1", "metaopt2"},
	}
	a.Job.Dispatched = true
	task := a.Job.TaskGroups[0].Tasks[0]
	task.Meta = map[string]string{"metaopt1": "metaopt1val"}
	env := NewBuilder(mock.Node(), a, task, "global").Build()
	require.Equal("metaopt1val", env.ReplaceEnv("${NOMAD_META_metaopt1}"))
	require.Empty(env.ReplaceEnv("${NOMAD_META_metaopt2}"))
}

// TestEnvironment_Upstreams asserts that group.service.upstreams entries are
// added to the environment.
func TestEnvironment_Upstreams(t *testing.T) {
	t.Parallel()

	// Add some upstreams to the mock alloc
	a := mock.Alloc()
	tg := a.Job.LookupTaskGroup(a.TaskGroup)
	tg.Services = []*structs.Service{
		// Services without Connect should be ignored
		{
			Name: "ignoreme",
		},
		// All upstreams from a service should be added
		{
			Name: "remote_service",
			Connect: &structs.ConsulConnect{
				SidecarService: &structs.ConsulSidecarService{
					Proxy: &structs.ConsulProxy{
						Upstreams: []structs.ConsulUpstream{
							{
								DestinationName: "foo-bar",
								LocalBindPort:   1234,
							},
							{
								DestinationName: "bar",
								LocalBindPort:   5678,
							},
						},
					},
				},
			},
		},
	}

	// Ensure the upstreams can be interpolated
	tg.Tasks[0].Env = map[string]string{
		"foo": "${NOMAD_UPSTREAM_ADDR_foo_bar}",
		"bar": "${NOMAD_UPSTREAM_PORT_foo-bar}",
	}

	env := NewBuilder(mock.Node(), a, tg.Tasks[0], "global").Build().Map()
	require.Equal(t, "127.0.0.1:1234", env["NOMAD_UPSTREAM_ADDR_foo_bar"])
	require.Equal(t, "127.0.0.1", env["NOMAD_UPSTREAM_IP_foo_bar"])
	require.Equal(t, "1234", env["NOMAD_UPSTREAM_PORT_foo_bar"])
	require.Equal(t, "127.0.0.1:5678", env["NOMAD_UPSTREAM_ADDR_bar"])
	require.Equal(t, "127.0.0.1", env["NOMAD_UPSTREAM_IP_bar"])
	require.Equal(t, "5678", env["NOMAD_UPSTREAM_PORT_bar"])
	require.Equal(t, "127.0.0.1:1234", env["foo"])
	require.Equal(t, "1234", env["bar"])
}
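
// Upstream variables are named NOMAD_UPSTREAM_{ADDR,IP,PORT}_<destination>,
// with dashes in the destination converted to underscores (foo-bar becomes
// NOMAD_UPSTREAM_ADDR_foo_bar); the dashed spelling still interpolates, as
// the "bar" task env entry above demonstrates.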

func TestEnvironment_SetPortMapEnvs(t *testing.T) {
	envs := map[string]string{
		"foo":            "bar",
		"NOMAD_PORT_ssh": "2342",
	}
	ports := map[string]int{
		"ssh":  22,
		"http": 80,
	}

	envs = SetPortMapEnvs(envs, ports)

	expected := map[string]string{
		"foo":             "bar",
		"NOMAD_PORT_ssh":  "22",
		"NOMAD_PORT_http": "80",
	}
	require.Equal(t, expected, envs)
}
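
// SetPortMapEnvs overwrites existing NOMAD_PORT_<label> values with the
// driver's mapped ports, adds entries for labels that weren't present, and
// leaves unrelated keys untouched, as the expectations above show.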

func TestEnvironment_TasklessBuilder(t *testing.T) {
	node := mock.Node()
	alloc := mock.Alloc()
	alloc.Job.Meta["jobt"] = "foo"
	alloc.Job.TaskGroups[0].Meta["groupt"] = "bar"
	require := require.New(t)
	var taskEnv *TaskEnv
	require.NotPanics(func() {
		taskEnv = NewBuilder(node, alloc, nil, "global").SetAllocDir("/tmp/alloc").Build()
	})

	require.Equal("foo", taskEnv.ReplaceEnv("${NOMAD_META_jobt}"))
	require.Equal("bar", taskEnv.ReplaceEnv("${NOMAD_META_groupt}"))
}