df633ee45f
This test is causing panics. Unlike the other similar tests, this one uses require.Eventually, which runs its condition function on a separate goroutine; when an assertion inside it fails after the parent test has completed, the test binary panics. This change replaces it with a for-loop like the other tests.

Failure:

=== RUN   TestE2E/Connect
=== RUN   TestE2E/Connect/*connect.ConnectE2ETest
=== RUN   TestE2E/Connect/*connect.ConnectE2ETest/TestConnectDemo
=== RUN   TestE2E/Connect/*connect.ConnectE2ETest/TestMultiServiceConnect
=== RUN   TestE2E/Connect/*connect.ConnectClientStateE2ETest
panic: Fail in goroutine after TestE2E/Connect/*connect.ConnectE2ETest has completed

goroutine 38 [running]:
testing.(*common).Fail(0xc000656500)
	/opt/google/go/src/testing/testing.go:565 +0x11e
testing.(*common).Fail(0xc000656100)
	/opt/google/go/src/testing/testing.go:559 +0x96
testing.(*common).FailNow(0xc000656100)
	/opt/google/go/src/testing/testing.go:587 +0x2b
testing.(*common).Fatalf(0xc000656100, 0x1512f90, 0x10, 0xc000675f88, 0x1, 0x1)
	/opt/google/go/src/testing/testing.go:672 +0x91
github.com/hashicorp/nomad/e2e/connect.(*ConnectE2ETest).TestMultiServiceConnect.func1(0x0)
	/home/shoenig/go/src/github.com/hashicorp/nomad/e2e/connect/multi_service.go:72 +0x296
github.com/hashicorp/nomad/vendor/github.com/stretchr/testify/assert.Eventually.func1(0xc0004962a0, 0xc0002338f0)
	/home/shoenig/go/src/github.com/hashicorp/nomad/vendor/github.com/stretchr/testify/assert/assertions.go:1494 +0x27
created by github.com/hashicorp/nomad/vendor/github.com/stretchr/testify/assert.Eventually
	/home/shoenig/go/src/github.com/hashicorp/nomad/vendor/github.com/stretchr/testify/assert/assertions.go:1493 +0x272
FAIL	github.com/hashicorp/nomad/e2e	21.427s
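For reference, assert.Eventually ticks its condition function on a goroutine it creates (visible in the trace above at assertions.go:1493), so an assertion that fails there can fire after the test has already returned. A minimal sketch of the replacement pattern, run directly on the test goroutine; the names condition, retries, and interval are illustrative, not taken from this change:

    // Hypothetical polling loop: assertions inside condition() run on the
    // test goroutine, so a failure fails the test instead of panicking later.
    for i := 0; i < retries; i++ {
        if condition() {
            break
        }
        time.Sleep(interval)
    }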
135 lines
3.2 KiB
Go
package connect

import (
	"strings"
	"time"

	consulapi "github.com/hashicorp/consul/api"
	"github.com/hashicorp/nomad/api"
	"github.com/hashicorp/nomad/e2e/framework"
	"github.com/hashicorp/nomad/helper/uuid"
	"github.com/hashicorp/nomad/jobspec"
	"github.com/kr/pretty"
	"github.com/stretchr/testify/require"
)

// TestMultiServiceConnect tests running multiple envoy sidecars in the same allocation.
func (tc *ConnectE2ETest) TestMultiServiceConnect(f *framework.F) {
	t := f.T()
	uuid := uuid.Generate()
	jobID := "connect" + uuid[0:8]
	tc.jobIds = append(tc.jobIds, jobID)
	jobapi := tc.Nomad().Jobs()

	job, err := jobspec.ParseFile("connect/input/multi-service.nomad")
	require.NoError(t, err)
	job.ID = &jobID

	resp, _, err := jobapi.Register(job, nil)
	require.NoError(t, err)
	require.NotNil(t, resp)
	require.Zero(t, resp.Warnings)
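	// Poll the evaluation with a blocking query until it leaves the pending state.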
EVAL:
	qopts := &api.QueryOptions{
		WaitIndex: resp.EvalCreateIndex,
	}
	evalapi := tc.Nomad().Evaluations()
	eval, qmeta, err := evalapi.Info(resp.EvalID, qopts)
	require.NoError(t, err)
	qopts.WaitIndex = qmeta.LastIndex

	switch eval.Status {
	case "pending":
		goto EVAL
	case "complete":
		// Ok!
	case "failed", "canceled", "blocked":
		t.Fatalf("eval %s\n%s\n", eval.Status, pretty.Sprint(eval))
	default:
		t.Fatalf("unknown eval status: %s\n%s\n", eval.Status, pretty.Sprint(eval))
	}

	// Assert there were 0 placement failures
	require.Zero(t, eval.FailedTGAllocs, pretty.Sprint(eval.FailedTGAllocs))
	require.Len(t, eval.QueuedAllocations, 1, pretty.Sprint(eval.QueuedAllocations))

	// Assert allocs are running
	for i := 0; i < 20; i++ {
		allocs, qmeta, err := evalapi.Allocations(eval.ID, qopts)
		require.NoError(t, err)
		require.Len(t, allocs, 1)
		qopts.WaitIndex = qmeta.LastIndex

		running := 0
		for _, alloc := range allocs {
			switch alloc.ClientStatus {
			case "running":
				running++
			case "pending":
				// keep trying
			default:
				require.Failf(t, "alloc failed", "alloc: %s", pretty.Sprint(alloc))
			}
		}

		if running == len(allocs) {
			break
		}

		time.Sleep(500 * time.Millisecond)
	}
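	// Re-read the allocations and record their IDs; they are used below to
	// match Consul health checks back to this job's services.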
	allocs, _, err := evalapi.Allocations(eval.ID, qopts)
	require.NoError(t, err)
	allocIDs := make(map[string]bool, 1)
	for _, a := range allocs {
		if a.ClientStatus != "running" || a.DesiredStatus != "run" {
			t.Fatalf("alloc %s (%s) terminal; client=%s desired=%s", a.TaskGroup, a.ID, a.ClientStatus, a.DesiredStatus)
		}
		allocIDs[a.ID] = true
	}

	// Check Consul service health
	agentapi := tc.Consul().Agent()
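	// Poll the Consul agent on the test goroutine (rather than from a goroutine
	// spawned by require.Eventually) so a failing assertion fails this test directly.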
	failing := map[string]*consulapi.AgentCheck{}
	for i := 0; i < 60; i++ {
		checks, err := agentapi.Checks()
		require.NoError(t, err)

		// Filter out checks for other services
		for cid, check := range checks {
			found := false
			for allocID := range allocIDs {
				if strings.Contains(check.ServiceID, allocID) {
					found = true
					break
				}
			}

			if !found {
				delete(checks, cid)
			}
		}

		// Ensure checks are all passing
		failing = map[string]*consulapi.AgentCheck{}
		for _, check := range checks {
			if check.Status != "passing" {
				failing[check.CheckID] = check
				break
			}
		}

		if len(failing) == 0 {
			break
		}

		t.Logf("still %d checks not passing", len(failing))
		time.Sleep(time.Second)
	}

	require.Len(t, failing, 0, pretty.Sprint(failing))
}