package api

import (
	"context"
	"encoding/json"
	"testing"
	"time"

	"github.com/hashicorp/nomad/api/internal/testutil"
	"github.com/mitchellh/mapstructure"
	"github.com/stretchr/testify/require"
)
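
// TestEvent_Stream registers a job, subscribes to the event stream for all
// evaluation events, and expects a single Evaluation event to arrive within
// five seconds.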
func TestEvent_Stream(t *testing.T) {
	t.Parallel()

	c, s := makeClient(t, nil, nil)
	defer s.Stop()

	// register job to generate events
	jobs := c.Jobs()
	job := testJob()
	resp2, _, err := jobs.Register(job, nil)
	require.Nil(t, err)
	require.NotNil(t, resp2)

	// build event stream request
	events := c.EventStream()
	q := &QueryOptions{}
	topics := map[Topic][]string{
		TopicEvaluation: {"*"},
	}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	streamCh, err := events.Stream(ctx, topics, 0, q)
	require.NoError(t, err)

	select {
	case event := <-streamCh:
		if event.Err != nil {
			require.Fail(t, event.Err.Error())
		}
		require.Equal(t, len(event.Events), 1)
		require.Equal(t, "Evaluation", string(event.Events[0].Topic))
	case <-time.After(5 * time.Second):
		require.Fail(t, "failed waiting for event stream event")
	}
}
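
// TestEvent_Stream_Err_InvalidQueryParam verifies that an invalid filter key
// ("::*") is rejected by the server with a 400 "Invalid key value pair" error.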
func TestEvent_Stream_Err_InvalidQueryParam(t *testing.T) {
	t.Parallel()

	c, s := makeClient(t, nil, nil)
	defer s.Stop()

	// register job to generate events
	jobs := c.Jobs()
	job := testJob()
	resp2, _, err := jobs.Register(job, nil)
	require.Nil(t, err)
	require.NotNil(t, resp2)

	// build event stream request
	events := c.EventStream()
	q := &QueryOptions{}
	topics := map[Topic][]string{
		TopicEvaluation: {"::*"},
	}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	_, err = events.Stream(ctx, topics, 0, q)
	require.Error(t, err)
	require.Contains(t, err.Error(), "400")
	require.Contains(t, err.Error(), "Invalid key value pair")
}
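
// TestEvent_Stream_CloseCtx verifies that cancelling the request context
// closes the event stream channel.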
func TestEvent_Stream_CloseCtx(t *testing.T) {
	t.Parallel()

	c, s := makeClient(t, nil, nil)
	defer s.Stop()

	// register job to generate events
	jobs := c.Jobs()
	job := testJob()
	resp2, _, err := jobs.Register(job, nil)
	require.Nil(t, err)
	require.NotNil(t, resp2)

	// build event stream request
	events := c.EventStream()
	q := &QueryOptions{}
	topics := map[Topic][]string{
		TopicEvaluation: {"*"},
	}

	ctx, cancel := context.WithCancel(context.Background())

	streamCh, err := events.Stream(ctx, topics, 0, q)
	require.NoError(t, err)

	// cancel the request
	cancel()

	select {
	case event, ok := <-streamCh:
		require.False(t, ok)
		require.Nil(t, event)
	case <-time.After(5 * time.Second):
		require.Fail(t, "failed waiting for event stream event")
	}
}
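
// TestEventStream_PayloadValue streams Node events from a dev-mode server and
// checks the typed Node helper as well as a raw mapstructure decoding of the
// payload: the node ID must be present and the SecretID must not be exposed.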
func TestEventStream_PayloadValue(t *testing.T) {
	t.Parallel()

	c, s := makeClient(t, nil, func(c *testutil.TestServerConfig) {
		c.DevMode = true
	})
	defer s.Stop()

	// register job to generate events
	jobs := c.Jobs()
	job := testJob()
	resp2, _, err := jobs.Register(job, nil)
	require.Nil(t, err)
	require.NotNil(t, resp2)

	// build event stream request
	events := c.EventStream()
	q := &QueryOptions{}
	topics := map[Topic][]string{
		TopicNode: {"*"},
	}

	ctx, cancel := context.WithCancel(context.Background())
	defer cancel()

	streamCh, err := events.Stream(ctx, topics, 0, q)
	require.NoError(t, err)

	select {
	case event := <-streamCh:
		if event.Err != nil {
			require.NoError(t, event.Err)
		}
		for _, e := range event.Events {
			// verify that we get a node
			n, err := e.Node()
			require.NoError(t, err)
			require.NotEmpty(t, n.ID)

			// perform a raw decoding and look for:
			// - "ID" to make sure that raw decoding is working correctly
			// - "SecretID" to make sure it's not present
			raw := make(map[string]map[string]interface{}, 0)
			cfg := &mapstructure.DecoderConfig{
				Result: &raw,
			}
			dec, err := mapstructure.NewDecoder(cfg)
			require.NoError(t, err)
			require.NoError(t, dec.Decode(e.Payload))
			require.Contains(t, raw, "Node")
			rawNode := raw["Node"]
			require.Equal(t, n.ID, rawNode["ID"])
			require.Empty(t, rawNode["SecretID"])
		}
	case <-time.After(5 * time.Second):
		require.Fail(t, "failed waiting for event stream event")
	}
}
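
// TestEventStream_PayloadValueHelpers exercises the typed payload accessors
// (Deployment, Evaluation, Allocation, Job, Node) against hand-written JSON
// events for each topic.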
func TestEventStream_PayloadValueHelpers(t *testing.T) {
	t.Parallel()

	testCases := []struct {
		desc     string
		event    Event
		input    []byte
		err      string
		expectFn func(t *testing.T, event Event)
	}{
		{
			desc:  "deployment",
			input: []byte(`{"Topic": "Deployment", "Payload": {"Deployment":{"ID":"some-id","JobID":"some-job-id", "TaskGroups": {"tg1": {"RequireProgressBy": "2020-11-05T11:52:54.370774000-05:00"}}}}}`),
			expectFn: func(t *testing.T, event Event) {
				eventTime, err := time.Parse(time.RFC3339, "2020-11-05T11:52:54.370774000-05:00")
				require.NoError(t, err)
				require.Equal(t, TopicDeployment, event.Topic)

				d, err := event.Deployment()
				require.NoError(t, err)
				require.Equal(t, &Deployment{
					ID:    "some-id",
					JobID: "some-job-id",
					TaskGroups: map[string]*DeploymentState{
						"tg1": {
							RequireProgressBy: eventTime,
						},
					},
				}, d)
			},
		},
		{
			desc:  "evaluation",
			input: []byte(`{"Topic": "Evaluation", "Payload": {"Evaluation":{"ID":"some-id","Namespace":"some-namespace-id"}}}`),
			expectFn: func(t *testing.T, event Event) {
				require.Equal(t, TopicEvaluation, event.Topic)
				eval, err := event.Evaluation()
				require.NoError(t, err)

				require.Equal(t, &Evaluation{
					ID:        "some-id",
					Namespace: "some-namespace-id",
				}, eval)
			},
		},
		{
			desc:  "allocation",
			input: []byte(`{"Topic": "Allocation", "Payload": {"Allocation":{"ID":"some-id","Namespace":"some-namespace-id"}}}`),
			expectFn: func(t *testing.T, event Event) {
				require.Equal(t, TopicAllocation, event.Topic)
				a, err := event.Allocation()
				require.NoError(t, err)
				require.Equal(t, &Allocation{
					ID:        "some-id",
					Namespace: "some-namespace-id",
				}, a)
			},
		},
		{
			desc:  "job",
			input: []byte(`{"Topic": "Job", "Payload": {"Job":{"ID":"some-id","Namespace":"some-namespace-id"}}}`),
			expectFn: func(t *testing.T, event Event) {
				require.Equal(t, TopicJob, event.Topic)
				j, err := event.Job()
				require.NoError(t, err)
				require.Equal(t, &Job{
					ID:        stringToPtr("some-id"),
					Namespace: stringToPtr("some-namespace-id"),
				}, j)
			},
		},
		{
			desc:  "node",
			input: []byte(`{"Topic": "Node", "Payload": {"Node":{"ID":"some-id","Datacenter":"some-dc-id"}}}`),
			expectFn: func(t *testing.T, event Event) {
				require.Equal(t, TopicNode, event.Topic)
				n, err := event.Node()
				require.NoError(t, err)
				require.Equal(t, &Node{
					ID:         "some-id",
					Datacenter: "some-dc-id",
				}, n)
			},
		},
	}

	for _, tc := range testCases {
		t.Run(tc.desc, func(t *testing.T) {
			var out Event
			err := json.Unmarshal(tc.input, &out)
			require.NoError(t, err)
			tc.expectFn(t, out)
		})
	}
}