2020-09-28 14:13:10 +00:00
|
|
|
package agent
|
|
|
|
|
|
|
|
import (
|
|
|
|
"bytes"
|
|
|
|
"context"
|
|
|
|
"fmt"
|
|
|
|
"io"
|
|
|
|
"net"
|
|
|
|
"net/http"
|
|
|
|
"net/url"
|
|
|
|
"strconv"
|
|
|
|
"strings"
|
|
|
|
|
|
|
|
"github.com/docker/docker/pkg/ioutils"
|
|
|
|
"github.com/hashicorp/go-msgpack/codec"
|
|
|
|
"github.com/hashicorp/nomad/nomad/structs"
|
2020-10-08 18:27:52 +00:00
|
|
|
"golang.org/x/sync/errgroup"
|
2020-09-28 14:13:10 +00:00
|
|
|
)
|
|
|
|
|
|
|
|
// EventStream is the HTTP handler for the event stream endpoint. It bridges
// an HTTP GET request to the streaming "Event.Stream" RPC: events arriving
// over the RPC connection are written to the response as newline-delimited
// JSON (NDJSON), flushed per entry, until the client disconnects or the
// stream ends.
//
// Query parameters:
//   - index: the index to begin streaming events from (defaults to 0)
//   - topic: zero or more "Topic[:FilterKey]" pairs; omitted means all topics
//
// Returns a nil result on success (the payload is streamed directly to resp)
// and a CodedError on failure.
func (s *HTTPServer) EventStream(resp http.ResponseWriter, req *http.Request) (interface{}, error) {
	// Streaming is read-only; reject anything but GET.
	if req.Method != http.MethodGet {
		return nil, CodedError(http.StatusMethodNotAllowed, ErrInvalidMethod)
	}

	query := req.URL.Query()

	// Parse the starting index; an absent index means "from 0".
	indexStr := query.Get("index")
	if indexStr == "" {
		indexStr = "0"
	}
	index, err := strconv.Atoi(indexStr)
	if err != nil {
		return nil, CodedError(400, fmt.Sprintf("Unable to parse index: %v", err))
	}

	topics, err := parseEventTopics(query)
	if err != nil {
		return nil, CodedError(400, fmt.Sprintf("Invalid topic query: %v", err))
	}

	args := &structs.EventStreamRequest{
		Topics: topics,
		Index:  index,
	}
	resp.Header().Set("Content-Type", "application/json")
	// Event streams must not be cached by intermediaries.
	resp.Header().Set("Cache-Control", "no-cache")

	// Set region, namespace and authtoken to args
	s.parse(resp, req, &args.QueryOptions.Region, &args.QueryOptions)

	// Determine the RPC handler to use to find a server: a local server
	// handles the stream directly; a client forwards to a remote server.
	var handler structs.StreamingRpcHandler
	var handlerErr error
	if server := s.agent.Server(); server != nil {
		handler, handlerErr = server.StreamingRpcHandler("Event.Stream")
	} else if client := s.agent.Client(); client != nil {
		handler, handlerErr = client.RemoteStreamingRpcHandler("Event.Stream")
	} else {
		handlerErr = fmt.Errorf("misconfigured connection")
	}

	if handlerErr != nil {
		return nil, CodedError(500, handlerErr.Error())
	}

	// In-memory pipe: our side (httpPipe) encodes the request and decodes
	// events; the other side (handlerPipe) is driven by the RPC handler.
	httpPipe, handlerPipe := net.Pipe()
	decoder := codec.NewDecoder(httpPipe, structs.MsgpackHandle)
	encoder := codec.NewEncoder(httpPipe, structs.MsgpackHandle)

	// Create a goroutine that closes the pipe if the connection closes
	ctx, cancel := context.WithCancel(req.Context())
	defer cancel()
	go func() {
		<-ctx.Done()
		httpPipe.Close()
	}()

	// Create an output that gets flushed on every write
	output := ioutils.NewWriteFlusher(resp)

	// send request and decode events
	errs, errCtx := errgroup.WithContext(ctx)
	errs.Go(func() error {
		// Ensure the pipe-closing goroutine above fires when this loop
		// exits for any reason.
		defer cancel()

		// Send the request
		if err := encoder.Encode(args); err != nil {
			return CodedError(500, err.Error())
		}

		for {
			// Non-blocking cancellation check before each decode.
			select {
			case <-errCtx.Done():
				return nil
			default:
			}

			// Decode the response
			var res structs.EventStreamWrapper
			if err := decoder.Decode(&res); err != nil {
				return CodedError(500, err.Error())
			}
			// Reset per message so decoder state never carries across
			// frames on the pipe.
			decoder.Reset(httpPipe)

			// Surface server-side stream errors with their code when
			// one was supplied.
			if err := res.Error; err != nil {
				if err.Code != nil {
					return CodedError(int(*err.Code), err.Error())
				}
			}

			// Flush json entry to response
			if _, err := io.Copy(output, bytes.NewReader(res.Event.Data)); err != nil {
				return CodedError(500, err.Error())
			}
			// Each entry is its own new line according to ndjson.org
			// append new line to each entry
			fmt.Fprint(output, "\n")
		}
	})

	// invoke handler; blocks until the streaming RPC completes.
	handler(handlerPipe)
	// Unblock the decode loop and the pipe-closing goroutine.
	cancel()

	codedErr := errs.Wait()
	// A closed pipe is the normal result of the client disconnecting or the
	// stream finishing — not an error worth reporting.
	if codedErr != nil && strings.Contains(codedErr.Error(), io.ErrClosedPipe.Error()) {
		codedErr = nil
	}

	return nil, codedErr
}
|
|
|
|
|
2020-10-04 19:12:35 +00:00
|
|
|
func parseEventTopics(query url.Values) (map[structs.Topic][]string, error) {
|
2020-09-28 14:13:10 +00:00
|
|
|
raw, ok := query["topic"]
|
|
|
|
if !ok {
|
|
|
|
return allTopics(), nil
|
|
|
|
}
|
2020-10-04 19:12:35 +00:00
|
|
|
topics := make(map[structs.Topic][]string)
|
2020-09-28 14:13:10 +00:00
|
|
|
|
|
|
|
for _, topic := range raw {
|
|
|
|
k, v, err := parseTopic(topic)
|
|
|
|
if err != nil {
|
|
|
|
return nil, fmt.Errorf("error parsing topics: %w", err)
|
|
|
|
}
|
|
|
|
|
2020-10-08 18:27:52 +00:00
|
|
|
topics[structs.Topic(k)] = append(topics[structs.Topic(k)], v)
|
2020-09-28 14:13:10 +00:00
|
|
|
}
|
|
|
|
return topics, nil
|
|
|
|
}
|
|
|
|
|
|
|
|
// parseTopic splits a "Topic:FilterKey" query value into its topic and
// filter-key components. A value with no ":" separator implies the wildcard
// filter "*"; more than one separator is rejected.
func parseTopic(topic string) (string, string, error) {
	pieces := strings.Split(topic, ":")
	switch len(pieces) {
	case 1:
		// infer wildcard if only given a topic
		return topic, "*", nil
	case 2:
		return pieces[0], pieces[1], nil
	default:
		return "", "", fmt.Errorf("Invalid key value pair for topic, topic: %s", topic)
	}
}
|
|
|
|
|
2020-10-04 19:12:35 +00:00
|
|
|
func allTopics() map[structs.Topic][]string {
|
|
|
|
return map[structs.Topic][]string{"*": {"*"}}
|
2020-09-28 14:13:10 +00:00
|
|
|
}
|