restored Node.Sanitize() for RPC endpoints

multiple other updates from code review
This commit is contained in:
Chris Baker 2021-03-26 17:03:15 +00:00
parent 04081a983f
commit 770c9cecb5
23 changed files with 121 additions and 99 deletions

View File

@ -155,13 +155,12 @@ func TestEventStream_PayloadValue(t *testing.T) {
require.NotEmpty(t, n.ID) require.NotEmpty(t, n.ID)
// perform a raw decoding and look for: // perform a raw decoding and look for:
// - "ID", to make sure that raw decoding is correct // - "ID" to make sure that raw decoding is working correctly
// - "SecretID", to make sure it's not present // - "SecretID" to make sure it's not present
raw := make(map[string]map[string]interface{}, 0) raw := make(map[string]map[string]interface{}, 0)
cfg := &mapstructure.DecoderConfig{ cfg := &mapstructure.DecoderConfig{
Result: &raw, Result: &raw,
} }
dec, err := mapstructure.NewDecoder(cfg) dec, err := mapstructure.NewDecoder(cfg)
require.NoError(t, err) require.NoError(t, err)
require.NoError(t, dec.Decode(e.Payload)) require.NoError(t, dec.Decode(e.Payload))

View File

@ -202,7 +202,9 @@ func TestNodes_NoSecretID(t *testing.T) {
t.Fatalf("err: %s", err) t.Fatalf("err: %s", err)
}) })
// Query the node, ensure that .SecretID was not returned by the HTTP server // perform a raw http call and make sure that:
// - "ID" to make sure that raw decoding is working correctly
// - "SecretID" to make sure it's not present
resp := make(map[string]interface{}) resp := make(map[string]interface{})
_, err := c.query("/v1/node/"+nodeID, &resp, nil) _, err := c.query("/v1/node/"+nodeID, &resp, nil)
require.NoError(t, err) require.NoError(t, err)

View File

@ -13,7 +13,7 @@ import (
"github.com/hashicorp/nomad/command/agent/monitor" "github.com/hashicorp/nomad/command/agent/monitor"
"github.com/hashicorp/nomad/command/agent/pprof" "github.com/hashicorp/nomad/command/agent/pprof"
"github.com/hashicorp/nomad/helper" "github.com/hashicorp/nomad/helper"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/nomad/structs"
metrics "github.com/armon/go-metrics" metrics "github.com/armon/go-metrics"
@ -124,7 +124,7 @@ func (a *Agent) monitor(conn io.ReadWriteCloser) {
frames := make(chan *sframer.StreamFrame, streamFramesBuffer) frames := make(chan *sframer.StreamFrame, streamFramesBuffer)
errCh := make(chan error) errCh := make(chan error)
var buf bytes.Buffer var buf bytes.Buffer
frameCodec := codec.NewEncoder(&buf, handlers.JsonHandle) frameCodec := codec.NewEncoder(&buf, jsonhandles.JsonHandle)
framer := sframer.NewStreamFramer(frames, 1*time.Second, 200*time.Millisecond, 1024) framer := sframer.NewStreamFramer(frames, 1*time.Second, 200*time.Millisecond, 1024)
framer.Run() framer.Run()

View File

@ -15,7 +15,7 @@ import (
cstructs "github.com/hashicorp/nomad/client/structs" cstructs "github.com/hashicorp/nomad/client/structs"
"github.com/hashicorp/nomad/helper" "github.com/hashicorp/nomad/helper"
"github.com/hashicorp/nomad/helper/uuid" "github.com/hashicorp/nomad/helper/uuid"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/nomad/structs"
nstructs "github.com/hashicorp/nomad/nomad/structs" nstructs "github.com/hashicorp/nomad/nomad/structs"
"github.com/hashicorp/nomad/plugins/drivers" "github.com/hashicorp/nomad/plugins/drivers"
@ -281,7 +281,7 @@ func newExecStream(decoder *codec.Decoder, encoder *codec.Encoder) drivers.ExecT
buf: buf, buf: buf,
encoder: encoder, encoder: encoder,
frameCodec: codec.NewEncoder(buf, handlers.JsonHandle), frameCodec: codec.NewEncoder(buf, jsonhandles.JsonHandle),
} }
} }

View File

@ -24,7 +24,7 @@ import (
sframer "github.com/hashicorp/nomad/client/lib/streamframer" sframer "github.com/hashicorp/nomad/client/lib/streamframer"
cstructs "github.com/hashicorp/nomad/client/structs" cstructs "github.com/hashicorp/nomad/client/structs"
"github.com/hashicorp/nomad/helper" "github.com/hashicorp/nomad/helper"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/nomad/structs"
) )
@ -239,7 +239,7 @@ func (f *FileSystem) stream(conn io.ReadWriteCloser) {
frames := make(chan *sframer.StreamFrame, streamFramesBuffer) frames := make(chan *sframer.StreamFrame, streamFramesBuffer)
errCh := make(chan error) errCh := make(chan error)
var buf bytes.Buffer var buf bytes.Buffer
frameCodec := codec.NewEncoder(&buf, handlers.JsonHandle) frameCodec := codec.NewEncoder(&buf, jsonhandles.JsonHandle)
// Create the framer // Create the framer
framer := sframer.NewStreamFramer(frames, streamHeartbeatRate, streamBatchWindow, streamFrameSize) framer := sframer.NewStreamFramer(frames, streamHeartbeatRate, streamBatchWindow, streamFrameSize)
@ -470,7 +470,7 @@ func (f *FileSystem) logs(conn io.ReadWriteCloser) {
var streamErr error var streamErr error
buf := new(bytes.Buffer) buf := new(bytes.Buffer)
frameCodec := codec.NewEncoder(buf, handlers.JsonHandle) frameCodec := codec.NewEncoder(buf, jsonhandles.JsonHandle)
OUTER: OUTER:
for { for {
select { select {

View File

@ -24,7 +24,7 @@ import (
"github.com/hashicorp/nomad/helper/noxssrw" "github.com/hashicorp/nomad/helper/noxssrw"
"github.com/hashicorp/nomad/helper/tlsutil" "github.com/hashicorp/nomad/helper/tlsutil"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/nomad/structs"
) )
@ -496,13 +496,13 @@ func (s *HTTPServer) wrap(handler func(resp http.ResponseWriter, req *http.Reque
if obj != nil { if obj != nil {
var buf bytes.Buffer var buf bytes.Buffer
if prettyPrint { if prettyPrint {
enc := codec.NewEncoder(&buf, handlers.JsonHandlePretty) enc := codec.NewEncoder(&buf, jsonhandles.JsonHandlePretty)
err = enc.Encode(obj) err = enc.Encode(obj)
if err == nil { if err == nil {
buf.Write([]byte("\n")) buf.Write([]byte("\n"))
} }
} else { } else {
enc := codec.NewEncoder(&buf, handlers.JsonHandleWithExtensions) enc := codec.NewEncoder(&buf, jsonhandles.JsonHandleWithExtensions)
err = enc.Encode(obj) err = enc.Encode(obj)
} }
if err != nil { if err != nil {

View File

@ -30,8 +30,7 @@ import (
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
nomadjson "github.com/hashicorp/nomad/nomad/json/handlers"
) )
// makeHTTPServer returns a test server whose logs will be written to // makeHTTPServer returns a test server whose logs will be written to
@ -315,11 +314,11 @@ func testPrettyPrint(pretty string, prettyFmt bool, t *testing.T) {
var expected bytes.Buffer var expected bytes.Buffer
var err error var err error
if prettyFmt { if prettyFmt {
enc := codec.NewEncoder(&expected, nomadjson.JsonHandlePretty) enc := codec.NewEncoder(&expected, jsonhandles.JsonHandlePretty)
err = enc.Encode(r) err = enc.Encode(r)
expected.WriteByte('\n') expected.WriteByte('\n')
} else { } else {
enc := codec.NewEncoder(&expected, nomadjson.JsonHandleWithExtensions) enc := codec.NewEncoder(&expected, jsonhandles.JsonHandleWithExtensions)
err = enc.Encode(r) err = enc.Encode(r)
} }
if err != nil { if err != nil {
@ -1249,13 +1248,13 @@ func Test_decodeBody(t *testing.T) {
// BenchmarkHTTPServer_JSONEncodingWithExtensions benchmarks the performance of // BenchmarkHTTPServer_JSONEncodingWithExtensions benchmarks the performance of
// encoding JSON objects using extensions // encoding JSON objects using extensions
func BenchmarkHTTPServer_JSONEncodingWithExtensions(b *testing.B) { func BenchmarkHTTPServer_JSONEncodingWithExtensions(b *testing.B) {
benchmarkJsonEncoding(b, handlers.JsonHandleWithExtensions) benchmarkJsonEncoding(b, jsonhandles.JsonHandleWithExtensions)
} }
// BenchmarkHTTPServer_JSONEncodingWithoutExtensions benchmarks the performance of // BenchmarkHTTPServer_JSONEncodingWithoutExtensions benchmarks the performance of
// encoding JSON objects using extensions // encoding JSON objects using extensions
func BenchmarkHTTPServer_JSONEncodingWithoutExtensions(b *testing.B) { func BenchmarkHTTPServer_JSONEncodingWithoutExtensions(b *testing.B) {
benchmarkJsonEncoding(b, handlers.JsonHandle) benchmarkJsonEncoding(b, jsonhandles.JsonHandle)
} }
func benchmarkJsonEncoding(b *testing.B, handle *codec.JsonHandle) { func benchmarkJsonEncoding(b *testing.B, handle *codec.JsonHandle) {

View File

@ -62,9 +62,4 @@ a few ways that this can be accomplished:
This conversion can be performed in-situ in the agent HTTP handler, as long as the conversion doesn't need to This conversion can be performed in-situ in the agent HTTP handler, as long as the conversion doesn't need to
appear in other handlers. appear in other handlers.
Otherwise, it is possible to register an extension on the JSON encoding used by the HTTP agent; these extensions Otherwise, it is possible to register an extension on the JSON encoding used by the HTTP agent; these extensions
can be put in `nomad/json/extensions.go`. can be put in `nomad/json/extensions.go`.
Note, for simple transformations to encoding (like renaming or neglecting fields), we can use field tags on the structs
from `nomad/structs`. Our msgpack [encoding](http://ugorji.net/blog/go-codec-primer#drop-in-replacement-for-encoding-json-json-key-in-struct-tag-supported)
only uses the [`codec` tag](https://github.com/hashicorp/nomad/blob/v1.0.0/nomad/structs/structs.go#L10557-L10558).
Therefore, the `json` tag is available for customizing API output when encoding `structs` objects. See `structs.Node.SecretID`, for example.

View File

@ -11,7 +11,7 @@ import (
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
"github.com/hashicorp/nomad/helper/pluginutils/hclspecutils" "github.com/hashicorp/nomad/helper/pluginutils/hclspecutils"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/hashicorp/nomad/plugins/drivers" "github.com/hashicorp/nomad/plugins/drivers"
"github.com/hashicorp/nomad/plugins/shared/hclspec" "github.com/hashicorp/nomad/plugins/shared/hclspec"
) )
@ -122,7 +122,7 @@ func JsonConfigToInterface(t *testing.T, config string) interface{} {
t.Helper() t.Helper()
// Decode from json // Decode from json
dec := codec.NewDecoderBytes([]byte(config), handlers.JsonHandle) dec := codec.NewDecoderBytes([]byte(config), jsonhandles.JsonHandle)
var m map[string]interface{} var m map[string]interface{}
err := dec.Decode(&m) err := dec.Decode(&m)

View File

@ -10,7 +10,7 @@ import (
"github.com/hashicorp/hcl/v2/hcldec" "github.com/hashicorp/hcl/v2/hcldec"
hjson "github.com/hashicorp/hcl/v2/json" hjson "github.com/hashicorp/hcl/v2/json"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/zclconf/go-cty/cty" "github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/function" "github.com/zclconf/go-cty/cty/function"
@ -28,7 +28,7 @@ func ParseHclInterface(val interface{}, spec hcldec.Spec, vars map[string]cty.Va
// Encode to json // Encode to json
var buf bytes.Buffer var buf bytes.Buffer
enc := codec.NewEncoder(&buf, handlers.JsonHandle) enc := codec.NewEncoder(&buf, jsonhandles.JsonHandle)
err := enc.Encode(val) err := enc.Encode(val)
if err != nil { if err != nil {
// Convert to a hcl diagnostics message // Convert to a hcl diagnostics message

View File

@ -17,7 +17,7 @@ import (
"github.com/hashicorp/nomad/command/agent/monitor" "github.com/hashicorp/nomad/command/agent/monitor"
"github.com/hashicorp/nomad/command/agent/pprof" "github.com/hashicorp/nomad/command/agent/pprof"
"github.com/hashicorp/nomad/helper" "github.com/hashicorp/nomad/helper"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/nomad/structs"
"github.com/hashicorp/go-msgpack/codec" "github.com/hashicorp/go-msgpack/codec"
@ -187,7 +187,7 @@ func (a *Agent) monitor(conn io.ReadWriteCloser) {
frames := make(chan *sframer.StreamFrame, 32) frames := make(chan *sframer.StreamFrame, 32)
errCh := make(chan error) errCh := make(chan error)
var buf bytes.Buffer var buf bytes.Buffer
frameCodec := codec.NewEncoder(&buf, handlers.JsonHandle) frameCodec := codec.NewEncoder(&buf, jsonhandles.JsonHandle)
framer := sframer.NewStreamFramer(frames, 1*time.Second, 200*time.Millisecond, 1024) framer := sframer.NewStreamFramer(frames, 1*time.Second, 200*time.Millisecond, 1024)
framer.Run() framer.Run()

View File

@ -1,30 +0,0 @@
package json
import (
"reflect"
"github.com/hashicorp/nomad/nomad/structs"
)
// init registers all extensions used by the API HTTP server when encoding JSON.
// Registration happens at package load time, so every handle built in this
// package sees the same extension set.
func init() {
// TODO: this could be simplified by looking up the base type in the case of a pointer type
// Both the value type and the pointer type are registered because the encoder
// looks up extensions by the exact reflect.Type it encounters.
registerExtension(reflect.TypeOf(structs.Node{}), nodeExt)
registerExtension(reflect.TypeOf(&structs.Node{}), nodeExt)
}
// nodeExt adds the legacy field .Drain back to encoded Node objects.
// v is expected to hold a *structs.Node; the type assertion panics otherwise.
func nodeExt(v interface{}) interface{} {
node := v.(*structs.Node)
if node == nil {
return nil
}
// NodeAlias is a defined type (not an alias), so encoding the embedded value
// does not re-trigger this extension recursively.
type NodeAlias structs.Node
return &struct {
*NodeAlias
Drain bool
}{
NodeAlias: (*NodeAlias)(node),
// Drain mirrors the deprecated boolean field, derived from DrainStrategy.
Drain: node.DrainStrategy != nil,
}
}

View File

@ -1,4 +1,4 @@
package json package jsonhandles
import ( import (
"reflect" "reflect"
@ -9,16 +9,6 @@ import (
// extendFunc is a mapping from one struct to another, to change the shape of the encoded JSON // extendFunc is a mapping from one struct to another, to change the shape of the encoded JSON
type extendFunc func(interface{}) interface{} type extendFunc func(interface{}) interface{}
var (
// extendedTypes is a mapping of extended types to their extension function
extendedTypes = map[reflect.Type]extendFunc{}
)
// registerExtension registers an extension function against a particular type
func registerExtension(tpe reflect.Type, ext extendFunc) {
extendedTypes[tpe] = ext
}
// nomadJsonEncodingExtensions is a catch-all go-msgpack extension // nomadJsonEncodingExtensions is a catch-all go-msgpack extension
// it looks up the types in the list of registered extension functions and applies it // it looks up the types in the list of registered extension functions and applies it
type nomadJsonEncodingExtensions struct{} type nomadJsonEncodingExtensions struct{}

View File

@ -0,0 +1,33 @@
package jsonhandles
import (
"reflect"
"github.com/hashicorp/nomad/nomad/structs"
)
var (
// extendedTypes is a mapping of extended types to their extension function
// TODO: the duplicates could be simplified by looking up the base type in the case of a pointer type in ConvertExt
// Both structs.Node and *structs.Node map to the same extension because the
// encoder looks up extensions by the exact reflect.Type it sees.
extendedTypes = map[reflect.Type]extendFunc{
reflect.TypeOf(structs.Node{}): nodeExt,
reflect.TypeOf(&structs.Node{}): nodeExt,
}
)

// nodeExt ensures the node is sanitized and adds the legacy field .Drain back to encoded Node objects.
// v is expected to hold a *structs.Node; the type assertion panics otherwise.
func nodeExt(v interface{}) interface{} {
// Sanitize is nil-safe (returns nil for a nil receiver), so no separate nil
// check is needed here; the Drain expression below guards against nil itself.
node := v.(*structs.Node).Sanitize()
// transform to a struct with inlined Node fields plus the Drain field
// - using defined type (not an alias!) EmbeddedNode gives us free conversion to a distinct type
// - distinct type prevents this encoding extension from being called recursively/infinitely on the embedding
// - pointers mean the conversion function doesn't have to make a copy during conversion
type EmbeddedNode structs.Node
return &struct {
*EmbeddedNode
Drain bool
}{
EmbeddedNode: (*EmbeddedNode)(node),
Drain: node != nil && node.DrainStrategy != nil,
}
}

View File

@ -1,9 +1,7 @@
package handlers package jsonhandles
import ( import (
"github.com/hashicorp/go-msgpack/codec" "github.com/hashicorp/go-msgpack/codec"
"github.com/hashicorp/nomad/nomad/json"
) )
var ( var (
@ -14,10 +12,10 @@ var (
JsonHandle = &codec.JsonHandle{ JsonHandle = &codec.JsonHandle{
HTMLCharsAsIs: true, HTMLCharsAsIs: true,
} }
JsonHandleWithExtensions = json.NomadJsonEncodingExtensions(&codec.JsonHandle{ JsonHandleWithExtensions = NomadJsonEncodingExtensions(&codec.JsonHandle{
HTMLCharsAsIs: true, HTMLCharsAsIs: true,
}) })
JsonHandlePretty = json.NomadJsonEncodingExtensions(&codec.JsonHandle{ JsonHandlePretty = NomadJsonEncodingExtensions(&codec.JsonHandle{
HTMLCharsAsIs: true, HTMLCharsAsIs: true,
Indent: 4, Indent: 4,
}) })

View File

@ -801,6 +801,7 @@ func (n *Node) GetNode(args *structs.NodeSpecificRequest,
// Setup the output // Setup the output
if out != nil { if out != nil {
out = out.Sanitize()
reply.Node = out reply.Node = out
reply.Index = out.ModifyIndex reply.Index = out.ModifyIndex
} else { } else {

View File

@ -1314,6 +1314,7 @@ func TestClientEndpoint_GetNode(t *testing.T) {
// Update the status updated at value // Update the status updated at value
node.StatusUpdatedAt = resp2.Node.StatusUpdatedAt node.StatusUpdatedAt = resp2.Node.StatusUpdatedAt
node.SecretID = ""
node.Events = resp2.Node.Events node.Events = resp2.Node.Events
if !reflect.DeepEqual(node, resp2.Node) { if !reflect.DeepEqual(node, resp2.Node) {
t.Fatalf("bad: %#v \n %#v", node, resp2.Node) t.Fatalf("bad: %#v \n %#v", node, resp2.Node)

View File

@ -647,10 +647,7 @@ func evaluateNodePlan(snap *state.StateSnapshot, plan *structs.Plan, nodeID stri
} else if node.Status != structs.NodeStatusReady { } else if node.Status != structs.NodeStatusReady {
return false, "node is not ready for placements", nil return false, "node is not ready for placements", nil
} else if node.SchedulingEligibility == structs.NodeSchedulingIneligible { } else if node.SchedulingEligibility == structs.NodeSchedulingIneligible {
return false, "node is not eligible for draining", nil return false, "node is not eligible", nil
} else if node.DrainStrategy != nil {
// Deprecate in favor of scheduling eligibility and remove post-0.8
return false, "node is draining", nil
} }
// Get the existing allocations that are non-terminal // Get the existing allocations that are non-terminal

View File

@ -80,6 +80,7 @@ func eventFromChange(change memdb.Change) (structs.Event, bool) {
return structs.Event{}, false return structs.Event{}, false
} }
before = before.Sanitize()
return structs.Event{ return structs.Event{
Topic: structs.TopicNode, Topic: structs.TopicNode,
Key: before.ID, Key: before.ID,
@ -175,6 +176,7 @@ func eventFromChange(change memdb.Change) (structs.Event, bool) {
return structs.Event{}, false return structs.Event{}, false
} }
after = after.Sanitize()
return structs.Event{ return structs.Event{
Topic: structs.TopicNode, Topic: structs.TopicNode,
Key: after.ID, Key: after.ID,

View File

@ -8,7 +8,7 @@ import (
"github.com/hashicorp/go-msgpack/codec" "github.com/hashicorp/go-msgpack/codec"
"github.com/hashicorp/nomad/nomad/json/handlers" "github.com/hashicorp/nomad/nomad/jsonhandles"
"github.com/hashicorp/nomad/nomad/structs" "github.com/hashicorp/nomad/nomad/structs"
) )
@ -75,7 +75,7 @@ func (n *JsonStream) Send(v interface{}) error {
} }
var buf bytes.Buffer var buf bytes.Buffer
enc := codec.NewEncoder(&buf, handlers.JsonHandleWithExtensions) enc := codec.NewEncoder(&buf, jsonhandles.JsonHandleWithExtensions)
err := enc.Encode(v) err := enc.Encode(v)
if err != nil { if err != nil {
return fmt.Errorf("error marshaling json for stream: %w", err) return fmt.Errorf("error marshaling json for stream: %w", err)

View File

@ -1907,6 +1907,20 @@ type Node struct {
ModifyIndex uint64 ModifyIndex uint64
} }
// Sanitize returns a copy of the Node omitting confidential fields.
// It only returns a copy if the Node contains the confidential fields;
// otherwise the receiver itself is returned unchanged. Safe to call on a
// nil receiver, in which case it returns nil.
func (n *Node) Sanitize() *Node {
if n == nil {
return nil
}
// Fast path: nothing confidential to strip, so avoid the deep copy.
if n.SecretID == "" {
return n
}
clean := n.Copy()
clean.SecretID = ""
return clean
}
// Ready returns true if the node is ready for running allocations // Ready returns true if the node is ready for running allocations
func (n *Node) Ready() bool { func (n *Node) Ready() bool {
return n.Status == NodeStatusReady && n.DrainStrategy == nil && n.SchedulingEligibility == NodeSchedulingEligible return n.Status == NodeStatusReady && n.DrainStrategy == nil && n.SchedulingEligibility == NodeSchedulingEligible
@ -1917,6 +1931,16 @@ func (n *Node) Canonicalize() {
return return
} }
// Ensure SchedulingEligibility is set whenever draining so the plan applier and other scheduling logic only need
// to check SchedulingEligibility when determining whether a placement is feasible on a node.
if n.SchedulingEligibility == "" {
if n.DrainStrategy != nil {
n.SchedulingEligibility = NodeSchedulingIneligible
} else {
n.SchedulingEligibility = NodeSchedulingEligible
}
}
// COMPAT remove in 1.0 // COMPAT remove in 1.0
// In v0.12.0 we introduced a separate node specific network resource struct // In v0.12.0 we introduced a separate node specific network resource struct
// so we need to covert any pre 0.12 clients to the correct struct // so we need to covert any pre 0.12 clients to the correct struct
@ -1940,14 +1964,6 @@ func (n *Node) Canonicalize() {
} }
} }
} }
if n.SchedulingEligibility == "" {
if n.DrainStrategy != nil {
n.SchedulingEligibility = NodeSchedulingIneligible
} else {
n.SchedulingEligibility = NodeSchedulingEligible
}
}
} }
func (n *Node) Copy() *Node { func (n *Node) Copy() *Node {

View File

@ -5601,6 +5601,31 @@ func TestNode_Copy(t *testing.T) {
require.Equal(node.Drivers, node2.Drivers) require.Equal(node.Drivers, node2.Drivers)
} }
// TestNode_Sanitize verifies that Node.Sanitize is nil-safe and always strips
// the SecretID from non-nil nodes, whether or not one was set.
func TestNode_Sanitize(t *testing.T) {
require := require.New(t)
// Cases: nil receiver, node with an empty SecretID, node with a SecretID set.
testCases := []*Node{
nil,
{
ID: uuid.Generate(),
SecretID: "",
},
{
ID: uuid.Generate(),
SecretID: uuid.Generate(),
},
}
for _, tc := range testCases {
sanitized := tc.Sanitize()
if tc == nil {
// Sanitize on a nil receiver must return nil, not an empty Node.
require.Nil(sanitized)
} else {
require.NotNil(sanitized)
require.Empty(sanitized.SecretID)
}
}
}
func TestSpread_Validate(t *testing.T) { func TestSpread_Validate(t *testing.T) {
type tc struct { type tc struct {
spread *Spread spread *Spread

View File

@ -252,13 +252,7 @@ func readyNodesInDCs(state State, dcs []string) ([]*structs.Node, map[string]int
// Filter on datacenter and status // Filter on datacenter and status
node := raw.(*structs.Node) node := raw.(*structs.Node)
if node.Status != structs.NodeStatusReady { if !node.Ready() {
continue
}
if node.DrainStrategy != nil {
continue
}
if node.SchedulingEligibility != structs.NodeSchedulingEligible {
continue continue
} }
if _, ok := dcMap[node.Datacenter]; !ok { if _, ok := dcMap[node.Datacenter]; !ok {