add assertions (#16087)

Anita Akaeze 2023-02-03 10:20:22 -05:00 committed by GitHub
parent 1ad327ddf5
commit 7921a80ad2
12 changed files with 292 additions and 44 deletions


@@ -15,10 +15,11 @@ require (
github.com/itchyny/gojq v0.12.9
github.com/mitchellh/copystructure v1.2.0
github.com/pkg/errors v0.9.1
github.com/stretchr/testify v1.8.0
github.com/stretchr/testify v1.8.1
github.com/teris-io/shortid v0.0.0-20220617161101-71ec9f2aa569
github.com/testcontainers/testcontainers-go v0.15.0
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4
gotest.tools v2.2.0+incompatible
)
require (
@@ -37,6 +38,7 @@ require (
github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/btree v1.0.0 // indirect
github.com/google/go-cmp v0.5.9 // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-hclog v1.2.1 // indirect


@@ -385,6 +385,7 @@ github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.6/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.9 h1:O2Tfq5qg4qc4AmwVlvv0oLiVAGB7enBSJ2x2DqQFi38=
github.com/google/go-cmp v0.5.9/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
github.com/google/go-containerregistry v0.5.1/go.mod h1:Ct15B4yir3PLOP5jsy0GNeYVaIZs/MK/Jz5any1wFW0=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/gofuzz v1.1.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -721,8 +722,9 @@ github.com/stretchr/objx v0.0.0-20180129172003-8a3f7159479f/go.mod h1:HFkY916IF+
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
github.com/stretchr/objx v0.4.0 h1:M2gUjqZET1qApGOWNSnZ49BAIMX4F/1plDv3+l31EJ4=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0 h1:1zr/of2m5FGMsad5YfcqgdqdWrIhu+EBEJRhR1U7z/c=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v0.0.0-20180303142811-b89eecf5ca5d/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
@@ -731,8 +733,9 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/stretchr/testify v1.8.0 h1:pSgiaMZlXftHpm5L7V1+rVB+AZJydKsMxsQBIJw4PKk=
github.com/stretchr/testify v1.8.0/go.mod h1:yNjHg4UonilssWZ8iaSj1OCr/vHnekPRkoO+kdMU+MU=
github.com/stretchr/testify v1.8.1 h1:w7B6lhMri9wdJUVmEZPGGhZzrYTPvgJArz7wNPgYKsk=
github.com/stretchr/testify v1.8.1/go.mod h1:w2LPCIKwWwSfY2zedu0+kehJoqGctiVI29o6fzry7u4=
github.com/syndtr/gocapability v0.0.0-20170704070218-db04d3cc01c8/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/syndtr/gocapability v0.0.0-20180916011248-d98352740cb2/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=


@@ -4,15 +4,15 @@ import (
"fmt"
"io"
"net/url"
"regexp"
"strconv"
"strings"
"testing"
"time"
"github.com/hashicorp/go-cleanhttp"
"github.com/hashicorp/consul/sdk/testutil/retry"
"github.com/hashicorp/consul/test/integration/consul-container/libs/utils"
"github.com/hashicorp/go-cleanhttp"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -28,7 +28,7 @@ func GetEnvoyListenerTCPFilters(t *testing.T, adminPort int) {
}
retry.RunWith(failer(), t, func(r *retry.R) {
dump, err = GetEnvoyOutput(adminPort, "config_dump", map[string]string{})
dump, _, err = GetEnvoyOutput(adminPort, "config_dump", map[string]string{})
if err != nil {
r.Fatal("could not fetch envoy configuration")
}
@@ -39,10 +39,7 @@ func GetEnvoyListenerTCPFilters(t *testing.T, adminPort int) {
filter := `.configs[2].dynamic_listeners[].active_state.listener | "\(.name) \( .filter_chains[0].filters | map(.name) | join(","))"`
results, err := utils.JQFilter(dump, filter)
require.NoError(t, err, "could not parse envoy configuration")
if len(results) != 2 {
require.Error(t, fmt.Errorf("s1 proxy should have been configured with one rbac listener filter. Got %d listener(s)", len(results)))
}
require.Len(t, results, 2, "static-server proxy should have been configured with two listener filters")
var filteredResult []string
for _, result := range results {
@@ -65,7 +62,7 @@ func AssertUpstreamEndpointStatus(t *testing.T, adminPort int, clusterName, heal
}
retry.RunWith(failer(), t, func(r *retry.R) {
clusters, err = GetEnvoyOutput(adminPort, "clusters", map[string]string{"format": "json"})
clusters, _, err = GetEnvoyOutput(adminPort, "clusters", map[string]string{"format": "json"})
if err != nil {
r.Fatal("could not fetch envoy clusters")
}
@@ -88,7 +85,7 @@ func AssertEnvoyMetricAtMost(t *testing.T, adminPort int, prefix, metric string,
}
retry.RunWith(failer(), t, func(r *retry.R) {
stats, err = GetEnvoyOutput(adminPort, "stats", nil)
stats, _, err = GetEnvoyOutput(adminPort, "stats", nil)
if err != nil {
r.Fatal("could not fetch envoy stats")
}
@@ -131,7 +128,7 @@ func AssertEnvoyMetricAtLeast(t *testing.T, adminPort int, prefix, metric string
}
retry.RunWith(failer(), t, func(r *retry.R) {
stats, err = GetEnvoyOutput(adminPort, "stats", nil)
stats, _, err = GetEnvoyOutput(adminPort, "stats", nil)
if err != nil {
r.Fatal("could not fetch envoy stats")
}
@@ -145,8 +142,9 @@ func AssertEnvoyMetricAtLeast(t *testing.T, adminPort int, prefix, metric string
}
// GetEnvoyHTTPrbacFilters validates that proxy was configured with an http connection manager
// AssertEnvoyHTTPrbacFilters validates that proxy was configured with an http connection manager
// this assertion is currently unused; current tests use the http protocol
func GetEnvoyHTTPrbacFilters(t *testing.T, port int) {
func AssertEnvoyHTTPrbacFilters(t *testing.T, port int) {
var (
dump string
err error
@@ -156,7 +154,7 @@ func GetEnvoyHTTPrbacFilters(t *testing.T, port int) {
}
retry.RunWith(failer(), t, func(r *retry.R) {
dump, err = GetEnvoyOutput(port, "config_dump", map[string]string{})
dump, _, err = GetEnvoyOutput(port, "config_dump", map[string]string{})
if err != nil {
r.Fatal("could not fetch envoy configuration")
}
@@ -166,10 +164,7 @@ func GetEnvoyHTTPrbacFilters(t *testing.T, port int) {
filter := `.configs[2].dynamic_listeners[].active_state.listener | "\(.name) \( .filter_chains[0].filters[] | select(.name == "envoy.filters.network.http_connection_manager") | .typed_config.http_filters | map(.name) | join(","))"`
results, err := utils.JQFilter(dump, filter)
require.NoError(t, err, "could not parse envoy configuration")
if len(results) != 2 {
require.Error(t, fmt.Errorf("s1 proxy should have been configured with one rbac listener filter. Got %d listener(s)", len(results)))
}
require.Len(t, results, 1, "static-server proxy should have been configured with one listener filter.")
var filteredResult []string
for _, result := range results {
@@ -181,15 +176,41 @@ func GetEnvoyHTTPrbacFilters(t *testing.T, port int) {
assert.Contains(t, filteredResult, "envoy.filters.http.router")
}
// sanitizeResult takes the value returned from config_dump json and cleans it up to remove special characters
// e.g public_listener:0.0.0.0:21001 envoy.filters.network.rbac,envoy.filters.network.tcp_proxy
// returns [envoy.filters.network.rbac envoy.filters.network.tcp_proxy]
func sanitizeResult(s string) []string {
result := strings.Split(strings.ReplaceAll(s, `,`, " "), " ")
return append(result[:0], result[1:]...)
// AssertEnvoyPresentsCertURI makes a GET request to the /certs endpoint and validates that
// the expected certificate URI is present in the response
func AssertEnvoyPresentsCertURI(t *testing.T, port int, serviceName string) {
var (
dump string
err error
)
failer := func() *retry.Timer {
return &retry.Timer{Timeout: 30 * time.Second, Wait: 1 * time.Second}
}
retry.RunWith(failer(), t, func(r *retry.R) {
dump, _, err = GetEnvoyOutput(port, "certs", nil)
if err != nil {
r.Fatal("could not fetch envoy configuration")
}
require.NotNil(r, dump)
})
// Validate certificate uri
filter := `.certificates[] | .cert_chain[].subject_alt_names[].uri`
results, err := utils.JQFilter(dump, filter)
require.NoError(t, err, "could not parse envoy configuration")
require.NotEmpty(t, results, "client and server proxy should have been configured with certificate uri")
for _, cert := range results {
matched, err := regexp.MatchString(fmt.Sprintf("spiffe://[a-zA-Z0-9-]+.consul/ns/%s/dc/%s/svc/%s", "default", "dc1", serviceName), cert)
require.NoError(t, err)
assert.True(t, matched)
}
}
func GetEnvoyOutput(port int, path string, query map[string]string) (string, error) {
func GetEnvoyOutput(port int, path string, query map[string]string) (string, int, error) {
client := cleanhttp.DefaultClient()
var u url.URL
u.Host = fmt.Sprintf("localhost:%d", port)
@@ -207,14 +228,23 @@ func GetEnvoyOutput(port int, path string, query map[string]string) (string, err
res, err := client.Get(u.String())
if err != nil {
return "", err
return "", 0, err
}
statusCode := res.StatusCode
defer res.Body.Close()
body, err := io.ReadAll(res.Body)
if err != nil {
return "", err
return "", statusCode, err
}
return string(body), nil
return string(body), statusCode, nil
}
// sanitizeResult takes the value returned from config_dump json and cleans it up to remove special characters
// e.g public_listener:0.0.0.0:21001 envoy.filters.network.rbac,envoy.filters.network.tcp_proxy
// returns [envoy.filters.network.rbac envoy.filters.network.tcp_proxy]
func sanitizeResult(s string) []string {
result := strings.Split(strings.ReplaceAll(s, `,`, " "), " ")
return append(result[:0], result[1:]...)
}
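The changes in this file are easiest to see together: GetEnvoyOutput now returns the Envoy admin API's HTTP status code in addition to the response body, and AssertEnvoyPresentsCertURI validates the SPIFFE certificate URI served on /certs. A minimal, hypothetical usage sketch follows; the package name and exampleEnvoyAssertions function are illustrative only and not part of this commit.

package assertexample

import (
	"net/http"
	"testing"

	libassert "github.com/hashicorp/consul/test/integration/consul-container/libs/assert"
	"github.com/stretchr/testify/require"
)

// exampleEnvoyAssertions is a usage sketch; adminPort is assumed to come from
// a sidecar's GetAdminAddr(), as in the tests later in this commit.
func exampleEnvoyAssertions(t *testing.T, adminPort int) {
	// The second return value is new: the admin endpoint's HTTP status code.
	body, statusCode, err := libassert.GetEnvoyOutput(adminPort, "stats", map[string]string{"format": "json"})
	require.NoError(t, err)
	require.Equal(t, http.StatusOK, statusCode)
	require.NotEmpty(t, body)

	// Validates that /certs presents a spiffe://.../svc/static-client URI.
	libassert.AssertEnvoyPresentsCertURI(t, adminPort, "static-client")
}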


@@ -64,7 +64,7 @@ func HTTPServiceEchoes(t *testing.T, ip string, port int, path string) {
}
if !strings.Contains(string(body), phrase) {
r.Fatal("received an incorrect response ", body)
r.Fatal("received an incorrect response ", string(body))
}
})
}


@@ -522,3 +522,18 @@ func WaitForMembers(t *testing.T, client *api.Client, expectN int) {
require.Equal(r, expectN, activeMembers)
})
}
func (c *Cluster) ConfigEntryWrite(entry api.ConfigEntry) error {
client, _ := c.GetClient(nil, true)
entries := client.ConfigEntries()
written, _, err := entries.Set(entry, nil)
if err != nil {
return fmt.Errorf("error writing config entry: %v", err)
}
if !written {
return fmt.Errorf("config entry not updated: %s/%s", entry.GetKind(), entry.GetName())
}
return nil
}
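ConfigEntryWrite gives cluster tests a single call that writes a config entry and returns an error when the write fails or the entry is not persisted. A hedged usage sketch under stated assumptions: writeResolverExample and its package are illustrative only, and the entry mirrors the service-resolver written in the upgrade test later in this commit.

package clusterexample

import (
	"testing"

	"github.com/hashicorp/consul/api"
	libcluster "github.com/hashicorp/consul/test/integration/consul-container/libs/cluster"
	libservice "github.com/hashicorp/consul/test/integration/consul-container/libs/service"
	"github.com/stretchr/testify/require"
)

// writeResolverExample is a usage sketch; cluster is assumed to come from a
// topology helper such as NewPeeringCluster.
func writeResolverExample(t *testing.T, cluster *libcluster.Cluster) {
	resolver := &api.ServiceResolverConfigEntry{
		Kind:          api.ServiceResolver,
		Name:          libservice.StaticServerServiceName,
		DefaultSubset: "v2",
	}
	// Fails the test if the entry could not be written or was not updated.
	require.NoError(t, cluster.ConfigEntryWrite(resolver))
}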


@@ -15,7 +15,13 @@ const (
StaticClientServiceName = "static-client"
)
func CreateAndRegisterStaticServerAndSidecar(node libcluster.Agent) (Service, Service, error) {
type ServiceOpts struct {
Name string
ID string
Meta map[string]string
}
func CreateAndRegisterStaticServerAndSidecar(node libcluster.Agent, serviceOpts *ServiceOpts) (Service, Service, error) {
// Do some trickery to ensure that partial completion is correctly torn
// down, but successful execution is not.
var deferClean utils.ResettableDefer
@@ -24,7 +30,8 @@ func CreateAndRegisterStaticServerAndSidecar(node libcluster.Agent) (Service, Se
// Register the static-server service and sidecar first to prevent race with sidecar
// trying to get xDS before it's ready
req := &api.AgentServiceRegistration{
Name: StaticServerServiceName,
Name: serviceOpts.Name,
ID: serviceOpts.ID,
Port: 8080,
Connect: &api.AgentServiceConnect{
SidecarService: &api.AgentServiceRegistration{
@@ -37,6 +44,7 @@ func CreateAndRegisterStaticServerAndSidecar(node libcluster.Agent) (Service, Se
Interval: "10s",
Status: api.HealthPassing,
},
Meta: serviceOpts.Meta,
}
if err := node.GetClient().Agent().ServiceRegister(req); err != nil {
@@ -52,7 +60,7 @@ func CreateAndRegisterStaticServerAndSidecar(node libcluster.Agent) (Service, Se
_ = serverService.Terminate()
})
serverConnectProxy, err := NewConnectService(context.Background(), fmt.Sprintf("%s-sidecar", StaticServerServiceName), StaticServerServiceName, 8080, node) // bindPort not used
serverConnectProxy, err := NewConnectService(context.Background(), fmt.Sprintf("%s-sidecar", StaticServerServiceName), serviceOpts.ID, 8080, node) // bindPort not used
if err != nil {
return nil, nil, err
}


@@ -42,8 +42,8 @@ func BasicPeeringTwoClustersSetup(
t *testing.T,
consulVersion string,
) (*BuiltCluster, *BuiltCluster) {
acceptingCluster, acceptingCtx, acceptingClient := NewPeeringCluster(t, "dc1", 3, consulVersion)
dialingCluster, dialingCtx, dialingClient := NewPeeringCluster(t, "dc2", 1, consulVersion)
acceptingCluster, acceptingCtx, acceptingClient := NewPeeringCluster(t, "dc1", 3, consulVersion, true)
dialingCluster, dialingCtx, dialingClient := NewPeeringCluster(t, "dc2", 1, consulVersion, true)
require.NoError(t, dialingCluster.PeerWithCluster(acceptingClient, AcceptingPeerName, DialingPeerName))
libassert.PeeringStatus(t, acceptingClient, AcceptingPeerName, api.PeeringStateActive)
@@ -57,10 +57,16 @@ func BasicPeeringTwoClustersSetup(
// Create a service and proxy instance
var err error
serverSidecarService, _, err := libservice.CreateAndRegisterStaticServerAndSidecar(clientNode)
// Create a service and proxy instance
serviceOpts := libservice.ServiceOpts{
Name: libservice.StaticServerServiceName,
ID: "static-server",
Meta: map[string]string{"version": ""},
}
serverSidecarService, _, err := libservice.CreateAndRegisterStaticServerAndSidecar(clientNode, &serviceOpts)
require.NoError(t, err)
libassert.CatalogServiceExists(t, acceptingClient, "static-server")
libassert.CatalogServiceExists(t, acceptingClient, libservice.StaticServerServiceName)
libassert.CatalogServiceExists(t, acceptingClient, "static-server-sidecar-proxy")
require.NoError(t, serverSidecarService.Export("default", AcceptingPeerName, acceptingClient))
@@ -169,13 +175,14 @@ func NewPeeringCluster(
datacenter string,
numServers int,
version string,
injectAutoEncryption bool,
) (*libcluster.Cluster, *libcluster.BuildContext, *api.Client) {
require.NotEmpty(t, datacenter)
require.True(t, numServers > 0)
opts := libcluster.BuildOptions{
Datacenter: datacenter,
InjectAutoEncryption: true,
InjectAutoEncryption: injectAutoEncryption,
InjectGossipEncryption: true,
AllowHTTPAnyway: true,
ConsulVersion: version,


@@ -14,6 +14,7 @@ var (
LatestVersion string
FollowLog bool
Version_1_14, _ = version.NewVersion("1.14")
)
const (


@@ -70,13 +70,18 @@ func createCluster(t *testing.T) *libcluster.Cluster {
func createServices(t *testing.T, cluster *libcluster.Cluster) libservice.Service {
node := cluster.Agents[0]
client := node.GetClient()
// Create a service and proxy instance
serviceOpts := &libservice.ServiceOpts{
Name: libservice.StaticServerServiceName,
ID: "static-server",
}
// Create a service and proxy instance
_, _, err := libservice.CreateAndRegisterStaticServerAndSidecar(node)
_, _, err := libservice.CreateAndRegisterStaticServerAndSidecar(node, serviceOpts)
require.NoError(t, err)
libassert.CatalogServiceExists(t, client, "static-server-sidecar-proxy")
libassert.CatalogServiceExists(t, client, "static-server")
libassert.CatalogServiceExists(t, client, libservice.StaticServerServiceName)
// Create a client proxy instance with the server as an upstream
clientConnectProxy, err := libservice.CreateAndRegisterStaticClientSidecar(node, "", false)


@@ -45,7 +45,7 @@ func TestAccessLogs(t *testing.T) {
t.Skip()
}
cluster, _, _ := topology.NewPeeringCluster(t, "dc1", 1, "")
cluster, _, _ := topology.NewPeeringCluster(t, "dc1", 1, "", true)
// Turn on access logs. Do this before starting the sidecars so that they inherit the configuration
// for their admin interface
@@ -133,7 +133,13 @@ func createServices(t *testing.T, cluster *libcluster.Cluster) (libservice.Servi
require.True(t, ok, "did not write HTTP service-default")
// Create a service and proxy instance
_, serverConnectProxy, err := libservice.CreateAndRegisterStaticServerAndSidecar(node)
serviceOpts := &libservice.ServiceOpts{
Name: libservice.StaticServerServiceName,
ID: "static-server",
}
// Create a service and proxy instance
_, serverConnectProxy, err := libservice.CreateAndRegisterStaticServerAndSidecar(node, serviceOpts)
require.NoError(t, err)
libassert.CatalogServiceExists(t, client, fmt.Sprintf("%s-sidecar-proxy", libservice.StaticServerServiceName))


@@ -195,7 +195,7 @@ func verifySidecarHasTwoRootCAs(t *testing.T, sidecar libservice.Service) {
}
retry.RunWith(failer(), t, func(r *retry.R) {
dump, err := libassert.GetEnvoyOutput(adminPort, "config_dump", map[string]string{})
dump, _, err := libassert.GetEnvoyOutput(adminPort, "config_dump", map[string]string{})
require.NoError(r, err, "could not fetch envoy configuration")
// Make sure there are two certs in the sidecar


@@ -0,0 +1,171 @@
package l7trafficmanagement
import (
"context"
"fmt"
"net/http"
"testing"
"github.com/hashicorp/consul/api"
libassert "github.com/hashicorp/consul/test/integration/consul-container/libs/assert"
libcluster "github.com/hashicorp/consul/test/integration/consul-container/libs/cluster"
libservice "github.com/hashicorp/consul/test/integration/consul-container/libs/service"
"github.com/hashicorp/consul/test/integration/consul-container/libs/topology"
"github.com/hashicorp/consul/test/integration/consul-container/libs/utils"
libutils "github.com/hashicorp/consul/test/integration/consul-container/libs/utils"
"github.com/hashicorp/go-version"
"github.com/stretchr/testify/require"
"gotest.tools/assert"
)
// TestTrafficManagement_SetupServerAndClientWithSubsets Summary
// This test starts up 3 servers and 1 client in the same datacenter.
//
// Steps:
// - Create a single agent cluster.
// - Create one static-server, two server subsets (v1 and v2), and one static-client with a sidecar, then register them with Consul
// - Validate that the static-server, its two subsets, and the proxy admin endpoints are healthy
// - Validate that the static-server proxy listeners are up and have the right certs
func TestTrafficManagement_SetupServerAndClientWithSubsets(t *testing.T) {
t.Parallel()
type testcase struct {
oldversion string
targetVersion string
}
tcs := []testcase{
{
oldversion: "1.13",
targetVersion: utils.TargetVersion,
},
{
oldversion: "1.14",
targetVersion: utils.TargetVersion,
},
}
run := func(t *testing.T, tc testcase) {
injectAutoEncryption := true
// If version < 1.14 disable AutoEncryption
oldVersion, _ := version.NewVersion(tc.oldversion)
if oldVersion.LessThan(libutils.Version_1_14) {
injectAutoEncryption = false
}
cluster, _, _ := topology.NewPeeringCluster(t, "dc1", 1, tc.oldversion, injectAutoEncryption)
// Register service resolver
serviceResolver := &api.ServiceResolverConfigEntry{
Kind: api.ServiceResolver,
Name: libservice.StaticServerServiceName,
DefaultSubset: "v2",
Subsets: map[string]api.ServiceResolverSubset{
"v1": {
Filter: "Service.Meta.version == v1",
},
"v2": {
Filter: "Service.Meta.version == v2",
},
},
}
err := cluster.ConfigEntryWrite(serviceResolver)
require.NoError(t, err)
serverService, serverServiceV1, serverServiceV2, clientService := createService(t, cluster)
_, port := clientService.GetAddr()
_, adminPort := clientService.GetAdminAddr()
_, serverAdminPort := serverService.GetAdminAddr()
_, serverAdminPortV1 := serverServiceV1.GetAdminAddr()
_, serverAdminPortV2 := serverServiceV2.GetAdminAddr()
// validate that the client and proxy are up and running
libassert.AssertContainerState(t, clientService, "running")
// TO-DO: static-client upstream should be able to connect to static-server-v2 via upstream s2
libassert.HTTPServiceEchoes(t, "localhost", port, "")
libassert.AssertUpstreamEndpointStatus(t, adminPort, "v2.static-server.default", "HEALTHY", 1)
// Upgrade cluster and begin service validation
require.NoError(t, cluster.StandardUpgrade(t, context.Background(), tc.targetVersion))
// POST upgrade validation; repeat client & proxy validation
libassert.HTTPServiceEchoes(t, "localhost", port, "")
libassert.AssertUpstreamEndpointStatus(t, adminPort, "v2.static-server.default", "HEALTHY", 1)
// validate static-client proxy admin is up
_, statusCode, err := libassert.GetEnvoyOutput(adminPort, "stats", map[string]string{"format": "json"})
require.NoError(t, err)
assert.Equal(t, http.StatusOK, statusCode, fmt.Sprintf("service cannot be reached %v", statusCode))
// validate static-server proxy admin is up
_, statusCode1, err := libassert.GetEnvoyOutput(serverAdminPort, "stats", map[string]string{"format": "json"})
require.NoError(t, err)
assert.Equal(t, http.StatusOK, statusCode1, fmt.Sprintf("service cannot be reached %v", statusCode1))
// validate static-server-v1 proxy admin is up
_, statusCode2, err := libassert.GetEnvoyOutput(serverAdminPortV1, "stats", map[string]string{"format": "json"})
require.NoError(t, err)
assert.Equal(t, http.StatusOK, statusCode2, fmt.Sprintf("service cannot be reached %v", statusCode2))
// validate static-server-v2 proxy admin is up
_, statusCode3, err := libassert.GetEnvoyOutput(serverAdminPortV2, "stats", map[string]string{"format": "json"})
require.NoError(t, err)
assert.Equal(t, http.StatusOK, statusCode3, fmt.Sprintf("service cannot be reached %v", statusCode3))
// certs are valid
libassert.AssertEnvoyPresentsCertURI(t, adminPort, "static-client")
libassert.AssertEnvoyPresentsCertURI(t, serverAdminPort, "static-server")
libassert.AssertEnvoyPresentsCertURI(t, serverAdminPortV1, "static-server")
libassert.AssertEnvoyPresentsCertURI(t, serverAdminPortV2, "static-server")
// TO-DO: restart envoy sidecar and validate traffic management
}
for _, tc := range tcs {
t.Run(fmt.Sprintf("upgrade from %s to %s", tc.oldversion, tc.targetVersion),
func(t *testing.T) {
run(t, tc)
})
}
}
// create 3 servers and 1 client
func createService(t *testing.T, cluster *libcluster.Cluster) (libservice.Service, libservice.Service, libservice.Service, libservice.Service) {
node := cluster.Agents[0]
client := node.GetClient()
serviceOpts := &libservice.ServiceOpts{
Name: libservice.StaticServerServiceName,
ID: "static-server",
}
_, serverService, err := libservice.CreateAndRegisterStaticServerAndSidecar(node, serviceOpts)
libassert.CatalogServiceExists(t, client, "static-server")
require.NoError(t, err)
serviceOptsV1 := &libservice.ServiceOpts{
Name: libservice.StaticServerServiceName,
ID: "static-server-v1",
Meta: map[string]string{"version": "v1"},
}
_, serverServiceV1, err := libservice.CreateAndRegisterStaticServerAndSidecar(node, serviceOptsV1)
libassert.CatalogServiceExists(t, client, "static-server")
require.NoError(t, err)
serviceOptsV2 := &libservice.ServiceOpts{
Name: libservice.StaticServerServiceName,
ID: "static-server-v2",
Meta: map[string]string{"version": "v2"},
}
_, serverServiceV2, err := libservice.CreateAndRegisterStaticServerAndSidecar(node, serviceOptsV2)
libassert.CatalogServiceExists(t, client, "static-server")
require.NoError(t, err)
// Create a client proxy instance with the server as an upstream
clientService, err := libservice.CreateAndRegisterStaticClientSidecar(node, "", false)
require.NoError(t, err)
libassert.CatalogServiceExists(t, client, fmt.Sprintf("%s-sidecar-proxy", libservice.StaticClientServiceName))
return serverService, serverServiceV1, serverServiceV2, clientService
}