backport of commit ca9e08e6b5eee00d055b9429df5976a70cdcb2d6 (#18813)

Co-authored-by: James Rasell <jrasell@users.noreply.github.com>
Author: hc-github-team-nomad-core, 2023-10-20 02:35:54 -05:00 (committed by GitHub)
Parent: 8f1713dcd4
Commit: 63c2013ec1
12 changed files with 141 additions and 64 deletions

.changelog/18795.txt (new file)

@@ -0,0 +1,12 @@
```release-note:improvement
cli: Added `log-include-location` flag to the `monitor` command
```
```release-note:improvement
cli: Added `log-include-location` flag to the `operator debug` command
```
```release-note:improvement
api: Added support for the `log_include_location` query parameter within the
`/v1/agent/monitor` HTTP endpoint
```


@@ -119,8 +119,9 @@ func (a *Agent) monitor(conn io.ReadWriteCloser) {
 	defer cancel()
 	monitor := monitor.New(512, a.c.logger, &log.LoggerOptions{
 		JSONFormat:      args.LogJSON,
 		Level:           logLevel,
+		IncludeLocation: args.LogIncludeLocation,
 	})
 	frames := make(chan *sframer.StreamFrame, streamFramesBuffer)
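
The `IncludeLocation` option comes from `go-hclog`'s `LoggerOptions`. As a rough illustration of what the new request field enables (a standalone sketch, not Nomad code; the logger name and message are made up), enabling it makes each emitted line carry the caller's file and line:

```go
package main

import (
	"os"

	"github.com/hashicorp/go-hclog"
)

func main() {
	logger := hclog.New(&hclog.LoggerOptions{
		Name:            "example",
		Level:           hclog.Debug,
		JSONFormat:      false,
		IncludeLocation: true, // mirrors args.LogIncludeLocation above
		Output:          os.Stdout,
	})

	// With IncludeLocation set, the line includes the caller location,
	// roughly: ... [DEBUG] example: main.go:20: starting monitor stream
	logger.Debug("starting monitor stream")
}
```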


@@ -45,6 +45,11 @@ type MonitorRequest struct {
 	// LogJSON specifies if log format should be unstructured or json
 	LogJSON bool

+	// LogIncludeLocation dictates whether the logger includes file and line
+	// information on each log line. This is useful for Nomad development and
+	// debugging.
+	LogIncludeLocation bool
+
 	// NodeID is the node we want to track the logs of
 	NodeID string


@@ -179,14 +179,26 @@ func (s *HTTPServer) AgentMonitor(resp http.ResponseWriter, req *http.Request) (
 		plainText = parsed
 	}

+	logIncludeLocation := false
+	logIncludeLocationStr := req.URL.Query().Get("log_include_location")
+	if logIncludeLocationStr != "" {
+		parsed, err := strconv.ParseBool(logIncludeLocationStr)
+		if err != nil {
+			return nil, CodedError(http.StatusBadRequest,
+				fmt.Sprintf("Unknown option for log_include_location: %v", err))
+		}
+		logIncludeLocation = parsed
+	}
+
 	nodeID := req.URL.Query().Get("node_id")

 	// Build the request and parse the ACL token
 	args := cstructs.MonitorRequest{
-		NodeID:    nodeID,
-		ServerID:  req.URL.Query().Get("server_id"),
-		LogLevel:  logLevel,
-		LogJSON:   logJSON,
-		PlainText: plainText,
+		NodeID:             nodeID,
+		ServerID:           req.URL.Query().Get("server_id"),
+		LogLevel:           logLevel,
+		LogJSON:            logJSON,
+		LogIncludeLocation: logIncludeLocation,
+		PlainText:          plainText,
 	}

 	// if node and server were requested return error
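
The parsing above relies on Go's `strconv.ParseBool`, which accepts only a fixed set of literals (`1`, `t`, `true`, `0`, `f`, `false` and their case variants); any other value, such as the `maybe` used in the test below, returns an error that the handler maps to HTTP 400. A minimal stdlib-only sketch of that behaviour:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	for _, v := range []string{"true", "1", "maybe"} {
		b, err := strconv.ParseBool(v)
		if err != nil {
			// "maybe" lands here; the agent endpoint turns this into a 400.
			fmt.Printf("%q: %v\n", v, err)
			continue
		}
		fmt.Printf("%q: %t\n", v, b)
	}
}
```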


@@ -31,6 +31,7 @@ import (
 	"github.com/hashicorp/nomad/nomad/mock"
 	"github.com/hashicorp/nomad/nomad/structs"
 	"github.com/hashicorp/nomad/testutil"
+	"github.com/shoenig/test/must"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )

@@ -293,6 +294,18 @@ func TestHTTP_AgentMonitor(t *testing.T) {
 		})
 	})

+	t.Run("unknown log_include_location", func(t *testing.T) {
+		httpTest(t, nil, func(s *TestAgent) {
+			req, err := http.NewRequest(http.MethodGet, "/v1/agent/monitor?log_include_location=maybe", nil)
+			must.NoError(t, err)
+			resp := newClosableRecorder()
+
+			// Make the request
+			_, err = s.Server.AgentMonitor(resp, req)
+			must.Eq(t, http.StatusBadRequest, err.(HTTPCodedError).Code())
+		})
+	})
+
 	t.Run("check for specific log level", func(t *testing.T) {
 		httpTest(t, nil, func(s *TestAgent) {
 			req, err := http.NewRequest(http.MethodGet, "/v1/agent/monitor?log_level=warn", nil)


@@ -19,6 +19,13 @@ import (
 type MonitorCommand struct {
 	Meta
+
+	// Below this point is where CLI flag options are stored.
+	logLevel           string
+	nodeID             string
+	serverID           string
+	logJSON            bool
+	logIncludeLocation bool
 }

 func (c *MonitorCommand) Help() string {
@@ -42,6 +49,9 @@ Monitor Specific Options:
   -log-level <level>
     Sets the log level to monitor (default: INFO)

+  -log-include-location
+    Include file and line information in each log line. The default is false.
+
   -node-id <node-id>
     Sets the specific node to monitor
@@ -68,17 +78,13 @@ func (c *MonitorCommand) Run(args []string) int {
 		Ui: c.Ui,
 	}

-	var logLevel string
-	var nodeID string
-	var serverID string
-	var logJSON bool
-
 	flags := c.Meta.FlagSet(c.Name(), FlagSetClient)
 	flags.Usage = func() { c.Ui.Output(c.Help()) }
-	flags.StringVar(&logLevel, "log-level", "", "")
-	flags.StringVar(&nodeID, "node-id", "", "")
-	flags.StringVar(&serverID, "server-id", "", "")
-	flags.BoolVar(&logJSON, "json", false, "")
+	flags.StringVar(&c.logLevel, "log-level", "", "")
+	flags.BoolVar(&c.logIncludeLocation, "log-include-location", false, "")
+	flags.StringVar(&c.nodeID, "node-id", "", "")
+	flags.StringVar(&c.serverID, "server-id", "", "")
+	flags.BoolVar(&c.logJSON, "json", false, "")

 	if err := flags.Parse(args); err != nil {
 		return 1
@@ -99,8 +105,8 @@ func (c *MonitorCommand) Run(args []string) int {
 	}

 	// Query the node info and lookup prefix
-	if nodeID != "" {
-		nodeID, err = lookupNodeID(client.Nodes(), nodeID)
+	if c.nodeID != "" {
+		c.nodeID, err = lookupNodeID(client.Nodes(), c.nodeID)
 		if err != nil {
 			c.Ui.Error(err.Error())
 			return 1
@@ -108,10 +114,11 @@ func (c *MonitorCommand) Run(args []string) int {
 	}

 	params := map[string]string{
-		"log_level": logLevel,
-		"node_id":   nodeID,
-		"server_id": serverID,
-		"log_json":  strconv.FormatBool(logJSON),
+		"log_level":            c.logLevel,
+		"node_id":              c.nodeID,
+		"server_id":            c.serverID,
+		"log_json":             strconv.FormatBool(c.logJSON),
+		"log_include_location": strconv.FormatBool(c.logIncludeLocation),
 	}

 	query := &api.QueryOptions{
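
These parameters reach the agent as URL query values through `api.QueryOptions`. The stdlib-only sketch below shows the equivalent encoding; the `leader` and `DEBUG` values are placeholders and this is not Nomad's client code:

```go
package main

import (
	"fmt"
	"net/url"
	"strconv"
)

func main() {
	v := url.Values{}
	v.Set("log_level", "DEBUG")
	v.Set("server_id", "leader")
	v.Set("log_include_location", strconv.FormatBool(true))

	u := url.URL{Path: "/v1/agent/monitor", RawQuery: v.Encode()}
	// Prints: /v1/agent/monitor?log_include_location=true&log_level=DEBUG&server_id=leader
	fmt.Println(u.String())
}
```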


@@ -42,4 +42,13 @@ func TestMonitorCommand_Fails(t *testing.T) {
 	out = ui.ErrorWriter.String()
 	must.StrContains(t, out, "No node(s) with prefix")
+	ui.ErrorWriter.Reset()
+
+	// Fails on passing a log-include-location flag which cannot be parsed.
+	code = cmd.Run([]string{"-address=" + url, "-log-include-location=maybe"})
+	must.One(t, code)
+
+	out = ui.ErrorWriter.String()
+	must.StrContains(t, out, `invalid boolean value "maybe" for -log-include-location`)
 }
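
The expected error string comes from Go's standard `flag` package, which rejects non-boolean values for boolean flags. A small sketch, assuming the same flag name is registered on a plain `flag.FlagSet`, reproduces the message:

```go
package main

import (
	"flag"
	"fmt"
	"io"
)

func main() {
	fs := flag.NewFlagSet("monitor", flag.ContinueOnError)
	fs.SetOutput(io.Discard) // only inspect the returned error in this sketch

	var includeLocation bool
	fs.BoolVar(&includeLocation, "log-include-location", false, "")

	err := fs.Parse([]string{"-log-include-location=maybe"})
	// err reads roughly:
	//   invalid boolean value "maybe" for -log-include-location: ...
	fmt.Println(err)
}
```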


@@ -39,28 +39,29 @@ import (
 type OperatorDebugCommand struct {
 	Meta

 	timestamp          string
 	collectDir         string
 	duration           time.Duration
 	interval           time.Duration
 	pprofInterval      time.Duration
 	pprofDuration      time.Duration
 	logLevel           string
+	logIncludeLocation bool
 	maxNodes           int
 	nodeClass          string
 	nodeIDs            []string
 	serverIDs          []string
 	topics             map[api.Topic][]string
 	index              uint64
 	consul             *external
 	vault              *external
 	manifest           []string
 	ctx                context.Context
 	cancel             context.CancelFunc
 	opts               *api.QueryOptions
 	verbose            bool
 	members            *api.ServerMembers
 	nodes              []*api.NodeListStub
 }

 const (
@@ -178,6 +179,10 @@ Debug Options:
   -log-level=<level>
     The log level to monitor. Defaults to DEBUG.

+  -log-include-location
+    Include file and line information in each log line monitored. The default
+    is true.
+
   -max-nodes=<count>
     Cap the maximum number of client nodes included in the capture. Defaults
     to 10, set to 0 for unlimited.
@@ -225,20 +230,21 @@ func (c *OperatorDebugCommand) Synopsis() string {
 func (c *OperatorDebugCommand) AutocompleteFlags() complete.Flags {
 	return mergeAutocompleteFlags(c.Meta.AutocompleteFlags(FlagSetClient),
 		complete.Flags{
 			"-duration":             complete.PredictAnything,
 			"-event-index":          complete.PredictAnything,
 			"-event-topic":          complete.PredictAnything,
 			"-interval":             complete.PredictAnything,
 			"-log-level":            complete.PredictSet("TRACE", "DEBUG", "INFO", "WARN", "ERROR"),
+			"-log-include-location": complete.PredictAnything,
 			"-max-nodes":            complete.PredictAnything,
 			"-node-class":           NodeClassPredictor(c.Client),
 			"-node-id":              NodePredictor(c.Client),
 			"-server-id":            ServerPredictor(c.Client),
 			"-output":               complete.PredictDirs("*"),
 			"-pprof-duration":       complete.PredictAnything,
 			"-consul-token":         complete.PredictAnything,
 			"-vault-token":          complete.PredictAnything,
 			"-verbose":              complete.PredictAnything,
 		})
 }
@@ -358,6 +364,7 @@ func (c *OperatorDebugCommand) Run(args []string) int {
 	flags.StringVar(&eventTopic, "event-topic", "none", "")
 	flags.StringVar(&interval, "interval", "30s", "")
 	flags.StringVar(&c.logLevel, "log-level", "DEBUG", "")
+	flags.BoolVar(&c.logIncludeLocation, "log-include-location", true, "")
 	flags.IntVar(&c.maxNodes, "max-nodes", 10, "")
 	flags.StringVar(&c.nodeClass, "node-class", "", "")
 	flags.StringVar(&nodeIDs, "node-id", "all", "")
@@ -769,8 +776,9 @@ func (c *OperatorDebugCommand) startMonitor(path, idKey, nodeID string, client *
 	qo := api.QueryOptions{
 		Params: map[string]string{
 			idKey:                  nodeID,
 			"log_level":            c.logLevel,
+			"log_include_location": strconv.FormatBool(c.logIncludeLocation),
 		},
 		AllowStale: c.queryOpts().AllowStale,
 	}


@@ -198,8 +198,9 @@ func (a *Agent) monitor(conn io.ReadWriteCloser) {
 	defer cancel()
 	monitor := monitor.New(512, a.srv.logger, &log.LoggerOptions{
 		Level:           logLevel,
 		JSONFormat:      args.LogJSON,
+		IncludeLocation: args.LogIncludeLocation,
 	})
 	frames := make(chan *sframer.StreamFrame, 32)


@@ -441,8 +441,8 @@ The table below shows this endpoint's support for
 ### Parameters

 - `node` `(string: <required>)` - Specifies the name of the node to force leave.

 - `prune` `(boolean: <optional>)` - Removes failed or left server from the Serf
   member list immediately. If member is actually still alive, it will eventually
   rejoin the cluster again.

@@ -605,6 +605,9 @@ The table below shows this endpoint's support for
 - `log_json` `(bool: false)` - Specifies if the log format for streamed logs
   should be JSON.

+- `log_include_location` `(bool: false)` - Specifies if the logs streamed should
+  include file and line information.
+
 - `node_id` `(string: "a57b2adb-1a30-2dda-8df0-25abb0881952")` - Specifies a text
   string containing a node-id to target for streaming.
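
As a quick way to try the new parameter against a running agent, the sketch below issues the request with Go's standard library; the address, token handling, and line-by-line printing are illustrative assumptions rather than part of the documented API:

```go
package main

import (
	"bufio"
	"fmt"
	"net/http"
)

func main() {
	// Assumes an agent listening on the default address; add an X-Nomad-Token
	// header if ACLs are enabled.
	url := "http://127.0.0.1:4646/v1/agent/monitor?log_level=debug&log_include_location=true"

	resp, err := http.Get(url)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	// The endpoint streams log frames until the client disconnects; print a few.
	scanner := bufio.NewScanner(resp.Body)
	for i := 0; i < 10 && scanner.Scan(); i++ {
		fmt.Println(scanner.Text())
	}
}
```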


@@ -36,6 +36,9 @@ capability.
 - `-log-level`: The log level to use for log streaming. Defaults to `info`.
   Possible values include `trace`, `debug`, `info`, `warn`, `error`

+- `-log-include-location`: Include file and line information in each log line.
+  The default is `false`.
+
 - `-node-id`: Specifies the client node-id to stream logs from. If no
   node-id is given the nomad server from the -address flag will be used.


@@ -55,6 +55,9 @@ true.
 - `-log-level=DEBUG`: The log level to monitor. Defaults to `DEBUG`.

+- `-log-include-location`: Include file and line information in each log line
+  monitored. The default is `true`.
+
 - `-max-nodes=<count>`: Cap the maximum number of client nodes included
   in the capture. Defaults to 10, set to 0 for unlimited.