cli: add -json flag to support job commands (#12591)

* cli: add -json flag to support job commands

While the CLI has always supported running JSON jobs, that support has
gone through HCLv2's JSON parsing. I have no idea what format it expects
the job to be in, but it is definitely not the format the API expects.

So I ignored that and added a new -json flag to explicitly support *API*
style JSON jobspecs.

The jobspecs may include the wrapping {"Job": {...}} envelope or omit it entirely!
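
For illustration, here is a minimal sketch (not part of this change) of what "API-style" JSON means here: the shape produced by encoding/json on the api package types, with or without the registration envelope. The decodeJob helper below is hypothetical and only mirrors the envelope-tolerant parsing this PR adds.

```go
package main

import (
	"bytes"
	"encoding/json"
	"fmt"

	"github.com/hashicorp/nomad/api"
)

// decodeJob accepts API-style JSON with or without the {"Job": ...} envelope,
// mirroring the parsing added to the CLI in this change.
func decodeJob(data []byte) (*api.Job, error) {
	eitherJob := struct {
		NestedJob *api.Job `json:"Job"`
		api.Job
	}{}
	if err := json.NewDecoder(bytes.NewReader(data)).Decode(&eitherJob); err != nil {
		return nil, err
	}
	if eitherJob.NestedJob != nil {
		return eitherJob.NestedJob, nil
	}
	return &eitherJob.Job, nil
}

func main() {
	id := "example"
	job := &api.Job{ID: &id, Datacenters: []string{"dc1"}}

	// Bare form: the job object exactly as encoding/json renders api.Job.
	bare, _ := json.Marshal(job)

	// Enveloped form: the same job wrapped in a top-level Job key, which is
	// how `nomad job inspect` returns it.
	wrapped, _ := json.Marshal(map[string]*api.Job{"Job": job})

	for _, in := range [][]byte{bare, wrapped} {
		parsed, err := decodeJob(in)
		if err != nil {
			panic(err)
		}
		fmt.Println(*parsed.ID) // prints "example" for both forms
	}
}
```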

* docs: fix example for `nomad job validate`

We haven't been able to validate inside driver config stanzas ever since
the move to task driver plugins. 😭
Michael Schurter, 2022-04-21 13:20:36 -07:00 (committed by GitHub)
parent f4287c870d, commit 5db3a671db
14 changed files with 599 additions and 55 deletions

.changelog/12591.txt (new file)

@ -0,0 +1,3 @@
```release-note:improvement
cli: Added -json flag to `nomad job {run,plan,validate}` to support parsing JSON formatted jobs
```


@ -3,9 +3,9 @@ package command
import (
"bufio"
"bytes"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strconv"
@ -14,6 +14,7 @@ import (
gg "github.com/hashicorp/go-getter"
"github.com/hashicorp/nomad/api"
flaghelper "github.com/hashicorp/nomad/helper/flags"
"github.com/hashicorp/nomad/jobspec"
"github.com/hashicorp/nomad/jobspec2"
"github.com/kr/text"
@ -379,19 +380,54 @@ READ:
return l.ReadCloser.Read(p)
}
// JobGetter provides helpers for retrieving and parsing a jobspec.
type JobGetter struct {
hcl1 bool
HCL1 bool
Vars flaghelper.StringFlag
VarFiles flaghelper.StringFlag
Strict bool
JSON bool
// The fields below can be overwritten for tests
testStdin io.Reader
}
func (j *JobGetter) Validate() error {
if j.HCL1 && j.Strict {
return fmt.Errorf("cannot parse job file as HCLv1 and HCLv2 strict.")
}
if j.HCL1 && j.JSON {
return fmt.Errorf("cannot parse job file as HCL and JSON.")
}
if len(j.Vars) > 0 && j.JSON {
return fmt.Errorf("cannot use variables with JSON files.")
}
if len(j.VarFiles) > 0 && j.JSON {
return fmt.Errorf("cannot use variables with JSON files.")
}
if len(j.Vars) > 0 && j.HCL1 {
return fmt.Errorf("cannot use variables with HCLv1.")
}
if len(j.VarFiles) > 0 && j.HCL1 {
return fmt.Errorf("cannot use variables with HCLv1.")
}
return nil
}
// ApiJob returns the Job struct from jobfile.
func (j *JobGetter) ApiJob(jpath string) (*api.Job, error) {
return j.ApiJobWithArgs(jpath, nil, nil, true)
}
func (j *JobGetter) ApiJobWithArgs(jpath string, vars []string, varfiles []string, strict bool) (*api.Job, error) {
j.Vars = vars
j.VarFiles = varfiles
j.Strict = strict
return j.Get(jpath)
}
func (j *JobGetter) Get(jpath string) (*api.Job, error) {
var jobfile io.Reader
pathName := filepath.Base(jpath)
switch jpath {
@ -401,19 +437,19 @@ func (j *JobGetter) ApiJobWithArgs(jpath string, vars []string, varfiles []strin
} else {
jobfile = os.Stdin
}
pathName = "stdin.hcl"
pathName = "stdin"
default:
if len(jpath) == 0 {
return nil, fmt.Errorf("Error jobfile path has to be specified.")
}
job, err := ioutil.TempFile("", "jobfile")
jobFile, err := os.CreateTemp("", "jobfile")
if err != nil {
return nil, err
}
defer os.Remove(job.Name())
defer os.Remove(jobFile.Name())
if err := job.Close(); err != nil {
if err := jobFile.Close(); err != nil {
return nil, err
}
@ -426,13 +462,13 @@ func (j *JobGetter) ApiJobWithArgs(jpath string, vars []string, varfiles []strin
client := &gg.Client{
Src: jpath,
Pwd: pwd,
Dst: job.Name(),
Dst: jobFile.Name(),
}
if err := client.Get(); err != nil {
return nil, fmt.Errorf("Error getting jobfile from %q: %v", jpath, err)
} else {
file, err := os.Open(job.Name())
file, err := os.Open(jobFile.Name())
if err != nil {
return nil, fmt.Errorf("Error opening file %q: %v", jpath, err)
}
@ -444,9 +480,27 @@ func (j *JobGetter) ApiJobWithArgs(jpath string, vars []string, varfiles []strin
// Parse the JobFile
var jobStruct *api.Job
var err error
if j.hcl1 {
switch {
case j.HCL1:
jobStruct, err = jobspec.Parse(jobfile)
} else {
case j.JSON:
// Support JSON files with both a top-level Job key as well as
// ones without.
eitherJob := struct {
NestedJob *api.Job `json:"Job"`
api.Job
}{}
if err := json.NewDecoder(jobfile).Decode(&eitherJob); err != nil {
return nil, fmt.Errorf("Failed to parse JSON job: %w", err)
}
if eitherJob.NestedJob != nil {
jobStruct = eitherJob.NestedJob
} else {
jobStruct = &eitherJob.Job
}
default:
var buf bytes.Buffer
_, err = io.Copy(&buf, jobfile)
if err != nil {
@ -455,11 +509,11 @@ func (j *JobGetter) ApiJobWithArgs(jpath string, vars []string, varfiles []strin
jobStruct, err = jobspec2.ParseWithConfig(&jobspec2.ParseConfig{
Path: pathName,
Body: buf.Bytes(),
ArgVars: vars,
ArgVars: j.Vars,
AllowFS: true,
VarFiles: varfiles,
VarFiles: j.VarFiles,
Envs: os.Environ(),
Strict: strict,
Strict: j.Strict,
})
if err != nil {

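For reference, a hypothetical standalone caller of the reworked JobGetter could look like the sketch below; the file name and program are illustrative, but the Validate-then-Get flow is the same one the plan, run, and validate commands use further down.

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/nomad/command"
)

func main() {
	// Configure the getter the same way the CLI flags now do, then check
	// that the chosen options are compatible before parsing anything.
	jg := command.JobGetter{JSON: true}
	if err := jg.Validate(); err != nil {
		log.Fatalf("invalid job options: %v", err)
	}

	// Get fetches and parses the jobspec; with JSON set it accepts files
	// both with and without the {"Job": ...} envelope.
	job, err := jg.Get("example.json") // hypothetical local file
	if err != nil {
		log.Fatalf("error getting job struct: %v", err)
	}
	fmt.Println("parsed job:", *job.ID)
}
```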

@ -451,6 +451,84 @@ func TestJobGetter_HTTPServer(t *testing.T) {
}
}
func TestJobGetter_Validate(t *testing.T) {
cases := []struct {
name string
jg JobGetter
errContains string
}{
{
"StrictAndHCL1",
JobGetter{
HCL1: true,
Strict: true,
},
"HCLv1 and HCLv2 strict",
},
{
"JSONandHCL1",
JobGetter{
HCL1: true,
JSON: true,
},
"HCL and JSON",
},
{
"VarsAndHCL1",
JobGetter{
HCL1: true,
Vars: []string{"foo"},
},
"variables with HCLv1",
},
{
"VarFilesAndHCL1",
JobGetter{
HCL1: true,
VarFiles: []string{"foo.var"},
},
"variables with HCLv1",
},
{
"VarsAndJSON",
JobGetter{
JSON: true,
Vars: []string{"foo"},
},
"variables with JSON",
},
{
"VarFilesAndJSON",
JobGetter{
JSON: true,
VarFiles: []string{"foo.var"},
},
"variables with JSON files",
},
{
"JSON_OK",
JobGetter{
JSON: true,
},
"",
},
}
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
err := tc.jg.Validate()
switch tc.errContains {
case "":
require.NoError(t, err)
default:
require.ErrorContains(t, err, tc.errContains)
}
})
}
}
func TestPrettyTimeDiff(t *testing.T) {
// Grab the time and truncate to the nearest second. This allows our tests
// to be deterministic since we don't have to worry about rounding.


@ -7,7 +7,6 @@ import (
"time"
"github.com/hashicorp/nomad/api"
flaghelper "github.com/hashicorp/nomad/helper/flags"
"github.com/hashicorp/nomad/scheduler"
"github.com/posener/complete"
)
@ -76,6 +75,11 @@ Plan Options:
Determines whether the diff between the remote job and planned job is shown.
Defaults to true.
-json
Parses the job file as JSON. If the outer object has a Job field, such as
from "nomad job inspect" or "nomad run -output", the value of the field is
used as the job.
-hcl1
Parses the job file as HCLv1.
@ -109,6 +113,7 @@ func (c *JobPlanCommand) AutocompleteFlags() complete.Flags {
"-diff": complete.PredictNothing,
"-policy-override": complete.PredictNothing,
"-verbose": complete.PredictNothing,
"-json": complete.PredictNothing,
"-hcl1": complete.PredictNothing,
"-hcl2-strict": complete.PredictNothing,
"-var": complete.PredictAnything,
@ -117,23 +122,27 @@ func (c *JobPlanCommand) AutocompleteFlags() complete.Flags {
}
func (c *JobPlanCommand) AutocompleteArgs() complete.Predictor {
return complete.PredictOr(complete.PredictFiles("*.nomad"), complete.PredictFiles("*.hcl"))
return complete.PredictOr(
complete.PredictFiles("*.nomad"),
complete.PredictFiles("*.hcl"),
complete.PredictFiles("*.json"),
)
}
func (c *JobPlanCommand) Name() string { return "job plan" }
func (c *JobPlanCommand) Run(args []string) int {
var diff, policyOverride, verbose, hcl2Strict bool
var varArgs, varFiles flaghelper.StringFlag
var diff, policyOverride, verbose bool
flagSet := c.Meta.FlagSet(c.Name(), FlagSetClient)
flagSet.Usage = func() { c.Ui.Output(c.Help()) }
flagSet.BoolVar(&diff, "diff", true, "")
flagSet.BoolVar(&policyOverride, "policy-override", false, "")
flagSet.BoolVar(&verbose, "verbose", false, "")
flagSet.BoolVar(&c.JobGetter.hcl1, "hcl1", false, "")
flagSet.BoolVar(&hcl2Strict, "hcl2-strict", true, "")
flagSet.Var(&varArgs, "var", "")
flagSet.Var(&varFiles, "var-file", "")
flagSet.BoolVar(&c.JobGetter.JSON, "json", false, "")
flagSet.BoolVar(&c.JobGetter.HCL1, "hcl1", false, "")
flagSet.BoolVar(&c.JobGetter.Strict, "hcl2-strict", true, "")
flagSet.Var(&c.JobGetter.Vars, "var", "")
flagSet.Var(&c.JobGetter.VarFiles, "var-file", "")
if err := flagSet.Parse(args); err != nil {
return 255
@ -147,9 +156,14 @@ func (c *JobPlanCommand) Run(args []string) int {
return 255
}
if err := c.JobGetter.Validate(); err != nil {
c.Ui.Error(fmt.Sprintf("Invalid job options: %s", err))
return 1
}
path := args[0]
// Get Job struct from Jobfile
job, err := c.JobGetter.ApiJobWithArgs(args[0], varArgs, varFiles, hcl2Strict)
job, err := c.JobGetter.Get(path)
if err != nil {
c.Ui.Error(fmt.Sprintf("Error getting job struct: %s", err))
return 255
@ -193,11 +207,11 @@ func (c *JobPlanCommand) Run(args []string) int {
}
runArgs := strings.Builder{}
for _, varArg := range varArgs {
for _, varArg := range c.JobGetter.Vars {
runArgs.WriteString(fmt.Sprintf("-var=%q ", varArg))
}
for _, varFile := range varFiles {
for _, varFile := range c.JobGetter.VarFiles {
runArgs.WriteString(fmt.Sprintf("-var-file=%q ", varFile))
}

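The same flag-to-JobGetter binding shown above is repeated in the run and validate commands below. A minimal standalone sketch of the pattern, assuming the exported JobGetter fields from this change; the flag set name and output are illustrative:

```go
package main

import (
	"flag"
	"fmt"
	"os"

	"github.com/hashicorp/nomad/command"
)

func main() {
	// Bind the parsing flags straight onto a shared JobGetter, as the
	// plan, run, and validate commands now do.
	var jg command.JobGetter
	fs := flag.NewFlagSet("job plan", flag.ContinueOnError)
	fs.BoolVar(&jg.JSON, "json", false, "parse the job file as JSON")
	fs.BoolVar(&jg.HCL1, "hcl1", false, "parse the job file as HCLv1")
	fs.BoolVar(&jg.Strict, "hcl2-strict", true, "strict HCLv2 parsing")
	fs.Var(&jg.Vars, "var", "HCLv2 variable, may be repeated")
	fs.Var(&jg.VarFiles, "var-file", "HCLv2 variable file, may be repeated")

	if err := fs.Parse(os.Args[1:]); err != nil {
		os.Exit(255)
	}
	if err := jg.Validate(); err != nil {
		fmt.Fprintf(os.Stderr, "Invalid job options: %s\n", err)
		os.Exit(1)
	}
	fmt.Printf("job getter configured: %+v\n", jg)
}
```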

@ -255,3 +255,19 @@ func TestPlanCommad_Preemptions(t *testing.T) {
require.Contains(out, "batch")
require.Contains(out, "service")
}
func TestPlanCommad_JSON(t *testing.T) {
ui := cli.NewMockUi()
cmd := &JobPlanCommand{
Meta: Meta{Ui: ui},
}
args := []string{
"-address=http://nope",
"-json",
"testdata/example-short.json",
}
code := cmd.Run(args)
require.Equal(t, 255, code)
require.Contains(t, ui.ErrorWriter.String(), "Error during plan: Put")
}


@ -11,7 +11,6 @@ import (
"github.com/hashicorp/nomad/api"
"github.com/hashicorp/nomad/helper"
flaghelper "github.com/hashicorp/nomad/helper/flags"
"github.com/posener/complete"
)
@ -90,6 +89,11 @@ Run Options:
Override the priority of the evaluations produced as a result of this job
submission. By default, this is set to the priority of the job.
-json
Parses the job file as JSON. If the outer object has a Job field, such as
from "nomad job inspect" or "nomad run -output", the value of the field is
used as the job.
-hcl1
Parses the job file as HCLv1.
@ -158,6 +162,7 @@ func (c *JobRunCommand) AutocompleteFlags() complete.Flags {
"-output": complete.PredictNothing,
"-policy-override": complete.PredictNothing,
"-preserve-counts": complete.PredictNothing,
"-json": complete.PredictNothing,
"-hcl1": complete.PredictNothing,
"-hcl2-strict": complete.PredictNothing,
"-var": complete.PredictAnything,
@ -167,15 +172,18 @@ func (c *JobRunCommand) AutocompleteFlags() complete.Flags {
}
func (c *JobRunCommand) AutocompleteArgs() complete.Predictor {
return complete.PredictOr(complete.PredictFiles("*.nomad"), complete.PredictFiles("*.hcl"))
return complete.PredictOr(
complete.PredictFiles("*.nomad"),
complete.PredictFiles("*.hcl"),
complete.PredictFiles("*.json"),
)
}
func (c *JobRunCommand) Name() string { return "job run" }
func (c *JobRunCommand) Run(args []string) int {
var detach, verbose, output, override, preserveCounts, hcl2Strict bool
var detach, verbose, output, override, preserveCounts bool
var checkIndexStr, consulToken, consulNamespace, vaultToken, vaultNamespace string
var varArgs, varFiles flaghelper.StringFlag
var evalPriority int
flagSet := c.Meta.FlagSet(c.Name(), FlagSetClient)
@ -185,15 +193,16 @@ func (c *JobRunCommand) Run(args []string) int {
flagSet.BoolVar(&output, "output", false, "")
flagSet.BoolVar(&override, "policy-override", false, "")
flagSet.BoolVar(&preserveCounts, "preserve-counts", false, "")
flagSet.BoolVar(&c.JobGetter.hcl1, "hcl1", false, "")
flagSet.BoolVar(&hcl2Strict, "hcl2-strict", true, "")
flagSet.BoolVar(&c.JobGetter.JSON, "json", false, "")
flagSet.BoolVar(&c.JobGetter.HCL1, "hcl1", false, "")
flagSet.BoolVar(&c.JobGetter.Strict, "hcl2-strict", true, "")
flagSet.StringVar(&checkIndexStr, "check-index", "", "")
flagSet.StringVar(&consulToken, "consul-token", "", "")
flagSet.StringVar(&consulNamespace, "consul-namespace", "", "")
flagSet.StringVar(&vaultToken, "vault-token", "", "")
flagSet.StringVar(&vaultNamespace, "vault-namespace", "", "")
flagSet.Var(&varArgs, "var", "")
flagSet.Var(&varFiles, "var-file", "")
flagSet.Var(&c.JobGetter.Vars, "var", "")
flagSet.Var(&c.JobGetter.VarFiles, "var-file", "")
flagSet.IntVar(&evalPriority, "eval-priority", 0, "")
if err := flagSet.Parse(args); err != nil {
@ -214,8 +223,13 @@ func (c *JobRunCommand) Run(args []string) int {
return 1
}
if err := c.JobGetter.Validate(); err != nil {
c.Ui.Error(fmt.Sprintf("Invalid job options: %s", err))
return 1
}
// Get Job struct from Jobfile
job, err := c.JobGetter.ApiJobWithArgs(args[0], varArgs, varFiles, hcl2Strict)
job, err := c.JobGetter.Get(args[0])
if err != nil {
c.Ui.Error(fmt.Sprintf("Error getting job struct: %s", err))
return 1


@ -1,20 +1,22 @@
package command
import (
"io"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/hashicorp/nomad/ci"
"github.com/hashicorp/nomad/testutil"
"github.com/mitchellh/cli"
"github.com/stretchr/testify/require"
)
func TestRunCommand_Implements(t *testing.T) {
ci.Parallel(t)
var _ cli.Command = &JobRunCommand{}
}
var _ cli.Command = (*JobRunCommand)(nil)
func TestRunCommand_Output_Json(t *testing.T) {
ci.Parallel(t)
@ -215,3 +217,65 @@ func TestRunCommand_From_URL(t *testing.T) {
t.Fatalf("expected error getting jobfile, got: %s", out)
}
}
// TestRunCommand_JSON asserts that `nomad job run -json` accepts JSON jobs
// with or without a top level Job key.
func TestRunCommand_JSON(t *testing.T) {
ci.Parallel(t)
run := func(args ...string) (stdout string, stderr string, code int) {
ui := cli.NewMockUi()
cmd := &JobRunCommand{
Meta: Meta{Ui: ui},
}
t.Logf("run: nomad job run %s", strings.Join(args, " "))
code = cmd.Run(args)
return ui.OutputWriter.String(), ui.ErrorWriter.String(), code
}
// Agent startup is slow, do some work while we wait
agentReady := make(chan string)
go func() {
_, _, addr := testServer(t, false, nil)
agentReady <- addr
}()
// First convert HCL -> JSON with -output
stdout, stderr, code := run("-output", "assets/example-short.nomad")
require.Zero(t, code, stderr)
require.Empty(t, stderr)
require.NotEmpty(t, stdout)
t.Logf("run -output==> %s...", stdout[:12])
jsonFile := filepath.Join(t.TempDir(), "redis.json")
require.NoError(t, os.WriteFile(jsonFile, []byte(stdout), 0o640))
// Wait for agent to start and get its address
addr := ""
select {
case addr = <-agentReady:
case <-time.After(10 * time.Second):
t.Fatalf("timed out waiting for agent to start")
}
// Submit JSON
stdout, stderr, code = run("-detach", "-address", addr, "-json", jsonFile)
require.Zero(t, code, stderr)
require.Empty(t, stderr)
// Read the JSON from the API as it omits the Job envelope and
// therefore differs from -output
resp, err := http.Get(addr + "/v1/job/example")
require.NoError(t, err)
buf, err := io.ReadAll(resp.Body)
require.NoError(t, err)
require.NoError(t, resp.Body.Close())
require.NotEmpty(t, buf)
t.Logf("/v1/job/example==> %s...", string(buf[:12]))
require.NoError(t, os.WriteFile(jsonFile, buf, 0o640))
// Submit JSON
stdout, stderr, code = run("-detach", "-address", addr, "-json", jsonFile)
require.Zerof(t, code, "stderr: %s\njson: %s\n", stderr, string(buf))
require.Empty(t, stderr)
require.NotEmpty(t, stdout)
}


@ -7,7 +7,6 @@ import (
"github.com/hashicorp/go-multierror"
"github.com/hashicorp/nomad/api"
"github.com/hashicorp/nomad/command/agent"
flaghelper "github.com/hashicorp/nomad/helper/flags"
"github.com/hashicorp/nomad/nomad/structs"
"github.com/posener/complete"
)
@ -32,8 +31,17 @@ Alias: nomad validate
When ACLs are enabled, this command requires a token with the 'read-job'
capability for the job's namespace.
General Options:
` + generalOptionsUsage(usageOptsDefault) + `
Validate Options:
-json
Parses the job file as JSON. If the outer object has a Job field, such as
from "nomad job inspect" or "nomad run -output", the value of the field is
used as the job.
-hcl1
Parses the job file as HCLv1.
@ -65,21 +73,23 @@ func (c *JobValidateCommand) AutocompleteFlags() complete.Flags {
}
func (c *JobValidateCommand) AutocompleteArgs() complete.Predictor {
return complete.PredictOr(complete.PredictFiles("*.nomad"), complete.PredictFiles("*.hcl"))
return complete.PredictOr(
complete.PredictFiles("*.nomad"),
complete.PredictFiles("*.hcl"),
complete.PredictFiles("*.json"),
)
}
func (c *JobValidateCommand) Name() string { return "job validate" }
func (c *JobValidateCommand) Run(args []string) int {
var varArgs, varFiles flaghelper.StringFlag
var hcl2Strict bool
flagSet := c.Meta.FlagSet(c.Name(), FlagSetNone)
flagSet := c.Meta.FlagSet(c.Name(), FlagSetClient)
flagSet.Usage = func() { c.Ui.Output(c.Help()) }
flagSet.BoolVar(&c.JobGetter.hcl1, "hcl1", false, "")
flagSet.BoolVar(&hcl2Strict, "hcl2-strict", true, "")
flagSet.Var(&varArgs, "var", "")
flagSet.Var(&varFiles, "var-file", "")
flagSet.BoolVar(&c.JobGetter.JSON, "json", false, "")
flagSet.BoolVar(&c.JobGetter.HCL1, "hcl1", false, "")
flagSet.BoolVar(&c.JobGetter.Strict, "hcl2-strict", true, "")
flagSet.Var(&c.JobGetter.Vars, "var", "")
flagSet.Var(&c.JobGetter.VarFiles, "var-file", "")
if err := flagSet.Parse(args); err != nil {
return 1
@ -93,8 +103,13 @@ func (c *JobValidateCommand) Run(args []string) int {
return 1
}
if err := c.JobGetter.Validate(); err != nil {
c.Ui.Error(fmt.Sprintf("Invalid job options: %s", err))
return 1
}
// Get Job struct from Jobfile
job, err := c.JobGetter.ApiJobWithArgs(args[0], varArgs, varFiles, hcl2Strict)
job, err := c.JobGetter.Get(args[0])
if err != nil {
c.Ui.Error(fmt.Sprintf("Error getting job struct: %s", err))
return 1


@ -9,6 +9,7 @@ import (
"github.com/hashicorp/nomad/ci"
"github.com/hashicorp/nomad/testutil"
"github.com/mitchellh/cli"
"github.com/stretchr/testify/require"
)
func TestValidateCommand_Implements(t *testing.T) {
@ -176,3 +177,24 @@ func TestValidateCommand_From_URL(t *testing.T) {
t.Fatalf("expected error getting jobfile, got: %s", out)
}
}
func TestValidateCommand_JSON(t *testing.T) {
ci.Parallel(t)
_, _, addr := testServer(t, false, nil)
ui := cli.NewMockUi()
cmd := &JobValidateCommand{
Meta: Meta{Ui: ui},
}
code := cmd.Run([]string{"-address", addr, "-json", "testdata/example-short.json"})
require.Zerof(t, code, "stdout: %s\nstderr: %s\n",
ui.OutputWriter.String(), ui.ErrorWriter.String())
code = cmd.Run([]string{"-address", addr, "-json", "testdata/example-short-bad.json"})
require.Equalf(t, 1, code, "stdout: %s\nstderr: %s\n",
ui.OutputWriter.String(), ui.ErrorWriter.String())
}

command/testdata/example-short-bad.json (new file)

@ -0,0 +1,124 @@
{
"Job": {
"Region": null,
"Namespace": null,
"ID": "example",
"Name": "example",
"Type": null,
"Priority": null,
"AllAtOnce": null,
"Constraints": null,
"Affinities": null,
"TaskGroups": [
{
"Name": "cache",
"Count": null,
"Constraints": null,
"Affinities": null,
"Tasks": [
{
"Name": "redis",
"Driver": "docker",
"User": "",
"Lifecycle": null,
"Config": {
"auth_soft_fail": true,
"image": "redis:3.2",
"ports": [
"db"
]
},
"Constraints": null,
"Affinities": null,
"Env": null,
"Services": null,
"Resources": {
"CPU": 500,
"Cores": null,
"MemoryMB": 256,
"MemoryMaxMB": null,
"DiskMB": null,
"Networks": null,
"Devices": null,
"IOPS": null
},
"RestartPolicy": null,
"Meta": null,
"KillTimeout": null,
"LogConfig": null,
"Artifacts": null,
"Vault": null,
"Templates": null,
"DispatchPayload": null,
"VolumeMounts": null,
"Leader": false,
"ShutdownDelay": 0,
"KillSignal": "",
"Kind": "",
"ScalingPolicies": null
}
],
"Spreads": null,
"Volumes": null,
"RestartPolicy": null,
"ReschedulePolicy": null,
"EphemeralDisk": null,
"Update": null,
"Migrate": null,
"Networks": [
{
"Mode": "",
"Device": "",
"CIDR": "",
"IP": "",
"DNS": null,
"ReservedPorts": null,
"DynamicPorts": [
{
"Label": "db",
"Value": 0,
"To": 6379,
"HostNetwork": ""
}
],
"Hostname": "",
"MBits": null
}
],
"Meta": null,
"Services": null,
"ShutdownDelay": null,
"StopAfterClientDisconnect": null,
"MaxClientDisconnect": null,
"Scaling": null,
"Consul": null
}
],
"Update": null,
"Multiregion": null,
"Spreads": null,
"Periodic": null,
"ParameterizedJob": null,
"Reschedule": null,
"Migrate": null,
"Meta": null,
"ConsulToken": null,
"VaultToken": null,
"Stop": null,
"ParentID": null,
"Dispatched": false,
"DispatchIdempotencyToken": null,
"Payload": null,
"ConsulNamespace": null,
"VaultNamespace": null,
"NomadTokenID": null,
"Status": null,
"StatusDescription": null,
"Stable": null,
"Version": null,
"SubmitTime": null,
"CreateIndex": null,
"ModifyIndex": null,
"JobModifyIndex": null
}
}

command/testdata/example-short.json (new file)

@ -0,0 +1,127 @@
{
"Job": {
"Region": null,
"Namespace": null,
"ID": "example",
"Name": "example",
"Type": null,
"Priority": null,
"AllAtOnce": null,
"Datacenters": [
"dc1"
],
"Constraints": null,
"Affinities": null,
"TaskGroups": [
{
"Name": "cache",
"Count": null,
"Constraints": null,
"Affinities": null,
"Tasks": [
{
"Name": "redis",
"Driver": "docker",
"User": "",
"Lifecycle": null,
"Config": {
"auth_soft_fail": true,
"image": "redis:3.2",
"ports": [
"db"
]
},
"Constraints": null,
"Affinities": null,
"Env": null,
"Services": null,
"Resources": {
"CPU": 500,
"Cores": null,
"MemoryMB": 256,
"MemoryMaxMB": null,
"DiskMB": null,
"Networks": null,
"Devices": null,
"IOPS": null
},
"RestartPolicy": null,
"Meta": null,
"KillTimeout": null,
"LogConfig": null,
"Artifacts": null,
"Vault": null,
"Templates": null,
"DispatchPayload": null,
"VolumeMounts": null,
"Leader": false,
"ShutdownDelay": 0,
"KillSignal": "",
"Kind": "",
"ScalingPolicies": null
}
],
"Spreads": null,
"Volumes": null,
"RestartPolicy": null,
"ReschedulePolicy": null,
"EphemeralDisk": null,
"Update": null,
"Migrate": null,
"Networks": [
{
"Mode": "",
"Device": "",
"CIDR": "",
"IP": "",
"DNS": null,
"ReservedPorts": null,
"DynamicPorts": [
{
"Label": "db",
"Value": 0,
"To": 6379,
"HostNetwork": ""
}
],
"Hostname": "",
"MBits": null
}
],
"Meta": null,
"Services": null,
"ShutdownDelay": null,
"StopAfterClientDisconnect": null,
"MaxClientDisconnect": null,
"Scaling": null,
"Consul": null
}
],
"Update": null,
"Multiregion": null,
"Spreads": null,
"Periodic": null,
"ParameterizedJob": null,
"Reschedule": null,
"Migrate": null,
"Meta": null,
"ConsulToken": null,
"VaultToken": null,
"Stop": null,
"ParentID": null,
"Dispatched": false,
"DispatchIdempotencyToken": null,
"Payload": null,
"ConsulNamespace": null,
"VaultNamespace": null,
"NomadTokenID": null,
"Status": null,
"StatusDescription": null,
"Stable": null,
"Version": null,
"SubmitTime": null,
"CreateIndex": null,
"ModifyIndex": null,
"JobModifyIndex": null
}
}


@ -63,6 +63,10 @@ capability for the job's namespace.
- `-policy-override`: Sets the flag to force override any soft mandatory
Sentinel policies.
- `-json`: Parses the job file as JSON. If the outer object has a Job field,
such as from "nomad job inspect" or "nomad run -output", the value of the
field is used as the job.
- `-hcl1`: If set, HCL1 parser is used for parsing the job spec.
- `-hcl2-strict`: Whether an error should be produced from the HCL2 parser where


@ -74,6 +74,10 @@ that volume.
- `-eval-priority`: Override the priority of the evaluations produced as a result
of this job submission. By default, this is set to the priority of the job.
- `-json`: Parses the job file as JSON. If the outer object has a Job field,
such as from "nomad job inspect" or "nomad run -output", the value of the
field is used as the job.
- `-hcl1`: If set, HCL1 parser is used for parsing the job spec.
- `-hcl2-strict`: Whether an error should be produced from the HCL2 parser where


@ -32,8 +32,16 @@ of 1 indicates an error.
When ACLs are enabled, this command requires a token with the `read-job`
capability for the job's namespace.
## General Options
@include 'general_options.mdx'
## Validate Options
- `-json`: Parses the job file as JSON. If the outer object has a Job field,
such as from "nomad job inspect" or "nomad run -output", the value of the
field is used as the job.
- `-hcl1`: If set, HCL1 parser is used for parsing the job spec.
- `-hcl2-strict`: Whether an error should be produced from the HCL2 parser where
@ -46,16 +54,13 @@ Defaults to true.
## Examples
Validate a job with invalid syntax:
Validate a JSON job with invalid syntax:
```shell-session
$ nomad job validate example.nomad
$ nomad job validate -json example.json
Job validation errors:
1 error(s) occurred:
* group "cache" -> task "redis" -> config: 1 error(s) occurred:
* field "image" is required
1 error occurred:
* Missing job datacenters
```
Validate a job that has a configuration that causes warnings: