From b3c118ae2225b52bdce4752ad2e118585a199378 Mon Sep 17 00:00:00 2001 From: Mahmood Ali Date: Wed, 21 Oct 2020 10:19:46 -0400 Subject: [PATCH] Add hclv2 parser --- jobspec2/functions.go | 144 +++++++++++++++++ jobspec2/hcl_conversions.go | 281 +++++++++++++++++++++++++++++++++ jobspec2/hclutil/blockattrs.go | 175 ++++++++++++++++++++ jobspec2/parse.go | 109 +++++++++++++ jobspec2/parse_job.go | 169 ++++++++++++++++++++ jobspec2/parse_map.go | 172 ++++++++++++++++++++ jobspec2/parse_test.go | 145 +++++++++++++++++ 7 files changed, 1195 insertions(+) create mode 100644 jobspec2/functions.go create mode 100644 jobspec2/hcl_conversions.go create mode 100644 jobspec2/hclutil/blockattrs.go create mode 100644 jobspec2/parse.go create mode 100644 jobspec2/parse_job.go create mode 100644 jobspec2/parse_map.go create mode 100644 jobspec2/parse_test.go diff --git a/jobspec2/functions.go b/jobspec2/functions.go new file mode 100644 index 000000000..e8fe74d96 --- /dev/null +++ b/jobspec2/functions.go @@ -0,0 +1,144 @@ +package jobspec2 + +import ( + "fmt" + + "github.com/hashicorp/go-cty-funcs/cidr" + "github.com/hashicorp/go-cty-funcs/crypto" + "github.com/hashicorp/go-cty-funcs/encoding" + "github.com/hashicorp/go-cty-funcs/filesystem" + "github.com/hashicorp/go-cty-funcs/uuid" + "github.com/hashicorp/hcl/v2/ext/tryfunc" + "github.com/hashicorp/hcl/v2/ext/typeexpr" + ctyyaml "github.com/zclconf/go-cty-yaml" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/function" + "github.com/zclconf/go-cty/cty/function/stdlib" +) + +// Functions returns the set of functions that should be used to when +// evaluating expressions in the receiving scope. +// +// basedir is used with file functions and allows a user to reference a file +// using local path. 
Usually basedir is the directory in which the config file +// is located +// +func Functions(basedir string, allowFS bool) map[string]function.Function { + funcs := map[string]function.Function{ + "abs": stdlib.AbsoluteFunc, + "base64decode": encoding.Base64DecodeFunc, + "base64encode": encoding.Base64EncodeFunc, + "bcrypt": crypto.BcryptFunc, + "can": tryfunc.CanFunc, + "ceil": stdlib.CeilFunc, + "chomp": stdlib.ChompFunc, + "chunklist": stdlib.ChunklistFunc, + "cidrhost": cidr.HostFunc, + "cidrnetmask": cidr.NetmaskFunc, + "cidrsubnet": cidr.SubnetFunc, + "cidrsubnets": cidr.SubnetsFunc, + "coalesce": stdlib.CoalesceFunc, + "coalescelist": stdlib.CoalesceListFunc, + "compact": stdlib.CompactFunc, + "concat": stdlib.ConcatFunc, + "contains": stdlib.ContainsFunc, + "convert": typeexpr.ConvertFunc, + "csvdecode": stdlib.CSVDecodeFunc, + "distinct": stdlib.DistinctFunc, + "element": stdlib.ElementFunc, + "flatten": stdlib.FlattenFunc, + "floor": stdlib.FloorFunc, + "format": stdlib.FormatFunc, + "formatdate": stdlib.FormatDateFunc, + "formatlist": stdlib.FormatListFunc, + "indent": stdlib.IndentFunc, + "index": stdlib.IndexFunc, + "join": stdlib.JoinFunc, + "jsondecode": stdlib.JSONDecodeFunc, + "jsonencode": stdlib.JSONEncodeFunc, + "keys": stdlib.KeysFunc, + "length": stdlib.LengthFunc, + "log": stdlib.LogFunc, + "lookup": stdlib.LookupFunc, + "lower": stdlib.LowerFunc, + "max": stdlib.MaxFunc, + "md5": crypto.Md5Func, + "merge": stdlib.MergeFunc, + "min": stdlib.MinFunc, + "parseint": stdlib.ParseIntFunc, + "pow": stdlib.PowFunc, + "range": stdlib.RangeFunc, + "reverse": stdlib.ReverseFunc, + "replace": stdlib.ReplaceFunc, + "regex_replace": stdlib.RegexReplaceFunc, + "rsadecrypt": crypto.RsaDecryptFunc, + "setintersection": stdlib.SetIntersectionFunc, + "setproduct": stdlib.SetProductFunc, + "setunion": stdlib.SetUnionFunc, + "sha1": crypto.Sha1Func, + "sha256": crypto.Sha256Func, + "sha512": crypto.Sha512Func, + "signum": stdlib.SignumFunc, + "slice": 
stdlib.SliceFunc, + "sort": stdlib.SortFunc, + "split": stdlib.SplitFunc, + "strrev": stdlib.ReverseFunc, + "substr": stdlib.SubstrFunc, + //"timestamp": pkrfunction.TimestampFunc, + "timeadd": stdlib.TimeAddFunc, + "title": stdlib.TitleFunc, + "trim": stdlib.TrimFunc, + "trimprefix": stdlib.TrimPrefixFunc, + "trimspace": stdlib.TrimSpaceFunc, + "trimsuffix": stdlib.TrimSuffixFunc, + "try": tryfunc.TryFunc, + "upper": stdlib.UpperFunc, + "urlencode": encoding.URLEncodeFunc, + "uuidv4": uuid.V4Func, + "uuidv5": uuid.V5Func, + "values": stdlib.ValuesFunc, + //"vault": pkrfunction.VaultFunc, + "yamldecode": ctyyaml.YAMLDecodeFunc, + "yamlencode": ctyyaml.YAMLEncodeFunc, + "zipmap": stdlib.ZipmapFunc, + + // filesystem calls + "abspath": guardFS(allowFS, filesystem.AbsPathFunc), + "basename": guardFS(allowFS, filesystem.BasenameFunc), + "dirname": guardFS(allowFS, filesystem.DirnameFunc), + "file": guardFS(allowFS, filesystem.MakeFileFunc(basedir, false)), + "fileexists": guardFS(allowFS, filesystem.MakeFileExistsFunc(basedir)), + "fileset": guardFS(allowFS, filesystem.MakeFileSetFunc(basedir)), + "pathexpand": guardFS(allowFS, filesystem.PathExpandFunc), + } + + return funcs +} + +func guardFS(allowFS bool, fn function.Function) function.Function { + if allowFS { + return fn + } + + spec := &function.Spec{ + Params: fn.Params(), + VarParam: fn.VarParam(), + Type: func([]cty.Value) (cty.Type, error) { + return cty.DynamicPseudoType, fmt.Errorf("filesystem function disabled") + }, + Impl: func([]cty.Value, cty.Type) (cty.Value, error) { + return cty.DynamicVal, fmt.Errorf("filesystem functions disabled") + }, + } + + return function.New(spec) +} + +// var unimplFunc = function.New(&function.Spec{ +// Type: func([]cty.Value) (cty.Type, error) { +// return cty.DynamicPseudoType, fmt.Errorf("function not yet implemented") +// }, +// Impl: func([]cty.Value, cty.Type) (cty.Value, error) { +// return cty.DynamicVal, fmt.Errorf("function not yet implemented") +// }, +// }) 
diff --git a/jobspec2/hcl_conversions.go b/jobspec2/hcl_conversions.go new file mode 100644 index 000000000..413a9e148 --- /dev/null +++ b/jobspec2/hcl_conversions.go @@ -0,0 +1,281 @@ +package jobspec2 + +import ( + "fmt" + "reflect" + "time" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/gohcl" + "github.com/hashicorp/hcl/v2/hcldec" + "github.com/hashicorp/nomad/api" + "github.com/zclconf/go-cty/cty" + "github.com/zclconf/go-cty/cty/gocty" +) + +var hclDecoder *gohcl.Decoder + +func init() { + hclDecoder = newHCLDecoder() + hclDecoder.RegisterBlockDecoder(reflect.TypeOf(api.TaskGroup{}), decodeTaskGroup) +} + +func newHCLDecoder() *gohcl.Decoder { + decoder := &gohcl.Decoder{} + + // time conversion + d := time.Duration(0) + decoder.RegisterExpressionDecoder(reflect.TypeOf(d), decodeDuration) + decoder.RegisterExpressionDecoder(reflect.TypeOf(&d), decodeDuration) + + // custom nomad types + decoder.RegisterBlockDecoder(reflect.TypeOf(api.Affinity{}), decodeAffinity) + decoder.RegisterBlockDecoder(reflect.TypeOf(api.Constraint{}), decodeConstraint) + decoder.RegisterBlockDecoder(reflect.TypeOf(jobWrapper{}), decodeJob) + + return decoder +} + +func decodeDuration(expr hcl.Expression, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { + srcVal, diags := expr.Value(ctx) + + if srcVal.Type() == cty.String { + dur, err := time.ParseDuration(srcVal.AsString()) + if err != nil { + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unsuitable value type", + Detail: fmt.Sprintf("Unsuitable duration value: %s", err.Error()), + Subject: expr.StartRange().Ptr(), + Context: expr.Range().Ptr(), + }) + return diags + } + + srcVal = cty.NumberIntVal(int64(dur)) + } + + if srcVal.Type() != cty.Number { + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unsuitable value type", + Detail: fmt.Sprintf("Unsuitable value: expected a string but found %s", srcVal.Type()), + Subject: expr.StartRange().Ptr(), + 
Context: expr.Range().Ptr(), + }) + return diags + + } + + err := gocty.FromCtyValue(srcVal, val) + if err != nil { + diags = append(diags, &hcl.Diagnostic{ + Severity: hcl.DiagError, + Summary: "Unsuitable value type", + Detail: fmt.Sprintf("Unsuitable value: %s", err.Error()), + Subject: expr.StartRange().Ptr(), + Context: expr.Range().Ptr(), + }) + } + + return diags +} + +var affinitySpec = hcldec.ObjectSpec{ + "attribute": &hcldec.AttrSpec{Name: "attribute", Type: cty.String, Required: false}, + "value": &hcldec.AttrSpec{Name: "value", Type: cty.String, Required: false}, + "operator": &hcldec.AttrSpec{Name: "operator", Type: cty.String, Required: false}, + "weight": &hcldec.AttrSpec{Name: "weight", Type: cty.Number, Required: false}, + + api.ConstraintVersion: &hcldec.AttrSpec{Name: api.ConstraintVersion, Type: cty.String, Required: false}, + api.ConstraintSemver: &hcldec.AttrSpec{Name: api.ConstraintSemver, Type: cty.String, Required: false}, + api.ConstraintRegex: &hcldec.AttrSpec{Name: api.ConstraintRegex, Type: cty.String, Required: false}, + api.ConstraintSetContains: &hcldec.AttrSpec{Name: api.ConstraintSetContains, Type: cty.String, Required: false}, + api.ConstraintSetContainsAll: &hcldec.AttrSpec{Name: api.ConstraintSetContainsAll, Type: cty.String, Required: false}, + api.ConstraintSetContainsAny: &hcldec.AttrSpec{Name: api.ConstraintSetContainsAny, Type: cty.String, Required: false}, +} + +func decodeAffinity(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { + a := val.(*api.Affinity) + v, diags := hcldec.Decode(body, affinitySpec, ctx) + if len(diags) != 0 { + return diags + } + + attr := func(attr string) string { + a := v.GetAttr(attr) + if a.IsNull() { + return "" + } + return a.AsString() + } + a.LTarget = attr("attribute") + a.RTarget = attr("value") + a.Operand = attr("operator") + weight := v.GetAttr("weight") + if !weight.IsNull() { + w, _ := weight.AsBigFloat().Int64() + a.Weight = int8ToPtr(int8(w)) + } + + // If 
"version" is provided, set the operand + // to "version" and the value to the "RTarget" + if affinity := attr(api.ConstraintVersion); affinity != "" { + a.Operand = api.ConstraintVersion + a.RTarget = affinity + } + + // If "semver" is provided, set the operand + // to "semver" and the value to the "RTarget" + if affinity := attr(api.ConstraintSemver); affinity != "" { + a.Operand = api.ConstraintSemver + a.RTarget = affinity + } + + // If "regexp" is provided, set the operand + // to "regexp" and the value to the "RTarget" + if affinity := attr(api.ConstraintRegex); affinity != "" { + a.Operand = api.ConstraintRegex + a.RTarget = affinity + } + + // If "set_contains_any" is provided, set the operand + // to "set_contains_any" and the value to the "RTarget" + if affinity := attr(api.ConstraintSetContainsAny); affinity != "" { + a.Operand = api.ConstraintSetContainsAny + a.RTarget = affinity + } + + // If "set_contains_all" is provided, set the operand + // to "set_contains_all" and the value to the "RTarget" + if affinity := attr(api.ConstraintSetContainsAll); affinity != "" { + a.Operand = api.ConstraintSetContainsAll + a.RTarget = affinity + } + + // set_contains is a synonym of set_contains_all + if affinity := attr(api.ConstraintSetContains); affinity != "" { + a.Operand = api.ConstraintSetContains + a.RTarget = affinity + } + + if a.Operand == "" { + a.Operand = "=" + } + return diags +} + +var constraintSpec = hcldec.ObjectSpec{ + "attribute": &hcldec.AttrSpec{Name: "attribute", Type: cty.String, Required: false}, + "value": &hcldec.AttrSpec{Name: "value", Type: cty.String, Required: false}, + "operator": &hcldec.AttrSpec{Name: "operator", Type: cty.String, Required: false}, + + api.ConstraintDistinctProperty: &hcldec.AttrSpec{Name: api.ConstraintDistinctProperty, Type: cty.String, Required: false}, + api.ConstraintDistinctHosts: &hcldec.AttrSpec{Name: api.ConstraintDistinctHosts, Type: cty.Bool, Required: false}, + api.ConstraintRegex: &hcldec.AttrSpec{Name: 
api.ConstraintRegex, Type: cty.String, Required: false}, + api.ConstraintVersion: &hcldec.AttrSpec{Name: api.ConstraintVersion, Type: cty.String, Required: false}, + api.ConstraintSemver: &hcldec.AttrSpec{Name: api.ConstraintSemver, Type: cty.String, Required: false}, + api.ConstraintSetContains: &hcldec.AttrSpec{Name: api.ConstraintSetContains, Type: cty.String, Required: false}, + api.ConstraintSetContainsAll: &hcldec.AttrSpec{Name: api.ConstraintSetContainsAll, Type: cty.String, Required: false}, + api.ConstraintSetContainsAny: &hcldec.AttrSpec{Name: api.ConstraintSetContainsAny, Type: cty.String, Required: false}, + api.ConstraintAttributeIsSet: &hcldec.AttrSpec{Name: api.ConstraintAttributeIsSet, Type: cty.String, Required: false}, + api.ConstraintAttributeIsNotSet: &hcldec.AttrSpec{Name: api.ConstraintAttributeIsNotSet, Type: cty.String, Required: false}, +} + +func decodeConstraint(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { + c := val.(*api.Constraint) + + v, diags := hcldec.Decode(body, constraintSpec, ctx) + if len(diags) != 0 { + return diags + } + + attr := func(attr string) string { + a := v.GetAttr(attr) + if a.IsNull() { + return "" + } + return a.AsString() + } + + c.LTarget = attr("attribute") + c.RTarget = attr("value") + c.Operand = attr("operator") + + // If "version" is provided, set the operand + // to "version" and the value to the "RTarget" + if constraint := attr(api.ConstraintVersion); constraint != "" { + c.Operand = api.ConstraintVersion + c.RTarget = constraint + } + + // If "semver" is provided, set the operand + // to "semver" and the value to the "RTarget" + if constraint := attr(api.ConstraintSemver); constraint != "" { + c.Operand = api.ConstraintSemver + c.RTarget = constraint + } + + // If "regexp" is provided, set the operand + // to "regexp" and the value to the "RTarget" + if constraint := attr(api.ConstraintRegex); constraint != "" { + c.Operand = api.ConstraintRegex + c.RTarget = constraint + } + 
+ // If "set_contains" is provided, set the operand + // to "set_contains" and the value to the "RTarget" + if constraint := attr(api.ConstraintSetContains); constraint != "" { + c.Operand = api.ConstraintSetContains + c.RTarget = constraint + } + + if d := v.GetAttr(api.ConstraintDistinctHosts); !d.IsNull() && d.True() { + c.Operand = api.ConstraintDistinctHosts + } + + if property := attr(api.ConstraintDistinctProperty); property != "" { + c.Operand = api.ConstraintDistinctProperty + c.LTarget = property + } + + if c.Operand == "" { + c.Operand = "=" + } + return diags +} + +func decodeTaskGroup(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { + tg := val.(*api.TaskGroup) + tgExtra := struct { + Vault *api.Vault `hcl:"vault,block"` + }{} + + extra, _ := gohcl.ImpliedBodySchema(tgExtra) + content, tgBody, diags := body.PartialContent(extra) + if len(diags) != 0 { + return diags + } + + for _, b := range content.Blocks { + if b.Type == "vault" { + v := &api.Vault{} + diags = append(diags, hclDecoder.DecodeBody(b.Body, ctx, v)...) + tgExtra.Vault = v + } + } + + d := newHCLDecoder() + diags = d.DecodeBody(tgBody, ctx, tg) + + if tgExtra.Vault != nil { + for _, t := range tg.Tasks { + if t.Vault == nil { + t.Vault = tgExtra.Vault + } + } + } + + return diags + +} diff --git a/jobspec2/hclutil/blockattrs.go b/jobspec2/hclutil/blockattrs.go new file mode 100644 index 000000000..d36e3aeb8 --- /dev/null +++ b/jobspec2/hclutil/blockattrs.go @@ -0,0 +1,175 @@ +package hclutil + +import ( + "github.com/hashicorp/hcl/v2" + hcls "github.com/hashicorp/hcl/v2/hclsyntax" +) + +// BlocksAsAttrs rewrites the hcl.Body so that hcl blocks are treated as +// attributes when schema is unknown. +// +// This conversion is necessary for parsing task driver configs, as they can be +// arbitrary nested without pre-defined schema. +// +// More concretely, it changes the following: +// +// ``` +// config { +// meta { ... 
} +// } +// ``` +// to +// +// ``` +// config { +// meta { ... } +// } +// ``` +func BlocksAsAttrs(body hcl.Body) hcl.Body { + if hclb, ok := body.(*hcls.Body); ok { + return &blockAttrs{body: hclb} + } + return body +} + +type blockAttrs struct { + body hcl.Body + + hiddenAttrs map[string]struct{} + hiddenBlocks map[string]struct{} +} + +func (b *blockAttrs) Content(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Diagnostics) { + bc, diags := b.body.Content(schema) + bc.Blocks = expandBlocks(bc.Blocks) + return bc, diags +} +func (b *blockAttrs) PartialContent(schema *hcl.BodySchema) (*hcl.BodyContent, hcl.Body, hcl.Diagnostics) { + bc, remainBody, diags := b.body.PartialContent(schema) + bc.Blocks = expandBlocks(bc.Blocks) + + remain := &blockAttrs{ + body: remainBody, + hiddenAttrs: map[string]struct{}{}, + hiddenBlocks: map[string]struct{}{}, + } + for name := range b.hiddenAttrs { + remain.hiddenAttrs[name] = struct{}{} + } + for typeName := range b.hiddenBlocks { + remain.hiddenBlocks[typeName] = struct{}{} + } + for _, attrS := range schema.Attributes { + remain.hiddenAttrs[attrS.Name] = struct{}{} + } + for _, blockS := range schema.Blocks { + remain.hiddenBlocks[blockS.Type] = struct{}{} + } + + return bc, remain, diags +} + +func (b *blockAttrs) JustAttributes() (hcl.Attributes, hcl.Diagnostics) { + body, ok := b.body.(*hcls.Body) + if !ok { + return b.body.JustAttributes() + } + + attrs := make(hcl.Attributes) + var diags hcl.Diagnostics + + if body.Attributes == nil && len(body.Blocks) == 0 { + return attrs, diags + } + + for name, attr := range body.Attributes { + if _, hidden := b.hiddenAttrs[name]; hidden { + continue + } + attrs[name] = attr.AsHCLAttribute() + } + + for _, blockS := range body.Blocks { + if _, hidden := b.hiddenBlocks[blockS.Type]; hidden { + continue + } + + attrs[blockS.Type] = convertToAttribute(blockS).AsHCLAttribute() + } + + return attrs, diags +} + +func (b *blockAttrs) MissingItemRange() hcl.Range { + return 
b.body.MissingItemRange() +} + +func expandBlocks(blocks hcl.Blocks) hcl.Blocks { + if len(blocks) == 0 { + return blocks + } + + r := make([]*hcl.Block, len(blocks)) + for i, b := range blocks { + nb := *b + nb.Body = BlocksAsAttrs(b.Body) + r[i] = &nb + } + return r +} + +func convertToAttribute(b *hcls.Block) *hcls.Attribute { + items := []hcls.ObjectConsItem{} + + for _, attr := range b.Body.Attributes { + keyExpr := &hcls.ScopeTraversalExpr{ + Traversal: hcl.Traversal{ + hcl.TraverseRoot{ + Name: attr.Name, + SrcRange: attr.NameRange, + }, + }, + SrcRange: attr.NameRange, + } + key := &hcls.ObjectConsKeyExpr{ + Wrapped: keyExpr, + } + + items = append(items, hcls.ObjectConsItem{ + KeyExpr: key, + ValueExpr: attr.Expr, + }) + } + + for _, block := range b.Body.Blocks { + keyExpr := &hcls.ScopeTraversalExpr{ + Traversal: hcl.Traversal{ + hcl.TraverseRoot{ + Name: block.Type, + SrcRange: block.TypeRange, + }, + }, + SrcRange: block.TypeRange, + } + key := &hcls.ObjectConsKeyExpr{ + Wrapped: keyExpr, + } + valExpr := convertToAttribute(block).Expr + items = append(items, hcls.ObjectConsItem{ + KeyExpr: key, + ValueExpr: valExpr, + }) + } + + attr := &hcls.Attribute{ + Name: b.Type, + NameRange: b.TypeRange, + EqualsRange: b.OpenBraceRange, + SrcRange: b.Body.SrcRange, + Expr: &hcls.ObjectConsExpr{ + Items: items, + }, + } + + return attr +} diff --git a/jobspec2/parse.go b/jobspec2/parse.go new file mode 100644 index 000000000..cc04d0b55 --- /dev/null +++ b/jobspec2/parse.go @@ -0,0 +1,109 @@ +package jobspec2 + +import ( + "bytes" + "errors" + "io" + "os" + "path/filepath" + "strings" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/ext/dynblock" + "github.com/hashicorp/hcl/v2/hclsyntax" + hcljson "github.com/hashicorp/hcl/v2/json" + "github.com/hashicorp/nomad/api" + "github.com/hashicorp/nomad/jobspec2/hclutil" + "github.com/zclconf/go-cty/cty" +) + +func Parse(path string, r io.Reader) (*api.Job, error) { + return ParseWithArgs(path, r, nil, 
false) +} + +func toVars(vars map[string]string) cty.Value { + attrs := make(map[string]cty.Value, len(vars)) + for k, v := range vars { + attrs[k] = cty.StringVal(v) + } + + return cty.ObjectVal(attrs) +} + +func ParseWithArgs(path string, r io.Reader, vars map[string]string, allowFS bool) (*api.Job, error) { + if path == "" { + if f, ok := r.(*os.File); ok { + path = f.Name() + } + } + basedir := filepath.Dir(path) + + // Copy the reader into an in-memory buffer first since HCL requires it. + var buf bytes.Buffer + if _, err := io.Copy(&buf, r); err != nil { + return nil, err + } + + evalContext := &hcl.EvalContext{ + Functions: Functions(basedir, allowFS), + Variables: map[string]cty.Value{ + "vars": toVars(vars), + }, + UnknownVariable: func(expr string) (cty.Value, error) { + v := "${" + expr + "}" + return cty.StringVal(v), nil + }, + } + var result struct { + Job jobWrapper `hcl:"job,block"` + } + err := decode(path, buf.Bytes(), evalContext, &result) + if err != nil { + return nil, err + } + + normalizeJob(&result.Job) + return result.Job.Job, nil +} + +func decode(filename string, src []byte, ctx *hcl.EvalContext, target interface{}) error { + var file *hcl.File + var diags hcl.Diagnostics + + if !isJSON(src) { + file, diags = hclsyntax.ParseConfig(src, filename, hcl.Pos{Line: 1, Column: 1}) + } else { + file, diags = hcljson.Parse(src, filename) + + } + if diags.HasErrors() { + return diags + } + + body := hclutil.BlocksAsAttrs(file.Body) + body = dynblock.Expand(body, ctx) + diags = hclDecoder.DecodeBody(body, ctx, target) + if diags.HasErrors() { + var str strings.Builder + for i, diag := range diags { + if i != 0 { + str.WriteByte('\n') + } + str.WriteString(diag.Error()) + } + return errors.New(str.String()) + } + diags = append(diags, decodeMapInterfaceType(target, ctx)...) 
+ return nil +} + +func isJSON(src []byte) bool { + for _, c := range src { + if c == ' ' { + continue + } + + return c == '{' + } + return false +} diff --git a/jobspec2/parse_job.go b/jobspec2/parse_job.go new file mode 100644 index 000000000..ec50fe40f --- /dev/null +++ b/jobspec2/parse_job.go @@ -0,0 +1,169 @@ +package jobspec2 + +import ( + "time" + + "github.com/hashicorp/hcl/v2" + "github.com/hashicorp/hcl/v2/gohcl" + "github.com/hashicorp/nomad/api" +) + +type jobWrapper struct { + JobID string `hcl:",label"` + Job *api.Job + + Extra struct { + Vault *api.Vault `hcl:"vault,block"` + Tasks []*api.Task `hcl:"task,block"` + } +} + +func decodeJob(body hcl.Body, ctx *hcl.EvalContext, val interface{}) hcl.Diagnostics { + m := val.(*jobWrapper) + extra, _ := gohcl.ImpliedBodySchema(m.Extra) + content, job, diags := body.PartialContent(extra) + if len(diags) != 0 { + return diags + } + + for _, b := range content.Blocks { + if b.Type == "vault" { + v := &api.Vault{} + diags = append(diags, hclDecoder.DecodeBody(b.Body, ctx, v)...) + m.Extra.Vault = v + } else if b.Type == "task" { + t := &api.Task{} + diags = append(diags, hclDecoder.DecodeBody(b.Body, ctx, t)...) + if len(b.Labels) == 1 { + t.Name = b.Labels[0] + m.Extra.Tasks = append(m.Extra.Tasks, t) + } + } + } + + m.Job = &api.Job{} + return hclDecoder.DecodeBody(job, ctx, m.Job) +} + +func normalizeJob(jw *jobWrapper) { + j := jw.Job + if j.Name == nil { + j.Name = &jw.JobID + } + if j.ID == nil { + j.ID = &jw.JobID + } + + if j.Periodic != nil && j.Periodic.Spec != nil { + v := "cron" + j.Periodic.SpecType = &v + } + + normalizeVault(jw.Extra.Vault) + + if len(jw.Extra.Tasks) != 0 { + alone := make([]*api.TaskGroup, 0, len(jw.Extra.Tasks)) + for _, t := range jw.Extra.Tasks { + alone = append(alone, &api.TaskGroup{ + Name: &t.Name, + Tasks: []*api.Task{t}, + }) + } + alone = append(alone, j.TaskGroups...) 
+ j.TaskGroups = alone + } + + for _, tg := range j.TaskGroups { + normalizeNetworkPorts(tg.Networks) + for _, t := range tg.Tasks { + if t.Resources != nil { + normalizeNetworkPorts(t.Resources.Networks) + } + + normalizeTemplates(t.Templates) + + // normalize Vault + normalizeVault(t.Vault) + + if t.Vault == nil { + t.Vault = jw.Extra.Vault + } + } + } +} + +func normalizeVault(v *api.Vault) { + if v == nil { + return + } + + if v.Env == nil { + v.Env = boolToPtr(true) + } + if v.ChangeMode == nil { + v.ChangeMode = stringToPtr("restart") + } +} + +func normalizeNetworkPorts(networks []*api.NetworkResource) { + if networks == nil { + return + } + for _, n := range networks { + if len(n.DynamicPorts) == 0 { + continue + } + + dynamic := make([]api.Port, 0, len(n.DynamicPorts)) + var reserved []api.Port + + for _, p := range n.DynamicPorts { + if p.Value > 0 { + reserved = append(reserved, p) + } else { + dynamic = append(dynamic, p) + } + } + if len(dynamic) == 0 { + dynamic = nil + } + + n.DynamicPorts = dynamic + n.ReservedPorts = reserved + } + +} + +func normalizeTemplates(templates []*api.Template) { + if len(templates) == 0 { + return + } + + for _, t := range templates { + if t.ChangeMode == nil { + t.ChangeMode = stringToPtr("restart") + } + if t.Perms == nil { + t.Perms = stringToPtr("0644") + } + if t.Splay == nil { + t.Splay = durationToPtr(5 * time.Second) + } + } +} + +func int8ToPtr(v int8) *int8 { + return &v +} + +func boolToPtr(v bool) *bool { + return &v +} + +func stringToPtr(v string) *string { + return &v +} + +func durationToPtr(v time.Duration) *time.Duration { + return &v +} diff --git a/jobspec2/parse_map.go b/jobspec2/parse_map.go new file mode 100644 index 000000000..95c04138d --- /dev/null +++ b/jobspec2/parse_map.go @@ -0,0 +1,172 @@ +package jobspec2 + +import ( + "fmt" + "math" + "math/big" + "reflect" + + "github.com/hashicorp/hcl/v2" + "github.com/mitchellh/reflectwalk" + "github.com/zclconf/go-cty/cty" +) + +// 
// decodeMapInterfaceType decodes hcl instances of `map[string]interface{}` fields
// of v.
//
// The HCL parser stores the hcl AST as the map values, and decodeMapInterfaceType
// evaluates the AST and converts them to the native golang types.
func decodeMapInterfaceType(v interface{}, ctx *hcl.EvalContext) hcl.Diagnostics {
	w := &walker{ctx: ctx}
	err := reflectwalk.Walk(v, w)
	if err != nil {
		// reflectwalk errors indicate a walking failure, not a user input
		// problem; surface them as a generic internal diagnostic.
		w.diags = append(w.diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "unexpected internal error",
			Detail:   err.Error(),
		})
	}
	return w.diags
}

// walker implements reflectwalk's Map/MapElem callbacks, accumulating any
// evaluation diagnostics in diags.
type walker struct {
	ctx   *hcl.EvalContext
	diags hcl.Diagnostics
}

var mapStringInterfaceType = reflect.TypeOf(map[string]interface{}{})

// Map is invoked by reflectwalk for every map encountered while walking v.
// Only map[string]interface{} maps are touched; any value that is still a
// deferred *hcl.Attribute is evaluated and replaced in place.
func (w *walker) Map(m reflect.Value) error {
	if !m.Type().AssignableTo(mapStringInterfaceType) {
		return nil
	}

	for _, k := range m.MapKeys() {
		v := m.MapIndex(k)
		if attr, ok := v.Interface().(*hcl.Attribute); ok {
			c, diags := decodeInterface(attr.Expr, w.ctx)
			w.diags = append(w.diags, diags...)

			m.SetMapIndex(k, reflect.ValueOf(c))
		}
	}
	return nil
}

// MapElem is a no-op; all work happens in Map.
func (w *walker) MapElem(m, k, v reflect.Value) error {
	return nil
}

// decodeInterface evaluates expr in ctx and converts the resulting cty value
// to a native Go value.
func decodeInterface(expr hcl.Expression, ctx *hcl.EvalContext) (interface{}, hcl.Diagnostics) {
	srvVal, diags := expr.Value(ctx)

	dst, err := interfaceFromCtyValue(srvVal)
	if err != nil {
		diags = append(diags, &hcl.Diagnostic{
			Severity: hcl.DiagError,
			Summary:  "unsuitable value type",
			Detail:   fmt.Sprintf("Unsuitable value: %s", err.Error()),
			Subject:  expr.StartRange().Ptr(),
			Context:  expr.Range().Ptr(),
		})
	}
	return dst, diags
}

// interfaceFromCtyValue converts a cty value to native Go types: strings,
// bools, numbers (see smallestNumber), []interface{} for lists/sets/tuples,
// and — deliberately — a single-element []map[string]interface{} for maps
// and objects, mirroring how HCL block lists are represented downstream.
func interfaceFromCtyValue(val cty.Value) (interface{}, error) {
	t := val.Type()

	if val.IsNull() {
		return nil, nil
	}

	if !val.IsKnown() {
		return nil, fmt.Errorf("value is not known")
	}

	// The caller should've guaranteed that the given val is conformant with
	// the given type t, so we'll proceed under that assumption here.

	switch {
	case t.IsPrimitiveType():
		switch t {
		case cty.String:
			return val.AsString(), nil
		case cty.Number:
			if val.RawEquals(cty.PositiveInfinity) {
				return math.Inf(1), nil
			} else if val.RawEquals(cty.NegativeInfinity) {
				return math.Inf(-1), nil
			} else {
				return smallestNumber(val.AsBigFloat()), nil
			}
		case cty.Bool:
			return val.True(), nil
		default:
			// cty's only primitives are String, Number and Bool, so this
			// branch indicates a programmer error.
			panic("unsupported primitive type")
		}
	case t.IsListType(), t.IsSetType(), t.IsTupleType():
		result := []interface{}{}

		it := val.ElementIterator()
		for it.Next() {
			_, ev := it.Element()
			evi, err := interfaceFromCtyValue(ev)
			if err != nil {
				return nil, err
			}
			result = append(result, evi)
		}
		return result, nil
	case t.IsMapType():
		result := map[string]interface{}{}
		it := val.ElementIterator()
		for it.Next() {
			ek, ev := it.Element()

			ekv := ek.AsString()
			evv, err := interfaceFromCtyValue(ev)
			if err != nil {
				return nil, err
			}

			result[ekv] = evv
		}
		// wrapped in a one-element slice to match HCL block-list shape
		return []map[string]interface{}{result}, nil
	case t.IsObjectType():
		result := map[string]interface{}{}

		for k := range t.AttributeTypes() {
			av := val.GetAttr(k)
			avv, err := interfaceFromCtyValue(av)
			if err != nil {
				return nil, err
			}

			result[k] = avv
		}
		// wrapped in a one-element slice to match HCL block-list shape
		return []map[string]interface{}{result}, nil
	case t.IsCapsuleType():
		rawVal := val.EncapsulatedValue()
		return rawVal, nil
	default:
		// should never happen
		return nil, fmt.Errorf("cannot serialize %s", t.FriendlyName())
	}
}

// smallestNumber converts a big.Float to the narrowest convenient Go type:
// int when the value fits, then int64 for exact integers, then float64, and
// finally the *big.Float itself when even float64 would lose precision.
func smallestNumber(b *big.Float) interface{} {

	if v, acc := b.Int64(); acc == big.Exact {
		// check if it fits in int
		if int64(int(v)) == v {
			return int(v)
		}
		return v
	}

	// big.Above is accepted too: Float64 rounds, and a slightly-high result
	// is still the closest float64 representation.
	if v, acc := b.Float64(); acc == big.Exact || acc == big.Above {
		return v
	}

	return b
}

// ---- jobspec2/parse_test.go ----

package jobspec2

import (
	"io/ioutil"
	"os"
	"strings"
	"testing"

	"github.com/hashicorp/nomad/jobspec"
	"github.com/stretchr/testify/require"
)

// TestEquivalentToHCL1 parses every fixture under the HCL1 jobspec test
// directory with both the v1 and v2 parsers and requires identical results.
// Fixtures the v1 parser rejects are skipped rather than failed.
func TestEquivalentToHCL1(t *testing.T) {
	hclSpecDir := "../jobspec/test-fixtures/"
	fis, err := ioutil.ReadDir(hclSpecDir)
	require.NoError(t, err)

	for _, fi := range fis {
		name := fi.Name()

		t.Run(name, func(t *testing.T) {
			f, err := os.Open(hclSpecDir + name)
			require.NoError(t, err)
			defer f.Close()

			job1, err := jobspec.Parse(f)
			if err != nil {
				t.Skip("file is not parsable in v1")
			}

			// rewind so the v2 parser reads from the start
			// NOTE(review): Seek error is ignored here.
			f.Seek(0, 0)

			job2, err := Parse(name, f)
			require.NoError(t, err)

			require.Equal(t, job1, job2)
		})
	}
}

// TestParse_Variables asserts that `vars.*` references and HCL2 expressions
// (for-expressions, function calls) are evaluated.
// NOTE(review): this test is currently identical to TestParse_VarsAndFunctions
// below — presumably one was meant to diverge; confirm.
func TestParse_Variables(t *testing.T) {
	hcl := `
job "example" {
  datacenters = [for s in ["dc1", "dc2"] : upper(s)]
  region      = vars.region_var
}
`

	out, err := ParseWithArgs("input.hcl", strings.NewReader(hcl), map[string]string{"region_var": "aug"}, true)
	require.NoError(t, err)

	require.Equal(t, []string{"DC1", "DC2"}, out.Datacenters)
	require.Equal(t, "aug", *out.Region)
}

// TestParse_VarsAndFunctions asserts variable and function evaluation; see
// the duplication note on TestParse_Variables.
func TestParse_VarsAndFunctions(t *testing.T) {
	hcl := `
job "example" {
  datacenters = [for s in ["dc1", "dc2"] : upper(s)]
  region      = vars.region_var
}
`

	out, err := ParseWithArgs("input.hcl", strings.NewReader(hcl), map[string]string{"region_var": "aug"}, true)
	require.NoError(t, err)

	require.Equal(t, []string{"DC1", "DC2"}, out.Datacenters)
	require.NotNil(t, out.Region)
	require.Equal(t, "aug", *out.Region)
}

// TestParse_UnknownVariables asserts that unknown variables are left intact for further processing
func TestParse_UnknownVariables(t *testing.T) {
	hcl := `
job "example" {
  datacenters = [for s in ["dc1", "dc2"] : upper(s)]
  region      = vars.region_var
  meta {
    known_var   = "${vars.region_var}"
    unknown_var = "${UNKNOWN}"
  }
}
`

	out, err := ParseWithArgs("input.hcl", strings.NewReader(hcl), map[string]string{"region_var": "aug"}, true)
	require.NoError(t, err)

	meta := map[string]string{
		"known_var":   "aug",
		"unknown_var": "${UNKNOWN}",
	}

	require.Equal(t, meta, out.Meta)
}

// TestParse_FileOperators asserts that the file() function works when
// allowFS is true and fails with a "disabled" error when it is false.
func TestParse_FileOperators(t *testing.T) {
	hcl := `
job "example" {
  region      = file("parse_test.go")
}
`

	t.Run("enabled", func(t *testing.T) {
		out, err := ParseWithArgs("input.hcl", strings.NewReader(hcl), nil, true)
		require.NoError(t, err)

		expected, err := ioutil.ReadFile("parse_test.go")
		require.NoError(t, err)

		require.NotNil(t, out.Region)
		require.Equal(t, string(expected), *out.Region)
	})

	t.Run("disabled", func(t *testing.T) {
		_, err := ParseWithArgs("input.hcl", strings.NewReader(hcl), nil, false)
		require.Error(t, err)
		require.Contains(t, err.Error(), "filesystem function disabled")
	})
}

// TestParseDynamic asserts that HCL2 `dynamic` blocks expand: one group per
// for_each element, named by the iteration value.
func TestParseDynamic(t *testing.T) {
	hcl := `
job "example" {

dynamic "group" {
  for_each = ["groupA", "groupB", "groupC"]
  labels   = [group.value]

  content {
    task "simple" {
      driver = "raw_exec"

    }
  }
}
}
`
	out, err := ParseWithArgs("input.hcl", strings.NewReader(hcl), nil, true)
	require.NoError(t, err)

	require.Len(t, out.TaskGroups, 3)
	require.Equal(t, "groupA", *out.TaskGroups[0].Name)
	require.Equal(t, "groupB", *out.TaskGroups[1].Name)
	require.Equal(t, "groupC", *out.TaskGroups[2].Name)
}