From 835b355489f2ed333556a01c5d0f4615873758f7 Mon Sep 17 00:00:00 2001 From: Jeff Mitchell Date: Fri, 25 May 2018 14:34:24 -0400 Subject: [PATCH 1/8] Add key information to list endpoints in identity. (#4634) * Add key information to list endpoints in identity. Also fixes some bugs from before where we were persisting data that we should not have been (mount type/path). * Add cached lookups of real time mount info --- vault/identity_store_aliases.go | 48 ++++++- vault/identity_store_aliases_ext_test.go | 159 +++++++++++++++++++++++ vault/identity_store_aliases_test.go | 56 -------- vault/identity_store_entities.go | 53 +++++++- vault/identity_store_group_aliases.go | 38 +++++- vault/identity_store_groups.go | 41 +++++- vault/identity_store_groups_ext_test.go | 135 +++++++++++++++++++ vault/identity_store_schema.go | 14 -- 8 files changed, 459 insertions(+), 85 deletions(-) diff --git a/vault/identity_store_aliases.go b/vault/identity_store_aliases.go index 5fea5de71..1f578df19 100644 --- a/vault/identity_store_aliases.go +++ b/vault/identity_store_aliases.go @@ -319,9 +319,11 @@ func (i *IdentityStore) handleAliasUpdateCommon(req *logical.Request, d *framewo // Update the fields alias.Name = aliasName alias.Metadata = aliasMetadata - alias.MountType = mountValidationResp.MountType alias.MountAccessor = mountValidationResp.MountAccessor - alias.MountPath = mountValidationResp.MountPath + + // Explicitly set to empty as in the past we incorrectly saved it + alias.MountPath = "" + alias.MountType = "" // Set the canonical ID in the alias index. This should be done after // sanitizing entity. 
@@ -377,13 +379,16 @@ func (i *IdentityStore) handleAliasReadCommon(alias *identity.Alias) (*logical.R respData := map[string]interface{}{} respData["id"] = alias.ID respData["canonical_id"] = alias.CanonicalID - respData["mount_type"] = alias.MountType respData["mount_accessor"] = alias.MountAccessor - respData["mount_path"] = alias.MountPath respData["metadata"] = alias.Metadata respData["name"] = alias.Name respData["merged_from_canonical_ids"] = alias.MergedFromCanonicalIDs + if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil { + respData["mount_path"] = mountValidationResp.MountPath + respData["mount_type"] = mountValidationResp.MountType + } + // Convert protobuf timestamp into RFC3339 format respData["creation_time"] = ptypes.TimestampString(alias.CreationTime) respData["last_update_time"] = ptypes.TimestampString(alias.LastUpdateTime) @@ -416,15 +421,46 @@ func (i *IdentityStore) pathAliasIDList() framework.OperationFunc { } var aliasIDs []string + aliasInfo := map[string]interface{}{} + + type mountInfo struct { + MountType string + MountPath string + } + mountAccessorMap := map[string]mountInfo{} + for { raw := iter.Next() if raw == nil { break } - aliasIDs = append(aliasIDs, raw.(*identity.Alias).ID) + alias := raw.(*identity.Alias) + aliasIDs = append(aliasIDs, alias.ID) + aliasInfoEntry := map[string]interface{}{ + "name": alias.Name, + "canonical_id": alias.CanonicalID, + "mount_accessor": alias.MountAccessor, + } + + mi, ok := mountAccessorMap[alias.MountAccessor] + if ok { + aliasInfoEntry["mount_type"] = mi.MountType + aliasInfoEntry["mount_path"] = mi.MountPath + } else { + mi = mountInfo{} + if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil { + mi.MountType = mountValidationResp.MountType + mi.MountPath = mountValidationResp.MountPath + aliasInfoEntry["mount_type"] = mi.MountType + aliasInfoEntry["mount_path"] = 
mi.MountPath + } + mountAccessorMap[alias.MountAccessor] = mi + } + + aliasInfo[alias.ID] = aliasInfoEntry } - return logical.ListResponse(aliasIDs), nil + return logical.ListResponseWithInfo(aliasIDs, aliasInfo), nil } } diff --git a/vault/identity_store_aliases_ext_test.go b/vault/identity_store_aliases_ext_test.go index 4051257b0..37ed4cb77 100644 --- a/vault/identity_store_aliases_ext_test.go +++ b/vault/identity_store_aliases_ext_test.go @@ -8,6 +8,7 @@ import ( "github.com/hashicorp/vault/logical" "github.com/hashicorp/vault/vault" + "github.com/hashicorp/vault/builtin/credential/github" credLdap "github.com/hashicorp/vault/builtin/credential/ldap" ) @@ -60,3 +61,161 @@ func TestIdentityStore_EntityAliasLocalMount(t *testing.T) { t.Fatalf("expected error since mount is local") } } + +func TestIdentityStore_ListAlias(t *testing.T) { + coreConfig := &vault.CoreConfig{ + CredentialBackends: map[string]logical.Factory{ + "github": github.Factory, + }, + } + cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ + HandlerFunc: vaulthttp.Handler, + }) + cluster.Start() + defer cluster.Cleanup() + + core := cluster.Cores[0].Core + vault.TestWaitActive(t, core) + client := cluster.Cores[0].Client + + err := client.Sys().EnableAuthWithOptions("github", &api.EnableAuthOptions{ + Type: "github", + }) + if err != nil { + t.Fatal(err) + } + + mounts, err := client.Sys().ListAuth() + if err != nil { + t.Fatal(err) + } + var githubAccessor string + for k, v := range mounts { + t.Logf("key: %v\nmount: %#v", k, *v) + if k == "github/" { + githubAccessor = v.Accessor + break + } + } + if githubAccessor == "" { + t.Fatal("did not find github accessor") + } + + resp, err := client.Logical().Write("identity/entity", nil) + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + if resp == nil { + t.Fatalf("expected a non-nil response") + } + + entityID := resp.Data["id"].(string) + + // Create an alias + resp, err = 
client.Logical().Write("identity/entity-alias", map[string]interface{}{ + "name": "testaliasname", + "mount_accessor": githubAccessor, + }) + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + testAliasCanonicalID := resp.Data["canonical_id"].(string) + testAliasAliasID := resp.Data["id"].(string) + + resp, err = client.Logical().Write("identity/entity-alias", map[string]interface{}{ + "name": "entityalias", + "mount_accessor": githubAccessor, + "canonical_id": entityID, + }) + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + entityAliasAliasID := resp.Data["id"].(string) + + resp, err = client.Logical().List("identity/entity-alias/id") + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + + keys := resp.Data["keys"].([]interface{}) + if len(keys) != 2 { + t.Fatalf("bad: length of alias IDs listed; expected: 2, actual: %d", len(keys)) + } + + // Do some due diligence on the key info + aliasInfoRaw, ok := resp.Data["key_info"] + if !ok { + t.Fatal("expected key_info map in response") + } + aliasInfo := aliasInfoRaw.(map[string]interface{}) + for _, keyRaw := range keys { + key := keyRaw.(string) + infoRaw, ok := aliasInfo[key] + if !ok { + t.Fatal("expected key info") + } + info := infoRaw.(map[string]interface{}) + currName := "entityalias" + if info["canonical_id"].(string) == testAliasCanonicalID { + currName = "testaliasname" + } + t.Logf("alias info: %#v", info) + switch { + case info["name"].(string) != currName: + t.Fatalf("bad name: %v", info["name"].(string)) + case info["mount_accessor"].(string) != githubAccessor: + t.Fatalf("bad mount_path: %v", info["mount_accessor"].(string)) + } + } + + // Now do the same with entity info + resp, err = client.Logical().List("identity/entity/id") + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + + keys = resp.Data["keys"].([]interface{}) + if len(keys) != 2 { + t.Fatalf("bad: length of entity IDs listed; expected: 2, actual: %d", len(keys)) + } + + entityInfoRaw, ok 
:= resp.Data["key_info"] + if !ok { + t.Fatal("expected key_info map in response") + } + + // This is basically verifying that the entity has the alias in key_info + // that we expect to be tied to it, plus tests a value further down in it + // for fun + entityInfo := entityInfoRaw.(map[string]interface{}) + for _, keyRaw := range keys { + key := keyRaw.(string) + infoRaw, ok := entityInfo[key] + if !ok { + t.Fatal("expected key info") + } + info := infoRaw.(map[string]interface{}) + t.Logf("entity info: %#v", info) + currAliasID := entityAliasAliasID + if key == testAliasCanonicalID { + currAliasID = testAliasAliasID + } + currAliases := info["aliases"].([]interface{}) + if len(currAliases) != 1 { + t.Fatal("bad aliases length") + } + for _, v := range currAliases { + curr := v.(map[string]interface{}) + switch { + case curr["id"].(string) != currAliasID: + t.Fatalf("bad alias id: %v", curr["id"]) + case curr["mount_accessor"].(string) != githubAccessor: + t.Fatalf("bad mount accessor: %v", curr["mount_accessor"]) + case curr["mount_path"].(string) != "auth/github/": + t.Fatalf("bad mount path: %v", curr["mount_path"]) + case curr["mount_type"].(string) != "github": + t.Fatalf("bad mount type: %v", curr["mount_type"]) + } + } + } +} diff --git a/vault/identity_store_aliases_test.go b/vault/identity_store_aliases_test.go index 081e8c3c0..8d088b3b1 100644 --- a/vault/identity_store_aliases_test.go +++ b/vault/identity_store_aliases_test.go @@ -9,62 +9,6 @@ import ( "github.com/hashicorp/vault/logical" ) -func TestIdentityStore_ListAlias(t *testing.T) { - var err error - var resp *logical.Response - - is, githubAccessor, _ := testIdentityStoreWithGithubAuth(t) - - entityReq := &logical.Request{ - Operation: logical.UpdateOperation, - Path: "entity", - } - resp, err = is.HandleRequest(context.Background(), entityReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("err:%v resp:%#v", err, resp) - } - if resp == nil { - t.Fatalf("expected a non-nil 
response") - } - entityID := resp.Data["id"].(string) - - // Create an alias - aliasData := map[string]interface{}{ - "name": "testaliasname", - "mount_accessor": githubAccessor, - } - aliasReq := &logical.Request{ - Operation: logical.UpdateOperation, - Path: "entity-alias", - Data: aliasData, - } - resp, err = is.HandleRequest(context.Background(), aliasReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("err:%v resp:%#v", err, resp) - } - - aliasData["name"] = "entityalias" - aliasData["entity_id"] = entityID - resp, err = is.HandleRequest(context.Background(), aliasReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("err:%v resp:%#v", err, resp) - } - - listReq := &logical.Request{ - Operation: logical.ListOperation, - Path: "entity-alias/id", - } - resp, err = is.HandleRequest(context.Background(), listReq) - if err != nil || (resp != nil && resp.IsError()) { - t.Fatalf("err:%v resp:%#v", err, resp) - } - - keys := resp.Data["keys"].([]string) - if len(keys) != 2 { - t.Fatalf("bad: length of alias IDs listed; expected: 2, actual: %d", len(keys)) - } -} - // This test is required because MemDB does not take care of ensuring // uniqueness of indexes that are marked unique. 
func TestIdentityStore_AliasSameAliasNames(t *testing.T) { diff --git a/vault/identity_store_entities.go b/vault/identity_store_entities.go index fccb5d3eb..5096ed0b0 100644 --- a/vault/identity_store_entities.go +++ b/vault/identity_store_entities.go @@ -459,14 +459,18 @@ func (i *IdentityStore) handleEntityReadCommon(entity *identity.Entity) (*logica aliasMap := map[string]interface{}{} aliasMap["id"] = alias.ID aliasMap["canonical_id"] = alias.CanonicalID - aliasMap["mount_type"] = alias.MountType aliasMap["mount_accessor"] = alias.MountAccessor - aliasMap["mount_path"] = alias.MountPath aliasMap["metadata"] = alias.Metadata aliasMap["name"] = alias.Name aliasMap["merged_from_canonical_ids"] = alias.MergedFromCanonicalIDs aliasMap["creation_time"] = ptypes.TimestampString(alias.CreationTime) aliasMap["last_update_time"] = ptypes.TimestampString(alias.LastUpdateTime) + + if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil { + aliasMap["mount_type"] = mountValidationResp.MountType + aliasMap["mount_path"] = mountValidationResp.MountPath + } + aliasesToReturn[aliasIdx] = aliasMap } @@ -522,15 +526,56 @@ func (i *IdentityStore) pathEntityIDList() framework.OperationFunc { } var entityIDs []string + entityInfo := map[string]interface{}{} + + type mountInfo struct { + MountType string + MountPath string + } + mountAccessorMap := map[string]mountInfo{} + for { raw := iter.Next() if raw == nil { break } - entityIDs = append(entityIDs, raw.(*identity.Entity).ID) + entity := raw.(*identity.Entity) + entityIDs = append(entityIDs, entity.ID) + entityInfoEntry := map[string]interface{}{ + "name": entity.Name, + } + if len(entity.Aliases) > 0 { + aliasList := make([]interface{}, 0, len(entity.Aliases)) + for _, alias := range entity.Aliases { + entry := map[string]interface{}{ + "id": alias.ID, + "name": alias.Name, + "mount_accessor": alias.MountAccessor, + } + + mi, ok := mountAccessorMap[alias.MountAccessor] + if 
ok { + entry["mount_type"] = mi.MountType + entry["mount_path"] = mi.MountPath + } else { + mi = mountInfo{} + if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil { + mi.MountType = mountValidationResp.MountType + mi.MountPath = mountValidationResp.MountPath + entry["mount_type"] = mi.MountType + entry["mount_path"] = mi.MountPath + } + mountAccessorMap[alias.MountAccessor] = mi + } + + aliasList = append(aliasList, entry) + } + entityInfoEntry["aliases"] = aliasList + } + entityInfo[entity.ID] = entityInfoEntry } - return logical.ListResponse(entityIDs), nil + return logical.ListResponseWithInfo(entityIDs, entityInfo), nil } } diff --git a/vault/identity_store_group_aliases.go b/vault/identity_store_group_aliases.go index 6c8962417..047fc4799 100644 --- a/vault/identity_store_group_aliases.go +++ b/vault/identity_store_group_aliases.go @@ -210,8 +210,9 @@ func (i *IdentityStore) handleGroupAliasUpdateCommon(req *logical.Request, d *fr } group.Alias.Name = groupAliasName - group.Alias.MountType = mountValidationResp.MountType group.Alias.MountAccessor = mountValidationResp.MountAccessor + // Explicitly correct for previous versions that persisted this + group.Alias.MountType = "" err = i.sanitizeAndUpsertGroup(group, nil) if err != nil { @@ -267,15 +268,46 @@ func (i *IdentityStore) pathGroupAliasIDList() framework.OperationFunc { } var groupAliasIDs []string + aliasInfo := map[string]interface{}{} + + type mountInfo struct { + MountType string + MountPath string + } + mountAccessorMap := map[string]mountInfo{} + for { raw := iter.Next() if raw == nil { break } - groupAliasIDs = append(groupAliasIDs, raw.(*identity.Alias).ID) + alias := raw.(*identity.Alias) + groupAliasIDs = append(groupAliasIDs, alias.ID) + entry := map[string]interface{}{ + "name": alias.Name, + "canonical_id": alias.CanonicalID, + "mount_accessor": alias.MountAccessor, + } + + mi, ok := mountAccessorMap[alias.MountAccessor] + if ok { 
+ entry["mount_type"] = mi.MountType + entry["mount_path"] = mi.MountPath + } else { + mi = mountInfo{} + if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil { + mi.MountType = mountValidationResp.MountType + mi.MountPath = mountValidationResp.MountPath + entry["mount_type"] = mi.MountType + entry["mount_path"] = mi.MountPath + } + mountAccessorMap[alias.MountAccessor] = mi + } + + aliasInfo[alias.ID] = entry } - return logical.ListResponse(groupAliasIDs), nil + return logical.ListResponseWithInfo(groupAliasIDs, aliasInfo), nil } } diff --git a/vault/identity_store_groups.go b/vault/identity_store_groups.go index b492d87dd..8bd48d6f8 100644 --- a/vault/identity_store_groups.go +++ b/vault/identity_store_groups.go @@ -331,15 +331,52 @@ func (i *IdentityStore) pathGroupIDList() framework.OperationFunc { } var groupIDs []string + groupInfo := map[string]interface{}{} + + type mountInfo struct { + MountType string + MountPath string + } + mountAccessorMap := map[string]mountInfo{} + for { raw := iter.Next() if raw == nil { break } - groupIDs = append(groupIDs, raw.(*identity.Group).ID) + group := raw.(*identity.Group) + groupIDs = append(groupIDs, group.ID) + groupInfoEntry := map[string]interface{}{ + "name": group.Name, + } + if group.Alias != nil { + entry := map[string]interface{}{ + "id": group.Alias.ID, + "name": group.Alias.Name, + "mount_accessor": group.Alias.MountAccessor, + } + + mi, ok := mountAccessorMap[group.Alias.MountAccessor] + if ok { + entry["mount_type"] = mi.MountType + entry["mount_path"] = mi.MountPath + } else { + mi = mountInfo{} + if mountValidationResp := i.core.router.validateMountByAccessor(group.Alias.MountAccessor); mountValidationResp != nil { + mi.MountType = mountValidationResp.MountType + mi.MountPath = mountValidationResp.MountPath + entry["mount_type"] = mi.MountType + entry["mount_path"] = mi.MountPath + } + mountAccessorMap[group.Alias.MountAccessor] = mi + } + + 
groupInfoEntry["alias"] = entry + } + groupInfo[group.ID] = groupInfoEntry } - return logical.ListResponse(groupIDs), nil + return logical.ListResponseWithInfo(groupIDs, groupInfo), nil } } diff --git a/vault/identity_store_groups_ext_test.go b/vault/identity_store_groups_ext_test.go index 372454779..3881a9f01 100644 --- a/vault/identity_store_groups_ext_test.go +++ b/vault/identity_store_groups_ext_test.go @@ -8,9 +8,144 @@ import ( "github.com/hashicorp/vault/logical" "github.com/hashicorp/vault/vault" + "github.com/hashicorp/vault/builtin/credential/github" credLdap "github.com/hashicorp/vault/builtin/credential/ldap" ) +func TestIdentityStore_ListGroupAlias(t *testing.T) { + coreConfig := &vault.CoreConfig{ + CredentialBackends: map[string]logical.Factory{ + "github": github.Factory, + }, + } + cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{ + HandlerFunc: vaulthttp.Handler, + }) + cluster.Start() + defer cluster.Cleanup() + + core := cluster.Cores[0].Core + vault.TestWaitActive(t, core) + client := cluster.Cores[0].Client + + err := client.Sys().EnableAuthWithOptions("github", &api.EnableAuthOptions{ + Type: "github", + }) + if err != nil { + t.Fatal(err) + } + + mounts, err := client.Sys().ListAuth() + if err != nil { + t.Fatal(err) + } + var githubAccessor string + for k, v := range mounts { + t.Logf("key: %v\nmount: %#v", k, *v) + if k == "github/" { + githubAccessor = v.Accessor + break + } + } + if githubAccessor == "" { + t.Fatal("did not find github accessor") + } + + resp, err := client.Logical().Write("identity/group", map[string]interface{}{ + "type": "external", + }) + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + + groupID := resp.Data["id"].(string) + + resp, err = client.Logical().Write("identity/group-alias", map[string]interface{}{ + "name": "groupalias", + "mount_accessor": githubAccessor, + "canonical_id": groupID, + }) + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + aliasID := 
resp.Data["id"].(string) + + resp, err = client.Logical().List("identity/group-alias/id") + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + + keys := resp.Data["keys"].([]interface{}) + if len(keys) != 1 { + t.Fatalf("bad: length of alias IDs listed; expected: 1, actual: %d", len(keys)) + } + + // Do some due diligence on the key info + aliasInfoRaw, ok := resp.Data["key_info"] + if !ok { + t.Fatal("expected key_info map in response") + } + aliasInfo := aliasInfoRaw.(map[string]interface{}) + if len(aliasInfo) != 1 { + t.Fatalf("bad: length of alias ID key info; expected: 1, actual: %d", len(aliasInfo)) + } + + infoRaw, ok := aliasInfo[aliasID] + if !ok { + t.Fatal("expected to find alias ID in key info map") + } + info := infoRaw.(map[string]interface{}) + t.Logf("alias info: %#v", info) + switch { + case info["name"].(string) != "groupalias": + t.Fatalf("bad name: %v", info["name"].(string)) + case info["mount_accessor"].(string) != githubAccessor: + t.Fatalf("bad mount_accessor: %v", info["mount_accessor"].(string)) + } + + // Now do the same with group info + resp, err = client.Logical().List("identity/group/id") + if err != nil { + t.Fatalf("err:%v resp:%#v", err, resp) + } + + keys = resp.Data["keys"].([]interface{}) + if len(keys) != 1 { + t.Fatalf("bad: length of group IDs listed; expected: 1, actual: %d", len(keys)) + } + + groupInfoRaw, ok := resp.Data["key_info"] + if !ok { + t.Fatal("expected key_info map in response") + } + + // This is basically verifying that the group has the alias in key_info + // that we expect to be tied to it, plus tests a value further down in it + // for fun + groupInfo := groupInfoRaw.(map[string]interface{}) + if len(groupInfo) != 1 { + t.Fatalf("bad: length of group ID key info; expected: 1, actual: %d", len(groupInfo)) + } + + infoRaw, ok = groupInfo[groupID] + if !ok { + t.Fatal("expected key info") + } + info = infoRaw.(map[string]interface{}) + t.Logf("group info: %#v", info) + alias := 
info["alias"].(map[string]interface{}) + switch { + case alias["id"].(string) != aliasID: + t.Fatalf("bad alias id: %v", alias["id"]) + case alias["mount_accessor"].(string) != githubAccessor: + t.Fatalf("bad mount accessor: %v", alias["mount_accessor"]) + case alias["mount_path"].(string) != "auth/github/": + t.Fatalf("bad mount path: %v", alias["mount_path"]) + case alias["mount_type"].(string) != "github": + t.Fatalf("bad mount type: %v", alias["mount_type"]) + } +} + // Testing the fix for GH-4351 func TestIdentityStore_ExternalGroupMembershipsAcrossMounts(t *testing.T) { coreConfig := &vault.CoreConfig{ diff --git a/vault/identity_store_schema.go b/vault/identity_store_schema.go index 33bbae4d8..3fce0bf35 100644 --- a/vault/identity_store_schema.go +++ b/vault/identity_store_schema.go @@ -54,13 +54,6 @@ func aliasesTableSchema() *memdb.TableSchema { Field: "CanonicalID", }, }, - "mount_type": &memdb.IndexSchema{ - Name: "mount_type", - Unique: false, - Indexer: &memdb.StringFieldIndex{ - Field: "MountType", - }, - }, "factors": &memdb.IndexSchema{ Name: "factors", Unique: true, @@ -205,13 +198,6 @@ func groupAliasesTableSchema() *memdb.TableSchema { Field: "CanonicalID", }, }, - "mount_type": &memdb.IndexSchema{ - Name: "mount_type", - Unique: false, - Indexer: &memdb.StringFieldIndex{ - Field: "MountType", - }, - }, "factors": &memdb.IndexSchema{ Name: "factors", Unique: true, From 12976bf60e0973a49e42437e50c79ed6a83d79b7 Mon Sep 17 00:00:00 2001 From: Becca Petrin Date: Fri, 25 May 2018 11:35:09 -0700 Subject: [PATCH 2/8] add userpass note on bound cidrs (#4610) --- website/source/api/auth/userpass/index.html.md | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/website/source/api/auth/userpass/index.html.md b/website/source/api/auth/userpass/index.html.md index b7ecad404..593975b67 100644 --- a/website/source/api/auth/userpass/index.html.md +++ b/website/source/api/auth/userpass/index.html.md @@ -34,13 +34,17 @@ Create a new user or 
update an existing user. This path honors the distinction b string, only the `default` policy will be applicable to the user. - `ttl` `(string: "")` - The lease duration which decides login expiration. - `max_ttl` `(string: "")` - Maximum duration after which login should expire. +- `bound_cidrs` `(string: "", or list: [])` – If set, restricts usage of the + login and token to client IPs falling within the range of the specified + CIDR(s). ### Sample Payload ```json { "password": "superSecretPassword", - "policies": "admin,default" + "policies": "admin,default", + "bound_cidrs": ["127.0.0.1/32", "128.252.0.0/16"] } ``` From 94ae5d2567f67f022d171c8f976293d3e4b350db Mon Sep 17 00:00:00 2001 From: Becca Petrin Date: Fri, 25 May 2018 11:37:41 -0700 Subject: [PATCH 3/8] Add Active Directory secrets plugin (#4635) --- command/commands.go | 2 + .../hashicorp/vault-plugin-secrets-ad/LICENSE | 373 ++++ .../vault-plugin-secrets-ad/plugin/backend.go | 80 + .../plugin/client/client.go | 140 ++ .../plugin/client/entry.go | 41 + .../plugin/client/fieldregistry.go | 113 ++ .../plugin/client/time.go | 43 + .../plugin/engineconf.go | 10 + .../plugin/passwordconf.go | 15 + .../plugin/path_config.go | 201 ++ .../plugin/path_creds.go | 216 +++ .../plugin/path_roles.go | 254 +++ .../vault-plugin-secrets-ad/plugin/role.go | 28 + .../plugin/util/passwords.go | 38 + .../plugin/util/secrets_client.go | 73 + vendor/golang.org/x/text/encoding/encoding.go | 335 ++++ .../internal/identifier/identifier.go | 81 + .../text/encoding/internal/identifier/mib.go | 1621 +++++++++++++++++ .../x/text/encoding/internal/internal.go | 75 + .../x/text/encoding/unicode/override.go | 82 + .../x/text/encoding/unicode/unicode.go | 434 +++++ .../internal/utf8internal/utf8internal.go | 87 + vendor/golang.org/x/text/runes/cond.go | 187 ++ vendor/golang.org/x/text/runes/runes.go | 355 ++++ vendor/vendor.json | 18 + 25 files changed, 4902 insertions(+) create mode 100644 
vendor/github.com/hashicorp/vault-plugin-secrets-ad/LICENSE create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/backend.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/client.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/entry.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/fieldregistry.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/time.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/engineconf.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/passwordconf.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_config.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_creds.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_roles.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/role.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/passwords.go create mode 100644 vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/secrets_client.go create mode 100644 vendor/golang.org/x/text/encoding/encoding.go create mode 100644 vendor/golang.org/x/text/encoding/internal/identifier/identifier.go create mode 100644 vendor/golang.org/x/text/encoding/internal/identifier/mib.go create mode 100644 vendor/golang.org/x/text/encoding/internal/internal.go create mode 100644 vendor/golang.org/x/text/encoding/unicode/override.go create mode 100644 vendor/golang.org/x/text/encoding/unicode/unicode.go create mode 100644 vendor/golang.org/x/text/internal/utf8internal/utf8internal.go create mode 100644 vendor/golang.org/x/text/runes/cond.go create mode 100644 vendor/golang.org/x/text/runes/runes.go diff --git a/command/commands.go b/command/commands.go index 
a02cdda9b..9e092ed47 100644 --- a/command/commands.go +++ b/command/commands.go @@ -6,6 +6,7 @@ import ( "os/signal" "syscall" + ad "github.com/hashicorp/vault-plugin-secrets-ad/plugin" gcp "github.com/hashicorp/vault-plugin-secrets-gcp/plugin" kv "github.com/hashicorp/vault-plugin-secrets-kv" "github.com/hashicorp/vault/audit" @@ -110,6 +111,7 @@ var ( } logicalBackends = map[string]logical.Factory{ + "ad": ad.Factory, "aws": aws.Factory, "cassandra": cassandra.Factory, "consul": consul.Factory, diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/LICENSE b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/LICENSE new file mode 100644 index 000000000..a612ad981 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/LICENSE @@ -0,0 +1,373 @@ +Mozilla Public License Version 2.0 +================================== + +1. Definitions +-------------- + +1.1. "Contributor" + means each individual or legal entity that creates, contributes to + the creation of, or owns Covered Software. + +1.2. "Contributor Version" + means the combination of the Contributions of others (if any) used + by a Contributor and that particular Contributor's Contribution. + +1.3. "Contribution" + means Covered Software of a particular Contributor. + +1.4. "Covered Software" + means Source Code Form to which the initial Contributor has attached + the notice in Exhibit A, the Executable Form of such Source Code + Form, and Modifications of such Source Code Form, in each case + including portions thereof. + +1.5. "Incompatible With Secondary Licenses" + means + + (a) that the initial Contributor has attached the notice described + in Exhibit B to the Covered Software; or + + (b) that the Covered Software was made available under the terms of + version 1.1 or earlier of the License, but not also under the + terms of a Secondary License. + +1.6. "Executable Form" + means any form of the work other than Source Code Form. + +1.7. 
"Larger Work" + means a work that combines Covered Software with other material, in + a separate file or files, that is not Covered Software. + +1.8. "License" + means this document. + +1.9. "Licensable" + means having the right to grant, to the maximum extent possible, + whether at the time of the initial grant or subsequently, any and + all of the rights conveyed by this License. + +1.10. "Modifications" + means any of the following: + + (a) any file in Source Code Form that results from an addition to, + deletion from, or modification of the contents of Covered + Software; or + + (b) any new file in Source Code Form that contains any Covered + Software. + +1.11. "Patent Claims" of a Contributor + means any patent claim(s), including without limitation, method, + process, and apparatus claims, in any patent Licensable by such + Contributor that would be infringed, but for the grant of the + License, by the making, using, selling, offering for sale, having + made, import, or transfer of either its Contributions or its + Contributor Version. + +1.12. "Secondary License" + means either the GNU General Public License, Version 2.0, the GNU + Lesser General Public License, Version 2.1, the GNU Affero General + Public License, Version 3.0, or any later versions of those + licenses. + +1.13. "Source Code Form" + means the form of the work preferred for making modifications. + +1.14. "You" (or "Your") + means an individual or a legal entity exercising rights under this + License. For legal entities, "You" includes any entity that + controls, is controlled by, or is under common control with You. For + purposes of this definition, "control" means (a) the power, direct + or indirect, to cause the direction or management of such entity, + whether by contract or otherwise, or (b) ownership of more than + fifty percent (50%) of the outstanding shares or beneficial + ownership of such entity. + +2. License Grants and Conditions +-------------------------------- + +2.1. 
Grants + +Each Contributor hereby grants You a world-wide, royalty-free, +non-exclusive license: + +(a) under intellectual property rights (other than patent or trademark) + Licensable by such Contributor to use, reproduce, make available, + modify, display, perform, distribute, and otherwise exploit its + Contributions, either on an unmodified basis, with Modifications, or + as part of a Larger Work; and + +(b) under Patent Claims of such Contributor to make, use, sell, offer + for sale, have made, import, and otherwise transfer either its + Contributions or its Contributor Version. + +2.2. Effective Date + +The licenses granted in Section 2.1 with respect to any Contribution +become effective for each Contribution on the date the Contributor first +distributes such Contribution. + +2.3. Limitations on Grant Scope + +The licenses granted in this Section 2 are the only rights granted under +this License. No additional rights or licenses will be implied from the +distribution or licensing of Covered Software under this License. +Notwithstanding Section 2.1(b) above, no patent license is granted by a +Contributor: + +(a) for any code that a Contributor has removed from Covered Software; + or + +(b) for infringements caused by: (i) Your and any other third party's + modifications of Covered Software, or (ii) the combination of its + Contributions with other software (except as part of its Contributor + Version); or + +(c) under Patent Claims infringed by Covered Software in the absence of + its Contributions. + +This License does not grant any rights in the trademarks, service marks, +or logos of any Contributor (except as may be necessary to comply with +the notice requirements in Section 3.4). + +2.4. 
Subsequent Licenses + +No Contributor makes additional grants as a result of Your choice to +distribute the Covered Software under a subsequent version of this +License (see Section 10.2) or under the terms of a Secondary License (if +permitted under the terms of Section 3.3). + +2.5. Representation + +Each Contributor represents that the Contributor believes its +Contributions are its original creation(s) or it has sufficient rights +to grant the rights to its Contributions conveyed by this License. + +2.6. Fair Use + +This License is not intended to limit any rights You have under +applicable copyright doctrines of fair use, fair dealing, or other +equivalents. + +2.7. Conditions + +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted +in Section 2.1. + +3. Responsibilities +------------------- + +3.1. Distribution of Source Form + +All distribution of Covered Software in Source Code Form, including any +Modifications that You create or to which You contribute, must be under +the terms of this License. You must inform recipients that the Source +Code Form of the Covered Software is governed by the terms of this +License, and how they can obtain a copy of this License. You may not +attempt to alter or restrict the recipients' rights in the Source Code +Form. + +3.2. 
Distribution of Executable Form + +If You distribute Covered Software in Executable Form then: + +(a) such Covered Software must also be made available in Source Code + Form, as described in Section 3.1, and You must inform recipients of + the Executable Form how they can obtain a copy of such Source Code + Form by reasonable means in a timely manner, at a charge no more + than the cost of distribution to the recipient; and + +(b) You may distribute such Executable Form under the terms of this + License, or sublicense it under different terms, provided that the + license for the Executable Form does not attempt to limit or alter + the recipients' rights in the Source Code Form under this License. + +3.3. Distribution of a Larger Work + +You may create and distribute a Larger Work under terms of Your choice, +provided that You also comply with the requirements of this License for +the Covered Software. If the Larger Work is a combination of Covered +Software with a work governed by one or more Secondary Licenses, and the +Covered Software is not Incompatible With Secondary Licenses, this +License permits You to additionally distribute such Covered Software +under the terms of such Secondary License(s), so that the recipient of +the Larger Work may, at their option, further distribute the Covered +Software under the terms of either this License or such Secondary +License(s). + +3.4. Notices + +You may not remove or alter the substance of any license notices +(including copyright notices, patent notices, disclaimers of warranty, +or limitations of liability) contained within the Source Code Form of +the Covered Software, except that You may alter any license notices to +the extent required to remedy known factual inaccuracies. + +3.5. Application of Additional Terms + +You may choose to offer, and to charge a fee for, warranty, support, +indemnity or liability obligations to one or more recipients of Covered +Software. 
However, You may do so only on Your own behalf, and not on +behalf of any Contributor. You must make it absolutely clear that any +such warranty, support, indemnity, or liability obligation is offered by +You alone, and You hereby agree to indemnify every Contributor for any +liability incurred by such Contributor as a result of warranty, support, +indemnity or liability terms You offer. You may include additional +disclaimers of warranty and limitations of liability specific to any +jurisdiction. + +4. Inability to Comply Due to Statute or Regulation +--------------------------------------------------- + +If it is impossible for You to comply with any of the terms of this +License with respect to some or all of the Covered Software due to +statute, judicial order, or regulation then You must: (a) comply with +the terms of this License to the maximum extent possible; and (b) +describe the limitations and the code they affect. Such description must +be placed in a text file included with all distributions of the Covered +Software under this License. Except to the extent prohibited by statute +or regulation, such description must be sufficiently detailed for a +recipient of ordinary skill to be able to understand it. + +5. Termination +-------------- + +5.1. The rights granted under this License will terminate automatically +if You fail to comply with any of its terms. However, if You become +compliant, then the rights granted under this License from a particular +Contributor are reinstated (a) provisionally, unless and until such +Contributor explicitly and finally terminates Your grants, and (b) on an +ongoing basis, if such Contributor fails to notify You of the +non-compliance by some reasonable means prior to 60 days after You have +come back into compliance. 
Moreover, Your grants from a particular +Contributor are reinstated on an ongoing basis if such Contributor +notifies You of the non-compliance by some reasonable means, this is the +first time You have received notice of non-compliance with this License +from such Contributor, and You become compliant prior to 30 days after +Your receipt of the notice. + +5.2. If You initiate litigation against any entity by asserting a patent +infringement claim (excluding declaratory judgment actions, +counter-claims, and cross-claims) alleging that a Contributor Version +directly or indirectly infringes any patent, then the rights granted to +You by any and all Contributors for the Covered Software under Section +2.1 of this License shall terminate. + +5.3. In the event of termination under Sections 5.1 or 5.2 above, all +end user license agreements (excluding distributors and resellers) which +have been validly granted by You or Your distributors under this License +prior to termination shall survive termination. + +************************************************************************ +* * +* 6. Disclaimer of Warranty * +* ------------------------- * +* * +* Covered Software is provided under this License on an "as is" * +* basis, without warranty of any kind, either expressed, implied, or * +* statutory, including, without limitation, warranties that the * +* Covered Software is free of defects, merchantable, fit for a * +* particular purpose or non-infringing. The entire risk as to the * +* quality and performance of the Covered Software is with You. * +* Should any Covered Software prove defective in any respect, You * +* (not any Contributor) assume the cost of any necessary servicing, * +* repair, or correction. This disclaimer of warranty constitutes an * +* essential part of this License. No use of any Covered Software is * +* authorized under this License except under this disclaimer. 
* +* * +************************************************************************ + +************************************************************************ +* * +* 7. Limitation of Liability * +* -------------------------- * +* * +* Under no circumstances and under no legal theory, whether tort * +* (including negligence), contract, or otherwise, shall any * +* Contributor, or anyone who distributes Covered Software as * +* permitted above, be liable to You for any direct, indirect, * +* special, incidental, or consequential damages of any character * +* including, without limitation, damages for lost profits, loss of * +* goodwill, work stoppage, computer failure or malfunction, or any * +* and all other commercial damages or losses, even if such party * +* shall have been informed of the possibility of such damages. This * +* limitation of liability shall not apply to liability for death or * +* personal injury resulting from such party's negligence to the * +* extent applicable law prohibits such limitation. Some * +* jurisdictions do not allow the exclusion or limitation of * +* incidental or consequential damages, so this exclusion and * +* limitation may not apply to You. * +* * +************************************************************************ + +8. Litigation +------------- + +Any litigation relating to this License may be brought only in the +courts of a jurisdiction where the defendant maintains its principal +place of business and such litigation shall be governed by laws of that +jurisdiction, without reference to its conflict-of-law provisions. +Nothing in this Section shall prevent a party's ability to bring +cross-claims or counter-claims. + +9. Miscellaneous +---------------- + +This License represents the complete agreement concerning the subject +matter hereof. If any provision of this License is held to be +unenforceable, such provision shall be reformed only to the extent +necessary to make it enforceable. 
Any law or regulation which provides +that the language of a contract shall be construed against the drafter +shall not be used to construe this License against a Contributor. + +10. Versions of the License +--------------------------- + +10.1. New Versions + +Mozilla Foundation is the license steward. Except as provided in Section +10.3, no one other than the license steward has the right to modify or +publish new versions of this License. Each version will be given a +distinguishing version number. + +10.2. Effect of New Versions + +You may distribute the Covered Software under the terms of the version +of the License under which You originally received the Covered Software, +or under the terms of any subsequent version published by the license +steward. + +10.3. Modified Versions + +If you create software not governed by this License, and you want to +create a new license for such software, you may create and use a +modified version of this License if you rename the license and remove +any references to the name of the license steward (except to note that +such modified license differs from this License). + +10.4. Distributing Source Code Form that is Incompatible With Secondary +Licenses + +If You choose to distribute Source Code Form that is Incompatible With +Secondary Licenses under the terms of this version of the License, the +notice described in Exhibit B of this License must be attached. + +Exhibit A - Source Code Form License Notice +------------------------------------------- + + This Source Code Form is subject to the terms of the Mozilla Public + License, v. 2.0. If a copy of the MPL was not distributed with this + file, You can obtain one at http://mozilla.org/MPL/2.0/. + +If it is not possible or desirable to put the notice in a particular +file, then You may include the notice in a location (such as a LICENSE +file in a relevant directory) where a recipient would be likely to look +for such a notice. 
+ +You may add additional accurate notices of copyright ownership. + +Exhibit B - "Incompatible With Secondary Licenses" Notice +--------------------------------------------------------- + + This Source Code Form is "Incompatible With Secondary Licenses", as + defined by the Mozilla Public License, v. 2.0. diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/backend.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/backend.go new file mode 100644 index 000000000..1ed1143fc --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/backend.go @@ -0,0 +1,80 @@ +package plugin + +import ( + "context" + "sync" + "time" + + "github.com/hashicorp/vault-plugin-secrets-ad/plugin/client" + "github.com/hashicorp/vault-plugin-secrets-ad/plugin/util" + "github.com/hashicorp/vault/helper/ldaputil" + "github.com/hashicorp/vault/logical" + "github.com/hashicorp/vault/logical/framework" + "github.com/patrickmn/go-cache" +) + +func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) { + backend := newBackend(util.NewSecretsClient(conf.Logger)) + backend.Setup(ctx, conf) + return backend, nil +} + +func newBackend(client secretsClient) *backend { + adBackend := &backend{ + client: client, + roleCache: cache.New(roleCacheExpiration, roleCacheCleanup), + credCache: cache.New(credCacheExpiration, credCacheCleanup), + } + adBackend.Backend = &framework.Backend{ + Help: backendHelp, + Paths: []*framework.Path{ + adBackend.pathConfig(), + adBackend.pathRoles(), + adBackend.pathListRoles(), + adBackend.pathCreds(), + }, + PathsSpecial: &logical.Paths{ + SealWrapStorage: []string{ + configPath, + credPrefix, + }, + }, + Invalidate: adBackend.Invalidate, + BackendType: logical.TypeLogical, + } + return adBackend +} + +type backend struct { + logical.Backend + + client secretsClient + + roleCache *cache.Cache + credCache *cache.Cache + credLock sync.Mutex +} + +func (b *backend) Invalidate(ctx context.Context, 
key string) { + b.invalidateRole(ctx, key) + b.invalidateCred(ctx, key) +} + +// Wraps the *util.SecretsClient in an interface to support testing. +type secretsClient interface { + Get(conf *ldaputil.ConfigEntry, serviceAccountName string) (*client.Entry, error) + GetPasswordLastSet(conf *ldaputil.ConfigEntry, serviceAccountName string) (time.Time, error) + UpdatePassword(conf *ldaputil.ConfigEntry, serviceAccountName string, newPassword string) error +} + +const backendHelp = ` +The Active Directory (AD) secrets engine rotates AD passwords dynamically, +and is designed for a high-load environment where many instances may be accessing +a shared password simultaneously. With a simple set up and a simple creds API, +it doesn't require instances to be manually registered in advance to gain access. +As long as access has been granted to the creds path via a method like +AppRole, they're available. + +Passwords are lazily rotated based on preset TTLs and can have a length configured to meet +your needs. 
+` diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/client.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/client.go new file mode 100644 index 000000000..a0fa2f2f5 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/client.go @@ -0,0 +1,140 @@ +package client + +import ( + "fmt" + "math" + "strings" + + "github.com/go-errors/errors" + "github.com/go-ldap/ldap" + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/vault/helper/ldaputil" + "golang.org/x/text/encoding/unicode" +) + +func NewClient(logger hclog.Logger) *Client { + return &Client{ + ldap: &ldaputil.Client{ + Logger: logger, + LDAP: ldaputil.NewLDAP(), + }, + } +} + +type Client struct { + ldap *ldaputil.Client +} + +func (c *Client) Search(cfg *ldaputil.ConfigEntry, filters map[*Field][]string) ([]*Entry, error) { + req := &ldap.SearchRequest{ + BaseDN: cfg.UserDN, + Scope: ldap.ScopeWholeSubtree, + Filter: toString(filters), + SizeLimit: math.MaxInt32, + } + + conn, err := c.ldap.DialLDAP(cfg) + if err != nil { + return nil, err + } + defer conn.Close() + + if err := bind(cfg, conn); err != nil { + return nil, err + } + + result, err := conn.Search(req) + if err != nil { + return nil, err + } + + entries := make([]*Entry, len(result.Entries)) + for i, rawEntry := range result.Entries { + entries[i] = NewEntry(rawEntry) + } + return entries, nil +} + +func (c *Client) UpdateEntry(cfg *ldaputil.ConfigEntry, filters map[*Field][]string, newValues map[*Field][]string) error { + entries, err := c.Search(cfg, filters) + if err != nil { + return err + } + if len(entries) != 1 { + return fmt.Errorf("filter of %s doesn't match just one entry: %s", filters, entries) + } + + replaceAttributes := make([]ldap.PartialAttribute, len(newValues)) + i := 0 + for field, vals := range newValues { + replaceAttributes[i] = ldap.PartialAttribute{ + Type: field.String(), + Vals: vals, + } + i++ + } + + modifyReq := &ldap.ModifyRequest{ 
+ DN: entries[0].DN, + ReplaceAttributes: replaceAttributes, + } + + conn, err := c.ldap.DialLDAP(cfg) + if err != nil { + return err + } + defer conn.Close() + + if err := bind(cfg, conn); err != nil { + return err + } + return conn.Modify(modifyReq) +} + +// UpdatePassword uses a Modify call under the hood because +// Active Directory doesn't recognize the passwordModify method. +// See https://github.com/go-ldap/ldap/issues/106 +// for more. +func (c *Client) UpdatePassword(cfg *ldaputil.ConfigEntry, filters map[*Field][]string, newPassword string) error { + pwdEncoded, err := formatPassword(newPassword) + if err != nil { + return err + } + + newValues := map[*Field][]string{ + FieldRegistry.UnicodePassword: {pwdEncoded}, + } + + return c.UpdateEntry(cfg, filters, newValues) +} + +// According to the MS docs, the password needs to be utf16 and enclosed in quotes. +func formatPassword(original string) (string, error) { + utf16 := unicode.UTF16(unicode.LittleEndian, unicode.IgnoreBOM) + return utf16.NewEncoder().String("\"" + original + "\"") +} + +// Ex. 
"(cn=Ellen Jones)" +func toString(filters map[*Field][]string) string { + var fieldEquals []string + for f, values := range filters { + for _, v := range values { + fieldEquals = append(fieldEquals, fmt.Sprintf("%s=%s", f, v)) + } + } + result := strings.Join(fieldEquals, ",") + return "(" + result + ")" +} + +func bind(cfg *ldaputil.ConfigEntry, conn ldaputil.Connection) error { + if cfg.BindPassword == "" { + return errors.New("unable to bind due to lack of configured password") + } + if cfg.UPNDomain != "" { + return conn.Bind(fmt.Sprintf("%s@%s", ldaputil.EscapeLDAPValue(cfg.BindDN), cfg.UPNDomain), cfg.BindPassword) + } + if cfg.BindDN != "" { + return conn.Bind(cfg.BindDN, cfg.BindPassword) + } + return errors.New("must provide binddn or upndomain") +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/entry.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/entry.go new file mode 100644 index 000000000..a928ad050 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/entry.go @@ -0,0 +1,41 @@ +package client + +import ( + "strings" + + "github.com/go-ldap/ldap" +) + +// Entry is an Active Directory-specific construct +// to make knowing and grabbing fields more convenient, +// while retaining all original information. +func NewEntry(ldapEntry *ldap.Entry) *Entry { + fieldMap := make(map[string][]string) + for _, attribute := range ldapEntry.Attributes { + field := FieldRegistry.Parse(attribute.Name) + if field == nil { + // This field simply isn't in the registry, no big deal. 
+ continue + } + fieldMap[field.String()] = attribute.Values + } + return &Entry{fieldMap: fieldMap, Entry: ldapEntry} +} + +type Entry struct { + *ldap.Entry + fieldMap map[string][]string +} + +func (e *Entry) Get(field *Field) ([]string, bool) { + values, found := e.fieldMap[field.String()] + return values, found +} + +func (e *Entry) GetJoined(field *Field) (string, bool) { + values, found := e.Get(field) + if !found { + return "", false + } + return strings.Join(values, ","), true +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/fieldregistry.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/fieldregistry.go new file mode 100644 index 000000000..4baf1e0a0 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/fieldregistry.go @@ -0,0 +1,113 @@ +package client + +import ( + "reflect" +) + +// FieldRegistry is designed to look and feel +// like an enum from another language like Python. +// +// Example: Accessing constants +// +// FieldRegistry.AccountExpires +// FieldRegistry.BadPasswordCount +// +// Example: Utility methods +// +// FieldRegistry.List() +// FieldRegistry.Parse("givenName") +// +var FieldRegistry = newFieldRegistry() + +// newFieldRegistry iterates through all the fields in the registry, +// pulls their ldap strings, and sets up each field to contain its ldap string +func newFieldRegistry() *fieldRegistry { + reg := &fieldRegistry{} + vOfReg := reflect.ValueOf(reg) + + registryFields := vOfReg.Elem() + for i := 0; i < registryFields.NumField(); i++ { + + if registryFields.Field(i).Kind() == reflect.Ptr { + + field := registryFields.Type().Field(i) + ldapString := field.Tag.Get("ldap") + ldapField := &Field{ldapString} + vOfLDAPField := reflect.ValueOf(ldapField) + + registryFields.FieldByName(field.Name).Set(vOfLDAPField) + + reg.fieldList = append(reg.fieldList, ldapField) + } + } + return reg +} + +// fieldRegistry isn't currently intended to be an exhaustive 
list - +// there are more fields in ActiveDirectory. However, these are the ones +// that may be useful to Vault. Feel free to add to this list! +type fieldRegistry struct { + AccountExpires *Field `ldap:"accountExpires"` + AdminCount *Field `ldap:"adminCount"` + BadPasswordCount *Field `ldap:"badPwdCount"` + BadPasswordTime *Field `ldap:"badPasswordTime"` + CodePage *Field `ldap:"codePage"` + CommonName *Field `ldap:"cn"` + CountryCode *Field `ldap:"countryCode"` + DisplayName *Field `ldap:"displayName"` + DistinguishedName *Field `ldap:"distinguishedName"` + DomainComponent *Field `ldap:"dc"` + DomainName *Field `ldap:"dn"` + DSCorePropogationData *Field `ldap:"dSCorePropagationData"` + GivenName *Field `ldap:"givenName"` + GroupType *Field `ldap:"groupType"` + Initials *Field `ldap:"initials"` + InstanceType *Field `ldap:"instanceType"` + LastLogoff *Field `ldap:"lastLogoff"` + LastLogon *Field `ldap:"lastLogon"` + LastLogonTimestamp *Field `ldap:"lastLogonTimestamp"` + LockoutTime *Field `ldap:"lockoutTime"` + LogonCount *Field `ldap:"logonCount"` + MemberOf *Field `ldap:"memberOf"` + Name *Field `ldap:"name"` + ObjectCategory *Field `ldap:"objectCategory"` + ObjectClass *Field `ldap:"objectClass"` + ObjectGUID *Field `ldap:"objectGUID"` + ObjectSID *Field `ldap:"objectSid"` + OrganizationalUnit *Field `ldap:"ou"` + PasswordLastSet *Field `ldap:"pwdLastSet"` + PrimaryGroupID *Field `ldap:"primaryGroupID"` + SAMAccountName *Field `ldap:"sAMAccountName"` + SAMAccountType *Field `ldap:"sAMAccountType"` + Surname *Field `ldap:"sn"` + UnicodePassword *Field `ldap:"unicodePwd"` + UpdateSequenceNumberChanged *Field `ldap:"uSNChanged"` + UpdateSequenceNumberCreated *Field `ldap:"uSNCreated"` + UserAccountControl *Field `ldap:"userAccountControl"` + UserPrincipalName *Field `ldap:"userPrincipalName"` + WhenCreated *Field `ldap:"whenCreated"` + WhenChanged *Field `ldap:"whenChanged"` + + fieldList []*Field +} + +func (r *fieldRegistry) List() []*Field { + return 
r.fieldList +} + +func (r *fieldRegistry) Parse(s string) *Field { + for _, f := range r.List() { + if f.String() == s { + return f + } + } + return nil +} + +type Field struct { + str string +} + +func (f *Field) String() string { + return f.str +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/time.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/time.go new file mode 100644 index 000000000..c451ba473 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/client/time.go @@ -0,0 +1,43 @@ +package client + +import ( + "strconv" + "time" +) + +const ( + nanoSecondsPerSecond = 1000000000 + nanosInTick = 100 + ticksPerSecond = nanoSecondsPerSecond / nanosInTick +) + +// ParseTicks parses dates represented as Active Directory LargeInts into times. +// Not all time fields are represented this way, +// so be sure to test that your particular time returns expected results. +// Some time fields represented as LargeInts include accountExpires, lastLogon, lastLogonTimestamp, and pwdLastSet. +// More: https://social.technet.microsoft.com/wiki/contents/articles/31135.active-directory-large-integer-attributes.aspx +func ParseTicks(ticks string) (time.Time, error) { + i, err := strconv.ParseInt(ticks, 10, 64) + if err != nil { + return time.Time{}, err + } + return TicksToTime(i), nil +} + +// TicksToTime converts an ActiveDirectory time in ticks to a time. +// This algorithm is summarized as: +// +// Many dates are saved in Active Directory as Large Integer values. +// These attributes represent dates as the number of 100-nanosecond intervals since 12:00 AM January 1, 1601. +// 100-nanosecond intervals, equal to 0.0000001 seconds, are also called ticks. +// Dates in Active Directory are always saved in Coordinated Universal Time, or UTC. 
+// More: https://social.technet.microsoft.com/wiki/contents/articles/31135.active-directory-large-integer-attributes.aspx +// +// If we directly follow the above algorithm we encounter time.Duration limits of 290 years and int overflow issues. +// Thus below, we carefully sidestep those. +func TicksToTime(ticks int64) time.Time { + origin := time.Date(1601, time.January, 1, 0, 0, 0, 0, time.UTC).Unix() + secondsSinceOrigin := ticks / ticksPerSecond + remainingNanoseconds := ticks % ticksPerSecond * 100 + return time.Unix(origin+secondsSinceOrigin, remainingNanoseconds).UTC() +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/engineconf.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/engineconf.go new file mode 100644 index 000000000..8e84807d0 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/engineconf.go @@ -0,0 +1,10 @@ +package plugin + +import ( + "github.com/hashicorp/vault/helper/ldaputil" +) + +type configuration struct { + PasswordConf *passwordConf + ADConf *ldaputil.ConfigEntry +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/passwordconf.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/passwordconf.go new file mode 100644 index 000000000..83ebcaed5 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/passwordconf.go @@ -0,0 +1,15 @@ +package plugin + +type passwordConf struct { + TTL int `json:"ttl"` + MaxTTL int `json:"max_ttl"` + Length int `json:"length"` +} + +func (c *passwordConf) Map() map[string]interface{} { + return map[string]interface{}{ + "ttl": c.TTL, + "max_ttl": c.MaxTTL, + "length": c.Length, + } +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_config.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_config.go new file mode 100644 index 000000000..2df419b61 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_config.go @@ 
-0,0 +1,201 @@ +package plugin + +import ( + "context" + "errors" + "fmt" + + "github.com/hashicorp/vault-plugin-secrets-ad/plugin/util" + "github.com/hashicorp/vault/helper/ldaputil" + "github.com/hashicorp/vault/logical" + "github.com/hashicorp/vault/logical/framework" +) + +const ( + configPath = "config" + configStorageKey = "config" + + // This length is arbitrarily chosen but should work for + // most Active Directory minimum and maximum length settings. + // A bit tongue-in-cheek since programmers love their base-2 exponents. + defaultPasswordLength = 64 + + defaultTLSVersion = "tls12" +) + +func (b *backend) readConfig(ctx context.Context, storage logical.Storage) (*configuration, error) { + entry, err := storage.Get(ctx, configStorageKey) + if err != nil { + return nil, err + } + if entry == nil { + return nil, nil + } + config := &configuration{&passwordConf{}, &ldaputil.ConfigEntry{}} + if err := entry.DecodeJSON(config); err != nil { + return nil, err + } + return config, nil +} + +func (b *backend) pathConfig() *framework.Path { + return &framework.Path{ + Pattern: configPath, + Fields: b.configFields(), + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.configUpdateOperation, + logical.ReadOperation: b.configReadOperation, + logical.DeleteOperation: b.configDeleteOperation, + }, + HelpSynopsis: configHelpSynopsis, + HelpDescription: configHelpDescription, + } +} + +func (b *backend) configFields() map[string]*framework.FieldSchema { + fields := ldaputil.ConfigFields() + fields["ttl"] = &framework.FieldSchema{ + Type: framework.TypeDurationSecond, + Description: "In seconds, the default password time-to-live.", + } + fields["max_ttl"] = &framework.FieldSchema{ + Type: framework.TypeDurationSecond, + Description: "In seconds, the maximum password time-to-live.", + } + fields["length"] = &framework.FieldSchema{ + Type: framework.TypeInt, + Default: defaultPasswordLength, + Description: "The desired length of 
passwords that Vault generates.", + } + return fields +} + +func (b *backend) configUpdateOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) { + // Build and validate the ldap conf. + activeDirectoryConf, err := ldaputil.NewConfigEntry(fieldData) + if err != nil { + return nil, err + } + if err := activeDirectoryConf.Validate(); err != nil { + return nil, err + } + + // Build the password conf. + ttl := fieldData.Get("ttl").(int) + maxTTL := fieldData.Get("max_ttl").(int) + length := fieldData.Get("length").(int) + + if ttl == 0 { + ttl = int(b.System().DefaultLeaseTTL().Seconds()) + } + if maxTTL == 0 { + maxTTL = int(b.System().MaxLeaseTTL().Seconds()) + } + if ttl > maxTTL { + return nil, errors.New("ttl must be smaller than or equal to max_ttl") + } + if ttl < 1 { + return nil, errors.New("ttl must be positive") + } + if maxTTL < 1 { + return nil, errors.New("max_ttl must be positive") + } + if length < util.MinimumPasswordLength { + return nil, fmt.Errorf("minimum password length is %d for sufficient complexity to be secure, though Vault recommends a higher length", util.MinimumPasswordLength) + } + passwordConf := &passwordConf{ + TTL: ttl, + MaxTTL: maxTTL, + Length: length, + } + + config := &configuration{passwordConf, activeDirectoryConf} + entry, err := logical.StorageEntryJSON(configStorageKey, config) + if err != nil { + return nil, err + } + if err := req.Storage.Put(ctx, entry); err != nil { + return nil, err + } + + // Respond with a 204. + return nil, nil +} + +func (b *backend) configReadOperation(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) { + config, err := b.readConfig(ctx, req.Storage) + if err != nil { + return nil, err + } + if config == nil { + return nil, nil + } + + // NOTE: + // "password" is intentionally not returned by this endpoint, + // as we lean away from returning sensitive information unless it's absolutely necessary. 
+ // Also, we don't return the full ADConf here because not all parameters are used by this engine. + configMap := map[string]interface{}{ + "url": config.ADConf.Url, + "starttls": config.ADConf.StartTLS, + "insecure_tls": config.ADConf.InsecureTLS, + "certificate": config.ADConf.Certificate, + "binddn": config.ADConf.BindDN, + "userdn": config.ADConf.UserDN, + "upndomain": config.ADConf.UPNDomain, + "tls_min_version": config.ADConf.TLSMinVersion, + "tls_max_version": config.ADConf.TLSMaxVersion, + } + for k, v := range config.PasswordConf.Map() { + configMap[k] = v + } + + resp := &logical.Response{ + Data: configMap, + } + return resp, nil +} + +func (b *backend) configDeleteOperation(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) { + if err := req.Storage.Delete(ctx, configStorageKey); err != nil { + return nil, err + } + return nil, nil +} + +const ( + configHelpSynopsis = ` +Configure the AD server to connect to, along with password options. +` + configHelpDescription = ` +This endpoint allows you to configure the AD server to connect to and its +configuration options. When you add, update, or delete a config, it takes +immediate effect on all subsequent actions. It does not apply itself to roles +or creds added in the past. + +The AD URL can use either the "ldap://" or "ldaps://" schema. In the former +case, an unencrypted connection will be made with a default port of 389, unless +the "starttls" parameter is set to true, in which case TLS will be used. In the +latter case, a SSL connection will be established with a default port of 636. + +## A NOTE ON ESCAPING + +It is up to the administrator to provide properly escaped DNs. This includes +the user DN, bind DN for search, and so on. + +The only DN escaping performed by this backend is on usernames given at login +time when they are inserted into the final bind DN, and uses escaping rules +defined in RFC 4514. 
+ +Additionally, Active Directory has escaping rules that differ slightly from the +RFC; in particular it requires escaping of '#' regardless of position in the DN +(the RFC only requires it to be escaped when it is the first character), and +'=', which the RFC indicates can be escaped with a backslash, but does not +contain in its set of required escapes. If you are using Active Directory and +these appear in your usernames, please ensure that they are escaped, in +addition to being properly escaped in your configured DNs. + +For reference, see https://www.ietf.org/rfc/rfc4514.txt and +http://social.technet.microsoft.com/wiki/contents/articles/5312.active-directory-characters-to-escape.aspx +` +) diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_creds.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_creds.go new file mode 100644 index 000000000..fbb284e4f --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_creds.go @@ -0,0 +1,216 @@ +package plugin + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/go-errors/errors" + "github.com/hashicorp/vault-plugin-secrets-ad/plugin/util" + "github.com/hashicorp/vault/logical" + "github.com/hashicorp/vault/logical/framework" +) + +const ( + credPrefix = "creds/" + storageKey = "creds" + + // Since Active Directory offers eventual consistency, in testing we found that sometimes + // Active Directory returned "password last set" times that were _later_ than our own, + // even though ours were captured after synchronously completing a password update operation. + // + // An example we captured was: + // last_vault_rotation 2018-04-18T22:29:57.385454779Z + // password_last_set 2018-04-18T22:29:57.3902786Z + // + // Thus we add a short time buffer when checking whether anyone _else_ updated the AD password + // since Vault last rotated it. 
+ passwordLastSetBuffer = time.Second + + // Since password TTL can be set to as low as 1 second, + // we can't cache passwords for an entire second. + credCacheCleanup = time.Second / 3 + credCacheExpiration = time.Second / 2 +) + +// deleteCred fulfills the DeleteWatcher interface in roles. +// It allows the roleHandler to let us know when a role's been deleted so we can delete its associated creds too. +func (b *backend) deleteCred(ctx context.Context, storage logical.Storage, roleName string) error { + if err := storage.Delete(ctx, storageKey+"/"+roleName); err != nil { + return err + } + b.credCache.Delete(roleName) + return nil +} + +func (b *backend) invalidateCred(ctx context.Context, key string) { + if strings.HasPrefix(key, credPrefix) { + roleName := key[len(credPrefix):] + b.credCache.Delete(roleName) + } +} + +func (b *backend) pathCreds() *framework.Path { + return &framework.Path{ + Pattern: credPrefix + framework.GenericNameRegex("name"), + Fields: map[string]*framework.FieldSchema{ + "name": { + Type: framework.TypeString, + Description: "Name of the role", + }, + }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ReadOperation: b.credReadOperation, + }, + HelpSynopsis: credHelpSynopsis, + HelpDescription: credHelpDescription, + } +} + +func (b *backend) credReadOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) { + cred := make(map[string]interface{}) + + roleName := fieldData.Get("name").(string) + + // We act upon quite a few things below that could be racy if not locked: + // - Roles. If a new cred is created, the role is updated to include the new LastVaultRotation time, + // effecting role storage (and the role cache, but that's already thread-safe). + // - Creds. New creds involve writing to cred storage and the cred cache (also already thread-safe). 
+ // Rather than setting read locks of different types, and upgrading them to write locks, let's keep complexity + // low and use one simple mutex. + b.credLock.Lock() + defer b.credLock.Unlock() + + role, err := b.readRole(ctx, req.Storage, roleName) + if err != nil { + return nil, err + } + if role == nil { + return nil, nil + } + + var resp *logical.Response + var respErr error + var unset time.Time + + switch { + + case role.LastVaultRotation == unset: + // We've never managed this cred before. + // We need to rotate the password so Vault will know it. + resp, respErr = b.generateAndReturnCreds(ctx, req.Storage, roleName, role, cred) + + case role.PasswordLastSet.After(role.LastVaultRotation.Add(passwordLastSetBuffer)): + // Someone has manually rotated the password in Active Directory since we last rolled it. + // We need to rotate it now so Vault will know it and be able to return it. + resp, respErr = b.generateAndReturnCreds(ctx, req.Storage, roleName, role, cred) + + default: + // Since we should know the last password, let's retrieve it now so we can return it with the new one. + credIfc, found := b.credCache.Get(roleName) + if found { + cred = credIfc.(map[string]interface{}) + } else { + entry, err := req.Storage.Get(ctx, storageKey+"/"+roleName) + if err != nil { + return nil, err + } + if entry == nil { + // If the creds aren't in storage, but roles are and we've created creds before, + // this is an unexpected state and something has gone wrong. + // Let's be explicit and error about this. + return nil, fmt.Errorf("should have the creds for %+v but they're not found", role) + } + if err := entry.DecodeJSON(&cred); err != nil { + return nil, err + } + b.credCache.SetDefault(roleName, cred) + } + + // Is the password too old? + // If so, time for a new one! 
+		now := time.Now().UTC()
+		shouldBeRolled := role.LastVaultRotation.Add(time.Duration(role.TTL) * time.Second) // already in UTC
+		if now.After(shouldBeRolled) {
+			resp, respErr = b.generateAndReturnCreds(ctx, req.Storage, roleName, role, cred)
+		} else {
+			resp = &logical.Response{
+				Data: cred,
+			}
+		}
+	}
+	if respErr != nil {
+		return nil, respErr
+	}
+	if resp == nil {
+		return nil, nil
+	}
+	return resp, nil
+}
+
+func (b *backend) generateAndReturnCreds(ctx context.Context, storage logical.Storage, roleName string, role *backendRole, previousCred map[string]interface{}) (*logical.Response, error) {
+	engineConf, err := b.readConfig(ctx, storage)
+	if err != nil {
+		return nil, err
+	}
+	if engineConf == nil {
+		return nil, errors.New("the config is currently unset")
+	}
+
+	newPassword, err := util.GeneratePassword(engineConf.PasswordConf.Length)
+	if err != nil {
+		return nil, err
+	}
+
+	if err := b.client.UpdatePassword(engineConf.ADConf, role.ServiceAccountName, newPassword); err != nil {
+		return nil, err
+	}
+
+	// Time recorded is in UTC for easier user comparison to AD's last rotated time, which is set to UTC by Microsoft.
+	role.LastVaultRotation = time.Now().UTC()
+	if err := b.writeRole(ctx, storage, roleName, role); err != nil {
+		return nil, err
+	}
+
+	// Although a service account name is typically my_app@example.com,
+	// the username it uses is just my_app, or everything before the @.
+	var username string
+	fields := strings.Split(role.ServiceAccountName, "@")
+	if len(fields) > 0 {
+		username = fields[0]
+	} else {
+		return nil, fmt.Errorf("unable to infer username from service account name: %s", role.ServiceAccountName)
+	}
+
+	cred := map[string]interface{}{
+		"username":         username,
+		"current_password": newPassword,
+	}
+	if previousCred["current_password"] != nil {
+		cred["last_password"] = previousCred["current_password"]
+	}
+
+	// Cache and save the cred.
+ entry, err := logical.StorageEntryJSON(storageKey+"/"+roleName, cred) + if err != nil { + return nil, err + } + if err := storage.Put(ctx, entry); err != nil { + return nil, err + } + b.credCache.SetDefault(roleName, cred) + + return &logical.Response{ + Data: cred, + }, nil +} + +const ( + credHelpSynopsis = ` +Retrieve a role's creds by role name. +` + credHelpDescription = ` +Read creds using a role's name to view the login, current password, and last password. +` +) diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_roles.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_roles.go new file mode 100644 index 000000000..e57b89857 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/path_roles.go @@ -0,0 +1,254 @@ +package plugin + +import ( + "context" + "fmt" + "strings" + "time" + + "github.com/go-errors/errors" + "github.com/hashicorp/vault/logical" + "github.com/hashicorp/vault/logical/framework" +) + +const ( + rolePath = "roles" + rolePrefix = "roles/" + roleStorageKey = "roles" + + roleCacheCleanup = time.Second / 2 + roleCacheExpiration = time.Second +) + +func (b *backend) invalidateRole(ctx context.Context, key string) { + if strings.HasPrefix(key, rolePrefix) { + roleName := key[len(rolePrefix):] + b.roleCache.Delete(roleName) + } +} + +func (b *backend) pathListRoles() *framework.Path { + return &framework.Path{ + Pattern: rolePrefix + "?$", + + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.ListOperation: b.roleListOperation, + }, + + HelpSynopsis: pathListRolesHelpSyn, + HelpDescription: pathListRolesHelpDesc, + } +} + +func (b *backend) pathRoles() *framework.Path { + return &framework.Path{ + Pattern: rolePrefix + framework.GenericNameRegex("name"), + Fields: map[string]*framework.FieldSchema{ + "name": { + Type: framework.TypeString, + Description: "Name of the role", + }, + "service_account_name": { + Type: framework.TypeString, + Description: "The 
username/logon name for the service account with which this role will be associated.", + }, + "ttl": { + Type: framework.TypeDurationSecond, + Description: "In seconds, the default password time-to-live.", + }, + }, + Callbacks: map[logical.Operation]framework.OperationFunc{ + logical.UpdateOperation: b.roleUpdateOperation, + logical.ReadOperation: b.roleReadOperation, + logical.DeleteOperation: b.roleDeleteOperation, + }, + HelpSynopsis: roleHelpSynopsis, + HelpDescription: roleHelpDescription, + } +} + +func (b *backend) readRole(ctx context.Context, storage logical.Storage, roleName string) (*backendRole, error) { + // If it's cached, return it from there. + roleIfc, found := b.roleCache.Get(roleName) + if found { + return roleIfc.(*backendRole), nil + } + + // It's not, read it from storage. + entry, err := storage.Get(ctx, roleStorageKey+"/"+roleName) + if err != nil { + return nil, err + } + if entry == nil { + return nil, nil + } + + role := &backendRole{} + if err := entry.DecodeJSON(role); err != nil { + return nil, err + } + + // Always check when ActiveDirectory shows the password as last set on the fly. + engineConf, err := b.readConfig(ctx, storage) + if err != nil { + return nil, err + } + if engineConf == nil { + return nil, errors.New("the config is currently unset") + } + + passwordLastSet, err := b.client.GetPasswordLastSet(engineConf.ADConf, role.ServiceAccountName) + if err != nil { + return nil, err + } + role.PasswordLastSet = passwordLastSet + + // Cache it. 
+ b.roleCache.SetDefault(roleName, role) + return role, nil +} + +func (b *backend) writeRole(ctx context.Context, storage logical.Storage, roleName string, role *backendRole) error { + entry, err := logical.StorageEntryJSON(roleStorageKey+"/"+roleName, role) + if err != nil { + return err + } + if err := storage.Put(ctx, entry); err != nil { + return err + } + b.roleCache.SetDefault(roleName, role) + return nil +} + +func (b *backend) roleUpdateOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) { + // Get everything we need to construct the role. + roleName := fieldData.Get("name").(string) + + engineConf, err := b.readConfig(ctx, req.Storage) + if err != nil { + return nil, err + } + if engineConf == nil { + return nil, errors.New("the config is currently unset") + } + + // Actually construct it. + serviceAccountName, err := getServiceAccountName(fieldData) + if err != nil { + return nil, err + } + + // verify service account exists + _, err = b.client.Get(engineConf.ADConf, serviceAccountName) + if err != nil { + return nil, err + } + + ttl, err := getValidatedTTL(engineConf.PasswordConf, fieldData) + if err != nil { + return nil, err + } + role := &backendRole{ + ServiceAccountName: serviceAccountName, + TTL: ttl, + } + + // Was there already a role before that we're now overwriting? If so, let's carry forward the LastVaultRotation. + oldRole, err := b.readRole(ctx, req.Storage, roleName) + if err != nil { + return nil, err + } else { + if oldRole != nil { + role.LastVaultRotation = oldRole.LastVaultRotation + } + } + + // writeRole it to storage and the roleCache. + if err := b.writeRole(ctx, req.Storage, roleName, role); err != nil { + return nil, err + } + + // Return a 204. 
+ return nil, nil +} + +func (b *backend) roleReadOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) { + roleName := fieldData.Get("name").(string) + + role, err := b.readRole(ctx, req.Storage, roleName) + if err != nil { + return nil, err + } + if role == nil { + return nil, nil + } + + return &logical.Response{ + Data: role.Map(), + }, nil +} + +func (b *backend) roleListOperation(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) { + keys, err := req.Storage.List(ctx, roleStorageKey+"/") + if err != nil { + return nil, err + } + return logical.ListResponse(keys), nil +} + +func (b *backend) roleDeleteOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) { + roleName := fieldData.Get("name").(string) + + if err := req.Storage.Delete(ctx, roleStorageKey+"/"+roleName); err != nil { + return nil, err + } + + b.roleCache.Delete(roleName) + + if err := b.deleteCred(ctx, req.Storage, roleName); err != nil { + return nil, err + } + return nil, nil +} + +func getServiceAccountName(fieldData *framework.FieldData) (string, error) { + serviceAccountName := fieldData.Get("service_account_name").(string) + if serviceAccountName == "" { + return "", errors.New("\"service_account_name\" is required") + } + return serviceAccountName, nil +} + +func getValidatedTTL(passwordConf *passwordConf, fieldData *framework.FieldData) (int, error) { + ttl := fieldData.Get("ttl").(int) + if ttl == 0 { + ttl = passwordConf.TTL + } + if ttl > passwordConf.MaxTTL { + return 0, fmt.Errorf("requested ttl of %d seconds is over the max ttl of %d seconds", ttl, passwordConf.MaxTTL) + } + if ttl < 0 { + return 0, fmt.Errorf("ttl can't be negative") + } + return ttl, nil +} + +const ( + roleHelpSynopsis = ` +Manage roles to build links between Vault and Active Directory service accounts. 
+` + roleHelpDescription = ` +This endpoint allows you to read, write, and delete individual roles that are used for enabling password rotation. + +Deleting a role will not disable its current password. It will delete the role's associated creds in Vault. +` + + pathListRolesHelpSyn = ` +List the name of each role currently stored. +` + pathListRolesHelpDesc = ` +To learn which service accounts are being managed by Vault, list the role names using +this endpoint. Then read any individual role by name to learn more, like the name of +the service account it's associated with. +` +) diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/role.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/role.go new file mode 100644 index 000000000..8f653d7b7 --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/role.go @@ -0,0 +1,28 @@ +package plugin + +import ( + "time" +) + +type backendRole struct { + ServiceAccountName string `json:"service_account_name"` + TTL int `json:"ttl"` + LastVaultRotation time.Time `json:"last_vault_rotation"` + PasswordLastSet time.Time `json:"password_last_set"` +} + +func (r *backendRole) Map() map[string]interface{} { + m := map[string]interface{}{ + "service_account_name": r.ServiceAccountName, + "ttl": r.TTL, + } + + var unset time.Time + if r.LastVaultRotation != unset { + m["last_vault_rotation"] = r.LastVaultRotation + } + if r.PasswordLastSet != unset { + m["password_last_set"] = r.PasswordLastSet + } + return m +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/passwords.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/passwords.go new file mode 100644 index 000000000..a6a4e872d --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/passwords.go @@ -0,0 +1,38 @@ +package util + +import ( + "encoding/base64" + "fmt" + + "github.com/hashicorp/go-uuid" +) + +var ( + // Per 
https://en.wikipedia.org/wiki/Password_strength#Guidelines_for_strong_passwords + minimumLengthOfComplexString = 8 + + PasswordComplexityPrefix = "?@09AZ" + MinimumPasswordLength = len(PasswordComplexityPrefix) + minimumLengthOfComplexString +) + +func GeneratePassword(desiredLength int) (string, error) { + if desiredLength < MinimumPasswordLength { + return "", fmt.Errorf("it's not possible to generate a _secure_ password of length %d, please boost length to %d, though Vault recommends higher", desiredLength, MinimumPasswordLength) + } + + b, err := uuid.GenerateRandomBytes(desiredLength) + if err != nil { + return "", err + } + + result := "" + // Though the result should immediately be longer than the desiredLength, + // do this in a loop to ensure there's absolutely no risk of a panic when slicing it down later. + for len(result) <= desiredLength { + // Encode to base64 because it's more complex. + result += base64.StdEncoding.EncodeToString(b) + } + + result = PasswordComplexityPrefix + result + return result[:desiredLength], nil +} diff --git a/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/secrets_client.go b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/secrets_client.go new file mode 100644 index 000000000..cc5d9693d --- /dev/null +++ b/vendor/github.com/hashicorp/vault-plugin-secrets-ad/plugin/util/secrets_client.go @@ -0,0 +1,73 @@ +package util + +import ( + "fmt" + "time" + + "github.com/hashicorp/go-hclog" + "github.com/hashicorp/vault-plugin-secrets-ad/plugin/client" + "github.com/hashicorp/vault/helper/ldaputil" +) + +func NewSecretsClient(logger hclog.Logger) *SecretsClient { + return &SecretsClient{adClient: client.NewClient(logger)} +} + +// SecretsClient wraps a *activeDirectory.activeDirectoryClient to expose just the common convenience methods needed by the ad secrets backend. 
+type SecretsClient struct { + adClient *client.Client +} + +func (c *SecretsClient) Get(conf *ldaputil.ConfigEntry, serviceAccountName string) (*client.Entry, error) { + filters := map[*client.Field][]string{ + client.FieldRegistry.UserPrincipalName: {serviceAccountName}, + } + + entries, err := c.adClient.Search(conf, filters) + if err != nil { + return nil, err + } + + if len(entries) == 0 { + return nil, fmt.Errorf("unable to find service account named %s in active directory, searches are case sensitive", serviceAccountName) + } + if len(entries) > 1 { + return nil, fmt.Errorf("expected one matching service account, but received %s", entries) + } + return entries[0], nil +} + +func (c *SecretsClient) GetPasswordLastSet(conf *ldaputil.ConfigEntry, serviceAccountName string) (time.Time, error) { + entry, err := c.Get(conf, serviceAccountName) + if err != nil { + return time.Time{}, err + } + + values, found := entry.Get(client.FieldRegistry.PasswordLastSet) + if !found { + return time.Time{}, fmt.Errorf("%+v lacks a PasswordLastSet field", entry) + } + + if len(values) != 1 { + return time.Time{}, fmt.Errorf("expected only one value for PasswordLastSet, but received %s", values) + } + + ticks := values[0] + if ticks == "0" { + // password has never been rolled in Active Directory, only created + return time.Time{}, nil + } + + t, err := client.ParseTicks(ticks) + if err != nil { + return time.Time{}, err + } + return t, nil +} + +func (c *SecretsClient) UpdatePassword(conf *ldaputil.ConfigEntry, serviceAccountName string, newPassword string) error { + filters := map[*client.Field][]string{ + client.FieldRegistry.UserPrincipalName: {serviceAccountName}, + } + return c.adClient.UpdatePassword(conf, filters, newPassword) +} diff --git a/vendor/golang.org/x/text/encoding/encoding.go b/vendor/golang.org/x/text/encoding/encoding.go new file mode 100644 index 000000000..221f175c0 --- /dev/null +++ b/vendor/golang.org/x/text/encoding/encoding.go @@ -0,0 +1,335 @@ +// 
Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package encoding defines an interface for character encodings, such as Shift +// JIS and Windows 1252, that can convert to and from UTF-8. +// +// Encoding implementations are provided in other packages, such as +// golang.org/x/text/encoding/charmap and +// golang.org/x/text/encoding/japanese. +package encoding // import "golang.org/x/text/encoding" + +import ( + "errors" + "io" + "strconv" + "unicode/utf8" + + "golang.org/x/text/encoding/internal/identifier" + "golang.org/x/text/transform" +) + +// TODO: +// - There seems to be some inconsistency in when decoders return errors +// and when not. Also documentation seems to suggest they shouldn't return +// errors at all (except for UTF-16). +// - Encoders seem to rely on or at least benefit from the input being in NFC +// normal form. Perhaps add an example how users could prepare their output. + +// Encoding is a character set encoding that can be transformed to and from +// UTF-8. +type Encoding interface { + // NewDecoder returns a Decoder. + NewDecoder() *Decoder + + // NewEncoder returns an Encoder. + NewEncoder() *Encoder +} + +// A Decoder converts bytes to UTF-8. It implements transform.Transformer. +// +// Transforming source bytes that are not of that encoding will not result in an +// error per se. Each byte that cannot be transcoded will be represented in the +// output by the UTF-8 encoding of '\uFFFD', the replacement rune. +type Decoder struct { + transform.Transformer + + // This forces external creators of Decoders to use names in struct + // initializers, allowing for future extendibility without having to break + // code. + _ struct{} +} + +// Bytes converts the given encoded bytes to UTF-8. It returns the converted +// bytes or nil, err if any error occurred. 
+func (d *Decoder) Bytes(b []byte) ([]byte, error) { + b, _, err := transform.Bytes(d, b) + if err != nil { + return nil, err + } + return b, nil +} + +// String converts the given encoded string to UTF-8. It returns the converted +// string or "", err if any error occurred. +func (d *Decoder) String(s string) (string, error) { + s, _, err := transform.String(d, s) + if err != nil { + return "", err + } + return s, nil +} + +// Reader wraps another Reader to decode its bytes. +// +// The Decoder may not be used for any other operation as long as the returned +// Reader is in use. +func (d *Decoder) Reader(r io.Reader) io.Reader { + return transform.NewReader(r, d) +} + +// An Encoder converts bytes from UTF-8. It implements transform.Transformer. +// +// Each rune that cannot be transcoded will result in an error. In this case, +// the transform will consume all source byte up to, not including the offending +// rune. Transforming source bytes that are not valid UTF-8 will be replaced by +// `\uFFFD`. To return early with an error instead, use transform.Chain to +// preprocess the data with a UTF8Validator. +type Encoder struct { + transform.Transformer + + // This forces external creators of Encoders to use names in struct + // initializers, allowing for future extendibility without having to break + // code. + _ struct{} +} + +// Bytes converts bytes from UTF-8. It returns the converted bytes or nil, err if +// any error occurred. +func (e *Encoder) Bytes(b []byte) ([]byte, error) { + b, _, err := transform.Bytes(e, b) + if err != nil { + return nil, err + } + return b, nil +} + +// String converts a string from UTF-8. It returns the converted string or +// "", err if any error occurred. +func (e *Encoder) String(s string) (string, error) { + s, _, err := transform.String(e, s) + if err != nil { + return "", err + } + return s, nil +} + +// Writer wraps another Writer to encode its UTF-8 output. 
+// +// The Encoder may not be used for any other operation as long as the returned +// Writer is in use. +func (e *Encoder) Writer(w io.Writer) io.Writer { + return transform.NewWriter(w, e) +} + +// ASCIISub is the ASCII substitute character, as recommended by +// http://unicode.org/reports/tr36/#Text_Comparison +const ASCIISub = '\x1a' + +// Nop is the nop encoding. Its transformed bytes are the same as the source +// bytes; it does not replace invalid UTF-8 sequences. +var Nop Encoding = nop{} + +type nop struct{} + +func (nop) NewDecoder() *Decoder { + return &Decoder{Transformer: transform.Nop} +} +func (nop) NewEncoder() *Encoder { + return &Encoder{Transformer: transform.Nop} +} + +// Replacement is the replacement encoding. Decoding from the replacement +// encoding yields a single '\uFFFD' replacement rune. Encoding from UTF-8 to +// the replacement encoding yields the same as the source bytes except that +// invalid UTF-8 is converted to '\uFFFD'. +// +// It is defined at http://encoding.spec.whatwg.org/#replacement +var Replacement Encoding = replacement{} + +type replacement struct{} + +func (replacement) NewDecoder() *Decoder { + return &Decoder{Transformer: replacementDecoder{}} +} + +func (replacement) NewEncoder() *Encoder { + return &Encoder{Transformer: replacementEncoder{}} +} + +func (replacement) ID() (mib identifier.MIB, other string) { + return identifier.Replacement, "" +} + +type replacementDecoder struct{ transform.NopResetter } + +func (replacementDecoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + if len(dst) < 3 { + return 0, 0, transform.ErrShortDst + } + if atEOF { + const fffd = "\ufffd" + dst[0] = fffd[0] + dst[1] = fffd[1] + dst[2] = fffd[2] + nDst = 3 + } + return nDst, len(src), nil +} + +type replacementEncoder struct{ transform.NopResetter } + +func (replacementEncoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + r, size := rune(0), 0 + + for ; nSrc < len(src); nSrc += 
size { + r = rune(src[nSrc]) + + // Decode a 1-byte rune. + if r < utf8.RuneSelf { + size = 1 + + } else { + // Decode a multi-byte rune. + r, size = utf8.DecodeRune(src[nSrc:]) + if size == 1 { + // All valid runes of size 1 (those below utf8.RuneSelf) were + // handled above. We have invalid UTF-8 or we haven't seen the + // full character yet. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + r = '\ufffd' + } + } + + if nDst+utf8.RuneLen(r) > len(dst) { + err = transform.ErrShortDst + break + } + nDst += utf8.EncodeRune(dst[nDst:], r) + } + return nDst, nSrc, err +} + +// HTMLEscapeUnsupported wraps encoders to replace source runes outside the +// repertoire of the destination encoding with HTML escape sequences. +// +// This wrapper exists to comply to URL and HTML forms requiring a +// non-terminating legacy encoder. The produced sequences may lead to data +// loss as they are indistinguishable from legitimate input. To avoid this +// issue, use UTF-8 encodings whenever possible. +func HTMLEscapeUnsupported(e *Encoder) *Encoder { + return &Encoder{Transformer: &errorHandler{e, errorToHTML}} +} + +// ReplaceUnsupported wraps encoders to replace source runes outside the +// repertoire of the destination encoding with an encoding-specific +// replacement. +// +// This wrapper is only provided for backwards compatibility and legacy +// handling. Its use is strongly discouraged. Use UTF-8 whenever possible. +func ReplaceUnsupported(e *Encoder) *Encoder { + return &Encoder{Transformer: &errorHandler{e, errorToReplacement}} +} + +type errorHandler struct { + *Encoder + handler func(dst []byte, r rune, err repertoireError) (n int, ok bool) +} + +// TODO: consider making this error public in some form. 
+type repertoireError interface { + Replacement() byte +} + +func (h errorHandler) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + nDst, nSrc, err = h.Transformer.Transform(dst, src, atEOF) + for err != nil { + rerr, ok := err.(repertoireError) + if !ok { + return nDst, nSrc, err + } + r, sz := utf8.DecodeRune(src[nSrc:]) + n, ok := h.handler(dst[nDst:], r, rerr) + if !ok { + return nDst, nSrc, transform.ErrShortDst + } + err = nil + nDst += n + if nSrc += sz; nSrc < len(src) { + var dn, sn int + dn, sn, err = h.Transformer.Transform(dst[nDst:], src[nSrc:], atEOF) + nDst += dn + nSrc += sn + } + } + return nDst, nSrc, err +} + +func errorToHTML(dst []byte, r rune, err repertoireError) (n int, ok bool) { + buf := [8]byte{} + b := strconv.AppendUint(buf[:0], uint64(r), 10) + if n = len(b) + len("&#;"); n >= len(dst) { + return 0, false + } + dst[0] = '&' + dst[1] = '#' + dst[copy(dst[2:], b)+2] = ';' + return n, true +} + +func errorToReplacement(dst []byte, r rune, err repertoireError) (n int, ok bool) { + if len(dst) == 0 { + return 0, false + } + dst[0] = err.Replacement() + return 1, true +} + +// ErrInvalidUTF8 means that a transformer encountered invalid UTF-8. +var ErrInvalidUTF8 = errors.New("encoding: invalid UTF-8") + +// UTF8Validator is a transformer that returns ErrInvalidUTF8 on the first +// input byte that is not valid UTF-8. +var UTF8Validator transform.Transformer = utf8Validator{} + +type utf8Validator struct{ transform.NopResetter } + +func (utf8Validator) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + n := len(src) + if n > len(dst) { + n = len(dst) + } + for i := 0; i < n; { + if c := src[i]; c < utf8.RuneSelf { + dst[i] = c + i++ + continue + } + _, size := utf8.DecodeRune(src[i:]) + if size == 1 { + // All valid runes of size 1 (those below utf8.RuneSelf) were + // handled above. We have invalid UTF-8 or we haven't seen the + // full character yet. 
+ err = ErrInvalidUTF8 + if !atEOF && !utf8.FullRune(src[i:]) { + err = transform.ErrShortSrc + } + return i, i, err + } + if i+size > len(dst) { + return i, i, transform.ErrShortDst + } + for ; size > 0; size-- { + dst[i] = src[i] + i++ + } + } + if len(src) > len(dst) { + err = transform.ErrShortDst + } + return n, n, err +} diff --git a/vendor/golang.org/x/text/encoding/internal/identifier/identifier.go b/vendor/golang.org/x/text/encoding/internal/identifier/identifier.go new file mode 100644 index 000000000..7351b4ef8 --- /dev/null +++ b/vendor/golang.org/x/text/encoding/internal/identifier/identifier.go @@ -0,0 +1,81 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +//go:generate go run gen.go + +// Package identifier defines the contract between implementations of Encoding +// and Index by defining identifiers that uniquely identify standardized coded +// character sets (CCS) and character encoding schemes (CES), which we will +// together refer to as encodings, for which Encoding implementations provide +// converters to and from UTF-8. This package is typically only of concern to +// implementers of Indexes and Encodings. +// +// One part of the identifier is the MIB code, which is defined by IANA and +// uniquely identifies a CCS or CES. Each code is associated with data that +// references authorities, official documentation as well as aliases and MIME +// names. +// +// Not all CESs are covered by the IANA registry. The "other" string that is +// returned by ID can be used to identify other character sets or versions of +// existing ones. +// +// It is recommended that each package that provides a set of Encodings provide +// the All and Common variables to reference all supported encodings and +// commonly used subset. 
This allows Index implementations to include all +// available encodings without explicitly referencing or knowing about them. +package identifier + +// Note: this package is internal, but could be made public if there is a need +// for writing third-party Indexes and Encodings. + +// References: +// - http://source.icu-project.org/repos/icu/icu/trunk/source/data/mappings/convrtrs.txt +// - http://www.iana.org/assignments/character-sets/character-sets.xhtml +// - http://www.iana.org/assignments/ianacharset-mib/ianacharset-mib +// - http://www.ietf.org/rfc/rfc2978.txt +// - http://www.unicode.org/reports/tr22/ +// - http://www.w3.org/TR/encoding/ +// - https://encoding.spec.whatwg.org/ +// - https://encoding.spec.whatwg.org/encodings.json +// - https://tools.ietf.org/html/rfc6657#section-5 + +// Interface can be implemented by Encodings to define the CCS or CES for which +// it implements conversions. +type Interface interface { + // ID returns an encoding identifier. Exactly one of the mib and other + // values should be non-zero. + // + // In the usual case it is only necessary to indicate the MIB code. The + // other string can be used to specify encodings for which there is no MIB, + // such as "x-mac-dingbat". + // + // The other string may only contain the characters a-z, A-Z, 0-9, - and _. + ID() (mib MIB, other string) + + // NOTE: the restrictions on the encoding are to allow extending the syntax + // with additional information such as versions, vendors and other variants. +} + +// A MIB identifies an encoding. It is derived from the IANA MIB codes and adds +// some identifiers for some encodings that are not covered by the IANA +// standard. +// +// See http://www.iana.org/assignments/ianacharset-mib. +type MIB uint16 + +// These additional MIB types are not defined in IANA. They are added because +// they are common and defined within the text repo. +const ( + // Unofficial marks the start of encodings not registered by IANA. 
+ Unofficial MIB = 10000 + iota + + // Replacement is the WhatWG replacement encoding. + Replacement + + // XUserDefined is the code for x-user-defined. + XUserDefined + + // MacintoshCyrillic is the code for x-mac-cyrillic. + MacintoshCyrillic +) diff --git a/vendor/golang.org/x/text/encoding/internal/identifier/mib.go b/vendor/golang.org/x/text/encoding/internal/identifier/mib.go new file mode 100644 index 000000000..768842b0a --- /dev/null +++ b/vendor/golang.org/x/text/encoding/internal/identifier/mib.go @@ -0,0 +1,1621 @@ +// Code generated by running "go generate" in golang.org/x/text. DO NOT EDIT. + +package identifier + +const ( + // ASCII is the MIB identifier with IANA name US-ASCII (MIME: US-ASCII). + // + // ANSI X3.4-1986 + // Reference: RFC2046 + ASCII MIB = 3 + + // ISOLatin1 is the MIB identifier with IANA name ISO_8859-1:1987 (MIME: ISO-8859-1). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatin1 MIB = 4 + + // ISOLatin2 is the MIB identifier with IANA name ISO_8859-2:1987 (MIME: ISO-8859-2). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatin2 MIB = 5 + + // ISOLatin3 is the MIB identifier with IANA name ISO_8859-3:1988 (MIME: ISO-8859-3). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatin3 MIB = 6 + + // ISOLatin4 is the MIB identifier with IANA name ISO_8859-4:1988 (MIME: ISO-8859-4). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatin4 MIB = 7 + + // ISOLatinCyrillic is the MIB identifier with IANA name ISO_8859-5:1988 (MIME: ISO-8859-5). 
+ // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatinCyrillic MIB = 8 + + // ISOLatinArabic is the MIB identifier with IANA name ISO_8859-6:1987 (MIME: ISO-8859-6). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatinArabic MIB = 9 + + // ISOLatinGreek is the MIB identifier with IANA name ISO_8859-7:1987 (MIME: ISO-8859-7). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1947 + // Reference: RFC1345 + ISOLatinGreek MIB = 10 + + // ISOLatinHebrew is the MIB identifier with IANA name ISO_8859-8:1988 (MIME: ISO-8859-8). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatinHebrew MIB = 11 + + // ISOLatin5 is the MIB identifier with IANA name ISO_8859-9:1989 (MIME: ISO-8859-9). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatin5 MIB = 12 + + // ISOLatin6 is the MIB identifier with IANA name ISO-8859-10 (MIME: ISO-8859-10). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOLatin6 MIB = 13 + + // ISOTextComm is the MIB identifier with IANA name ISO_6937-2-add. + // + // ISO-IR: International Register of Escape Sequences and ISO 6937-2:1983 + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISOTextComm MIB = 14 + + // HalfWidthKatakana is the MIB identifier with IANA name JIS_X0201. + // + // JIS X 0201-1976. 
One byte only, this is equivalent to + // JIS/Roman (similar to ASCII) plus eight-bit half-width + // Katakana + // Reference: RFC1345 + HalfWidthKatakana MIB = 15 + + // JISEncoding is the MIB identifier with IANA name JIS_Encoding. + // + // JIS X 0202-1991. Uses ISO 2022 escape sequences to + // shift code sets as documented in JIS X 0202-1991. + JISEncoding MIB = 16 + + // ShiftJIS is the MIB identifier with IANA name Shift_JIS (MIME: Shift_JIS). + // + // This charset is an extension of csHalfWidthKatakana by + // adding graphic characters in JIS X 0208. The CCS's are + // JIS X0201:1997 and JIS X0208:1997. The + // complete definition is shown in Appendix 1 of JIS + // X0208:1997. + // This charset can be used for the top-level media type "text". + ShiftJIS MIB = 17 + + // EUCPkdFmtJapanese is the MIB identifier with IANA name Extended_UNIX_Code_Packed_Format_for_Japanese (MIME: EUC-JP). + // + // Standardized by OSF, UNIX International, and UNIX Systems + // Laboratories Pacific. Uses ISO 2022 rules to select + // code set 0: US-ASCII (a single 7-bit byte set) + // code set 1: JIS X0208-1990 (a double 8-bit byte set) + // restricted to A0-FF in both bytes + // code set 2: Half Width Katakana (a single 7-bit byte set) + // requiring SS2 as the character prefix + // code set 3: JIS X0212-1990 (a double 7-bit byte set) + // restricted to A0-FF in both bytes + // requiring SS3 as the character prefix + EUCPkdFmtJapanese MIB = 18 + + // EUCFixWidJapanese is the MIB identifier with IANA name Extended_UNIX_Code_Fixed_Width_for_Japanese. + // + // Used in Japan. Each character is 2 octets. 
+ // code set 0: US-ASCII (a single 7-bit byte set) + // 1st byte = 00 + // 2nd byte = 20-7E + // code set 1: JIS X0208-1990 (a double 7-bit byte set) + // restricted to A0-FF in both bytes + // code set 2: Half Width Katakana (a single 7-bit byte set) + // 1st byte = 00 + // 2nd byte = A0-FF + // code set 3: JIS X0212-1990 (a double 7-bit byte set) + // restricted to A0-FF in + // the first byte + // and 21-7E in the second byte + EUCFixWidJapanese MIB = 19 + + // ISO4UnitedKingdom is the MIB identifier with IANA name BS_4730. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO4UnitedKingdom MIB = 20 + + // ISO11SwedishForNames is the MIB identifier with IANA name SEN_850200_C. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO11SwedishForNames MIB = 21 + + // ISO15Italian is the MIB identifier with IANA name IT. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO15Italian MIB = 22 + + // ISO17Spanish is the MIB identifier with IANA name ES. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO17Spanish MIB = 23 + + // ISO21German is the MIB identifier with IANA name DIN_66003. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO21German MIB = 24 + + // ISO60Norwegian1 is the MIB identifier with IANA name NS_4551-1. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. 
+ // Reference: RFC1345 + ISO60Norwegian1 MIB = 25 + + // ISO69French is the MIB identifier with IANA name NF_Z_62-010. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO69French MIB = 26 + + // ISO10646UTF1 is the MIB identifier with IANA name ISO-10646-UTF-1. + // + // Universal Transfer Format (1), this is the multibyte + // encoding, that subsets ASCII-7. It does not have byte + // ordering issues. + ISO10646UTF1 MIB = 27 + + // ISO646basic1983 is the MIB identifier with IANA name ISO_646.basic:1983. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO646basic1983 MIB = 28 + + // INVARIANT is the MIB identifier with IANA name INVARIANT. + // + // Reference: RFC1345 + INVARIANT MIB = 29 + + // ISO2IntlRefVersion is the MIB identifier with IANA name ISO_646.irv:1983. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO2IntlRefVersion MIB = 30 + + // NATSSEFI is the MIB identifier with IANA name NATS-SEFI. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + NATSSEFI MIB = 31 + + // NATSSEFIADD is the MIB identifier with IANA name NATS-SEFI-ADD. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + NATSSEFIADD MIB = 32 + + // NATSDANO is the MIB identifier with IANA name NATS-DANO. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + NATSDANO MIB = 33 + + // NATSDANOADD is the MIB identifier with IANA name NATS-DANO-ADD. 
+ // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + NATSDANOADD MIB = 34 + + // ISO10Swedish is the MIB identifier with IANA name SEN_850200_B. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO10Swedish MIB = 35 + + // KSC56011987 is the MIB identifier with IANA name KS_C_5601-1987. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + KSC56011987 MIB = 36 + + // ISO2022KR is the MIB identifier with IANA name ISO-2022-KR (MIME: ISO-2022-KR). + // + // rfc1557 (see also KS_C_5601-1987) + // Reference: RFC1557 + ISO2022KR MIB = 37 + + // EUCKR is the MIB identifier with IANA name EUC-KR (MIME: EUC-KR). + // + // rfc1557 (see also KS_C_5861-1992) + // Reference: RFC1557 + EUCKR MIB = 38 + + // ISO2022JP is the MIB identifier with IANA name ISO-2022-JP (MIME: ISO-2022-JP). + // + // rfc1468 (see also rfc2237 ) + // Reference: RFC1468 + ISO2022JP MIB = 39 + + // ISO2022JP2 is the MIB identifier with IANA name ISO-2022-JP-2 (MIME: ISO-2022-JP-2). + // + // rfc1554 + // Reference: RFC1554 + ISO2022JP2 MIB = 40 + + // ISO13JISC6220jp is the MIB identifier with IANA name JIS_C6220-1969-jp. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO13JISC6220jp MIB = 41 + + // ISO14JISC6220ro is the MIB identifier with IANA name JIS_C6220-1969-ro. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO14JISC6220ro MIB = 42 + + // ISO16Portuguese is the MIB identifier with IANA name PT. 
+ // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO16Portuguese MIB = 43 + + // ISO18Greek7Old is the MIB identifier with IANA name greek7-old. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO18Greek7Old MIB = 44 + + // ISO19LatinGreek is the MIB identifier with IANA name latin-greek. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO19LatinGreek MIB = 45 + + // ISO25French is the MIB identifier with IANA name NF_Z_62-010_(1973). + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO25French MIB = 46 + + // ISO27LatinGreek1 is the MIB identifier with IANA name Latin-greek-1. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO27LatinGreek1 MIB = 47 + + // ISO5427Cyrillic is the MIB identifier with IANA name ISO_5427. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO5427Cyrillic MIB = 48 + + // ISO42JISC62261978 is the MIB identifier with IANA name JIS_C6226-1978. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO42JISC62261978 MIB = 49 + + // ISO47BSViewdata is the MIB identifier with IANA name BS_viewdata. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. 
+ // Reference: RFC1345 + ISO47BSViewdata MIB = 50 + + // ISO49INIS is the MIB identifier with IANA name INIS. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO49INIS MIB = 51 + + // ISO50INIS8 is the MIB identifier with IANA name INIS-8. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO50INIS8 MIB = 52 + + // ISO51INISCyrillic is the MIB identifier with IANA name INIS-cyrillic. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO51INISCyrillic MIB = 53 + + // ISO54271981 is the MIB identifier with IANA name ISO_5427:1981. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO54271981 MIB = 54 + + // ISO5428Greek is the MIB identifier with IANA name ISO_5428:1980. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO5428Greek MIB = 55 + + // ISO57GB1988 is the MIB identifier with IANA name GB_1988-80. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO57GB1988 MIB = 56 + + // ISO58GB231280 is the MIB identifier with IANA name GB_2312-80. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO58GB231280 MIB = 57 + + // ISO61Norwegian2 is the MIB identifier with IANA name NS_4551-2. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. 
+ // Reference: RFC1345 + ISO61Norwegian2 MIB = 58 + + // ISO70VideotexSupp1 is the MIB identifier with IANA name videotex-suppl. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO70VideotexSupp1 MIB = 59 + + // ISO84Portuguese2 is the MIB identifier with IANA name PT2. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO84Portuguese2 MIB = 60 + + // ISO85Spanish2 is the MIB identifier with IANA name ES2. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO85Spanish2 MIB = 61 + + // ISO86Hungarian is the MIB identifier with IANA name MSZ_7795.3. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO86Hungarian MIB = 62 + + // ISO87JISX0208 is the MIB identifier with IANA name JIS_C6226-1983. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO87JISX0208 MIB = 63 + + // ISO88Greek7 is the MIB identifier with IANA name greek7. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO88Greek7 MIB = 64 + + // ISO89ASMO449 is the MIB identifier with IANA name ASMO_449. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO89ASMO449 MIB = 65 + + // ISO90 is the MIB identifier with IANA name iso-ir-90. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. 
+ // Reference: RFC1345 + ISO90 MIB = 66 + + // ISO91JISC62291984a is the MIB identifier with IANA name JIS_C6229-1984-a. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO91JISC62291984a MIB = 67 + + // ISO92JISC62991984b is the MIB identifier with IANA name JIS_C6229-1984-b. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO92JISC62991984b MIB = 68 + + // ISO93JIS62291984badd is the MIB identifier with IANA name JIS_C6229-1984-b-add. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO93JIS62291984badd MIB = 69 + + // ISO94JIS62291984hand is the MIB identifier with IANA name JIS_C6229-1984-hand. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO94JIS62291984hand MIB = 70 + + // ISO95JIS62291984handadd is the MIB identifier with IANA name JIS_C6229-1984-hand-add. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO95JIS62291984handadd MIB = 71 + + // ISO96JISC62291984kana is the MIB identifier with IANA name JIS_C6229-1984-kana. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO96JISC62291984kana MIB = 72 + + // ISO2033 is the MIB identifier with IANA name ISO_2033-1983. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO2033 MIB = 73 + + // ISO99NAPLPS is the MIB identifier with IANA name ANSI_X3.110-1983. 
+ // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO99NAPLPS MIB = 74 + + // ISO102T617bit is the MIB identifier with IANA name T.61-7bit. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO102T617bit MIB = 75 + + // ISO103T618bit is the MIB identifier with IANA name T.61-8bit. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO103T618bit MIB = 76 + + // ISO111ECMACyrillic is the MIB identifier with IANA name ECMA-cyrillic. + // + // ISO registry + // (formerly ECMA + // registry ) + ISO111ECMACyrillic MIB = 77 + + // ISO121Canadian1 is the MIB identifier with IANA name CSA_Z243.4-1985-1. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO121Canadian1 MIB = 78 + + // ISO122Canadian2 is the MIB identifier with IANA name CSA_Z243.4-1985-2. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO122Canadian2 MIB = 79 + + // ISO123CSAZ24341985gr is the MIB identifier with IANA name CSA_Z243.4-1985-gr. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO123CSAZ24341985gr MIB = 80 + + // ISO88596E is the MIB identifier with IANA name ISO_8859-6-E (MIME: ISO-8859-6-E). + // + // rfc1556 + // Reference: RFC1556 + ISO88596E MIB = 81 + + // ISO88596I is the MIB identifier with IANA name ISO_8859-6-I (MIME: ISO-8859-6-I). + // + // rfc1556 + // Reference: RFC1556 + ISO88596I MIB = 82 + + // ISO128T101G2 is the MIB identifier with IANA name T.101-G2. 
+ // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO128T101G2 MIB = 83 + + // ISO88598E is the MIB identifier with IANA name ISO_8859-8-E (MIME: ISO-8859-8-E). + // + // rfc1556 + // Reference: RFC1556 + ISO88598E MIB = 84 + + // ISO88598I is the MIB identifier with IANA name ISO_8859-8-I (MIME: ISO-8859-8-I). + // + // rfc1556 + // Reference: RFC1556 + ISO88598I MIB = 85 + + // ISO139CSN369103 is the MIB identifier with IANA name CSN_369103. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO139CSN369103 MIB = 86 + + // ISO141JUSIB1002 is the MIB identifier with IANA name JUS_I.B1.002. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO141JUSIB1002 MIB = 87 + + // ISO143IECP271 is the MIB identifier with IANA name IEC_P27-1. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO143IECP271 MIB = 88 + + // ISO146Serbian is the MIB identifier with IANA name JUS_I.B1.003-serb. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO146Serbian MIB = 89 + + // ISO147Macedonian is the MIB identifier with IANA name JUS_I.B1.003-mac. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO147Macedonian MIB = 90 + + // ISO150GreekCCITT is the MIB identifier with IANA name greek-ccitt. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. 
+ // Reference: RFC1345 + ISO150GreekCCITT MIB = 91 + + // ISO151Cuba is the MIB identifier with IANA name NC_NC00-10:81. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO151Cuba MIB = 92 + + // ISO6937Add is the MIB identifier with IANA name ISO_6937-2-25. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO6937Add MIB = 93 + + // ISO153GOST1976874 is the MIB identifier with IANA name GOST_19768-74. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO153GOST1976874 MIB = 94 + + // ISO8859Supp is the MIB identifier with IANA name ISO_8859-supp. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO8859Supp MIB = 95 + + // ISO10367Box is the MIB identifier with IANA name ISO_10367-box. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO10367Box MIB = 96 + + // ISO158Lap is the MIB identifier with IANA name latin-lap. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO158Lap MIB = 97 + + // ISO159JISX02121990 is the MIB identifier with IANA name JIS_X0212-1990. + // + // ISO-IR: International Register of Escape Sequences + // Note: The current registration authority is IPSJ/ITSCJ, Japan. + // Reference: RFC1345 + ISO159JISX02121990 MIB = 98 + + // ISO646Danish is the MIB identifier with IANA name DS_2089. 
+ // + // Danish Standard, DS 2089, February 1974 + // Reference: RFC1345 + ISO646Danish MIB = 99 + + // USDK is the MIB identifier with IANA name us-dk. + // + // Reference: RFC1345 + USDK MIB = 100 + + // DKUS is the MIB identifier with IANA name dk-us. + // + // Reference: RFC1345 + DKUS MIB = 101 + + // KSC5636 is the MIB identifier with IANA name KSC5636. + // + // Reference: RFC1345 + KSC5636 MIB = 102 + + // Unicode11UTF7 is the MIB identifier with IANA name UNICODE-1-1-UTF-7. + // + // rfc1642 + // Reference: RFC1642 + Unicode11UTF7 MIB = 103 + + // ISO2022CN is the MIB identifier with IANA name ISO-2022-CN. + // + // rfc1922 + // Reference: RFC1922 + ISO2022CN MIB = 104 + + // ISO2022CNEXT is the MIB identifier with IANA name ISO-2022-CN-EXT. + // + // rfc1922 + // Reference: RFC1922 + ISO2022CNEXT MIB = 105 + + // UTF8 is the MIB identifier with IANA name UTF-8. + // + // rfc3629 + // Reference: RFC3629 + UTF8 MIB = 106 + + // ISO885913 is the MIB identifier with IANA name ISO-8859-13. + // + // ISO See http://www.iana.org/assignments/charset-reg/ISO-8859-13 http://www.iana.org/assignments/charset-reg/ISO-8859-13 + ISO885913 MIB = 109 + + // ISO885914 is the MIB identifier with IANA name ISO-8859-14. + // + // ISO See http://www.iana.org/assignments/charset-reg/ISO-8859-14 + ISO885914 MIB = 110 + + // ISO885915 is the MIB identifier with IANA name ISO-8859-15. + // + // ISO + // Please see: http://www.iana.org/assignments/charset-reg/ISO-8859-15 + ISO885915 MIB = 111 + + // ISO885916 is the MIB identifier with IANA name ISO-8859-16. + // + // ISO + ISO885916 MIB = 112 + + // GBK is the MIB identifier with IANA name GBK. + // + // Chinese IT Standardization Technical Committee + // Please see: http://www.iana.org/assignments/charset-reg/GBK + GBK MIB = 113 + + // GB18030 is the MIB identifier with IANA name GB18030. 
+ // + // Chinese IT Standardization Technical Committee + // Please see: http://www.iana.org/assignments/charset-reg/GB18030 + GB18030 MIB = 114 + + // OSDEBCDICDF0415 is the MIB identifier with IANA name OSD_EBCDIC_DF04_15. + // + // Fujitsu-Siemens standard mainframe EBCDIC encoding + // Please see: http://www.iana.org/assignments/charset-reg/OSD-EBCDIC-DF04-15 + OSDEBCDICDF0415 MIB = 115 + + // OSDEBCDICDF03IRV is the MIB identifier with IANA name OSD_EBCDIC_DF03_IRV. + // + // Fujitsu-Siemens standard mainframe EBCDIC encoding + // Please see: http://www.iana.org/assignments/charset-reg/OSD-EBCDIC-DF03-IRV + OSDEBCDICDF03IRV MIB = 116 + + // OSDEBCDICDF041 is the MIB identifier with IANA name OSD_EBCDIC_DF04_1. + // + // Fujitsu-Siemens standard mainframe EBCDIC encoding + // Please see: http://www.iana.org/assignments/charset-reg/OSD-EBCDIC-DF04-1 + OSDEBCDICDF041 MIB = 117 + + // ISO115481 is the MIB identifier with IANA name ISO-11548-1. + // + // See http://www.iana.org/assignments/charset-reg/ISO-11548-1 + ISO115481 MIB = 118 + + // KZ1048 is the MIB identifier with IANA name KZ-1048. + // + // See http://www.iana.org/assignments/charset-reg/KZ-1048 + KZ1048 MIB = 119 + + // Unicode is the MIB identifier with IANA name ISO-10646-UCS-2. + // + // the 2-octet Basic Multilingual Plane, aka Unicode + // this needs to specify network byte order: the standard + // does not specify (it is a 16-bit integer space) + Unicode MIB = 1000 + + // UCS4 is the MIB identifier with IANA name ISO-10646-UCS-4. + // + // the full code space. (same comment about byte order, + // these are 31-bit numbers. + UCS4 MIB = 1001 + + // UnicodeASCII is the MIB identifier with IANA name ISO-10646-UCS-Basic. + // + // ASCII subset of Unicode. Basic Latin = collection 1 + // See ISO 10646, Appendix A + UnicodeASCII MIB = 1002 + + // UnicodeLatin1 is the MIB identifier with IANA name ISO-10646-Unicode-Latin1. + // + // ISO Latin-1 subset of Unicode. 
Basic Latin and Latin-1 + // Supplement = collections 1 and 2. See ISO 10646, + // Appendix A. See rfc1815 . + UnicodeLatin1 MIB = 1003 + + // UnicodeJapanese is the MIB identifier with IANA name ISO-10646-J-1. + // + // ISO 10646 Japanese, see rfc1815 . + UnicodeJapanese MIB = 1004 + + // UnicodeIBM1261 is the MIB identifier with IANA name ISO-Unicode-IBM-1261. + // + // IBM Latin-2, -3, -5, Extended Presentation Set, GCSGID: 1261 + UnicodeIBM1261 MIB = 1005 + + // UnicodeIBM1268 is the MIB identifier with IANA name ISO-Unicode-IBM-1268. + // + // IBM Latin-4 Extended Presentation Set, GCSGID: 1268 + UnicodeIBM1268 MIB = 1006 + + // UnicodeIBM1276 is the MIB identifier with IANA name ISO-Unicode-IBM-1276. + // + // IBM Cyrillic Greek Extended Presentation Set, GCSGID: 1276 + UnicodeIBM1276 MIB = 1007 + + // UnicodeIBM1264 is the MIB identifier with IANA name ISO-Unicode-IBM-1264. + // + // IBM Arabic Presentation Set, GCSGID: 1264 + UnicodeIBM1264 MIB = 1008 + + // UnicodeIBM1265 is the MIB identifier with IANA name ISO-Unicode-IBM-1265. + // + // IBM Hebrew Presentation Set, GCSGID: 1265 + UnicodeIBM1265 MIB = 1009 + + // Unicode11 is the MIB identifier with IANA name UNICODE-1-1. + // + // rfc1641 + // Reference: RFC1641 + Unicode11 MIB = 1010 + + // SCSU is the MIB identifier with IANA name SCSU. + // + // SCSU See http://www.iana.org/assignments/charset-reg/SCSU + SCSU MIB = 1011 + + // UTF7 is the MIB identifier with IANA name UTF-7. + // + // rfc2152 + // Reference: RFC2152 + UTF7 MIB = 1012 + + // UTF16BE is the MIB identifier with IANA name UTF-16BE. + // + // rfc2781 + // Reference: RFC2781 + UTF16BE MIB = 1013 + + // UTF16LE is the MIB identifier with IANA name UTF-16LE. + // + // rfc2781 + // Reference: RFC2781 + UTF16LE MIB = 1014 + + // UTF16 is the MIB identifier with IANA name UTF-16. + // + // rfc2781 + // Reference: RFC2781 + UTF16 MIB = 1015 + + // CESU8 is the MIB identifier with IANA name CESU-8. 
+ // + // http://www.unicode.org/unicode/reports/tr26 + CESU8 MIB = 1016 + + // UTF32 is the MIB identifier with IANA name UTF-32. + // + // http://www.unicode.org/unicode/reports/tr19/ + UTF32 MIB = 1017 + + // UTF32BE is the MIB identifier with IANA name UTF-32BE. + // + // http://www.unicode.org/unicode/reports/tr19/ + UTF32BE MIB = 1018 + + // UTF32LE is the MIB identifier with IANA name UTF-32LE. + // + // http://www.unicode.org/unicode/reports/tr19/ + UTF32LE MIB = 1019 + + // BOCU1 is the MIB identifier with IANA name BOCU-1. + // + // http://www.unicode.org/notes/tn6/ + BOCU1 MIB = 1020 + + // Windows30Latin1 is the MIB identifier with IANA name ISO-8859-1-Windows-3.0-Latin-1. + // + // Extended ISO 8859-1 Latin-1 for Windows 3.0. + // PCL Symbol Set id: 9U + Windows30Latin1 MIB = 2000 + + // Windows31Latin1 is the MIB identifier with IANA name ISO-8859-1-Windows-3.1-Latin-1. + // + // Extended ISO 8859-1 Latin-1 for Windows 3.1. + // PCL Symbol Set id: 19U + Windows31Latin1 MIB = 2001 + + // Windows31Latin2 is the MIB identifier with IANA name ISO-8859-2-Windows-Latin-2. + // + // Extended ISO 8859-2. Latin-2 for Windows 3.1. + // PCL Symbol Set id: 9E + Windows31Latin2 MIB = 2002 + + // Windows31Latin5 is the MIB identifier with IANA name ISO-8859-9-Windows-Latin-5. + // + // Extended ISO 8859-9. Latin-5 for Windows 3.1 + // PCL Symbol Set id: 5T + Windows31Latin5 MIB = 2003 + + // HPRoman8 is the MIB identifier with IANA name hp-roman8. + // + // LaserJet IIP Printer User's Manual, + // HP part no 33471-90901, Hewlet-Packard, June 1989. + // Reference: RFC1345 + HPRoman8 MIB = 2004 + + // AdobeStandardEncoding is the MIB identifier with IANA name Adobe-Standard-Encoding. + // + // PostScript Language Reference Manual + // PCL Symbol Set id: 10J + AdobeStandardEncoding MIB = 2005 + + // VenturaUS is the MIB identifier with IANA name Ventura-US. + // + // Ventura US. 
ASCII plus characters typically used in + // publishing, like pilcrow, copyright, registered, trade mark, + // section, dagger, and double dagger in the range A0 (hex) + // to FF (hex). + // PCL Symbol Set id: 14J + VenturaUS MIB = 2006 + + // VenturaInternational is the MIB identifier with IANA name Ventura-International. + // + // Ventura International. ASCII plus coded characters similar + // to Roman8. + // PCL Symbol Set id: 13J + VenturaInternational MIB = 2007 + + // DECMCS is the MIB identifier with IANA name DEC-MCS. + // + // VAX/VMS User's Manual, + // Order Number: AI-Y517A-TE, April 1986. + // Reference: RFC1345 + DECMCS MIB = 2008 + + // PC850Multilingual is the MIB identifier with IANA name IBM850. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + PC850Multilingual MIB = 2009 + + // PC8DanishNorwegian is the MIB identifier with IANA name PC8-Danish-Norwegian. + // + // PC Danish Norwegian + // 8-bit PC set for Danish Norwegian + // PCL Symbol Set id: 11U + PC8DanishNorwegian MIB = 2012 + + // PC862LatinHebrew is the MIB identifier with IANA name IBM862. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + PC862LatinHebrew MIB = 2013 + + // PC8Turkish is the MIB identifier with IANA name PC8-Turkish. + // + // PC Latin Turkish. PCL Symbol Set id: 9T + PC8Turkish MIB = 2014 + + // IBMSymbols is the MIB identifier with IANA name IBM-Symbols. + // + // Presentation Set, CPGID: 259 + IBMSymbols MIB = 2015 + + // IBMThai is the MIB identifier with IANA name IBM-Thai. + // + // Presentation Set, CPGID: 838 + IBMThai MIB = 2016 + + // HPLegal is the MIB identifier with IANA name HP-Legal. + // + // PCL 5 Comparison Guide, Hewlett-Packard, + // HP part number 5961-0510, October 1992 + // PCL Symbol Set id: 1U + HPLegal MIB = 2017 + + // HPPiFont is the MIB identifier with IANA name HP-Pi-font. 
+ // + // PCL 5 Comparison Guide, Hewlett-Packard, + // HP part number 5961-0510, October 1992 + // PCL Symbol Set id: 15U + HPPiFont MIB = 2018 + + // HPMath8 is the MIB identifier with IANA name HP-Math8. + // + // PCL 5 Comparison Guide, Hewlett-Packard, + // HP part number 5961-0510, October 1992 + // PCL Symbol Set id: 8M + HPMath8 MIB = 2019 + + // HPPSMath is the MIB identifier with IANA name Adobe-Symbol-Encoding. + // + // PostScript Language Reference Manual + // PCL Symbol Set id: 5M + HPPSMath MIB = 2020 + + // HPDesktop is the MIB identifier with IANA name HP-DeskTop. + // + // PCL 5 Comparison Guide, Hewlett-Packard, + // HP part number 5961-0510, October 1992 + // PCL Symbol Set id: 7J + HPDesktop MIB = 2021 + + // VenturaMath is the MIB identifier with IANA name Ventura-Math. + // + // PCL 5 Comparison Guide, Hewlett-Packard, + // HP part number 5961-0510, October 1992 + // PCL Symbol Set id: 6M + VenturaMath MIB = 2022 + + // MicrosoftPublishing is the MIB identifier with IANA name Microsoft-Publishing. + // + // PCL 5 Comparison Guide, Hewlett-Packard, + // HP part number 5961-0510, October 1992 + // PCL Symbol Set id: 6J + MicrosoftPublishing MIB = 2023 + + // Windows31J is the MIB identifier with IANA name Windows-31J. + // + // Windows Japanese. A further extension of Shift_JIS + // to include NEC special characters (Row 13), NEC + // selection of IBM extensions (Rows 89 to 92), and IBM + // extensions (Rows 115 to 119). The CCS's are + // JIS X0201:1997, JIS X0208:1997, and these extensions. + // This charset can be used for the top-level media type "text", + // but it is of limited or specialized use (see rfc2278 ). + // PCL Symbol Set id: 19K + Windows31J MIB = 2024 + + // GB2312 is the MIB identifier with IANA name GB2312 (MIME: GB2312). 
+ // + // Chinese for People's Republic of China (PRC) mixed one byte, + // two byte set: + // 20-7E = one byte ASCII + // A1-FE = two byte PRC Kanji + // See GB 2312-80 + // PCL Symbol Set Id: 18C + GB2312 MIB = 2025 + + // Big5 is the MIB identifier with IANA name Big5 (MIME: Big5). + // + // Chinese for Taiwan Multi-byte set. + // PCL Symbol Set Id: 18T + Big5 MIB = 2026 + + // Macintosh is the MIB identifier with IANA name macintosh. + // + // The Unicode Standard ver1.0, ISBN 0-201-56788-1, Oct 1991 + // Reference: RFC1345 + Macintosh MIB = 2027 + + // IBM037 is the MIB identifier with IANA name IBM037. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM037 MIB = 2028 + + // IBM038 is the MIB identifier with IANA name IBM038. + // + // IBM 3174 Character Set Ref, GA27-3831-02, March 1990 + // Reference: RFC1345 + IBM038 MIB = 2029 + + // IBM273 is the MIB identifier with IANA name IBM273. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM273 MIB = 2030 + + // IBM274 is the MIB identifier with IANA name IBM274. + // + // IBM 3174 Character Set Ref, GA27-3831-02, March 1990 + // Reference: RFC1345 + IBM274 MIB = 2031 + + // IBM275 is the MIB identifier with IANA name IBM275. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM275 MIB = 2032 + + // IBM277 is the MIB identifier with IANA name IBM277. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM277 MIB = 2033 + + // IBM278 is the MIB identifier with IANA name IBM278. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM278 MIB = 2034 + + // IBM280 is the MIB identifier with IANA name IBM280. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM280 MIB = 2035 + + // IBM281 is the MIB identifier with IANA name IBM281. 
+ // + // IBM 3174 Character Set Ref, GA27-3831-02, March 1990 + // Reference: RFC1345 + IBM281 MIB = 2036 + + // IBM284 is the MIB identifier with IANA name IBM284. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM284 MIB = 2037 + + // IBM285 is the MIB identifier with IANA name IBM285. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM285 MIB = 2038 + + // IBM290 is the MIB identifier with IANA name IBM290. + // + // IBM 3174 Character Set Ref, GA27-3831-02, March 1990 + // Reference: RFC1345 + IBM290 MIB = 2039 + + // IBM297 is the MIB identifier with IANA name IBM297. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM297 MIB = 2040 + + // IBM420 is the MIB identifier with IANA name IBM420. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990, + // IBM NLS RM p 11-11 + // Reference: RFC1345 + IBM420 MIB = 2041 + + // IBM423 is the MIB identifier with IANA name IBM423. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM423 MIB = 2042 + + // IBM424 is the MIB identifier with IANA name IBM424. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM424 MIB = 2043 + + // PC8CodePage437 is the MIB identifier with IANA name IBM437. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + PC8CodePage437 MIB = 2011 + + // IBM500 is the MIB identifier with IANA name IBM500. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM500 MIB = 2044 + + // IBM851 is the MIB identifier with IANA name IBM851. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM851 MIB = 2045 + + // PCp852 is the MIB identifier with IANA name IBM852. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + PCp852 MIB = 2010 + + // IBM855 is the MIB identifier with IANA name IBM855. 
+ // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM855 MIB = 2046 + + // IBM857 is the MIB identifier with IANA name IBM857. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM857 MIB = 2047 + + // IBM860 is the MIB identifier with IANA name IBM860. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM860 MIB = 2048 + + // IBM861 is the MIB identifier with IANA name IBM861. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM861 MIB = 2049 + + // IBM863 is the MIB identifier with IANA name IBM863. + // + // IBM Keyboard layouts and code pages, PN 07G4586 June 1991 + // Reference: RFC1345 + IBM863 MIB = 2050 + + // IBM864 is the MIB identifier with IANA name IBM864. + // + // IBM Keyboard layouts and code pages, PN 07G4586 June 1991 + // Reference: RFC1345 + IBM864 MIB = 2051 + + // IBM865 is the MIB identifier with IANA name IBM865. + // + // IBM DOS 3.3 Ref (Abridged), 94X9575 (Feb 1987) + // Reference: RFC1345 + IBM865 MIB = 2052 + + // IBM868 is the MIB identifier with IANA name IBM868. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM868 MIB = 2053 + + // IBM869 is the MIB identifier with IANA name IBM869. + // + // IBM Keyboard layouts and code pages, PN 07G4586 June 1991 + // Reference: RFC1345 + IBM869 MIB = 2054 + + // IBM870 is the MIB identifier with IANA name IBM870. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM870 MIB = 2055 + + // IBM871 is the MIB identifier with IANA name IBM871. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM871 MIB = 2056 + + // IBM880 is the MIB identifier with IANA name IBM880. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM880 MIB = 2057 + + // IBM891 is the MIB identifier with IANA name IBM891. 
+ // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM891 MIB = 2058 + + // IBM903 is the MIB identifier with IANA name IBM903. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM903 MIB = 2059 + + // IBBM904 is the MIB identifier with IANA name IBM904. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBBM904 MIB = 2060 + + // IBM905 is the MIB identifier with IANA name IBM905. + // + // IBM 3174 Character Set Ref, GA27-3831-02, March 1990 + // Reference: RFC1345 + IBM905 MIB = 2061 + + // IBM918 is the MIB identifier with IANA name IBM918. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM918 MIB = 2062 + + // IBM1026 is the MIB identifier with IANA name IBM1026. + // + // IBM NLS RM Vol2 SE09-8002-01, March 1990 + // Reference: RFC1345 + IBM1026 MIB = 2063 + + // IBMEBCDICATDE is the MIB identifier with IANA name EBCDIC-AT-DE. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + IBMEBCDICATDE MIB = 2064 + + // EBCDICATDEA is the MIB identifier with IANA name EBCDIC-AT-DE-A. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICATDEA MIB = 2065 + + // EBCDICCAFR is the MIB identifier with IANA name EBCDIC-CA-FR. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICCAFR MIB = 2066 + + // EBCDICDKNO is the MIB identifier with IANA name EBCDIC-DK-NO. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICDKNO MIB = 2067 + + // EBCDICDKNOA is the MIB identifier with IANA name EBCDIC-DK-NO-A. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICDKNOA MIB = 2068 + + // EBCDICFISE is the MIB identifier with IANA name EBCDIC-FI-SE. 
+ // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICFISE MIB = 2069 + + // EBCDICFISEA is the MIB identifier with IANA name EBCDIC-FI-SE-A. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICFISEA MIB = 2070 + + // EBCDICFR is the MIB identifier with IANA name EBCDIC-FR. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICFR MIB = 2071 + + // EBCDICIT is the MIB identifier with IANA name EBCDIC-IT. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICIT MIB = 2072 + + // EBCDICPT is the MIB identifier with IANA name EBCDIC-PT. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICPT MIB = 2073 + + // EBCDICES is the MIB identifier with IANA name EBCDIC-ES. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICES MIB = 2074 + + // EBCDICESA is the MIB identifier with IANA name EBCDIC-ES-A. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICESA MIB = 2075 + + // EBCDICESS is the MIB identifier with IANA name EBCDIC-ES-S. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICESS MIB = 2076 + + // EBCDICUK is the MIB identifier with IANA name EBCDIC-UK. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICUK MIB = 2077 + + // EBCDICUS is the MIB identifier with IANA name EBCDIC-US. + // + // IBM 3270 Char Set Ref Ch 10, GA27-2837-9, April 1987 + // Reference: RFC1345 + EBCDICUS MIB = 2078 + + // Unknown8BiT is the MIB identifier with IANA name UNKNOWN-8BIT. + // + // Reference: RFC1428 + Unknown8BiT MIB = 2079 + + // Mnemonic is the MIB identifier with IANA name MNEMONIC. 
+ // + // rfc1345 , also known as "mnemonic+ascii+38" + // Reference: RFC1345 + Mnemonic MIB = 2080 + + // Mnem is the MIB identifier with IANA name MNEM. + // + // rfc1345 , also known as "mnemonic+ascii+8200" + // Reference: RFC1345 + Mnem MIB = 2081 + + // VISCII is the MIB identifier with IANA name VISCII. + // + // rfc1456 + // Reference: RFC1456 + VISCII MIB = 2082 + + // VIQR is the MIB identifier with IANA name VIQR. + // + // rfc1456 + // Reference: RFC1456 + VIQR MIB = 2083 + + // KOI8R is the MIB identifier with IANA name KOI8-R (MIME: KOI8-R). + // + // rfc1489 , based on GOST-19768-74, ISO-6937/8, + // INIS-Cyrillic, ISO-5427. + // Reference: RFC1489 + KOI8R MIB = 2084 + + // HZGB2312 is the MIB identifier with IANA name HZ-GB-2312. + // + // rfc1842 , rfc1843 rfc1843 rfc1842 + HZGB2312 MIB = 2085 + + // IBM866 is the MIB identifier with IANA name IBM866. + // + // IBM NLDG Volume 2 (SE09-8002-03) August 1994 + IBM866 MIB = 2086 + + // PC775Baltic is the MIB identifier with IANA name IBM775. + // + // HP PCL 5 Comparison Guide (P/N 5021-0329) pp B-13, 1996 + PC775Baltic MIB = 2087 + + // KOI8U is the MIB identifier with IANA name KOI8-U. + // + // rfc2319 + // Reference: RFC2319 + KOI8U MIB = 2088 + + // IBM00858 is the MIB identifier with IANA name IBM00858. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM00858 + IBM00858 MIB = 2089 + + // IBM00924 is the MIB identifier with IANA name IBM00924. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM00924 + IBM00924 MIB = 2090 + + // IBM01140 is the MIB identifier with IANA name IBM01140. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01140 + IBM01140 MIB = 2091 + + // IBM01141 is the MIB identifier with IANA name IBM01141. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01141 + IBM01141 MIB = 2092 + + // IBM01142 is the MIB identifier with IANA name IBM01142. 
+ // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01142 + IBM01142 MIB = 2093 + + // IBM01143 is the MIB identifier with IANA name IBM01143. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01143 + IBM01143 MIB = 2094 + + // IBM01144 is the MIB identifier with IANA name IBM01144. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01144 + IBM01144 MIB = 2095 + + // IBM01145 is the MIB identifier with IANA name IBM01145. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01145 + IBM01145 MIB = 2096 + + // IBM01146 is the MIB identifier with IANA name IBM01146. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01146 + IBM01146 MIB = 2097 + + // IBM01147 is the MIB identifier with IANA name IBM01147. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01147 + IBM01147 MIB = 2098 + + // IBM01148 is the MIB identifier with IANA name IBM01148. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01148 + IBM01148 MIB = 2099 + + // IBM01149 is the MIB identifier with IANA name IBM01149. + // + // IBM See http://www.iana.org/assignments/charset-reg/IBM01149 + IBM01149 MIB = 2100 + + // Big5HKSCS is the MIB identifier with IANA name Big5-HKSCS. + // + // See http://www.iana.org/assignments/charset-reg/Big5-HKSCS + Big5HKSCS MIB = 2101 + + // IBM1047 is the MIB identifier with IANA name IBM1047. + // + // IBM1047 (EBCDIC Latin 1/Open Systems) http://www-1.ibm.com/servers/eserver/iseries/software/globalization/pdf/cp01047z.pdf + IBM1047 MIB = 2102 + + // PTCP154 is the MIB identifier with IANA name PTCP154. + // + // See http://www.iana.org/assignments/charset-reg/PTCP154 + PTCP154 MIB = 2103 + + // Amiga1251 is the MIB identifier with IANA name Amiga-1251. + // + // See http://www.amiga.ultranet.ru/Amiga-1251.html + Amiga1251 MIB = 2104 + + // KOI7switched is the MIB identifier with IANA name KOI7-switched. 
+ // + // See http://www.iana.org/assignments/charset-reg/KOI7-switched + KOI7switched MIB = 2105 + + // BRF is the MIB identifier with IANA name BRF. + // + // See http://www.iana.org/assignments/charset-reg/BRF + BRF MIB = 2106 + + // TSCII is the MIB identifier with IANA name TSCII. + // + // See http://www.iana.org/assignments/charset-reg/TSCII + TSCII MIB = 2107 + + // CP51932 is the MIB identifier with IANA name CP51932. + // + // See http://www.iana.org/assignments/charset-reg/CP51932 + CP51932 MIB = 2108 + + // Windows874 is the MIB identifier with IANA name windows-874. + // + // See http://www.iana.org/assignments/charset-reg/windows-874 + Windows874 MIB = 2109 + + // Windows1250 is the MIB identifier with IANA name windows-1250. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1250 + Windows1250 MIB = 2250 + + // Windows1251 is the MIB identifier with IANA name windows-1251. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1251 + Windows1251 MIB = 2251 + + // Windows1252 is the MIB identifier with IANA name windows-1252. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1252 + Windows1252 MIB = 2252 + + // Windows1253 is the MIB identifier with IANA name windows-1253. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1253 + Windows1253 MIB = 2253 + + // Windows1254 is the MIB identifier with IANA name windows-1254. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1254 + Windows1254 MIB = 2254 + + // Windows1255 is the MIB identifier with IANA name windows-1255. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1255 + Windows1255 MIB = 2255 + + // Windows1256 is the MIB identifier with IANA name windows-1256. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1256 + Windows1256 MIB = 2256 + + // Windows1257 is the MIB identifier with IANA name windows-1257. 
+ // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1257 + Windows1257 MIB = 2257 + + // Windows1258 is the MIB identifier with IANA name windows-1258. + // + // Microsoft http://www.iana.org/assignments/charset-reg/windows-1258 + Windows1258 MIB = 2258 + + // TIS620 is the MIB identifier with IANA name TIS-620. + // + // Thai Industrial Standards Institute (TISI) + TIS620 MIB = 2259 + + // CP50220 is the MIB identifier with IANA name CP50220. + // + // See http://www.iana.org/assignments/charset-reg/CP50220 + CP50220 MIB = 2260 +) diff --git a/vendor/golang.org/x/text/encoding/internal/internal.go b/vendor/golang.org/x/text/encoding/internal/internal.go new file mode 100644 index 000000000..75a5fd165 --- /dev/null +++ b/vendor/golang.org/x/text/encoding/internal/internal.go @@ -0,0 +1,75 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package internal contains code that is shared among encoding implementations. +package internal + +import ( + "golang.org/x/text/encoding" + "golang.org/x/text/encoding/internal/identifier" + "golang.org/x/text/transform" +) + +// Encoding is an implementation of the Encoding interface that adds the String +// and ID methods to an existing encoding. +type Encoding struct { + encoding.Encoding + Name string + MIB identifier.MIB +} + +// _ verifies that Encoding implements identifier.Interface. +var _ identifier.Interface = (*Encoding)(nil) + +func (e *Encoding) String() string { + return e.Name +} + +func (e *Encoding) ID() (mib identifier.MIB, other string) { + return e.MIB, "" +} + +// SimpleEncoding is an Encoding that combines two Transformers. 
+type SimpleEncoding struct { + Decoder transform.Transformer + Encoder transform.Transformer +} + +func (e *SimpleEncoding) NewDecoder() *encoding.Decoder { + return &encoding.Decoder{Transformer: e.Decoder} +} + +func (e *SimpleEncoding) NewEncoder() *encoding.Encoder { + return &encoding.Encoder{Transformer: e.Encoder} +} + +// FuncEncoding is an Encoding that combines two functions returning a new +// Transformer. +type FuncEncoding struct { + Decoder func() transform.Transformer + Encoder func() transform.Transformer +} + +func (e FuncEncoding) NewDecoder() *encoding.Decoder { + return &encoding.Decoder{Transformer: e.Decoder()} +} + +func (e FuncEncoding) NewEncoder() *encoding.Encoder { + return &encoding.Encoder{Transformer: e.Encoder()} +} + +// A RepertoireError indicates a rune is not in the repertoire of a destination +// encoding. It is associated with an encoding-specific suggested replacement +// byte. +type RepertoireError byte + +// Error implements the error interrface. +func (r RepertoireError) Error() string { + return "encoding: rune not supported by encoding." +} + +// Replacement returns the replacement string associated with this error. +func (r RepertoireError) Replacement() byte { return byte(r) } + +var ErrASCIIReplacement = RepertoireError(encoding.ASCIISub) diff --git a/vendor/golang.org/x/text/encoding/unicode/override.go b/vendor/golang.org/x/text/encoding/unicode/override.go new file mode 100644 index 000000000..35d62fcc9 --- /dev/null +++ b/vendor/golang.org/x/text/encoding/unicode/override.go @@ -0,0 +1,82 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. 
+ +package unicode + +import ( + "golang.org/x/text/transform" +) + +// BOMOverride returns a new decoder transformer that is identical to fallback, +// except that the presence of a Byte Order Mark at the start of the input +// causes it to switch to the corresponding Unicode decoding. It will only +// consider BOMs for UTF-8, UTF-16BE, and UTF-16LE. +// +// This differs from using ExpectBOM by allowing a BOM to switch to UTF-8, not +// just UTF-16 variants, and allowing falling back to any encoding scheme. +// +// This technique is recommended by the W3C for use in HTML 5: "For +// compatibility with deployed content, the byte order mark (also known as BOM) +// is considered more authoritative than anything else." +// http://www.w3.org/TR/encoding/#specification-hooks +// +// Using BOMOverride is mostly intended for use cases where the first characters +// of a fallback encoding are known to not be a BOM, for example, for valid HTML +// and most encodings. +func BOMOverride(fallback transform.Transformer) transform.Transformer { + // TODO: possibly allow a variadic argument of unicode encodings to allow + // specifying details of which fallbacks are supported as well as + // specifying the details of the implementations. This would also allow for + // support for UTF-32, which should not be supported by default. + return &bomOverride{fallback: fallback} +} + +type bomOverride struct { + fallback transform.Transformer + current transform.Transformer +} + +func (d *bomOverride) Reset() { + d.current = nil + d.fallback.Reset() +} + +var ( + // TODO: we could use decode functions here, instead of allocating a new + // decoder on every NewDecoder as IgnoreBOM decoders can be stateless. 
+ utf16le = UTF16(LittleEndian, IgnoreBOM) + utf16be = UTF16(BigEndian, IgnoreBOM) +) + +const utf8BOM = "\ufeff" + +func (d *bomOverride) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + if d.current != nil { + return d.current.Transform(dst, src, atEOF) + } + if len(src) < 3 && !atEOF { + return 0, 0, transform.ErrShortSrc + } + d.current = d.fallback + bomSize := 0 + if len(src) >= 2 { + if src[0] == 0xFF && src[1] == 0xFE { + d.current = utf16le.NewDecoder() + bomSize = 2 + } else if src[0] == 0xFE && src[1] == 0xFF { + d.current = utf16be.NewDecoder() + bomSize = 2 + } else if len(src) >= 3 && + src[0] == utf8BOM[0] && + src[1] == utf8BOM[1] && + src[2] == utf8BOM[2] { + d.current = transform.Nop + bomSize = 3 + } + } + if bomSize < len(src) { + nDst, nSrc, err = d.current.Transform(dst, src[bomSize:], atEOF) + } + return nDst, nSrc + bomSize, err +} diff --git a/vendor/golang.org/x/text/encoding/unicode/unicode.go b/vendor/golang.org/x/text/encoding/unicode/unicode.go new file mode 100644 index 000000000..579cadfb1 --- /dev/null +++ b/vendor/golang.org/x/text/encoding/unicode/unicode.go @@ -0,0 +1,434 @@ +// Copyright 2013 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package unicode provides Unicode encodings such as UTF-16. +package unicode // import "golang.org/x/text/encoding/unicode" + +import ( + "errors" + "unicode/utf16" + "unicode/utf8" + + "golang.org/x/text/encoding" + "golang.org/x/text/encoding/internal" + "golang.org/x/text/encoding/internal/identifier" + "golang.org/x/text/internal/utf8internal" + "golang.org/x/text/runes" + "golang.org/x/text/transform" +) + +// TODO: I think the Transformers really should return errors on unmatched +// surrogate pairs and odd numbers of bytes. This is not required by RFC 2781, +// which leaves it open, but is suggested by WhatWG. 
It will allow for all error +// modes as defined by WhatWG: fatal, HTML and Replacement. This would require +// the introduction of some kind of error type for conveying the erroneous code +// point. + +// UTF8 is the UTF-8 encoding. +var UTF8 encoding.Encoding = utf8enc + +var utf8enc = &internal.Encoding{ + &internal.SimpleEncoding{utf8Decoder{}, runes.ReplaceIllFormed()}, + "UTF-8", + identifier.UTF8, +} + +type utf8Decoder struct{ transform.NopResetter } + +func (utf8Decoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + var pSrc int // point from which to start copy in src + var accept utf8internal.AcceptRange + + // The decoder can only make the input larger, not smaller. + n := len(src) + if len(dst) < n { + err = transform.ErrShortDst + n = len(dst) + atEOF = false + } + for nSrc < n { + c := src[nSrc] + if c < utf8.RuneSelf { + nSrc++ + continue + } + first := utf8internal.First[c] + size := int(first & utf8internal.SizeMask) + if first == utf8internal.FirstInvalid { + goto handleInvalid // invalid starter byte + } + accept = utf8internal.AcceptRanges[first>>utf8internal.AcceptShift] + if nSrc+size > n { + if !atEOF { + // We may stop earlier than necessary here if the short sequence + // has invalid bytes. Not checking for this simplifies the code + // and may avoid duplicate computations in certain conditions. + if err == nil { + err = transform.ErrShortSrc + } + break + } + // Determine the maximal subpart of an ill-formed subsequence. + switch { + case nSrc+1 >= n || src[nSrc+1] < accept.Lo || accept.Hi < src[nSrc+1]: + size = 1 + case nSrc+2 >= n || src[nSrc+2] < utf8internal.LoCB || utf8internal.HiCB < src[nSrc+2]: + size = 2 + default: + size = 3 // As we are short, the maximum is 3. 
+ } + goto handleInvalid + } + if c = src[nSrc+1]; c < accept.Lo || accept.Hi < c { + size = 1 + goto handleInvalid // invalid continuation byte + } else if size == 2 { + } else if c = src[nSrc+2]; c < utf8internal.LoCB || utf8internal.HiCB < c { + size = 2 + goto handleInvalid // invalid continuation byte + } else if size == 3 { + } else if c = src[nSrc+3]; c < utf8internal.LoCB || utf8internal.HiCB < c { + size = 3 + goto handleInvalid // invalid continuation byte + } + nSrc += size + continue + + handleInvalid: + // Copy the scanned input so far. + nDst += copy(dst[nDst:], src[pSrc:nSrc]) + + // Append RuneError to the destination. + const runeError = "\ufffd" + if nDst+len(runeError) > len(dst) { + return nDst, nSrc, transform.ErrShortDst + } + nDst += copy(dst[nDst:], runeError) + + // Skip the maximal subpart of an ill-formed subsequence according to + // the W3C standard way instead of the Go way. This Transform is + // probably the only place in the text repo where it is warranted. + nSrc += size + pSrc = nSrc + + // Recompute the maximum source length. + if sz := len(dst) - nDst; sz < len(src)-nSrc { + err = transform.ErrShortDst + n = nSrc + sz + atEOF = false + } + } + return nDst + copy(dst[nDst:], src[pSrc:nSrc]), nSrc, err +} + +// UTF16 returns a UTF-16 Encoding for the given default endianness and byte +// order mark (BOM) policy. +// +// When decoding from UTF-16 to UTF-8, if the BOMPolicy is IgnoreBOM then +// neither BOMs U+FEFF nor noncharacters U+FFFE in the input stream will affect +// the endianness used for decoding, and will instead be output as their +// standard UTF-8 encodings: "\xef\xbb\xbf" and "\xef\xbf\xbe". If the BOMPolicy +// is UseBOM or ExpectBOM a staring BOM is not written to the UTF-8 output. +// Instead, it overrides the default endianness e for the remainder of the +// transformation. 
Any subsequent BOMs U+FEFF or noncharacters U+FFFE will not +// affect the endianness used, and will instead be output as their standard +// UTF-8 encodings. For UseBOM, if there is no starting BOM, it will proceed +// with the default Endianness. For ExpectBOM, in that case, the transformation +// will return early with an ErrMissingBOM error. +// +// When encoding from UTF-8 to UTF-16, a BOM will be inserted at the start of +// the output if the BOMPolicy is UseBOM or ExpectBOM. Otherwise, a BOM will not +// be inserted. The UTF-8 input does not need to contain a BOM. +// +// There is no concept of a 'native' endianness. If the UTF-16 data is produced +// and consumed in a greater context that implies a certain endianness, use +// IgnoreBOM. Otherwise, use ExpectBOM and always produce and consume a BOM. +// +// In the language of http://www.unicode.org/faq/utf_bom.html#bom10, IgnoreBOM +// corresponds to "Where the precise type of the data stream is known... the +// BOM should not be used" and ExpectBOM corresponds to "A particular +// protocol... may require use of the BOM". +func UTF16(e Endianness, b BOMPolicy) encoding.Encoding { + return utf16Encoding{config{e, b}, mibValue[e][b&bomMask]} +} + +// mibValue maps Endianness and BOMPolicy settings to MIB constants. Note that +// some configurations map to the same MIB identifier. RFC 2781 has requirements +// and recommendations. Some of the "configurations" are merely recommendations, +// so multiple configurations could match. +var mibValue = map[Endianness][numBOMValues]identifier.MIB{ + BigEndian: [numBOMValues]identifier.MIB{ + IgnoreBOM: identifier.UTF16BE, + UseBOM: identifier.UTF16, // BigEnding default is preferred by RFC 2781. + // TODO: acceptBOM | strictBOM would map to UTF16BE as well. + }, + LittleEndian: [numBOMValues]identifier.MIB{ + IgnoreBOM: identifier.UTF16LE, + UseBOM: identifier.UTF16, // LittleEndian default is allowed and preferred on Windows. 
+ // TODO: acceptBOM | strictBOM would map to UTF16LE as well. + }, + // ExpectBOM is not widely used and has no valid MIB identifier. +} + +// All lists a configuration for each IANA-defined UTF-16 variant. +var All = []encoding.Encoding{ + UTF8, + UTF16(BigEndian, UseBOM), + UTF16(BigEndian, IgnoreBOM), + UTF16(LittleEndian, IgnoreBOM), +} + +// BOMPolicy is a UTF-16 encoding's byte order mark policy. +type BOMPolicy uint8 + +const ( + writeBOM BOMPolicy = 0x01 + acceptBOM BOMPolicy = 0x02 + requireBOM BOMPolicy = 0x04 + bomMask BOMPolicy = 0x07 + + // HACK: numBOMValues == 8 triggers a bug in the 1.4 compiler (cannot have a + // map of an array of length 8 of a type that is also used as a key or value + // in another map). See golang.org/issue/11354. + // TODO: consider changing this value back to 8 if the use of 1.4.* has + // been minimized. + numBOMValues = 8 + 1 + + // IgnoreBOM means to ignore any byte order marks. + IgnoreBOM BOMPolicy = 0 + // Common and RFC 2781-compliant interpretation for UTF-16BE/LE. + + // UseBOM means that the UTF-16 form may start with a byte order mark, which + // will be used to override the default encoding. + UseBOM BOMPolicy = writeBOM | acceptBOM + // Common and RFC 2781-compliant interpretation for UTF-16. + + // ExpectBOM means that the UTF-16 form must start with a byte order mark, + // which will be used to override the default encoding. + ExpectBOM BOMPolicy = writeBOM | acceptBOM | requireBOM + // Used in Java as Unicode (not to be confused with Java's UTF-16) and + // ICU's UTF-16,version=1. Not compliant with RFC 2781. + + // TODO (maybe): strictBOM: BOM must match Endianness. This would allow: + // - UTF-16(B|L)E,version=1: writeBOM | acceptBOM | requireBOM | strictBOM + // (UnicodeBig and UnicodeLittle in Java) + // - RFC 2781-compliant, but less common interpretation for UTF-16(B|L)E: + // acceptBOM | strictBOM (e.g. assigned to CheckBOM). + // This addition would be consistent with supporting ExpectBOM. 
+) + +// Endianness is a UTF-16 encoding's default endianness. +type Endianness bool + +const ( + // BigEndian is UTF-16BE. + BigEndian Endianness = false + // LittleEndian is UTF-16LE. + LittleEndian Endianness = true +) + +// ErrMissingBOM means that decoding UTF-16 input with ExpectBOM did not find a +// starting byte order mark. +var ErrMissingBOM = errors.New("encoding: missing byte order mark") + +type utf16Encoding struct { + config + mib identifier.MIB +} + +type config struct { + endianness Endianness + bomPolicy BOMPolicy +} + +func (u utf16Encoding) NewDecoder() *encoding.Decoder { + return &encoding.Decoder{Transformer: &utf16Decoder{ + initial: u.config, + current: u.config, + }} +} + +func (u utf16Encoding) NewEncoder() *encoding.Encoder { + return &encoding.Encoder{Transformer: &utf16Encoder{ + endianness: u.endianness, + initialBOMPolicy: u.bomPolicy, + currentBOMPolicy: u.bomPolicy, + }} +} + +func (u utf16Encoding) ID() (mib identifier.MIB, other string) { + return u.mib, "" +} + +func (u utf16Encoding) String() string { + e, b := "B", "" + if u.endianness == LittleEndian { + e = "L" + } + switch u.bomPolicy { + case ExpectBOM: + b = "Expect" + case UseBOM: + b = "Use" + case IgnoreBOM: + b = "Ignore" + } + return "UTF-16" + e + "E (" + b + " BOM)" +} + +type utf16Decoder struct { + initial config + current config +} + +func (u *utf16Decoder) Reset() { + u.current = u.initial +} + +func (u *utf16Decoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + if len(src) == 0 { + if atEOF && u.current.bomPolicy&requireBOM != 0 { + return 0, 0, ErrMissingBOM + } + return 0, 0, nil + } + if u.current.bomPolicy&acceptBOM != 0 { + if len(src) < 2 { + return 0, 0, transform.ErrShortSrc + } + switch { + case src[0] == 0xfe && src[1] == 0xff: + u.current.endianness = BigEndian + nSrc = 2 + case src[0] == 0xff && src[1] == 0xfe: + u.current.endianness = LittleEndian + nSrc = 2 + default: + if u.current.bomPolicy&requireBOM != 0 { + return 
0, 0, ErrMissingBOM + } + } + u.current.bomPolicy = IgnoreBOM + } + + var r rune + var dSize, sSize int + for nSrc < len(src) { + if nSrc+1 < len(src) { + x := uint16(src[nSrc+0])<<8 | uint16(src[nSrc+1]) + if u.current.endianness == LittleEndian { + x = x>>8 | x<<8 + } + r, sSize = rune(x), 2 + if utf16.IsSurrogate(r) { + if nSrc+3 < len(src) { + x = uint16(src[nSrc+2])<<8 | uint16(src[nSrc+3]) + if u.current.endianness == LittleEndian { + x = x>>8 | x<<8 + } + // Save for next iteration if it is not a high surrogate. + if isHighSurrogate(rune(x)) { + r, sSize = utf16.DecodeRune(r, rune(x)), 4 + } + } else if !atEOF { + err = transform.ErrShortSrc + break + } + } + if dSize = utf8.RuneLen(r); dSize < 0 { + r, dSize = utf8.RuneError, 3 + } + } else if atEOF { + // Single trailing byte. + r, dSize, sSize = utf8.RuneError, 3, 1 + } else { + err = transform.ErrShortSrc + break + } + if nDst+dSize > len(dst) { + err = transform.ErrShortDst + break + } + nDst += utf8.EncodeRune(dst[nDst:], r) + nSrc += sSize + } + return nDst, nSrc, err +} + +func isHighSurrogate(r rune) bool { + return 0xDC00 <= r && r <= 0xDFFF +} + +type utf16Encoder struct { + endianness Endianness + initialBOMPolicy BOMPolicy + currentBOMPolicy BOMPolicy +} + +func (u *utf16Encoder) Reset() { + u.currentBOMPolicy = u.initialBOMPolicy +} + +func (u *utf16Encoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + if u.currentBOMPolicy&writeBOM != 0 { + if len(dst) < 2 { + return 0, 0, transform.ErrShortDst + } + dst[0], dst[1] = 0xfe, 0xff + u.currentBOMPolicy = IgnoreBOM + nDst = 2 + } + + r, size := rune(0), 0 + for nSrc < len(src) { + r = rune(src[nSrc]) + + // Decode a 1-byte rune. + if r < utf8.RuneSelf { + size = 1 + + } else { + // Decode a multi-byte rune. + r, size = utf8.DecodeRune(src[nSrc:]) + if size == 1 { + // All valid runes of size 1 (those below utf8.RuneSelf) were + // handled above. We have invalid UTF-8 or we haven't seen the + // full character yet. 
+ if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + } + } + + if r <= 0xffff { + if nDst+2 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = uint8(r >> 8) + dst[nDst+1] = uint8(r) + nDst += 2 + } else { + if nDst+4 > len(dst) { + err = transform.ErrShortDst + break + } + r1, r2 := utf16.EncodeRune(r) + dst[nDst+0] = uint8(r1 >> 8) + dst[nDst+1] = uint8(r1) + dst[nDst+2] = uint8(r2 >> 8) + dst[nDst+3] = uint8(r2) + nDst += 4 + } + nSrc += size + } + + if u.endianness == LittleEndian { + for i := 0; i < nDst; i += 2 { + dst[i], dst[i+1] = dst[i+1], dst[i] + } + } + return nDst, nSrc, err +} diff --git a/vendor/golang.org/x/text/internal/utf8internal/utf8internal.go b/vendor/golang.org/x/text/internal/utf8internal/utf8internal.go new file mode 100644 index 000000000..575cea870 --- /dev/null +++ b/vendor/golang.org/x/text/internal/utf8internal/utf8internal.go @@ -0,0 +1,87 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package utf8internal contains low-level utf8-related constants, tables, etc. +// that are used internally by the text package. +package utf8internal + +// The default lowest and highest continuation byte. +const ( + LoCB = 0x80 // 1000 0000 + HiCB = 0xBF // 1011 1111 +) + +// Constants related to getting information of first bytes of UTF-8 sequences. +const ( + // ASCII identifies a UTF-8 byte as ASCII. + ASCII = as + + // FirstInvalid indicates a byte is invalid as a first byte of a UTF-8 + // sequence. + FirstInvalid = xx + + // SizeMask is a mask for the size bits. Use use x&SizeMask to get the size. + SizeMask = 7 + + // AcceptShift is the right-shift count for the first byte info byte to get + // the index into the AcceptRanges table. See AcceptRanges. + AcceptShift = 4 + + // The names of these constants are chosen to give nice alignment in the + // table below. 
The first nibble is an index into acceptRanges or F for + // special one-byte cases. The second nibble is the Rune length or the + // Status for the special one-byte case. + xx = 0xF1 // invalid: size 1 + as = 0xF0 // ASCII: size 1 + s1 = 0x02 // accept 0, size 2 + s2 = 0x13 // accept 1, size 3 + s3 = 0x03 // accept 0, size 3 + s4 = 0x23 // accept 2, size 3 + s5 = 0x34 // accept 3, size 4 + s6 = 0x04 // accept 0, size 4 + s7 = 0x44 // accept 4, size 4 +) + +// First is information about the first byte in a UTF-8 sequence. +var First = [256]uint8{ + // 1 2 3 4 5 6 7 8 9 A B C D E F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x00-0x0F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x10-0x1F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x20-0x2F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x30-0x3F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x40-0x4F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x50-0x5F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x60-0x6F + as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x70-0x7F + // 1 2 3 4 5 6 7 8 9 A B C D E F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x80-0x8F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x90-0x9F + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xA0-0xAF + xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xB0-0xBF + xx, xx, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xC0-0xCF + s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xD0-0xDF + s2, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s4, s3, s3, // 0xE0-0xEF + s5, s6, s6, s6, s7, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xF0-0xFF +} + +// AcceptRange gives the range of valid values for the second byte in a UTF-8 +// sequence for any value 
for First that is not ASCII or FirstInvalid. +type AcceptRange struct { + Lo uint8 // lowest value for second byte. + Hi uint8 // highest value for second byte. +} + +// AcceptRanges is a slice of AcceptRange values. For a given byte sequence b +// +// AcceptRanges[First[b[0]]>>AcceptShift] +// +// will give the value of AcceptRange for the multi-byte UTF-8 sequence starting +// at b[0]. +var AcceptRanges = [...]AcceptRange{ + 0: {LoCB, HiCB}, + 1: {0xA0, HiCB}, + 2: {LoCB, 0x9F}, + 3: {0x90, HiCB}, + 4: {LoCB, 0x8F}, +} diff --git a/vendor/golang.org/x/text/runes/cond.go b/vendor/golang.org/x/text/runes/cond.go new file mode 100644 index 000000000..df7aa02db --- /dev/null +++ b/vendor/golang.org/x/text/runes/cond.go @@ -0,0 +1,187 @@ +// Copyright 2015 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +package runes + +import ( + "unicode/utf8" + + "golang.org/x/text/transform" +) + +// Note: below we pass invalid UTF-8 to the tIn and tNotIn transformers as is. +// This is done for various reasons: +// - To retain the semantics of the Nop transformer: if input is passed to a Nop +// one would expect it to be unchanged. +// - It would be very expensive to pass a converted RuneError to a transformer: +// a transformer might need more source bytes after RuneError, meaning that +// the only way to pass it safely is to create a new buffer and manage the +// intermingling of RuneErrors and normal input. +// - Many transformers leave ill-formed UTF-8 as is, so this is not +// inconsistent. Generally ill-formed UTF-8 is only replaced if it is a +// logical consequence of the operation (as for Map) or if it otherwise would +// pose security concerns (as for Remove). +// - An alternative would be to return an error on ill-formed UTF-8, but this +// would be inconsistent with other operations. 
+ +// If returns a transformer that applies tIn to consecutive runes for which +// s.Contains(r) and tNotIn to consecutive runes for which !s.Contains(r). Reset +// is called on tIn and tNotIn at the start of each run. A Nop transformer will +// substitute a nil value passed to tIn or tNotIn. Invalid UTF-8 is translated +// to RuneError to determine which transformer to apply, but is passed as is to +// the respective transformer. +func If(s Set, tIn, tNotIn transform.Transformer) Transformer { + if tIn == nil && tNotIn == nil { + return Transformer{transform.Nop} + } + if tIn == nil { + tIn = transform.Nop + } + if tNotIn == nil { + tNotIn = transform.Nop + } + sIn, ok := tIn.(transform.SpanningTransformer) + if !ok { + sIn = dummySpan{tIn} + } + sNotIn, ok := tNotIn.(transform.SpanningTransformer) + if !ok { + sNotIn = dummySpan{tNotIn} + } + + a := &cond{ + tIn: sIn, + tNotIn: sNotIn, + f: s.Contains, + } + a.Reset() + return Transformer{a} +} + +type dummySpan struct{ transform.Transformer } + +func (d dummySpan) Span(src []byte, atEOF bool) (n int, err error) { + return 0, transform.ErrEndOfSpan +} + +type cond struct { + tIn, tNotIn transform.SpanningTransformer + f func(rune) bool + check func(rune) bool // current check to perform + t transform.SpanningTransformer // current transformer to use +} + +// Reset implements transform.Transformer. +func (t *cond) Reset() { + t.check = t.is + t.t = t.tIn + t.t.Reset() // notIn will be reset on first usage. +} + +func (t *cond) is(r rune) bool { + if t.f(r) { + return true + } + t.check = t.isNot + t.t = t.tNotIn + t.tNotIn.Reset() + return false +} + +func (t *cond) isNot(r rune) bool { + if !t.f(r) { + return true + } + t.check = t.is + t.t = t.tIn + t.tIn.Reset() + return false +} + +// This implementation of Span doesn't help all too much, but it needs to be +// there to satisfy this package's Transformer interface. +// TODO: there are certainly room for improvements, though. 
For example, if +// t.t == transform.Nop (which will a common occurrence) it will save a bundle +// to special-case that loop. +func (t *cond) Span(src []byte, atEOF bool) (n int, err error) { + p := 0 + for n < len(src) && err == nil { + // Don't process too much at a time as the Spanner that will be + // called on this block may terminate early. + const maxChunk = 4096 + max := len(src) + if v := n + maxChunk; v < max { + max = v + } + atEnd := false + size := 0 + current := t.t + for ; p < max; p += size { + r := rune(src[p]) + if r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[p:]); size == 1 { + if !atEOF && !utf8.FullRune(src[p:]) { + err = transform.ErrShortSrc + break + } + } + if !t.check(r) { + // The next rune will be the start of a new run. + atEnd = true + break + } + } + n2, err2 := current.Span(src[n:p], atEnd || (atEOF && p == len(src))) + n += n2 + if err2 != nil { + return n, err2 + } + // At this point either err != nil or t.check will pass for the rune at p. + p = n + size + } + return n, err +} + +func (t *cond) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + p := 0 + for nSrc < len(src) && err == nil { + // Don't process too much at a time, as the work might be wasted if the + // destination buffer isn't large enough to hold the result or a + // transform returns an error early. + const maxChunk = 4096 + max := len(src) + if n := nSrc + maxChunk; n < len(src) { + max = n + } + atEnd := false + size := 0 + current := t.t + for ; p < max; p += size { + r := rune(src[p]) + if r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[p:]); size == 1 { + if !atEOF && !utf8.FullRune(src[p:]) { + err = transform.ErrShortSrc + break + } + } + if !t.check(r) { + // The next rune will be the start of a new run. 
+ atEnd = true + break + } + } + nDst2, nSrc2, err2 := current.Transform(dst[nDst:], src[nSrc:p], atEnd || (atEOF && p == len(src))) + nDst += nDst2 + nSrc += nSrc2 + if err2 != nil { + return nDst, nSrc, err2 + } + // At this point either err != nil or t.check will pass for the rune at p. + p = nSrc + size + } + return nDst, nSrc, err +} diff --git a/vendor/golang.org/x/text/runes/runes.go b/vendor/golang.org/x/text/runes/runes.go new file mode 100644 index 000000000..71933696f --- /dev/null +++ b/vendor/golang.org/x/text/runes/runes.go @@ -0,0 +1,355 @@ +// Copyright 2014 The Go Authors. All rights reserved. +// Use of this source code is governed by a BSD-style +// license that can be found in the LICENSE file. + +// Package runes provide transforms for UTF-8 encoded text. +package runes // import "golang.org/x/text/runes" + +import ( + "unicode" + "unicode/utf8" + + "golang.org/x/text/transform" +) + +// A Set is a collection of runes. +type Set interface { + // Contains returns true if r is contained in the set. + Contains(r rune) bool +} + +type setFunc func(rune) bool + +func (s setFunc) Contains(r rune) bool { + return s(r) +} + +// Note: using funcs here instead of wrapping types result in cleaner +// documentation and a smaller API. + +// In creates a Set with a Contains method that returns true for all runes in +// the given RangeTable. +func In(rt *unicode.RangeTable) Set { + return setFunc(func(r rune) bool { return unicode.Is(rt, r) }) +} + +// In creates a Set with a Contains method that returns true for all runes not +// in the given RangeTable. +func NotIn(rt *unicode.RangeTable) Set { + return setFunc(func(r rune) bool { return !unicode.Is(rt, r) }) +} + +// Predicate creates a Set with a Contains method that returns f(r). +func Predicate(f func(rune) bool) Set { + return setFunc(f) +} + +// Transformer implements the transform.Transformer interface. 
+type Transformer struct { + t transform.SpanningTransformer +} + +func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + return t.t.Transform(dst, src, atEOF) +} + +func (t Transformer) Span(b []byte, atEOF bool) (n int, err error) { + return t.t.Span(b, atEOF) +} + +func (t Transformer) Reset() { t.t.Reset() } + +// Bytes returns a new byte slice with the result of converting b using t. It +// calls Reset on t. It returns nil if any error was found. This can only happen +// if an error-producing Transformer is passed to If. +func (t Transformer) Bytes(b []byte) []byte { + b, _, err := transform.Bytes(t, b) + if err != nil { + return nil + } + return b +} + +// String returns a string with the result of converting s using t. It calls +// Reset on t. It returns the empty string if any error was found. This can only +// happen if an error-producing Transformer is passed to If. +func (t Transformer) String(s string) string { + s, _, err := transform.String(t, s) + if err != nil { + return "" + } + return s +} + +// TODO: +// - Copy: copying strings and bytes in whole-rune units. +// - Validation (maybe) +// - Well-formed-ness (maybe) + +const runeErrorString = string(utf8.RuneError) + +// Remove returns a Transformer that removes runes r for which s.Contains(r). +// Illegal input bytes are replaced by RuneError before being passed to f. +func Remove(s Set) Transformer { + if f, ok := s.(setFunc); ok { + // This little trick cuts the running time of BenchmarkRemove for sets + // created by Predicate roughly in half. + // TODO: special-case RangeTables as well. + return Transformer{remove(f)} + } + return Transformer{remove(s.Contains)} +} + +// TODO: remove transform.RemoveFunc. + +type remove func(r rune) bool + +func (remove) Reset() {} + +// Span implements transform.Spanner. 
+func (t remove) Span(src []byte, atEOF bool) (n int, err error) { + for r, size := rune(0), 0; n < len(src); { + if r = rune(src[n]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[n:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + } else { + err = transform.ErrEndOfSpan + } + break + } + if t(r) { + err = transform.ErrEndOfSpan + break + } + n += size + } + return +} + +// Transform implements transform.Transformer. +func (t remove) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for r, size := rune(0), 0; nSrc < len(src); { + if r = rune(src[nSrc]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + // We replace illegal bytes with RuneError. Not doing so might + // otherwise turn a sequence of invalid UTF-8 into valid UTF-8. + // The resulting byte sequence may subsequently contain runes + // for which t(r) is true that were passed unnoticed. + if !t(utf8.RuneError) { + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + } + nSrc++ + continue + } + if t(r) { + nSrc += size + continue + } + if nDst+size > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < size; i++ { + dst[nDst] = src[nSrc] + nDst++ + nSrc++ + } + } + return +} + +// Map returns a Transformer that maps the runes in the input using the given +// mapping. Illegal bytes in the input are converted to utf8.RuneError before +// being passed to the mapping func. +func Map(mapping func(rune) rune) Transformer { + return Transformer{mapper(mapping)} +} + +type mapper func(rune) rune + +func (mapper) Reset() {} + +// Span implements transform.Spanner. 
+func (t mapper) Span(src []byte, atEOF bool) (n int, err error) { + for r, size := rune(0), 0; n < len(src); n += size { + if r = rune(src[n]); r < utf8.RuneSelf { + size = 1 + } else if r, size = utf8.DecodeRune(src[n:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + } else { + err = transform.ErrEndOfSpan + } + break + } + if t(r) != r { + err = transform.ErrEndOfSpan + break + } + } + return n, err +} + +// Transform implements transform.Transformer. +func (t mapper) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + var replacement rune + var b [utf8.UTFMax]byte + + for r, size := rune(0), 0; nSrc < len(src); { + if r = rune(src[nSrc]); r < utf8.RuneSelf { + if replacement = t(r); replacement < utf8.RuneSelf { + if nDst == len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst] = byte(replacement) + nDst++ + nSrc++ + continue + } + size = 1 + } else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 { + // Invalid rune. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + + if replacement = t(utf8.RuneError); replacement == utf8.RuneError { + if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + nSrc++ + continue + } + } else if replacement = t(r); replacement == r { + if nDst+size > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < size; i++ { + dst[nDst] = src[nSrc] + nDst++ + nSrc++ + } + continue + } + + n := utf8.EncodeRune(b[:], replacement) + + if nDst+n > len(dst) { + err = transform.ErrShortDst + break + } + for i := 0; i < n; i++ { + dst[nDst] = b[i] + nDst++ + } + nSrc += size + } + return +} + +// ReplaceIllFormed returns a transformer that replaces all input bytes that are +// not part of a well-formed UTF-8 code sequence with utf8.RuneError. 
+func ReplaceIllFormed() Transformer { + return Transformer{&replaceIllFormed{}} +} + +type replaceIllFormed struct{ transform.NopResetter } + +func (t replaceIllFormed) Span(src []byte, atEOF bool) (n int, err error) { + for n < len(src) { + // ASCII fast path. + if src[n] < utf8.RuneSelf { + n++ + continue + } + + r, size := utf8.DecodeRune(src[n:]) + + // Look for a valid non-ASCII rune. + if r != utf8.RuneError || size != 1 { + n += size + continue + } + + // Look for short source data. + if !atEOF && !utf8.FullRune(src[n:]) { + err = transform.ErrShortSrc + break + } + + // We have an invalid rune. + err = transform.ErrEndOfSpan + break + } + return n, err +} + +func (t replaceIllFormed) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) { + for nSrc < len(src) { + // ASCII fast path. + if r := src[nSrc]; r < utf8.RuneSelf { + if nDst == len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst] = r + nDst++ + nSrc++ + continue + } + + // Look for a valid non-ASCII rune. + if _, size := utf8.DecodeRune(src[nSrc:]); size != 1 { + if size != copy(dst[nDst:], src[nSrc:nSrc+size]) { + err = transform.ErrShortDst + break + } + nDst += size + nSrc += size + continue + } + + // Look for short source data. + if !atEOF && !utf8.FullRune(src[nSrc:]) { + err = transform.ErrShortSrc + break + } + + // We have an invalid rune. 
+ if nDst+3 > len(dst) { + err = transform.ErrShortDst + break + } + dst[nDst+0] = runeErrorString[0] + dst[nDst+1] = runeErrorString[1] + dst[nDst+2] = runeErrorString[2] + nDst += 3 + nSrc++ + } + return nDst, nSrc, err +} diff --git a/vendor/vendor.json b/vendor/vendor.json index cdc961e3e..1df811219 100644 --- a/vendor/vendor.json +++ b/vendor/vendor.json @@ -1320,6 +1320,24 @@ "revision": "d3c2f16719dedd34911cd626a98bd5879e1caaff", "revisionTime": "2018-04-03T19:54:48Z" }, + { + "checksumSHA1": "B8KN0npDVBBnSDoL8htTSBpFgZ0=", + "path": "github.com/hashicorp/vault-plugin-secrets-ad/plugin", + "revision": "321ea9aa40719a982e9ad39fecd911a212d0d7c0", + "revisionTime": "2018-05-24T23:02:05Z" + }, + { + "checksumSHA1": "qHGmA9y3hKMBSLRWLifD37EaHP4=", + "path": "github.com/hashicorp/vault-plugin-secrets-ad/plugin/client", + "revision": "321ea9aa40719a982e9ad39fecd911a212d0d7c0", + "revisionTime": "2018-05-24T23:02:05Z" + }, + { + "checksumSHA1": "/wFdQSWF1zexkefiI7j+LrREMHk=", + "path": "github.com/hashicorp/vault-plugin-secrets-ad/plugin/util", + "revision": "321ea9aa40719a982e9ad39fecd911a212d0d7c0", + "revisionTime": "2018-05-24T23:02:05Z" + }, { "checksumSHA1": "0BXf2h4FJSUTdVK3m75a1KXnYVA=", "path": "github.com/hashicorp/vault-plugin-secrets-gcp/plugin", From 35cb9bc517c52b4ba82434a3c7f94d362fbc0f0c Mon Sep 17 00:00:00 2001 From: Jeff Mitchell Date: Fri, 25 May 2018 14:38:06 -0400 Subject: [PATCH 4/8] Redo API client locking (#4551) * Redo API client locking This assigns local values when in critical paths, allowing a single API client to much more quickly and safely pipeline requests. Additionally, in order to take that paradigm all the way it changes how timeouts are set. It now uses a context value set on the request instead of configuring the timeout in the http client per request, which was also potentially quite racy. 
Trivially tested with VAULT_CLIENT_TIMEOUT=2 vault write pki/root/generate/internal key_type=rsa key_bits=8192 --- api/client.go | 83 +++++++++++++++++++++++++++++----------------- api/client_test.go | 15 +-------- 2 files changed, 53 insertions(+), 45 deletions(-) diff --git a/api/client.go b/api/client.go index 8f5a29868..ce10fff14 100644 --- a/api/client.go +++ b/api/client.go @@ -388,11 +388,12 @@ func (c *Client) SetAddress(addr string) error { c.modifyLock.Lock() defer c.modifyLock.Unlock() - var err error - if c.addr, err = url.Parse(addr); err != nil { + parsedAddr, err := url.Parse(addr) + if err != nil { return errwrap.Wrapf("failed to set address: {{err}}", err) } + c.addr = parsedAddr return nil } @@ -411,7 +412,8 @@ func (c *Client) SetLimiter(rateLimit float64, burst int) { c.modifyLock.RLock() c.config.modifyLock.Lock() defer c.config.modifyLock.Unlock() - defer c.modifyLock.RUnlock() + c.modifyLock.RUnlock() + c.config.Limiter = rate.NewLimiter(rate.Limit(rateLimit), burst) } @@ -544,14 +546,20 @@ func (c *Client) SetPolicyOverride(override bool) { // doesn't need to be called externally. 
func (c *Client) NewRequest(method, requestPath string) *Request { c.modifyLock.RLock() - defer c.modifyLock.RUnlock() + addr := c.addr + token := c.token + mfaCreds := c.mfaCreds + wrappingLookupFunc := c.wrappingLookupFunc + headers := c.headers + policyOverride := c.policyOverride + c.modifyLock.RUnlock() // if SRV records exist (see https://tools.ietf.org/html/draft-andrews-http-srv-02), lookup the SRV // record and take the highest match; this is not designed for high-availability, just discovery - var host string = c.addr.Host - if c.addr.Port() == "" { + var host string = addr.Host + if addr.Port() == "" { // Internet Draft specifies that the SRV record is ignored if a port is given - _, addrs, err := net.LookupSRV("http", "tcp", c.addr.Hostname()) + _, addrs, err := net.LookupSRV("http", "tcp", addr.Hostname()) if err == nil && len(addrs) > 0 { host = fmt.Sprintf("%s:%d", addrs[0].Target, addrs[0].Port) } @@ -560,12 +568,12 @@ func (c *Client) NewRequest(method, requestPath string) *Request { req := &Request{ Method: method, URL: &url.URL{ - User: c.addr.User, - Scheme: c.addr.Scheme, + User: addr.User, + Scheme: addr.Scheme, Host: host, - Path: path.Join(c.addr.Path, requestPath), + Path: path.Join(addr.Path, requestPath), }, - ClientToken: c.token, + ClientToken: token, Params: make(map[string][]string), } @@ -579,21 +587,19 @@ func (c *Client) NewRequest(method, requestPath string) *Request { lookupPath = requestPath } - req.MFAHeaderVals = c.mfaCreds + req.MFAHeaderVals = mfaCreds - if c.wrappingLookupFunc != nil { - req.WrapTTL = c.wrappingLookupFunc(method, lookupPath) + if wrappingLookupFunc != nil { + req.WrapTTL = wrappingLookupFunc(method, lookupPath) } else { req.WrapTTL = DefaultWrappingLookupFunc(method, lookupPath) } - if c.config.Timeout != 0 { - c.config.HttpClient.Timeout = c.config.Timeout - } - if c.headers != nil { - req.Headers = c.headers + + if headers != nil { + req.Headers = headers } - req.PolicyOverride = c.policyOverride + 
req.PolicyOverride = policyOverride return req } @@ -602,18 +608,23 @@ func (c *Client) NewRequest(method, requestPath string) *Request { // a Vault server not configured with this client. This is an advanced operation // that generally won't need to be called externally. func (c *Client) RawRequest(r *Request) (*Response, error) { - c.modifyLock.RLock() - c.config.modifyLock.RLock() - defer c.config.modifyLock.RUnlock() - - if c.config.Limiter != nil { - c.config.Limiter.Wait(context.Background()) - } - token := c.token + + c.config.modifyLock.RLock() + limiter := c.config.Limiter + maxRetries := c.config.MaxRetries + backoff := c.config.Backoff + httpClient := c.config.HttpClient + timeout := c.config.Timeout + c.config.modifyLock.RUnlock() + c.modifyLock.RUnlock() + if limiter != nil { + limiter.Wait(context.Background()) + } + // Sanity check the token before potentially erroring from the API idx := strings.IndexFunc(token, func(c rune) bool { return !unicode.IsPrint(c) @@ -632,16 +643,23 @@ START: return nil, fmt.Errorf("nil request created") } - backoff := c.config.Backoff + // Set the timeout, if any + var cancelFunc context.CancelFunc + if timeout != 0 { + var ctx context.Context + ctx, cancelFunc = context.WithTimeout(context.Background(), timeout) + req.Request = req.Request.WithContext(ctx) + } + if backoff == nil { backoff = retryablehttp.LinearJitterBackoff } client := &retryablehttp.Client{ - HTTPClient: c.config.HttpClient, + HTTPClient: httpClient, RetryWaitMin: 1000 * time.Millisecond, RetryWaitMax: 1500 * time.Millisecond, - RetryMax: c.config.MaxRetries, + RetryMax: maxRetries, CheckRetry: retryablehttp.DefaultRetryPolicy, Backoff: backoff, ErrorHandler: retryablehttp.PassthroughErrorHandler, @@ -649,6 +667,9 @@ START: var result *Response resp, err := client.Do(req) + if cancelFunc != nil { + cancelFunc() + } if resp != nil { result = &Response{Response: resp} } diff --git a/api/client_test.go b/api/client_test.go index 970354bab..5678478ea 
100644 --- a/api/client_test.go +++ b/api/client_test.go @@ -7,7 +7,6 @@ import ( "os" "strings" "testing" - "time" ) func init() { @@ -244,22 +243,10 @@ func TestClientTimeoutSetting(t *testing.T) { defer os.Setenv(EnvVaultClientTimeout, oldClientTimeout) config := DefaultConfig() config.ReadEnvironment() - client, err := NewClient(config) + _, err := NewClient(config) if err != nil { t.Fatal(err) } - _ = client.NewRequest("PUT", "/") - if client.config.HttpClient.Timeout != time.Second*10 { - t.Fatalf("error setting client timeout using env variable") - } - - // Setting custom client timeout for a new request - client.SetClientTimeout(time.Second * 20) - _ = client.NewRequest("PUT", "/") - if client.config.HttpClient.Timeout != time.Second*20 { - t.Fatalf("error setting client timeout using SetClientTimeout") - } - } type roundTripperFunc func(*http.Request) (*http.Response, error) From 8231825cd1ad770214b4278ba87478efc24015d5 Mon Sep 17 00:00:00 2001 From: Jeff Mitchell Date: Fri, 25 May 2018 15:24:06 -0400 Subject: [PATCH 5/8] Give more time for the self revocation test to run --- vault/token_store_test.go | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/vault/token_store_test.go b/vault/token_store_test.go index 77cb178ba..4f443c840 100644 --- a/vault/token_store_test.go +++ b/vault/token_store_test.go @@ -935,7 +935,7 @@ func TestTokenStore_RevokeSelf(t *testing.T) { t.Fatalf("err: %v\nresp: %#v", err, resp) } - time.Sleep(200 * time.Millisecond) + time.Sleep(1000 * time.Millisecond) lookup := []string{ent1.ID, ent2.ID, ent3.ID, ent4.ID} for _, id := range lookup { From 43c5030eca40ed5b568c215e565a390ed3928013 Mon Sep 17 00:00:00 2001 From: Chris Hoffman Date: Fri, 25 May 2018 15:39:07 -0400 Subject: [PATCH 6/8] pkcs11 docs updates --- website/source/docs/configuration/seal/pkcs11.html.md | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/website/source/docs/configuration/seal/pkcs11.html.md 
b/website/source/docs/configuration/seal/pkcs11.html.md index 5cdbdd9aa..0a69adbb0 100644 --- a/website/source/docs/configuration/seal/pkcs11.html.md +++ b/website/source/docs/configuration/seal/pkcs11.html.md @@ -106,8 +106,9 @@ These parameters apply to the `seal` stanza in the Vault configuration file: specified by the `VAULT_HSM_MECHANISM` environment variable. Currently supported mechanisms (in order of precedence): + - `0x1085` `CKM_AES_CBC_PAD` (HMAC mechanism required) - `0x1082` `CKM_AES_CBC` (HMAC mechanism required) - - `0x1087` `CKM_AES_GCM` **_BETA_** + - `0x1087` `CKM_AES_GCM` - `0x0009` `CKM_RSA_PKCS_OAEP` - `0x0001` `CKM_RSA_PKCS` From 2b41283a9163e16b5610eb6fe5f5c086de1e3061 Mon Sep 17 00:00:00 2001 From: madalynrose Date: Fri, 25 May 2018 16:33:22 -0400 Subject: [PATCH 7/8] UI console (#4631) * adding columnify and ember-cli-cjs-transform * add yargs-parser * remove vendored yargs-parser tokenizer and use cjs transform to import it from actual yargs-parser * add clear command that clears the log, but maintains history * make codemirror have no gutter and be auto-height when rendered in the console output log * add fullscreen command and hook up fullscreen toggle button * hook up copy button --- ui/app/adapters/console.js | 8 + ui/app/components/console/command-input.js | 36 + ui/app/components/console/log-command.js | 3 + ui/app/components/console/log-error.js | 3 + ui/app/components/console/log-help.js | 3 + ui/app/components/console/log-json.js | 3 + ui/app/components/console/log-list.js | 9 + ui/app/components/console/log-object.js | 28 + ui/app/components/console/log-success.js | 3 + ui/app/components/console/log-text.js | 3 + ui/app/components/console/output-log.js | 6 + ui/app/components/console/ui-panel.js | 84 ++ ui/app/components/json-editor.js | 4 + ui/app/controllers/application.js | 18 +- ui/app/helpers/multi-line-join.js | 7 + ui/app/lib/console-helpers.js | 183 ++++ ui/app/routes/vault/cluster/logout.js | 8 +- 
.../vault/cluster/secrets/backend/list.js | 38 +- ui/app/services/console.js | 105 +++ ui/app/styles/components/codemirror.scss | 4 + .../styles/components/console-ui-panel.scss | 149 ++++ ui/app/styles/components/env-banner.scss | 10 + ui/app/styles/components/status-menu.scss | 2 +- ui/app/styles/components/tool-tip.scss | 12 +- ui/app/styles/components/upgrade-overlay.scss | 3 +- ui/app/styles/components/vault-loading.scss | 88 +- ui/app/styles/core.scss | 2 + ui/app/styles/core/buttons.scss | 3 +- ui/app/styles/core/generic.scss | 21 +- ui/app/styles/utils/_bulma_variables.scss | 5 +- ui/app/styles/utils/animations.scss | 4 +- ui/app/styles/utils/mixins.scss | 21 +- ui/app/templates/application.hbs | 16 +- .../components/console/command-input.hbs | 16 + .../components/console/log-command.hbs | 1 + .../components/console/log-error.hbs | 4 + .../templates/components/console/log-help.hbs | 16 + .../templates/components/console/log-json.hbs | 10 + .../templates/components/console/log-list.hbs | 21 + .../components/console/log-object.hbs | 18 + .../components/console/log-success.hbs | 4 + .../templates/components/console/log-text.hbs | 1 + .../components/console/output-log.hbs | 5 + .../templates/components/console/ui-panel.hbs | 16 + ui/app/templates/svg/icons/console-active.hbs | 21 + ui/app/templates/svg/icons/console.hbs | 17 + ui/app/templates/svg/icons/copy.hbs | 1 + .../templates/svg/icons/fullscreen-close.hbs | 1 + .../templates/svg/icons/fullscreen-open.hbs | 1 + ui/ember-cli-build.js | 12 + ui/package.json | 19 +- .../components/console/log-command-test.js | 15 + .../components/console/log-error-test.js | 13 + .../components/console/log-json-test.js | 24 + .../components/console/log-list-test.js | 19 + .../components/console/log-object-test.js | 27 + .../components/console/log-text-test.js | 17 + .../components/console/ui-panel-test.js | 118 +++ ui/tests/pages/components/console/ui-panel.js | 18 + ui/tests/unit/adapters/console-test.js | 13 + 
ui/tests/unit/lib/console-helpers-test.js | 328 +++++++ ui/tests/unit/services/console-test.js | 94 ++ ui/yarn.lock | 824 ++++++++++++++++-- 63 files changed, 2433 insertions(+), 153 deletions(-) create mode 100644 ui/app/adapters/console.js create mode 100644 ui/app/components/console/command-input.js create mode 100644 ui/app/components/console/log-command.js create mode 100644 ui/app/components/console/log-error.js create mode 100644 ui/app/components/console/log-help.js create mode 100644 ui/app/components/console/log-json.js create mode 100644 ui/app/components/console/log-list.js create mode 100644 ui/app/components/console/log-object.js create mode 100644 ui/app/components/console/log-success.js create mode 100644 ui/app/components/console/log-text.js create mode 100644 ui/app/components/console/output-log.js create mode 100644 ui/app/components/console/ui-panel.js create mode 100644 ui/app/helpers/multi-line-join.js create mode 100644 ui/app/lib/console-helpers.js create mode 100644 ui/app/services/console.js create mode 100644 ui/app/styles/components/console-ui-panel.scss create mode 100644 ui/app/styles/components/env-banner.scss create mode 100644 ui/app/templates/components/console/command-input.hbs create mode 100644 ui/app/templates/components/console/log-command.hbs create mode 100644 ui/app/templates/components/console/log-error.hbs create mode 100644 ui/app/templates/components/console/log-help.hbs create mode 100644 ui/app/templates/components/console/log-json.hbs create mode 100644 ui/app/templates/components/console/log-list.hbs create mode 100644 ui/app/templates/components/console/log-object.hbs create mode 100644 ui/app/templates/components/console/log-success.hbs create mode 100644 ui/app/templates/components/console/log-text.hbs create mode 100644 ui/app/templates/components/console/output-log.hbs create mode 100644 ui/app/templates/components/console/ui-panel.hbs create mode 100644 ui/app/templates/svg/icons/console-active.hbs create mode 
100644 ui/app/templates/svg/icons/console.hbs create mode 100644 ui/app/templates/svg/icons/copy.hbs create mode 100644 ui/app/templates/svg/icons/fullscreen-close.hbs create mode 100644 ui/app/templates/svg/icons/fullscreen-open.hbs create mode 100644 ui/tests/integration/components/console/log-command-test.js create mode 100644 ui/tests/integration/components/console/log-error-test.js create mode 100644 ui/tests/integration/components/console/log-json-test.js create mode 100644 ui/tests/integration/components/console/log-list-test.js create mode 100644 ui/tests/integration/components/console/log-object-test.js create mode 100644 ui/tests/integration/components/console/log-text-test.js create mode 100644 ui/tests/integration/components/console/ui-panel-test.js create mode 100644 ui/tests/pages/components/console/ui-panel.js create mode 100644 ui/tests/unit/adapters/console-test.js create mode 100644 ui/tests/unit/lib/console-helpers-test.js create mode 100644 ui/tests/unit/services/console-test.js diff --git a/ui/app/adapters/console.js b/ui/app/adapters/console.js new file mode 100644 index 000000000..473f2ce42 --- /dev/null +++ b/ui/app/adapters/console.js @@ -0,0 +1,8 @@ +import ApplicationAdapter from './application'; + +export default ApplicationAdapter.extend({ + namespace: 'v1', + pathForType(modelName) { + return modelName; + }, +}); diff --git a/ui/app/components/console/command-input.js b/ui/app/components/console/command-input.js new file mode 100644 index 000000000..5ee6eeffe --- /dev/null +++ b/ui/app/components/console/command-input.js @@ -0,0 +1,36 @@ +import Ember from 'ember'; +import keys from 'vault/lib/keycodes'; + +export default Ember.Component.extend({ + 'data-test-component': 'console/command-input', + classNames: 'console-ui-input', + onExecuteCommand() {}, + onFullscreen() {}, + onValueUpdate() {}, + onShiftCommand() {}, + value: null, + isFullscreen: null, + + didRender() { + this.element.scrollIntoView(); + }, + actions: { + 
handleKeyUp(event) { + const keyCode = event.keyCode; + switch (keyCode) { + case keys.ENTER: + this.get('onExecuteCommand')(event.target.value); + break; + case keys.UP: + case keys.DOWN: + this.get('onShiftCommand')(keyCode); + break; + default: + this.get('onValueUpdate')(event.target.value); + } + }, + fullscreen() { + this.get('onFullscreen')(); + } + }, +}); diff --git a/ui/app/components/console/log-command.js b/ui/app/components/console/log-command.js new file mode 100644 index 000000000..6e705e676 --- /dev/null +++ b/ui/app/components/console/log-command.js @@ -0,0 +1,3 @@ +import Ember from 'ember'; + +export default Ember.Component.extend({}); diff --git a/ui/app/components/console/log-error.js b/ui/app/components/console/log-error.js new file mode 100644 index 000000000..6e705e676 --- /dev/null +++ b/ui/app/components/console/log-error.js @@ -0,0 +1,3 @@ +import Ember from 'ember'; + +export default Ember.Component.extend({}); diff --git a/ui/app/components/console/log-help.js b/ui/app/components/console/log-help.js new file mode 100644 index 000000000..6e705e676 --- /dev/null +++ b/ui/app/components/console/log-help.js @@ -0,0 +1,3 @@ +import Ember from 'ember'; + +export default Ember.Component.extend({}); diff --git a/ui/app/components/console/log-json.js b/ui/app/components/console/log-json.js new file mode 100644 index 000000000..6e705e676 --- /dev/null +++ b/ui/app/components/console/log-json.js @@ -0,0 +1,3 @@ +import Ember from 'ember'; + +export default Ember.Component.extend({}); diff --git a/ui/app/components/console/log-list.js b/ui/app/components/console/log-list.js new file mode 100644 index 000000000..fcca15f27 --- /dev/null +++ b/ui/app/components/console/log-list.js @@ -0,0 +1,9 @@ +import Ember from 'ember'; +const { computed } = Ember; + +export default Ember.Component.extend({ + content: null, + list: computed('content', function() { + return this.get('content').keys; + }), +}); diff --git a/ui/app/components/console/log-object.js 
b/ui/app/components/console/log-object.js new file mode 100644 index 000000000..275f1edb7 --- /dev/null +++ b/ui/app/components/console/log-object.js @@ -0,0 +1,28 @@ +import Ember from 'ember'; +import columnify from 'columnify'; +const { computed } = Ember; + +export function stringifyObjectValues(data) { + Object.keys(data).forEach(item => { + let val = data[item]; + if (typeof val !== 'string') { + val = JSON.stringify(val); + } + data[item] = val; + }); +} + +export default Ember.Component.extend({ + content: null, + columns: computed('content', function() { + let data = this.get('content'); + stringifyObjectValues(data); + + return columnify(data, { + preserveNewLines: true, + headingTransform: function(heading) { + return Ember.String.capitalize(heading); + }, + }); + }), +}); diff --git a/ui/app/components/console/log-success.js b/ui/app/components/console/log-success.js new file mode 100644 index 000000000..6e705e676 --- /dev/null +++ b/ui/app/components/console/log-success.js @@ -0,0 +1,3 @@ +import Ember from 'ember'; + +export default Ember.Component.extend({}); diff --git a/ui/app/components/console/log-text.js b/ui/app/components/console/log-text.js new file mode 100644 index 000000000..6e705e676 --- /dev/null +++ b/ui/app/components/console/log-text.js @@ -0,0 +1,3 @@ +import Ember from 'ember'; + +export default Ember.Component.extend({}); diff --git a/ui/app/components/console/output-log.js b/ui/app/components/console/output-log.js new file mode 100644 index 000000000..a4c209e24 --- /dev/null +++ b/ui/app/components/console/output-log.js @@ -0,0 +1,6 @@ +import Ember from 'ember'; + +export default Ember.Component.extend({ + 'data-test-component': 'console/output-log', + log: null, +}); diff --git a/ui/app/components/console/ui-panel.js b/ui/app/components/console/ui-panel.js new file mode 100644 index 000000000..fe35294d3 --- /dev/null +++ b/ui/app/components/console/ui-panel.js @@ -0,0 +1,84 @@ +import Ember from 'ember'; +import { + 
parseCommand, + extractDataAndFlags, + logFromResponse, + logFromError, + logErrorFromInput, + executeUICommand, +} from 'vault/lib/console-helpers'; + +const { inject, computed } = Ember; + +export default Ember.Component.extend({ + classNames: 'console-ui-panel-scroller', + classNameBindings: ['isFullscreen:fullscreen'], + isFullscreen: false, + console: inject.service(), + inputValue: null, + log: computed.alias('console.log'), + + logAndOutput(command, logContent) { + this.set('inputValue', ''); + this.get('console').logAndOutput(command, logContent); + }, + + executeCommand(command, shouldThrow = false) { + let service = this.get('console'); + let serviceArgs; + + if(executeUICommand(command, (args) => this.logAndOutput(args), (args) => service.clearLog(args), () => this.toggleProperty('isFullscreen'))){ + return; + } + + // parse to verify it's valid + try { + serviceArgs = parseCommand(command, shouldThrow); + } catch (e) { + this.logAndOutput(command, { type: 'help' }); + return; + } + // we have a invalid command but don't want to throw + if (serviceArgs === false) { + return; + } + + let [method, flagArray, path, dataArray] = serviceArgs; + + if (dataArray || flagArray) { + var { data, flags } = extractDataAndFlags(dataArray, flagArray); + } + + let inputError = logErrorFromInput(path, method, flags, dataArray); + if (inputError) { + this.logAndOutput(command, inputError); + return; + } + let serviceFn = service[method]; + serviceFn.call(service, path, data, flags.wrapTTL) + .then(resp => { + this.logAndOutput(command, logFromResponse(resp, path, method, flags)); + }) + .catch(error => { + this.logAndOutput(command, logFromError(error, path, method)); + }); + }, + + shiftCommandIndex(keyCode) { + this.get('console').shiftCommandIndex(keyCode, (val) => { + this.set('inputValue', val); + }); + }, + + actions: { + toggleFullscreen() { + this.toggleProperty('isFullscreen'); + }, + executeCommand(val) { + this.executeCommand(val, true); + }, + 
shiftCommandIndex(direction) { + this.shiftCommandIndex(direction); + }, + }, +}); diff --git a/ui/app/components/json-editor.js b/ui/app/components/json-editor.js index 7b1a1c669..8377e75e3 100644 --- a/ui/app/components/json-editor.js +++ b/ui/app/components/json-editor.js @@ -18,6 +18,10 @@ export default IvyCodemirrorComponent.extend({ 'data-test-component': 'json-editor', updateCodeMirrorOptions() { const options = assign({}, JSON_EDITOR_DEFAULTS, this.get('options')); + if (options.autoHeight) { + options.viewportMargin = Infinity; + delete options.autoHeight; + } if (options) { Object.keys(options).forEach(function(option) { diff --git a/ui/app/controllers/application.js b/ui/app/controllers/application.js index 5f6f6c9a9..353649234 100644 --- a/ui/app/controllers/application.js +++ b/ui/app/controllers/application.js @@ -1,18 +1,21 @@ import Ember from 'ember'; import config from '../config/environment'; +const { computed, inject } = Ember; export default Ember.Controller.extend({ env: config.environment, - auth: Ember.inject.service(), - vaultVersion: Ember.inject.service('version'), - activeCluster: Ember.computed('auth.activeCluster', function() { + auth: inject.service(), + vaultVersion: inject.service('version'), + console: inject.service(), + consoleOpen: computed.alias('console.isOpen'), + activeCluster: computed('auth.activeCluster', function() { return this.store.peekRecord('cluster', this.get('auth.activeCluster')); }), - activeClusterName: Ember.computed('auth.activeCluster', function() { + activeClusterName: computed('auth.activeCluster', function() { const activeCluster = this.store.peekRecord('cluster', this.get('auth.activeCluster')); return activeCluster ? 
activeCluster.get('name') : null; }), - showNav: Ember.computed( + showNav: computed( 'activeClusterName', 'auth.currentToken', 'activeCluster.dr.isSecondary', @@ -30,4 +33,9 @@ export default Ember.Controller.extend({ } } ), + actions: { + toggleConsole() { + this.toggleProperty('consoleOpen'); + }, + }, }); diff --git a/ui/app/helpers/multi-line-join.js b/ui/app/helpers/multi-line-join.js new file mode 100644 index 000000000..cd22380f1 --- /dev/null +++ b/ui/app/helpers/multi-line-join.js @@ -0,0 +1,7 @@ +import Ember from 'ember'; + +export function multiLineJoin([arr]) { + return arr.join('\n'); +} + +export default Ember.Helper.helper(multiLineJoin); diff --git a/ui/app/lib/console-helpers.js b/ui/app/lib/console-helpers.js new file mode 100644 index 000000000..0d8b12128 --- /dev/null +++ b/ui/app/lib/console-helpers.js @@ -0,0 +1,183 @@ +import keys from 'vault/lib/keycodes'; +import argTokenizer from 'yargs-parser-tokenizer'; + +const supportedCommands = ['read', 'write', 'list', 'delete']; +const uiCommands = ['clearall', 'clear', 'fullscreen']; + +export function extractDataAndFlags(data, flags) { + return data.concat(flags).reduce((accumulator, val) => { + // will be "key=value" or "-flag=value" or "foo=bar=baz" + // split on the first = + let [item, value] = val.split(/=(.+)/); + if (item.startsWith('-')) { + let flagName = item.replace(/^-/, ''); + if (flagName === 'wrap-ttl') { + flagName = 'wrapTTL'; + } + accumulator.flags[flagName] = value || true; + return accumulator; + } + // if it exists in data already, then we have multiple + // foo=bar in the list and need to make it an array + if (accumulator.data[item]) { + accumulator.data[item] = [].concat(accumulator.data[item], value); + return accumulator; + } + accumulator.data[item] = value; + + return accumulator; + }, { data: {}, flags: {} }); +} + +export function executeUICommand(command, logAndOutput, clearLog, toggleFullscreen){ + const isUICommand = uiCommands.includes(command); + 
if(isUICommand){ + logAndOutput(command); + } + switch(command){ + case 'clearall': + clearLog(true); + break; + case 'clear': + clearLog(); + break; + case 'fullscreen': + toggleFullscreen(); + break; + } + + return isUICommand; +} + +export function parseCommand(command, shouldThrow) { + let args = argTokenizer(command); + if (args[0] === 'vault') { + args.shift(); + } + + let [method, ...rest] = args; + let path; + let flags = []; + let data = []; + + rest.forEach(arg => { + if (arg.startsWith('-')) { + flags.push(arg); + } else { + if (path) { + data.push(arg); + } else { + path = arg; + } + } + }); + + if (!supportedCommands.includes(method)) { + if (shouldThrow) { + throw new Error('invalid command'); + } + return false; + } + return [method, flags, path, data]; +} + +export function logFromResponse(response, path, method, flags) { + if (!response) { + let message = + method === 'write' + ? `Success! Data written to: ${path}` + : `Success! Data deleted (if it existed) at: ${path}`; + + return { type: 'success', content: message }; + } + let { format, field } = flags; + let secret = response.auth || response.data || response.wrap_info; + + if (field) { + let fieldValue = secret[field]; + let response; + if (fieldValue) { + if (format && format === 'json') { + return { type: 'json', content: fieldValue }; + } + switch (typeof fieldValue) { + case 'string': + response = { type: 'text', content: fieldValue }; + break; + default: + response = { type: 'object', content: fieldValue }; + break; + } + } else { + response = { type: 'error', content: `Field "${field}" not present in secret` }; + } + return response; + } + + if (format && format === 'json') { + // just print whole response + return { type: 'json', content: response }; + } + + if (method === 'list') { + return { type: 'list', content: secret }; + } + + return { type: 'object', content: secret }; +} + +export function logFromError(error, vaultPath, method) { + let content; + let { httpStatus, path } = 
error; + let verbClause = { + read: 'reading from', + write: 'writing to', + list: 'listing', + delete: 'deleting at', + }[method]; + + content = `Error ${verbClause}: ${vaultPath}.\nURL: ${path}\nCode: ${httpStatus}`; + + if (typeof error.errors[0] === 'string') { + content = `${content}\nErrors:\n ${error.errors.join('\n ')}`; + } + + return { type: 'error', content }; +} + +export function shiftCommandIndex(keyCode, history, index) { + let newInputValue; + let commandHistoryLength = history.length; + + if (!commandHistoryLength) { return []; } + + if (keyCode === keys.UP) { + index -= 1; + if (index < 0) { + index = commandHistoryLength - 1; + } + } else { + index += 1; + if (index === commandHistoryLength) { + newInputValue = ''; + } + if (index > commandHistoryLength) { + index -= 1; + } + } + + if (newInputValue !== '') { + newInputValue = history.objectAt(index).content; + } + + return [index, newInputValue]; +} + +export function logErrorFromInput(path, method, flags, dataArray) { + if (path === undefined) { + return { type: 'error', content: 'A path is required to make a request.' 
}; + } + if (method === 'write' && !flags.force && dataArray.length === 0) { + return { type: 'error', content: 'Must supply data or use -force' }; + } +} diff --git a/ui/app/routes/vault/cluster/logout.js b/ui/app/routes/vault/cluster/logout.js index 0ddc7c110..824d93116 100644 --- a/ui/app/routes/vault/cluster/logout.js +++ b/ui/app/routes/vault/cluster/logout.js @@ -1,14 +1,18 @@ import Ember from 'ember'; import ModelBoundaryRoute from 'vault/mixins/model-boundary-route'; +const { inject } = Ember; export default Ember.Route.extend(ModelBoundaryRoute, { - auth: Ember.inject.service(), - flashMessages: Ember.inject.service(), + auth: inject.service(), + flashMessages: inject.service(), + console: inject.service(), modelTypes: ['secret', 'secret-engine'], beforeModel() { this.get('auth').deleteCurrentToken(); + this.get('console').set('isOpen', false); + this.get('console').clearLog(true); this.clearModelCache(); this.replaceWith('vault.cluster'); this.get('flashMessages').clearMessages(); diff --git a/ui/app/routes/vault/cluster/secrets/backend/list.js b/ui/app/routes/vault/cluster/secrets/backend/list.js index dfbde5b70..1731c1525 100644 --- a/ui/app/routes/vault/cluster/secrets/backend/list.js +++ b/ui/app/routes/vault/cluster/secrets/backend/list.js @@ -49,25 +49,25 @@ export default Ember.Route.extend({ return Ember.RSVP.hash({ secret, secrets: this.store - .lazyPaginatedQuery(this.getModelType(backend, params.tab), { - id: secret, - backend, - responsePath: 'data.keys', - page: params.page, - pageFilter: params.pageFilter, - size: 100, - }) - .then(model => { - this.set('has404', false); - return model; - }) - .catch(err => { - if (backendModel && err.httpStatus === 404 && secret === '') { - return []; - } else { - throw err; - } - }) + .lazyPaginatedQuery(this.getModelType(backend, params.tab), { + id: secret, + backend, + responsePath: 'data.keys', + page: params.page, + pageFilter: params.pageFilter, + size: 100, + }) + .then(model => { + 
this.set('has404', false); + return model; + }) + .catch(err => { + if (backendModel && err.httpStatus === 404 && secret === '') { + return []; + } else { + throw err; + } + }), }); }, diff --git a/ui/app/services/console.js b/ui/app/services/console.js new file mode 100644 index 000000000..e35555f3b --- /dev/null +++ b/ui/app/services/console.js @@ -0,0 +1,105 @@ +// Low level service that allows users to input paths to make requests to vault +// this service provides the UI synecdote to the cli commands read, write, delete, and list +import Ember from 'ember'; +import { + shiftCommandIndex, +} from 'vault/lib/console-helpers'; + +const { Service, getOwner, computed } = Ember; + +export function sanitizePath(path) { + //remove whitespace + remove trailing and leading slashes + return path.trim().replace(/^\/+|\/+$/g, ''); +} +export function ensureTrailingSlash(path) { + return path.replace(/(\w+[^/]$)/g, '$1/'); +} + +const VERBS = { + read: 'GET', + list: 'GET', + write: 'POST', + delete: 'DELETE', +}; + +export default Service.extend({ + isOpen: false, + + adapter() { + return getOwner(this).lookup('adapter:console'); + }, + commandHistory: computed('log.[]', function() { + return this.get('log').filterBy('type', 'command'); + }), + log: computed(function() { + return []; + }), + commandIndex: null, + + shiftCommandIndex(keyCode, setCommandFn = () => {}) { + let [newIndex, newCommand] = shiftCommandIndex( + keyCode, + this.get('commandHistory'), + this.get('commandIndex') + ); + if (newCommand !== undefined && newIndex !== undefined) { + this.set('commandIndex', newIndex); + setCommandFn(newCommand); + } + }, + + clearLog(clearAll=false) { + let log = this.get('log'); + let history; + if (!clearAll) { + history = this.get('commandHistory').slice(); + history.setEach('hidden', true); + } + log.clear(); + if (history) { + log.addObjects(history); + } + }, + + logAndOutput(command, logContent) { + let log = this.get('log'); + log.pushObject({ type: 'command', 
content: command }); + this.set('commandIndex', null); + if (logContent) { + log.pushObject(logContent); + } + }, + + ajax(operation, path, options = {}) { + let verb = VERBS[operation]; + let adapter = this.adapter(); + let url = adapter.buildURL(path); + let { data, wrapTTL } = options; + return adapter.ajax(url, verb, { + data, + wrapTTL, + }); + }, + + read(path, data, wrapTTL) { + return this.ajax('read', sanitizePath(path), { wrapTTL }); + }, + + write(path, data, wrapTTL) { + return this.ajax('write', sanitizePath(path), { data, wrapTTL }); + }, + + delete(path) { + return this.ajax('delete', sanitizePath(path)); + }, + + list(path, data, wrapTTL) { + let listPath = ensureTrailingSlash(sanitizePath(path)); + return this.ajax('list', listPath, { + data: { + list: true, + }, + wrapTTL, + }); + }, +}); diff --git a/ui/app/styles/components/codemirror.scss b/ui/app/styles/components/codemirror.scss index 37f773a04..68cb9971c 100644 --- a/ui/app/styles/components/codemirror.scss +++ b/ui/app/styles/components/codemirror.scss @@ -171,3 +171,7 @@ $gutter-grey: #2a2f36; } } } + +.cm-s-auto-height.CodeMirror { + height: auto; +} diff --git a/ui/app/styles/components/console-ui-panel.scss b/ui/app/styles/components/console-ui-panel.scss new file mode 100644 index 000000000..9649bb8bb --- /dev/null +++ b/ui/app/styles/components/console-ui-panel.scss @@ -0,0 +1,149 @@ +.console-ui-panel-scroller { + background: linear-gradient(to right, #191A1C, #1B212D); + height: 0; + left: 0; + min-height: 400px; + overflow: auto; + position: fixed; + right: 0; + transform: translate3d(0, -400px, 0); + transition: min-height $speed ease-out, transform $speed ease-in; + will-change: transform, min-height; + z-index: 199; +} + +.console-ui-panel { + display: flex; + flex-direction: column; + justify-content: flex-end; + padding: $size-8 $size-8 $size-4; + min-height: 100%; + color: $white; + font-size: $body-size; + font-weight: $font-weight-semibold; + transition: justify-content 
$speed ease-in; + + + pre, p { + background: none; + color: inherit; + font-size: $body-size; + + &:not(.console-ui-command):not(.CodeMirror-line) { + padding-left: $console-spacing; + } + } + + .cm-s-hashi.CodeMirror { + background-color: rgba($black, 0.5) !important; + font-weight: $font-weight-normal; + margin-left: $console-spacing; + padding: $size-8 $size-4; + } + + .button, + { + background: transparent; + border: none; + color: $grey-dark; + min-width: 0; + padding: 0 $size-8; + + &.active, + &:hover { + background: $blue; + color: $white; + } + } +} + +.console-ui-input { + align-items: center; + display: flex; + + + input { + background-color: rgba($black, 0.5); + border: 0; + caret-color: $white; + color: $white; + flex: 1; + font-family: $family-monospace; + font-size: $body-size; + font-weight: $font-weight-bold; + margin-left: -$size-10; + outline: none; + padding: $size-10; + transition: background-color $speed; + } +} + +.console-ui-command { + line-height: 2; +} + +.console-ui-output { + transition: background-color $speed; + padding-right: $size-2; + position: relative; + + .console-ui-output-actions { + opacity: 0; + position: absolute; + right: 0; + top: 0; + transition: opacity $speed; + will-change: opacity; + } + + &:hover { + background: rgba($black, 0.25); + + .console-ui-output-actions { + opacity: 1; + } + } +} + +.console-ui-alert { + margin-left: calc(#{$console-spacing} - 0.33rem); + position: relative; + + .icon { + position: absolute; + left: 0; + top: 0; + } +} + +.panel-open .console-ui-panel-scroller { + transform: translate3d(0, 0, 0); +} +.panel-open .console-ui-panel-scroller.fullscreen { + bottom: 0; + top: 0; + min-height: 100%; +} + +.panel-open { + .navbar, .navbar-sections{ + transition: transform $speed ease-in; + } +} + +.panel-open.panel-fullscreen { + .navbar, .navbar-sections{ + transform: translate3d(0, -100px, 0); + } +} + +.page-container > header { + background: linear-gradient(to right, #191A1C, #1B212D); +} + 
+header .navbar, +header .navbar-sections { + z-index: 200; + transform: translate3d(0, 0, 0); + will-change: transform; +} diff --git a/ui/app/styles/components/env-banner.scss b/ui/app/styles/components/env-banner.scss new file mode 100644 index 000000000..d2851dcf6 --- /dev/null +++ b/ui/app/styles/components/env-banner.scss @@ -0,0 +1,10 @@ +.env-banner { + &, + &:not(:last-child):not(:last-child) { + margin: 0; + } + + .level-item { + padding: $size-10 $size-8; + } +} diff --git a/ui/app/styles/components/status-menu.scss b/ui/app/styles/components/status-menu.scss index 1c6937dae..1fe965b99 100644 --- a/ui/app/styles/components/status-menu.scss +++ b/ui/app/styles/components/status-menu.scss @@ -59,7 +59,7 @@ .is-status-chevron { line-height: 0; - padding: 0.25em 0 0.25em 0.25em; + padding: 0.3em 0 0 $size-11; } .status-menu-user-trigger { diff --git a/ui/app/styles/components/tool-tip.scss b/ui/app/styles/components/tool-tip.scss index a971e6e9b..5d8c02b3c 100644 --- a/ui/app/styles/components/tool-tip.scss +++ b/ui/app/styles/components/tool-tip.scss @@ -5,7 +5,7 @@ .box { position: relative; color: $white; - width: 200px; + max-width: 200px; background: $grey; padding: 0.5rem; line-height: 1.4; @@ -28,6 +28,16 @@ .ember-basic-dropdown-content--left.tool-tip { margin: 8px 0 0 -11px; } + +.ember-basic-dropdown-content--below.ember-basic-dropdown-content--right.tool-tip { + @include css-top-arrow(8px, $grey, 1px, $grey-dark, calc(100% - 20px)); +} +.ember-basic-dropdown-content--above.ember-basic-dropdown-content--right.tool-tip { + @include css-bottom-arrow(8px, $grey, 1px, $grey-dark, calc(100% - 20px)); +} +.ember-basic-dropdown-content--above.tool-tip { + margin-top: -2px; +} .tool-tip-trigger { border: none; border-radius: 20px; diff --git a/ui/app/styles/components/upgrade-overlay.scss b/ui/app/styles/components/upgrade-overlay.scss index 5ec71408e..4ad81dc4c 100644 --- a/ui/app/styles/components/upgrade-overlay.scss +++ 
b/ui/app/styles/components/upgrade-overlay.scss @@ -10,7 +10,8 @@ } .modal-background { - background-image: url("/ui/vault-hex.svg"), linear-gradient(90deg, #191A1C, #1B212D); + background-image: url("/ui/vault-hex.svg"), + linear-gradient(90deg, #191a1c, #1b212d); opacity: 0.97; } diff --git a/ui/app/styles/components/vault-loading.scss b/ui/app/styles/components/vault-loading.scss index 1634dfa7e..610375007 100644 --- a/ui/app/styles/components/vault-loading.scss +++ b/ui/app/styles/components/vault-loading.scss @@ -1,52 +1,52 @@ - @keyframes vault-loading-animation { - 0%, - 70%, - 100% { - transform: scale3D(1, 1, 1); - } - - 35% { - transform: scale3D(0, 0, 1); - } +@keyframes vault-loading-animation { + 0%, + 70%, + 100% { + transform: scale3D(1, 1, 1); } - #vault-loading { - polygon { - animation: vault-loading-animation 1.3s infinite ease-in-out; - transform-origin: 50% 50%; - fill: #DCE2E9; - } - - .vault-loading-order-1 { - animation-delay: .1s; - } - - .vault-loading-order-2 { - animation-delay: .2s; - } - - .vault-loading-order-3 { - animation-delay: .3s; - } - - .vault-loading-order-4 { - animation-delay: .4s; - } + 35% { + transform: scale3D(0, 0, 1); + } +} + +#vault-loading { + polygon { + animation: vault-loading-animation 1.3s infinite ease-in-out; + transform-origin: 50% 50%; + fill: #dce2e9; } - #vault-loading-animated { - @media all and (-ms-high-contrast: none), (-ms-high-contrast: active) { - // For IE11 - display: none; - } + .vault-loading-order-1 { + animation-delay: .1s; } - #vault-loading-static { + .vault-loading-order-2 { + animation-delay: .2s; + } + + .vault-loading-order-3 { + animation-delay: .3s; + } + + .vault-loading-order-4 { + animation-delay: .4s; + } +} + +#vault-loading-animated { + @media all and (-ms-high-contrast: none), (-ms-high-contrast: active) { + // For IE11 display: none; - font-size: 9px; + } +} - @media all and (-ms-high-contrast: none), (-ms-high-contrast: active) { - // For IE11 - display: block; - } - } \ No 
newline at end of file +#vault-loading-static { + display: none; + font-size: 9px; + + @media all and (-ms-high-contrast: none), (-ms-high-contrast: active) { + // For IE11 + display: block; + } +} diff --git a/ui/app/styles/core.scss b/ui/app/styles/core.scss index 3c8acc643..bad506107 100644 --- a/ui/app/styles/core.scss +++ b/ui/app/styles/core.scss @@ -46,6 +46,8 @@ @import "./components/box-label"; @import "./components/codemirror"; @import "./components/confirm"; +@import "./components/console-ui-panel"; +@import "./components/env-banner"; @import "./components/form-section"; @import "./components/global-flash"; @import "./components/init-illustration"; diff --git a/ui/app/styles/core/buttons.scss b/ui/app/styles/core/buttons.scss index 5efa15a61..9472a2873 100644 --- a/ui/app/styles/core/buttons.scss +++ b/ui/app/styles/core/buttons.scss @@ -12,7 +12,8 @@ $button-box-shadow-standard: 0 3px 1px 0 rgba($black, 0.12); min-width: 6rem; padding: $size-10 $size-8; text-decoration: none; - transition: background-color $speed, border-color $speed, box-shadow $speed, color $speed; + transition: background-color $speed, border-color $speed, box-shadow $speed, + color $speed; vertical-align: middle; &.is-icon { diff --git a/ui/app/styles/core/generic.scss b/ui/app/styles/core/generic.scss index 358b6dd32..664c2a56b 100644 --- a/ui/app/styles/core/generic.scss +++ b/ui/app/styles/core/generic.scss @@ -33,13 +33,16 @@ input::-webkit-inner-spin-button { .link { background: transparent; border: 0; - color: $blue; - cursor: pointer; - display: inline; - font: inherit; - line-height: normal; - margin: 0; - padding: 0; - text-decoration: underline; - -moz-user-select: text; + color: $blue; + cursor: pointer; + display: inline; + font: inherit; + line-height: normal; + margin: 0; + padding: 0; + text-decoration: underline; + -webkit-user-select: text; /* Chrome all / Safari all */ + -moz-user-select: text; /* Firefox all */ + -ms-user-select: text; /* IE 10+ */ + user-select: 
text; } diff --git a/ui/app/styles/utils/_bulma_variables.scss b/ui/app/styles/utils/_bulma_variables.scss index 7d6812849..6a97abeda 100644 --- a/ui/app/styles/utils/_bulma_variables.scss +++ b/ui/app/styles/utils/_bulma_variables.scss @@ -37,7 +37,9 @@ $border: $grey-light; $hr-margin: 1rem 0; //typography -$family-sans: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif; +$family-sans: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", + "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", + sans-serif; $family-primary: $family-sans; $body-size: 14px; $size-3: (24/14) + 0rem; @@ -46,6 +48,7 @@ $size-8: (12/14) + 0rem; $size-9: 0.75rem; $size-10: 0.5rem; $size-11: 0.25rem; +$console-spacing: 1.5rem; $size-small: $size-8; $font-weight-normal: 400; $font-weight-semibold: 600; diff --git a/ui/app/styles/utils/animations.scss b/ui/app/styles/utils/animations.scss index 8c7c3b2d7..0020823b4 100644 --- a/ui/app/styles/utils/animations.scss +++ b/ui/app/styles/utils/animations.scss @@ -14,7 +14,7 @@ } @include keyframes(drop-fade-below) { - 0% { + 0% { opacity: 0; transform: translateY(-1rem); } @@ -25,7 +25,7 @@ } @include keyframes(drop-fade-above) { - 0% { + 0% { opacity: 0; transform: translateY(1rem); } diff --git a/ui/app/styles/utils/mixins.scss b/ui/app/styles/utils/mixins.scss index 97f3cc012..9832a8e86 100644 --- a/ui/app/styles/utils/mixins.scss +++ b/ui/app/styles/utils/mixins.scss @@ -1,11 +1,15 @@ -@mixin css-top-arrow($size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) { +@mixin css-arrow($vertical-direction, $size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) { & { border: 1px solid $border-color; } &:after, &:before { - bottom: 100%; + @if ($vertical-direction == 'top') { + bottom: 100%; + } @else { + top: 100%; + } border: solid transparent; content: " "; 
height: 0; @@ -28,6 +32,12 @@ left: calc(#{$left} + #{$left-offset}); margin-left: -($size + round(1.41421356 * $border-width)); } + &:before, + &:after { + @if ($vertical-direction == 'bottom') { + transform: rotate(180deg); + } + } @at-root .ember-basic-dropdown-content--left#{&} { &:after, @@ -38,6 +48,13 @@ } } +@mixin css-top-arrow($size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) { + @include css-arrow('top', $size, $color, $border-width, $border-color, $left, $left-offset); +} +@mixin css-bottom-arrow($size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) { + @include css-arrow('bottom', $size, $color, $border-width, $border-color, $left, $left-offset); +} + @mixin vault-block { &:not(:last-child) { margin-bottom: (5/14) + 0rem; diff --git a/ui/app/templates/application.hbs b/ui/app/templates/application.hbs index c3d67ecfe..e4480ce1b 100644 --- a/ui/app/templates/application.hbs +++ b/ui/app/templates/application.hbs @@ -1,6 +1,6 @@
{{#if showNav}} -
+
{{/if}}
@@ -129,7 +141,7 @@
{{#if (eq env "development") }} -
+
{{i-con glyph="wand" class="type-icon"}}Local Development
diff --git a/ui/app/templates/components/console/command-input.hbs b/ui/app/templates/components/console/command-input.hbs new file mode 100644 index 000000000..3d1496868 --- /dev/null +++ b/ui/app/templates/components/console/command-input.hbs @@ -0,0 +1,16 @@ +{{i-con glyph="chevron-right" size=12}} + +{{#tool-tip horizontalPosition="auto-right" verticalPosition=(if isFullscreen "above" "below") as |d|}} + {{#d.trigger tagName="button" type="button" class=(concat "button is-compact" (if isFullscreen " active")) click=(action "fullscreen") data-test-tool-tip-trigger=true}} + {{i-con glyph=(if isFullscreen "fullscreen-close" "fullscreen-open") aria-hidden="true" size=16}} + {{/d.trigger}} + {{#d.content class="tool-tip"}} +
+ {{#if isFullscreen}} + Minimize + {{else}} + Maximize + {{/if}} +
+ {{/d.content}} +{{/tool-tip}} diff --git a/ui/app/templates/components/console/log-command.hbs b/ui/app/templates/components/console/log-command.hbs new file mode 100644 index 000000000..39b9e7d1f --- /dev/null +++ b/ui/app/templates/components/console/log-command.hbs @@ -0,0 +1 @@ +
{{i-con glyph="chevron-right" size=12}}{{content}}
diff --git a/ui/app/templates/components/console/log-error.hbs b/ui/app/templates/components/console/log-error.hbs new file mode 100644 index 000000000..add86ec49 --- /dev/null +++ b/ui/app/templates/components/console/log-error.hbs @@ -0,0 +1,4 @@ +
+ {{i-con glyph="close-circled" aria-hidden="true" size=12}} +
{{content}}
+
diff --git a/ui/app/templates/components/console/log-help.hbs b/ui/app/templates/components/console/log-help.hbs new file mode 100644 index 000000000..0022c1f39 --- /dev/null +++ b/ui/app/templates/components/console/log-help.hbs @@ -0,0 +1,16 @@ +
+{{i-con glyph="information-circled" aria-hidden="true" size=12}} +
Usage: vault <command> [args]
+
+Commands:
+  read        Read data and retrieve secrets
+  write       Write data, configuration, and secrets
+  delete      Delete secrets and configuration
+  list        List data or secrets
+
+Web CLI Commands:
+  fullscreen  Toggle fullscreen display
+  clear       Clear output from the log
+  clearall    Clear output and command history
+
+
diff --git a/ui/app/templates/components/console/log-json.hbs b/ui/app/templates/components/console/log-json.hbs new file mode 100644 index 000000000..293ee6f23 --- /dev/null +++ b/ui/app/templates/components/console/log-json.hbs @@ -0,0 +1,10 @@ +{{json-editor + value=(stringify content) + options=(hash + readOnly=true + lineNumbers=false + autoHeight=true + gutters=false + theme='hashi auto-height' + ) + }} diff --git a/ui/app/templates/components/console/log-list.hbs b/ui/app/templates/components/console/log-list.hbs new file mode 100644 index 000000000..83fc544bd --- /dev/null +++ b/ui/app/templates/components/console/log-list.hbs @@ -0,0 +1,21 @@ +
+
Keys
+{{#each list as |item|}}
+{{item}}
+{{/each}}
+
+
+ {{#tool-tip renderInPlace=true as |d|}} + {{#d.trigger data-test-tool-tip-trigger=true}} + {{#copy-button clipboardText=(multi-line-join list) class="button is-compact"}} + {{i-con glyph="copy" aria-hidden="true" size=16}} + {{/copy-button}} + {{/d.trigger}} + {{#d.content class="tool-tip"}} +
+ Copy +
+ {{/d.content}} + {{/tool-tip}} +
+
diff --git a/ui/app/templates/components/console/log-object.hbs b/ui/app/templates/components/console/log-object.hbs new file mode 100644 index 000000000..3c8d77cf4 --- /dev/null +++ b/ui/app/templates/components/console/log-object.hbs @@ -0,0 +1,18 @@ +
+
{{columns}}
+ +
+ {{#tool-tip renderInPlace=true as |d|}} + {{#d.trigger data-test-tool-tip-trigger=true}} + {{#copy-button clipboardText=columns class="button is-compact"}} + {{i-con glyph="copy" aria-hidden="true" size=16}} + {{/copy-button}} + {{/d.trigger}} + {{#d.content class="tool-tip"}} +
+ Copy +
+ {{/d.content}} + {{/tool-tip}} +
+
diff --git a/ui/app/templates/components/console/log-success.hbs b/ui/app/templates/components/console/log-success.hbs new file mode 100644 index 000000000..e16ae924c --- /dev/null +++ b/ui/app/templates/components/console/log-success.hbs @@ -0,0 +1,4 @@ +
+ {{i-con glyph="checkmark-circled" aria-hidden="true" size=12}} +
{{content}}
+
diff --git a/ui/app/templates/components/console/log-text.hbs b/ui/app/templates/components/console/log-text.hbs new file mode 100644 index 000000000..3da410475 --- /dev/null +++ b/ui/app/templates/components/console/log-text.hbs @@ -0,0 +1 @@ +
{{content}}
\ No newline at end of file diff --git a/ui/app/templates/components/console/output-log.hbs b/ui/app/templates/components/console/output-log.hbs new file mode 100644 index 000000000..e464c7fa0 --- /dev/null +++ b/ui/app/templates/components/console/output-log.hbs @@ -0,0 +1,5 @@ +{{#each log as |message|}} + {{#unless message.hidden}} + {{component (concat 'console/log-' message.type) content=message.content}} + {{/unless}} +{{/each}} diff --git a/ui/app/templates/components/console/ui-panel.hbs b/ui/app/templates/components/console/ui-panel.hbs new file mode 100644 index 000000000..f374e20d2 --- /dev/null +++ b/ui/app/templates/components/console/ui-panel.hbs @@ -0,0 +1,16 @@ +
+
+

+ The Vault Browser CLI provides an easy way to execute the most common CLI commands, such as write, read, delete, and list. +

+
+ {{console/output-log log=log}} + {{console/command-input + isFullscreen=isFullscreen + value=inputValue + onValueUpdate=(action (mut inputValue)) + onFullscreen=(action 'toggleFullscreen') + onExecuteCommand=(action 'executeCommand') + onShiftCommand=(action 'shiftCommandIndex') + }} +
diff --git a/ui/app/templates/svg/icons/console-active.hbs b/ui/app/templates/svg/icons/console-active.hbs new file mode 100644 index 000000000..aa155909c --- /dev/null +++ b/ui/app/templates/svg/icons/console-active.hbs @@ -0,0 +1,21 @@ + + + + + + diff --git a/ui/app/templates/svg/icons/console.hbs b/ui/app/templates/svg/icons/console.hbs new file mode 100644 index 000000000..f201234cc --- /dev/null +++ b/ui/app/templates/svg/icons/console.hbs @@ -0,0 +1,17 @@ + + + + + diff --git a/ui/app/templates/svg/icons/copy.hbs b/ui/app/templates/svg/icons/copy.hbs new file mode 100644 index 000000000..5a1bfb1b1 --- /dev/null +++ b/ui/app/templates/svg/icons/copy.hbs @@ -0,0 +1 @@ + diff --git a/ui/app/templates/svg/icons/fullscreen-close.hbs b/ui/app/templates/svg/icons/fullscreen-close.hbs new file mode 100644 index 000000000..43f026bb9 --- /dev/null +++ b/ui/app/templates/svg/icons/fullscreen-close.hbs @@ -0,0 +1 @@ + diff --git a/ui/app/templates/svg/icons/fullscreen-open.hbs b/ui/app/templates/svg/icons/fullscreen-open.hbs new file mode 100644 index 000000000..3702768ce --- /dev/null +++ b/ui/app/templates/svg/icons/fullscreen-open.hbs @@ -0,0 +1 @@ + diff --git a/ui/ember-cli-build.js b/ui/ember-cli-build.js index f758b6d63..74461efe3 100644 --- a/ui/ember-cli-build.js +++ b/ui/ember-cli-build.js @@ -55,6 +55,18 @@ module.exports = function(defaults) { app.import('node_modules/text-encoder-lite/index.js'); app.import('node_modules/Duration.js/duration.js'); + app.import('node_modules/columnify/columnify.js', { + using: [ + { transformation: 'cjs', as: 'columnify' } + ] + }); + + app.import('node_modules/yargs-parser/lib/tokenize-arg-string.js', { + using: [ + { transformation: 'cjs', as: 'yargs-parser-tokenizer' } + ] + }); + // Use `app.import` to add additional libraries to the generated // output files. 
// diff --git a/ui/package.json b/ui/package.json index 89bd31042..9263e7039 100644 --- a/ui/package.json +++ b/ui/package.json @@ -14,7 +14,7 @@ "start2": "ember server --proxy=http://localhost:8202 --port=4202", "test": "node scripts/start-vault.js & ember test", "test-oss": "yarn run test -f='!enterprise'", - "fmt-js": "prettier-eslint --single-quote --trailing-comma es5 --print-width=110 --write {app,tests,config,lib,mirage}/**/*.js", + "fmt-js": "prettier-eslint --single-quote --no-use-tabs --trailing-comma es5 --print-width=110 --write '{app,tests,config,lib,mirage}/**/*.js'", "fmt-styles": "prettier --write app/styles/**/*.*", "fmt": "yarn run fmt-js && yarn run fmt-styles", "precommit": "lint-staged" @@ -33,19 +33,24 @@ } }, "devDependencies": { + "Duration.js": "icholy/Duration.js#golang_compatible", + "autosize": "3.0.17", "babel-plugin-transform-object-rest-spread": "^6.23.0", + "base64-js": "1.2.1", "broccoli-asset-rev": "^2.4.5", "broccoli-sri-hash": "meirish/broccoli-sri-hash#rooturl", "bulma": "^0.5.2", "bulma-switch": "^0.0.1", + "codemirror": "5.15.2", "cool-checkboxes-for-bulma.io": "^1.1.0", "ember-ajax": "^3.0.0", "ember-api-actions": "^0.1.8", "ember-basic-dropdown": "^0.33.5", "ember-basic-dropdown-hover": "^0.2.0", - "ember-cli": "~2.15.0", + "ember-cli": "~2.16.0", "ember-cli-autoprefixer": "^0.8.1", "ember-cli-babel": "^6.3.0", + "ember-cli-cjs-transform": "^1.2.0", "ember-cli-clipboard": "^0.8.0", "ember-cli-content-security-policy": "^1.0.0", "ember-cli-dependency-checker": "^1.3.0", @@ -83,18 +88,16 @@ "ember-test-selectors": "^0.3.6", "ember-truth-helpers": "1.2.0", "ivy-codemirror": "2.1.0", + "jsonlint": "1.6.0", "loader.js": "^4.2.3", "normalize.css": "4.1.1", "prettier": "^1.5.3", "prettier-eslint-cli": "^4.2.1", "qunit-dom": "^0.6.2", + "string.prototype.startswith": "mathiasbynens/String.prototype.startsWith", "text-encoder-lite": "1.0.0", - "base64-js": "1.2.1", - "autosize": "3.0.17", - "jsonlint": "1.6.0", - "codemirror": 
"5.15.2", - "Duration.js": "icholy/Duration.js#golang_compatible", - "string.prototype.startswith": "mathiasbynens/String.prototype.startsWith" + "columnify": "^1.5.4", + "yargs-parser": "^10.0.0" }, "engines": { "node": "^4.5 || 6.* || >= 7.*" diff --git a/ui/tests/integration/components/console/log-command-test.js b/ui/tests/integration/components/console/log-command-test.js new file mode 100644 index 000000000..72a9514ac --- /dev/null +++ b/ui/tests/integration/components/console/log-command-test.js @@ -0,0 +1,15 @@ +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('console/log-command', 'Integration | Component | console/log command', { + integration: true, +}); + +test('it renders', function(assert) { + const commandText = 'list this/path'; + this.set('content', commandText); + + this.render(hbs`{{console/log-command content=content}}`); + + assert.dom('pre').includesText(commandText); +}); diff --git a/ui/tests/integration/components/console/log-error-test.js b/ui/tests/integration/components/console/log-error-test.js new file mode 100644 index 000000000..f32c2dad9 --- /dev/null +++ b/ui/tests/integration/components/console/log-error-test.js @@ -0,0 +1,13 @@ +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('console/log-error', 'Integration | Component | console/log error', { + integration: true, +}); + +test('it renders', function(assert) { + const errorText = 'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404'; + this.set('content', errorText); + this.render(hbs`{{console/log-error content=content}}`); + assert.dom('pre').includesText(errorText); +}); diff --git a/ui/tests/integration/components/console/log-json-test.js b/ui/tests/integration/components/console/log-json-test.js new file mode 100644 index 000000000..353835df0 --- /dev/null +++ b/ui/tests/integration/components/console/log-json-test.js 
@@ -0,0 +1,24 @@ +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('console/log-json', 'Integration | Component | console/log json', { + integration: true, + + beforeEach() { + this.inject.service('code-mirror', { as: 'codeMirror' }); + }, +}); + +test('it renders', function(assert) { + // Set any properties with this.set('myProperty', 'value'); + // Handle any actions with this.on('myAction', function(val) { ... }); + const objectContent = { one: 'two', three: 'four', seven: { five: 'six' }, eight: [5, 6] }; + const expectedText = JSON.stringify(objectContent, null, 2); + + this.set('content', objectContent); + + this.render(hbs`{{console/log-json content=content}}`); + const instance = this.codeMirror.instanceFor(this.$('[data-test-component=json-editor]').attr('id')); + + assert.equal(instance.getValue(), expectedText); +}); diff --git a/ui/tests/integration/components/console/log-list-test.js b/ui/tests/integration/components/console/log-list-test.js new file mode 100644 index 000000000..625787b54 --- /dev/null +++ b/ui/tests/integration/components/console/log-list-test.js @@ -0,0 +1,19 @@ +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('console/log-list', 'Integration | Component | console/log list', { + integration: true, +}); + +test('it renders', function(assert) { + // Set any properties with this.set('myProperty', 'value'); + // Handle any actions with this.on('myAction', function(val) { ... 
}); + const listContent = { keys: ['one', 'two'] }; + const expectedText = 'Keys\none\ntwo'; + + this.set('content', listContent); + + this.render(hbs`{{console/log-list content=content}}`); + + assert.dom('pre').includesText(`${expectedText}`); +}); diff --git a/ui/tests/integration/components/console/log-object-test.js b/ui/tests/integration/components/console/log-object-test.js new file mode 100644 index 000000000..1b5347004 --- /dev/null +++ b/ui/tests/integration/components/console/log-object-test.js @@ -0,0 +1,27 @@ +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; +import columnify from 'columnify'; +import { capitalize } from 'vault/helpers/capitalize'; +import { stringifyObjectValues } from 'vault/components/console/log-object'; + +moduleForComponent('console/log-object', 'Integration | Component | console/log object', { + integration: true, +}); + +test('it renders', function(assert) { + const objectContent = { one: 'two', three: 'four', seven: { five: 'six' }, eight: [5, 6] }; + const data = { one: 'two', three: 'four', seven: { five: 'six' }, eight: [5, 6] }; + stringifyObjectValues(data); + const expectedText = columnify(data, { + preserveNewLines: true, + headingTransform: function(heading) { + return capitalize([heading]); + }, + }); + + this.set('content', objectContent); + + this.render(hbs`{{console/log-object content=content}}`); + + assert.dom('pre').includesText(`${expectedText}`); +}); diff --git a/ui/tests/integration/components/console/log-text-test.js b/ui/tests/integration/components/console/log-text-test.js new file mode 100644 index 000000000..357c99cc8 --- /dev/null +++ b/ui/tests/integration/components/console/log-text-test.js @@ -0,0 +1,17 @@ +import { moduleForComponent, test } from 'ember-qunit'; +import hbs from 'htmlbars-inline-precompile'; + +moduleForComponent('console/log-text', 'Integration | Component | console/log text', { + integration: true, +}); + +test('it renders', 
function(assert) { + // Set any properties with this.set('myProperty', 'value'); + // Handle any actions with this.on('myAction', function(val) { ... }); + const text = 'Success! You did a thing!'; + this.set('content', text); + + this.render(hbs`{{console/log-text content=content}}`); + + assert.dom('pre').includesText(text); +}); diff --git a/ui/tests/integration/components/console/ui-panel-test.js b/ui/tests/integration/components/console/ui-panel-test.js new file mode 100644 index 000000000..57d6b8d38 --- /dev/null +++ b/ui/tests/integration/components/console/ui-panel-test.js @@ -0,0 +1,118 @@ +import { moduleForComponent, test } from 'ember-qunit'; +import { create } from 'ember-cli-page-object'; +import wait from 'ember-test-helpers/wait'; +import uiPanel from 'vault/tests/pages/components/console/ui-panel'; +import hbs from 'htmlbars-inline-precompile'; + +const component = create(uiPanel); + +moduleForComponent('console/ui-panel', 'Integration | Component | console/ui panel', { + integration: true, + + beforeEach() { + component.setContext(this); + }, + + afterEach() { + component.removeContext(); + }, +}); + +test('it renders', function(assert) { + this.render(hbs`{{console/ui-panel}}`); + assert.ok(component.hasInput); +}); + +test('it clears console input on enter', function(assert) { + this.render(hbs`{{console/ui-panel}}`); + component.consoleInput('list this/thing/here').enter(); + return wait().then(() => { + assert.equal(component.consoleInputValue, '', 'empties input field on enter'); + }); +}); + +test('it clears the log when using clear command', function(assert) { + this.render(hbs`{{console/ui-panel}}`); + component.consoleInput('list this/thing/here').enter(); + component.consoleInput('list this/other/thing').enter(); + component.consoleInput('read another/thing').enter(); + wait().then(() => { + assert.notEqual(component.logOutput, '', 'there is output in the log'); + component.consoleInput('clear').enter(); + }); + + wait().then(() => 
component.up()); + return wait().then(() => { + assert.equal(component.logOutput, '', 'clears the output log'); + assert.equal( + component.consoleInputValue, + 'clear', + 'populates console input with previous command on up after enter' + ); + }); +}); + +test('it adds command to history on enter', function(assert) { + this.render(hbs`{{console/ui-panel}}`); + component.consoleInput('list this/thing/here').enter(); + wait().then(() => component.up()); + wait().then(() => { + assert.equal( + component.consoleInputValue, + 'list this/thing/here', + 'populates console input with previous command on up after enter' + ); + }); + wait().then(() => component.down()); + return wait().then(() => { + assert.equal(component.consoleInputValue, '', 'populates console input with next command on down'); + }); +}); + +test('it cycles through history with more than one command', function(assert) { + this.render(hbs`{{console/ui-panel}}`); + component.consoleInput('list this/thing/here').enter(); + wait().then(() => component.consoleInput('read that/thing/there').enter()); + wait().then(() => component.consoleInput('qwerty').enter()); + + wait().then(() => component.up()); + wait().then(() => { + assert.equal( + component.consoleInputValue, + 'qwerty', + 'populates console input with previous command on up after enter' + ); + }); + wait().then(() => component.up()); + wait().then(() => { + assert.equal( + component.consoleInputValue, + 'read that/thing/there', + 'populates console input with previous command on up' + ); + }); + wait().then(() => component.up()); + wait().then(() => { + assert.equal( + component.consoleInputValue, + 'list this/thing/here', + 'populates console input with previous command on up' + ); + }); + wait().then(() => component.up()); + wait().then(() => { + assert.equal( + component.consoleInputValue, + 'qwerty', + 'populates console input with initial command if cycled through all previous commands' + ); + }); + wait().then(() => component.down()); + return 
wait().then(() => { + assert.equal( + component.consoleInputValue, + '', + 'clears console input if down pressed after history is on most recent command' + ); + }); +}); diff --git a/ui/tests/pages/components/console/ui-panel.js b/ui/tests/pages/components/console/ui-panel.js new file mode 100644 index 000000000..b17858a0a --- /dev/null +++ b/ui/tests/pages/components/console/ui-panel.js @@ -0,0 +1,18 @@ +import { text, triggerable, fillable, value, isPresent } from 'ember-cli-page-object'; +import keys from 'vault/lib/keycodes'; + +export default { + consoleInput: fillable('[data-test-component="console/command-input"] input'), + consoleInputValue: value('[data-test-component="console/command-input"] input'), + logOutput: text('[data-test-component="console/output-log"]'), + up: triggerable('keyup', '[data-test-component="console/command-input"] input', { + eventProperties: { keyCode: keys.UP }, + }), + down: triggerable('keyup', '[data-test-component="console/command-input"] input', { + eventProperties: { keyCode: keys.DOWN }, + }), + enter: triggerable('keyup', '[data-test-component="console/command-input"] input', { + eventProperties: { keyCode: keys.ENTER }, + }), + hasInput: isPresent('[data-test-component="console/command-input"] input'), +}; diff --git a/ui/tests/unit/adapters/console-test.js b/ui/tests/unit/adapters/console-test.js new file mode 100644 index 000000000..6d6fd2da9 --- /dev/null +++ b/ui/tests/unit/adapters/console-test.js @@ -0,0 +1,13 @@ +import { moduleFor, test } from 'ember-qunit'; + +moduleFor('adapter:console', 'Unit | Adapter | console', { + needs: ['service:auth', 'service:flash-messages', 'service:version'], +}); + +test('it builds the correct URL', function(assert) { + let adapter = this.subject(); + let sysPath = 'sys/health'; + let awsPath = 'aws/roles/my-other-role'; + assert.equal(adapter.buildURL(sysPath), '/v1/sys/health'); + assert.equal(adapter.buildURL(awsPath), '/v1/aws/roles/my-other-role'); +}); diff --git 
a/ui/tests/unit/lib/console-helpers-test.js b/ui/tests/unit/lib/console-helpers-test.js new file mode 100644 index 000000000..ea63e1e41 --- /dev/null +++ b/ui/tests/unit/lib/console-helpers-test.js @@ -0,0 +1,328 @@ +import { module, test } from 'qunit'; +import { + parseCommand, + extractDataAndFlags, + logFromResponse, + logFromError, + logErrorFromInput, +} from 'vault/lib/console-helpers'; + +module('lib/console-helpers', 'Unit | Lib | console helpers'); + +const testCommands = [ + { + name: 'write with data', + command: `vault write aws/config/root \ + access_key=AKIAJWVN5Z4FOFT7NLNA \ + secret_key=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 \ + region=us-east-1`, + expected: [ + 'write', + [], + 'aws/config/root', + [ + 'access_key=AKIAJWVN5Z4FOFT7NLNA', + 'secret_key=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', + 'region=us-east-1', + ], + ], + }, + { + name: 'read with field', + command: `vault read -field=access_key aws/creds/my-role`, + expected: ['read', ['-field=access_key'], 'aws/creds/my-role', []], + }, +]; + +testCommands.forEach(function(testCase) { + test(`#parseCommand: ${testCase.name}`, function(assert) { + let result = parseCommand(testCase.command); + assert.deepEqual(result, testCase.expected); + }); +}); + +test('#parseCommand: invalid commands', function(assert) { + let command = 'vault kv get foo'; + let result = parseCommand(command); + assert.equal(result, false, 'parseCommand returns false by default'); + + assert.throws( + () => { + parseCommand(command, true); + }, + /invalid command/, + 'throws on invalid command when `shouldThrow` is true' + ); +}); + +const testExtractCases = [ + { + name: 'data fields', + input: [ + [ + 'access_key=AKIAJWVN5Z4FOFT7NLNA', + 'secret_key=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', + 'region=us-east-1', + ], + [], + ], + expected: { + data: { + access_key: 'AKIAJWVN5Z4FOFT7NLNA', + secret_key: 'R4nm063hgMVo4BTT5xOs5nHLeLXA6lar7ZJ3Nt0i', + region: 'us-east-1', + }, + flags: {}, + }, + }, + { + name: 
'repeated data and a flag', + input: [['allowed_domains=example.com', 'allowed_domains=foo.example.com'], ['-wrap-ttl=2h']], + expected: { + data: { + allowed_domains: ['example.com', 'foo.example.com'], + }, + flags: { + wrapTTL: '2h', + }, + }, + }, + { + name: 'data with more than one equals sign', + input: [['foo=bar=baz', 'foo=baz=bop', 'some=value=val'], []], + expected: { + data: { + foo: ['bar=baz', 'baz=bop'], + some: 'value=val', + }, + flags: {}, + }, + }, +]; + +testExtractCases.forEach(function(testCase) { + test(`#extractDataAndFlags: ${testCase.name}`, function(assert) { + let { data, flags } = extractDataAndFlags(...testCase.input); + assert.deepEqual(data, testCase.expected.data, 'has expected data'); + assert.deepEqual(flags, testCase.expected.flags, 'has expected flags'); + }); +}); + +let testResponseCases = [ + { + name: 'write response, no content', + args: [null, 'foo/bar', 'write', {}], + expectedData: { + type: 'success', + content: 'Success! Data written to: foo/bar', + }, + }, + { + name: 'delete response, no content', + args: [null, 'foo/bar', 'delete', {}], + expectedData: { + type: 'success', + content: 'Success! 
Data deleted (if it existed) at: foo/bar', + }, + }, + { + name: 'write, with content', + args: [{ data: { one: 'two' } }, 'foo/bar', 'write', {}], + expectedData: { + type: 'object', + content: { one: 'two' }, + }, + }, + { + name: 'with wrap-ttl flag', + args: [{ wrap_info: { one: 'two' } }, 'foo/bar', 'read', { wrapTTL: '1h' }], + expectedData: { + type: 'object', + content: { one: 'two' }, + }, + }, + { + name: 'with -format=json flag and wrap-ttl flag', + args: [{ foo: 'bar', wrap_info: { one: 'two' } }, 'foo/bar', 'read', { format: 'json', wrapTTL: '1h' }], + expectedData: { + type: 'json', + content: { foo: 'bar', wrap_info: { one: 'two' } }, + }, + }, + { + name: 'with -format=json and -field flags', + args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', { format: 'json', field: 'one' }], + expectedData: { + type: 'json', + content: 'two', + }, + }, + { + name: 'with -format=json and -field, and -wrap-ttl flags', + args: [ + { foo: 'bar', wrap_info: { one: 'two' } }, + 'foo/bar', + 'read', + { format: 'json', wrapTTL: '1h', field: 'one' }, + ], + expectedData: { + type: 'json', + content: 'two', + }, + }, + { + name: 'with string field flag and wrap-ttl flag', + args: [{ foo: 'bar', wrap_info: { one: 'two' } }, 'foo/bar', 'read', { field: 'one', wrapTTL: '1h' }], + expectedData: { + type: 'text', + content: 'two', + }, + }, + { + name: 'with object field flag and wrap-ttl flag', + args: [ + { foo: 'bar', wrap_info: { one: { two: 'three' } } }, + 'foo/bar', + 'read', + { field: 'one', wrapTTL: '1h' }, + ], + expectedData: { + type: 'object', + content: { two: 'three' }, + }, + }, + { + name: 'with response data and string field flag', + args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', { field: 'one', wrapTTL: '1h' }], + expectedData: { + type: 'text', + content: 'two', + }, + }, + { + name: 'with response data and object field flag ', + args: [ + { foo: 'bar', data: { one: { two: 'three' } } }, + 'foo/bar', + 'read', + { field: 
'one', wrapTTL: '1h' }, + ], + expectedData: { + type: 'object', + content: { two: 'three' }, + }, + }, + { + name: 'response with data', + args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', {}], + expectedData: { + type: 'object', + content: { one: 'two' }, + }, + }, + { + name: 'with response data, field flag, and field missing', + args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', { field: 'foo' }], + expectedData: { + type: 'error', + content: 'Field "foo" not present in secret', + }, + }, + { + name: 'with response data and auth block', + args: [{ data: { one: 'two' }, auth: { three: 'four' } }, 'auth/token/create', 'write', {}], + expectedData: { + type: 'object', + content: { three: 'four' }, + }, + }, + { + name: 'with -field and -format with an object field', + args: [{ data: { one: { three: 'two' } } }, 'sys/mounts', 'read', { field: 'one', format: 'json' }], + expectedData: { + type: 'json', + content: { three: 'two' }, + }, + }, + { + name: 'with -field and -format with a string field', + args: [{ data: { one: 'two' } }, 'sys/mounts', 'read', { field: 'one', format: 'json' }], + expectedData: { + type: 'json', + content: 'two', + }, + }, +]; + +testResponseCases.forEach(function(testCase) { + test(`#logFromResponse: ${testCase.name}`, function(assert) { + let data = logFromResponse(...testCase.args); + assert.deepEqual(data, testCase.expectedData); + }); +}); + +let testErrorCases = [ + { + name: 'AdapterError write', + args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'write'], + expectedContent: 'Error writing to: sys/foo.\nURL: v1/sys/foo\nCode: 404', + }, + { + name: 'AdapterError read', + args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'read'], + expectedContent: 'Error reading from: sys/foo.\nURL: v1/sys/foo\nCode: 404', + }, + { + name: 'AdapterError list', + args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'list'], + expectedContent: 'Error listing: 
sys/foo.\nURL: v1/sys/foo\nCode: 404', + }, + { + name: 'AdapterError delete', + args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'delete'], + expectedContent: 'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404', + }, + { + name: 'VaultError single error', + args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: ['no client token'] }, 'sys/foo', 'delete'], + expectedContent: 'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404\nErrors:\n no client token', + }, + { + name: 'VaultErrors multiple errors', + args: [ + { httpStatus: 404, path: 'v1/sys/foo', errors: ['no client token', 'this is an error'] }, + 'sys/foo', + 'delete', + ], + expectedContent: + 'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404\nErrors:\n no client token\n this is an error', + }, +]; + +testErrorCases.forEach(function(testCase) { + test(`#logFromError: ${testCase.name}`, function(assert) { + let data = logFromError(...testCase.args); + assert.deepEqual(data, { type: 'error', content: testCase.expectedContent }, 'returns the expected data'); + }); +}); + +const testCommandCases = [ + { + name: 'errors when command does not include a path', + args: [], + expectedContent: 'A path is required to make a request.', + }, + { + name: 'errors when write command does not include data and does not have force tag', + args: ['foo/bar', 'write', {}, []], + expectedContent: 'Must supply data or use -force', + }, +]; + +testCommandCases.forEach(function(testCase) { + test(`#logErrorFromInput: ${testCase.name}`, function(assert) { + let data = logErrorFromInput(...testCase.args); + + assert.deepEqual(data, { type: 'error', content: testCase.expectedContent }, 'returns the correct data'); + }); +}); diff --git a/ui/tests/unit/services/console-test.js b/ui/tests/unit/services/console-test.js new file mode 100644 index 000000000..0f4adca4d --- /dev/null +++ b/ui/tests/unit/services/console-test.js @@ -0,0 +1,94 @@ +import { moduleFor, test } from 'ember-qunit'; +import { 
sanitizePath, ensureTrailingSlash } from 'vault/services/console'; +import sinon from 'sinon'; + +moduleFor('service:console', 'Unit | Service | console', { + needs: ['service:auth'], + beforeEach() {}, + afterEach() {}, +}); + +test('#sanitizePath', function(assert) { + assert.equal(sanitizePath(' /foo/bar/baz/ '), 'foo/bar/baz', 'removes spaces and slashes on either side'); + assert.equal(sanitizePath('//foo/bar/baz/'), 'foo/bar/baz', 'removes more than one slash'); +}); + +test('#ensureTrailingSlash', function(assert) { + assert.equal(ensureTrailingSlash('foo/bar'), 'foo/bar/', 'adds trailing slash'); + assert.equal(ensureTrailingSlash('baz/'), 'baz/', 'keeps trailing slash if there is one'); +}); + +let testCases = [ + { + method: 'read', + args: ['/sys/health', {}], + expectedURL: 'sys/health', + expectedVerb: 'GET', + expectedOptions: { data: undefined, wrapTTL: undefined }, + }, + + { + method: 'read', + args: ['/secrets/foo/bar', {}, '30m'], + expectedURL: 'secrets/foo/bar', + expectedVerb: 'GET', + expectedOptions: { data: undefined, wrapTTL: '30m' }, + }, + + { + method: 'write', + args: ['aws/roles/my-other-role', { arn: 'arn=arn:aws:iam::aws:policy/AmazonEC2ReadOnlyAccess' }], + expectedURL: 'aws/roles/my-other-role', + expectedVerb: 'POST', + expectedOptions: { + data: { arn: 'arn=arn:aws:iam::aws:policy/AmazonEC2ReadOnlyAccess' }, + wrapTTL: undefined, + }, + }, + + { + method: 'list', + args: ['secret/mounts', {}], + expectedURL: 'secret/mounts/', + expectedVerb: 'GET', + expectedOptions: { data: { list: true }, wrapTTL: undefined }, + }, + + { + method: 'list', + args: ['secret/mounts', {}, '1h'], + expectedURL: 'secret/mounts/', + expectedVerb: 'GET', + expectedOptions: { data: { list: true }, wrapTTL: '1h' }, + }, + + { + method: 'delete', + args: ['secret/secrets/kv'], + expectedURL: 'secret/secrets/kv', + expectedVerb: 'DELETE', + expectedOptions: { data: undefined, wrapTTL: undefined }, + }, +]; + +test('it reads, writes, lists, deletes', 
function(assert) { + let ajax = sinon.stub(); + let uiConsole = this.subject({ + adapter() { + return { + buildURL(url) { + return url; + }, + ajax, + }; + }, + }); + + testCases.forEach(testCase => { + uiConsole[testCase.method](...testCase.args); + let [url, verb, options] = ajax.lastCall.args; + assert.equal(url, testCase.expectedURL, `${testCase.method}: uses trimmed passed url`); + assert.equal(verb, testCase.expectedVerb, `${testCase.method}: uses the correct verb`); + assert.deepEqual(options, testCase.expectedOptions, `${testCase.method}: uses the correct options`); + }); +}); diff --git a/ui/yarn.lock b/ui/yarn.lock index 18f4f4fa8..5fa12cc11 100644 --- a/ui/yarn.lock +++ b/ui/yarn.lock @@ -82,6 +82,14 @@ dependencies: "@glimmer/util" "^0.22.3" +"@types/estree@0.0.38": + version "0.0.38" + resolved "https://registry.yarnpkg.com/@types/estree/-/estree-0.0.38.tgz#c1be40aa933723c608820a99a373a16d215a1ca2" + +"@types/node@*": + version "10.0.4" + resolved "https://registry.yarnpkg.com/@types/node/-/node-10.0.4.tgz#130598ee56e9a1210a53f557d64935571f05390d" + Duration.js@icholy/Duration.js#golang_compatible: version "2.0.0" resolved "https://codeload.github.com/icholy/Duration.js/tar.gz/cb1c58efc2772ef0f261da9e2535890734a86417" @@ -178,6 +186,12 @@ amd-name-resolver@0.0.7: dependencies: ensure-posix-path "^1.0.1" +amd-name-resolver@1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/amd-name-resolver/-/amd-name-resolver-1.0.0.tgz#0e593b28d6fa3326ab1798107edaea961046e8d8" + dependencies: + ensure-posix-path "^1.0.1" + amdefine@>=0.0.4: version "1.0.1" resolved "https://registry.yarnpkg.com/amdefine/-/amdefine-1.0.1.tgz#4a5282ac164729e93619bcfd3ad151f817ce91f5" @@ -274,10 +288,18 @@ arr-diff@^2.0.0: dependencies: arr-flatten "^1.0.1" -arr-flatten@^1.0.1: +arr-diff@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/arr-diff/-/arr-diff-4.0.0.tgz#d6461074febfec71e7e15235761a329a5dc7c520" + +arr-flatten@^1.0.1, arr-flatten@^1.1.0: version 
"1.1.0" resolved "https://registry.yarnpkg.com/arr-flatten/-/arr-flatten-1.1.0.tgz#36048bbff4e7b47e136644316c99669ea5ae91f1" +arr-union@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/arr-union/-/arr-union-3.1.0.tgz#e39b09aea9def866a8f206e288af63919bae39c4" + array-equal@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/array-equal/-/array-equal-1.0.0.tgz#8c2a5ef2472fd9ea742b04c77a75093ba2757c93" @@ -314,6 +336,10 @@ array-unique@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.2.1.tgz#a1d97ccafcbc2625cc70fadceb36a50c58b01a53" +array-unique@^0.3.2: + version "0.3.2" + resolved "https://registry.yarnpkg.com/array-unique/-/array-unique-0.3.2.tgz#a894b75d4bc4f6cd679ef3244a9fd8f46ae2d428" + arraybuffer.slice@0.0.6: version "0.0.6" resolved "https://registry.yarnpkg.com/arraybuffer.slice/-/arraybuffer.slice-0.0.6.tgz#f33b2159f0532a3f3107a272c0ccfbd1ad2979ca" @@ -338,6 +364,10 @@ assertion-error@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.0.0.tgz#c7f85438fdd466bc7ca16ab90c81513797a5d23b" +assign-symbols@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/assign-symbols/-/assign-symbols-1.0.0.tgz#59667f41fadd4f20ccbc2bb96b8d4f7f78ec0367" + ast-traverse@~0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/ast-traverse/-/ast-traverse-0.1.1.tgz#69cf2b8386f19dcda1bb1e05d68fe359d8897de6" @@ -398,6 +428,10 @@ asynckit@^0.4.0: version "0.4.0" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" +atob@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.1.tgz#ae2d5a729477f289d60dd7f96a6314a22dd6c22a" + autoprefixer@^7.0.0: version "7.2.6" resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-7.2.6.tgz#256672f86f7c735da849c4f07d008abb056067dc" @@ -1091,6 +1125,18 @@ base64id@0.1.0: version "0.1.0" resolved 
"https://registry.yarnpkg.com/base64id/-/base64id-0.1.0.tgz#02ce0fdeee0cef4f40080e1e73e834f0b1bfce3f" +base@^0.11.1: + version "0.11.2" + resolved "https://registry.yarnpkg.com/base/-/base-0.11.2.tgz#7bde5ced145b6d551a90db87f83c558b4eb48a8f" + dependencies: + cache-base "^1.0.1" + class-utils "^0.3.5" + component-emitter "^1.2.1" + define-property "^1.0.0" + isobject "^3.0.1" + mixin-deep "^1.2.0" + pascalcase "^0.1.1" + basic-auth@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/basic-auth/-/basic-auth-1.1.0.tgz#45221ee429f7ee1e5035be3f51533f1cdfd29884" @@ -1216,6 +1262,21 @@ braces@^1.8.2: preserve "^0.2.0" repeat-element "^1.1.2" +braces@^2.3.1: + version "2.3.2" + resolved "https://registry.yarnpkg.com/braces/-/braces-2.3.2.tgz#5979fd3f14cd531565e5fa2df1abfff1dfaee729" + dependencies: + arr-flatten "^1.1.0" + array-unique "^0.3.2" + extend-shallow "^2.0.1" + fill-range "^4.0.0" + isobject "^3.0.1" + repeat-element "^1.1.2" + snapdragon "^0.8.1" + snapdragon-node "^2.0.1" + split-string "^3.0.2" + to-regex "^3.0.1" + breakable@~1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/breakable/-/breakable-1.0.0.tgz#784a797915a38ead27bad456b5572cb4bbaa78c1" @@ -1381,6 +1442,17 @@ broccoli-debug@^0.6.1, broccoli-debug@^0.6.2: symlink-or-copy "^1.1.8" tree-sync "^1.2.2" +broccoli-debug@^0.6.3, broccoli-debug@^0.6.4: + version "0.6.4" + resolved "https://registry.yarnpkg.com/broccoli-debug/-/broccoli-debug-0.6.4.tgz#986eb3d2005e00e3bb91f9d0a10ab137210cd150" + dependencies: + broccoli-plugin "^1.2.1" + fs-tree-diff "^0.5.2" + heimdalljs "^0.2.1" + heimdalljs-logger "^0.1.7" + symlink-or-copy "^1.1.8" + tree-sync "^1.2.2" + broccoli-favicon@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/broccoli-favicon/-/broccoli-favicon-1.0.0.tgz#c770a5aa16032fbaf1b5c9c033f71b9cc5a5cb51" @@ -1735,6 +1807,10 @@ builtin-modules@^1.0.0: version "1.1.1" resolved 
"https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-1.1.1.tgz#270f076c5a72c02f5b65a47df94c5fe3a278892f" +builtin-modules@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/builtin-modules/-/builtin-modules-2.0.0.tgz#60b7ef5ae6546bd7deefa74b08b62a43a232648e" + builtins@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/builtins/-/builtins-1.0.3.tgz#cb94faeb61c8696451db36534e1422f94f0aee88" @@ -1759,6 +1835,20 @@ bytes@3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.0.0.tgz#d32815404d689699f85a4ea4fa8755dd13a96048" +cache-base@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/cache-base/-/cache-base-1.0.1.tgz#0a7f46416831c8b662ee36fe4e7c59d76f666ab2" + dependencies: + collection-visit "^1.0.0" + component-emitter "^1.2.1" + get-value "^2.0.6" + has-value "^1.0.0" + isobject "^3.0.1" + set-value "^2.0.0" + to-object-path "^0.3.0" + union-value "^1.0.0" + unset-value "^1.0.0" + calculate-cache-key-for-tree@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/calculate-cache-key-for-tree/-/calculate-cache-key-for-tree-1.1.0.tgz#0c3e42c9c134f3c9de5358c0f16793627ea976d6" @@ -1890,6 +1980,14 @@ chalk@^2.0.0: escape-string-regexp "^1.0.5" supports-color "^4.0.0" +chalk@^2.0.1, chalk@^2.1.0, chalk@^2.3.1: + version "2.4.1" + resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.1.tgz#18c49ab16a037b6eb0152cc83e3471338215b66e" + dependencies: + ansi-styles "^3.2.1" + escape-string-regexp "^1.0.5" + supports-color "^5.3.0" + chalk@^2.3.2: version "2.3.2" resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.3.2.tgz#250dc96b07491bfd601e648d66ddf5f60c7a5c65" @@ -1941,6 +2039,15 @@ circular-json@^0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/circular-json/-/circular-json-0.3.1.tgz#be8b36aefccde8b3ca7aa2d6afc07a37242c0d2d" +class-utils@^0.3.5: + version "0.3.6" + resolved 
"https://registry.yarnpkg.com/class-utils/-/class-utils-0.3.6.tgz#f93369ae8b9a7ce02fd41faad0ca83033190c463" + dependencies: + arr-union "^3.1.0" + define-property "^0.2.5" + isobject "^3.0.0" + static-extend "^0.1.1" + clean-base-url@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/clean-base-url/-/clean-base-url-1.0.0.tgz#c901cf0a20b972435b0eccd52d056824a4351b7b" @@ -1960,7 +2067,7 @@ clean-css@^3.4.5: commander "2.8.x" source-map "0.4.x" -cli-cursor@^1.0.1, cli-cursor@^1.0.2: +cli-cursor@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/cli-cursor/-/cli-cursor-1.0.2.tgz#64da3f7d56a54412e59794bd62dc35295e8f2987" dependencies: @@ -1972,9 +2079,9 @@ cli-cursor@^2.1.0: dependencies: restore-cursor "^2.0.0" -cli-spinners@^0.1.2: - version "0.1.2" - resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-0.1.2.tgz#bb764d88e185fb9e1e6a2a1f19772318f605e31c" +cli-spinners@^1.1.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/cli-spinners/-/cli-spinners-1.3.1.tgz#002c1990912d0d59580c93bd36c056de99e4259a" cli-table2@^0.2.0: version "0.2.0" @@ -2051,6 +2158,13 @@ coffee-script@^1.10.0: version "1.12.7" resolved "https://registry.yarnpkg.com/coffee-script/-/coffee-script-1.12.7.tgz#c05dae0cb79591d05b3070a8433a98c9a89ccc53" +collection-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/collection-visit/-/collection-visit-1.0.0.tgz#4bc0373c164bc3291b4d368c829cf1a80a59dca0" + dependencies: + map-visit "^1.0.0" + object-visit "^1.0.0" + color-convert@^1.0.0: version "1.9.0" resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-1.9.0.tgz#1accf97dd739b983bf994d56fec8f95853641b7a" @@ -2075,6 +2189,13 @@ colors@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/colors/-/colors-1.1.2.tgz#168a4701756b6a7f51a12ce0c97bfa28c084ed63" +columnify@^1.5.4: + version "1.5.4" + resolved "https://registry.yarnpkg.com/columnify/-/columnify-1.5.4.tgz#4737ddf1c7b69a8a7c340570782e947eec8e78bb" + 
dependencies: + strip-ansi "^3.0.0" + wcwidth "^1.0.0" + combined-stream@^1.0.5, combined-stream@~1.0.5: version "1.0.5" resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.5.tgz#938370a57b4a51dea2c77c15d5c5fdf895164009" @@ -2125,7 +2246,7 @@ component-emitter@1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.1.2.tgz#296594f2753daa63996d2af08d15a95116c9aec3" -component-emitter@1.2.1: +component-emitter@1.2.1, component-emitter@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.2.1.tgz#137918d6d78283f7df7a6b7c5a63e140e69425e6" @@ -2186,14 +2307,16 @@ console-control-strings@^1.0.0, console-control-strings@~1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e" -console-ui@^1.0.2: - version "1.0.3" - resolved "https://registry.yarnpkg.com/console-ui/-/console-ui-1.0.3.tgz#31c524461b63422769f9e89c173495d91393721c" +console-ui@^2.0.0: + version "2.2.2" + resolved "https://registry.yarnpkg.com/console-ui/-/console-ui-2.2.2.tgz#b294a2934de869dd06789ab4be69555411edef29" dependencies: - chalk "^1.1.3" - inquirer "^1.2.3" - ora "^0.2.0" + chalk "^2.1.0" + inquirer "^2" + json-stable-stringify "^1.0.1" + ora "^2.0.0" through "^2.3.8" + user-info "^1.0.0" consolidate@^0.14.0: version "0.14.5" @@ -2237,6 +2360,10 @@ copy-dereference@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/copy-dereference/-/copy-dereference-1.0.0.tgz#6b131865420fd81b413ba994b44d3655311152b6" +copy-descriptor@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d" + core-js@^1.0.0: version "1.2.7" resolved "https://registry.yarnpkg.com/core-js/-/core-js-1.2.7.tgz#652294c14651db28fa93bd2d5ff2983a4f08c636" @@ -2371,7 +2498,7 @@ debug@2.6.8, debug@^2.1.0, 
debug@^2.1.1, debug@^2.1.3, debug@^2.2.0, debug@^2.4. dependencies: ms "2.0.0" -debug@2.6.9: +debug@2.6.9, debug@^2.3.3: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" dependencies: @@ -2381,18 +2508,43 @@ decamelize@^1.0.0, decamelize@^1.1.1, decamelize@^1.1.2: version "1.2.0" resolved "https://registry.yarnpkg.com/decamelize/-/decamelize-1.2.0.tgz#f6534d15148269b20352e7bee26f501f9a191290" +decode-uri-component@^0.2.0: + version "0.2.0" + resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" + deep-extend@~0.4.0: version "0.4.2" resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.4.2.tgz#48b699c27e334bf89f10892be432f6e4c7d34a7f" -deep-freeze@^0.0.1: - version "0.0.1" - resolved "https://registry.yarnpkg.com/deep-freeze/-/deep-freeze-0.0.1.tgz#3a0b0005de18672819dfd38cd31f91179c893e84" - deep-is@~0.1.3: version "0.1.3" resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34" +defaults@^1.0.3: + version "1.0.3" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" + dependencies: + clone "^1.0.2" + +define-property@^0.2.5: + version "0.2.5" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-0.2.5.tgz#c35b1ef918ec3c990f9a5bc57be04aacec5c8116" + dependencies: + is-descriptor "^0.1.0" + +define-property@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-1.0.0.tgz#769ebaaf3f4a63aad3af9e8d304c9bbe79bfb0e6" + dependencies: + is-descriptor "^1.0.0" + +define-property@^2.0.2: + version "2.0.2" + resolved "https://registry.yarnpkg.com/define-property/-/define-property-2.0.2.tgz#d459689e8d654ba77e02a817f8710d702cb16e9d" + dependencies: + is-descriptor "^1.0.2" + isobject "^3.0.1" + defined@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" @@ -2679,6 +2831,21 @@ ember-cli-broccoli-sane-watcher@^2.0.4: rsvp "^3.0.18" sane "^1.1.1" +ember-cli-cjs-transform@^1.2.0: + version "1.2.0" + resolved "https://registry.yarnpkg.com/ember-cli-cjs-transform/-/ember-cli-cjs-transform-1.2.0.tgz#34a0d2667673caec0248f500a954f45668027e8b" + dependencies: + broccoli-debug "^0.6.4" + broccoli-plugin "^1.3.0" + ember-cli-babel "^6.6.0" + fs-extra "^5.0.0" + hash-for-dep "^1.2.3" + pkg-dir "^2.0.0" + rollup "^0.58.1" + rollup-plugin-commonjs "^9.1.0" + rollup-plugin-node-resolve "^3.3.0" + username "^3.0.0" + ember-cli-clipboard@^0.8.0: version "0.8.0" resolved "https://registry.yarnpkg.com/ember-cli-clipboard/-/ember-cli-clipboard-0.8.0.tgz#c2e91290b2746c1a4903097f5d7a55406de539b1" @@ -2805,10 +2972,14 @@ ember-cli-legacy-blueprints@^0.1.2: rsvp "^3.0.17" silent-error "^1.0.0" -ember-cli-lodash-subset@^1.0.11, ember-cli-lodash-subset@^1.0.7: +ember-cli-lodash-subset@^1.0.7: version "1.0.12" resolved "https://registry.yarnpkg.com/ember-cli-lodash-subset/-/ember-cli-lodash-subset-1.0.12.tgz#af2e77eba5dcb0d77f3308d3a6fd7d3450f6e537" +ember-cli-lodash-subset@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/ember-cli-lodash-subset/-/ember-cli-lodash-subset-2.0.1.tgz#20cb68a790fe0fde2488ddfd8efbb7df6fe766f2" + ember-cli-mirage@^0.4.1: version "0.4.1" resolved "https://registry.yarnpkg.com/ember-cli-mirage/-/ember-cli-mirage-0.4.1.tgz#bfdfe61e5e74dc3881ed31f12112dae1a29f0d4c" @@ -3000,11 +3171,11 @@ ember-cli-version-checker@^2.1.0: resolve "^1.3.3" semver "^5.3.0" -ember-cli@~2.15.0: - version "2.15.1" - resolved "https://registry.yarnpkg.com/ember-cli/-/ember-cli-2.15.1.tgz#773add3cc18e5068f1c5f43a77544efa2712e47b" +ember-cli@~2.16.0: + version "2.16.2" + resolved "https://registry.yarnpkg.com/ember-cli/-/ember-cli-2.16.2.tgz#53b922073a8e6f34255a6e0dcb1794a91ba3e1b7" dependencies: - amd-name-resolver "0.0.7" + 
amd-name-resolver "1.0.0" babel-plugin-transform-es2015-modules-amd "^6.24.0" bower-config "^1.3.0" bower-endpoint-parser "0.2.2" @@ -3014,7 +3185,8 @@ ember-cli@~2.15.0: broccoli-concat "^3.2.2" broccoli-config-loader "^1.0.0" broccoli-config-replace "^1.1.2" - broccoli-funnel "^1.0.6" + broccoli-debug "^0.6.3" + broccoli-funnel "^2.0.0" broccoli-funnel-reducer "^1.0.0" broccoli-merge-trees "^2.0.0" broccoli-middleware "^1.0.0" @@ -3022,31 +3194,30 @@ ember-cli@~2.15.0: broccoli-stew "^1.2.0" calculate-cache-key-for-tree "^1.0.0" capture-exit "^1.1.0" - chalk "^1.1.3" + chalk "^2.0.1" clean-base-url "^1.0.0" compression "^1.4.4" configstore "^3.0.0" - console-ui "^1.0.2" + console-ui "^2.0.0" core-object "^3.1.3" dag-map "^2.0.2" - deep-freeze "^0.0.1" diff "^3.2.0" ember-cli-broccoli-sane-watcher "^2.0.4" ember-cli-is-package-missing "^1.0.0" ember-cli-legacy-blueprints "^0.1.2" - ember-cli-lodash-subset "^1.0.11" + ember-cli-lodash-subset "^2.0.1" ember-cli-normalize-entity-name "^1.0.0" ember-cli-preprocess-registry "^3.1.0" ember-cli-string-utils "^1.0.0" ember-try "^0.2.15" ensure-posix-path "^1.0.2" - execa "^0.7.0" + execa "^0.8.0" exists-sync "0.0.4" exit "^0.1.2" express "^4.12.3" filesize "^3.1.3" find-up "^2.1.0" - fs-extra "^3.0.0" + fs-extra "^4.0.0" fs-tree-diff "^0.5.2" get-caller-file "^1.0.0" git-repo-info "^1.4.1" @@ -3074,7 +3245,7 @@ ember-cli@~2.15.0: promise-map-series "^0.2.1" quick-temp "^0.1.8" resolve "^1.3.0" - rsvp "^3.3.3" + rsvp "^3.6.0" sane "^1.6.0" semver "^5.1.1" silent-error "^1.0.0" @@ -3660,6 +3831,10 @@ estraverse@^4.0.0, estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0: version "4.2.0" resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.2.0.tgz#0dee3fed31fcd469618ce7342099fc1afa0bdb13" +estree-walker@^0.5.1, estree-walker@^0.5.2: + version "0.5.2" + resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-0.5.2.tgz#d3850be7529c9580d815600b53126515e146dd39" + esutils@^2.0.0, esutils@^2.0.2: 
version "2.0.2" resolved "https://registry.yarnpkg.com/esutils/-/esutils-2.0.2.tgz#0abf4f1caa5bcb1f7a9d8acc6dea4faaa04bac9b" @@ -3683,6 +3858,14 @@ events-to-array@^1.0.1: version "1.1.2" resolved "https://registry.yarnpkg.com/events-to-array/-/events-to-array-1.1.2.tgz#2d41f563e1fe400ed4962fe1a4d5c6a7539df7f6" +exec-file-sync@^2.0.0: + version "2.0.2" + resolved "https://registry.yarnpkg.com/exec-file-sync/-/exec-file-sync-2.0.2.tgz#58d441db46e40de6d1f30de5be022785bd89e328" + dependencies: + is-obj "^1.0.0" + object-assign "^4.0.1" + spawn-sync "^1.0.11" + exec-sh@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/exec-sh/-/exec-sh-0.2.0.tgz#14f75de3f20d286ef933099b2ce50a90359cef10" @@ -3701,6 +3884,18 @@ execa@^0.7.0: signal-exit "^3.0.0" strip-eof "^1.0.0" +execa@^0.8.0: + version "0.8.0" + resolved "https://registry.yarnpkg.com/execa/-/execa-0.8.0.tgz#d8d76bbc1b55217ed190fd6dd49d3c774ecfc8da" + dependencies: + cross-spawn "^5.0.1" + get-stream "^3.0.0" + is-stream "^1.1.0" + npm-run-path "^2.0.0" + p-finally "^1.0.0" + signal-exit "^3.0.0" + strip-eof "^1.0.0" + exif-parser@^0.1.9: version "0.1.11" resolved "https://registry.yarnpkg.com/exif-parser/-/exif-parser-0.1.11.tgz#8a97d1c9315ffd4754b6ae938ce4488d1b1a26b7" @@ -3731,6 +3926,18 @@ expand-brackets@^0.1.4: dependencies: is-posix-bracket "^0.1.0" +expand-brackets@^2.1.4: + version "2.1.4" + resolved "https://registry.yarnpkg.com/expand-brackets/-/expand-brackets-2.1.4.tgz#b77735e315ce30f6b6eff0f83b04151a22449622" + dependencies: + debug "^2.3.3" + define-property "^0.2.5" + extend-shallow "^2.0.1" + posix-character-classes "^0.1.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + expand-range@^1.8.1: version "1.8.2" resolved "https://registry.yarnpkg.com/expand-range/-/expand-range-1.8.2.tgz#a299effd335fe2721ebae8e257ec79644fc85337" @@ -3776,6 +3983,19 @@ express@^4.10.7, express@^4.12.3: utils-merge "1.0.0" vary "~1.1.1" +extend-shallow@^2.0.1: + version "2.0.1" + resolved 
"https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-2.0.1.tgz#51af7d614ad9a9f610ea1bafbb989d6b1c56890f" + dependencies: + is-extendable "^0.1.0" + +extend-shallow@^3.0.0, extend-shallow@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/extend-shallow/-/extend-shallow-3.0.2.tgz#26a71aaf073b39fb2127172746131c2704028db8" + dependencies: + assign-symbols "^1.0.0" + is-extendable "^1.0.1" + extend@^3.0.0, extend@~3.0.0: version "3.0.1" resolved "https://registry.yarnpkg.com/extend/-/extend-3.0.1.tgz#a755ea7bc1adfcc5a31ce7e762dbaadc5e636444" @@ -3802,6 +4022,19 @@ extglob@^0.3.1: dependencies: is-extglob "^1.0.0" +extglob@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/extglob/-/extglob-2.0.4.tgz#ad00fe4dc612a9232e8718711dc5cb5ab0285543" + dependencies: + array-unique "^0.3.2" + define-property "^1.0.0" + expand-brackets "^2.1.4" + extend-shallow "^2.0.1" + fragment-cache "^0.2.1" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + extract-zip@~1.5.0: version "1.5.0" resolved "https://registry.yarnpkg.com/extract-zip/-/extract-zip-1.5.0.tgz#92ccf6d81ef70a9fa4c1747114ccef6d8688a6c4" @@ -3943,6 +4176,15 @@ fill-range@^2.1.0: repeat-element "^1.1.2" repeat-string "^1.5.2" +fill-range@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-4.0.0.tgz#d544811d428f98eb06a63dc402d2403c328c38f7" + dependencies: + extend-shallow "^2.0.1" + is-number "^3.0.0" + repeat-string "^1.6.1" + to-regex-range "^2.1.0" + finalhandler@~1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/finalhandler/-/finalhandler-1.0.3.tgz#ef47e77950e999780e86022a560e3217e0d0cc89" @@ -4006,7 +4248,7 @@ for-each@^0.3.2: dependencies: is-function "~1.0.0" -for-in@^1.0.1: +for-in@^1.0.1, for-in@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/for-in/-/for-in-1.0.2.tgz#81068d295a8142ec0ac726c6e2200c30fb6d5e80" @@ -4038,6 +4280,12 @@ forwarded@~0.1.0: version "0.1.0" resolved 
"https://registry.yarnpkg.com/forwarded/-/forwarded-0.1.0.tgz#19ef9874c4ae1c297bcf078fde63a09b66a84363" +fragment-cache@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/fragment-cache/-/fragment-cache-0.2.1.tgz#4290fad27f13e89be7f33799c6bc5a0abfff0d19" + dependencies: + map-cache "^0.2.2" + fresh@0.5.0: version "0.5.0" resolved "https://registry.yarnpkg.com/fresh/-/fresh-0.5.0.tgz#f474ca5e6a9246d6fd8e0953cfa9b9c805afa78e" @@ -4090,12 +4338,20 @@ fs-extra@^2.0.0: graceful-fs "^4.1.2" jsonfile "^2.1.0" -fs-extra@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-3.0.1.tgz#3794f378c58b342ea7dbbb23095109c4b3b62291" +fs-extra@^4.0.0: + version "4.0.3" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-4.0.3.tgz#0d852122e5bc5beb453fb028e9c0c9bf36340c94" dependencies: graceful-fs "^4.1.2" - jsonfile "^3.0.0" + jsonfile "^4.0.0" + universalify "^0.1.0" + +fs-extra@^5.0.0: + version "5.0.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-5.0.0.tgz#414d0110cdd06705734d055652c5411260c31abd" + dependencies: + graceful-fs "^4.1.2" + jsonfile "^4.0.0" universalify "^0.1.0" fs-readdir-recursive@^0.1.0: @@ -4191,6 +4447,10 @@ get-stream@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" +get-value@^2.0.3, get-value@^2.0.6: + version "2.0.6" + resolved "https://registry.yarnpkg.com/get-value/-/get-value-2.0.6.tgz#dc15ca1c672387ca76bd37ac0a395ba2042a2c28" + getpass@^0.1.1: version "0.1.7" resolved "https://registry.yarnpkg.com/getpass/-/getpass-0.1.7.tgz#5eff8e3e684d569ae4cb2b1282604e8ba62149fa" @@ -4429,6 +4689,33 @@ has-unicode@^2.0.0: version "2.0.1" resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9" +has-value@^0.3.1: + version "0.3.1" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f" + 
dependencies: + get-value "^2.0.3" + has-values "^0.1.4" + isobject "^2.0.0" + +has-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-value/-/has-value-1.0.0.tgz#18b281da585b1c5c51def24c930ed29a0be6b177" + dependencies: + get-value "^2.0.6" + has-values "^1.0.0" + isobject "^3.0.0" + +has-values@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-0.1.4.tgz#6d61de95d91dfca9b9a02089ad384bff8f62b771" + +has-values@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/has-values/-/has-values-1.0.0.tgz#95b0b63fec2146619a6fe57fe75628d5a39efe4f" + dependencies: + is-number "^3.0.0" + kind-of "^4.0.0" + hash-for-dep@^1.0.2: version "1.1.2" resolved "https://registry.yarnpkg.com/hash-for-dep/-/hash-for-dep-1.1.2.tgz#e3347ed92960eb0bb53a2c6c2b70e36d75b7cd0c" @@ -4438,6 +4725,15 @@ hash-for-dep@^1.0.2: heimdalljs-logger "^0.1.7" resolve "^1.1.6" +hash-for-dep@^1.2.3: + version "1.2.3" + resolved "https://registry.yarnpkg.com/hash-for-dep/-/hash-for-dep-1.2.3.tgz#5ec69fca32c23523972d52acb5bb65ffc3664cab" + dependencies: + broccoli-kitchen-sink-helpers "^0.3.1" + heimdalljs "^0.2.3" + heimdalljs-logger "^0.1.7" + resolve "^1.4.0" + hasha@~2.2.0: version "2.2.0" resolved "https://registry.yarnpkg.com/hasha/-/hasha-2.2.0.tgz#78d7cbfc1e6d66303fe79837365984517b2f6ee1" @@ -4657,22 +4953,22 @@ inquirer@^0.12.0: strip-ansi "^3.0.0" through "^2.3.6" -inquirer@^1.2.3: - version "1.2.3" - resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-1.2.3.tgz#4dec6f32f37ef7bb0b2ed3f1d1a5c3f545074918" +inquirer@^2: + version "2.0.0" + resolved "https://registry.yarnpkg.com/inquirer/-/inquirer-2.0.0.tgz#e1351687b90d150ca403ceaa3cefb1e3065bef4b" dependencies: ansi-escapes "^1.1.0" chalk "^1.0.0" cli-cursor "^1.0.1" cli-width "^2.0.0" external-editor "^1.1.0" - figures "^1.3.5" + figures "^2.0.0" lodash "^4.3.0" mute-stream "0.0.6" pinkie-promise "^2.0.0" run-async "^2.2.0" rx "^4.1.0" - string-width "^1.0.1" + 
string-width "^2.0.0" strip-ansi "^3.0.0" through "^2.3.6" @@ -4717,6 +5013,18 @@ ipaddr.js@1.3.0: version "1.3.0" resolved "https://registry.yarnpkg.com/ipaddr.js/-/ipaddr.js-1.3.0.tgz#1e03a52fdad83a8bbb2b25cbf4998b4cffcd3dec" +is-accessor-descriptor@^0.1.6: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-0.1.6.tgz#a9e12cb3ae8d876727eeef3843f8a0897b5c98d6" + dependencies: + kind-of "^3.0.2" + +is-accessor-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-accessor-descriptor/-/is-accessor-descriptor-1.0.0.tgz#169c2f6d3df1f992618072365c9b0ea1f6878656" + dependencies: + kind-of "^6.0.0" + is-arrayish@^0.2.1: version "0.2.1" resolved "https://registry.yarnpkg.com/is-arrayish/-/is-arrayish-0.2.1.tgz#77c99840527aa8ecb1a8ba697b80645a7a926a9d" @@ -4737,6 +5045,34 @@ is-builtin-module@^1.0.0: dependencies: builtin-modules "^1.0.0" +is-data-descriptor@^0.1.4: + version "0.1.4" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-0.1.4.tgz#0b5ee648388e2c860282e793f1856fec3f301b56" + dependencies: + kind-of "^3.0.2" + +is-data-descriptor@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-data-descriptor/-/is-data-descriptor-1.0.0.tgz#d84876321d0e7add03990406abbbbd36ba9268c7" + dependencies: + kind-of "^6.0.0" + +is-descriptor@^0.1.0: + version "0.1.6" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-0.1.6.tgz#366d8240dde487ca51823b1ab9f07a10a78251ca" + dependencies: + is-accessor-descriptor "^0.1.6" + is-data-descriptor "^0.1.4" + kind-of "^5.0.0" + +is-descriptor@^1.0.0, is-descriptor@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-descriptor/-/is-descriptor-1.0.2.tgz#3b159746a66604b04f8c81524ba365c5f14d86ec" + dependencies: + is-accessor-descriptor "^1.0.0" + is-data-descriptor "^1.0.0" + kind-of "^6.0.2" + is-dotfile@^1.0.0: version "1.0.3" resolved 
"https://registry.yarnpkg.com/is-dotfile/-/is-dotfile-1.0.3.tgz#a6a2f32ffd2dfb04f5ca25ecd0f6b83cf798a1e1" @@ -4747,10 +5083,16 @@ is-equal-shallow@^0.1.3: dependencies: is-primitive "^2.0.0" -is-extendable@^0.1.1: +is-extendable@^0.1.0, is-extendable@^0.1.1: version "0.1.1" resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-0.1.1.tgz#62b110e289a471418e3ec36a617d472e301dfc89" +is-extendable@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/is-extendable/-/is-extendable-1.0.1.tgz#a7470f9e426733d81bd81e1155264e3a3507cab4" + dependencies: + is-plain-object "^2.0.4" + is-extglob@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-extglob/-/is-extglob-1.0.0.tgz#ac468177c4943405a092fc8f29760c6ffc6206c0" @@ -4795,6 +5137,10 @@ is-integer@^1.0.4: dependencies: is-finite "^1.0.0" +is-module@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/is-module/-/is-module-1.0.0.tgz#3258fb69f78c14d5b815d664336b4cffb6441591" + is-my-json-valid@^2.10.0, is-my-json-valid@^2.12.4: version "2.16.0" resolved "https://registry.yarnpkg.com/is-my-json-valid/-/is-my-json-valid-2.16.0.tgz#f079dd9bfdae65ee2038aae8acbc86ab109e3693" @@ -4816,10 +5162,20 @@ is-number@^3.0.0: dependencies: kind-of "^3.0.2" +is-number@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/is-number/-/is-number-4.0.0.tgz#0026e37f5454d73e356dfe6564699867c6a7f0ff" + is-obj@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/is-obj/-/is-obj-1.0.1.tgz#3e4729ac1f5fde025cd7d83a896dab9f4f67db0f" +is-odd@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/is-odd/-/is-odd-2.0.0.tgz#7646624671fd7ea558ccd9a2795182f2958f1b24" + dependencies: + is-number "^4.0.0" + is-path-cwd@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/is-path-cwd/-/is-path-cwd-1.0.0.tgz#d225ec23132e89edd38fda767472e62e65f1106d" @@ -4836,6 +5192,12 @@ is-path-inside@^1.0.0: dependencies: path-is-inside "^1.0.1" +is-plain-object@^2.0.1, 
is-plain-object@^2.0.3, is-plain-object@^2.0.4: + version "2.0.4" + resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-2.0.4.tgz#2c163b3fafb1b606d9d17928f05c2a1c38e07677" + dependencies: + isobject "^3.0.1" + is-posix-bracket@^0.1.0: version "0.1.1" resolved "https://registry.yarnpkg.com/is-posix-bracket/-/is-posix-bracket-0.1.1.tgz#3334dc79774368e92f016e6fbc0a88f5cd6e6bc4" @@ -4880,6 +5242,10 @@ is-windows@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-0.2.0.tgz#de1aa6d63ea29dd248737b69f1ff8b8002d2108c" +is-windows@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/is-windows/-/is-windows-1.0.2.tgz#d1850eb9791ecd18e6182ce12a30f396634bb19d" + isarray@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/isarray/-/isarray-0.0.1.tgz#8a18acfca9a8f4177e09abfc6038939b05d1eedf" @@ -4902,6 +5268,10 @@ isobject@^2.0.0: dependencies: isarray "1.0.0" +isobject@^3.0.0, isobject@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" + isstream@0.1.x, isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" @@ -5049,9 +5419,9 @@ jsonfile@^2.1.0: optionalDependencies: graceful-fs "^4.1.6" -jsonfile@^3.0.0: - version "3.0.1" - resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-3.0.1.tgz#a5ecc6f65f53f662c4415c7675a0331d0992ec66" +jsonfile@^4.0.0: + version "4.0.0" + resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-4.0.0.tgz#8771aae0799b64076b76640fca058f9c10e33ecb" optionalDependencies: graceful-fs "^4.1.6" @@ -5100,7 +5470,7 @@ kew@~0.7.0: version "0.7.0" resolved "https://registry.yarnpkg.com/kew/-/kew-0.7.0.tgz#79d93d2d33363d6fdd2970b335d9141ad591d79b" -kind-of@^3.0.2: +kind-of@^3.0.2, kind-of@^3.0.3, kind-of@^3.2.0: version "3.2.2" resolved 
"https://registry.yarnpkg.com/kind-of/-/kind-of-3.2.2.tgz#31ea21a734bab9bbb0f32466d893aea51e4a3c64" dependencies: @@ -5112,6 +5482,14 @@ kind-of@^4.0.0: dependencies: is-buffer "^1.1.5" +kind-of@^5.0.0: + version "5.1.0" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-5.1.0.tgz#729c91e2d857b7a419a1f9aa65685c4c33f5845d" + +kind-of@^6.0.0, kind-of@^6.0.2: + version "6.0.2" + resolved "https://registry.yarnpkg.com/kind-of/-/kind-of-6.0.2.tgz#01146b36a6218e64e58f3a8d66de5d7fc6f6d051" + klaw@^1.0.0: version "1.3.1" resolved "https://registry.yarnpkg.com/klaw/-/klaw-1.3.1.tgz#4088433b46b3b1ba259d78785d8e96f73ba02439" @@ -5393,6 +5771,12 @@ lodash@~2.4.1: version "2.4.2" resolved "https://registry.yarnpkg.com/lodash/-/lodash-2.4.2.tgz#fadd834b9683073da179b3eae6d9c0d15053f73e" +log-symbols@^2.2.0: + version "2.2.0" + resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a" + dependencies: + chalk "^2.0.1" + loglevel-colored-level-prefix@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/loglevel-colored-level-prefix/-/loglevel-colored-level-prefix-1.0.0.tgz#6a40218fdc7ae15fc76c3d0f3e676c465388603e" @@ -5440,6 +5824,12 @@ lru-cache@^4.0.1: pseudomap "^1.0.2" yallist "^2.1.2" +magic-string@^0.22.4: + version "0.22.5" + resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.22.5.tgz#8e9cf5afddf44385c1da5bc2a6a0dbd10b03657e" + dependencies: + vlq "^0.2.2" + make-dir@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/make-dir/-/make-dir-1.0.0.tgz#97a011751e91dd87cfadef58832ebb04936de978" @@ -5458,6 +5848,10 @@ makeerror@1.0.x: dependencies: tmpl "1.0.x" +map-cache@^0.2.2: + version "0.2.2" + resolved "https://registry.yarnpkg.com/map-cache/-/map-cache-0.2.2.tgz#c32abd0bd6525d9b051645bb4f26ac5dc98a0dbf" + map-obj@^1.0.0, map-obj@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-1.0.1.tgz#d933ceb9205d82bdcf4886f6742bdc2b4dea146d" @@ -5466,6 
+5860,12 @@ map-obj@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/map-obj/-/map-obj-2.0.0.tgz#a65cd29087a92598b8791257a523e021222ac1f9" +map-visit@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/map-visit/-/map-visit-1.0.0.tgz#ecdca8f13144e660f1b5bd41f12f3479d98dfb8f" + dependencies: + object-visit "^1.0.0" + markdown-it-terminal@0.1.0: version "0.1.0" resolved "https://registry.yarnpkg.com/markdown-it-terminal/-/markdown-it-terminal-0.1.0.tgz#545abd8dd01c3d62353bfcea71db580b51d22bd9" @@ -5510,13 +5910,19 @@ media-typer@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/media-typer/-/media-typer-0.3.0.tgz#8710d7af0aa626f8fffa1ce00168545263255748" +mem@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/mem/-/mem-1.1.0.tgz#5edd52b485ca1d900fe64895505399a0dfa45f76" + dependencies: + mimic-fn "^1.0.0" + memory-streams@^0.1.0: version "0.1.2" resolved "https://registry.yarnpkg.com/memory-streams/-/memory-streams-0.1.2.tgz#273ff777ab60fec599b116355255282cca2c50c2" dependencies: readable-stream "~1.0.2" -meow@^3.7.0: +meow@^3.4.0, meow@^3.7.0: version "3.7.0" resolved "https://registry.yarnpkg.com/meow/-/meow-3.7.0.tgz#72cb668b425228290abbfa856892587308a801fb" dependencies: @@ -5592,6 +5998,24 @@ micromatch@^2.1.5, micromatch@^2.3.7: parse-glob "^3.0.4" regex-cache "^0.4.2" +micromatch@^3.1.10: + version "3.1.10" + resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-3.1.10.tgz#70859bc95c9840952f359a068a3fc49f9ecfac23" + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + braces "^2.3.1" + define-property "^2.0.2" + extend-shallow "^3.0.2" + extglob "^2.0.4" + fragment-cache "^0.2.1" + kind-of "^6.0.2" + nanomatch "^1.2.9" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.2" + "mime-db@>= 1.27.0 < 2": version "1.29.0" resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.29.0.tgz#48d26d235589651704ac5916ca06001914266878" @@ -5661,6 +6085,13 @@ 
minimist@^1.1.0, minimist@^1.1.1, minimist@^1.1.3, minimist@^1.2.0: version "1.2.0" resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284" +mixin-deep@^1.2.0: + version "1.3.1" + resolved "https://registry.yarnpkg.com/mixin-deep/-/mixin-deep-1.3.1.tgz#a49e7268dce1a0d9698e45326c5626df3543d0fe" + dependencies: + for-in "^1.0.2" + is-extendable "^1.0.1" + mkdirp@0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.0.tgz#1bbf5ab1ba827af23575143490426455f481fe1e" @@ -5761,6 +6192,23 @@ nan@^2.3.0: version "2.6.2" resolved "https://registry.yarnpkg.com/nan/-/nan-2.6.2.tgz#e4ff34e6c95fdfb5aecc08de6596f43605a7db45" +nanomatch@^1.2.9: + version "1.2.9" + resolved "https://registry.yarnpkg.com/nanomatch/-/nanomatch-1.2.9.tgz#879f7150cb2dab7a471259066c104eee6e0fa7c2" + dependencies: + arr-diff "^4.0.0" + array-unique "^0.3.2" + define-property "^2.0.2" + extend-shallow "^3.0.2" + fragment-cache "^0.2.1" + is-odd "^2.0.0" + is-windows "^1.0.2" + kind-of "^6.0.2" + object.pick "^1.3.0" + regex-not "^1.0.0" + snapdragon "^0.8.1" + to-regex "^3.0.1" + native-promise-only@^0.8.1: version "0.8.1" resolved "https://registry.yarnpkg.com/native-promise-only/-/native-promise-only-0.8.1.tgz#20a318c30cb45f71fe7adfbf7b21c99c1472ef11" @@ -5977,6 +6425,20 @@ object-component@0.0.3: version "0.0.3" resolved "https://registry.yarnpkg.com/object-component/-/object-component-0.0.3.tgz#f0c69aa50efc95b866c186f400a33769cb2f1291" +object-copy@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/object-copy/-/object-copy-0.1.0.tgz#7e7d858b781bd7c991a41ba975ed3812754e998c" + dependencies: + copy-descriptor "^0.1.0" + define-property "^0.2.5" + kind-of "^3.0.3" + +object-visit@^1.0.0: + version "1.0.1" + resolved "https://registry.yarnpkg.com/object-visit/-/object-visit-1.0.1.tgz#f79c4493af0c5377b59fe39d395e41042dd045bb" + dependencies: + isobject "^3.0.0" + object.omit@^2.0.0: version "2.0.1" resolved 
"https://registry.yarnpkg.com/object.omit/-/object.omit-2.0.1.tgz#1a9c744829f39dbb858c76ca3579ae2a54ebd1fa" @@ -5984,6 +6446,12 @@ object.omit@^2.0.0: for-own "^0.1.4" is-extendable "^0.1.1" +object.pick@^1.3.0: + version "1.3.0" + resolved "https://registry.yarnpkg.com/object.pick/-/object.pick-1.3.0.tgz#87a10ac4c1694bd2e1cbf53591a66141fb5dd747" + dependencies: + isobject "^3.0.1" + on-finished@~2.3.0: version "2.3.0" resolved "https://registry.yarnpkg.com/on-finished/-/on-finished-2.3.0.tgz#20f1336481b083cd75337992a16971aa2d906947" @@ -6032,14 +6500,16 @@ options@>=0.0.5: version "0.0.6" resolved "https://registry.yarnpkg.com/options/-/options-0.0.6.tgz#ec22d312806bb53e731773e7cdaefcf1c643128f" -ora@^0.2.0: - version "0.2.3" - resolved "https://registry.yarnpkg.com/ora/-/ora-0.2.3.tgz#37527d220adcd53c39b73571d754156d5db657a4" +ora@^2.0.0: + version "2.1.0" + resolved "https://registry.yarnpkg.com/ora/-/ora-2.1.0.tgz#6caf2830eb924941861ec53a173799e008b51e5b" dependencies: - chalk "^1.1.1" - cli-cursor "^1.0.2" - cli-spinners "^0.1.2" - object-assign "^4.0.1" + chalk "^2.3.1" + cli-cursor "^2.1.0" + cli-spinners "^1.1.0" + log-symbols "^2.2.0" + strip-ansi "^4.0.0" + wcwidth "^1.0.1" os-homedir@^1.0.0, os-homedir@^1.0.1: version "1.0.2" @@ -6157,6 +6627,16 @@ parseurl@~1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/parseurl/-/parseurl-1.3.1.tgz#c8ab8c9223ba34888aa64a297b28853bec18da56" +pascalcase@^0.1.1: + version "0.1.1" + resolved "https://registry.yarnpkg.com/pascalcase/-/pascalcase-0.1.1.tgz#b363e55e8006ca6fe21784d2db22bd15d7917f14" + +passwd-user@^1.2.1: + version "1.2.1" + resolved "https://registry.yarnpkg.com/passwd-user/-/passwd-user-1.2.1.tgz#a01a5dc639ef007dc56364b8178569080ad3a7b8" + dependencies: + exec-file-sync "^2.0.0" + path-exists@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/path-exists/-/path-exists-1.0.0.tgz#d5a8998eb71ef37a74c34eb0d9eba6e878eea081" @@ -6251,6 +6731,12 @@ pixelmatch@^4.0.0: dependencies: pngjs 
"^3.0.0" +pkg-dir@^2.0.0: + version "2.0.0" + resolved "https://registry.yarnpkg.com/pkg-dir/-/pkg-dir-2.0.0.tgz#f6d5d1109e19d63edf428e0bd57e12777615334b" + dependencies: + find-up "^2.1.0" + pluralize@^1.2.1: version "1.2.1" resolved "https://registry.yarnpkg.com/pluralize/-/pluralize-1.2.1.tgz#d1a21483fd22bb41e58a12fa3421823140897c45" @@ -6275,6 +6761,10 @@ portfinder@^1.0.7: debug "^2.2.0" mkdirp "0.5.x" +posix-character-classes@^0.1.0: + version "0.1.1" + resolved "https://registry.yarnpkg.com/posix-character-classes/-/posix-character-classes-0.1.1.tgz#01eac0fe3b5af71a2a6c02feabb8c1fef7e00eab" + postcss-value-parser@^3.2.3: version "3.3.0" resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.0.tgz#87f38f9f18f774a4ab4c8a232f5c5ce8872a9d15" @@ -6651,6 +7141,13 @@ regex-cache@^0.4.2: is-equal-shallow "^0.1.3" is-primitive "^2.0.0" +regex-not@^1.0.0, regex-not@^1.0.2: + version "1.0.2" + resolved "https://registry.yarnpkg.com/regex-not/-/regex-not-1.0.2.tgz#1f4ece27e00b0b65e0247a6810e6a85d83a5752c" + dependencies: + extend-shallow "^3.0.2" + safe-regex "^1.1.0" + regexpu-core@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/regexpu-core/-/regexpu-core-2.0.0.tgz#49d038837b8dcf8bfa5b9a42139938e6ea2ae240" @@ -6687,7 +7184,7 @@ repeat-element@^1.1.2: version "1.1.2" resolved "https://registry.yarnpkg.com/repeat-element/-/repeat-element-1.1.2.tgz#ef089a178d1483baae4d93eb98b4f9e4e11d990a" -repeat-string@^1.5.2: +repeat-string@^1.5.2, repeat-string@^1.6.1: version "1.6.1" resolved "https://registry.yarnpkg.com/repeat-string/-/repeat-string-1.6.1.tgz#8dcae470e1c88abc2d600fff4a776286da75e637" @@ -6814,6 +7311,10 @@ resolve-from@^1.0.0: version "1.0.1" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-1.0.1.tgz#26cbfe935d1aeeeabb29bc3fe5aeb01e93d44226" +resolve-url@^0.2.1: + version "0.2.1" + resolved "https://registry.yarnpkg.com/resolve-url/-/resolve-url-0.2.1.tgz#2c637fe77c893afd2a663fe21aa9080068e2052a" + 
resolve@1.3.2: version "1.3.2" resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.3.2.tgz#1f0442c9e0cbb8136e87b9305f932f46c7f28235" @@ -6826,6 +7327,12 @@ resolve@^1.1.2, resolve@^1.1.6, resolve@^1.1.7, resolve@^1.3.0, resolve@^1.3.3: dependencies: path-parse "^1.0.5" +resolve@^1.4.0, resolve@^1.5.0: + version "1.7.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.7.1.tgz#aadd656374fd298aee895bc026b8297418677fd3" + dependencies: + path-parse "^1.0.5" + restore-cursor@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-1.0.1.tgz#34661f46886327fed2991479152252df92daa541" @@ -6840,6 +7347,10 @@ restore-cursor@^2.0.0: onetime "^2.0.0" signal-exit "^3.0.2" +ret@~0.1.10: + version "0.1.15" + resolved "https://registry.yarnpkg.com/ret/-/ret-0.1.15.tgz#b8a4825d5bdb1fc3f6f53c2bc33f81388681c7bc" + right-align@^0.1.1: version "0.1.3" resolved "https://registry.yarnpkg.com/right-align/-/right-align-0.1.3.tgz#61339b722fe6a3515689210d24e14c96148613ef" @@ -6856,6 +7367,38 @@ rimraf@~2.2.6: version "2.2.8" resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.2.8.tgz#e439be2aaee327321952730f99a8929e4fc50582" +rollup-plugin-commonjs@^9.1.0: + version "9.1.3" + resolved "https://registry.yarnpkg.com/rollup-plugin-commonjs/-/rollup-plugin-commonjs-9.1.3.tgz#37bfbf341292ea14f512438a56df8f9ca3ba4d67" + dependencies: + estree-walker "^0.5.1" + magic-string "^0.22.4" + resolve "^1.5.0" + rollup-pluginutils "^2.0.1" + +rollup-plugin-node-resolve@^3.3.0: + version "3.3.0" + resolved "https://registry.yarnpkg.com/rollup-plugin-node-resolve/-/rollup-plugin-node-resolve-3.3.0.tgz#c26d110a36812cbefa7ce117cadcd3439aa1c713" + dependencies: + builtin-modules "^2.0.0" + is-module "^1.0.0" + resolve "^1.1.6" + +rollup-pluginutils@^2.0.1: + version "2.1.0" + resolved "https://registry.yarnpkg.com/rollup-pluginutils/-/rollup-pluginutils-2.1.0.tgz#4ee80b1bbc8eaed67aabf7f55b4fe9ccf9fd3c3a" + dependencies: + estree-walker "^0.5.2" + 
micromatch "^3.1.10" + tosource "^1.0.0" + +rollup@^0.58.1: + version "0.58.2" + resolved "https://registry.yarnpkg.com/rollup/-/rollup-0.58.2.tgz#2feddea8c0c022f3e74b35c48e3c21b3433803ce" + dependencies: + "@types/estree" "0.0.38" + "@types/node" "*" + route-recognizer@^0.2.3: version "0.2.10" resolved "https://registry.yarnpkg.com/route-recognizer/-/route-recognizer-0.2.10.tgz#024b2283c2e68d13a7c7f5173a5924645e8902df" @@ -6868,6 +7411,10 @@ rsvp@^3.0.14, rsvp@^3.0.16, rsvp@^3.0.17, rsvp@^3.0.18, rsvp@^3.0.21, rsvp@^3.0. version "3.6.1" resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-3.6.1.tgz#34f4a7ac2859f7bacc8f49789c5604f1e26ae702" +rsvp@^3.6.0: + version "3.6.2" + resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-3.6.2.tgz#2e96491599a96cde1b515d5674a8f7a91452926a" + rsvp@^4.7.0: version "4.7.0" resolved "https://registry.yarnpkg.com/rsvp/-/rsvp-4.7.0.tgz#dc1b0b1a536f7dec9d2be45e0a12ad4197c9fd96" @@ -6924,6 +7471,12 @@ safe-json-parse@~1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/safe-json-parse/-/safe-json-parse-1.0.1.tgz#3e76723e38dfdda13c9b1d29a1e07ffee4b30b57" +safe-regex@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" + dependencies: + ret "~0.1.10" + samsam@1.x, samsam@^1.1.3: version "1.3.0" resolved "https://registry.yarnpkg.com/samsam/-/samsam-1.3.0.tgz#8d1d9350e25622da30de3e44ba692b5221ab7c50" @@ -7011,6 +7564,24 @@ set-immediate-shim@^1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/set-immediate-shim/-/set-immediate-shim-1.0.1.tgz#4b2b1b27eb808a9f8dcc481a58e5e56f599f3f61" +set-value@^0.4.3: + version "0.4.3" + resolved "https://registry.yarnpkg.com/set-value/-/set-value-0.4.3.tgz#7db08f9d3d22dc7f78e53af3c3bf4666ecdfccf1" + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.1" + to-object-path "^0.3.0" + +set-value@^2.0.0: + version "2.0.0" + resolved 
"https://registry.yarnpkg.com/set-value/-/set-value-2.0.0.tgz#71ae4a88f0feefbbf52d1ea604f3fb315ebb6274" + dependencies: + extend-shallow "^2.0.1" + is-extendable "^0.1.1" + is-plain-object "^2.0.3" + split-string "^3.0.1" + setprototypeof@1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/setprototypeof/-/setprototypeof-1.0.3.tgz#66567e37043eeb4f04d91bd658c0cbefb55b8e04" @@ -7103,6 +7674,33 @@ slide@^1.1.5: version "1.1.6" resolved "https://registry.yarnpkg.com/slide/-/slide-1.1.6.tgz#56eb027d65b4d2dce6cb2e2d32c4d4afc9e1d707" +snapdragon-node@^2.0.1: + version "2.1.1" + resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" + dependencies: + define-property "^1.0.0" + isobject "^3.0.0" + snapdragon-util "^3.0.1" + +snapdragon-util@^3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/snapdragon-util/-/snapdragon-util-3.0.1.tgz#f956479486f2acd79700693f6f7b805e45ab56e2" + dependencies: + kind-of "^3.2.0" + +snapdragon@^0.8.1: + version "0.8.2" + resolved "https://registry.yarnpkg.com/snapdragon/-/snapdragon-0.8.2.tgz#64922e7c565b0e14204ba1aa7d6964278d25182d" + dependencies: + base "^0.11.1" + debug "^2.2.0" + define-property "^0.2.5" + extend-shallow "^2.0.1" + map-cache "^0.2.2" + source-map "^0.5.6" + source-map-resolve "^0.5.0" + use "^3.1.0" + sntp@1.x.x: version "1.0.9" resolved "https://registry.yarnpkg.com/sntp/-/sntp-1.0.9.tgz#6541184cc90aeea6c6e7b35e2659082443c66198" @@ -7163,6 +7761,16 @@ sort-package-json@^1.4.0: dependencies: sort-object-keys "^1.1.1" +source-map-resolve@^0.5.0: + version "0.5.1" + resolved "https://registry.yarnpkg.com/source-map-resolve/-/source-map-resolve-0.5.1.tgz#7ad0f593f2281598e854df80f19aae4b92d7a11a" + dependencies: + atob "^2.0.0" + decode-uri-component "^0.2.0" + resolve-url "^0.2.1" + source-map-url "^0.4.0" + urix "^0.1.0" + source-map-support@^0.2.10: version "0.2.10" resolved 
"https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.2.10.tgz#ea5a3900a1c1cb25096a0ae8cc5c2b4b10ded3dc" @@ -7179,6 +7787,10 @@ source-map-url@^0.3.0: version "0.3.0" resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.3.0.tgz#7ecaf13b57bcd09da8a40c5d269db33799d4aaf9" +source-map-url@^0.4.0: + version "0.4.0" + resolved "https://registry.yarnpkg.com/source-map-url/-/source-map-url-0.4.0.tgz#3e935d7ddd73631b97659956d55128e87b5084a3" + source-map@0.1.32: version "0.1.32" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.1.32.tgz#c8b6c167797ba4740a8ea33252162ff08591b266" @@ -7203,7 +7815,7 @@ spawn-args@^0.2.0: version "0.2.0" resolved "https://registry.yarnpkg.com/spawn-args/-/spawn-args-0.2.0.tgz#fb7d0bd1d70fd4316bd9e3dec389e65f9d6361bb" -spawn-sync@^1.0.15: +spawn-sync@^1.0.11, spawn-sync@^1.0.15: version "1.0.15" resolved "https://registry.yarnpkg.com/spawn-sync/-/spawn-sync-1.0.15.tgz#b00799557eb7fb0c8376c29d44e8a1ea67e57476" dependencies: @@ -7224,6 +7836,12 @@ spdx-license-ids@^1.0.2: version "1.2.2" resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-1.2.2.tgz#c9df7a3424594ade6bd11900d596696dc06bac57" +split-string@^3.0.1, split-string@^3.0.2: + version "3.1.0" + resolved "https://registry.yarnpkg.com/split-string/-/split-string-3.1.0.tgz#7cb09dda3a86585705c64b39a6466038682e8fe2" + dependencies: + extend-shallow "^3.0.0" + sprintf-js@^1.0.3, sprintf-js@~1.0.2: version "1.0.3" resolved "https://registry.yarnpkg.com/sprintf-js/-/sprintf-js-1.0.3.tgz#04e6926f662895354f3dd015203633b857297e2c" @@ -7254,6 +7872,13 @@ stack-trace@0.0.x: version "0.0.10" resolved "https://registry.yarnpkg.com/stack-trace/-/stack-trace-0.0.10.tgz#547c70b347e8d32b4e108ea1a2a159e5fdde19c0" +static-extend@^0.1.1: + version "0.1.2" + resolved "https://registry.yarnpkg.com/static-extend/-/static-extend-0.1.2.tgz#60809c39cbff55337226fd5e0b520f341f1fb5c6" + dependencies: + define-property "^0.2.5" + object-copy 
"^0.1.0" + "statuses@>= 1.3.1 < 2", statuses@~1.3.1: version "1.3.1" resolved "https://registry.yarnpkg.com/statuses/-/statuses-1.3.1.tgz#faf51b9eb74aaef3b3acf4ad5f61abf24cb7b93e" @@ -7599,6 +8224,32 @@ to-ico@^1.1.2: parse-png "^1.0.0" resize-img "^1.1.0" +to-object-path@^0.3.0: + version "0.3.0" + resolved "https://registry.yarnpkg.com/to-object-path/-/to-object-path-0.3.0.tgz#297588b7b0e7e0ac08e04e672f85c1f4999e17af" + dependencies: + kind-of "^3.0.2" + +to-regex-range@^2.1.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-2.1.1.tgz#7c80c17b9dfebe599e27367e0d4dd5590141db38" + dependencies: + is-number "^3.0.0" + repeat-string "^1.6.1" + +to-regex@^3.0.1, to-regex@^3.0.2: + version "3.0.2" + resolved "https://registry.yarnpkg.com/to-regex/-/to-regex-3.0.2.tgz#13cfdd9b336552f30b51f33a8ae1b42a7a7599ce" + dependencies: + define-property "^2.0.2" + extend-shallow "^3.0.2" + regex-not "^1.0.2" + safe-regex "^1.1.0" + +tosource@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/tosource/-/tosource-1.0.0.tgz#42d88dd116618bcf00d6106dd5446f3427902ff1" + tough-cookie@~2.3.0: version "2.3.2" resolved "https://registry.yarnpkg.com/tough-cookie/-/tough-cookie-2.3.2.tgz#f081f76e4c85720e6c37a5faced737150d84072a" @@ -7724,6 +8375,15 @@ underscore@~1.6.0: version "1.6.0" resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.6.0.tgz#8b38b10cacdef63337b8b24e4ff86d45aea529a8" +union-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/union-value/-/union-value-1.0.0.tgz#5c71c34cb5bad5dcebe3ea0cd08207ba5aa1aea4" + dependencies: + arr-union "^3.1.0" + get-value "^2.0.6" + is-extendable "^0.1.1" + set-value "^0.4.3" + unique-string@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/unique-string/-/unique-string-1.0.0.tgz#9e1057cca851abb93398f8b33ae187b99caec11a" @@ -7738,18 +8398,35 @@ unpipe@1.0.0, unpipe@~1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec" +unset-value@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/unset-value/-/unset-value-1.0.0.tgz#8376873f7d2335179ffb1e6fc3a8ed0dfc8ab559" + dependencies: + has-value "^0.3.1" + isobject "^3.0.0" + untildify@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/untildify/-/untildify-2.1.0.tgz#17eb2807987f76952e9c0485fc311d06a826a2e0" dependencies: os-homedir "^1.0.0" +urix@^0.1.0: + version "0.1.0" + resolved "https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" + url-regex@^3.0.0: version "3.2.0" resolved "https://registry.yarnpkg.com/url-regex/-/url-regex-3.2.0.tgz#dbad1e0c9e29e105dd0b1f09f6862f7fdb482724" dependencies: ip-regex "^1.0.1" +use@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/use/-/use-3.1.0.tgz#14716bf03fdfefd03040aef58d8b4b85f3a7c544" + dependencies: + kind-of "^6.0.2" + user-home@^1.1.1: version "1.1.1" resolved "https://registry.yarnpkg.com/user-home/-/user-home-1.1.1.tgz#2b5be23a32b63a7c9deb8d0f28d485724a3df190" @@ -7760,10 +8437,31 @@ user-home@^2.0.0: dependencies: os-homedir "^1.0.0" +user-info@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/user-info/-/user-info-1.0.0.tgz#81c82b7ed63e674c2475667653413b3c76fde239" + dependencies: + os-homedir "^1.0.1" + passwd-user "^1.2.1" + username "^1.0.1" + username-sync@1.0.1: version "1.0.1" resolved "https://registry.yarnpkg.com/username-sync/-/username-sync-1.0.1.tgz#1cde87eefcf94b8822984d938ba2b797426dae1f" +username@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/username/-/username-1.0.1.tgz#e1f72295e3e58e06f002c6327ce06897a99cd67f" + dependencies: + meow "^3.4.0" + +username@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/username/-/username-3.0.0.tgz#b3dba982a72b4ce59d52f159fa1aeba266af5fc8" + dependencies: + execa "^0.7.0" + mem "^1.1.0" + 
util-deprecate@^1.0.2, util-deprecate@~1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/util-deprecate/-/util-deprecate-1.0.2.tgz#450d4dc9fa70de732762fbd2d4a28981419a0ccf" @@ -7807,6 +8505,10 @@ vinyl@^1.1.0: clone-stats "^0.0.1" replace-ext "0.0.1" +vlq@^0.2.2: + version "0.2.3" + resolved "https://registry.yarnpkg.com/vlq/-/vlq-0.2.3.tgz#8f3e4328cf63b1540c0d67e1b2778386f8975b26" + walk-sync@0.3.1: version "0.3.1" resolved "https://registry.yarnpkg.com/walk-sync/-/walk-sync-0.3.1.tgz#558a16aeac8c0db59c028b73c66f397684ece465" @@ -7842,6 +8544,12 @@ watch@~0.10.0: version "0.10.0" resolved "https://registry.yarnpkg.com/watch/-/watch-0.10.0.tgz#77798b2da0f9910d595f1ace5b0c2258521f21dc" +wcwidth@^1.0.0, wcwidth@^1.0.1: + version "1.0.1" + resolved "https://registry.yarnpkg.com/wcwidth/-/wcwidth-1.0.1.tgz#f0b0dcf915bc5ff1528afadb2c0e17b532da2fe8" + dependencies: + defaults "^1.0.3" + websocket-driver@>=0.5.1: version "0.6.5" resolved "https://registry.yarnpkg.com/websocket-driver/-/websocket-driver-0.6.5.tgz#5cb2556ceb85f4373c6d8238aa691c8454e13a36" @@ -8006,6 +8714,12 @@ yam@0.0.22: fs-extra "^0.30.0" lodash.merge "^4.4.0" +yargs-parser@^10.0.0: + version "10.0.0" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-10.0.0.tgz#c737c93de2567657750cb1f2c00be639fd19c994" + dependencies: + camelcase "^4.1.0" + yargs-parser@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-5.0.0.tgz#275ecf0d7ffe05c77e64e7c86e4cd94bf0e1228a" From 0d90e50122dd3aa835e6e7a362367c5cef271315 Mon Sep 17 00:00:00 2001 From: madalynrose Date: Fri, 25 May 2018 16:43:16 -0400 Subject: [PATCH 8/8] Update CHANGELOG.md --- CHANGELOG.md | 4 ++++ 1 file changed, 4 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index a0e9880a3..c7c4911b0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,6 +16,10 @@ FEATURES: * Userpass auth CIDR restrictions: When using the `userpass` auth method you can now limit authentication to specific 
CIDRs; these will also be encoded in resultant tokens to limit their use. + * Vault Browser CLI: The UI now supports usage of read/write/list/delete + commands in a CLI that can be accessed from the nav bar. Complex inputs such + as JSON files are not currently supported. This surfaces features otherwise + unsupported in Vault's UI. IMPROVEMENTS: