Merge branch 'aws-auth-backend' of https://github.com/hashicorp/vault into aws-auth-backend

commit 33541d4574
@@ -76,8 +76,11 @@ BUG FIXES:
   `default` resulted in the same behavior anyways. [GH-1276]
 * credential/token: Fix issues renewing tokens when using the "suffix"
   capability of token roles [GH-1331]
+* credential/token: Fix lookup via POST showing the request token instead of
+  the desired token [GH-1354]
 * credential/various: Fix renewal conditions when `default` policy is not
   contained in the backend config [GH-1256]
+* physical/s3: Don't panic in certain error cases from bad S3 responses [GH-1353]
 * secret/pki: Don't check whether a certificate is destined to be a CA
   certificate if sign-verbatim endpoint is used [GH-1250]
 
Makefile | 15
@@ -1,24 +1,23 @@
 TEST?=$$(go list ./... | grep -v /vendor/)
 VETARGS?=-asmdecl -atomic -bool -buildtags -copylocks -methods -nilfunc -printf -rangeloops -shift -structtags -unsafeptr
 EXTERNAL_TOOLS=\
-	github.com/mitchellh/gox \
-	golang.org/x/tools/cmd/cover \
-	golang.org/x/tools/cmd/vet
+	github.com/mitchellh/gox
+BUILD_TAGS?=vault
 
 default: test
 
 # bin generates the releaseable binaries for Vault
 bin: generate
-	@sh -c "'$(CURDIR)/scripts/build.sh'"
+	@CGO_ENABLED=0 BUILD_TAGS='$(BUILD_TAGS)' sh -c "'$(CURDIR)/scripts/build.sh'"
 
 # dev creates binaries for testing Vault locally. These are put
 # into ./bin/ as well as $GOPATH/bin
 dev: generate
-	@VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
+	@CGO_ENABLED=0 BUILD_TAGS='$(BUILD_TAGS)' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
 
 # test runs the unit tests and vets the code
 test: generate
-	VAULT_TOKEN= VAULT_ACC= go test $(TEST) $(TESTARGS) -timeout=120s -parallel=4
+	CGO_ENABLED=0 VAULT_TOKEN= VAULT_ACC= go test -tags='$(BUILD_TAGS)' $(TEST) $(TESTARGS) -timeout=120s -parallel=4
 
 # testacc runs acceptance tests
 testacc: generate

@@ -26,11 +25,11 @@ testacc: generate
 	echo "ERROR: Set TEST to a specific package"; \
 	exit 1; \
 	fi
-	VAULT_ACC=1 go test $(TEST) -v $(TESTARGS) -timeout 45m
+	VAULT_ACC=1 go test -tags='$(BUILD_TAGS)' $(TEST) -v $(TESTARGS) -timeout 45m
 
 # testrace runs the race checker
 testrace: generate
-	CGO_ENABLED=1 VAULT_TOKEN= VAULT_ACC= go test -race $(TEST) $(TESTARGS)
+	CGO_ENABLED=1 VAULT_TOKEN= VAULT_ACC= go test -tags='$(BUILD_TAGS)' -race $(TEST) $(TESTARGS)
 
 cover:
 	./scripts/coverage.sh --html
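The new BUILD_TAGS variable is threaded through to `go build`/`go test` as `-tags='$(BUILD_TAGS)'`, so source files guarded by a matching build constraint (such as the `// +build vault` constraint added to vault/seal_testing.go later in this commit) are only compiled when the tag is set. A minimal sketch, using a hypothetical file name not part of this commit:

// +build vault

// tagged_example.go (hypothetical): compiled only when the "vault" build tag is
// passed, e.g. `go build -tags='vault'` or `make dev BUILD_TAGS='vault'`.
package main

import "fmt"

func main() {
	fmt.Println("built with the vault build tag enabled")
}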
@@ -163,6 +163,14 @@ func (c *ServerCommand) Run(args []string) int {
 
 	var seal vault.Seal = &vault.DefaultSeal{}
 
+	// Ensure that the seal finalizer is called, even if using verify-only
+	defer func() {
+		err = seal.Finalize()
+		if err != nil {
+			c.Ui.Error(fmt.Sprintf("Error finalizing seals: %v", err))
+		}
+	}()
+
 	coreConfig := &vault.CoreConfig{
 		Physical:      backend,
 		AdvertiseAddr: config.Backend.AdvertiseAddr,

@@ -392,11 +400,6 @@ func (c *ServerCommand) Run(args []string) int {
 		listener.Close()
 	}
 
-	err = seal.Finalize()
-	if err != nil {
-		c.Ui.Error(fmt.Sprintf("Error finalizing seals: %v", err))
-	}
-
 	return 0
 }
 
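The two hunks above move seal finalization from a single late return path into a defer, so it runs on every exit from Run(), including verify-only shutdowns. A self-contained sketch of the pattern, with hypothetical types rather than Vault's API:

package main

import "fmt"

type seal struct{}

func (s *seal) Finalize() error { return nil }

func run(verifyOnly bool) int {
	s := &seal{}
	// Deferred finalization runs no matter which return below is taken.
	defer func() {
		if err := s.Finalize(); err != nil {
			fmt.Printf("Error finalizing seals: %v\n", err)
		}
	}()

	if verifyOnly {
		return 0 // early return still triggers Finalize
	}
	// ... normal server lifecycle ...
	return 0
}

func main() { fmt.Println(run(true)) }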
@@ -120,7 +120,6 @@ func (s *S3Backend) Get(key string) (*Entry, error) {
 		Bucket: aws.String(s.bucket),
 		Key:    aws.String(key),
 	})
-
 	if awsErr, ok := err.(awserr.RequestFailure); ok {
 		// Return nil on 404s, error on anything else
 		if awsErr.StatusCode() == 404 {

@@ -129,6 +128,12 @@ func (s *S3Backend) Get(key string) (*Entry, error) {
 			return nil, err
 		}
 	}
+	if err != nil {
+		return nil, err
+	}
+	if resp == nil {
+		return nil, fmt.Errorf("got nil response from S3 but no error")
+	}
 
 	data := make([]byte, *resp.ContentLength)
 	_, err = io.ReadFull(resp.Body, data)

@@ -172,6 +177,9 @@ func (s *S3Backend) List(prefix string) ([]string, error) {
 	if err != nil {
 		return nil, err
 	}
+	if resp == nil {
+		return nil, fmt.Errorf("nil response from S3 but no error")
+	}
 
 	keys := []string{}
 	for _, key := range resp.Contents {
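These hunks are the GH-1353 fix: check the error and also guard against a nil response before dereferencing fields like ContentLength. A minimal sketch of the defensive pattern with a hypothetical response type (not the AWS SDK):

package main

import (
	"errors"
	"fmt"
)

type getResponse struct{ ContentLength *int64 }

// get treats a nil response as an error even when err is nil, instead of
// panicking on resp.ContentLength.
func get(resp *getResponse, err error) (int64, error) {
	if err != nil {
		return 0, err
	}
	if resp == nil {
		return 0, errors.New("got nil response but no error")
	}
	return *resp.ContentLength, nil // safe to dereference now
}

func main() {
	_, err := get(nil, nil)
	fmt.Println(err)
}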
@@ -11,6 +11,9 @@ DIR="$( cd -P "$( dirname "$SOURCE" )/.." && pwd )"
 # Change into that directory
 cd "$DIR"
 
+# Set build tags
+BUILD_TAGS="${BUILD_TAGS:-"vault"}"
+
 # Get the git commit
 GIT_COMMIT="$(git rev-parse HEAD)"
 GIT_DIRTY="$(test -n "`git status --porcelain`" && echo "+CHANGES" || true)"

@@ -45,6 +48,7 @@ gox \
     -arch="${XC_ARCH}" \
     -ldflags "-X github.com/hashicorp/vault/version.GitCommit='${GIT_COMMIT}${GIT_DIRTY}'" \
     -output "pkg/{{.OS}}_{{.Arch}}/vault" \
+    -tags="${BUILD_TAGS}" \
     .
 
 # Move all the compiled things to the $GOPATH/bin
@@ -10,7 +10,7 @@ RUN apt-get update -y && apt-get install --no-install-recommends -y -q \
     git mercurial bzr \
     && rm -rf /var/lib/apt/lists/*
 
-ENV GOVERSION 1.6
+ENV GOVERSION 1.6.2
 RUN mkdir /goroot && mkdir /gopath
 RUN curl https://storage.googleapis.com/golang/go${GOVERSION}.linux-amd64.tar.gz \
    | tar xvzf - -C /goroot --strip-components=1
@@ -140,8 +140,15 @@ func TestCore_Rekey_Update(t *testing.T) {
 
 func testCore_Rekey_Update_Common(t *testing.T, c *Core, keys [][]byte, root string, recovery bool) {
 	// Start a rekey
+	var expType string
+	if recovery {
+		expType = c.seal.RecoveryType()
+	} else {
+		expType = c.seal.BarrierType()
+	}
+
 	newConf := &SealConfig{
-		Type:            "shamir",
+		Type:            expType,
 		SecretThreshold: 3,
 		SecretShares:    5,
 	}

@@ -208,7 +215,7 @@ func testCore_Rekey_Update_Common(t *testing.T, c *Core, keys [][]byte, root str
 
 	newConf.Nonce = rkconf.Nonce
 	if !reflect.DeepEqual(sealConf, newConf) {
-		t.Fatalf("\nexpected: %#v\nactual: %#v\n", newConf, sealConf)
+		t.Fatalf("\nexpected: %#v\nactual: %#v\nexpType: %s\nrecovery: %t", newConf, sealConf, expType, recovery)
 	}
 
 	// Attempt unseal if this was not recovery mode

@@ -230,7 +237,7 @@ func testCore_Rekey_Update_Common(t *testing.T, c *Core, keys [][]byte, root str
 
 	// Start another rekey, this time we require a quorum!
 	newConf = &SealConfig{
-		Type:            "shamir",
+		Type:            expType,
 		SecretThreshold: 1,
 		SecretShares:    1,
 	}
@@ -1,8 +1,11 @@
+// +build vault
+
 package vault
 
 import (
 	"bytes"
 	"fmt"
+	"testing"
 )
 
 type TestSeal struct {

@@ -88,3 +91,45 @@ func (d *TestSeal) SetRecoveryKey(key []byte) error {
 	d.recoveryKey = newbuf.Bytes()
 	return nil
 }
+
+func TestCoreUnsealedWithConfigs(t *testing.T, barrierConf, recoveryConf *SealConfig) (*Core, [][]byte, [][]byte, string) {
+	seal := &TestSeal{}
+	core := TestCoreWithSeal(t, seal)
+	result, err := core.Initialize(barrierConf, recoveryConf)
+	if err != nil {
+		t.Fatalf("err: %s", err)
+	}
+	err = core.UnsealWithStoredKeys()
+	if err != nil {
+		t.Fatalf("err: %s", err)
+	}
+	if sealed, _ := core.Sealed(); sealed {
+		for _, key := range result.SecretShares {
+			if _, err := core.Unseal(key); err != nil {
+
+				t.Fatalf("unseal err: %s", err)
+			}
+		}
+
+		sealed, err = core.Sealed()
+		if err != nil {
+			t.Fatalf("err checking seal status: %s", err)
+		}
+		if sealed {
+			t.Fatal("should not be sealed")
+		}
+	}
+
+	return core, result.SecretShares, result.RecoveryShares, result.RootToken
+}
+
+func TestSealDefConfigs() (*SealConfig, *SealConfig) {
+	return &SealConfig{
+		SecretShares:    5,
+		SecretThreshold: 3,
+		StoredShares:    2,
+	}, &SealConfig{
+		SecretShares:    5,
+		SecretThreshold: 3,
+	}
+}
@@ -376,49 +376,3 @@ func GenerateRandBytes(length int) ([]byte, error) {
 
 	return buf, nil
 }
-
-/*
- * TestSeal items
- */
-
-func TestCoreUnsealedWithConfigs(t *testing.T, barrierConf, recoveryConf *SealConfig) (*Core, [][]byte, [][]byte, string) {
-	seal := &TestSeal{}
-	core := TestCoreWithSeal(t, seal)
-	result, err := core.Initialize(barrierConf, recoveryConf)
-	if err != nil {
-		t.Fatalf("err: %s", err)
-	}
-	err = core.UnsealWithStoredKeys()
-	if err != nil {
-		t.Fatalf("err: %s", err)
-	}
-	if sealed, _ := core.Sealed(); sealed {
-		for _, key := range result.SecretShares {
-			if _, err := core.Unseal(key); err != nil {
-
-				t.Fatalf("unseal err: %s", err)
-			}
-		}
-
-		sealed, err = core.Sealed()
-		if err != nil {
-			t.Fatalf("err checking seal status: %s", err)
-		}
-		if sealed {
-			t.Fatal("should not be sealed")
-		}
-	}
-
-	return core, result.SecretShares, result.RecoveryShares, result.RootToken
-}
-
-func TestSealDefConfigs() (*SealConfig, *SealConfig) {
-	return &SealConfig{
-		SecretShares:    5,
-		SecretThreshold: 3,
-		StoredShares:    2,
-	}, &SealConfig{
-		SecretShares:    5,
-		SecretThreshold: 3,
-	}
-}
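The two hunks above relocate the TestSeal helpers into the build-tag-gated seal_testing.go rather than changing them. A brief sketch of how the pair is meant to be used together in an in-package test (hypothetical test, not part of the commit; it would also need the `// +build vault` tag to compile):

// +build vault

package vault

import "testing"

func TestExample_SealHelpers(t *testing.T) {
	// TestSealDefConfigs supplies default barrier/recovery seal configs;
	// TestCoreUnsealedWithConfigs returns an unsealed core plus its shares and root token.
	barrierConf, recoveryConf := TestSealDefConfigs()
	core, secretShares, recoveryShares, root := TestCoreUnsealedWithConfigs(t, barrierConf, recoveryConf)
	_, _, _ = core, secretShares, recoveryShares
	if root == "" {
		t.Fatal("expected a root token")
	}
}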
@@ -196,12 +196,16 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 		},
 
 		&framework.Path{
-			Pattern: "lookup" + framework.OptionalParamRegex("token"),
+			Pattern: "lookup" + framework.OptionalParamRegex("urltoken"),
 
 			Fields: map[string]*framework.FieldSchema{
+				"urltoken": &framework.FieldSchema{
+					Type:        framework.TypeString,
+					Description: "Token to lookup (GET/POST URL parameter)",
+				},
 				"token": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Token to lookup",
+					Description: "Token to lookup (POST request body)",
 				},
 			},
 

@@ -215,12 +219,16 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 		},
 
 		&framework.Path{
-			Pattern: "lookup-accessor" + framework.OptionalParamRegex("accessor"),
+			Pattern: "lookup-accessor" + framework.OptionalParamRegex("urlaccessor"),
 
 			Fields: map[string]*framework.FieldSchema{
+				"urlaccessor": &framework.FieldSchema{
+					Type:        framework.TypeString,
+					Description: "Accessor of the token to look up (URL parameter)",
+				},
 				"accessor": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Accessor of the token to lookup",
+					Description: "Accessor of the token to look up (request body)",
 				},
 			},
 

@@ -238,12 +246,12 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 			Fields: map[string]*framework.FieldSchema{
 				"token": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Token to lookup",
+					Description: "Token to look up (unused)",
 				},
 			},
 
 			Callbacks: map[logical.Operation]framework.OperationFunc{
-				logical.ReadOperation: t.handleLookup,
+				logical.ReadOperation: t.handleLookupSelf,
 			},
 
 			HelpSynopsis: strings.TrimSpace(tokenLookupHelp),

@@ -251,12 +259,16 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 		},
 
 		&framework.Path{
-			Pattern: "revoke-accessor" + framework.OptionalParamRegex("accessor"),
+			Pattern: "revoke-accessor" + framework.OptionalParamRegex("urlaccessor"),
 
 			Fields: map[string]*framework.FieldSchema{
+				"urlaccessor": &framework.FieldSchema{
+					Type:        framework.TypeString,
+					Description: "Accessor of the token (in URL)",
+				},
 				"accessor": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Accessor of the token",
+					Description: "Accessor of the token (request body)",
 				},
 			},
 

@@ -280,12 +292,16 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 		},
 
 		&framework.Path{
-			Pattern: "revoke" + framework.OptionalParamRegex("token"),
+			Pattern: "revoke" + framework.OptionalParamRegex("urltoken"),
 
 			Fields: map[string]*framework.FieldSchema{
+				"urltoken": &framework.FieldSchema{
+					Type:        framework.TypeString,
+					Description: "Token to revoke (in URL)",
+				},
 				"token": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Token to revoke",
+					Description: "Token to revoke (request body)",
 				},
 			},
 

@@ -298,12 +314,16 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 		},
 
 		&framework.Path{
-			Pattern: "revoke-orphan" + framework.OptionalParamRegex("token"),
+			Pattern: "revoke-orphan" + framework.OptionalParamRegex("urltoken"),
 
 			Fields: map[string]*framework.FieldSchema{
+				"urltoken": &framework.FieldSchema{
+					Type:        framework.TypeString,
+					Description: "Token to revoke (in URL)",
+				},
 				"token": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Token to revoke",
+					Description: "Token to revoke (request body)",
 				},
 			},
 

@@ -321,7 +341,7 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 			Fields: map[string]*framework.FieldSchema{
 				"token": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Token to renew",
+					Description: "Token to renew (unused)",
 				},
 				"increment": &framework.FieldSchema{
 					Type:        framework.TypeDurationSecond,

@@ -339,12 +359,16 @@ func NewTokenStore(c *Core, config *logical.BackendConfig) (*TokenStore, error)
 		},
 
 		&framework.Path{
-			Pattern: "renew" + framework.OptionalParamRegex("token"),
+			Pattern: "renew" + framework.OptionalParamRegex("urltoken"),
 
 			Fields: map[string]*framework.FieldSchema{
+				"urltoken": &framework.FieldSchema{
+					Type:        framework.TypeString,
+					Description: "Token to renew (in URL)",
+				},
 				"token": &framework.FieldSchema{
 					Type:        framework.TypeString,
-					Description: "Token to renew",
+					Description: "Token to renew (request body)",
 				},
 				"increment": &framework.FieldSchema{
 					Type:        framework.TypeDurationSecond,

@@ -728,7 +752,10 @@ func (ts *TokenStore) lookupByAccessor(accessor string) (string, error) {
 func (ts *TokenStore) handleUpdateLookupAccessor(req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
 	accessor := data.Get("accessor").(string)
 	if accessor == "" {
-		return nil, &StatusBadRequest{Err: "missing accessor"}
+		accessor = data.Get("urlaccessor").(string)
+		if accessor == "" {
+			return nil, &StatusBadRequest{Err: "missing accessor"}
+		}
 	}
 
 	tokenID, err := ts.lookupByAccessor(accessor)

@@ -773,7 +800,10 @@ func (ts *TokenStore) handleUpdateLookupAccessor(req *logical.Request, data *fra
 func (ts *TokenStore) handleUpdateRevokeAccessor(req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
 	accessor := data.Get("accessor").(string)
 	if accessor == "" {
-		return nil, &StatusBadRequest{Err: "missing accessor"}
+		accessor = data.Get("urlaccessor").(string)
+		if accessor == "" {
+			return nil, &StatusBadRequest{Err: "missing accessor"}
+		}
 	}
 
 	tokenID, err := ts.lookupByAccessor(accessor)

@@ -1043,7 +1073,10 @@ func (ts *TokenStore) handleRevokeTree(
 	req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
 	id := data.Get("token").(string)
 	if id == "" {
-		return logical.ErrorResponse("missing token ID"), logical.ErrInvalidRequest
+		id = data.Get("urltoken").(string)
+		if id == "" {
+			return logical.ErrorResponse("missing token ID"), logical.ErrInvalidRequest
+		}
 	}
 
 	// Revoke the token and its children

@@ -1061,7 +1094,10 @@ func (ts *TokenStore) handleRevokeOrphan(
 	// Parse the id
 	id := data.Get("token").(string)
 	if id == "" {
-		return logical.ErrorResponse("missing token ID"), logical.ErrInvalidRequest
+		id = data.Get("urltoken").(string)
+		if id == "" {
+			return logical.ErrorResponse("missing token ID"), logical.ErrInvalidRequest
+		}
 	}
 
 	parent, err := ts.Lookup(req.ClientToken)

@@ -1087,11 +1123,20 @@ func (ts *TokenStore) handleRevokeOrphan(
 	return nil, nil
 }
 
+func (ts *TokenStore) handleLookupSelf(
+	req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
+	data.Raw["token"] = req.ClientToken
+	return ts.handleLookup(req, data)
+}
+
 // handleLookup handles the auth/token/lookup/id path for querying information about
 // a particular token. This can be used to see which policies are applicable.
 func (ts *TokenStore) handleLookup(
 	req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
 	id := data.Get("token").(string)
+	if id == "" {
+		id = data.Get("urltoken").(string)
+	}
 	if id == "" {
 		id = req.ClientToken
 	}

@@ -1162,7 +1207,10 @@ func (ts *TokenStore) handleRenew(
 	req *logical.Request, data *framework.FieldData) (*logical.Response, error) {
 	id := data.Get("token").(string)
 	if id == "" {
-		return logical.ErrorResponse("missing token ID"), logical.ErrInvalidRequest
+		id = data.Get("urltoken").(string)
+		if id == "" {
+			return logical.ErrorResponse("missing token ID"), logical.ErrInvalidRequest
+		}
 	}
 	incrementRaw := data.Get("increment").(int)
 
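These hunks are the GH-1354 fix: the URL segment now binds to separate "urltoken"/"urlaccessor" fields, so a token sent in a POST body is no longer shadowed by the path parameter, with handlers falling back to the URL value when the body is empty. A hypothetical client-side sketch of driving the lookup endpoint over HTTP (the address and Vault token here are placeholders):

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// POST body carries "token"; alternatively GET /v1/auth/token/lookup/<token>
	// would carry it as the "urltoken" URL parameter.
	body := bytes.NewBufferString(`{"token": "client"}`)
	req, _ := http.NewRequest("POST", "http://127.0.0.1:8200/v1/auth/token/lookup", body)
	req.Header.Set("X-Vault-Token", "root-token-placeholder")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		fmt.Println("request failed:", err)
		return
	}
	defer resp.Body.Close()
	fmt.Println("status:", resp.Status)
}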
@@ -979,6 +979,7 @@ func TestTokenStore_HandleRequest_Lookup(t *testing.T) {
 
 	testCoreMakeToken(t, c, root, "client", "3600s", []string{"foo"})
 
+	// Test via GET
 	req = logical.TestRequest(t, logical.ReadOperation, "lookup/client")
 	resp, err = ts.HandleRequest(req)
 	if err != nil {

@@ -1016,6 +1017,47 @@ func TestTokenStore_HandleRequest_Lookup(t *testing.T) {
 		t.Fatalf("bad:\n%#v\nexp:\n%#v\n", resp.Data, exp)
 	}
 
+	// Test via POST
+	req = logical.TestRequest(t, logical.UpdateOperation, "lookup")
+	req.Data = map[string]interface{}{
+		"token": "client",
+	}
+	resp, err = ts.HandleRequest(req)
+	if err != nil {
+		t.Fatalf("err: %v %v", err, resp)
+	}
+	if resp == nil {
+		t.Fatalf("bad: %#v", resp)
+	}
+
+	exp = map[string]interface{}{
+		"id":           "client",
+		"accessor":     resp.Data["accessor"],
+		"policies":     []string{"default", "foo"},
+		"path":         "auth/token/create",
+		"meta":         map[string]string(nil),
+		"display_name": "token",
+		"orphan":       false,
+		"num_uses":     0,
+		"creation_ttl": int64(3600),
+		"ttl":          int64(3600),
+		"role":         "",
+	}
+
+	if resp.Data["creation_time"].(int64) == 0 {
+		t.Fatalf("creation time was zero")
+	}
+	delete(resp.Data, "creation_time")
+
+	// Depending on timing of the test this may have ticked down, so accept 3599
+	if resp.Data["ttl"].(int64) == 3599 {
+		resp.Data["ttl"] = int64(3600)
+	}
+
+	if !reflect.DeepEqual(resp.Data, exp) {
+		t.Fatalf("bad:\n%#v\nexp:\n%#v\n", resp.Data, exp)
+	}
+
 	// Test last_renewal_time functionality
 	req = logical.TestRequest(t, logical.UpdateOperation, "renew/client")
 	resp, err = ts.HandleRequest(req)
vendor/github.com/fullsailor/pkcs7/.gitignore (generated, vendored, new file) | 24
@@ -0,0 +1,24 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so

# Folders
_obj
_test

# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out

*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*

_testmain.go

*.exe
*.test
*.prof
vendor/github.com/fullsailor/pkcs7/LICENSE (generated, vendored, new file) | 22
@@ -0,0 +1,22 @@
The MIT License (MIT)

Copyright (c) 2015 Andrew Smith

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
vendor/github.com/fullsailor/pkcs7/README.md (generated, vendored, new file) | 7
@@ -0,0 +1,7 @@
# pkcs7

[![GoDoc](https://godoc.org/github.com/fullsailor/pkcs7?status.svg)](https://godoc.org/github.com/fullsailor/pkcs7)

pkcs7 implements parsing and creating signed and enveloped messages.

- Documentation on [GoDoc](http://godoc.org/github.com/fullsailor/pkcs7)
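A minimal sketch of driving the vendored package's API as defined in pkcs7.go below (Parse, Verify, GetOnlySigner); the DER payload is a placeholder and would come from, for example, a PKCS#7-signed document:

package main

import (
	"fmt"

	"github.com/fullsailor/pkcs7"
)

func main() {
	der := []byte{} // placeholder: a DER/BER-encoded PKCS#7 blob goes here

	p7, err := pkcs7.Parse(der)
	if err != nil {
		fmt.Println("parse failed:", err)
		return
	}
	// Note: per the package docs, Verify does not check certificate chains or signing time.
	if err := p7.Verify(); err != nil {
		fmt.Println("verify failed:", err)
		return
	}
	if signer := p7.GetOnlySigner(); signer != nil {
		fmt.Println("signed by:", signer.Subject.CommonName)
	}
	fmt.Printf("content is %d bytes\n", len(p7.Content))
}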
vendor/github.com/fullsailor/pkcs7/ber.go (generated, vendored, new file) | 228
@@ -0,0 +1,228 @@
package pkcs7

import (
	"bytes"
	"errors"
)

var encodeIndent = 0

type asn1Object interface {
	EncodeTo(writer *bytes.Buffer) error
}

type asn1Structured struct {
	tagBytes []byte
	content  []asn1Object
}

func (s asn1Structured) EncodeTo(out *bytes.Buffer) error {
	//fmt.Printf("%s--> tag: % X\n", strings.Repeat("| ", encodeIndent), s.tagBytes)
	encodeIndent++
	inner := new(bytes.Buffer)
	for _, obj := range s.content {
		err := obj.EncodeTo(inner)
		if err != nil {
			return err
		}
	}
	encodeIndent--
	out.Write(s.tagBytes)
	encodeLength(out, inner.Len())
	out.Write(inner.Bytes())
	return nil
}

type asn1Primitive struct {
	tagBytes []byte
	length   int
	content  []byte
}

func (p asn1Primitive) EncodeTo(out *bytes.Buffer) error {
	_, err := out.Write(p.tagBytes)
	if err != nil {
		return err
	}
	if err = encodeLength(out, p.length); err != nil {
		return err
	}
	//fmt.Printf("%s--> tag: % X length: %d\n", strings.Repeat("| ", encodeIndent), p.tagBytes, p.length)
	//fmt.Printf("%s--> content length: %d\n", strings.Repeat("| ", encodeIndent), len(p.content))
	out.Write(p.content)

	return nil
}

func ber2der(ber []byte) ([]byte, error) {
	if len(ber) == 0 {
		return nil, errors.New("ber2der: input ber is empty")
	}
	//fmt.Printf("--> ber2der: Transcoding %d bytes\n", len(ber))
	out := new(bytes.Buffer)

	obj, _, err := readObject(ber, 0)
	if err != nil {
		return nil, err
	}
	obj.EncodeTo(out)

	// if offset < len(ber) {
	//	return nil, fmt.Errorf("ber2der: Content longer than expected. Got %d, expected %d", offset, len(ber))
	//}

	return out.Bytes(), nil
}

// encodes lengths that are longer than 127 into string of bytes
func marshalLongLength(out *bytes.Buffer, i int) (err error) {
	n := lengthLength(i)

	for ; n > 0; n-- {
		err = out.WriteByte(byte(i >> uint((n-1)*8)))
		if err != nil {
			return
		}
	}

	return nil
}

// computes the byte length of an encoded length value
func lengthLength(i int) (numBytes int) {
	numBytes = 1
	for i > 255 {
		numBytes++
		i >>= 8
	}
	return
}

// encodes the length in DER format
// If the length fits in 7 bits, the value is encoded directly.
//
// Otherwise, the number of bytes to encode the length is first determined.
// This number is likely to be 4 or less for a 32bit length. This number is
// added to 0x80. The length is encoded in big endian encoding follow after
//
// Examples:
//  length | byte 1 | bytes n
//  0      | 0x00   | -
//  120    | 0x78   | -
//  200    | 0x81   | 0xC8
//  500    | 0x82   | 0x01 0xF4
//
func encodeLength(out *bytes.Buffer, length int) (err error) {
	if length >= 128 {
		l := lengthLength(length)
		err = out.WriteByte(0x80 | byte(l))
		if err != nil {
			return
		}
		err = marshalLongLength(out, length)
		if err != nil {
			return
		}
	} else {
		err = out.WriteByte(byte(length))
		if err != nil {
			return
		}
	}
	return
}

func readObject(ber []byte, offset int) (asn1Object, int, error) {
	//fmt.Printf("\n====> Starting readObject at offset: %d\n\n", offset)
	tagStart := offset
	b := ber[offset]
	offset++
	tag := b & 0x1F // last 5 bits
	if tag == 0x1F {
		tag = 0
		for ber[offset] >= 0x80 {
			tag = tag*128 + ber[offset] - 0x80
			offset++
		}
		tag = tag*128 + ber[offset] - 0x80
		offset++
	}
	tagEnd := offset

	kind := b & 0x20
	/*
		if kind == 0 {
			fmt.Print("--> Primitive\n")
		} else {
			fmt.Print("--> Constructed\n")
		}
	*/
	// read length
	var length int
	l := ber[offset]
	offset++
	hack := 0
	if l > 0x80 {
		numberOfBytes := (int)(l & 0x7F)
		if numberOfBytes > 4 { // int is only guaranteed to be 32bit
			return nil, 0, errors.New("ber2der: BER tag length too long")
		}
		if numberOfBytes == 4 && (int)(ber[offset]) > 0x7F {
			return nil, 0, errors.New("ber2der: BER tag length is negative")
		}
		if 0x0 == (int)(ber[offset]) {
			return nil, 0, errors.New("ber2der: BER tag length has leading zero")
		}
		//fmt.Printf("--> (compute length) indicator byte: %x\n", l)
		//fmt.Printf("--> (compute length) length bytes: % X\n", ber[offset:offset+numberOfBytes])
		for i := 0; i < numberOfBytes; i++ {
			length = length*256 + (int)(ber[offset])
			offset++
		}
	} else if l == 0x80 {
		// find length by searching content
		markerIndex := bytes.LastIndex(ber[offset:], []byte{0x0, 0x0})
		if markerIndex == -1 {
			return nil, 0, errors.New("ber2der: Invalid BER format")
		}
		length = markerIndex
		hack = 2
		//fmt.Printf("--> (compute length) marker found at offset: %d\n", markerIndex+offset)
	} else {
		length = (int)(l)
	}

	//fmt.Printf("--> length        : %d\n", length)
	contentEnd := offset + length
	if contentEnd > len(ber) {
		return nil, 0, errors.New("ber2der: BER tag length is more than available data")
	}
	//fmt.Printf("--> content start : %d\n", offset)
	//fmt.Printf("--> content end   : %d\n", contentEnd)
	//fmt.Printf("--> content       : % X\n", ber[offset:contentEnd])
	var obj asn1Object
	if kind == 0 {
		obj = asn1Primitive{
			tagBytes: ber[tagStart:tagEnd],
			length:   length,
			content:  ber[offset:contentEnd],
		}
	} else {
		var subObjects []asn1Object
		for offset < contentEnd {
			var subObj asn1Object
			var err error
			subObj, offset, err = readObject(ber[:contentEnd], offset)
			if err != nil {
				return nil, 0, err
			}
			subObjects = append(subObjects, subObj)
		}
		obj = asn1Structured{
			tagBytes: ber[tagStart:tagEnd],
			content:  subObjects,
		}
	}

	return obj, contentEnd + hack, nil
}
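A small in-package sketch (the ber2der function above is unexported, so this would have to live in package pkcs7) using the indefinite-length fixture from ber_test.go below; ber2der rewrites indefinite-length BER into definite-length DER:

package pkcs7

import "fmt"

func ExampleBer2der() {
	ber := []byte{0x30, 0x80, 0x02, 0x01, 0x01, 0x00, 0x00} // indefinite-length SEQUENCE
	der, err := ber2der(ber)
	if err != nil {
		fmt.Println("error:", err)
		return
	}
	fmt.Printf("% X\n", der)
	// Per the test fixture below, the expected output is: 30 03 02 01 01
}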
vendor/github.com/fullsailor/pkcs7/ber_test.go (generated, vendored, new file) | 61
@@ -0,0 +1,61 @@
package pkcs7

import (
	"bytes"
	"encoding/asn1"
	"strings"
	"testing"
)

func TestBer2Der(t *testing.T) {
	// indefinite length fixture
	ber := []byte{0x30, 0x80, 0x02, 0x01, 0x01, 0x00, 0x00}
	expected := []byte{0x30, 0x03, 0x02, 0x01, 0x01}
	der, err := ber2der(ber)
	if err != nil {
		t.Fatalf("ber2der failed with error: %v", err)
	}
	if bytes.Compare(der, expected) != 0 {
		t.Errorf("ber2der result did not match.\n\tExpected: % X\n\tActual: % X", expected, der)
	}

	if der2, err := ber2der(der); err != nil {
		t.Errorf("ber2der on DER bytes failed with error: %v", err)
	} else {
		if !bytes.Equal(der, der2) {
			t.Error("ber2der is not idempotent")
		}
	}
	var thing struct {
		Number int
	}
	rest, err := asn1.Unmarshal(der, &thing)
	if err != nil {
		t.Errorf("Cannot parse resulting DER because: %v", err)
	} else if len(rest) > 0 {
		t.Errorf("Resulting DER has trailing data: % X", rest)
	}
}

func TestBer2Der_Negatives(t *testing.T) {
	fixtures := []struct {
		Input         []byte
		ErrorContains string
	}{
		{[]byte{0x30, 0x85}, "length too long"},
		{[]byte{0x30, 0x84, 0x80, 0x0, 0x0, 0x0}, "length is negative"},
		{[]byte{0x30, 0x82, 0x0, 0x1}, "length has leading zero"},
		{[]byte{0x30, 0x80, 0x1, 0x2}, "Invalid BER format"},
		{[]byte{0x30, 0x03, 0x01, 0x02}, "length is more than available data"},
	}

	for _, fixture := range fixtures {
		_, err := ber2der(fixture.Input)
		if err == nil {
			t.Errorf("No error thrown. Expected: %s", fixture.ErrorContains)
		}
		if !strings.Contains(err.Error(), fixture.ErrorContains) {
			t.Errorf("Unexpected error thrown.\n\tExpected: /%s/\n\tActual: %s", fixture.ErrorContains, err.Error())
		}
	}
}
vendor/github.com/fullsailor/pkcs7/pkcs7.go (generated, vendored, new file) | 776
@ -0,0 +1,776 @@
|
||||||
|
// Package pkcs7 implements parsing and generation of some PKCS#7 structures.
|
||||||
|
package pkcs7
|
||||||
|
|
||||||
|
import (
|
||||||
|
"bytes"
|
||||||
|
"crypto"
|
||||||
|
"crypto/aes"
|
||||||
|
"crypto/cipher"
|
||||||
|
"crypto/des"
|
||||||
|
"crypto/hmac"
|
||||||
|
"crypto/rand"
|
||||||
|
"crypto/rsa"
|
||||||
|
"crypto/x509"
|
||||||
|
"crypto/x509/pkix"
|
||||||
|
"encoding/asn1"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"math/big"
|
||||||
|
"sort"
|
||||||
|
"time"
|
||||||
|
|
||||||
|
_ "crypto/sha1" // for crypto.SHA1
|
||||||
|
)
|
||||||
|
|
||||||
|
// PKCS7 Represents a PKCS7 structure
|
||||||
|
type PKCS7 struct {
|
||||||
|
Content []byte
|
||||||
|
Certificates []*x509.Certificate
|
||||||
|
CRLs []pkix.CertificateList
|
||||||
|
Signers []signerInfo
|
||||||
|
raw interface{}
|
||||||
|
}
|
||||||
|
|
||||||
|
type contentInfo struct {
|
||||||
|
ContentType asn1.ObjectIdentifier
|
||||||
|
Content asn1.RawValue `asn1:"explicit,optional,tag:0"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrUnsupportedContentType is returned when a PKCS7 content is not supported.
|
||||||
|
// Currently only Data (1.2.840.113549.1.7.1), Signed Data (1.2.840.113549.1.7.2),
|
||||||
|
// and Enveloped Data are supported (1.2.840.113549.1.7.3)
|
||||||
|
var ErrUnsupportedContentType = errors.New("pkcs7: cannot parse data: unimplemented content type")
|
||||||
|
|
||||||
|
type unsignedData []byte
|
||||||
|
|
||||||
|
var (
|
||||||
|
oidData = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 7, 1}
|
||||||
|
oidSignedData = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 7, 2}
|
||||||
|
oidEnvelopedData = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 7, 3}
|
||||||
|
oidSignedAndEnvelopedData = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 7, 4}
|
||||||
|
oidDigestedData = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 7, 5}
|
||||||
|
oidEncryptedData = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 7, 6}
|
||||||
|
oidAttributeContentType = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 9, 3}
|
||||||
|
oidAttributeMessageDigest = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 9, 4}
|
||||||
|
oidAttributeSigningTime = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 9, 5}
|
||||||
|
)
|
||||||
|
|
||||||
|
type signedData struct {
|
||||||
|
Version int `asn1:"default:1"`
|
||||||
|
DigestAlgorithmIdentifiers []pkix.AlgorithmIdentifier `asn1:"set"`
|
||||||
|
ContentInfo contentInfo
|
||||||
|
Certificates rawCertificates `asn1:"optional,tag:0"`
|
||||||
|
CRLs []pkix.CertificateList `asn1:"optional,tag:1"`
|
||||||
|
SignerInfos []signerInfo `asn1:"set"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type rawCertificates struct {
|
||||||
|
Raw asn1.RawContent
|
||||||
|
}
|
||||||
|
|
||||||
|
type envelopedData struct {
|
||||||
|
Version int
|
||||||
|
RecipientInfos []recipientInfo `asn1:"set"`
|
||||||
|
EncryptedContentInfo encryptedContentInfo
|
||||||
|
}
|
||||||
|
|
||||||
|
type recipientInfo struct {
|
||||||
|
Version int
|
||||||
|
IssuerAndSerialNumber issuerAndSerial
|
||||||
|
KeyEncryptionAlgorithm pkix.AlgorithmIdentifier
|
||||||
|
EncryptedKey []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
type encryptedContentInfo struct {
|
||||||
|
ContentType asn1.ObjectIdentifier
|
||||||
|
ContentEncryptionAlgorithm pkix.AlgorithmIdentifier
|
||||||
|
EncryptedContent asn1.RawValue `asn1:"tag:0,optional,explicit"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type attribute struct {
|
||||||
|
Type asn1.ObjectIdentifier
|
||||||
|
Value asn1.RawValue `asn1:"set"`
|
||||||
|
}
|
||||||
|
|
||||||
|
type issuerAndSerial struct {
|
||||||
|
IssuerName asn1.RawValue
|
||||||
|
SerialNumber *big.Int
|
||||||
|
}
|
||||||
|
|
||||||
|
// MessageDigestMismatchError is returned when the signer data digest does not
|
||||||
|
// match the computed digest for the contained content
|
||||||
|
type MessageDigestMismatchError struct {
|
||||||
|
ExpectedDigest []byte
|
||||||
|
ActualDigest []byte
|
||||||
|
}
|
||||||
|
|
||||||
|
func (err *MessageDigestMismatchError) Error() string {
|
||||||
|
return fmt.Sprintf("pkcs7: Message digest mismatch\n\tExpected: %X\n\tActual : %X", err.ExpectedDigest, err.ActualDigest)
|
||||||
|
}
|
||||||
|
|
||||||
|
type signerInfo struct {
|
||||||
|
Version int `asn1:"default:1"`
|
||||||
|
IssuerAndSerialNumber issuerAndSerial
|
||||||
|
DigestAlgorithm pkix.AlgorithmIdentifier
|
||||||
|
AuthenticatedAttributes []attribute `asn1:"optional,tag:0"`
|
||||||
|
DigestEncryptionAlgorithm pkix.AlgorithmIdentifier
|
||||||
|
EncryptedDigest []byte
|
||||||
|
UnauthenticatedAttributes []attribute `asn1:"optional,tag:1"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Parse decodes a DER encoded PKCS7 package
|
||||||
|
func Parse(data []byte) (p7 *PKCS7, err error) {
|
||||||
|
if len(data) == 0 {
|
||||||
|
return nil, errors.New("pkcs7: input data is empty")
|
||||||
|
}
|
||||||
|
var info contentInfo
|
||||||
|
der, err := ber2der(data)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
rest, err := asn1.Unmarshal(der, &info)
|
||||||
|
if len(rest) > 0 {
|
||||||
|
err = asn1.SyntaxError{Msg: "trailing data"}
|
||||||
|
return
|
||||||
|
}
|
||||||
|
if err != nil {
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
|
// fmt.Printf("--> Content Type: %s", info.ContentType)
|
||||||
|
switch {
|
||||||
|
case info.ContentType.Equal(oidSignedData):
|
||||||
|
return parseSignedData(info.Content.Bytes)
|
||||||
|
case info.ContentType.Equal(oidEnvelopedData):
|
||||||
|
return parseEnvelopedData(info.Content.Bytes)
|
||||||
|
}
|
||||||
|
return nil, ErrUnsupportedContentType
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseSignedData(data []byte) (*PKCS7, error) {
|
||||||
|
var sd signedData
|
||||||
|
asn1.Unmarshal(data, &sd)
|
||||||
|
certs, err := sd.Certificates.Parse()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
// fmt.Printf("--> Signed Data Version %d\n", sd.Version)
|
||||||
|
|
||||||
|
var compound asn1.RawValue
|
||||||
|
var content unsignedData
|
||||||
|
|
||||||
|
// The Content.Bytes maybe empty on PKI responses.
|
||||||
|
if len(sd.ContentInfo.Content.Bytes) > 0 {
|
||||||
|
if _, err := asn1.Unmarshal(sd.ContentInfo.Content.Bytes, &compound); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
// Compound octet string
|
||||||
|
if compound.IsCompound {
|
||||||
|
if _, err = asn1.Unmarshal(compound.Bytes, &content); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
// assuming this is tag 04
|
||||||
|
content = compound.Bytes
|
||||||
|
}
|
||||||
|
return &PKCS7{
|
||||||
|
Content: content,
|
||||||
|
Certificates: certs,
|
||||||
|
CRLs: sd.CRLs,
|
||||||
|
Signers: sd.SignerInfos,
|
||||||
|
raw: sd}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func (raw rawCertificates) Parse() ([]*x509.Certificate, error) {
|
||||||
|
if len(raw.Raw) == 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var val asn1.RawValue
|
||||||
|
if _, err := asn1.Unmarshal(raw.Raw, &val); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
return x509.ParseCertificates(val.Bytes)
|
||||||
|
}
|
||||||
|
|
||||||
|
func parseEnvelopedData(data []byte) (*PKCS7, error) {
|
||||||
|
var ed envelopedData
|
||||||
|
if _, err := asn1.Unmarshal(data, &ed); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return &PKCS7{
|
||||||
|
raw: ed,
|
||||||
|
}, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// Verify checks the signatures of a PKCS7 object
|
||||||
|
// WARNING: Verify does not check signing time or verify certificate chains at
|
||||||
|
// this time.
|
||||||
|
func (p7 *PKCS7) Verify() (err error) {
|
||||||
|
if len(p7.Signers) == 0 {
|
||||||
|
return errors.New("pkcs7: Message has no signers")
|
||||||
|
}
|
||||||
|
for _, signer := range p7.Signers {
|
||||||
|
if err := verifySignature(p7, signer); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func verifySignature(p7 *PKCS7, signer signerInfo) error {
|
||||||
|
if len(signer.AuthenticatedAttributes) > 0 {
|
||||||
|
// TODO(fullsailor): First check the content type match
|
||||||
|
var digest []byte
|
||||||
|
err := unmarshalAttribute(signer.AuthenticatedAttributes, oidAttributeMessageDigest, &digest)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
hash, err := getHashForOID(signer.DigestAlgorithm.Algorithm)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
h := hash.New()
|
||||||
|
h.Write(p7.Content)
|
||||||
|
computed := h.Sum(nil)
|
||||||
|
if !hmac.Equal(digest, computed) {
|
||||||
|
return &MessageDigestMismatchError{
|
||||||
|
ExpectedDigest: digest,
|
||||||
|
ActualDigest: computed,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
cert := getCertFromCertsByIssuerAndSerial(p7.Certificates, signer.IssuerAndSerialNumber)
|
||||||
|
if cert == nil {
|
||||||
|
return errors.New("pkcs7: No certificate for signer")
|
||||||
|
}
|
||||||
|
// TODO(fullsailor): Optionally verify certificate chain
|
||||||
|
// TODO(fullsailor): Optionally verify signingTime against certificate NotAfter/NotBefore
|
||||||
|
encodedAttributes, err := marshalAttributes(signer.AuthenticatedAttributes)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
algo := x509.SHA1WithRSA
|
||||||
|
return cert.CheckSignature(algo, encodedAttributes, signer.EncryptedDigest)
|
||||||
|
}
|
||||||
|
|
||||||
|
func marshalAttributes(attrs []attribute) ([]byte, error) {
|
||||||
|
encodedAttributes, err := asn1.Marshal(struct {
|
||||||
|
A []attribute `asn1:"set"`
|
||||||
|
}{A: attrs})
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remove the leading sequence octets
|
||||||
|
var raw asn1.RawValue
|
||||||
|
asn1.Unmarshal(encodedAttributes, &raw)
|
||||||
|
return raw.Bytes, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
oidDigestAlgorithmSHA1 = asn1.ObjectIdentifier{1, 3, 14, 3, 2, 26}
|
||||||
|
oidEncryptionAlgorithmRSA = asn1.ObjectIdentifier{1, 2, 840, 113549, 1, 1, 1}
|
||||||
|
)
|
||||||
|
|
||||||
|
func getCertFromCertsByIssuerAndSerial(certs []*x509.Certificate, ias issuerAndSerial) *x509.Certificate {
|
||||||
|
for _, cert := range certs {
|
||||||
|
if isCertMatchForIssuerAndSerial(cert, ias) {
|
||||||
|
return cert
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getHashForOID(oid asn1.ObjectIdentifier) (crypto.Hash, error) {
|
||||||
|
switch {
|
||||||
|
case oid.Equal(oidDigestAlgorithmSHA1):
|
||||||
|
return crypto.SHA1, nil
|
||||||
|
}
|
||||||
|
return crypto.Hash(0), ErrUnsupportedAlgorithm
|
||||||
|
}
|
||||||
|
|
||||||
|
// GetOnlySigner returns an x509.Certificate for the first signer of the signed
|
||||||
|
// data payload. If there are more or less than one signer, nil is returned
|
||||||
|
func (p7 *PKCS7) GetOnlySigner() *x509.Certificate {
|
||||||
|
if len(p7.Signers) != 1 {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
signer := p7.Signers[0]
|
||||||
|
return getCertFromCertsByIssuerAndSerial(p7.Certificates, signer.IssuerAndSerialNumber)
|
||||||
|
}
|
||||||
|
|
||||||
|
// ErrUnsupportedAlgorithm tells you when our quick dev assumptions have failed
|
||||||
|
var ErrUnsupportedAlgorithm = errors.New("pkcs7: cannot decrypt data: only RSA, DES, DES-EDE3 and AES-256-CBC supported")
|
||||||
|
|
||||||
|
// ErrNotEncryptedContent is returned when attempting to Decrypt data that is not encrypted data
|
||||||
|
var ErrNotEncryptedContent = errors.New("pkcs7: content data is a decryptable data type")
|
||||||
|
|
||||||
|
// Decrypt decrypts encrypted content info for recipient cert and private key
|
||||||
|
func (p7 *PKCS7) Decrypt(cert *x509.Certificate, pk crypto.PrivateKey) ([]byte, error) {
|
||||||
|
data, ok := p7.raw.(envelopedData)
|
||||||
|
if !ok {
|
||||||
|
return nil, ErrNotEncryptedContent
|
||||||
|
}
|
||||||
|
recipient := selectRecipientForCertificate(data.RecipientInfos, cert)
|
||||||
|
if recipient.EncryptedKey == nil {
|
||||||
|
return nil, errors.New("pkcs7: no enveloped recipient for provided certificate")
|
||||||
|
}
|
||||||
|
if priv := pk.(*rsa.PrivateKey); priv != nil {
|
||||||
|
var contentKey []byte
|
||||||
|
contentKey, err := rsa.DecryptPKCS1v15(rand.Reader, priv, recipient.EncryptedKey)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
return data.EncryptedContentInfo.decrypt(contentKey)
|
||||||
|
}
|
||||||
|
fmt.Printf("Unsupported Private Key: %v\n", pk)
|
||||||
|
return nil, ErrUnsupportedAlgorithm
|
||||||
|
}

var oidEncryptionAlgorithmDESCBC = asn1.ObjectIdentifier{1, 3, 14, 3, 2, 7}
var oidEncryptionAlgorithmDESEDE3CBC = asn1.ObjectIdentifier{1, 2, 840, 113549, 3, 7}
var oidEncryptionAlgorithmAES256CBC = asn1.ObjectIdentifier{2, 16, 840, 1, 101, 3, 4, 1, 42}

func (eci encryptedContentInfo) decrypt(key []byte) ([]byte, error) {
	alg := eci.ContentEncryptionAlgorithm.Algorithm
	if !alg.Equal(oidEncryptionAlgorithmDESCBC) && !alg.Equal(oidEncryptionAlgorithmDESEDE3CBC) && !alg.Equal(oidEncryptionAlgorithmAES256CBC) {
		fmt.Printf("Unsupported Content Encryption Algorithm: %s\n", alg)
		return nil, ErrUnsupportedAlgorithm
	}

	// EncryptedContent can either be constructed of multiple OCTET STRINGs
	// or _be_ a tagged OCTET STRING
	var cyphertext []byte
	if eci.EncryptedContent.IsCompound {
		// Complex case to concat all of the children OCTET STRINGs
		var buf bytes.Buffer
		cypherbytes := eci.EncryptedContent.Bytes
		for {
			var part []byte
			cypherbytes, _ = asn1.Unmarshal(cypherbytes, &part)
			buf.Write(part)
			if cypherbytes == nil {
				break
			}
		}
		cyphertext = buf.Bytes()
	} else {
		// Simple case, the bytes _are_ the cyphertext
		cyphertext = eci.EncryptedContent.Bytes
	}

	var block cipher.Block
	var err error

	switch {
	case alg.Equal(oidEncryptionAlgorithmDESCBC):
		block, err = des.NewCipher(key)
	case alg.Equal(oidEncryptionAlgorithmDESEDE3CBC):
		block, err = des.NewTripleDESCipher(key)
	case alg.Equal(oidEncryptionAlgorithmAES256CBC):
		block, err = aes.NewCipher(key)
	}
	if err != nil {
		return nil, err
	}

	iv := eci.ContentEncryptionAlgorithm.Parameters.Bytes
	if len(iv) != block.BlockSize() {
		return nil, errors.New("pkcs7: encryption algorithm parameters are malformed")
	}
	mode := cipher.NewCBCDecrypter(block, iv)
	plaintext := make([]byte, len(cyphertext))
	mode.CryptBlocks(plaintext, cyphertext)
	if plaintext, err = unpad(plaintext, mode.BlockSize()); err != nil {
		return nil, err
	}
	return plaintext, nil
}

func selectRecipientForCertificate(recipients []recipientInfo, cert *x509.Certificate) recipientInfo {
	for _, recp := range recipients {
		if isCertMatchForIssuerAndSerial(cert, recp.IssuerAndSerialNumber) {
			return recp
		}
	}
	return recipientInfo{}
}

func isCertMatchForIssuerAndSerial(cert *x509.Certificate, ias issuerAndSerial) bool {
	return cert.SerialNumber.Cmp(ias.SerialNumber) == 0 && bytes.Compare(cert.RawIssuer, ias.IssuerName.FullBytes) == 0
}

func pad(data []byte, blocklen int) ([]byte, error) {
	if blocklen < 1 {
		return nil, fmt.Errorf("invalid blocklen %d", blocklen)
	}
	padlen := blocklen - (len(data) % blocklen)
	if padlen == 0 {
		padlen = blocklen
	}
	pad := bytes.Repeat([]byte{byte(padlen)}, padlen)
	return append(data, pad...), nil
}

func unpad(data []byte, blocklen int) ([]byte, error) {
	if blocklen < 1 {
		return nil, fmt.Errorf("invalid blocklen %d", blocklen)
	}
	if len(data)%blocklen != 0 || len(data) == 0 {
		return nil, fmt.Errorf("invalid data len %d", len(data))
	}

	// the last byte is the length of padding
	padlen := int(data[len(data)-1])

	// check padding integrity, all bytes should be the same
	pad := data[len(data)-padlen:]
	for _, padbyte := range pad {
		if padbyte != byte(padlen) {
			return nil, errors.New("invalid padding")
		}
	}

	return data[:len(data)-padlen], nil
}
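
// padUnpadExample is an illustrative sketch, not part of the upstream file:
// with a block size of 8, pad always appends between 1 and 8 bytes, each equal
// to the pad length (4 input bytes gain four 0x04 bytes; an exact multiple of
// 8 gains a whole block of 0x08 bytes), and unpad strips them again. The msg
// argument is an assumed caller-provided value.
func padUnpadExample(msg []byte) ([]byte, error) {
	padded, err := pad(msg, 8)
	if err != nil {
		return nil, err
	}
	// On success this returns a slice equal to msg.
	return unpad(padded, 8)
}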

func unmarshalAttribute(attrs []attribute, attributeType asn1.ObjectIdentifier, out interface{}) error {
	for _, attr := range attrs {
		if attr.Type.Equal(attributeType) {
			_, err := asn1.Unmarshal(attr.Value.Bytes, out)
			return err
		}
	}
	return errors.New("pkcs7: attribute type not in attributes")
}

// UnmarshalSignedAttribute decodes a single attribute from the signer info
func (p7 *PKCS7) UnmarshalSignedAttribute(attributeType asn1.ObjectIdentifier, out interface{}) error {
	sd, ok := p7.raw.(signedData)
	if !ok {
		return errors.New("pkcs7: payload is not signedData content")
	}
	if len(sd.SignerInfos) < 1 {
		return errors.New("pkcs7: payload has no signers")
	}
	attributes := sd.SignerInfos[0].AuthenticatedAttributes
	return unmarshalAttribute(attributes, attributeType, out)
}
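
// signedAttributeExample is an illustrative sketch, not part of the upstream
// file: after parsing a signed payload, a custom authenticated attribute can
// be pulled out of the first signer by its OID. The oid parameter and the
// string type of the attribute value are assumptions for the example.
func signedAttributeExample(p7 *PKCS7, oid asn1.ObjectIdentifier) (string, error) {
	var value string
	if err := p7.UnmarshalSignedAttribute(oid, &value); err != nil {
		return "", err
	}
	return value, nil
}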

// SignedData is an opaque data structure for creating signed data payloads
type SignedData struct {
	sd            signedData
	certs         []*x509.Certificate
	messageDigest []byte
}

// Attribute represents a key value pair attribute. Value must be marshalable
// by `encoding/asn1`
type Attribute struct {
	Type  asn1.ObjectIdentifier
	Value interface{}
}

// SignerInfoConfig are optional values to include when adding a signer
type SignerInfoConfig struct {
	ExtraSignedAttributes []Attribute
}

// NewSignedData initializes a SignedData with content
func NewSignedData(data []byte) (*SignedData, error) {
	content, err := asn1.Marshal(data)
	if err != nil {
		return nil, err
	}
	ci := contentInfo{
		ContentType: oidData,
		Content:     asn1.RawValue{Class: 2, Tag: 0, Bytes: content, IsCompound: true},
	}
	digAlg := pkix.AlgorithmIdentifier{
		Algorithm: oidDigestAlgorithmSHA1,
	}
	h := crypto.SHA1.New()
	h.Write(data)
	md := h.Sum(nil)
	sd := signedData{
		ContentInfo:                ci,
		Version:                    1,
		DigestAlgorithmIdentifiers: []pkix.AlgorithmIdentifier{digAlg},
	}
	return &SignedData{sd: sd, messageDigest: md}, nil
}

type attributes struct {
	types  []asn1.ObjectIdentifier
	values []interface{}
}

// Add adds the attribute, maintaining insertion order
func (attrs *attributes) Add(attrType asn1.ObjectIdentifier, value interface{}) {
	attrs.types = append(attrs.types, attrType)
	attrs.values = append(attrs.values, value)
}

type sortableAttribute struct {
	SortKey   []byte
	Attribute attribute
}

type attributeSet []sortableAttribute

func (sa attributeSet) Len() int {
	return len(sa)
}

func (sa attributeSet) Less(i, j int) bool {
	return bytes.Compare(sa[i].SortKey, sa[j].SortKey) < 0
}

func (sa attributeSet) Swap(i, j int) {
	sa[i], sa[j] = sa[j], sa[i]
}

func (sa attributeSet) Attributes() []attribute {
	attrs := make([]attribute, len(sa))
	for i, attr := range sa {
		attrs[i] = attr.Attribute
	}
	return attrs
}

func (attrs *attributes) ForMarshaling() ([]attribute, error) {
	sortables := make(attributeSet, len(attrs.types))
	for i := range sortables {
		attrType := attrs.types[i]
		attrValue := attrs.values[i]
		asn1Value, err := asn1.Marshal(attrValue)
		if err != nil {
			return nil, err
		}
		attr := attribute{
			Type:  attrType,
			Value: asn1.RawValue{Tag: 17, IsCompound: true, Bytes: asn1Value}, // 17 == SET tag
		}
		encoded, err := asn1.Marshal(attr)
		if err != nil {
			return nil, err
		}
		sortables[i] = sortableAttribute{
			SortKey:   encoded,
			Attribute: attr,
		}
	}
	sort.Sort(sortables)
	return sortables.Attributes(), nil
}

// AddSigner signs attributes about the content and adds certificate to payload
func (sd *SignedData) AddSigner(cert *x509.Certificate, pkey crypto.PrivateKey, config SignerInfoConfig) error {
	attrs := &attributes{}
	attrs.Add(oidAttributeContentType, sd.sd.ContentInfo.ContentType)
	attrs.Add(oidAttributeMessageDigest, sd.messageDigest)
	attrs.Add(oidAttributeSigningTime, time.Now())
	for _, attr := range config.ExtraSignedAttributes {
		attrs.Add(attr.Type, attr.Value)
	}
	finalAttrs, err := attrs.ForMarshaling()
	if err != nil {
		return err
	}
	// create signature of signed attributes
	signature, err := signAttributes(finalAttrs, pkey, crypto.SHA1)
	if err != nil {
		return err
	}

	ias, err := cert2issuerAndSerial(cert)
	if err != nil {
		return err
	}

	signer := signerInfo{
		AuthenticatedAttributes:   finalAttrs,
		DigestAlgorithm:           pkix.AlgorithmIdentifier{Algorithm: oidDigestAlgorithmSHA1},
		DigestEncryptionAlgorithm: pkix.AlgorithmIdentifier{Algorithm: oidEncryptionAlgorithmRSA},
		IssuerAndSerialNumber:     ias,
		EncryptedDigest:           signature,
		Version:                   1,
	}
	sd.certs = append(sd.certs, cert)
	sd.sd.SignerInfos = append(sd.sd.SignerInfos, signer)
	return nil
}

// AddCertificate adds the certificate to the payload. Useful for parent certificates
func (sd *SignedData) AddCertificate(cert *x509.Certificate) {
	sd.certs = append(sd.certs, cert)
}

// Finish marshals the content and its signers
func (sd *SignedData) Finish() ([]byte, error) {
	sd.sd.Certificates = marshalCertificates(sd.certs)
	inner, err := asn1.Marshal(sd.sd)
	if err != nil {
		return nil, err
	}
	outer := contentInfo{
		ContentType: oidSignedData,
		Content:     asn1.RawValue{Class: 2, Tag: 0, Bytes: inner, IsCompound: true},
	}
	return asn1.Marshal(outer)
}
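
// signExample is an illustrative sketch, not part of the upstream file: it
// strings together the signing flow defined above. The content is wrapped by
// NewSignedData, one signer is attached with AddSigner, and Finish serializes
// the DER-encoded SignedData. cert and key are an assumed certificate/RSA key
// pair supplied by the caller.
func signExample(content []byte, cert *x509.Certificate, key *rsa.PrivateKey) ([]byte, error) {
	toBeSigned, err := NewSignedData(content)
	if err != nil {
		return nil, err
	}
	if err := toBeSigned.AddSigner(cert, key, SignerInfoConfig{}); err != nil {
		return nil, err
	}
	// Finish wraps the signedData in an outer contentInfo and DER-encodes it.
	return toBeSigned.Finish()
}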

func cert2issuerAndSerial(cert *x509.Certificate) (issuerAndSerial, error) {
	var ias issuerAndSerial
	// The issuer RDNSequence has to match exactly the sequence in the certificate
	// We cannot use cert.Issuer.ToRDNSequence() here since it mangles the sequence
	ias.IssuerName = asn1.RawValue{FullBytes: cert.RawIssuer}
	ias.SerialNumber = cert.SerialNumber

	return ias, nil
}

// signs the DER encoded form of the attributes with the private key
func signAttributes(attrs []attribute, pkey crypto.PrivateKey, hash crypto.Hash) ([]byte, error) {
	attrBytes, err := marshalAttributes(attrs)
	if err != nil {
		return nil, err
	}
	h := hash.New()
	h.Write(attrBytes)
	hashed := h.Sum(nil)
	switch priv := pkey.(type) {
	case *rsa.PrivateKey:
		return rsa.SignPKCS1v15(rand.Reader, priv, crypto.SHA1, hashed)
	}
	return nil, ErrUnsupportedAlgorithm
}

// concats and wraps the certificates in the RawValue structure
func marshalCertificates(certs []*x509.Certificate) rawCertificates {
	var buf bytes.Buffer
	for _, cert := range certs {
		buf.Write(cert.Raw)
	}
	// Even though the tag & length are stripped out during marshalling the
	// RawContent, we have to encode it into the RawContent. If it's missing,
	// then `asn1.Marshal()` will strip out the certificate wrapper instead.
	var val = asn1.RawValue{Bytes: buf.Bytes(), Class: 2, Tag: 0, IsCompound: true}
	b, _ := asn1.Marshal(val)
	return rawCertificates{Raw: b}
}

// DegenerateCertificate creates a signed data structure containing only the
// provided certificate
func DegenerateCertificate(cert []byte) ([]byte, error) {
	emptyContent := contentInfo{ContentType: oidData}
	sd := signedData{
		Version:      1,
		ContentInfo:  emptyContent,
		Certificates: rawCertificates{Raw: cert},
		CRLs:         []pkix.CertificateList{},
	}
	content, err := asn1.Marshal(sd)
	if err != nil {
		return nil, err
	}
	signedContent := contentInfo{
		ContentType: oidSignedData,
		Content:     asn1.RawValue{Class: 2, Tag: 0, Bytes: content, IsCompound: true},
	}
	return asn1.Marshal(signedContent)
}
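
// degenerateExample is an illustrative sketch, not part of the upstream file:
// a "degenerate" PKCS#7 structure carries no content and no signers, only the
// raw certificate bytes wrapped as SignedData, which is a common way to ship a
// bare certificate. The cert argument is an assumed caller-provided value.
func degenerateExample(cert *x509.Certificate) ([]byte, error) {
	return DegenerateCertificate(cert.Raw)
}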

// Encrypt creates and returns an envelope data PKCS7 structure with encrypted
// recipient keys for each recipient public key
// TODO(fullsailor): Add support for encrypting content with other algorithms
func Encrypt(content []byte, recipients []*x509.Certificate) ([]byte, error) {

	// Create DES key & CBC IV
	key := make([]byte, 8)
	iv := make([]byte, des.BlockSize)
	_, err := rand.Read(key)
	if err != nil {
		return nil, err
	}
	_, err = rand.Read(iv)
	if err != nil {
		return nil, err
	}

	// Encrypt padded content
	block, err := des.NewCipher(key)
	if err != nil {
		return nil, err
	}
	mode := cipher.NewCBCEncrypter(block, iv)
	plaintext, err := pad(content, mode.BlockSize())
	if err != nil {
		return nil, err
	}
	cyphertext := make([]byte, len(plaintext))
	mode.CryptBlocks(cyphertext, plaintext)

	// Prepare ASN.1 Encrypted Content Info
	eci := encryptedContentInfo{
		ContentType: oidData,
		ContentEncryptionAlgorithm: pkix.AlgorithmIdentifier{
			Algorithm:  oidEncryptionAlgorithmDESCBC,
			Parameters: asn1.RawValue{Tag: 4, Bytes: iv},
		},
		EncryptedContent: marshalEncryptedContent(cyphertext),
	}

	// Prepare each recipient's encrypted cipher key
	recipientInfos := make([]recipientInfo, len(recipients))
	for i, recipient := range recipients {
		encrypted, err := encryptKey(key, recipient)
		if err != nil {
			return nil, err
		}
		ias, err := cert2issuerAndSerial(recipient)
		if err != nil {
			return nil, err
		}
		info := recipientInfo{
			Version:               0,
			IssuerAndSerialNumber: ias,
			KeyEncryptionAlgorithm: pkix.AlgorithmIdentifier{
				Algorithm: oidEncryptionAlgorithmRSA,
			},
			EncryptedKey: encrypted,
		}
		recipientInfos[i] = info
	}

	// Prepare envelope content
	envelope := envelopedData{
		EncryptedContentInfo: eci,
		Version:              0,
		RecipientInfos:       recipientInfos,
	}
	innerContent, err := asn1.Marshal(envelope)
	if err != nil {
		return nil, err
	}

	// Prepare outer payload structure
	wrapper := contentInfo{
		ContentType: oidEnvelopedData,
		Content:     asn1.RawValue{Class: 2, Tag: 0, IsCompound: true, Bytes: innerContent},
	}

	return asn1.Marshal(wrapper)
}
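
// encryptRoundTripExample is an illustrative sketch, not part of the upstream
// file: an enveloped-data blob produced by Encrypt above can be opened again
// with Parse and Decrypt by any listed recipient. cert and key are an assumed
// RSA certificate/key pair supplied by the caller.
func encryptRoundTripExample(msg []byte, cert *x509.Certificate, key *rsa.PrivateKey) ([]byte, error) {
	enveloped, err := Encrypt(msg, []*x509.Certificate{cert})
	if err != nil {
		return nil, err
	}
	p7, err := Parse(enveloped)
	if err != nil {
		return nil, err
	}
	// On success the decrypted content equals msg.
	return p7.Decrypt(cert, key)
}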

func marshalEncryptedContent(content []byte) asn1.RawValue {
	asn1Content, _ := asn1.Marshal(content)
	return asn1.RawValue{Tag: 0, Class: 2, Bytes: asn1Content, IsCompound: true}
}

func encryptKey(key []byte, recipient *x509.Certificate) ([]byte, error) {
	if pub, ok := recipient.PublicKey.(*rsa.PublicKey); ok {
		return rsa.EncryptPKCS1v15(rand.Reader, pub, key)
	}
	return nil, ErrUnsupportedAlgorithm
}
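
// encryptKeyExample is an illustrative sketch, not part of the upstream file:
// Encrypt wraps the DES content-encryption key once per recipient via
// encryptKey above, using RSA PKCS#1 v1.5; recipients whose certificates carry
// a non-RSA public key are rejected with ErrUnsupportedAlgorithm. contentKey
// and recipient are assumed caller-provided values.
func encryptKeyExample(contentKey []byte, recipient *x509.Certificate) ([]byte, error) {
	return encryptKey(contentKey, recipient)
}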
410
vendor/github.com/fullsailor/pkcs7/pkcs7_test.go
generated
vendored
Normal file
@ -0,0 +1,410 @@
package pkcs7

import (
	"bytes"
	"crypto"
	"crypto/rand"
	"crypto/rsa"
	"crypto/x509"
	"crypto/x509/pkix"
	"encoding/asn1"
	"encoding/pem"
	"fmt"
	"io"
	"math/big"
	"testing"
	"time"
)

func TestVerify(t *testing.T) {
	fixture := UnmarshalTestFixture(SignedTestFixture)
	p7, err := Parse(fixture.Input)
	if err != nil {
		t.Errorf("Parse encountered unexpected error: %v", err)
	}

	if err := p7.Verify(); err != nil {
		t.Errorf("Verify failed with error: %v", err)
	}
	expected := []byte("We the People")
	if bytes.Compare(p7.Content, expected) != 0 {
		t.Errorf("Signed content does not match.\n\tExpected:%s\n\tActual:%s", expected, p7.Content)
	}
}

func TestVerifyEC2(t *testing.T) {
	fixture := UnmarshalTestFixture(EC2IdentityDocumentFixture)
	p7, err := Parse(fixture.Input)
	if err != nil {
		t.Errorf("Parse encountered unexpected error: %v", err)
	}
	p7.Certificates = []*x509.Certificate{fixture.Certificate}
	if err := p7.Verify(); err != nil {
		t.Errorf("Verify failed with error: %v", err)
	}
}

func TestDecrypt(t *testing.T) {
	fixture := UnmarshalTestFixture(EncryptedTestFixture)
	p7, err := Parse(fixture.Input)
	if err != nil {
		t.Fatal(err)
	}
	content, err := p7.Decrypt(fixture.Certificate, fixture.PrivateKey)
	if err != nil {
		t.Errorf("Cannot Decrypt with error: %v", err)
	}
	expected := []byte("This is a test")
	if bytes.Compare(content, expected) != 0 {
		t.Errorf("Decrypted result does not match.\n\tExpected:%s\n\tActual:%s", expected, content)
	}
}

func TestDegenerateCertificate(t *testing.T) {
	cert, err := createTestCertificate()
	if err != nil {
		t.Fatal(err)
	}
	deg, err := DegenerateCertificate(cert.Certificate.Raw)
	if err != nil {
		t.Fatal(err)
	}
	fmt.Printf("=== BEGIN DEGENERATE CERT ===\n% X\n=== END DEGENERATE CERT ===\n", deg)
}

func TestSign(t *testing.T) {
	cert, err := createTestCertificate()
	if err != nil {
		t.Fatal(err)
	}
	content := []byte("Hello World")
	toBeSigned, err := NewSignedData(content)
	if err != nil {
		t.Fatalf("Cannot initialize signed data: %s", err)
	}
	if err := toBeSigned.AddSigner(cert.Certificate, cert.PrivateKey, SignerInfoConfig{}); err != nil {
		t.Fatalf("Cannot add signer: %s", err)
	}
	signed, err := toBeSigned.Finish()
	if err != nil {
		t.Fatalf("Cannot finish signing data: %s", err)
	}
	fmt.Printf("=== BEGIN SIGNED RESULT ===\n% X\n=== END SIGNED RESULT ===\n", signed)

	p7, err := Parse(signed)
	if err != nil {
		t.Fatalf("Cannot parse our signed data: %s", err)
	}
	if bytes.Compare(content, p7.Content) != 0 {
		t.Errorf("Our content was not in the parsed data:\n\tExpected: %s\n\tActual: %s", content, p7.Content)
	}
	if err := p7.Verify(); err != nil {
		t.Errorf("Cannot verify our signed data: %s", err)
	}
}

func TestEncrypt(t *testing.T) {
	plaintext := []byte("Hello Secret World!")
	cert, err := createTestCertificate()
	if err != nil {
		t.Fatal(err)
	}
	encrypted, err := Encrypt(plaintext, []*x509.Certificate{cert.Certificate})
	if err != nil {
		t.Fatal(err)
	}
	p7, err := Parse(encrypted)
	if err != nil {
		t.Fatalf("cannot Parse encrypted result: %s", err)
	}
	result, err := p7.Decrypt(cert.Certificate, cert.PrivateKey)
	if err != nil {
		t.Fatalf("cannot Decrypt encrypted result: %s", err)
	}
	if bytes.Compare(plaintext, result) != 0 {
		t.Errorf("encrypted data does not match plaintext:\n\tExpected: %s\n\tActual: %s", plaintext, result)
	}
}

func TestUnmarshalSignedAttribute(t *testing.T) {
	cert, err := createTestCertificate()
	if err != nil {
		t.Fatal(err)
	}
	content := []byte("Hello World")
	toBeSigned, err := NewSignedData(content)
	if err != nil {
		t.Fatalf("Cannot initialize signed data: %s", err)
	}
	oidTest := asn1.ObjectIdentifier{2, 3, 4, 5, 6, 7}
	testValue := "TestValue"
	if err := toBeSigned.AddSigner(cert.Certificate, cert.PrivateKey, SignerInfoConfig{
		ExtraSignedAttributes: []Attribute{Attribute{Type: oidTest, Value: testValue}},
	}); err != nil {
		t.Fatalf("Cannot add signer: %s", err)
	}
	signed, err := toBeSigned.Finish()
	if err != nil {
		t.Fatalf("Cannot finish signing data: %s", err)
	}
	p7, err := Parse(signed)
	var actual string
	err = p7.UnmarshalSignedAttribute(oidTest, &actual)
	if err != nil {
		t.Fatalf("Cannot unmarshal test value: %s", err)
	}
	if testValue != actual {
		t.Errorf("Attribute does not match test value\n\tExpected: %s\n\tActual: %s", testValue, actual)
	}
}

func TestPad(t *testing.T) {
	tests := []struct {
		Original  []byte
		Expected  []byte
		BlockSize int
	}{
		{[]byte{0x1, 0x2, 0x3, 0x10}, []byte{0x1, 0x2, 0x3, 0x10, 0x4, 0x4, 0x4, 0x4}, 8},
		{[]byte{0x1, 0x2, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0}, []byte{0x1, 0x2, 0x3, 0x0, 0x0, 0x0, 0x0, 0x0, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8, 0x8}, 8},
	}
	for _, test := range tests {
		padded, err := pad(test.Original, test.BlockSize)
		if err != nil {
			t.Errorf("pad encountered error: %s", err)
			continue
		}
		if bytes.Compare(test.Expected, padded) != 0 {
			t.Errorf("pad results mismatch:\n\tExpected: %X\n\tActual: %X", test.Expected, padded)
		}
	}
}

type certKeyPair struct {
	Certificate *x509.Certificate
	PrivateKey  *rsa.PrivateKey
}

func createTestCertificate() (certKeyPair, error) {
	signer, err := createTestCertificateByIssuer("Eddard Stark", nil)
	if err != nil {
		return certKeyPair{}, err
	}
	pair, err := createTestCertificateByIssuer("Jon Snow", signer)
	if err != nil {
		return certKeyPair{}, err
	}
	return *pair, nil
}

func createTestCertificateByIssuer(name string, issuer *certKeyPair) (*certKeyPair, error) {
	priv, err := rsa.GenerateKey(rand.Reader, 1024)
	if err != nil {
		return nil, err
	}
	serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 32)
	serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
	if err != nil {
		return nil, err
	}

	template := x509.Certificate{
		SerialNumber:       serialNumber,
		SignatureAlgorithm: x509.SHA256WithRSA,
		Subject: pkix.Name{
			CommonName:   name,
			Organization: []string{"Acme Co"},
		},
		NotBefore: time.Now(),
		NotAfter:  time.Now().AddDate(1, 0, 0),
		KeyUsage:  x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
	}
	var issuerCert *x509.Certificate
	var issuerKey crypto.PrivateKey
	if issuer != nil {
		issuerCert = issuer.Certificate
		issuerKey = issuer.PrivateKey
	} else {
		issuerCert = &template
		issuerKey = priv
	}
	cert, err := x509.CreateCertificate(rand.Reader, &template, issuerCert, priv.Public(), issuerKey)
	if err != nil {
		return nil, err
	}
	leaf, err := x509.ParseCertificate(cert)
	if err != nil {
		return nil, err
	}
	return &certKeyPair{
		Certificate: leaf,
		PrivateKey:  priv,
	}, nil
}

type TestFixture struct {
	Input       []byte
	Certificate *x509.Certificate
	PrivateKey  *rsa.PrivateKey
}

func UnmarshalTestFixture(testPEMBlock string) TestFixture {
	var result TestFixture
	var derBlock *pem.Block
	var pemBlock = []byte(testPEMBlock)
	for {
		derBlock, pemBlock = pem.Decode(pemBlock)
		if derBlock == nil {
			break
		}
		switch derBlock.Type {
		case "PKCS7":
			result.Input = derBlock.Bytes
		case "CERTIFICATE":
			result.Certificate, _ = x509.ParseCertificate(derBlock.Bytes)
		case "PRIVATE KEY":
			result.PrivateKey, _ = x509.ParsePKCS1PrivateKey(derBlock.Bytes)
		}
	}

	return result
}

func MarshalTestFixture(t TestFixture, w io.Writer) {
	if t.Input != nil {
		pem.Encode(w, &pem.Block{Type: "PKCS7", Bytes: t.Input})
	}
	if t.Certificate != nil {
		pem.Encode(w, &pem.Block{Type: "CERTIFICATE", Bytes: t.Certificate.Raw})
	}
	if t.PrivateKey != nil {
		pem.Encode(w, &pem.Block{Type: "PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(t.PrivateKey)})
	}
}
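
// fixtureRoundTripExample is an illustrative sketch, not part of the upstream
// file: the PEM fixtures below are decoded with UnmarshalTestFixture, and
// MarshalTestFixture can re-emit the PKCS7, certificate and private key blocks,
// e.g. when regenerating test data.
func fixtureRoundTripExample() TestFixture {
	fixture := UnmarshalTestFixture(SignedTestFixture)
	var buf bytes.Buffer
	MarshalTestFixture(fixture, &buf)
	return fixture
}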

var SignedTestFixture = `
-----BEGIN PKCS7-----
MIIDVgYJKoZIhvcNAQcCoIIDRzCCA0MCAQExCTAHBgUrDgMCGjAcBgkqhkiG9w0B
BwGgDwQNV2UgdGhlIFBlb3BsZaCCAdkwggHVMIIBQKADAgECAgRpuDctMAsGCSqG
SIb3DQEBCzApMRAwDgYDVQQKEwdBY21lIENvMRUwEwYDVQQDEwxFZGRhcmQgU3Rh
cmswHhcNMTUwNTA2MDQyNDQ4WhcNMTYwNTA2MDQyNDQ4WjAlMRAwDgYDVQQKEwdB
Y21lIENvMREwDwYDVQQDEwhKb24gU25vdzCBnzANBgkqhkiG9w0BAQEFAAOBjQAw
gYkCgYEAqr+tTF4mZP5rMwlXp1y+crRtFpuLXF1zvBZiYMfIvAHwo1ta8E1IcyEP
J1jIiKMcwbzeo6kAmZzIJRCTezq9jwXUsKbQTvcfOH9HmjUmXBRWFXZYoQs/OaaF
a45deHmwEeMQkuSWEtYiVKKZXtJOtflKIT3MryJEDiiItMkdybUCAwEAAaMSMBAw
DgYDVR0PAQH/BAQDAgCgMAsGCSqGSIb3DQEBCwOBgQDK1EweZWRL+f7Z+J0kVzY8
zXptcBaV4Lf5wGZJLJVUgp33bpLNpT3yadS++XQJ+cvtW3wADQzBSTMduyOF8Zf+
L7TjjrQ2+F2HbNbKUhBQKudxTfv9dJHdKbD+ngCCdQJYkIy2YexsoNG0C8nQkggy
axZd/J69xDVx6pui3Sj8sDGCATYwggEyAgEBMDEwKTEQMA4GA1UEChMHQWNtZSBD
bzEVMBMGA1UEAxMMRWRkYXJkIFN0YXJrAgRpuDctMAcGBSsOAwIaoGEwGAYJKoZI
hvcNAQkDMQsGCSqGSIb3DQEHATAgBgkqhkiG9w0BCQUxExcRMTUwNTA2MDAyNDQ4
LTA0MDAwIwYJKoZIhvcNAQkEMRYEFG9D7gcTh9zfKiYNJ1lgB0yTh4sZMAsGCSqG
SIb3DQEBAQSBgFF3sGDU9PtXty/QMtpcFa35vvIOqmWQAIZt93XAskQOnBq4OloX
iL9Ct7t1m4pzjRm0o9nDkbaSLZe7HKASHdCqijroScGlI8M+alJ8drHSFv6ZIjnM
FIwIf0B2Lko6nh9/6mUXq7tbbIHa3Gd1JUVire/QFFtmgRXMbXYk8SIS
-----END PKCS7-----
-----BEGIN CERTIFICATE-----
MIIB1TCCAUCgAwIBAgIEabg3LTALBgkqhkiG9w0BAQswKTEQMA4GA1UEChMHQWNt
ZSBDbzEVMBMGA1UEAxMMRWRkYXJkIFN0YXJrMB4XDTE1MDUwNjA0MjQ0OFoXDTE2
MDUwNjA0MjQ0OFowJTEQMA4GA1UEChMHQWNtZSBDbzERMA8GA1UEAxMISm9uIFNu
b3cwgZ8wDQYJKoZIhvcNAQEBBQADgY0AMIGJAoGBAKq/rUxeJmT+azMJV6dcvnK0
bRabi1xdc7wWYmDHyLwB8KNbWvBNSHMhDydYyIijHMG83qOpAJmcyCUQk3s6vY8F
1LCm0E73Hzh/R5o1JlwUVhV2WKELPzmmhWuOXXh5sBHjEJLklhLWIlSimV7STrX5
SiE9zK8iRA4oiLTJHcm1AgMBAAGjEjAQMA4GA1UdDwEB/wQEAwIAoDALBgkqhkiG
9w0BAQsDgYEAytRMHmVkS/n+2fidJFc2PM16bXAWleC3+cBmSSyVVIKd926SzaU9
8mnUvvl0CfnL7Vt8AA0MwUkzHbsjhfGX/i+04460Nvhdh2zWylIQUCrncU37/XSR
3Smw/p4AgnUCWJCMtmHsbKDRtAvJ0JIIMmsWXfyevcQ1ceqbot0o/LA=
-----END CERTIFICATE-----
-----BEGIN PRIVATE KEY-----
MIICXgIBAAKBgQCqv61MXiZk/mszCVenXL5ytG0Wm4tcXXO8FmJgx8i8AfCjW1rw
TUhzIQ8nWMiIoxzBvN6jqQCZnMglEJN7Or2PBdSwptBO9x84f0eaNSZcFFYVdlih
Cz85poVrjl14ebAR4xCS5JYS1iJUople0k61+UohPcyvIkQOKIi0yR3JtQIDAQAB
AoGBAIPLCR9N+IKxodq11lNXEaUFwMHXc1zqwP8no+2hpz3+nVfplqqubEJ4/PJY
5AgbJoIfnxVhyBXJXu7E+aD/OPneKZrgp58YvHKgGvvPyJg2gpC/1Fh0vQB0HNpI
1ZzIZUl8ZTUtVgtnCBUOh5JGI4bFokAqrT//Uvcfd+idgxqBAkEA1ZbP/Kseld14
qbWmgmU5GCVxsZRxgR1j4lG3UVjH36KXMtRTm1atAam1uw3OEGa6Y3ANjpU52FaB
Hep5rkk4FQJBAMynMo1L1uiN5GP+KYLEF5kKRxK+FLjXR0ywnMh+gpGcZDcOae+J
+t1gLoWBIESH/Xt639T7smuSfrZSA9V0EyECQA8cvZiWDvLxmaEAXkipmtGPjKzQ
4PsOtkuEFqFl07aKDYKmLUg3aMROWrJidqsIabWxbvQgsNgSvs38EiH3wkUCQQCg
ndxb7piVXb9RBwm3OoU2tE1BlXMX+sVXmAkEhd2dwDsaxrI3sHf1xGXem5AimQRF
JBOFyaCnMotGNioSHY5hAkEAxyXcNixQ2RpLXJTQZtwnbk0XDcbgB+fBgXnv/4f3
BCvcu85DqJeJyQv44Oe1qsXEX9BfcQIOVaoep35RPlKi9g==
-----END PRIVATE KEY-----`

// Content is "This is a test"
var EncryptedTestFixture = `
-----BEGIN PKCS7-----
MIIBFwYJKoZIhvcNAQcDoIIBCDCCAQQCAQAxgcowgccCAQAwMjApMRAwDgYDVQQK
EwdBY21lIENvMRUwEwYDVQQDEwxFZGRhcmQgU3RhcmsCBQDL+CvWMAsGCSqGSIb3
DQEBAQSBgKyP/5WlRTZD3dWMrLOX6QRNDrXEkQjhmToRwFZdY3LgUh25ZU0S/q4G
dHPV21Fv9lQD+q7l3vfeHw8M6Z1PKi9sHMVfxAkQpvaI96DTIT3YHtuLC1w3geCO
8eFWTq2qS4WChSuS/yhYosjA1kTkE0eLnVZcGw0z/WVuEZznkdyIMDIGCSqGSIb3
DQEHATARBgUrDgMCBwQImpKsUyMPpQigEgQQRcWWrCRXqpD5Njs0GkJl+g==
-----END PKCS7-----
-----BEGIN CERTIFICATE-----
MIIB1jCCAUGgAwIBAgIFAMv4K9YwCwYJKoZIhvcNAQELMCkxEDAOBgNVBAoTB0Fj
bWUgQ28xFTATBgNVBAMTDEVkZGFyZCBTdGFyazAeFw0xNTA1MDYwMzU2NDBaFw0x
NjA1MDYwMzU2NDBaMCUxEDAOBgNVBAoTB0FjbWUgQ28xETAPBgNVBAMTCEpvbiBT
bm93MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDK6NU0R0eiCYVquU4RcjKc
LzGfx0aa1lMr2TnLQUSeLFZHFxsyyMXXuMPig3HK4A7SGFHupO+/1H/sL4xpH5zg
8+Zg2r8xnnney7abxcuv0uATWSIeKlNnb1ZO1BAxFnESc3GtyOCr2dUwZHX5mRVP
+Zxp2ni5qHNraf3wE2VPIQIDAQABoxIwEDAOBgNVHQ8BAf8EBAMCAKAwCwYJKoZI
hvcNAQELA4GBAIr2F7wsqmEU/J/kLyrCgEVXgaV/sKZq4pPNnzS0tBYk8fkV3V18
sBJyHKRLL/wFZASvzDcVGCplXyMdAOCyfd8jO3F9Ac/xdlz10RrHJT75hNu3a7/n
9KNwKhfN4A1CQv2x372oGjRhCW5bHNCWx4PIVeNzCyq/KZhyY9sxHE6f
-----END CERTIFICATE-----
-----BEGIN PRIVATE KEY-----
MIICXgIBAAKBgQDK6NU0R0eiCYVquU4RcjKcLzGfx0aa1lMr2TnLQUSeLFZHFxsy
yMXXuMPig3HK4A7SGFHupO+/1H/sL4xpH5zg8+Zg2r8xnnney7abxcuv0uATWSIe
KlNnb1ZO1BAxFnESc3GtyOCr2dUwZHX5mRVP+Zxp2ni5qHNraf3wE2VPIQIDAQAB
AoGBALyvnSt7KUquDen7nXQtvJBudnf9KFPt//OjkdHHxNZNpoF/JCSqfQeoYkeu
MdAVYNLQGMiRifzZz4dDhA9xfUAuy7lcGQcMCxEQ1dwwuFaYkawbS0Tvy2PFlq2d
H5/HeDXU4EDJ3BZg0eYj2Bnkt1sJI35UKQSxblQ0MY2q0uFBAkEA5MMOogkgUx1C
67S1tFqMUSM8D0mZB0O5vOJZC5Gtt2Urju6vywge2ArExWRXlM2qGl8afFy2SgSv
Xk5eybcEiQJBAOMRwwbEoW5NYHuFFbSJyWll4n71CYuWuQOCzehDPyTb80WFZGLV
i91kFIjeERyq88eDE5xVB3ZuRiXqaShO/9kCQQCKOEkpInaDgZSjskZvuJ47kByD
6CYsO4GIXQMMeHML8ncFH7bb6AYq5ybJVb2NTU7QLFJmfeYuhvIm+xdOreRxAkEA
o5FC5Jg2FUfFzZSDmyZ6IONUsdF/i78KDV5nRv1R+hI6/oRlWNCtTNBv/lvBBd6b
dseUE9QoaQZsn5lpILEvmQJAZ0B+Or1rAYjnbjnUhdVZoy9kC4Zov+4UH3N/BtSy
KJRWUR0wTWfZBPZ5hAYZjTBEAFULaYCXlQKsODSp0M1aQA==
-----END PRIVATE KEY-----`

var EC2IdentityDocumentFixture = `
-----BEGIN PKCS7-----
MIAGCSqGSIb3DQEHAqCAMIACAQExCzAJBgUrDgMCGgUAMIAGCSqGSIb3DQEHAaCA
JIAEggGmewogICJwcml2YXRlSXAiIDogIjE3Mi4zMC4wLjI1MiIsCiAgImRldnBh
eVByb2R1Y3RDb2RlcyIgOiBudWxsLAogICJhdmFpbGFiaWxpdHlab25lIiA6ICJ1
cy1lYXN0LTFhIiwKICAidmVyc2lvbiIgOiAiMjAxMC0wOC0zMSIsCiAgImluc3Rh
bmNlSWQiIDogImktZjc5ZmU1NmMiLAogICJiaWxsaW5nUHJvZHVjdHMiIDogbnVs
bCwKICAiaW5zdGFuY2VUeXBlIiA6ICJ0Mi5taWNybyIsCiAgImFjY291bnRJZCIg
OiAiMTIxNjU5MDE0MzM0IiwKICAiaW1hZ2VJZCIgOiAiYW1pLWZjZTNjNjk2IiwK
ICAicGVuZGluZ1RpbWUiIDogIjIwMTYtMDQtMDhUMDM6MDE6MzhaIiwKICAiYXJj
aGl0ZWN0dXJlIiA6ICJ4ODZfNjQiLAogICJrZXJuZWxJZCIgOiBudWxsLAogICJy
YW1kaXNrSWQiIDogbnVsbCwKICAicmVnaW9uIiA6ICJ1cy1lYXN0LTEiCn0AAAAA
AAAxggEYMIIBFAIBATBpMFwxCzAJBgNVBAYTAlVTMRkwFwYDVQQIExBXYXNoaW5n
dG9uIFN0YXRlMRAwDgYDVQQHEwdTZWF0dGxlMSAwHgYDVQQKExdBbWF6b24gV2Vi
IFNlcnZpY2VzIExMQwIJAJa6SNnlXhpnMAkGBSsOAwIaBQCgXTAYBgkqhkiG9w0B
CQMxCwYJKoZIhvcNAQcBMBwGCSqGSIb3DQEJBTEPFw0xNjA0MDgwMzAxNDRaMCMG
CSqGSIb3DQEJBDEWBBTuUc28eBXmImAautC+wOjqcFCBVjAJBgcqhkjOOAQDBC8w
LQIVAKA54NxGHWWCz5InboDmY/GHs33nAhQ6O/ZI86NwjA9Vz3RNMUJrUPU5tAAA
AAAAAA==
-----END PKCS7-----
-----BEGIN CERTIFICATE-----
MIIC7TCCAq0CCQCWukjZ5V4aZzAJBgcqhkjOOAQDMFwxCzAJBgNVBAYTAlVTMRkw
FwYDVQQIExBXYXNoaW5ndG9uIFN0YXRlMRAwDgYDVQQHEwdTZWF0dGxlMSAwHgYD
VQQKExdBbWF6b24gV2ViIFNlcnZpY2VzIExMQzAeFw0xMjAxMDUxMjU2MTJaFw0z
ODAxMDUxMjU2MTJaMFwxCzAJBgNVBAYTAlVTMRkwFwYDVQQIExBXYXNoaW5ndG9u
IFN0YXRlMRAwDgYDVQQHEwdTZWF0dGxlMSAwHgYDVQQKExdBbWF6b24gV2ViIFNl
cnZpY2VzIExMQzCCAbcwggEsBgcqhkjOOAQBMIIBHwKBgQCjkvcS2bb1VQ4yt/5e
ih5OO6kK/n1Lzllr7D8ZwtQP8fOEpp5E2ng+D6Ud1Z1gYipr58Kj3nssSNpI6bX3
VyIQzK7wLclnd/YozqNNmgIyZecN7EglK9ITHJLP+x8FtUpt3QbyYXJdmVMegN6P
hviYt5JH/nYl4hh3Pa1HJdskgQIVALVJ3ER11+Ko4tP6nwvHwh6+ERYRAoGBAI1j
k+tkqMVHuAFcvAGKocTgsjJem6/5qomzJuKDmbJNu9Qxw3rAotXau8Qe+MBcJl/U
hhy1KHVpCGl9fueQ2s6IL0CaO/buycU1CiYQk40KNHCcHfNiZbdlx1E9rpUp7bnF
lRa2v1ntMX3caRVDdbtPEWmdxSCYsYFDk4mZrOLBA4GEAAKBgEbmeve5f8LIE/Gf
MNmP9CM5eovQOGx5ho8WqD+aTebs+k2tn92BBPqeZqpWRa5P/+jrdKml1qx4llHW
MXrs3IgIb6+hUIB+S8dz8/mmO0bpr76RoZVCXYab2CZedFut7qc3WUH9+EUAH5mw
vSeDCOUMYQR7R9LINYwouHIziqQYMAkGByqGSM44BAMDLwAwLAIUWXBlk40xTwSw
7HX32MxXYruse9ACFBNGmdX2ZBrVNGrN9N2f6ROk0k9K
-----END CERTIFICATE-----`

@ -228,7 +228,7 @@ of the header should be "X-Vault-Token" and the value should be the token.
    </dd>

    <dt>Method</dt>
-   <dd>GET</dd>
+   <dd>POST</dd>

    <dt>URL</dt>
    <dd>`/auth/token/lookup`</dd>