Merge branch 'master' into ui-styles-update

This commit is contained in:
Joshua Ogle 2018-05-25 15:28:57 -06:00 committed by GitHub
commit b44fa640f5
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
102 changed files with 7859 additions and 286 deletions

View File

@ -16,6 +16,10 @@ FEATURES:
* Userpass auth CIDR restrictions: When using the `userpass` auth method you
can now limit authentication to specific CIDRs; these will also be encoded
in resultant tokens to limit their use.
* Vault Browser CLI: The UI now supports usage of read/write/list/delete
commands in a CLI that can be accessed from the nav bar. Complex inputs such
as JSON files are not currently supported. This surfaces features otherwise
unsupported in Vault's UI.
IMPROVEMENTS:

View File

@ -388,11 +388,12 @@ func (c *Client) SetAddress(addr string) error {
c.modifyLock.Lock()
defer c.modifyLock.Unlock()
var err error
if c.addr, err = url.Parse(addr); err != nil {
parsedAddr, err := url.Parse(addr)
if err != nil {
return errwrap.Wrapf("failed to set address: {{err}}", err)
}
c.addr = parsedAddr
return nil
}
@ -411,7 +412,8 @@ func (c *Client) SetLimiter(rateLimit float64, burst int) {
c.modifyLock.RLock()
c.config.modifyLock.Lock()
defer c.config.modifyLock.Unlock()
defer c.modifyLock.RUnlock()
c.modifyLock.RUnlock()
c.config.Limiter = rate.NewLimiter(rate.Limit(rateLimit), burst)
}
@ -544,14 +546,20 @@ func (c *Client) SetPolicyOverride(override bool) {
// doesn't need to be called externally.
func (c *Client) NewRequest(method, requestPath string) *Request {
c.modifyLock.RLock()
defer c.modifyLock.RUnlock()
addr := c.addr
token := c.token
mfaCreds := c.mfaCreds
wrappingLookupFunc := c.wrappingLookupFunc
headers := c.headers
policyOverride := c.policyOverride
c.modifyLock.RUnlock()
// if SRV records exist (see https://tools.ietf.org/html/draft-andrews-http-srv-02), lookup the SRV
// record and take the highest match; this is not designed for high-availability, just discovery
var host string = c.addr.Host
if c.addr.Port() == "" {
var host string = addr.Host
if addr.Port() == "" {
// Internet Draft specifies that the SRV record is ignored if a port is given
_, addrs, err := net.LookupSRV("http", "tcp", c.addr.Hostname())
_, addrs, err := net.LookupSRV("http", "tcp", addr.Hostname())
if err == nil && len(addrs) > 0 {
host = fmt.Sprintf("%s:%d", addrs[0].Target, addrs[0].Port)
}
@ -560,12 +568,12 @@ func (c *Client) NewRequest(method, requestPath string) *Request {
req := &Request{
Method: method,
URL: &url.URL{
User: c.addr.User,
Scheme: c.addr.Scheme,
User: addr.User,
Scheme: addr.Scheme,
Host: host,
Path: path.Join(c.addr.Path, requestPath),
Path: path.Join(addr.Path, requestPath),
},
ClientToken: c.token,
ClientToken: token,
Params: make(map[string][]string),
}
@ -579,21 +587,19 @@ func (c *Client) NewRequest(method, requestPath string) *Request {
lookupPath = requestPath
}
req.MFAHeaderVals = c.mfaCreds
req.MFAHeaderVals = mfaCreds
if c.wrappingLookupFunc != nil {
req.WrapTTL = c.wrappingLookupFunc(method, lookupPath)
if wrappingLookupFunc != nil {
req.WrapTTL = wrappingLookupFunc(method, lookupPath)
} else {
req.WrapTTL = DefaultWrappingLookupFunc(method, lookupPath)
}
if c.config.Timeout != 0 {
c.config.HttpClient.Timeout = c.config.Timeout
}
if c.headers != nil {
req.Headers = c.headers
if headers != nil {
req.Headers = headers
}
req.PolicyOverride = c.policyOverride
req.PolicyOverride = policyOverride
return req
}
@ -602,18 +608,23 @@ func (c *Client) NewRequest(method, requestPath string) *Request {
// a Vault server not configured with this client. This is an advanced operation
// that generally won't need to be called externally.
func (c *Client) RawRequest(r *Request) (*Response, error) {
c.modifyLock.RLock()
c.config.modifyLock.RLock()
defer c.config.modifyLock.RUnlock()
if c.config.Limiter != nil {
c.config.Limiter.Wait(context.Background())
}
token := c.token
c.config.modifyLock.RLock()
limiter := c.config.Limiter
maxRetries := c.config.MaxRetries
backoff := c.config.Backoff
httpClient := c.config.HttpClient
timeout := c.config.Timeout
c.config.modifyLock.RUnlock()
c.modifyLock.RUnlock()
if limiter != nil {
limiter.Wait(context.Background())
}
// Sanity check the token before potentially erroring from the API
idx := strings.IndexFunc(token, func(c rune) bool {
return !unicode.IsPrint(c)
@ -632,16 +643,23 @@ START:
return nil, fmt.Errorf("nil request created")
}
backoff := c.config.Backoff
// Set the timeout, if any
var cancelFunc context.CancelFunc
if timeout != 0 {
var ctx context.Context
ctx, cancelFunc = context.WithTimeout(context.Background(), timeout)
req.Request = req.Request.WithContext(ctx)
}
if backoff == nil {
backoff = retryablehttp.LinearJitterBackoff
}
client := &retryablehttp.Client{
HTTPClient: c.config.HttpClient,
HTTPClient: httpClient,
RetryWaitMin: 1000 * time.Millisecond,
RetryWaitMax: 1500 * time.Millisecond,
RetryMax: c.config.MaxRetries,
RetryMax: maxRetries,
CheckRetry: retryablehttp.DefaultRetryPolicy,
Backoff: backoff,
ErrorHandler: retryablehttp.PassthroughErrorHandler,
@ -649,6 +667,9 @@ START:
var result *Response
resp, err := client.Do(req)
if cancelFunc != nil {
cancelFunc()
}
if resp != nil {
result = &Response{Response: resp}
}

View File

@ -7,7 +7,6 @@ import (
"os"
"strings"
"testing"
"time"
)
func init() {
@ -244,22 +243,10 @@ func TestClientTimeoutSetting(t *testing.T) {
defer os.Setenv(EnvVaultClientTimeout, oldClientTimeout)
config := DefaultConfig()
config.ReadEnvironment()
client, err := NewClient(config)
_, err := NewClient(config)
if err != nil {
t.Fatal(err)
}
_ = client.NewRequest("PUT", "/")
if client.config.HttpClient.Timeout != time.Second*10 {
t.Fatalf("error setting client timeout using env variable")
}
// Setting custom client timeout for a new request
client.SetClientTimeout(time.Second * 20)
_ = client.NewRequest("PUT", "/")
if client.config.HttpClient.Timeout != time.Second*20 {
t.Fatalf("error setting client timeout using SetClientTimeout")
}
}
type roundTripperFunc func(*http.Request) (*http.Response, error)

View File

@ -6,6 +6,7 @@ import (
"os/signal"
"syscall"
ad "github.com/hashicorp/vault-plugin-secrets-ad/plugin"
gcp "github.com/hashicorp/vault-plugin-secrets-gcp/plugin"
kv "github.com/hashicorp/vault-plugin-secrets-kv"
"github.com/hashicorp/vault/audit"
@ -110,6 +111,7 @@ var (
}
logicalBackends = map[string]logical.Factory{
"ad": ad.Factory,
"aws": aws.Factory,
"cassandra": cassandra.Factory,
"consul": consul.Factory,

View File

@ -0,0 +1,8 @@
import ApplicationAdapter from './application';

// Adapter for raw console requests: presumably looked up as
// `adapter:console` by the console service — verify against the service.
export default ApplicationAdapter.extend({
  // all requests go to the Vault HTTP API under /v1
  namespace: 'v1',
  // use the model name verbatim as the URL path (no pluralization/camelization)
  pathForType(modelName) {
    return modelName;
  },
});

View File

@ -0,0 +1,36 @@
import Ember from 'ember';
import keys from 'vault/lib/keycodes';
// Input row of the console panel: captures keystrokes and delegates to the
// parent via the on* closure actions.
export default Ember.Component.extend({
  'data-test-component': 'console/command-input',
  classNames: 'console-ui-input',

  // closure actions provided by the parent; default to no-ops
  onExecuteCommand() {},
  onFullscreen() {},
  onValueUpdate() {},
  onShiftCommand() {},

  value: null,
  isFullscreen: null,

  didRender() {
    // keep the input row visible as the log grows above it
    this.element.scrollIntoView();
  },

  actions: {
    handleKeyUp(event) {
      const { keyCode } = event;
      if (keyCode === keys.ENTER) {
        // submit the current command
        this.get('onExecuteCommand')(event.target.value);
      } else if (keyCode === keys.UP || keyCode === keys.DOWN) {
        // navigate command history
        this.get('onShiftCommand')(keyCode);
      } else {
        this.get('onValueUpdate')(event.target.value);
      }
    },
    fullscreen() {
      this.get('onFullscreen')();
    },
  },
});

View File

@ -0,0 +1,3 @@
import Ember from 'ember';

// Presentation-only component: all markup lives in its template; no
// behavior beyond the Ember.Component default.
export default Ember.Component.extend({});

View File

@ -0,0 +1,3 @@
import Ember from 'ember';

// Presentation-only component: all markup lives in its template; no
// behavior beyond the Ember.Component default.
export default Ember.Component.extend({});

View File

@ -0,0 +1,3 @@
import Ember from 'ember';

// Presentation-only component: all markup lives in its template; no
// behavior beyond the Ember.Component default.
export default Ember.Component.extend({});

View File

@ -0,0 +1,3 @@
import Ember from 'ember';

// Presentation-only component: all markup lives in its template; no
// behavior beyond the Ember.Component default.
export default Ember.Component.extend({});

View File

@ -0,0 +1,9 @@
import Ember from 'ember';
const { computed } = Ember;

// Renders the key listing of a LIST response (log entries of type 'list').
export default Ember.Component.extend({
  // assumes content is the secret payload with a `keys` array — confirm
  // against logFromResponse in console-helpers
  content: null,
  list: computed('content', function() {
    return this.get('content').keys;
  }),
});

View File

@ -0,0 +1,28 @@
import Ember from 'ember';
import columnify from 'columnify';
const { computed } = Ember;
// Replaces every non-string value in `data` with its JSON serialization.
// Mutates the object in place; used to prepare rows before columnify.
export function stringifyObjectValues(data) {
  for (const key of Object.keys(data)) {
    const value = data[key];
    data[key] = typeof value === 'string' ? value : JSON.stringify(value);
  }
}
// Renders an object log entry as an aligned text table via columnify.
export default Ember.Component.extend({
  content: null,
  columns: computed('content', function() {
    let data = this.get('content');
    // NOTE(review): stringifyObjectValues mutates `content` in place —
    // confirm upstream callers don't rely on the original values.
    stringifyObjectValues(data);
    return columnify(data, {
      preserveNewLines: true,
      headingTransform: function(heading) {
        return Ember.String.capitalize(heading);
      },
    });
  }),
});

View File

@ -0,0 +1,3 @@
import Ember from 'ember';

// Presentation-only component: all markup lives in its template; no
// behavior beyond the Ember.Component default.
export default Ember.Component.extend({});

View File

@ -0,0 +1,3 @@
import Ember from 'ember';

// Presentation-only component: all markup lives in its template; no
// behavior beyond the Ember.Component default.
export default Ember.Component.extend({});

View File

@ -0,0 +1,6 @@
import Ember from 'ember';

// Displays the console log; `log` is an array of { type, content } entries,
// each rendered by the matching console/log-<type> component in the template.
export default Ember.Component.extend({
  'data-test-component': 'console/output-log',
  log: null,
});

View File

@ -0,0 +1,84 @@
import Ember from 'ember';
import {
parseCommand,
extractDataAndFlags,
logFromResponse,
logFromError,
logErrorFromInput,
executeUICommand,
} from 'vault/lib/console-helpers';
const { inject, computed } = Ember;
// The console panel itself: wires the command input and output log to the
// console service and the console-helpers parsing/formatting functions.
export default Ember.Component.extend({
  classNames: 'console-ui-panel-scroller',
  classNameBindings: ['isFullscreen:fullscreen'],
  isFullscreen: false,
  console: inject.service(),
  // current contents of the command input field
  inputValue: null,
  log: computed.alias('console.log'),

  // Records `command` (and optional `logContent`) in the shared service log
  // and clears the input field.
  logAndOutput(command, logContent) {
    this.set('inputValue', '');
    this.get('console').logAndOutput(command, logContent);
  },

  // Parses and runs a single console command. With shouldThrow=true an
  // unsupported verb makes parseCommand throw, which is mapped to the help
  // output below.
  executeCommand(command, shouldThrow = false) {
    let service = this.get('console');
    let serviceArgs;
    // web-CLI-only commands (clear/clearall/fullscreen) short-circuit here
    if(executeUICommand(command, (args) => this.logAndOutput(args), (args) => service.clearLog(args), () => this.toggleProperty('isFullscreen'))){
      return;
    }
    // parse to verify it's valid
    try {
      serviceArgs = parseCommand(command, shouldThrow);
    } catch (e) {
      this.logAndOutput(command, { type: 'help' });
      return;
    }
    // we have an invalid command but don't want to throw
    if (serviceArgs === false) {
      return;
    }
    let [method, flagArray, path, dataArray] = serviceArgs;
    // `var` (not `let`) so data/flags are hoisted and visible below the if
    if (dataArray || flagArray) {
      var { data, flags } = extractDataAndFlags(dataArray, flagArray);
    }
    let inputError = logErrorFromInput(path, method, flags, dataArray);
    if (inputError) {
      this.logAndOutput(command, inputError);
      return;
    }
    // dispatch to the console service method named after the verb
    let serviceFn = service[method];
    serviceFn.call(service, path, data, flags.wrapTTL)
      .then(resp => {
        this.logAndOutput(command, logFromResponse(resp, path, method, flags));
      })
      .catch(error => {
        this.logAndOutput(command, logFromError(error, path, method));
      });
  },

  // Walks the command history (up/down arrows) through the console service,
  // which hands back the value to put in the input.
  shiftCommandIndex(keyCode) {
    this.get('console').shiftCommandIndex(keyCode, (val) => {
      this.set('inputValue', val);
    });
  },

  actions: {
    toggleFullscreen() {
      this.toggleProperty('isFullscreen');
    },
    executeCommand(val) {
      this.executeCommand(val, true);
    },
    shiftCommandIndex(direction) {
      this.shiftCommandIndex(direction);
    },
  },
});

View File

@ -18,6 +18,10 @@ export default IvyCodemirrorComponent.extend({
'data-test-component': 'json-editor',
updateCodeMirrorOptions() {
const options = assign({}, JSON_EDITOR_DEFAULTS, this.get('options'));
if (options.autoHeight) {
options.viewportMargin = Infinity;
delete options.autoHeight;
}
if (options) {
Object.keys(options).forEach(function(option) {

View File

@ -1,18 +1,21 @@
import Ember from 'ember';
import config from '../config/environment';
const { computed, inject } = Ember;
export default Ember.Controller.extend({
env: config.environment,
auth: Ember.inject.service(),
vaultVersion: Ember.inject.service('version'),
activeCluster: Ember.computed('auth.activeCluster', function() {
auth: inject.service(),
vaultVersion: inject.service('version'),
console: inject.service(),
consoleOpen: computed.alias('console.isOpen'),
activeCluster: computed('auth.activeCluster', function() {
return this.store.peekRecord('cluster', this.get('auth.activeCluster'));
}),
activeClusterName: Ember.computed('auth.activeCluster', function() {
activeClusterName: computed('auth.activeCluster', function() {
const activeCluster = this.store.peekRecord('cluster', this.get('auth.activeCluster'));
return activeCluster ? activeCluster.get('name') : null;
}),
showNav: Ember.computed(
showNav: computed(
'activeClusterName',
'auth.currentToken',
'activeCluster.dr.isSecondary',
@ -30,4 +33,9 @@ export default Ember.Controller.extend({
}
}
),
actions: {
toggleConsole() {
this.toggleProperty('consoleOpen');
},
},
});

View File

@ -0,0 +1,7 @@
import Ember from 'ember';
// Helper body: the template hands positional params as an array; join the
// first param (an array of strings) into one newline-separated string.
export function multiLineJoin([lines]) {
  return lines.join('\n');
}
export default Ember.Helper.helper(multiLineJoin);

View File

@ -0,0 +1,183 @@
import keys from 'vault/lib/keycodes';
import argTokenizer from 'yargs-parser-tokenizer';
const supportedCommands = ['read', 'write', 'list', 'delete'];
const uiCommands = ['clearall', 'clear', 'fullscreen'];

// Splits the bare "key=value" operands and "-flag=value" options of a parsed
// command into { data, flags }. Only the first '=' splits, so values may
// themselves contain '='; repeated keys collect into an array; -wrap-ttl is
// renamed wrapTTL; a flag given without a value becomes boolean true.
export function extractDataAndFlags(data, flags) {
  const extracted = { data: {}, flags: {} };
  for (const pair of data.concat(flags)) {
    // will be "key=value" or "-flag=value" or "foo=bar=baz"; split on the first =
    const [key, value] = pair.split(/=(.+)/);
    if (key.startsWith('-')) {
      let flagName = key.replace(/^-/, '');
      if (flagName === 'wrap-ttl') {
        flagName = 'wrapTTL';
      }
      extracted.flags[flagName] = value || true;
      continue;
    }
    if (extracted.data[key]) {
      // repeated foo=bar entries become an array of values
      extracted.data[key] = [].concat(extracted.data[key], value);
      continue;
    }
    extracted.data[key] = value;
  }
  return extracted;
}

// Handles the web-CLI-only commands. Logs the command, performs the matching
// action, and returns true when `command` was one of them so the caller can
// skip normal parsing.
export function executeUICommand(command, logAndOutput, clearLog, toggleFullscreen) {
  const isUICommand = uiCommands.includes(command);
  if (isUICommand) {
    logAndOutput(command);
  }
  if (command === 'clearall') {
    clearLog(true);
  } else if (command === 'clear') {
    clearLog();
  } else if (command === 'fullscreen') {
    toggleFullscreen();
  }
  return isUICommand;
}
// Tokenizes a raw command string into [method, flags, path, data]. Returns
// false for an unsupported method when shouldThrow is falsy, otherwise
// throws. A leading "vault" token is dropped so commands can be pasted
// straight from the real CLI.
export function parseCommand(command, shouldThrow) {
  let tokens = argTokenizer(command);
  if (tokens[0] === 'vault') {
    tokens.shift();
  }

  let [method, ...operands] = tokens;
  let path;
  let flags = [];
  let data = [];

  for (let token of operands) {
    if (token.startsWith('-')) {
      flags.push(token);
    } else if (path) {
      // the first bare token is the path; the rest are key=value data
      data.push(token);
    } else {
      path = token;
    }
  }

  if (!supportedCommands.includes(method)) {
    if (shouldThrow) {
      throw new Error('invalid command');
    }
    return false;
  }
  return [method, flags, path, data];
}
// Builds a { type, content } log entry from a successful API response.
// A falsy response means the operation returned no body (write/delete);
// otherwise the auth/data/wrap_info payload is rendered according to the
// -field and -format=json flags and the method used.
export function logFromResponse(response, path, method, flags) {
  if (!response) {
    let message =
      method === 'write'
        ? `Success! Data written to: ${path}`
        : `Success! Data deleted (if it existed) at: ${path}`;
    return { type: 'success', content: message };
  }

  let { format, field } = flags;
  let secret = response.auth || response.data || response.wrap_info;

  if (field) {
    let fieldValue = secret[field];
    // renamed from `response`, which shadowed the parameter above
    let logEntry;
    if (fieldValue) {
      if (format && format === 'json') {
        return { type: 'json', content: fieldValue };
      }
      switch (typeof fieldValue) {
        case 'string':
          logEntry = { type: 'text', content: fieldValue };
          break;
        default:
          logEntry = { type: 'object', content: fieldValue };
          break;
      }
    } else {
      logEntry = { type: 'error', content: `Field "${field}" not present in secret` };
    }
    return logEntry;
  }

  if (format && format === 'json') {
    // just print whole response
    return { type: 'json', content: response };
  }
  if (method === 'list') {
    return { type: 'list', content: secret };
  }
  return { type: 'object', content: secret };
}
// Formats an API error into a { type: 'error', content } log entry,
// including a per-method verb phrase and any error strings returned.
export function logFromError(error, vaultPath, method) {
  let { httpStatus, path } = error;
  const verbForMethod = {
    read: 'reading from',
    write: 'writing to',
    list: 'listing',
    delete: 'deleting at',
  };
  let content = `Error ${verbForMethod[method]}: ${vaultPath}.\nURL: ${path}\nCode: ${httpStatus}`;
  if (typeof error.errors[0] === 'string') {
    content = `${content}\nErrors:\n ${error.errors.join('\n ')}`;
  }
  return { type: 'error', content };
}
// Computes the next history position for an up/down arrow press.
// Returns [newIndex, newInputValue]; an empty array when there is no history.
// UP walks backwards and wraps from the oldest entry to the newest; DOWN
// walks forward, and stepping past the newest entry yields '' (clears the
// input) while pinning the index at the history length.
export function shiftCommandIndex(keyCode, history, index) {
  let newInputValue;
  let commandHistoryLength = history.length;
  if (!commandHistoryLength) { return []; }
  if (keyCode === keys.UP) {
    // note: a null starting index becomes -1 here and wraps to the last entry
    index -= 1;
    if (index < 0) {
      index = commandHistoryLength - 1;
    }
  } else {
    index += 1;
    if (index === commandHistoryLength) {
      // moved past the newest command: clear the input
      newInputValue = '';
    }
    if (index > commandHistoryLength) {
      index -= 1;
    }
  }
  // anything other than the explicit '' sentinel pulls the command text
  if (newInputValue !== '') {
    newInputValue = history.objectAt(index).content;
  }
  return [index, newInputValue];
}
// Pre-flight validation of a parsed command. Returns an error log entry when
// the request cannot be sent, or undefined when it may proceed.
export function logErrorFromInput(path, method, flags, dataArray) {
  if (path === undefined) {
    return { type: 'error', content: 'A path is required to make a request.' };
  }
  const hasData = dataArray.length > 0;
  // writes need either a payload or an explicit -force flag
  if (method === 'write' && !hasData && !flags.force) {
    return { type: 'error', content: 'Must supply data or use -force' };
  }
}

View File

@ -1,14 +1,18 @@
import Ember from 'ember';
import ModelBoundaryRoute from 'vault/mixins/model-boundary-route';
const { inject } = Ember;
export default Ember.Route.extend(ModelBoundaryRoute, {
auth: Ember.inject.service(),
flashMessages: Ember.inject.service(),
auth: inject.service(),
flashMessages: inject.service(),
console: inject.service(),
modelTypes: ['secret', 'secret-engine'],
beforeModel() {
this.get('auth').deleteCurrentToken();
this.get('console').set('isOpen', false);
this.get('console').clearLog(true);
this.clearModelCache();
this.replaceWith('vault.cluster');
this.get('flashMessages').clearMessages();

View File

@ -49,25 +49,25 @@ export default Ember.Route.extend({
return Ember.RSVP.hash({
secret,
secrets: this.store
.lazyPaginatedQuery(this.getModelType(backend, params.tab), {
id: secret,
backend,
responsePath: 'data.keys',
page: params.page,
pageFilter: params.pageFilter,
size: 100,
})
.then(model => {
this.set('has404', false);
return model;
})
.catch(err => {
if (backendModel && err.httpStatus === 404 && secret === '') {
return [];
} else {
throw err;
}
})
.lazyPaginatedQuery(this.getModelType(backend, params.tab), {
id: secret,
backend,
responsePath: 'data.keys',
page: params.page,
pageFilter: params.pageFilter,
size: 100,
})
.then(model => {
this.set('has404', false);
return model;
})
.catch(err => {
if (backendModel && err.httpStatus === 404 && secret === '') {
return [];
} else {
throw err;
}
}),
});
},

105
ui/app/services/console.js Normal file
View File

@ -0,0 +1,105 @@
// Low level service that allows users to input paths to make requests to vault
// this service provides the UI synecdote to the cli commands read, write, delete, and list
import Ember from 'ember';
import {
shiftCommandIndex,
} from 'vault/lib/console-helpers';
const { Service, getOwner, computed } = Ember;
// Trims surrounding whitespace, then strips any leading/trailing slash runs.
export function sanitizePath(path) {
  const trimmed = path.trim();
  return trimmed.replace(/^\/+|\/+$/g, '');
}
// Appends a '/' when the path does not already end in one (LIST requests
// target folders). NOTE(review): the pattern requires a \w character followed
// by a non-'/' final character, so single-character paths are left unchanged
// — confirm that's intended.
export function ensureTrailingSlash(path) {
  return path.replace(/(\w+[^/]$)/g, '$1/');
}
// HTTP verb used for each console operation; list() additionally sends
// `list: true` in its request data (see the service below).
const VERBS = {
  read: 'GET',
  list: 'GET',
  write: 'POST',
  delete: 'DELETE',
};
export default Service.extend({
  // whether the console panel is shown (toggled from the nav bar)
  isOpen: false,

  // Looks up the console adapter for raw API requests.
  adapter() {
    return getOwner(this).lookup('adapter:console');
  },

  // Only 'command' entries participate in up/down-arrow history.
  commandHistory: computed('log.[]', function() {
    return this.get('log').filterBy('type', 'command');
  }),

  // The shared output log; computed so each service instance gets its own array.
  log: computed(function() {
    return [];
  }),

  // Pointer into commandHistory for arrow-key navigation; null when not browsing.
  commandIndex: null,

  // Translates an arrow keypress into a history entry and hands the new
  // input value to setCommandFn (see console-helpers shiftCommandIndex).
  shiftCommandIndex(keyCode, setCommandFn = () => {}) {
    let [newIndex, newCommand] = shiftCommandIndex(
      keyCode,
      this.get('commandHistory'),
      this.get('commandIndex')
    );
    if (newCommand !== undefined && newIndex !== undefined) {
      this.set('commandIndex', newIndex);
      setCommandFn(newCommand);
    }
  },

  // Empties the visible log. By default past commands are kept (marked
  // hidden) so history navigation keeps working; clearAll=true drops them too.
  clearLog(clearAll=false) {
    let log = this.get('log');
    let history;
    if (!clearAll) {
      history = this.get('commandHistory').slice();
      history.setEach('hidden', true);
    }
    log.clear();
    if (history) {
      log.addObjects(history);
    }
  },

  // Appends the typed command (and its result, when given) to the log and
  // resets history navigation.
  logAndOutput(command, logContent) {
    let log = this.get('log');
    log.pushObject({ type: 'command', content: command });
    this.set('commandIndex', null);
    if (logContent) {
      log.pushObject(logContent);
    }
  },

  // Low-level request helper: maps a console operation onto its HTTP verb
  // (see VERBS) and issues it through the console adapter.
  ajax(operation, path, options = {}) {
    let verb = VERBS[operation];
    let adapter = this.adapter();
    let url = adapter.buildURL(path);
    let { data, wrapTTL } = options;
    return adapter.ajax(url, verb, {
      data,
      wrapTTL,
    });
  },

  // NOTE(review): `data` is accepted but unused for read — confirm intended.
  read(path, data, wrapTTL) {
    return this.ajax('read', sanitizePath(path), { wrapTTL });
  },

  write(path, data, wrapTTL) {
    return this.ajax('write', sanitizePath(path), { data, wrapTTL });
  },

  delete(path) {
    return this.ajax('delete', sanitizePath(path));
  },

  // LIST is issued as a GET with `list: true` data; the path must end in '/'.
  list(path, data, wrapTTL) {
    let listPath = ensureTrailingSlash(sanitizePath(path));
    return this.ajax('list', listPath, {
      data: {
        list: true,
      },
      wrapTTL,
    });
  },
});

View File

@ -171,3 +171,7 @@ $gutter-grey: #2a2f36;
}
}
}
.cm-s-auto-height.CodeMirror {
height: auto;
}

View File

@ -0,0 +1,149 @@
.console-ui-panel-scroller {
background: linear-gradient(to right, #191A1C, #1B212D);
height: 0;
left: 0;
min-height: 400px;
overflow: auto;
position: fixed;
right: 0;
transform: translate3d(0, -400px, 0);
transition: min-height $speed ease-out, transform $speed ease-in;
will-change: transform, min-height;
z-index: 199;
}
// Inner flex column of the console: log entries grow upward from the input.
.console-ui-panel {
  display: flex;
  flex-direction: column;
  justify-content: flex-end;
  padding: $size-8 $size-8 $size-4;
  min-height: 100%;
  color: $white;
  font-size: $body-size;
  font-weight: $font-weight-semibold;
  transition: justify-content $speed ease-in;

  // log output; commands and CodeMirror lines keep their own indentation
  pre, p {
    background: none;
    color: inherit;
    font-size: $body-size;
    &:not(.console-ui-command):not(.CodeMirror-line) {
      padding-left: $console-spacing;
    }
  }

  // JSON output rendered through CodeMirror
  .cm-s-hashi.CodeMirror {
    background-color: rgba($black, 0.5) !important;
    font-weight: $font-weight-normal;
    margin-left: $console-spacing;
    padding: $size-8 $size-4;
  }

  // NOTE(review): the trailing comma after `.button` leaves a dangling
  // selector — looks like a second selector was dropped (or the comma
  // should go); confirm against the original stylesheet.
  .button,
  {
    background: transparent;
    border: none;
    color: $grey-dark;
    min-width: 0;
    padding: 0 $size-8;
    &.active,
    &:hover {
      background: $blue;
      color: $white;
    }
  }
}
.console-ui-input {
align-items: center;
display: flex;
input {
background-color: rgba($black, 0.5);
border: 0;
caret-color: $white;
color: $white;
flex: 1;
font-family: $family-monospace;
font-size: $body-size;
font-weight: $font-weight-bold;
margin-left: -$size-10;
outline: none;
padding: $size-10;
transition: background-color $speed;
}
}
.console-ui-command {
line-height: 2;
}
.console-ui-output {
transition: background-color $speed;
padding-right: $size-2;
position: relative;
.console-ui-output-actions {
opacity: 0;
position: absolute;
right: 0;
top: 0;
transition: opacity $speed;
will-change: opacity;
}
&:hover {
background: rgba($black, 0.25);
.console-ui-output-actions {
opacity: 1;
}
}
}
.console-ui-alert {
margin-left: calc(#{$console-spacing} - 0.33rem);
position: relative;
.icon {
position: absolute;
left: 0;
top: 0;
}
}
.panel-open .console-ui-panel-scroller {
transform: translate3d(0, 0, 0);
}
.panel-open .console-ui-panel-scroller.fullscreen {
bottom: 0;
top: 0;
min-height: 100%;
}
.panel-open {
.navbar, .navbar-sections{
transition: transform $speed ease-in;
}
}
.panel-open.panel-fullscreen {
.navbar, .navbar-sections{
transform: translate3d(0, -100px, 0);
}
}
.page-container > header {
background: linear-gradient(to right, #191A1C, #1B212D);
}
header .navbar,
header .navbar-sections {
z-index: 200;
transform: translate3d(0, 0, 0);
will-change: transform;
}

View File

@ -0,0 +1,10 @@
.env-banner {
&,
&:not(:last-child):not(:last-child) {
margin: 0;
}
.level-item {
padding: $size-10 $size-8;
}
}

View File

@ -1,6 +1,6 @@
.is-status-chevron {
line-height: 0;
padding: 0.25em 0 0.25em 0.25em;
padding: 0.3em 0 0 $size-11;
}
.status-menu-user-trigger {

View File

@ -5,7 +5,7 @@
.box {
position: relative;
color: $white;
width: 200px;
max-width: 200px;
background: $grey;
padding: 0.5rem;
line-height: 1.4;
@ -28,6 +28,16 @@
.ember-basic-dropdown-content--left.tool-tip {
margin: 8px 0 0 -11px;
}
.ember-basic-dropdown-content--below.ember-basic-dropdown-content--right.tool-tip {
@include css-top-arrow(8px, $grey, 1px, $grey-dark, calc(100% - 20px));
}
.ember-basic-dropdown-content--above.ember-basic-dropdown-content--right.tool-tip {
@include css-bottom-arrow(8px, $grey, 1px, $grey-dark, calc(100% - 20px));
}
.ember-basic-dropdown-content--above.tool-tip {
margin-top: -2px;
}
.tool-tip-trigger {
border: none;
border-radius: 20px;

View File

@ -10,7 +10,8 @@
}
.modal-background {
background-image: url("/ui/vault-hex.svg"), linear-gradient(90deg, #191A1C, #1B212D);
background-image: url("/ui/vault-hex.svg"),
linear-gradient(90deg, #191a1c, #1b212d);
opacity: 0.97;
}

View File

@ -1,52 +1,52 @@
@keyframes vault-loading-animation {
0%,
70%,
100% {
transform: scale3D(1, 1, 1);
}
35% {
transform: scale3D(0, 0, 1);
}
@keyframes vault-loading-animation {
0%,
70%,
100% {
transform: scale3D(1, 1, 1);
}
#vault-loading {
polygon {
animation: vault-loading-animation 1.3s infinite ease-in-out;
transform-origin: 50% 50%;
fill: #DCE2E9;
}
.vault-loading-order-1 {
animation-delay: .1s;
}
.vault-loading-order-2 {
animation-delay: .2s;
}
.vault-loading-order-3 {
animation-delay: .3s;
}
.vault-loading-order-4 {
animation-delay: .4s;
}
35% {
transform: scale3D(0, 0, 1);
}
}
#vault-loading {
polygon {
animation: vault-loading-animation 1.3s infinite ease-in-out;
transform-origin: 50% 50%;
fill: #dce2e9;
}
#vault-loading-animated {
@media all and (-ms-high-contrast: none), (-ms-high-contrast: active) {
// For IE11
display: none;
}
.vault-loading-order-1 {
animation-delay: .1s;
}
#vault-loading-static {
.vault-loading-order-2 {
animation-delay: .2s;
}
.vault-loading-order-3 {
animation-delay: .3s;
}
.vault-loading-order-4 {
animation-delay: .4s;
}
}
#vault-loading-animated {
@media all and (-ms-high-contrast: none), (-ms-high-contrast: active) {
// For IE11
display: none;
font-size: 9px;
}
}
@media all and (-ms-high-contrast: none), (-ms-high-contrast: active) {
// For IE11
display: block;
}
}
#vault-loading-static {
display: none;
font-size: 9px;
@media all and (-ms-high-contrast: none), (-ms-high-contrast: active) {
// For IE11
display: block;
}
}

View File

@ -46,6 +46,8 @@
@import "./components/box-label";
@import "./components/codemirror";
@import "./components/confirm";
@import "./components/console-ui-panel";
@import "./components/env-banner";
@import "./components/form-section";
@import "./components/global-flash";
@import "./components/init-illustration";

View File

@ -12,7 +12,8 @@ $button-box-shadow-standard: 0 3px 1px 0 rgba($black, 0.12);
min-width: 6rem;
padding: $size-10 $size-8;
text-decoration: none;
transition: background-color $speed, border-color $speed, box-shadow $speed, color $speed;
transition: background-color $speed, border-color $speed, box-shadow $speed,
color $speed;
vertical-align: middle;
&.is-icon {

View File

@ -33,13 +33,16 @@ input::-webkit-inner-spin-button {
.link {
background: transparent;
border: 0;
color: $blue;
cursor: pointer;
display: inline;
font: inherit;
line-height: normal;
margin: 0;
padding: 0;
text-decoration: underline;
-moz-user-select: text;
color: $blue;
cursor: pointer;
display: inline;
font: inherit;
line-height: normal;
margin: 0;
padding: 0;
text-decoration: underline;
-webkit-user-select: text; /* Chrome all / Safari all */
-moz-user-select: text; /* Firefox all */
-ms-user-select: text; /* IE 10+ */
user-select: text;
}

View File

@ -37,7 +37,9 @@ $border: $grey-light;
$hr-margin: 1rem 0;
//typography
$family-sans: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue", sans-serif;
$family-sans: system-ui, -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto",
"Oxygen", "Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
sans-serif;
$family-primary: $family-sans;
$body-size: 14px;
$size-3: (24/14) + 0rem;
@ -46,6 +48,7 @@ $size-8: (12/14) + 0rem;
$size-9: 0.75rem;
$size-10: 0.5rem;
$size-11: 0.25rem;
$console-spacing: 1.5rem;
$size-small: $size-8;
$font-weight-normal: 400;
$font-weight-semibold: 600;

View File

@ -14,7 +14,7 @@
}
@include keyframes(drop-fade-below) {
0% {
0% {
opacity: 0;
transform: translateY(-1rem);
}
@ -25,7 +25,7 @@
}
@include keyframes(drop-fade-above) {
0% {
0% {
opacity: 0;
transform: translateY(1rem);
}

View File

@ -1,11 +1,15 @@
@mixin css-top-arrow($size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) {
@mixin css-arrow($vertical-direction, $size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) {
& {
border: 1px solid $border-color;
}
&:after,
&:before {
bottom: 100%;
@if ($vertical-direction == 'top') {
bottom: 100%;
} @else {
top: 100%;
}
border: solid transparent;
content: " ";
height: 0;
@ -28,6 +32,12 @@
left: calc(#{$left} + #{$left-offset});
margin-left: -($size + round(1.41421356 * $border-width));
}
&:before,
&:after {
@if ($vertical-direction == 'bottom') {
transform: rotate(180deg);
}
}
@at-root .ember-basic-dropdown-content--left#{&} {
&:after,
@ -38,6 +48,13 @@
}
}
@mixin css-top-arrow($size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) {
@include css-arrow('top', $size, $color, $border-width, $border-color, $left, $left-offset);
}
@mixin css-bottom-arrow($size, $color, $border-width, $border-color, $left: 50%, $left-offset: 0px) {
@include css-arrow('bottom', $size, $color, $border-width, $border-color, $left, $left-offset);
}
@mixin vault-block {
&:not(:last-child) {
margin-bottom: (5/14) + 0rem;

View File

@ -1,6 +1,6 @@
<div class="page-container">
{{#if showNav}}
<header data-test-header-with-nav>
<header data-test-header-with-nav class="{{if consoleOpen 'panel-open'}} {{if consoleFullscreen ' panel-fullscreen'}}">
<nav class="navbar has-dark-grey-gradient is-grouped-split">
<div class="navbar-brand">
{{#home-link class="navbar-item has-text-white has-current-color-fill"}}
@ -8,6 +8,17 @@
{{/home-link}}
</div>
<div class="navbar-end is-divider-list is-flex">
<div class="navbar-item">
<button type="button" class="button is-transparent" {{action 'toggleConsole'}}>
{{#if consoleOpen}}
{{i-con glyph="console-active" size=24}}
{{i-con glyph="chevron-up" aria-hidden="true" size=8 class="has-text-white auto-width is-status-chevron"}}
{{else}}
{{i-con glyph="console" size=24}}
{{i-con glyph="chevron-down" aria-hidden="true" size=8 class="has-text-white auto-width is-status-chevron"}}
{{/if}}
</button>
</div>
<div class="navbar-item">
{{status-menu}}
</div>
@ -55,6 +66,7 @@
</a>
</li>
</ul>
{{console/ui-panel isFullscreen=consoleFullscreen}}
</header>
{{/if}}
<div class="global-flash">
@ -129,7 +141,7 @@
</div>
</footer>
{{#if (eq env "development") }}
<div class="level development">
<div class="env-banner level development">
<div class="level-item notification has-background-dark has-text-white">
{{i-con glyph="wand" class="type-icon"}}Local Development
</div>

View File

@ -0,0 +1,16 @@
{{i-con glyph="chevron-right" size=12}}
<input onkeyup={{action 'handleKeyUp'}} value={{value}} />
{{#tool-tip horizontalPosition="auto-right" verticalPosition=(if isFullscreen "above" "below") as |d|}}
{{#d.trigger tagName="button" type="button" class=(concat "button is-compact" (if isFullscreen " active")) click=(action "fullscreen") data-test-tool-tip-trigger=true}}
{{i-con glyph=(if isFullscreen "fullscreen-close" "fullscreen-open") aria-hidden="true" size=16}}
{{/d.trigger}}
{{#d.content class="tool-tip"}}
<div class="box">
{{#if isFullscreen}}
Minimize
{{else}}
Maximize
{{/if}}
</div>
{{/d.content}}
{{/tool-tip}}

View File

@ -0,0 +1 @@
<pre class="console-ui-command">{{i-con glyph="chevron-right" size=12}}{{content}}</pre>

View File

@ -0,0 +1,4 @@
<div class="console-ui-alert has-text-danger">
{{i-con glyph="close-circled" aria-hidden="true" size=12}}
<pre>{{content}}</pre>
</div>

View File

@ -0,0 +1,16 @@
<div class="console-ui-alert has-text-grey">
{{i-con glyph="information-circled" aria-hidden="true" size=12}}
<pre>Usage: vault &lt;command&gt; [args]
Commands:
read Read data and retrieves secrets
write Write data, configuration, and secrets
delete Delete secrets and configuration
list List data or secrets
Web CLI Commands:
fullscreen Toggle fullscreen display
clear Clear output from the log
clearall Clear output and command history
</pre>
</div>

View File

@ -0,0 +1,10 @@
{{!-- Read-only JSON view of a log entry: the content is stringified and shown
      in a json-editor configured as plain, auto-sized output (no line numbers
      or gutters). --}}
{{json-editor
  value=(stringify content)
  options=(hash
    readOnly=true
    lineNumbers=false
    autoHeight=true
    gutters=false
    theme='hashi auto-height'
  )
}}

View File

@ -0,0 +1,21 @@
{{!-- Output for a `list` command: a "Keys" heading followed by one key per
      line, plus a copy button whose clipboard text is the newline-joined key
      list. Lines inside <pre> stay flush-left — their whitespace is output. --}}
<div class="console-ui-output">
  <pre>Keys
{{#each list as |item|}}
{{item}}
{{/each}}
</pre>
  <div class="console-ui-output-actions">
    {{#tool-tip renderInPlace=true as |d|}}
      {{#d.trigger data-test-tool-tip-trigger=true}}
        {{#copy-button clipboardText=(multi-line-join list) class="button is-compact"}}
          {{i-con glyph="copy" aria-hidden="true" size=16}}
        {{/copy-button}}
      {{/d.trigger}}
      {{#d.content class="tool-tip"}}
        <div class="box">
          Copy
        </div>
      {{/d.content}}
    {{/tool-tip}}
  </div>
</div>

View File

@ -0,0 +1,18 @@
{{!-- Output for an object result: `columns` (pre-formatted tabular text) in a
      <pre>, plus a copy button that puts the same text on the clipboard. --}}
<div class="console-ui-output">
  <pre>{{columns}}</pre>
  <div class="console-ui-output-actions">
    {{#tool-tip renderInPlace=true as |d|}}
      {{#d.trigger data-test-tool-tip-trigger=true}}
        {{#copy-button clipboardText=columns class="button is-compact"}}
          {{i-con glyph="copy" aria-hidden="true" size=16}}
        {{/copy-button}}
      {{/d.trigger}}
      {{#d.content class="tool-tip"}}
        <div class="box">
          Copy
        </div>
      {{/d.content}}
    {{/tool-tip}}
  </div>
</div>

View File

@ -0,0 +1,4 @@
{{!-- Success log entry: green alert styling with a checkmark icon and the
      preformatted message text. --}}
<div class="console-ui-alert has-text-success">
  {{i-con glyph="checkmark-circled" aria-hidden="true" size=12}}
  <pre>{{content}}</pre>
</div>

View File

@ -0,0 +1 @@
{{!-- Plain-text log entry: content rendered verbatim in a <pre>. --}}
<pre>{{content}}</pre>

View File

@ -0,0 +1,5 @@
{{!-- Renders each log entry with the component matching its type
      (console/log-command, console/log-error, console/log-json, …).
      Entries flagged `hidden` (e.g. ones suppressed from view) are skipped. --}}
{{#each log as |message|}}
  {{#unless message.hidden}}
    {{component (concat 'console/log-' message.type) content=message.content}}
  {{/unless}}
{{/each}}

View File

@ -0,0 +1,16 @@
{{!-- The Browser CLI panel: intro text, the output log, and the command input
      wired to the panel's fullscreen/execute/history actions. --}}
<div class="console-ui-panel">
  <div class="content">
    <p class="has-text-grey is-font-mono">
      The Vault Browser CLI provides an easy way to execute the most common CLI commands, such as write, read, delete, and list.
    </p>
  </div>
  {{console/output-log log=log}}
  {{console/command-input
    isFullscreen=isFullscreen
    value=inputValue
    onValueUpdate=(action (mut inputValue))
    onFullscreen=(action 'toggleFullscreen')
    onExecuteCommand=(action 'executeCommand')
    onShiftCommand=(action 'shiftCommandIndex')
  }}
</div>

View File

@ -0,0 +1,21 @@
<g transform="translate(458 28)">
<path
d="M2.8,39h-409.6c-17,0-30.7,13.9-30.7,30.9V400c0,17.1,13.8,30.9,30.7,30.9H2.8c17,0,30.7-13.9,30.7-30.9 V70C33.5,52.9,19.8,39,2.8,39z"
fill="#0068FF"
/>
<path
d="M2.8,18.4h-409.6c-28.3,0-51.2,23.1-51.2,51.6V400c0,28.5,22.9,51.6,51.2,51.6H2.8 c28.3,0,51.2-23.1,51.2-51.6V70C54,41.5,31.1,18.4,2.8,18.4z M33.5,400c0,17.1-13.8,30.9-30.7,30.9h-409.6 c-17,0-30.7-13.9-30.7-30.9V70c0-17.1,13.8-30.9,30.7-30.9H2.8c17,0,30.7,13.9,30.7,30.9V400z"
fill="#8AB1FF"
/>
<polygon
points="-241.9,235.9 -241.7,235.7 -262.8,214.6 -263,214.8 -319.1,158.7 -340.2,179.8 -284.1,235.9 -340.2,292 -319.1,313.1 -263,257 -262.8,257.2 -241.7,236.1"
fill="#fff"
/>
<rect
x="-241.7"
y="283.5"
width="157.5"
height="29.5"
fill="#fff"
/>
</g>

View File

@ -0,0 +1,17 @@
<g transform="translate(458 28)">
<path
d="M2.8,18.4h-409.6c-28.3,0-51.2,23.1-51.2,51.6V400c0,28.5,22.9,51.6,51.2,51.6H2.8 c28.3,0,51.2-23.1,51.2-51.6V70C54,41.5,31.1,18.4,2.8,18.4z M33.5,400c0,17.1-13.8,30.9-30.7,30.9h-409.6 c-17,0-30.7-13.9-30.7-30.9V70c0-17.1,13.8-30.9,30.7-30.9H2.8c17,0,30.7,13.9,30.7,30.9V400z"
fill="#B3B9C0"
/>
<polygon
points="-241.9,235.9 -241.7,235.7 -262.8,214.6 -263,214.8 -319.1,158.7 -340.2,179.8 -284.1,235.9 -340.2,292 -319.1,313.1 -263,257 -262.8,257.2 -241.7,236.1"
fill="#fff"
/>
<rect
x="-241.7"
y="283.5"
width="157.5"
height="29.5"
fill="#fff"
/>
</g>

View File

@ -0,0 +1 @@
<path d="M272.769063,297.207977 L173.82716,297.207977 L173.82716,465.646091 C173.82716,464.098951 174.365679,464.592593 176.987654,464.592593 L455.111111,464.592593 C457.733087,464.592593 458.271605,464.098951 458.271605,465.646091 L458.271605,141.168724 C458.271605,142.715864 457.733087,142.222222 455.111111,142.222222 L176.987654,142.222222 C174.365679,142.222222 173.82716,142.715864 173.82716,141.168724 L173.82716,260.740741 L280.463115,260.740741 L230.986572,211.264198 L256.515702,185.735068 L345.867656,275.087023 L345.827346,275.127333 L345.867656,275.167643 L256.515702,364.519598 L230.986572,338.990468 L272.769063,297.207977 Z M126.419753,260.740741 L126.419753,141.168724 C126.419753,115.568167 149.059774,94.8148148 176.987654,94.8148148 L300.246914,94.8148148 L300.246914,47.4074074 L47.4074074,47.4074074 L47.4074074,335.012346 L126.419753,335.012346 L126.419753,297.207977 L79.0123457,297.207977 L79.0123457,260.740741 L126.419753,260.740741 Z M126.419753,382.419753 L46.3539095,382.419753 C20.7533522,382.419753 0,363.395847 0,339.928669 L0,42.4910837 C0,19.0239062 20.7533522,0 46.3539095,0 L301.300412,0 C326.900969,0 347.654321,19.0239062 347.654321,42.4910837 L347.654321,94.8148148 L455.111111,94.8148148 C483.038992,94.8148148 505.679012,115.568167 505.679012,141.168724 L505.679012,465.646091 C505.679012,491.246648 483.038992,512 455.111111,512 L176.987654,512 C149.059774,512 126.419753,491.246648 126.419753,465.646091 L126.419753,382.419753 Z"/>

View File

@ -0,0 +1 @@
<path d="M186.527813,356.143158 L47.8085404,494.86243 L21.6999823,468.753872 L165.927777,324.526077 L53.6999823,324.526077 L53.6999823,287.97114 L223.02503,287.97114 L223.02503,288.02886 L223.08275,288.02886 L223.08275,461.722531 L186.527813,461.722531 L186.527813,356.143158 Z M332.526077,154.910732 L471.245349,16.1914596 L497.353908,42.3000177 L353.126113,186.527813 L465.308859,186.527813 L465.308859,223.08275 L296.02886,223.08275 L296.02886,223.02503 L295.97114,223.02503 L295.97114,53.6999823 L332.526077,53.6999823 L332.526077,154.910732 Z"/>

View File

@ -0,0 +1 @@
<path d="M36.5260773,442.910732 L175.245349,304.19146 L201.353908,330.300018 L57.1261127,474.527813 L159.08275,474.527813 L159.08275,511.08275 L0.0288599258,511.08275 L0.0288599258,511.02503 L-0.0288599258,511.02503 L-0.0288599258,351.97114 L36.5260773,351.97114 L36.5260773,442.910732 Z M474.527813,68.143158 L335.80854,206.86243 L309.699982,180.753872 L453.927777,36.5260773 L351.97114,36.5260773 L351.97114,-0.0288599258 L511.02503,-0.0288599258 L511.02503,0.0288599258 L511.08275,0.0288599258 L511.08275,159.08275 L474.527813,159.08275 L474.527813,68.143158 Z"/>

View File

@ -55,6 +55,18 @@ module.exports = function(defaults) {
app.import('node_modules/text-encoder-lite/index.js');
app.import('node_modules/Duration.js/duration.js');
app.import('node_modules/columnify/columnify.js', {
using: [
{ transformation: 'cjs', as: 'columnify' }
]
});
app.import('node_modules/yargs-parser/lib/tokenize-arg-string.js', {
using: [
{ transformation: 'cjs', as: 'yargs-parser-tokenizer' }
]
});
// Use `app.import` to add additional libraries to the generated
// output files.
//

View File

@ -14,7 +14,7 @@
"start2": "ember server --proxy=http://localhost:8202 --port=4202",
"test": "node scripts/start-vault.js & ember test",
"test-oss": "yarn run test -f='!enterprise'",
"fmt-js": "prettier-eslint --single-quote --trailing-comma es5 --print-width=110 --write {app,tests,config,lib,mirage}/**/*.js",
"fmt-js": "prettier-eslint --single-quote --no-use-tabs --trailing-comma es5 --print-width=110 --write '{app,tests,config,lib,mirage}/**/*.js'",
"fmt-styles": "prettier --write app/styles/**/*.*",
"fmt": "yarn run fmt-js && yarn run fmt-styles",
"precommit": "lint-staged"
@ -33,19 +33,24 @@
}
},
"devDependencies": {
"Duration.js": "icholy/Duration.js#golang_compatible",
"autosize": "3.0.17",
"babel-plugin-transform-object-rest-spread": "^6.23.0",
"base64-js": "1.2.1",
"broccoli-asset-rev": "^2.4.5",
"broccoli-sri-hash": "meirish/broccoli-sri-hash#rooturl",
"bulma": "^0.5.2",
"bulma-switch": "^0.0.1",
"codemirror": "5.15.2",
"cool-checkboxes-for-bulma.io": "^1.1.0",
"ember-ajax": "^3.0.0",
"ember-api-actions": "^0.1.8",
"ember-basic-dropdown": "^0.33.5",
"ember-basic-dropdown-hover": "^0.2.0",
"ember-cli": "~2.15.0",
"ember-cli": "~2.16.0",
"ember-cli-autoprefixer": "^0.8.1",
"ember-cli-babel": "^6.3.0",
"ember-cli-cjs-transform": "^1.2.0",
"ember-cli-clipboard": "^0.8.0",
"ember-cli-content-security-policy": "^1.0.0",
"ember-cli-dependency-checker": "^1.3.0",
@ -83,18 +88,16 @@
"ember-test-selectors": "^0.3.6",
"ember-truth-helpers": "1.2.0",
"ivy-codemirror": "2.1.0",
"jsonlint": "1.6.0",
"loader.js": "^4.2.3",
"normalize.css": "4.1.1",
"prettier": "^1.5.3",
"prettier-eslint-cli": "^4.2.1",
"qunit-dom": "^0.6.2",
"string.prototype.startswith": "mathiasbynens/String.prototype.startsWith",
"text-encoder-lite": "1.0.0",
"base64-js": "1.2.1",
"autosize": "3.0.17",
"jsonlint": "1.6.0",
"codemirror": "5.15.2",
"Duration.js": "icholy/Duration.js#golang_compatible",
"string.prototype.startswith": "mathiasbynens/String.prototype.startsWith"
"columnify": "^1.5.4",
"yargs-parser": "^10.0.0"
},
"engines": {
"node": "^4.5 || 6.* || >= 7.*"

View File

@ -0,0 +1,15 @@
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';

moduleForComponent('console/log-command', 'Integration | Component | console/log command', {
  integration: true,
});

// The component should echo the command string it was given inside its <pre>.
test('it renders', function(assert) {
  let command = 'list this/path';
  this.set('content', command);
  this.render(hbs`{{console/log-command content=content}}`);
  assert.dom('pre').includesText(command);
});

View File

@ -0,0 +1,13 @@
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';

moduleForComponent('console/log-error', 'Integration | Component | console/log error', {
  integration: true,
});

// A multi-line error message should be rendered verbatim inside the <pre>.
test('it renders', function(assert) {
  let message = 'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404';
  this.set('content', message);
  this.render(hbs`{{console/log-error content=content}}`);
  assert.dom('pre').includesText(message);
});

View File

@ -0,0 +1,24 @@
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';

moduleForComponent('console/log-json', 'Integration | Component | console/log json', {
  integration: true,
  beforeEach() {
    // The component renders through a CodeMirror-backed json-editor; inject
    // the code-mirror service so the test can look up the editor instance.
    this.inject.service('code-mirror', { as: 'codeMirror' });
  },
});

test('it renders', function(assert) {
  // The component should pretty-print its object content (2-space JSON)
  // into the read-only editor.
  const objectContent = { one: 'two', three: 'four', seven: { five: 'six' }, eight: [5, 6] };
  const expectedText = JSON.stringify(objectContent, null, 2);
  this.set('content', objectContent);
  this.render(hbs`{{console/log-json content=content}}`);
  // Resolve the rendered editor instance via its DOM id, then compare text.
  const instance = this.codeMirror.instanceFor(this.$('[data-test-component=json-editor]').attr('id'));
  assert.equal(instance.getValue(), expectedText);
});

View File

@ -0,0 +1,19 @@
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';

moduleForComponent('console/log-list', 'Integration | Component | console/log list', {
  integration: true,
});

// Rendering a `keys` array should produce a "Keys" heading followed by one
// line per key.
test('it renders', function(assert) {
  let content = { keys: ['one', 'two'] };
  this.set('content', content);
  this.render(hbs`{{console/log-list content=content}}`);
  assert.dom('pre').includesText('Keys\none\ntwo');
});

View File

@ -0,0 +1,27 @@
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';
import columnify from 'columnify';
import { capitalize } from 'vault/helpers/capitalize';
import { stringifyObjectValues } from 'vault/components/console/log-object';

moduleForComponent('console/log-object', 'Integration | Component | console/log object', {
  integration: true,
});

// The component columnifies its object content with capitalized headings;
// build the same table here independently and compare against the render.
test('it renders', function(assert) {
  let content = { one: 'two', three: 'four', seven: { five: 'six' }, eight: [5, 6] };
  let data = { one: 'two', three: 'four', seven: { five: 'six' }, eight: [5, 6] };
  stringifyObjectValues(data);
  let expected = columnify(data, {
    preserveNewLines: true,
    headingTransform: function(heading) {
      return capitalize([heading]);
    },
  });
  this.set('content', content);
  this.render(hbs`{{console/log-object content=content}}`);
  assert.dom('pre').includesText(expected);
});

View File

@ -0,0 +1,17 @@
import { moduleForComponent, test } from 'ember-qunit';
import hbs from 'htmlbars-inline-precompile';

moduleForComponent('console/log-text', 'Integration | Component | console/log text', {
  integration: true,
});

// Plain text content should pass straight through into the <pre>.
test('it renders', function(assert) {
  let message = 'Success! You did a thing!';
  this.set('content', message);
  this.render(hbs`{{console/log-text content=content}}`);
  assert.dom('pre').includesText(message);
});

View File

@ -0,0 +1,118 @@
import { moduleForComponent, test } from 'ember-qunit';
import { create } from 'ember-cli-page-object';
import wait from 'ember-test-helpers/wait';
import uiPanel from 'vault/tests/pages/components/console/ui-panel';
import hbs from 'htmlbars-inline-precompile';

// Page object wrapping the console input/log selectors and key triggers.
const component = create(uiPanel);

moduleForComponent('console/ui-panel', 'Integration | Component | console/ui panel', {
  integration: true,
  beforeEach() {
    component.setContext(this);
  },
  afterEach() {
    component.removeContext();
  },
});

test('it renders', function(assert) {
  this.render(hbs`{{console/ui-panel}}`);
  assert.ok(component.hasInput);
});

test('it clears console input on enter', function(assert) {
  this.render(hbs`{{console/ui-panel}}`);
  component.consoleInput('list this/thing/here').enter();
  return wait().then(() => {
    assert.equal(component.consoleInputValue, '', 'empties input field on enter');
  });
});

// NOTE(review): the tests below issue several bare wait().then(...) calls that
// are not chained to one another; only the last promise is returned to QUnit.
// They rely on each wait() settling after the preceding interaction, so the
// statement ORDER is significant — do not reorder.
test('it clears the log when using clear command', function(assert) {
  this.render(hbs`{{console/ui-panel}}`);
  component.consoleInput('list this/thing/here').enter();
  component.consoleInput('list this/other/thing').enter();
  component.consoleInput('read another/thing').enter();
  wait().then(() => {
    assert.notEqual(component.logOutput, '', 'there is output in the log');
    component.consoleInput('clear').enter();
  });
  // Pressing UP after `clear` should recall the cleared command itself.
  wait().then(() => component.up());
  return wait().then(() => {
    assert.equal(component.logOutput, '', 'clears the output log');
    assert.equal(
      component.consoleInputValue,
      'clear',
      'populates console input with previous command on up after enter'
    );
  });
});

test('it adds command to history on enter', function(assert) {
  this.render(hbs`{{console/ui-panel}}`);
  component.consoleInput('list this/thing/here').enter();
  wait().then(() => component.up());
  wait().then(() => {
    assert.equal(
      component.consoleInputValue,
      'list this/thing/here',
      'populates console input with previous command on up after enter'
    );
  });
  wait().then(() => component.down());
  return wait().then(() => {
    assert.equal(component.consoleInputValue, '', 'populates console input with next command on down');
  });
});

test('it cycles through history with more than one command', function(assert) {
  this.render(hbs`{{console/ui-panel}}`);
  // Seed three commands, then walk backwards through history with UP;
  // a fourth UP should wrap around to the most recent command again.
  component.consoleInput('list this/thing/here').enter();
  wait().then(() => component.consoleInput('read that/thing/there').enter());
  wait().then(() => component.consoleInput('qwerty').enter());
  wait().then(() => component.up());
  wait().then(() => {
    assert.equal(
      component.consoleInputValue,
      'qwerty',
      'populates console input with previous command on up after enter'
    );
  });
  wait().then(() => component.up());
  wait().then(() => {
    assert.equal(
      component.consoleInputValue,
      'read that/thing/there',
      'populates console input with previous command on up'
    );
  });
  wait().then(() => component.up());
  wait().then(() => {
    assert.equal(
      component.consoleInputValue,
      'list this/thing/here',
      'populates console input with previous command on up'
    );
  });
  wait().then(() => component.up());
  wait().then(() => {
    assert.equal(
      component.consoleInputValue,
      'qwerty',
      'populates console input with initial command if cycled through all previous commands'
    );
  });
  wait().then(() => component.down());
  return wait().then(() => {
    assert.equal(
      component.consoleInputValue,
      '',
      'clears console input if down pressed after history is on most recent command'
    );
  });
});

View File

@ -0,0 +1,18 @@
import { text, triggerable, fillable, value, isPresent } from 'ember-cli-page-object';
import keys from 'vault/lib/keycodes';

const input = '[data-test-component="console/command-input"] input';

// Builds a keyup trigger on the console input for the given key code.
const keyup = keyCode =>
  triggerable('keyup', input, {
    eventProperties: { keyCode },
  });

// Page object for the Browser CLI panel: fill/read the command input, read
// the output log, and simulate history navigation and command submission.
export default {
  consoleInput: fillable(input),
  consoleInputValue: value(input),
  logOutput: text('[data-test-component="console/output-log"]'),
  up: keyup(keys.UP),
  down: keyup(keys.DOWN),
  enter: keyup(keys.ENTER),
  hasInput: isPresent(input),
};

View File

@ -0,0 +1,13 @@
import { moduleFor, test } from 'ember-qunit';

moduleFor('adapter:console', 'Unit | Adapter | console', {
  needs: ['service:auth', 'service:flash-messages', 'service:version'],
});

// The console adapter should prefix any API path with the /v1 version segment.
test('it builds the correct URL', function(assert) {
  const adapter = this.subject();
  assert.equal(adapter.buildURL('sys/health'), '/v1/sys/health');
  assert.equal(adapter.buildURL('aws/roles/my-other-role'), '/v1/aws/roles/my-other-role');
});

View File

@ -0,0 +1,328 @@
import { module, test } from 'qunit';
import {
  parseCommand,
  extractDataAndFlags,
  logFromResponse,
  logFromError,
  logErrorFromInput,
} from 'vault/lib/console-helpers';

module('lib/console-helpers', 'Unit | Lib | console helpers');

// Each case maps a raw command string to the [method, flags, path, dataArray]
// tuple parseCommand is expected to return.
const testCommands = [
  {
    name: 'write with data',
    // Backslash-newline continuations inside the template literal mimic a
    // multi-line shell command.
    command: `vault write aws/config/root \
access_key=AKIAJWVN5Z4FOFT7NLNA \
secret_key=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 \
region=us-east-1`,
    expected: [
      'write',
      [],
      'aws/config/root',
      [
        'access_key=AKIAJWVN5Z4FOFT7NLNA',
        'secret_key=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',
        'region=us-east-1',
      ],
    ],
  },
  {
    name: 'read with field',
    command: `vault read -field=access_key aws/creds/my-role`,
    expected: ['read', ['-field=access_key'], 'aws/creds/my-role', []],
  },
];

testCommands.forEach(function(testCase) {
  test(`#parseCommand: ${testCase.name}`, function(assert) {
    let result = parseCommand(testCase.command);
    assert.deepEqual(result, testCase.expected);
  });
});
// Unsupported subcommands (anything but read/write/list/delete) are rejected:
// silently by default, loudly when `shouldThrow` is set.
test('#parseCommand: invalid commands', function(assert) {
  const command = 'vault kv get foo';
  assert.equal(parseCommand(command), false, 'parseCommand returns false by default');
  assert.throws(
    () => {
      parseCommand(command, true);
    },
    /invalid command/,
    'throws on invalid command when `shouldThrow` is true'
  );
});
// Cases for extractDataAndFlags: input is [dataArray, flagArray]; expected is
// the parsed { data, flags } object. Repeated keys collect into arrays, and
// only the first '=' splits a key from its value.
const testExtractCases = [
  {
    name: 'data fields',
    input: [
      [
        'access_key=AKIAJWVN5Z4FOFT7NLNA',
        'secret_key=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',
        'region=us-east-1',
      ],
      [],
    ],
    expected: {
      data: {
        access_key: 'AKIAJWVN5Z4FOFT7NLNA',
        secret_key: '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',
        region: 'us-east-1',
      },
      flags: {},
    },
  },
  {
    name: 'repeated data and a flag',
    input: [['allowed_domains=example.com', 'allowed_domains=foo.example.com'], ['-wrap-ttl=2h']],
    expected: {
      data: {
        allowed_domains: ['example.com', 'foo.example.com'],
      },
      flags: {
        wrapTTL: '2h',
      },
    },
  },
  {
    name: 'data with more than one equals sign',
    input: [['foo=bar=baz', 'foo=baz=bop', 'some=value=val'], []],
    expected: {
      data: {
        foo: ['bar=baz', 'baz=bop'],
        some: 'value=val',
      },
      flags: {},
    },
  },
];

testExtractCases.forEach(function(testCase) {
  test(`#extractDataAndFlags: ${testCase.name}`, function(assert) {
    let { data, flags } = extractDataAndFlags(...testCase.input);
    assert.deepEqual(data, testCase.expected.data, 'has expected data');
    assert.deepEqual(flags, testCase.expected.flags, 'has expected flags');
  });
});
// Cases for logFromResponse. args are [response, path, method, flags];
// expectedData is the log entry ({ type, content }) the helper should emit.
// Covers the interplay of -field, -format=json, and -wrap-ttl flags.
let testResponseCases = [
  {
    name: 'write response, no content',
    args: [null, 'foo/bar', 'write', {}],
    expectedData: {
      type: 'success',
      content: 'Success! Data written to: foo/bar',
    },
  },
  {
    name: 'delete response, no content',
    args: [null, 'foo/bar', 'delete', {}],
    expectedData: {
      type: 'success',
      content: 'Success! Data deleted (if it existed) at: foo/bar',
    },
  },
  {
    name: 'write, with content',
    args: [{ data: { one: 'two' } }, 'foo/bar', 'write', {}],
    expectedData: {
      type: 'object',
      content: { one: 'two' },
    },
  },
  {
    name: 'with wrap-ttl flag',
    args: [{ wrap_info: { one: 'two' } }, 'foo/bar', 'read', { wrapTTL: '1h' }],
    expectedData: {
      type: 'object',
      content: { one: 'two' },
    },
  },
  {
    name: 'with -format=json flag and wrap-ttl flag',
    args: [{ foo: 'bar', wrap_info: { one: 'two' } }, 'foo/bar', 'read', { format: 'json', wrapTTL: '1h' }],
    expectedData: {
      type: 'json',
      content: { foo: 'bar', wrap_info: { one: 'two' } },
    },
  },
  {
    name: 'with -format=json and -field flags',
    args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', { format: 'json', field: 'one' }],
    expectedData: {
      type: 'json',
      content: 'two',
    },
  },
  {
    name: 'with -format=json and -field, and -wrap-ttl flags',
    args: [
      { foo: 'bar', wrap_info: { one: 'two' } },
      'foo/bar',
      'read',
      { format: 'json', wrapTTL: '1h', field: 'one' },
    ],
    expectedData: {
      type: 'json',
      content: 'two',
    },
  },
  {
    name: 'with string field flag and wrap-ttl flag',
    args: [{ foo: 'bar', wrap_info: { one: 'two' } }, 'foo/bar', 'read', { field: 'one', wrapTTL: '1h' }],
    expectedData: {
      type: 'text',
      content: 'two',
    },
  },
  {
    name: 'with object field flag and wrap-ttl flag',
    args: [
      { foo: 'bar', wrap_info: { one: { two: 'three' } } },
      'foo/bar',
      'read',
      { field: 'one', wrapTTL: '1h' },
    ],
    expectedData: {
      type: 'object',
      content: { two: 'three' },
    },
  },
  {
    name: 'with response data and string field flag',
    args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', { field: 'one', wrapTTL: '1h' }],
    expectedData: {
      type: 'text',
      content: 'two',
    },
  },
  {
    name: 'with response data and object field flag ',
    args: [
      { foo: 'bar', data: { one: { two: 'three' } } },
      'foo/bar',
      'read',
      { field: 'one', wrapTTL: '1h' },
    ],
    expectedData: {
      type: 'object',
      content: { two: 'three' },
    },
  },
  {
    name: 'response with data',
    args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', {}],
    expectedData: {
      type: 'object',
      content: { one: 'two' },
    },
  },
  {
    name: 'with response data, field flag, and field missing',
    args: [{ foo: 'bar', data: { one: 'two' } }, 'foo/bar', 'read', { field: 'foo' }],
    expectedData: {
      type: 'error',
      content: 'Field "foo" not present in secret',
    },
  },
  {
    name: 'with response data and auth block',
    args: [{ data: { one: 'two' }, auth: { three: 'four' } }, 'auth/token/create', 'write', {}],
    expectedData: {
      type: 'object',
      content: { three: 'four' },
    },
  },
  {
    name: 'with -field and -format with an object field',
    args: [{ data: { one: { three: 'two' } } }, 'sys/mounts', 'read', { field: 'one', format: 'json' }],
    expectedData: {
      type: 'json',
      content: { three: 'two' },
    },
  },
  {
    name: 'with -field and -format with a string field',
    args: [{ data: { one: 'two' } }, 'sys/mounts', 'read', { field: 'one', format: 'json' }],
    expectedData: {
      type: 'json',
      content: 'two',
    },
  },
];

testResponseCases.forEach(function(testCase) {
  test(`#logFromResponse: ${testCase.name}`, function(assert) {
    let data = logFromResponse(...testCase.args);
    assert.deepEqual(data, testCase.expectedData);
  });
});
// Cases for logFromError: args are [errorObject, path, method]. The verb in
// the message should match the method, and any API error strings are appended
// under an "Errors:" section.
let testErrorCases = [
  {
    name: 'AdapterError write',
    args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'write'],
    expectedContent: 'Error writing to: sys/foo.\nURL: v1/sys/foo\nCode: 404',
  },
  {
    name: 'AdapterError read',
    args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'read'],
    expectedContent: 'Error reading from: sys/foo.\nURL: v1/sys/foo\nCode: 404',
  },
  {
    name: 'AdapterError list',
    args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'list'],
    expectedContent: 'Error listing: sys/foo.\nURL: v1/sys/foo\nCode: 404',
  },
  {
    name: 'AdapterError delete',
    args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: [{}] }, 'sys/foo', 'delete'],
    expectedContent: 'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404',
  },
  {
    name: 'VaultError single error',
    args: [{ httpStatus: 404, path: 'v1/sys/foo', errors: ['no client token'] }, 'sys/foo', 'delete'],
    expectedContent: 'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404\nErrors:\n no client token',
  },
  {
    name: 'VaultErrors multiple errors',
    args: [
      { httpStatus: 404, path: 'v1/sys/foo', errors: ['no client token', 'this is an error'] },
      'sys/foo',
      'delete',
    ],
    expectedContent:
      'Error deleting at: sys/foo.\nURL: v1/sys/foo\nCode: 404\nErrors:\n no client token\n this is an error',
  },
];

testErrorCases.forEach(function(testCase) {
  test(`#logFromError: ${testCase.name}`, function(assert) {
    let data = logFromError(...testCase.args);
    assert.deepEqual(data, { type: 'error', content: testCase.expectedContent }, 'returns the expected data');
  });
});
// Cases for logErrorFromInput: each invalid input (missing path, write with
// no data and no -force) should yield an error-typed log entry with a
// helpful message.
const testCommandCases = [
  {
    name: 'errors when command does not include a path',
    args: [],
    expectedContent: 'A path is required to make a request.',
  },
  {
    name: 'errors when write command does not include data and does not have force tag',
    args: ['foo/bar', 'write', {}, []],
    expectedContent: 'Must supply data or use -force',
  },
];

testCommandCases.forEach(function(testCase) {
  test(`#logErrorFromInput: ${testCase.name}`, function(assert) {
    let data = logErrorFromInput(...testCase.args);
    // Fixed typo in the assertion message: "pcorrect" -> "correct".
    assert.deepEqual(data, { type: 'error', content: testCase.expectedContent }, 'returns the correct data');
  });
});

View File

@ -0,0 +1,94 @@
import { moduleFor, test } from 'ember-qunit';
import { sanitizePath, ensureTrailingSlash } from 'vault/services/console';
import sinon from 'sinon';

// Unit tests for the console service, which backs the Browser CLI by
// translating read/write/list/delete commands into adapter ajax calls.
moduleFor('service:console', 'Unit | Service | console', {
  needs: ['service:auth'],
  beforeEach() {},
  afterEach() {},
});
// sanitizePath should strip surrounding whitespace and any run of leading or
// trailing slashes, leaving the interior path untouched.
test('#sanitizePath', function(assert) {
  // Fixed typo in the assertion message: "slashs" -> "slashes".
  assert.equal(sanitizePath(' /foo/bar/baz/ '), 'foo/bar/baz', 'removes spaces and slashes on either side');
  assert.equal(sanitizePath('//foo/bar/baz/'), 'foo/bar/baz', 'removes more than one slash');
});
// Paths used for LIST requests must end in a slash; one is appended only
// when it is missing.
test('#ensureTrailingSlash', function(assert) {
  let appended = ensureTrailingSlash('foo/bar');
  assert.equal(appended, 'foo/bar/', 'adds trailing slash');
  let untouched = ensureTrailingSlash('baz/');
  assert.equal(untouched, 'baz/', 'keeps trailing slash if there is one');
});
// CRUD cases for the console service: each entry names the service method,
// the arguments passed (path, data, optional wrapTTL), and the URL, HTTP
// verb, and ajax options the stubbed adapter should receive. Note list calls
// get a trailing slash and `data: { list: true }`.
let testCases = [
  {
    method: 'read',
    args: ['/sys/health', {}],
    expectedURL: 'sys/health',
    expectedVerb: 'GET',
    expectedOptions: { data: undefined, wrapTTL: undefined },
  },
  {
    method: 'read',
    args: ['/secrets/foo/bar', {}, '30m'],
    expectedURL: 'secrets/foo/bar',
    expectedVerb: 'GET',
    expectedOptions: { data: undefined, wrapTTL: '30m' },
  },
  {
    method: 'write',
    args: ['aws/roles/my-other-role', { arn: 'arn=arn:aws:iam::aws:policy/AmazonEC2ReadOnlyAccess' }],
    expectedURL: 'aws/roles/my-other-role',
    expectedVerb: 'POST',
    expectedOptions: {
      data: { arn: 'arn=arn:aws:iam::aws:policy/AmazonEC2ReadOnlyAccess' },
      wrapTTL: undefined,
    },
  },
  {
    method: 'list',
    args: ['secret/mounts', {}],
    expectedURL: 'secret/mounts/',
    expectedVerb: 'GET',
    expectedOptions: { data: { list: true }, wrapTTL: undefined },
  },
  {
    method: 'list',
    args: ['secret/mounts', {}, '1h'],
    expectedURL: 'secret/mounts/',
    expectedVerb: 'GET',
    expectedOptions: { data: { list: true }, wrapTTL: '1h' },
  },
  {
    method: 'delete',
    args: ['secret/secrets/kv'],
    expectedURL: 'secret/secrets/kv',
    expectedVerb: 'DELETE',
    expectedOptions: { data: undefined, wrapTTL: undefined },
  },
];
// Stub the adapter so each CRUD call's ajax invocation can be captured and
// its URL, HTTP verb, and options verified without a real API.
test('it reads, writes, lists, deletes', function(assert) {
  let ajax = sinon.stub();
  let uiConsole = this.subject({
    adapter() {
      return {
        // Identity buildURL keeps expected URLs independent of adapter config.
        buildURL(url) {
          return url;
        },
        ajax,
      };
    },
  });

  testCases.forEach(testCase => {
    uiConsole[testCase.method](...testCase.args);
    // Inspect the most recent stubbed ajax call for this method invocation.
    let [url, verb, options] = ajax.lastCall.args;
    assert.equal(url, testCase.expectedURL, `${testCase.method}: uses trimmed passed url`);
    assert.equal(verb, testCase.expectedVerb, `${testCase.method}: uses the correct verb`);
    assert.deepEqual(options, testCase.expectedOptions, `${testCase.method}: uses the correct options`);
  });
});

File diff suppressed because it is too large Load Diff

View File

@ -319,9 +319,11 @@ func (i *IdentityStore) handleAliasUpdateCommon(req *logical.Request, d *framewo
// Update the fields
alias.Name = aliasName
alias.Metadata = aliasMetadata
alias.MountType = mountValidationResp.MountType
alias.MountAccessor = mountValidationResp.MountAccessor
alias.MountPath = mountValidationResp.MountPath
// Explicitly set to empty as in the past we incorrectly saved it
alias.MountPath = ""
alias.MountType = ""
// Set the canonical ID in the alias index. This should be done after
// sanitizing entity.
@ -377,13 +379,16 @@ func (i *IdentityStore) handleAliasReadCommon(alias *identity.Alias) (*logical.R
respData := map[string]interface{}{}
respData["id"] = alias.ID
respData["canonical_id"] = alias.CanonicalID
respData["mount_type"] = alias.MountType
respData["mount_accessor"] = alias.MountAccessor
respData["mount_path"] = alias.MountPath
respData["metadata"] = alias.Metadata
respData["name"] = alias.Name
respData["merged_from_canonical_ids"] = alias.MergedFromCanonicalIDs
if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil {
respData["mount_path"] = mountValidationResp.MountPath
respData["mount_type"] = mountValidationResp.MountType
}
// Convert protobuf timestamp into RFC3339 format
respData["creation_time"] = ptypes.TimestampString(alias.CreationTime)
respData["last_update_time"] = ptypes.TimestampString(alias.LastUpdateTime)
@ -416,15 +421,46 @@ func (i *IdentityStore) pathAliasIDList() framework.OperationFunc {
}
var aliasIDs []string
aliasInfo := map[string]interface{}{}
type mountInfo struct {
MountType string
MountPath string
}
mountAccessorMap := map[string]mountInfo{}
for {
raw := iter.Next()
if raw == nil {
break
}
aliasIDs = append(aliasIDs, raw.(*identity.Alias).ID)
alias := raw.(*identity.Alias)
aliasIDs = append(aliasIDs, alias.ID)
aliasInfoEntry := map[string]interface{}{
"name": alias.Name,
"canonical_id": alias.CanonicalID,
"mount_accessor": alias.MountAccessor,
}
mi, ok := mountAccessorMap[alias.MountAccessor]
if ok {
aliasInfoEntry["mount_type"] = mi.MountType
aliasInfoEntry["mount_path"] = mi.MountPath
} else {
mi = mountInfo{}
if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil {
mi.MountType = mountValidationResp.MountType
mi.MountPath = mountValidationResp.MountPath
aliasInfoEntry["mount_type"] = mi.MountType
aliasInfoEntry["mount_path"] = mi.MountPath
}
mountAccessorMap[alias.MountAccessor] = mi
}
aliasInfo[alias.ID] = aliasInfoEntry
}
return logical.ListResponse(aliasIDs), nil
return logical.ListResponseWithInfo(aliasIDs, aliasInfo), nil
}
}

View File

@ -8,6 +8,7 @@ import (
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/vault"
"github.com/hashicorp/vault/builtin/credential/github"
credLdap "github.com/hashicorp/vault/builtin/credential/ldap"
)
@ -60,3 +61,161 @@ func TestIdentityStore_EntityAliasLocalMount(t *testing.T) {
t.Fatalf("expected error since mount is local")
}
}
// TestIdentityStore_ListAlias enables a github auth mount, creates an entity
// and two entity-aliases against that mount, then verifies that both
// LIST identity/entity-alias/id and LIST identity/entity/id return the
// expected keys together with populated key_info metadata (alias
// name/mount_accessor, and for entities the nested alias records including
// mount_path/mount_type resolved from the accessor).
func TestIdentityStore_ListAlias(t *testing.T) {
coreConfig := &vault.CoreConfig{
CredentialBackends: map[string]logical.Factory{
"github": github.Factory,
},
}
cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{
HandlerFunc: vaulthttp.Handler,
})
cluster.Start()
defer cluster.Cleanup()
core := cluster.Cores[0].Core
vault.TestWaitActive(t, core)
client := cluster.Cores[0].Client
err := client.Sys().EnableAuthWithOptions("github", &api.EnableAuthOptions{
Type: "github",
})
if err != nil {
t.Fatal(err)
}
// Look up the accessor of the github mount; aliases are keyed to it.
mounts, err := client.Sys().ListAuth()
if err != nil {
t.Fatal(err)
}
var githubAccessor string
for k, v := range mounts {
t.Logf("key: %v\nmount: %#v", k, *v)
if k == "github/" {
githubAccessor = v.Accessor
break
}
}
if githubAccessor == "" {
t.Fatal("did not find github accessor")
}
// Create an entity explicitly; its ID anchors the second alias below.
resp, err := client.Logical().Write("identity/entity", nil)
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
if resp == nil {
t.Fatalf("expected a non-nil response")
}
entityID := resp.Data["id"].(string)
// Create an alias
resp, err = client.Logical().Write("identity/entity-alias", map[string]interface{}{
"name": "testaliasname",
"mount_accessor": githubAccessor,
})
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
testAliasCanonicalID := resp.Data["canonical_id"].(string)
testAliasAliasID := resp.Data["id"].(string)
// Second alias, explicitly bound to the entity created above.
resp, err = client.Logical().Write("identity/entity-alias", map[string]interface{}{
"name": "entityalias",
"mount_accessor": githubAccessor,
"canonical_id": entityID,
})
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
entityAliasAliasID := resp.Data["id"].(string)
// List alias IDs and sanity-check the accompanying key_info map.
resp, err = client.Logical().List("identity/entity-alias/id")
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
keys := resp.Data["keys"].([]interface{})
if len(keys) != 2 {
t.Fatalf("bad: length of alias IDs listed; expected: 2, actual: %d", len(keys))
}
// Do some due diligence on the key info
aliasInfoRaw, ok := resp.Data["key_info"]
if !ok {
t.Fatal("expected key_info map in response")
}
aliasInfo := aliasInfoRaw.(map[string]interface{})
for _, keyRaw := range keys {
key := keyRaw.(string)
infoRaw, ok := aliasInfo[key]
if !ok {
t.Fatal("expected key info")
}
info := infoRaw.(map[string]interface{})
// The two aliases are distinguished by which canonical ID they carry.
currName := "entityalias"
if info["canonical_id"].(string) == testAliasCanonicalID {
currName = "testaliasname"
}
t.Logf("alias info: %#v", info)
switch {
case info["name"].(string) != currName:
t.Fatalf("bad name: %v", info["name"].(string))
case info["mount_accessor"].(string) != githubAccessor:
t.Fatalf("bad mount_path: %v", info["mount_accessor"].(string))
}
}
// Now do the same with entity info
resp, err = client.Logical().List("identity/entity/id")
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
keys = resp.Data["keys"].([]interface{})
if len(keys) != 2 {
t.Fatalf("bad: length of entity IDs listed; expected: 2, actual: %d", len(keys))
}
entityInfoRaw, ok := resp.Data["key_info"]
if !ok {
t.Fatal("expected key_info map in response")
}
// This is basically verifying that the entity has the alias in key_info
// that we expect to be tied to it, plus tests a value further down in it
// for fun
entityInfo := entityInfoRaw.(map[string]interface{})
for _, keyRaw := range keys {
key := keyRaw.(string)
infoRaw, ok := entityInfo[key]
if !ok {
t.Fatal("expected key info")
}
info := infoRaw.(map[string]interface{})
t.Logf("entity info: %#v", info)
// Each entity should own exactly one of the two aliases created above.
currAliasID := entityAliasAliasID
if key == testAliasCanonicalID {
currAliasID = testAliasAliasID
}
currAliases := info["aliases"].([]interface{})
if len(currAliases) != 1 {
t.Fatal("bad aliases length")
}
for _, v := range currAliases {
curr := v.(map[string]interface{})
switch {
case curr["id"].(string) != currAliasID:
t.Fatalf("bad alias id: %v", curr["id"])
case curr["mount_accessor"].(string) != githubAccessor:
t.Fatalf("bad mount accessor: %v", curr["mount_accessor"])
case curr["mount_path"].(string) != "auth/github/":
t.Fatalf("bad mount path: %v", curr["mount_path"])
case curr["mount_type"].(string) != "github":
t.Fatalf("bad mount type: %v", curr["mount_type"])
}
}
}
}

View File

@ -9,62 +9,6 @@ import (
"github.com/hashicorp/vault/logical"
)
// TestIdentityStore_ListAlias (unit-level variant) drives the identity store
// directly via HandleRequest: it creates an entity and two aliases on a
// github auth accessor, then checks that listing entity-alias/id returns
// exactly those two alias IDs.
func TestIdentityStore_ListAlias(t *testing.T) {
var err error
var resp *logical.Response
is, githubAccessor, _ := testIdentityStoreWithGithubAuth(t)
entityReq := &logical.Request{
Operation: logical.UpdateOperation,
Path: "entity",
}
resp, err = is.HandleRequest(context.Background(), entityReq)
if err != nil || (resp != nil && resp.IsError()) {
t.Fatalf("err:%v resp:%#v", err, resp)
}
if resp == nil {
t.Fatalf("expected a non-nil response")
}
entityID := resp.Data["id"].(string)
// Create an alias
aliasData := map[string]interface{}{
"name": "testaliasname",
"mount_accessor": githubAccessor,
}
aliasReq := &logical.Request{
Operation: logical.UpdateOperation,
Path: "entity-alias",
Data: aliasData,
}
resp, err = is.HandleRequest(context.Background(), aliasReq)
if err != nil || (resp != nil && resp.IsError()) {
t.Fatalf("err:%v resp:%#v", err, resp)
}
// Reuse the same request object for a second alias, this time bound to
// the entity created above.
aliasData["name"] = "entityalias"
aliasData["entity_id"] = entityID
resp, err = is.HandleRequest(context.Background(), aliasReq)
if err != nil || (resp != nil && resp.IsError()) {
t.Fatalf("err:%v resp:%#v", err, resp)
}
listReq := &logical.Request{
Operation: logical.ListOperation,
Path: "entity-alias/id",
}
resp, err = is.HandleRequest(context.Background(), listReq)
if err != nil || (resp != nil && resp.IsError()) {
t.Fatalf("err:%v resp:%#v", err, resp)
}
keys := resp.Data["keys"].([]string)
if len(keys) != 2 {
t.Fatalf("bad: length of alias IDs listed; expected: 2, actual: %d", len(keys))
}
}
// This test is required because MemDB does not take care of ensuring
// uniqueness of indexes that are marked unique.
func TestIdentityStore_AliasSameAliasNames(t *testing.T) {

View File

@ -459,14 +459,18 @@ func (i *IdentityStore) handleEntityReadCommon(entity *identity.Entity) (*logica
aliasMap := map[string]interface{}{}
aliasMap["id"] = alias.ID
aliasMap["canonical_id"] = alias.CanonicalID
aliasMap["mount_type"] = alias.MountType
aliasMap["mount_accessor"] = alias.MountAccessor
aliasMap["mount_path"] = alias.MountPath
aliasMap["metadata"] = alias.Metadata
aliasMap["name"] = alias.Name
aliasMap["merged_from_canonical_ids"] = alias.MergedFromCanonicalIDs
aliasMap["creation_time"] = ptypes.TimestampString(alias.CreationTime)
aliasMap["last_update_time"] = ptypes.TimestampString(alias.LastUpdateTime)
if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil {
aliasMap["mount_type"] = mountValidationResp.MountType
aliasMap["mount_path"] = mountValidationResp.MountPath
}
aliasesToReturn[aliasIdx] = aliasMap
}
@ -522,15 +526,56 @@ func (i *IdentityStore) pathEntityIDList() framework.OperationFunc {
}
var entityIDs []string
entityInfo := map[string]interface{}{}
type mountInfo struct {
MountType string
MountPath string
}
mountAccessorMap := map[string]mountInfo{}
for {
raw := iter.Next()
if raw == nil {
break
}
entityIDs = append(entityIDs, raw.(*identity.Entity).ID)
entity := raw.(*identity.Entity)
entityIDs = append(entityIDs, entity.ID)
entityInfoEntry := map[string]interface{}{
"name": entity.Name,
}
if len(entity.Aliases) > 0 {
aliasList := make([]interface{}, 0, len(entity.Aliases))
for _, alias := range entity.Aliases {
entry := map[string]interface{}{
"id": alias.ID,
"name": alias.Name,
"mount_accessor": alias.MountAccessor,
}
mi, ok := mountAccessorMap[alias.MountAccessor]
if ok {
entry["mount_type"] = mi.MountType
entry["mount_path"] = mi.MountPath
} else {
mi = mountInfo{}
if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil {
mi.MountType = mountValidationResp.MountType
mi.MountPath = mountValidationResp.MountPath
entry["mount_type"] = mi.MountType
entry["mount_path"] = mi.MountPath
}
mountAccessorMap[alias.MountAccessor] = mi
}
aliasList = append(aliasList, entry)
}
entityInfoEntry["aliases"] = aliasList
}
entityInfo[entity.ID] = entityInfoEntry
}
return logical.ListResponse(entityIDs), nil
return logical.ListResponseWithInfo(entityIDs, entityInfo), nil
}
}

View File

@ -210,8 +210,9 @@ func (i *IdentityStore) handleGroupAliasUpdateCommon(req *logical.Request, d *fr
}
group.Alias.Name = groupAliasName
group.Alias.MountType = mountValidationResp.MountType
group.Alias.MountAccessor = mountValidationResp.MountAccessor
// Explicitly correct for previous versions that persisted this
group.Alias.MountType = ""
err = i.sanitizeAndUpsertGroup(group, nil)
if err != nil {
@ -267,15 +268,46 @@ func (i *IdentityStore) pathGroupAliasIDList() framework.OperationFunc {
}
var groupAliasIDs []string
aliasInfo := map[string]interface{}{}
type mountInfo struct {
MountType string
MountPath string
}
mountAccessorMap := map[string]mountInfo{}
for {
raw := iter.Next()
if raw == nil {
break
}
groupAliasIDs = append(groupAliasIDs, raw.(*identity.Alias).ID)
alias := raw.(*identity.Alias)
groupAliasIDs = append(groupAliasIDs, alias.ID)
entry := map[string]interface{}{
"name": alias.Name,
"canonical_id": alias.CanonicalID,
"mount_accessor": alias.MountAccessor,
}
mi, ok := mountAccessorMap[alias.MountAccessor]
if ok {
entry["mount_type"] = mi.MountType
entry["mount_path"] = mi.MountPath
} else {
mi = mountInfo{}
if mountValidationResp := i.core.router.validateMountByAccessor(alias.MountAccessor); mountValidationResp != nil {
mi.MountType = mountValidationResp.MountType
mi.MountPath = mountValidationResp.MountPath
entry["mount_type"] = mi.MountType
entry["mount_path"] = mi.MountPath
}
mountAccessorMap[alias.MountAccessor] = mi
}
aliasInfo[alias.ID] = entry
}
return logical.ListResponse(groupAliasIDs), nil
return logical.ListResponseWithInfo(groupAliasIDs, aliasInfo), nil
}
}

View File

@ -331,15 +331,52 @@ func (i *IdentityStore) pathGroupIDList() framework.OperationFunc {
}
var groupIDs []string
groupInfo := map[string]interface{}{}
type mountInfo struct {
MountType string
MountPath string
}
mountAccessorMap := map[string]mountInfo{}
for {
raw := iter.Next()
if raw == nil {
break
}
groupIDs = append(groupIDs, raw.(*identity.Group).ID)
group := raw.(*identity.Group)
groupIDs = append(groupIDs, group.ID)
groupInfoEntry := map[string]interface{}{
"name": group.Name,
}
if group.Alias != nil {
entry := map[string]interface{}{
"id": group.Alias.ID,
"name": group.Alias.Name,
"mount_accessor": group.Alias.MountAccessor,
}
mi, ok := mountAccessorMap[group.Alias.MountAccessor]
if ok {
entry["mount_type"] = mi.MountType
entry["mount_path"] = mi.MountPath
} else {
mi = mountInfo{}
if mountValidationResp := i.core.router.validateMountByAccessor(group.Alias.MountAccessor); mountValidationResp != nil {
mi.MountType = mountValidationResp.MountType
mi.MountPath = mountValidationResp.MountPath
entry["mount_type"] = mi.MountType
entry["mount_path"] = mi.MountPath
}
mountAccessorMap[group.Alias.MountAccessor] = mi
}
groupInfoEntry["alias"] = entry
}
groupInfo[group.ID] = groupInfoEntry
}
return logical.ListResponse(groupIDs), nil
return logical.ListResponseWithInfo(groupIDs, groupInfo), nil
}
}

View File

@ -8,9 +8,144 @@ import (
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/vault"
"github.com/hashicorp/vault/builtin/credential/github"
credLdap "github.com/hashicorp/vault/builtin/credential/ldap"
)
// TestIdentityStore_ListGroupAlias creates an external group and a group
// alias on a github auth mount, then verifies that LIST
// identity/group-alias/id and LIST identity/group/id each return the
// expected key plus key_info metadata (alias name/mount_accessor, and for
// groups the nested alias record with mount_path/mount_type).
func TestIdentityStore_ListGroupAlias(t *testing.T) {
coreConfig := &vault.CoreConfig{
CredentialBackends: map[string]logical.Factory{
"github": github.Factory,
},
}
cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{
HandlerFunc: vaulthttp.Handler,
})
cluster.Start()
defer cluster.Cleanup()
core := cluster.Cores[0].Core
vault.TestWaitActive(t, core)
client := cluster.Cores[0].Client
err := client.Sys().EnableAuthWithOptions("github", &api.EnableAuthOptions{
Type: "github",
})
if err != nil {
t.Fatal(err)
}
// Look up the accessor of the github mount; the group alias is keyed to it.
mounts, err := client.Sys().ListAuth()
if err != nil {
t.Fatal(err)
}
var githubAccessor string
for k, v := range mounts {
t.Logf("key: %v\nmount: %#v", k, *v)
if k == "github/" {
githubAccessor = v.Accessor
break
}
}
if githubAccessor == "" {
t.Fatal("did not find github accessor")
}
// Group aliases require an external group.
resp, err := client.Logical().Write("identity/group", map[string]interface{}{
"type": "external",
})
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
groupID := resp.Data["id"].(string)
resp, err = client.Logical().Write("identity/group-alias", map[string]interface{}{
"name": "groupalias",
"mount_accessor": githubAccessor,
"canonical_id": groupID,
})
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
aliasID := resp.Data["id"].(string)
// List the group alias IDs and sanity-check the key_info map.
resp, err = client.Logical().List("identity/group-alias/id")
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
keys := resp.Data["keys"].([]interface{})
if len(keys) != 1 {
t.Fatalf("bad: length of alias IDs listed; expected: 1, actual: %d", len(keys))
}
// Do some due diligence on the key info
aliasInfoRaw, ok := resp.Data["key_info"]
if !ok {
t.Fatal("expected key_info map in response")
}
aliasInfo := aliasInfoRaw.(map[string]interface{})
if len(aliasInfo) != 1 {
t.Fatalf("bad: length of alias ID key info; expected: 1, actual: %d", len(aliasInfo))
}
infoRaw, ok := aliasInfo[aliasID]
if !ok {
t.Fatal("expected to find alias ID in key info map")
}
info := infoRaw.(map[string]interface{})
t.Logf("alias info: %#v", info)
switch {
case info["name"].(string) != "groupalias":
t.Fatalf("bad name: %v", info["name"].(string))
case info["mount_accessor"].(string) != githubAccessor:
t.Fatalf("bad mount_accessor: %v", info["mount_accessor"].(string))
}
// Now do the same with group info
resp, err = client.Logical().List("identity/group/id")
if err != nil {
t.Fatalf("err:%v resp:%#v", err, resp)
}
keys = resp.Data["keys"].([]interface{})
if len(keys) != 1 {
t.Fatalf("bad: length of group IDs listed; expected: 1, actual: %d", len(keys))
}
groupInfoRaw, ok := resp.Data["key_info"]
if !ok {
t.Fatal("expected key_info map in response")
}
// This is basically verifying that the group has the alias in key_info
// that we expect to be tied to it, plus tests a value further down in it
// for fun
groupInfo := groupInfoRaw.(map[string]interface{})
if len(groupInfo) != 1 {
t.Fatalf("bad: length of group ID key info; expected: 1, actual: %d", len(groupInfo))
}
infoRaw, ok = groupInfo[groupID]
if !ok {
t.Fatal("expected key info")
}
info = infoRaw.(map[string]interface{})
t.Logf("group info: %#v", info)
alias := info["alias"].(map[string]interface{})
switch {
case alias["id"].(string) != aliasID:
t.Fatalf("bad alias id: %v", alias["id"])
case alias["mount_accessor"].(string) != githubAccessor:
t.Fatalf("bad mount accessor: %v", alias["mount_accessor"])
case alias["mount_path"].(string) != "auth/github/":
t.Fatalf("bad mount path: %v", alias["mount_path"])
case alias["mount_type"].(string) != "github":
t.Fatalf("bad mount type: %v", alias["mount_type"])
}
}
// Testing the fix for GH-4351
func TestIdentityStore_ExternalGroupMembershipsAcrossMounts(t *testing.T) {
coreConfig := &vault.CoreConfig{

View File

@ -54,13 +54,6 @@ func aliasesTableSchema() *memdb.TableSchema {
Field: "CanonicalID",
},
},
"mount_type": &memdb.IndexSchema{
Name: "mount_type",
Unique: false,
Indexer: &memdb.StringFieldIndex{
Field: "MountType",
},
},
"factors": &memdb.IndexSchema{
Name: "factors",
Unique: true,
@ -205,13 +198,6 @@ func groupAliasesTableSchema() *memdb.TableSchema {
Field: "CanonicalID",
},
},
"mount_type": &memdb.IndexSchema{
Name: "mount_type",
Unique: false,
Indexer: &memdb.StringFieldIndex{
Field: "MountType",
},
},
"factors": &memdb.IndexSchema{
Name: "factors",
Unique: true,

View File

@ -935,7 +935,7 @@ func TestTokenStore_RevokeSelf(t *testing.T) {
t.Fatalf("err: %v\nresp: %#v", err, resp)
}
time.Sleep(200 * time.Millisecond)
time.Sleep(1000 * time.Millisecond)
lookup := []string{ent1.ID, ent2.ID, ent3.ID, ent4.ID}
for _, id := range lookup {

View File

@ -0,0 +1,373 @@
Mozilla Public License Version 2.0
==================================
1. Definitions
--------------
1.1. "Contributor"
means each individual or legal entity that creates, contributes to
the creation of, or owns Covered Software.
1.2. "Contributor Version"
means the combination of the Contributions of others (if any) used
by a Contributor and that particular Contributor's Contribution.
1.3. "Contribution"
means Covered Software of a particular Contributor.
1.4. "Covered Software"
means Source Code Form to which the initial Contributor has attached
the notice in Exhibit A, the Executable Form of such Source Code
Form, and Modifications of such Source Code Form, in each case
including portions thereof.
1.5. "Incompatible With Secondary Licenses"
means
(a) that the initial Contributor has attached the notice described
in Exhibit B to the Covered Software; or
(b) that the Covered Software was made available under the terms of
version 1.1 or earlier of the License, but not also under the
terms of a Secondary License.
1.6. "Executable Form"
means any form of the work other than Source Code Form.
1.7. "Larger Work"
means a work that combines Covered Software with other material, in
a separate file or files, that is not Covered Software.
1.8. "License"
means this document.
1.9. "Licensable"
means having the right to grant, to the maximum extent possible,
whether at the time of the initial grant or subsequently, any and
all of the rights conveyed by this License.
1.10. "Modifications"
means any of the following:
(a) any file in Source Code Form that results from an addition to,
deletion from, or modification of the contents of Covered
Software; or
(b) any new file in Source Code Form that contains any Covered
Software.
1.11. "Patent Claims" of a Contributor
means any patent claim(s), including without limitation, method,
process, and apparatus claims, in any patent Licensable by such
Contributor that would be infringed, but for the grant of the
License, by the making, using, selling, offering for sale, having
made, import, or transfer of either its Contributions or its
Contributor Version.
1.12. "Secondary License"
means either the GNU General Public License, Version 2.0, the GNU
Lesser General Public License, Version 2.1, the GNU Affero General
Public License, Version 3.0, or any later versions of those
licenses.
1.13. "Source Code Form"
means the form of the work preferred for making modifications.
1.14. "You" (or "Your")
means an individual or a legal entity exercising rights under this
License. For legal entities, "You" includes any entity that
controls, is controlled by, or is under common control with You. For
purposes of this definition, "control" means (a) the power, direct
or indirect, to cause the direction or management of such entity,
whether by contract or otherwise, or (b) ownership of more than
fifty percent (50%) of the outstanding shares or beneficial
ownership of such entity.
2. License Grants and Conditions
--------------------------------
2.1. Grants
Each Contributor hereby grants You a world-wide, royalty-free,
non-exclusive license:
(a) under intellectual property rights (other than patent or trademark)
Licensable by such Contributor to use, reproduce, make available,
modify, display, perform, distribute, and otherwise exploit its
Contributions, either on an unmodified basis, with Modifications, or
as part of a Larger Work; and
(b) under Patent Claims of such Contributor to make, use, sell, offer
for sale, have made, import, and otherwise transfer either its
Contributions or its Contributor Version.
2.2. Effective Date
The licenses granted in Section 2.1 with respect to any Contribution
become effective for each Contribution on the date the Contributor first
distributes such Contribution.
2.3. Limitations on Grant Scope
The licenses granted in this Section 2 are the only rights granted under
this License. No additional rights or licenses will be implied from the
distribution or licensing of Covered Software under this License.
Notwithstanding Section 2.1(b) above, no patent license is granted by a
Contributor:
(a) for any code that a Contributor has removed from Covered Software;
or
(b) for infringements caused by: (i) Your and any other third party's
modifications of Covered Software, or (ii) the combination of its
Contributions with other software (except as part of its Contributor
Version); or
(c) under Patent Claims infringed by Covered Software in the absence of
its Contributions.
This License does not grant any rights in the trademarks, service marks,
or logos of any Contributor (except as may be necessary to comply with
the notice requirements in Section 3.4).
2.4. Subsequent Licenses
No Contributor makes additional grants as a result of Your choice to
distribute the Covered Software under a subsequent version of this
License (see Section 10.2) or under the terms of a Secondary License (if
permitted under the terms of Section 3.3).
2.5. Representation
Each Contributor represents that the Contributor believes its
Contributions are its original creation(s) or it has sufficient rights
to grant the rights to its Contributions conveyed by this License.
2.6. Fair Use
This License is not intended to limit any rights You have under
applicable copyright doctrines of fair use, fair dealing, or other
equivalents.
2.7. Conditions
Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted
in Section 2.1.
3. Responsibilities
-------------------
3.1. Distribution of Source Form
All distribution of Covered Software in Source Code Form, including any
Modifications that You create or to which You contribute, must be under
the terms of this License. You must inform recipients that the Source
Code Form of the Covered Software is governed by the terms of this
License, and how they can obtain a copy of this License. You may not
attempt to alter or restrict the recipients' rights in the Source Code
Form.
3.2. Distribution of Executable Form
If You distribute Covered Software in Executable Form then:
(a) such Covered Software must also be made available in Source Code
Form, as described in Section 3.1, and You must inform recipients of
the Executable Form how they can obtain a copy of such Source Code
Form by reasonable means in a timely manner, at a charge no more
than the cost of distribution to the recipient; and
(b) You may distribute such Executable Form under the terms of this
License, or sublicense it under different terms, provided that the
license for the Executable Form does not attempt to limit or alter
the recipients' rights in the Source Code Form under this License.
3.3. Distribution of a Larger Work
You may create and distribute a Larger Work under terms of Your choice,
provided that You also comply with the requirements of this License for
the Covered Software. If the Larger Work is a combination of Covered
Software with a work governed by one or more Secondary Licenses, and the
Covered Software is not Incompatible With Secondary Licenses, this
License permits You to additionally distribute such Covered Software
under the terms of such Secondary License(s), so that the recipient of
the Larger Work may, at their option, further distribute the Covered
Software under the terms of either this License or such Secondary
License(s).
3.4. Notices
You may not remove or alter the substance of any license notices
(including copyright notices, patent notices, disclaimers of warranty,
or limitations of liability) contained within the Source Code Form of
the Covered Software, except that You may alter any license notices to
the extent required to remedy known factual inaccuracies.
3.5. Application of Additional Terms
You may choose to offer, and to charge a fee for, warranty, support,
indemnity or liability obligations to one or more recipients of Covered
Software. However, You may do so only on Your own behalf, and not on
behalf of any Contributor. You must make it absolutely clear that any
such warranty, support, indemnity, or liability obligation is offered by
You alone, and You hereby agree to indemnify every Contributor for any
liability incurred by such Contributor as a result of warranty, support,
indemnity or liability terms You offer. You may include additional
disclaimers of warranty and limitations of liability specific to any
jurisdiction.
4. Inability to Comply Due to Statute or Regulation
---------------------------------------------------
If it is impossible for You to comply with any of the terms of this
License with respect to some or all of the Covered Software due to
statute, judicial order, or regulation then You must: (a) comply with
the terms of this License to the maximum extent possible; and (b)
describe the limitations and the code they affect. Such description must
be placed in a text file included with all distributions of the Covered
Software under this License. Except to the extent prohibited by statute
or regulation, such description must be sufficiently detailed for a
recipient of ordinary skill to be able to understand it.
5. Termination
--------------
5.1. The rights granted under this License will terminate automatically
if You fail to comply with any of its terms. However, if You become
compliant, then the rights granted under this License from a particular
Contributor are reinstated (a) provisionally, unless and until such
Contributor explicitly and finally terminates Your grants, and (b) on an
ongoing basis, if such Contributor fails to notify You of the
non-compliance by some reasonable means prior to 60 days after You have
come back into compliance. Moreover, Your grants from a particular
Contributor are reinstated on an ongoing basis if such Contributor
notifies You of the non-compliance by some reasonable means, this is the
first time You have received notice of non-compliance with this License
from such Contributor, and You become compliant prior to 30 days after
Your receipt of the notice.
5.2. If You initiate litigation against any entity by asserting a patent
infringement claim (excluding declaratory judgment actions,
counter-claims, and cross-claims) alleging that a Contributor Version
directly or indirectly infringes any patent, then the rights granted to
You by any and all Contributors for the Covered Software under Section
2.1 of this License shall terminate.
5.3. In the event of termination under Sections 5.1 or 5.2 above, all
end user license agreements (excluding distributors and resellers) which
have been validly granted by You or Your distributors under this License
prior to termination shall survive termination.
************************************************************************
* *
* 6. Disclaimer of Warranty *
* ------------------------- *
* *
* Covered Software is provided under this License on an "as is" *
* basis, without warranty of any kind, either expressed, implied, or *
* statutory, including, without limitation, warranties that the *
* Covered Software is free of defects, merchantable, fit for a *
* particular purpose or non-infringing. The entire risk as to the *
* quality and performance of the Covered Software is with You. *
* Should any Covered Software prove defective in any respect, You *
* (not any Contributor) assume the cost of any necessary servicing, *
* repair, or correction. This disclaimer of warranty constitutes an *
* essential part of this License. No use of any Covered Software is *
* authorized under this License except under this disclaimer. *
* *
************************************************************************
************************************************************************
* *
* 7. Limitation of Liability *
* -------------------------- *
* *
* Under no circumstances and under no legal theory, whether tort *
* (including negligence), contract, or otherwise, shall any *
* Contributor, or anyone who distributes Covered Software as *
* permitted above, be liable to You for any direct, indirect, *
* special, incidental, or consequential damages of any character *
* including, without limitation, damages for lost profits, loss of *
* goodwill, work stoppage, computer failure or malfunction, or any *
* and all other commercial damages or losses, even if such party *
* shall have been informed of the possibility of such damages. This *
* limitation of liability shall not apply to liability for death or *
* personal injury resulting from such party's negligence to the *
* extent applicable law prohibits such limitation. Some *
* jurisdictions do not allow the exclusion or limitation of *
* incidental or consequential damages, so this exclusion and *
* limitation may not apply to You. *
* *
************************************************************************
8. Litigation
-------------
Any litigation relating to this License may be brought only in the
courts of a jurisdiction where the defendant maintains its principal
place of business and such litigation shall be governed by laws of that
jurisdiction, without reference to its conflict-of-law provisions.
Nothing in this Section shall prevent a party's ability to bring
cross-claims or counter-claims.
9. Miscellaneous
----------------
This License represents the complete agreement concerning the subject
matter hereof. If any provision of this License is held to be
unenforceable, such provision shall be reformed only to the extent
necessary to make it enforceable. Any law or regulation which provides
that the language of a contract shall be construed against the drafter
shall not be used to construe this License against a Contributor.
10. Versions of the License
---------------------------
10.1. New Versions
Mozilla Foundation is the license steward. Except as provided in Section
10.3, no one other than the license steward has the right to modify or
publish new versions of this License. Each version will be given a
distinguishing version number.
10.2. Effect of New Versions
You may distribute the Covered Software under the terms of the version
of the License under which You originally received the Covered Software,
or under the terms of any subsequent version published by the license
steward.
10.3. Modified Versions
If you create software not governed by this License, and you want to
create a new license for such software, you may create and use a
modified version of this License if you rename the license and remove
any references to the name of the license steward (except to note that
such modified license differs from this License).
10.4. Distributing Source Code Form that is Incompatible With Secondary
Licenses
If You choose to distribute Source Code Form that is Incompatible With
Secondary Licenses under the terms of this version of the License, the
notice described in Exhibit B of this License must be attached.
Exhibit A - Source Code Form License Notice
-------------------------------------------
This Source Code Form is subject to the terms of the Mozilla Public
License, v. 2.0. If a copy of the MPL was not distributed with this
file, You can obtain one at http://mozilla.org/MPL/2.0/.
If it is not possible or desirable to put the notice in a particular
file, then You may include the notice in a location (such as a LICENSE
file in a relevant directory) where a recipient would be likely to look
for such a notice.
You may add additional accurate notices of copyright ownership.
Exhibit B - "Incompatible With Secondary Licenses" Notice
---------------------------------------------------------
This Source Code Form is "Incompatible With Secondary Licenses", as
defined by the Mozilla Public License, v. 2.0.

View File

@ -0,0 +1,80 @@
package plugin
import (
"context"
"sync"
"time"
"github.com/hashicorp/vault-plugin-secrets-ad/plugin/client"
"github.com/hashicorp/vault-plugin-secrets-ad/plugin/util"
"github.com/hashicorp/vault/helper/ldaputil"
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/logical/framework"
"github.com/patrickmn/go-cache"
)
// Factory constructs and configures the Active Directory secrets backend
// for the plugin framework.
//
// Fix: the error returned by Setup was previously discarded, which could
// hand a half-configured backend back to Vault; it is now propagated.
func Factory(ctx context.Context, conf *logical.BackendConfig) (logical.Backend, error) {
	backend := newBackend(util.NewSecretsClient(conf.Logger))
	if err := backend.Setup(ctx, conf); err != nil {
		return nil, err
	}
	return backend, nil
}
// newBackend wires up the AD secrets backend around the given secrets
// client: its framework paths, seal-wrapped storage prefixes, invalidation
// hook, and the role/credential caches.
func newBackend(client secretsClient) *backend {
	b := &backend{
		client:    client,
		roleCache: cache.New(roleCacheExpiration, roleCacheCleanup),
		credCache: cache.New(credCacheExpiration, credCacheCleanup),
	}

	// Paths served by this backend; config and credentials are also
	// registered for seal wrapping below.
	paths := []*framework.Path{
		b.pathConfig(),
		b.pathRoles(),
		b.pathListRoles(),
		b.pathCreds(),
	}

	b.Backend = &framework.Backend{
		Help:  backendHelp,
		Paths: paths,
		PathsSpecial: &logical.Paths{
			SealWrapStorage: []string{
				configPath,
				credPrefix,
			},
		},
		Invalidate:  b.Invalidate,
		BackendType: logical.TypeLogical,
	}
	return b
}
// backend implements the Active Directory secrets engine. It embeds
// logical.Backend (populated by newBackend) and carries the LDAP-backed
// secrets client plus in-memory caches.
type backend struct {
logical.Backend
// client performs the actual AD operations; an interface for testability.
client secretsClient
// roleCache holds role entries (expiration/cleanup set in newBackend).
roleCache *cache.Cache
// credCache holds generated credentials.
credCache *cache.Cache
// credLock presumably serializes credential operations — confirm at call sites.
credLock sync.Mutex
}
// Invalidate is the framework invalidation hook: when the given storage key
// changes externally, drop any cached role and credential state for it.
func (b *backend) Invalidate(ctx context.Context, key string) {
b.invalidateRole(ctx, key)
b.invalidateCred(ctx, key)
}
// secretsClient wraps the *util.SecretsClient in an interface to support
// testing with a fake implementation.
type secretsClient interface {
// Get returns the AD entry for the named service account.
Get(conf *ldaputil.ConfigEntry, serviceAccountName string) (*client.Entry, error)
// GetPasswordLastSet returns when the account's password was last set.
GetPasswordLastSet(conf *ldaputil.ConfigEntry, serviceAccountName string) (time.Time, error)
// UpdatePassword sets a new password on the service account.
UpdatePassword(conf *ldaputil.ConfigEntry, serviceAccountName string, newPassword string) error
}
// backendHelp is the top-level help text for this secrets engine
// (wired into framework.Backend.Help in newBackend).
const backendHelp = `
The Active Directory (AD) secrets engine rotates AD passwords dynamically,
and is designed for a high-load environment where many instances may be accessing
a shared password simultaneously. With a simple set up and a simple creds API,
it doesn't require instances to be manually registered in advance to gain access. 
As long as access has been granted to the creds path via a method like 
AppRole, they're available.

Passwords are lazily rotated based on preset TTLs and can have a length configured to meet 
your needs.
`

View File

@ -0,0 +1,140 @@
package client
import (
"fmt"
"math"
"strings"
"github.com/go-errors/errors"
"github.com/go-ldap/ldap"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/helper/ldaputil"
"golang.org/x/text/encoding/unicode"
)
// NewClient returns a Client that talks to Active Directory through
// Vault's ldaputil helper, using the provided logger.
func NewClient(logger hclog.Logger) *Client {
	return &Client{
		ldap: &ldaputil.Client{
			Logger: logger,
			LDAP:   ldaputil.NewLDAP(),
		},
	}
}

// Client performs LDAP searches and modifications against Active Directory.
type Client struct {
	ldap *ldaputil.Client
}
// Search runs an LDAP subtree search under cfg.UserDN for entries that
// match the given filters, returning them wrapped as *Entry values.
func (c *Client) Search(cfg *ldaputil.ConfigEntry, filters map[*Field][]string) ([]*Entry, error) {
	conn, err := c.ldap.DialLDAP(cfg)
	if err != nil {
		return nil, err
	}
	defer conn.Close()

	if err := bind(cfg, conn); err != nil {
		return nil, err
	}

	searchReq := &ldap.SearchRequest{
		BaseDN:    cfg.UserDN,
		Scope:     ldap.ScopeWholeSubtree,
		Filter:    toString(filters),
		SizeLimit: math.MaxInt32,
	}
	result, err := conn.Search(searchReq)
	if err != nil {
		return nil, err
	}

	entries := make([]*Entry, 0, len(result.Entries))
	for _, rawEntry := range result.Entries {
		entries = append(entries, NewEntry(rawEntry))
	}
	return entries, nil
}
// UpdateEntry locates exactly one entry matching filters and replaces the
// given attributes on it. Matching zero or multiple entries is an error,
// since modifying the wrong object could be destructive.
func (c *Client) UpdateEntry(cfg *ldaputil.ConfigEntry, filters map[*Field][]string, newValues map[*Field][]string) error {
	entries, err := c.Search(cfg, filters)
	if err != nil {
		return err
	}
	if len(entries) != 1 {
		// Bug fix: use %v, not %s — entries is a []*Entry whose element type
		// has no String method, so %s rendered it as %!s(...) noise.
		return fmt.Errorf("filter of %v doesn't match just one entry: %v", filters, entries)
	}

	replaceAttributes := make([]ldap.PartialAttribute, 0, len(newValues))
	for field, vals := range newValues {
		replaceAttributes = append(replaceAttributes, ldap.PartialAttribute{
			Type: field.String(),
			Vals: vals,
		})
	}

	modifyReq := &ldap.ModifyRequest{
		DN:                entries[0].DN,
		ReplaceAttributes: replaceAttributes,
	}

	conn, err := c.ldap.DialLDAP(cfg)
	if err != nil {
		return err
	}
	defer conn.Close()

	if err := bind(cfg, conn); err != nil {
		return err
	}
	return conn.Modify(modifyReq)
}
// UpdatePassword sets the account's password via an LDAP Modify of the
// unicodePwd attribute rather than the passwordModify extended operation,
// which Active Directory doesn't recognize.
// See https://github.com/go-ldap/ldap/issues/106
// for more.
func (c *Client) UpdatePassword(cfg *ldaputil.ConfigEntry, filters map[*Field][]string, newPassword string) error {
	pwdEncoded, err := formatPassword(newPassword)
	if err != nil {
		return err
	}
	return c.UpdateEntry(cfg, filters, map[*Field][]string{
		FieldRegistry.UnicodePassword: {pwdEncoded},
	})
}
// formatPassword encodes a password the way Active Directory requires:
// according to the MS docs, wrapped in double quotes and converted to
// UTF-16 little-endian with no BOM.
func formatPassword(original string) (string, error) {
	encoder := unicode.UTF16(unicode.LittleEndian, unicode.IgnoreBOM).NewEncoder()
	return encoder.String("\"" + original + "\"")
}
// toString renders filters as an LDAP filter string. Ex. "(cn=Ellen Jones)".
// NOTE(review): with more than one field=value pair this yields
// "(a=b,c=d)", which is not standard RFC 4515 filter syntax — confirm
// callers only ever pass a single pair (current callers do).
func toString(filters map[*Field][]string) string {
	pairs := make([]string, 0, len(filters))
	for field, values := range filters {
		for _, value := range values {
			pairs = append(pairs, field.String()+"="+value)
		}
	}
	return "(" + strings.Join(pairs, ",") + ")"
}
// bind authenticates the connection with the configured bind credentials.
// A bind password is required; the bind identity is UPN-style
// (binddn@upndomain) when a UPN domain is set, otherwise the plain bind DN.
func bind(cfg *ldaputil.ConfigEntry, conn ldaputil.Connection) error {
	switch {
	case cfg.BindPassword == "":
		return errors.New("unable to bind due to lack of configured password")
	case cfg.UPNDomain != "":
		return conn.Bind(fmt.Sprintf("%s@%s", ldaputil.EscapeLDAPValue(cfg.BindDN), cfg.UPNDomain), cfg.BindPassword)
	case cfg.BindDN != "":
		return conn.Bind(cfg.BindDN, cfg.BindPassword)
	default:
		return errors.New("must provide binddn or upndomain")
	}
}

View File

@ -0,0 +1,41 @@
package client
import (
"strings"
"github.com/go-ldap/ldap"
)
// NewEntry wraps a raw *ldap.Entry in an Active Directory-specific Entry
// that indexes recognized fields for convenient lookup while retaining
// all of the original information.
func NewEntry(ldapEntry *ldap.Entry) *Entry {
	fieldMap := make(map[string][]string)
	for _, attr := range ldapEntry.Attributes {
		// Attributes absent from the FieldRegistry are simply skipped.
		if field := FieldRegistry.Parse(attr.Name); field != nil {
			fieldMap[field.String()] = attr.Values
		}
	}
	return &Entry{fieldMap: fieldMap, Entry: ldapEntry}
}
// Entry is an *ldap.Entry augmented with a map of recognized AD fields.
type Entry struct {
	*ldap.Entry
	fieldMap map[string][]string
}

// Get returns the raw values for field and whether it was present.
func (e *Entry) Get(field *Field) ([]string, bool) {
	values, found := e.fieldMap[field.String()]
	return values, found
}

// GetJoined returns the field's values joined by commas, and whether the
// field was present at all.
func (e *Entry) GetJoined(field *Field) (string, bool) {
	if values, found := e.Get(field); found {
		return strings.Join(values, ","), true
	}
	return "", false
}

View File

@ -0,0 +1,113 @@
package client
import (
"reflect"
)
// FieldRegistry is designed to look and feel
// like an enum from another language like Python.
//
// Example: Accessing constants
//
//	FieldRegistry.AccountExpires
//	FieldRegistry.BadPasswordCount
//
// Example: Utility methods
//
//	FieldRegistry.List()
//	FieldRegistry.Parse("givenName")
var FieldRegistry = newFieldRegistry()

// newFieldRegistry walks the registry struct via reflection, instantiates
// a *Field for each pointer member from its `ldap` struct tag, and records
// every field in fieldList so List/Parse can enumerate them.
func newFieldRegistry() *fieldRegistry {
	reg := &fieldRegistry{}
	structVal := reflect.ValueOf(reg).Elem()
	structType := structVal.Type()

	for i := 0; i < structVal.NumField(); i++ {
		// Only the *Field members are registry entries; fieldList is not.
		if structVal.Field(i).Kind() != reflect.Ptr {
			continue
		}
		ldapField := &Field{structType.Field(i).Tag.Get("ldap")}
		structVal.Field(i).Set(reflect.ValueOf(ldapField))
		reg.fieldList = append(reg.fieldList, ldapField)
	}
	return reg
}
// fieldRegistry isn't currently intended to be an exhaustive list -
// there are more fields in ActiveDirectory. However, these are the ones
// that may be useful to Vault. Feel free to add to this list!
// Each member's `ldap` tag is the attribute's LDAP name; newFieldRegistry
// populates the pointers and fieldList from those tags at init time.
type fieldRegistry struct {
	AccountExpires              *Field `ldap:"accountExpires"`
	AdminCount                  *Field `ldap:"adminCount"`
	BadPasswordCount            *Field `ldap:"badPwdCount"`
	BadPasswordTime             *Field `ldap:"badPasswordTime"`
	CodePage                    *Field `ldap:"codePage"`
	CommonName                  *Field `ldap:"cn"`
	CountryCode                 *Field `ldap:"countryCode"`
	DisplayName                 *Field `ldap:"displayName"`
	DistinguishedName           *Field `ldap:"distinguishedName"`
	DomainComponent             *Field `ldap:"dc"`
	DomainName                  *Field `ldap:"dn"`
	DSCorePropogationData       *Field `ldap:"dSCorePropagationData"`
	GivenName                   *Field `ldap:"givenName"`
	GroupType                   *Field `ldap:"groupType"`
	Initials                    *Field `ldap:"initials"`
	InstanceType                *Field `ldap:"instanceType"`
	LastLogoff                  *Field `ldap:"lastLogoff"`
	LastLogon                   *Field `ldap:"lastLogon"`
	LastLogonTimestamp          *Field `ldap:"lastLogonTimestamp"`
	LockoutTime                 *Field `ldap:"lockoutTime"`
	LogonCount                  *Field `ldap:"logonCount"`
	MemberOf                    *Field `ldap:"memberOf"`
	Name                        *Field `ldap:"name"`
	ObjectCategory              *Field `ldap:"objectCategory"`
	ObjectClass                 *Field `ldap:"objectClass"`
	ObjectGUID                  *Field `ldap:"objectGUID"`
	ObjectSID                   *Field `ldap:"objectSid"`
	OrganizationalUnit          *Field `ldap:"ou"`
	PasswordLastSet             *Field `ldap:"pwdLastSet"`
	PrimaryGroupID              *Field `ldap:"primaryGroupID"`
	SAMAccountName              *Field `ldap:"sAMAccountName"`
	SAMAccountType              *Field `ldap:"sAMAccountType"`
	Surname                     *Field `ldap:"sn"`
	UnicodePassword             *Field `ldap:"unicodePwd"`
	UpdateSequenceNumberChanged *Field `ldap:"uSNChanged"`
	UpdateSequenceNumberCreated *Field `ldap:"uSNCreated"`
	UserAccountControl          *Field `ldap:"userAccountControl"`
	UserPrincipalName           *Field `ldap:"userPrincipalName"`
	WhenCreated                 *Field `ldap:"whenCreated"`
	WhenChanged                 *Field `ldap:"whenChanged"`

	// fieldList holds every populated *Field above, in declaration order.
	fieldList []*Field
}
// List returns every Field known to the registry.
func (r *fieldRegistry) List() []*Field {
	return r.fieldList
}

// Parse returns the registered Field whose LDAP name equals s, or nil if
// the registry doesn't know it.
func (r *fieldRegistry) Parse(s string) *Field {
	for _, field := range r.List() {
		if field.String() == s {
			return field
		}
	}
	return nil
}
// Field represents a single Active Directory attribute, identified by its
// LDAP attribute name (e.g. "givenName").
type Field struct {
	str string
}

// String returns the field's LDAP attribute name.
func (f *Field) String() string {
	return f.str
}

View File

@ -0,0 +1,43 @@
package client
import (
"strconv"
"time"
)
const (
	// Active Directory stores many timestamps as "ticks": 100-nanosecond
	// intervals since January 1, 1601 UTC.
	nanoSecondsPerSecond = 1000000000
	nanosInTick          = 100
	ticksPerSecond       = nanoSecondsPerSecond / nanosInTick
)

// ParseTicks parses dates represented as Active Directory LargeInts into times.
// Not all time fields are represented this way,
// so be sure to test that your particular time returns expected results.
// Some time fields represented as LargeInts include accountExpires, lastLogon, lastLogonTimestamp, and pwdLastSet.
// More: https://social.technet.microsoft.com/wiki/contents/articles/31135.active-directory-large-integer-attributes.aspx
func ParseTicks(ticks string) (time.Time, error) {
	tickCount, err := strconv.ParseInt(ticks, 10, 64)
	if err != nil {
		return time.Time{}, err
	}
	return TicksToTime(tickCount), nil
}

// TicksToTime converts an ActiveDirectory time in ticks to a time.
// This algorithm is summarized as:
//
// Many dates are saved in Active Directory as Large Integer values.
// These attributes represent dates as the number of 100-nanosecond intervals since 12:00 AM January 1, 1601.
// 100-nanosecond intervals, equal to 0.0000001 seconds, are also called ticks.
// Dates in Active Directory are always saved in Coordinated Universal Time, or UTC.
// More: https://social.technet.microsoft.com/wiki/contents/articles/31135.active-directory-large-integer-attributes.aspx
//
// If we directly follow the above algorithm we encounter time.Duration limits of 290 years and int overflow issues.
// Thus below, we carefully sidestep those by splitting the ticks into
// whole seconds plus a sub-second remainder before building the time.
func TicksToTime(ticks int64) time.Time {
	// Unix seconds at the AD epoch (1601-01-01 UTC); a negative number.
	originSeconds := time.Date(1601, time.January, 1, 0, 0, 0, 0, time.UTC).Unix()
	wholeSeconds := ticks / ticksPerSecond
	remainderNanos := (ticks % ticksPerSecond) * nanosInTick
	return time.Unix(originSeconds+wholeSeconds, remainderNanos).UTC()
}

View File

@ -0,0 +1,10 @@
package plugin
import (
"github.com/hashicorp/vault/helper/ldaputil"
)
// configuration is the engine's stored config: password generation/TTL
// settings alongside the Active Directory connection settings.
type configuration struct {
	PasswordConf *passwordConf
	ADConf       *ldaputil.ConfigEntry
}

View File

@ -0,0 +1,15 @@
package plugin
// passwordConf holds the password-related engine settings: the default and
// maximum TTLs (in seconds) and the generated password length.
type passwordConf struct {
	TTL    int `json:"ttl"`
	MaxTTL int `json:"max_ttl"`
	Length int `json:"length"`
}

// Map flattens the config for inclusion in API responses.
func (c *passwordConf) Map() map[string]interface{} {
	m := make(map[string]interface{}, 3)
	m["ttl"] = c.TTL
	m["max_ttl"] = c.MaxTTL
	m["length"] = c.Length
	return m
}

View File

@ -0,0 +1,201 @@
package plugin
import (
"context"
"errors"
"fmt"
"github.com/hashicorp/vault-plugin-secrets-ad/plugin/util"
"github.com/hashicorp/vault/helper/ldaputil"
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/logical/framework"
)
const (
	// configPath is the API path; configStorageKey is where the config lives
	// in the backend's storage.
	configPath       = "config"
	configStorageKey = "config"

	// This length is arbitrarily chosen but should work for
	// most Active Directory minimum and maximum length settings.
	// A bit tongue-in-cheek since programmers love their base-2 exponents.
	defaultPasswordLength = 64

	// NOTE(review): defaultTLSVersion is not referenced in this file —
	// confirm it's consumed elsewhere before relying on it.
	defaultTLSVersion = "tls12"
)
// readConfig loads the engine configuration from storage. It returns
// (nil, nil) when no config has been written yet.
func (b *backend) readConfig(ctx context.Context, storage logical.Storage) (*configuration, error) {
	entry, err := storage.Get(ctx, configStorageKey)
	if err != nil || entry == nil {
		return nil, err
	}
	config := &configuration{&passwordConf{}, &ldaputil.ConfigEntry{}}
	if err := entry.DecodeJSON(config); err != nil {
		return nil, err
	}
	return config, nil
}
// pathConfig routes the "config" endpoint to its update/read/delete
// handlers, with fields shared between ldaputil and this engine.
func (b *backend) pathConfig() *framework.Path {
	return &framework.Path{
		Pattern: configPath,
		Fields:  b.configFields(),
		Callbacks: map[logical.Operation]framework.OperationFunc{
			logical.UpdateOperation: b.configUpdateOperation,
			logical.ReadOperation:   b.configReadOperation,
			logical.DeleteOperation: b.configDeleteOperation,
		},
		HelpSynopsis:    configHelpSynopsis,
		HelpDescription: configHelpDescription,
	}
}
// configFields extends ldaputil's shared connection fields with the
// password-specific settings this engine adds.
func (b *backend) configFields() map[string]*framework.FieldSchema {
	fields := ldaputil.ConfigFields()
	for name, schema := range map[string]*framework.FieldSchema{
		"ttl": {
			Type:        framework.TypeDurationSecond,
			Description: "In seconds, the default password time-to-live.",
		},
		"max_ttl": {
			Type:        framework.TypeDurationSecond,
			Description: "In seconds, the maximum password time-to-live.",
		},
		"length": {
			Type:        framework.TypeInt,
			Default:     defaultPasswordLength,
			Description: "The desired length of passwords that Vault generates.",
		},
	} {
		fields[name] = schema
	}
	return fields
}
// configUpdateOperation validates and persists the engine configuration:
// the AD connection settings plus the password TTL/length options.
func (b *backend) configUpdateOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) {
	// Build and validate the ldap conf.
	activeDirectoryConf, err := ldaputil.NewConfigEntry(fieldData)
	if err != nil {
		return nil, err
	}
	if err := activeDirectoryConf.Validate(); err != nil {
		return nil, err
	}

	// Build the password conf, defaulting TTLs to the system lease values.
	ttl := fieldData.Get("ttl").(int)
	if ttl == 0 {
		ttl = int(b.System().DefaultLeaseTTL().Seconds())
	}
	maxTTL := fieldData.Get("max_ttl").(int)
	if maxTTL == 0 {
		maxTTL = int(b.System().MaxLeaseTTL().Seconds())
	}
	length := fieldData.Get("length").(int)

	switch {
	case ttl > maxTTL:
		return nil, errors.New("ttl must be smaller than or equal to max_ttl")
	case ttl < 1:
		return nil, errors.New("ttl must be positive")
	case maxTTL < 1:
		return nil, errors.New("max_ttl must be positive")
	case length < util.MinimumPasswordLength:
		return nil, fmt.Errorf("minimum password length is %d for sufficient complexity to be secure, though Vault recommends a higher length", util.MinimumPasswordLength)
	}

	config := &configuration{
		PasswordConf: &passwordConf{
			TTL:    ttl,
			MaxTTL: maxTTL,
			Length: length,
		},
		ADConf: activeDirectoryConf,
	}
	entry, err := logical.StorageEntryJSON(configStorageKey, config)
	if err != nil {
		return nil, err
	}
	if err := req.Storage.Put(ctx, entry); err != nil {
		return nil, err
	}

	// Respond with a 204.
	return nil, nil
}
// configReadOperation returns the stored configuration, or nil if unset.
func (b *backend) configReadOperation(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) {
	config, err := b.readConfig(ctx, req.Storage)
	if err != nil {
		return nil, err
	}
	if config == nil {
		return nil, nil
	}

	// NOTE:
	// "password" is intentionally not returned by this endpoint,
	// as we lean away from returning sensitive information unless it's absolutely necessary.
	// Also, we don't return the full ADConf here because not all parameters are used by this engine.
	data := map[string]interface{}{
		"url":             config.ADConf.Url,
		"starttls":        config.ADConf.StartTLS,
		"insecure_tls":    config.ADConf.InsecureTLS,
		"certificate":     config.ADConf.Certificate,
		"binddn":          config.ADConf.BindDN,
		"userdn":          config.ADConf.UserDN,
		"upndomain":       config.ADConf.UPNDomain,
		"tls_min_version": config.ADConf.TLSMinVersion,
		"tls_max_version": config.ADConf.TLSMaxVersion,
	}
	for k, v := range config.PasswordConf.Map() {
		data[k] = v
	}
	return &logical.Response{Data: data}, nil
}
// configDeleteOperation removes the stored config, responding with a 204.
func (b *backend) configDeleteOperation(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) {
	err := req.Storage.Delete(ctx, configStorageKey)
	if err != nil {
		return nil, err
	}
	return nil, nil
}
// Help text for the config endpoint, surfaced via the framework.Path above.
const (
	configHelpSynopsis = `
Configure the AD server to connect to, along with password options.
`
	configHelpDescription = `
This endpoint allows you to configure the AD server to connect to and its
configuration options. When you add, update, or delete a config, it takes
immediate effect on all subsequent actions. It does not apply itself to roles
or creds added in the past.

The AD URL can use either the "ldap://" or "ldaps://" schema. In the former
case, an unencrypted connection will be made with a default port of 389, unless
the "starttls" parameter is set to true, in which case TLS will be used. In the
latter case, a SSL connection will be established with a default port of 636.

## A NOTE ON ESCAPING

It is up to the administrator to provide properly escaped DNs. This includes
the user DN, bind DN for search, and so on.

The only DN escaping performed by this backend is on usernames given at login
time when they are inserted into the final bind DN, and uses escaping rules
defined in RFC 4514.

Additionally, Active Directory has escaping rules that differ slightly from the
RFC; in particular it requires escaping of '#' regardless of position in the DN
(the RFC only requires it to be escaped when it is the first character), and
'=', which the RFC indicates can be escaped with a backslash, but does not
contain in its set of required escapes. If you are using Active Directory and
these appear in your usernames, please ensure that they are escaped, in
addition to being properly escaped in your configured DNs.

For reference, see https://www.ietf.org/rfc/rfc4514.txt and
http://social.technet.microsoft.com/wiki/contents/articles/5312.active-directory-characters-to-escape.aspx
`
)

View File

@ -0,0 +1,216 @@
package plugin
import (
"context"
"fmt"
"strings"
"time"
"github.com/go-errors/errors"
"github.com/hashicorp/vault-plugin-secrets-ad/plugin/util"
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/logical/framework"
)
const (
	// credPrefix is the API path prefix; storageKey is the storage bucket
	// under which each role's cred is stored as storageKey/<roleName>.
	credPrefix = "creds/"
	storageKey = "creds"

	// Since Active Directory offers eventual consistency, in testing we found that sometimes
	// Active Directory returned "password last set" times that were _later_ than our own,
	// even though ours were captured after synchronously completing a password update operation.
	//
	// An example we captured was:
	//   last_vault_rotation     2018-04-18T22:29:57.385454779Z
	//   password_last_set       2018-04-18T22:29:57.3902786Z
	//
	// Thus we add a short time buffer when checking whether anyone _else_ updated the AD password
	// since Vault last rotated it.
	passwordLastSetBuffer = time.Second

	// Since password TTL can be set to as low as 1 second,
	// we can't cache passwords for an entire second.
	credCacheCleanup    = time.Second / 3
	credCacheExpiration = time.Second / 2
)
// deleteCred fulfills the DeleteWatcher interface in roles.
// It lets the roleHandler tell us a role was deleted so its associated
// creds are removed too, from both storage and the cred cache.
func (b *backend) deleteCred(ctx context.Context, storage logical.Storage, roleName string) error {
	err := storage.Delete(ctx, storageKey+"/"+roleName)
	if err != nil {
		return err
	}
	b.credCache.Delete(roleName)
	return nil
}
// invalidateCred evicts a role's cred from the cache when its storage key
// was modified elsewhere; keys outside the creds/ prefix are ignored.
func (b *backend) invalidateCred(ctx context.Context, key string) {
	if !strings.HasPrefix(key, credPrefix) {
		return
	}
	b.credCache.Delete(strings.TrimPrefix(key, credPrefix))
}
// pathCreds routes creds/<name> reads to credReadOperation.
func (b *backend) pathCreds() *framework.Path {
	return &framework.Path{
		Pattern: credPrefix + framework.GenericNameRegex("name"),
		Fields: map[string]*framework.FieldSchema{
			"name": {
				Type:        framework.TypeString,
				Description: "Name of the role",
			},
		},
		Callbacks: map[logical.Operation]framework.OperationFunc{
			logical.ReadOperation: b.credReadOperation,
		},
		HelpSynopsis:    credHelpSynopsis,
		HelpDescription: credHelpDescription,
	}
}
// credReadOperation returns the username and current (and, when known,
// previous) password for a role, rotating the password first when:
//   - Vault has never rotated it (LastVaultRotation is unset),
//   - it was changed in AD after our last rotation (plus a small buffer
//     for AD's eventual consistency), or
//   - it has outlived the role's TTL.
func (b *backend) credReadOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) {
	cred := make(map[string]interface{})

	roleName := fieldData.Get("name").(string)

	// We act upon quite a few things below that could be racy if not locked:
	// - Roles. If a new cred is created, the role is updated to include the new LastVaultRotation time,
	//   effecting role storage (and the role cache, but that's already thread-safe).
	// - Creds. New creds involve writing to cred storage and the cred cache (also already thread-safe).
	// Rather than setting read locks of different types, and upgrading them to write locks, let's keep complexity
	// low and use one simple mutex.
	b.credLock.Lock()
	defer b.credLock.Unlock()

	role, err := b.readRole(ctx, req.Storage, roleName)
	if err != nil {
		return nil, err
	}
	if role == nil {
		return nil, nil
	}

	var resp *logical.Response
	var respErr error
	var unset time.Time

	switch {
	case role.LastVaultRotation == unset:
		// We've never managed this cred before.
		// We need to rotate the password so Vault will know it.
		resp, respErr = b.generateAndReturnCreds(ctx, req.Storage, roleName, role, cred)

	case role.PasswordLastSet.After(role.LastVaultRotation.Add(passwordLastSetBuffer)):
		// Someone has manually rotated the password in Active Directory since we last rolled it.
		// We need to rotate it now so Vault will know it and be able to return it.
		resp, respErr = b.generateAndReturnCreds(ctx, req.Storage, roleName, role, cred)

	default:
		// Since we should know the last password, let's retrieve it now so we can return it with the new one.
		credIfc, found := b.credCache.Get(roleName)
		if found {
			cred = credIfc.(map[string]interface{})
		} else {
			entry, err := req.Storage.Get(ctx, storageKey+"/"+roleName)
			if err != nil {
				return nil, err
			}
			if entry == nil {
				// If the creds aren't in storage, but roles are and we've created creds before,
				// this is an unexpected state and something has gone wrong.
				// Let's be explicit and error about this.
				return nil, fmt.Errorf("should have the creds for %+v but they're not found", role)
			}
			if err := entry.DecodeJSON(&cred); err != nil {
				return nil, err
			}
			b.credCache.SetDefault(roleName, cred)
		}

		// Is the password too old?
		// If so, time for a new one!
		now := time.Now().UTC()
		shouldBeRolled := role.LastVaultRotation.Add(time.Duration(role.TTL) * time.Second) // already in UTC
		if now.After(shouldBeRolled) {
			resp, respErr = b.generateAndReturnCreds(ctx, req.Storage, roleName, role, cred)
		} else {
			resp = &logical.Response{
				Data: cred,
			}
		}
	}

	// Bug fix: this previously returned `err`, which is always nil by this
	// point, silently swallowing any rotation failure captured in respErr.
	if respErr != nil {
		return nil, respErr
	}
	if resp == nil {
		return nil, nil
	}
	return resp, nil
}
// generateAndReturnCreds rotates the role's AD password, records the
// rotation time on the role, caches and stores the new cred (carrying the
// previous password forward as last_password when known), and returns it.
func (b *backend) generateAndReturnCreds(ctx context.Context, storage logical.Storage, roleName string, role *backendRole, previousCred map[string]interface{}) (*logical.Response, error) {
	engineConf, err := b.readConfig(ctx, storage)
	if err != nil {
		return nil, err
	}
	if engineConf == nil {
		return nil, errors.New("the config is currently unset")
	}

	newPassword, err := util.GeneratePassword(engineConf.PasswordConf.Length)
	if err != nil {
		return nil, err
	}
	if err := b.client.UpdatePassword(engineConf.ADConf, role.ServiceAccountName, newPassword); err != nil {
		return nil, err
	}

	// Time recorded is in UTC for easier user comparison to AD's last rotated time, which is set to UTC by Microsoft.
	role.LastVaultRotation = time.Now().UTC()
	if err := b.writeRole(ctx, storage, roleName, role); err != nil {
		return nil, err
	}

	// Although a service account name is typically my_app@example.com,
	// the username it uses is just my_app, or everything before the @.
	// Bug fix: strings.Split never returns an empty slice, so the former
	// `len(fields) > 0` check could never fail; guard on an empty username
	// instead so a malformed service account name is actually reported.
	username := role.ServiceAccountName
	if at := strings.Index(username, "@"); at >= 0 {
		username = username[:at]
	}
	if username == "" {
		return nil, fmt.Errorf("unable to infer username from service account name: %s", role.ServiceAccountName)
	}

	cred := map[string]interface{}{
		"username":         username,
		"current_password": newPassword,
	}
	if previousCred["current_password"] != nil {
		cred["last_password"] = previousCred["current_password"]
	}

	// Cache and save the cred.
	entry, err := logical.StorageEntryJSON(storageKey+"/"+roleName, cred)
	if err != nil {
		return nil, err
	}
	if err := storage.Put(ctx, entry); err != nil {
		return nil, err
	}
	b.credCache.SetDefault(roleName, cred)

	return &logical.Response{
		Data: cred,
	}, nil
}
// Help text for the creds/<name> endpoint.
const (
	credHelpSynopsis = `
Retrieve a role's creds by role name.
`
	credHelpDescription = `
Read creds using a role's name to view the login, current password, and last password.
`
)

View File

@ -0,0 +1,254 @@
package plugin
import (
"context"
"fmt"
"strings"
"time"
"github.com/go-errors/errors"
"github.com/hashicorp/vault/logical"
"github.com/hashicorp/vault/logical/framework"
)
const (
	// rolePath/rolePrefix are API paths; roleStorageKey is the storage
	// bucket under which each role is stored as roleStorageKey/<roleName>.
	rolePath       = "roles"
	rolePrefix     = "roles/"
	roleStorageKey = "roles"

	// Role cache tuning; see readRole, which queries AD for pwdLastSet on
	// every cache miss.
	roleCacheCleanup    = time.Second / 2
	roleCacheExpiration = time.Second
)
// invalidateRole evicts a role from the cache when its storage key was
// modified elsewhere; keys outside the roles/ prefix are ignored.
func (b *backend) invalidateRole(ctx context.Context, key string) {
	if !strings.HasPrefix(key, rolePrefix) {
		return
	}
	b.roleCache.Delete(strings.TrimPrefix(key, rolePrefix))
}
// pathListRoles routes LIST roles/ to roleListOperation.
func (b *backend) pathListRoles() *framework.Path {
	return &framework.Path{
		Pattern: rolePrefix + "?$",
		Callbacks: map[logical.Operation]framework.OperationFunc{
			logical.ListOperation: b.roleListOperation,
		},
		HelpSynopsis:    pathListRolesHelpSyn,
		HelpDescription: pathListRolesHelpDesc,
	}
}
// pathRoles routes roles/<name> update/read/delete to their handlers.
func (b *backend) pathRoles() *framework.Path {
	return &framework.Path{
		Pattern: rolePrefix + framework.GenericNameRegex("name"),
		Fields: map[string]*framework.FieldSchema{
			"name": {
				Type:        framework.TypeString,
				Description: "Name of the role",
			},
			"service_account_name": {
				Type:        framework.TypeString,
				Description: "The username/logon name for the service account with which this role will be associated.",
			},
			"ttl": {
				Type:        framework.TypeDurationSecond,
				Description: "In seconds, the default password time-to-live.",
			},
		},
		Callbacks: map[logical.Operation]framework.OperationFunc{
			logical.UpdateOperation: b.roleUpdateOperation,
			logical.ReadOperation:   b.roleReadOperation,
			logical.DeleteOperation: b.roleDeleteOperation,
		},
		HelpSynopsis:    roleHelpSynopsis,
		HelpDescription: roleHelpDescription,
	}
}
// readRole returns the named role from cache or storage, refreshing its
// PasswordLastSet from Active Directory on every storage read. It returns
// (nil, nil) when the role doesn't exist.
func (b *backend) readRole(ctx context.Context, storage logical.Storage, roleName string) (*backendRole, error) {
	// Serve straight from the cache when possible.
	if cached, ok := b.roleCache.Get(roleName); ok {
		return cached.(*backendRole), nil
	}

	// It's not cached; read it from storage.
	entry, err := storage.Get(ctx, roleStorageKey+"/"+roleName)
	if err != nil {
		return nil, err
	}
	if entry == nil {
		return nil, nil
	}
	role := &backendRole{}
	if err := entry.DecodeJSON(role); err != nil {
		return nil, err
	}

	// Always check when ActiveDirectory shows the password as last set on the fly.
	engineConf, err := b.readConfig(ctx, storage)
	if err != nil {
		return nil, err
	}
	if engineConf == nil {
		return nil, errors.New("the config is currently unset")
	}
	passwordLastSet, err := b.client.GetPasswordLastSet(engineConf.ADConf, role.ServiceAccountName)
	if err != nil {
		return nil, err
	}
	role.PasswordLastSet = passwordLastSet

	// Cache it for subsequent reads.
	b.roleCache.SetDefault(roleName, role)
	return role, nil
}
// writeRole persists the role to storage and refreshes the role cache.
func (b *backend) writeRole(ctx context.Context, storage logical.Storage, roleName string, role *backendRole) error {
	entry, err := logical.StorageEntryJSON(roleStorageKey+"/"+roleName, role)
	if err != nil {
		return err
	}
	if err = storage.Put(ctx, entry); err != nil {
		return err
	}
	b.roleCache.SetDefault(roleName, role)
	return nil
}
// roleUpdateOperation creates or overwrites a role, validating the service
// account against AD and the ttl against the engine config first.
func (b *backend) roleUpdateOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) {
	// Get everything we need to construct the role.
	roleName := fieldData.Get("name").(string)
	engineConf, err := b.readConfig(ctx, req.Storage)
	if err != nil {
		return nil, err
	}
	if engineConf == nil {
		return nil, errors.New("the config is currently unset")
	}

	// Actually construct it.
	serviceAccountName, err := getServiceAccountName(fieldData)
	if err != nil {
		return nil, err
	}
	// Verify the service account exists before committing to the role.
	if _, err := b.client.Get(engineConf.ADConf, serviceAccountName); err != nil {
		return nil, err
	}
	ttl, err := getValidatedTTL(engineConf.PasswordConf, fieldData)
	if err != nil {
		return nil, err
	}
	role := &backendRole{
		ServiceAccountName: serviceAccountName,
		TTL:                ttl,
	}

	// If we're overwriting an existing role, carry forward LastVaultRotation
	// so rotation scheduling isn't reset.
	// (Idiom fix: the former `else` after a terminating `return` was dead
	// weight; early-return keeps the happy path left-aligned.)
	oldRole, err := b.readRole(ctx, req.Storage, roleName)
	if err != nil {
		return nil, err
	}
	if oldRole != nil {
		role.LastVaultRotation = oldRole.LastVaultRotation
	}

	// writeRole it to storage and the roleCache.
	if err := b.writeRole(ctx, req.Storage, roleName, role); err != nil {
		return nil, err
	}

	// Return a 204.
	return nil, nil
}
// roleReadOperation returns the named role's fields, or nil if it
// doesn't exist.
func (b *backend) roleReadOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) {
	role, err := b.readRole(ctx, req.Storage, fieldData.Get("name").(string))
	if err != nil {
		return nil, err
	}
	if role == nil {
		return nil, nil
	}
	return &logical.Response{Data: role.Map()}, nil
}
// roleListOperation lists the names of all stored roles.
func (b *backend) roleListOperation(ctx context.Context, req *logical.Request, _ *framework.FieldData) (*logical.Response, error) {
	keys, err := req.Storage.List(ctx, roleStorageKey+"/")
	if err != nil {
		return nil, err
	}
	return logical.ListResponse(keys), nil
}
// roleDeleteOperation removes the role from storage and the cache, along
// with its associated creds via deleteCred.
func (b *backend) roleDeleteOperation(ctx context.Context, req *logical.Request, fieldData *framework.FieldData) (*logical.Response, error) {
	roleName := fieldData.Get("name").(string)

	if err := req.Storage.Delete(ctx, roleStorageKey+"/"+roleName); err != nil {
		return nil, err
	}
	b.roleCache.Delete(roleName)

	if err := b.deleteCred(ctx, req.Storage, roleName); err != nil {
		return nil, err
	}
	return nil, nil
}
// getServiceAccountName extracts the required service_account_name field,
// erroring when it's absent or empty.
func getServiceAccountName(fieldData *framework.FieldData) (string, error) {
	name := fieldData.Get("service_account_name").(string)
	if name == "" {
		return "", errors.New("\"service_account_name\" is required")
	}
	return name, nil
}
// getValidatedTTL resolves the requested ttl (defaulting to the config's)
// and verifies it is non-negative and within the configured max_ttl.
func getValidatedTTL(passwordConf *passwordConf, fieldData *framework.FieldData) (int, error) {
	ttl := fieldData.Get("ttl").(int)
	if ttl == 0 {
		ttl = passwordConf.TTL
	}
	switch {
	case ttl > passwordConf.MaxTTL:
		return 0, fmt.Errorf("requested ttl of %d seconds is over the max ttl of %d seconds", ttl, passwordConf.MaxTTL)
	case ttl < 0:
		return 0, fmt.Errorf("ttl can't be negative")
	}
	return ttl, nil
}
// Help text for the roles endpoints.
const (
	roleHelpSynopsis = `
Manage roles to build links between Vault and Active Directory service accounts.
`
	roleHelpDescription = `
This endpoint allows you to read, write, and delete individual roles that are used for enabling password rotation.

Deleting a role will not disable its current password. It will delete the role's associated creds in Vault.
`

	pathListRolesHelpSyn = `
List the name of each role currently stored.
`
	pathListRolesHelpDesc = `
To learn which service accounts are being managed by Vault, list the role names using
this endpoint. Then read any individual role by name to learn more, like the name of 
the service account it's associated with.
`
)

View File

@ -0,0 +1,28 @@
package plugin
import (
"time"
)
// backendRole ties a Vault role to an Active Directory service account
// and records password-rotation bookkeeping for it.
type backendRole struct {
	ServiceAccountName string    `json:"service_account_name"`
	TTL                int       `json:"ttl"`
	LastVaultRotation  time.Time `json:"last_vault_rotation"`
	PasswordLastSet    time.Time `json:"password_last_set"`
}

// Map flattens the role for API responses. Timestamps that have never
// been set are omitted rather than rendered as the year-1 sentinel.
func (r *backendRole) Map() map[string]interface{} {
	m := map[string]interface{}{
		"service_account_name": r.ServiceAccountName,
		"ttl":                  r.TTL,
	}
	// Idiom fix: IsZero is the supported way to test for an unset
	// time.Time; comparing with == can be thrown off by the location
	// and monotonic-clock fields.
	if !r.LastVaultRotation.IsZero() {
		m["last_vault_rotation"] = r.LastVaultRotation
	}
	if !r.PasswordLastSet.IsZero() {
		m["password_last_set"] = r.PasswordLastSet
	}
	return m
}

View File

@ -0,0 +1,38 @@
package util
import (
"encoding/base64"
"fmt"
"github.com/hashicorp/go-uuid"
)
var (
	// Per https://en.wikipedia.org/wiki/Password_strength#Guidelines_for_strong_passwords
	minimumLengthOfComplexString = 8

	// PasswordComplexityPrefix is prepended to every generated password so
	// it contains symbol, digit, and mixed-case characters.
	PasswordComplexityPrefix = "?@09AZ"

	// MinimumPasswordLength is the shortest password GeneratePassword accepts.
	MinimumPasswordLength = len(PasswordComplexityPrefix) + minimumLengthOfComplexString
)
// GeneratePassword returns a random password of exactly desiredLength
// characters, beginning with PasswordComplexityPrefix so it satisfies
// common complexity rules. desiredLength must be at least
// MinimumPasswordLength.
func GeneratePassword(desiredLength int) (string, error) {
	if desiredLength < MinimumPasswordLength {
		return "", fmt.Errorf("it's not possible to generate a _secure_ password of length %d, please boost length to %d, though Vault recommends higher", desiredLength, MinimumPasswordLength)
	}

	randomBytes, err := uuid.GenerateRandomBytes(desiredLength)
	if err != nil {
		return "", err
	}

	// Base64 expands the bytes by ~4/3, so a single pass normally suffices;
	// loop defensively so the slice below can never panic.
	var encoded string
	for len(encoded) <= desiredLength {
		// Encode to base64 because it's more complex.
		encoded += base64.StdEncoding.EncodeToString(randomBytes)
	}
	return (PasswordComplexityPrefix + encoded)[:desiredLength], nil
}

View File

@ -0,0 +1,73 @@
package util
import (
"fmt"
"time"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault-plugin-secrets-ad/plugin/client"
"github.com/hashicorp/vault/helper/ldaputil"
)
// NewSecretsClient returns a SecretsClient that performs its Active Directory
// operations through a fresh client.Client using the given logger.
func NewSecretsClient(logger hclog.Logger) *SecretsClient {
	adClient := client.NewClient(logger)
	return &SecretsClient{adClient: adClient}
}
// SecretsClient wraps a *client.Client to expose just the common convenience
// methods needed by the ad secrets backend.
type SecretsClient struct {
	adClient *client.Client
}
// Get looks up the LDAP entry for the given service account by its
// userPrincipalName. It returns an error when zero or more than one entry
// matches; searches are case sensitive.
func (c *SecretsClient) Get(conf *ldaputil.ConfigEntry, serviceAccountName string) (*client.Entry, error) {
	filters := map[*client.Field][]string{
		client.FieldRegistry.UserPrincipalName: {serviceAccountName},
	}
	entries, err := c.adClient.Search(conf, filters)
	if err != nil {
		return nil, err
	}
	if len(entries) == 0 {
		return nil, fmt.Errorf("unable to find service account named %s in active directory, searches are case sensitive", serviceAccountName)
	}
	if len(entries) > 1 {
		// Report the count rather than formatting the slice with %s, which
		// would print unreadable pointer values for the entries.
		return nil, fmt.Errorf("expected one service account matching %s, but received %d", serviceAccountName, len(entries))
	}
	return entries[0], nil
}
// GetPasswordLastSet returns the time the service account's password was last
// set in Active Directory. It returns the zero time (with nil error) when AD
// reports the value as "0", i.e. the password was created but never rolled.
func (c *SecretsClient) GetPasswordLastSet(conf *ldaputil.ConfigEntry, serviceAccountName string) (time.Time, error) {
	entry, err := c.Get(conf, serviceAccountName)
	if err != nil {
		return time.Time{}, err
	}

	values, found := entry.Get(client.FieldRegistry.PasswordLastSet)
	if !found {
		return time.Time{}, fmt.Errorf("%+v lacks a PasswordLastSet field", entry)
	}

	// PasswordLastSet is single-valued; anything else is unexpected.
	if len(values) != 1 {
		return time.Time{}, fmt.Errorf("expected only one value for PasswordLastSet, but received %s", values)
	}

	ticks := values[0]
	if ticks == "0" {
		// password has never been rolled in Active Directory, only created
		return time.Time{}, nil
	}

	// Convert the raw tick string to a time.Time — presumably Windows
	// filetime ticks; see plugin/client.ParseTicks to confirm.
	t, err := client.ParseTicks(ticks)
	if err != nil {
		return time.Time{}, err
	}
	return t, nil
}
// UpdatePassword sets a new password on the service account identified by its
// userPrincipalName.
func (c *SecretsClient) UpdatePassword(conf *ldaputil.ConfigEntry, serviceAccountName string, newPassword string) error {
	byAccountName := map[*client.Field][]string{
		client.FieldRegistry.UserPrincipalName: {serviceAccountName},
	}
	return c.adClient.UpdatePassword(conf, byAccountName, newPassword)
}

335
vendor/golang.org/x/text/encoding/encoding.go generated vendored Normal file
View File

@ -0,0 +1,335 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package encoding defines an interface for character encodings, such as Shift
// JIS and Windows 1252, that can convert to and from UTF-8.
//
// Encoding implementations are provided in other packages, such as
// golang.org/x/text/encoding/charmap and
// golang.org/x/text/encoding/japanese.
package encoding // import "golang.org/x/text/encoding"
import (
"errors"
"io"
"strconv"
"unicode/utf8"
"golang.org/x/text/encoding/internal/identifier"
"golang.org/x/text/transform"
)
// TODO:
// - There seems to be some inconsistency in when decoders return errors
// and when not. Also documentation seems to suggest they shouldn't return
// errors at all (except for UTF-16).
// - Encoders seem to rely on or at least benefit from the input being in NFC
// normal form. Perhaps add an example how users could prepare their output.
// Encoding is a character set encoding that can be transformed to and from
// UTF-8.
type Encoding interface {
	// NewDecoder returns a Decoder.
	NewDecoder() *Decoder

	// NewEncoder returns an Encoder.
	NewEncoder() *Encoder
}

// A Decoder converts bytes to UTF-8. It implements transform.Transformer.
//
// Transforming source bytes that are not of that encoding will not result in an
// error per se. Each byte that cannot be transcoded will be represented in the
// output by the UTF-8 encoding of '\uFFFD', the replacement rune.
type Decoder struct {
	transform.Transformer

	// This forces external creators of Decoders to use names in struct
	// initializers, allowing for future extendibility without having to break
	// code.
	_ struct{}
}

// Bytes converts the given encoded bytes to UTF-8. It returns the converted
// bytes or nil, err if any error occurred.
func (d *Decoder) Bytes(b []byte) ([]byte, error) {
	b, _, err := transform.Bytes(d, b)
	if err != nil {
		return nil, err
	}
	return b, nil
}

// String converts the given encoded string to UTF-8. It returns the converted
// string or "", err if any error occurred.
func (d *Decoder) String(s string) (string, error) {
	s, _, err := transform.String(d, s)
	if err != nil {
		return "", err
	}
	return s, nil
}

// Reader wraps another Reader to decode its bytes.
//
// The Decoder may not be used for any other operation as long as the returned
// Reader is in use.
func (d *Decoder) Reader(r io.Reader) io.Reader {
	return transform.NewReader(r, d)
}

// An Encoder converts bytes from UTF-8. It implements transform.Transformer.
//
// Each rune that cannot be transcoded will result in an error. In this case,
// the transform will consume all source byte up to, not including the offending
// rune. Transforming source bytes that are not valid UTF-8 will be replaced by
// `\uFFFD`. To return early with an error instead, use transform.Chain to
// preprocess the data with a UTF8Validator.
type Encoder struct {
	transform.Transformer

	// This forces external creators of Encoders to use names in struct
	// initializers, allowing for future extendibility without having to break
	// code.
	_ struct{}
}

// Bytes converts bytes from UTF-8. It returns the converted bytes or nil, err if
// any error occurred.
func (e *Encoder) Bytes(b []byte) ([]byte, error) {
	b, _, err := transform.Bytes(e, b)
	if err != nil {
		return nil, err
	}
	return b, nil
}

// String converts a string from UTF-8. It returns the converted string or
// "", err if any error occurred.
func (e *Encoder) String(s string) (string, error) {
	s, _, err := transform.String(e, s)
	if err != nil {
		return "", err
	}
	return s, nil
}

// Writer wraps another Writer to encode its UTF-8 output.
//
// The Encoder may not be used for any other operation as long as the returned
// Writer is in use.
func (e *Encoder) Writer(w io.Writer) io.Writer {
	return transform.NewWriter(w, e)
}

// ASCIISub is the ASCII substitute character, as recommended by
// http://unicode.org/reports/tr36/#Text_Comparison
const ASCIISub = '\x1a'

// Nop is the nop encoding. Its transformed bytes are the same as the source
// bytes; it does not replace invalid UTF-8 sequences.
var Nop Encoding = nop{}

// nop passes bytes through unchanged in both directions.
type nop struct{}

func (nop) NewDecoder() *Decoder {
	return &Decoder{Transformer: transform.Nop}
}

func (nop) NewEncoder() *Encoder {
	return &Encoder{Transformer: transform.Nop}
}
// Replacement is the replacement encoding. Decoding from the replacement
// encoding yields a single '\uFFFD' replacement rune. Encoding from UTF-8 to
// the replacement encoding yields the same as the source bytes except that
// invalid UTF-8 is converted to '\uFFFD'.
//
// It is defined at http://encoding.spec.whatwg.org/#replacement
var Replacement Encoding = replacement{}

type replacement struct{}

func (replacement) NewDecoder() *Decoder {
	return &Decoder{Transformer: replacementDecoder{}}
}

func (replacement) NewEncoder() *Encoder {
	return &Encoder{Transformer: replacementEncoder{}}
}

// ID returns the unofficial MIB identifier assigned to the WhatWG
// replacement encoding.
func (replacement) ID() (mib identifier.MIB, other string) {
	return identifier.Replacement, ""
}

// replacementDecoder consumes all input and emits a single '\uFFFD' at EOF.
type replacementDecoder struct{ transform.NopResetter }

func (replacementDecoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	if len(dst) < 3 {
		return 0, 0, transform.ErrShortDst
	}
	if atEOF {
		const fffd = "\ufffd"
		dst[0] = fffd[0]
		dst[1] = fffd[1]
		dst[2] = fffd[2]
		nDst = 3
	}
	return nDst, len(src), nil
}

// replacementEncoder copies valid UTF-8 through and substitutes '\uFFFD' for
// each ill-formed sequence.
type replacementEncoder struct{ transform.NopResetter }

func (replacementEncoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	r, size := rune(0), 0
	for ; nSrc < len(src); nSrc += size {
		r = rune(src[nSrc])

		// Decode a 1-byte rune.
		if r < utf8.RuneSelf {
			size = 1
		} else {
			// Decode a multi-byte rune.
			r, size = utf8.DecodeRune(src[nSrc:])
			if size == 1 {
				// All valid runes of size 1 (those below utf8.RuneSelf) were
				// handled above. We have invalid UTF-8 or we haven't seen the
				// full character yet.
				if !atEOF && !utf8.FullRune(src[nSrc:]) {
					err = transform.ErrShortSrc
					break
				}
				r = '\ufffd'
			}
		}

		if nDst+utf8.RuneLen(r) > len(dst) {
			err = transform.ErrShortDst
			break
		}
		nDst += utf8.EncodeRune(dst[nDst:], r)
	}
	return nDst, nSrc, err
}
// HTMLEscapeUnsupported wraps encoders to replace source runes outside the
// repertoire of the destination encoding with HTML escape sequences.
//
// This wrapper exists to comply to URL and HTML forms requiring a
// non-terminating legacy encoder. The produced sequences may lead to data
// loss as they are indistinguishable from legitimate input. To avoid this
// issue, use UTF-8 encodings whenever possible.
func HTMLEscapeUnsupported(e *Encoder) *Encoder {
	return &Encoder{Transformer: &errorHandler{e, errorToHTML}}
}

// ReplaceUnsupported wraps encoders to replace source runes outside the
// repertoire of the destination encoding with an encoding-specific
// replacement.
//
// This wrapper is only provided for backwards compatibility and legacy
// handling. Its use is strongly discouraged. Use UTF-8 whenever possible.
func ReplaceUnsupported(e *Encoder) *Encoder {
	return &Encoder{Transformer: &errorHandler{e, errorToReplacement}}
}

// errorHandler wraps an Encoder and, each time the underlying encoder rejects
// a rune, substitutes the handler's output and resumes encoding.
type errorHandler struct {
	*Encoder
	handler func(dst []byte, r rune, err repertoireError) (n int, ok bool)
}

// TODO: consider making this error public in some form.
type repertoireError interface {
	Replacement() byte
}

func (h errorHandler) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	nDst, nSrc, err = h.Transformer.Transform(dst, src, atEOF)
	for err != nil {
		// Only repertoire errors are recoverable; anything else propagates.
		rerr, ok := err.(repertoireError)
		if !ok {
			return nDst, nSrc, err
		}
		r, sz := utf8.DecodeRune(src[nSrc:])
		n, ok := h.handler(dst[nDst:], r, rerr)
		if !ok {
			return nDst, nSrc, transform.ErrShortDst
		}
		err = nil
		nDst += n
		// Skip the offending rune and keep encoding the remainder.
		if nSrc += sz; nSrc < len(src) {
			var dn, sn int
			dn, sn, err = h.Transformer.Transform(dst[nDst:], src[nSrc:], atEOF)
			nDst += dn
			nSrc += sn
		}
	}
	return nDst, nSrc, err
}

// errorToHTML writes the HTML numeric character reference for r (e.g. "&#65;")
// into dst, reporting false if dst is too small.
func errorToHTML(dst []byte, r rune, err repertoireError) (n int, ok bool) {
	buf := [8]byte{}
	b := strconv.AppendUint(buf[:0], uint64(r), 10)
	if n = len(b) + len("&#;"); n >= len(dst) {
		return 0, false
	}
	dst[0] = '&'
	dst[1] = '#'
	dst[copy(dst[2:], b)+2] = ';'
	return n, true
}

// errorToReplacement writes the encoding-specific replacement byte into dst.
func errorToReplacement(dst []byte, r rune, err repertoireError) (n int, ok bool) {
	if len(dst) == 0 {
		return 0, false
	}
	dst[0] = err.Replacement()
	return 1, true
}
// ErrInvalidUTF8 means that a transformer encountered invalid UTF-8.
var ErrInvalidUTF8 = errors.New("encoding: invalid UTF-8")

// UTF8Validator is a transformer that returns ErrInvalidUTF8 on the first
// input byte that is not valid UTF-8.
var UTF8Validator transform.Transformer = utf8Validator{}

type utf8Validator struct{ transform.NopResetter }

func (utf8Validator) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	// Validation and copying proceed together; never examine more bytes
	// than fit in dst.
	n := len(src)
	if n > len(dst) {
		n = len(dst)
	}
	for i := 0; i < n; {
		if c := src[i]; c < utf8.RuneSelf {
			dst[i] = c
			i++
			continue
		}
		_, size := utf8.DecodeRune(src[i:])
		if size == 1 {
			// All valid runes of size 1 (those below utf8.RuneSelf) were
			// handled above. We have invalid UTF-8 or we haven't seen the
			// full character yet.
			err = ErrInvalidUTF8
			if !atEOF && !utf8.FullRune(src[i:]) {
				err = transform.ErrShortSrc
			}
			return i, i, err
		}
		if i+size > len(dst) {
			return i, i, transform.ErrShortDst
		}
		for ; size > 0; size-- {
			dst[i] = src[i]
			i++
		}
	}
	if len(src) > len(dst) {
		err = transform.ErrShortDst
	}
	return n, n, err
}

View File

@ -0,0 +1,81 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
//go:generate go run gen.go
// Package identifier defines the contract between implementations of Encoding
// and Index by defining identifiers that uniquely identify standardized coded
// character sets (CCS) and character encoding schemes (CES), which we will
// together refer to as encodings, for which Encoding implementations provide
// converters to and from UTF-8. This package is typically only of concern to
// implementers of Indexes and Encodings.
//
// One part of the identifier is the MIB code, which is defined by IANA and
// uniquely identifies a CCS or CES. Each code is associated with data that
// references authorities, official documentation as well as aliases and MIME
// names.
//
// Not all CESs are covered by the IANA registry. The "other" string that is
// returned by ID can be used to identify other character sets or versions of
// existing ones.
//
// It is recommended that each package that provides a set of Encodings provide
// the All and Common variables to reference all supported encodings and
// commonly used subset. This allows Index implementations to include all
// available encodings without explicitly referencing or knowing about them.
package identifier
// Note: this package is internal, but could be made public if there is a need
// for writing third-party Indexes and Encodings.
// References:
// - http://source.icu-project.org/repos/icu/icu/trunk/source/data/mappings/convrtrs.txt
// - http://www.iana.org/assignments/character-sets/character-sets.xhtml
// - http://www.iana.org/assignments/ianacharset-mib/ianacharset-mib
// - http://www.ietf.org/rfc/rfc2978.txt
// - http://www.unicode.org/reports/tr22/
// - http://www.w3.org/TR/encoding/
// - https://encoding.spec.whatwg.org/
// - https://encoding.spec.whatwg.org/encodings.json
// - https://tools.ietf.org/html/rfc6657#section-5
// Interface can be implemented by Encodings to define the CCS or CES for which
// it implements conversions.
type Interface interface {
	// ID returns an encoding identifier. Exactly one of the mib and other
	// values should be non-zero.
	//
	// In the usual case it is only necessary to indicate the MIB code. The
	// other string can be used to specify encodings for which there is no MIB,
	// such as "x-mac-dingbat".
	//
	// The other string may only contain the characters a-z, A-Z, 0-9, - and _.
	ID() (mib MIB, other string)

	// NOTE: the restrictions on the encoding are to allow extending the syntax
	// with additional information such as versions, vendors and other variants.
}

// A MIB identifies an encoding. It is derived from the IANA MIB codes and adds
// some identifiers for some encodings that are not covered by the IANA
// standard.
//
// See http://www.iana.org/assignments/ianacharset-mib.
type MIB uint16

// These additional MIB types are not defined in IANA. They are added because
// they are common and defined within the text repo.
const (
	// Unofficial marks the start of encodings not registered by IANA.
	Unofficial MIB = 10000 + iota

	// Replacement is the WhatWG replacement encoding.
	Replacement

	// XUserDefined is the code for x-user-defined.
	XUserDefined

	// MacintoshCyrillic is the code for x-mac-cyrillic.
	MacintoshCyrillic
)

File diff suppressed because it is too large Load Diff

75
vendor/golang.org/x/text/encoding/internal/internal.go generated vendored Normal file
View File

@ -0,0 +1,75 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package internal contains code that is shared among encoding implementations.
package internal
import (
"golang.org/x/text/encoding"
"golang.org/x/text/encoding/internal/identifier"
"golang.org/x/text/transform"
)
// Encoding is an implementation of the Encoding interface that adds the String
// and ID methods to an existing encoding.
type Encoding struct {
	encoding.Encoding
	Name string
	MIB  identifier.MIB
}

// _ verifies that Encoding implements identifier.Interface.
var _ identifier.Interface = (*Encoding)(nil)

// String returns the encoding's display name.
func (e *Encoding) String() string {
	return e.Name
}

// ID returns the encoding's MIB identifier; the "other" string is unused.
func (e *Encoding) ID() (mib identifier.MIB, other string) {
	return e.MIB, ""
}

// SimpleEncoding is an Encoding that combines two Transformers.
type SimpleEncoding struct {
	Decoder transform.Transformer
	Encoder transform.Transformer
}

func (e *SimpleEncoding) NewDecoder() *encoding.Decoder {
	return &encoding.Decoder{Transformer: e.Decoder}
}

func (e *SimpleEncoding) NewEncoder() *encoding.Encoder {
	return &encoding.Encoder{Transformer: e.Encoder}
}

// FuncEncoding is an Encoding that combines two functions returning a new
// Transformer.
type FuncEncoding struct {
	Decoder func() transform.Transformer
	Encoder func() transform.Transformer
}

func (e FuncEncoding) NewDecoder() *encoding.Decoder {
	return &encoding.Decoder{Transformer: e.Decoder()}
}

func (e FuncEncoding) NewEncoder() *encoding.Encoder {
	return &encoding.Encoder{Transformer: e.Encoder()}
}

// A RepertoireError indicates a rune is not in the repertoire of a destination
// encoding. It is associated with an encoding-specific suggested replacement
// byte.
type RepertoireError byte

// Error implements the error interface.
func (r RepertoireError) Error() string {
	return "encoding: rune not supported by encoding."
}

// Replacement returns the replacement byte associated with this error.
func (r RepertoireError) Replacement() byte { return byte(r) }

// ErrASCIIReplacement is a RepertoireError suggesting the ASCII substitute
// character as replacement.
var ErrASCIIReplacement = RepertoireError(encoding.ASCIISub)

82
vendor/golang.org/x/text/encoding/unicode/override.go generated vendored Normal file
View File

@ -0,0 +1,82 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package unicode
import (
"golang.org/x/text/transform"
)
// BOMOverride returns a new decoder transformer that is identical to fallback,
// except that the presence of a Byte Order Mark at the start of the input
// causes it to switch to the corresponding Unicode decoding. It will only
// consider BOMs for UTF-8, UTF-16BE, and UTF-16LE.
//
// This differs from using ExpectBOM by allowing a BOM to switch to UTF-8, not
// just UTF-16 variants, and allowing falling back to any encoding scheme.
//
// This technique is recommended by the W3C for use in HTML 5: "For
// compatibility with deployed content, the byte order mark (also known as BOM)
// is considered more authoritative than anything else."
// http://www.w3.org/TR/encoding/#specification-hooks
//
// Using BOMOverride is mostly intended for use cases where the first characters
// of a fallback encoding are known to not be a BOM, for example, for valid HTML
// and most encodings.
func BOMOverride(fallback transform.Transformer) transform.Transformer {
	// TODO: possibly allow a variadic argument of unicode encodings to allow
	// specifying details of which fallbacks are supported as well as
	// specifying the details of the implementations. This would also allow for
	// support for UTF-32, which should not be supported by default.
	return &bomOverride{fallback: fallback}
}

// bomOverride sniffs the BOM on the first Transform call, then delegates all
// subsequent calls to the transformer selected (current).
type bomOverride struct {
	fallback transform.Transformer
	current  transform.Transformer
}

func (d *bomOverride) Reset() {
	d.current = nil
	d.fallback.Reset()
}

var (
	// TODO: we could use decode functions here, instead of allocating a new
	// decoder on every NewDecoder as IgnoreBOM decoders can be stateless.
	utf16le = UTF16(LittleEndian, IgnoreBOM)
	utf16be = UTF16(BigEndian, IgnoreBOM)
)

const utf8BOM = "\ufeff"

func (d *bomOverride) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	if d.current != nil {
		return d.current.Transform(dst, src, atEOF)
	}
	// Need up to 3 bytes (the UTF-8 BOM, the longest considered) to sniff.
	if len(src) < 3 && !atEOF {
		return 0, 0, transform.ErrShortSrc
	}
	d.current = d.fallback
	bomSize := 0
	if len(src) >= 2 {
		if src[0] == 0xFF && src[1] == 0xFE {
			d.current = utf16le.NewDecoder()
			bomSize = 2
		} else if src[0] == 0xFE && src[1] == 0xFF {
			d.current = utf16be.NewDecoder()
			bomSize = 2
		} else if len(src) >= 3 &&
			src[0] == utf8BOM[0] &&
			src[1] == utf8BOM[1] &&
			src[2] == utf8BOM[2] {
			d.current = transform.Nop
			bomSize = 3
		}
	}
	// Only invoke the selected transformer if input remains past the BOM.
	if bomSize < len(src) {
		nDst, nSrc, err = d.current.Transform(dst, src[bomSize:], atEOF)
	}
	return nDst, nSrc + bomSize, err
}

434
vendor/golang.org/x/text/encoding/unicode/unicode.go generated vendored Normal file
View File

@ -0,0 +1,434 @@
// Copyright 2013 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package unicode provides Unicode encodings such as UTF-16.
package unicode // import "golang.org/x/text/encoding/unicode"
import (
"errors"
"unicode/utf16"
"unicode/utf8"
"golang.org/x/text/encoding"
"golang.org/x/text/encoding/internal"
"golang.org/x/text/encoding/internal/identifier"
"golang.org/x/text/internal/utf8internal"
"golang.org/x/text/runes"
"golang.org/x/text/transform"
)
// TODO: I think the Transformers really should return errors on unmatched
// surrogate pairs and odd numbers of bytes. This is not required by RFC 2781,
// which leaves it open, but is suggested by WhatWG. It will allow for all error
// modes as defined by WhatWG: fatal, HTML and Replacement. This would require
// the introduction of some kind of error type for conveying the erroneous code
// point.
// UTF8 is the UTF-8 encoding.
var UTF8 encoding.Encoding = utf8enc

var utf8enc = &internal.Encoding{
	&internal.SimpleEncoding{utf8Decoder{}, runes.ReplaceIllFormed()},
	"UTF-8",
	identifier.UTF8,
}

// utf8Decoder replaces each ill-formed sequence with U+FFFD, skipping the
// maximal subpart per the W3C convention (see the comment in Transform).
type utf8Decoder struct{ transform.NopResetter }

func (utf8Decoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	var pSrc int // point from which to start copy in src
	var accept utf8internal.AcceptRange

	// The decoder can only make the input larger, not smaller.
	n := len(src)
	if len(dst) < n {
		err = transform.ErrShortDst
		n = len(dst)
		atEOF = false
	}
	for nSrc < n {
		c := src[nSrc]
		if c < utf8.RuneSelf {
			nSrc++
			continue
		}
		first := utf8internal.First[c]
		size := int(first & utf8internal.SizeMask)
		if first == utf8internal.FirstInvalid {
			goto handleInvalid // invalid starter byte
		}
		accept = utf8internal.AcceptRanges[first>>utf8internal.AcceptShift]
		if nSrc+size > n {
			if !atEOF {
				// We may stop earlier than necessary here if the short sequence
				// has invalid bytes. Not checking for this simplifies the code
				// and may avoid duplicate computations in certain conditions.
				if err == nil {
					err = transform.ErrShortSrc
				}
				break
			}
			// Determine the maximal subpart of an ill-formed subsequence.
			switch {
			case nSrc+1 >= n || src[nSrc+1] < accept.Lo || accept.Hi < src[nSrc+1]:
				size = 1
			case nSrc+2 >= n || src[nSrc+2] < utf8internal.LoCB || utf8internal.HiCB < src[nSrc+2]:
				size = 2
			default:
				size = 3 // As we are short, the maximum is 3.
			}
			goto handleInvalid
		}
		// Validate each continuation byte; empty branches fall through for
		// sequences shorter than the byte being checked.
		if c = src[nSrc+1]; c < accept.Lo || accept.Hi < c {
			size = 1
			goto handleInvalid // invalid continuation byte
		} else if size == 2 {
		} else if c = src[nSrc+2]; c < utf8internal.LoCB || utf8internal.HiCB < c {
			size = 2
			goto handleInvalid // invalid continuation byte
		} else if size == 3 {
		} else if c = src[nSrc+3]; c < utf8internal.LoCB || utf8internal.HiCB < c {
			size = 3
			goto handleInvalid // invalid continuation byte
		}
		nSrc += size
		continue

	handleInvalid:
		// Copy the scanned input so far.
		nDst += copy(dst[nDst:], src[pSrc:nSrc])

		// Append RuneError to the destination.
		const runeError = "\ufffd"
		if nDst+len(runeError) > len(dst) {
			return nDst, nSrc, transform.ErrShortDst
		}
		nDst += copy(dst[nDst:], runeError)

		// Skip the maximal subpart of an ill-formed subsequence according to
		// the W3C standard way instead of the Go way. This Transform is
		// probably the only place in the text repo where it is warranted.
		nSrc += size
		pSrc = nSrc

		// Recompute the maximum source length.
		if sz := len(dst) - nDst; sz < len(src)-nSrc {
			err = transform.ErrShortDst
			n = nSrc + sz
			atEOF = false
		}
	}
	return nDst + copy(dst[nDst:], src[pSrc:nSrc]), nSrc, err
}
// UTF16 returns a UTF-16 Encoding for the given default endianness and byte
// order mark (BOM) policy.
//
// When decoding from UTF-16 to UTF-8, if the BOMPolicy is IgnoreBOM then
// neither BOMs U+FEFF nor noncharacters U+FFFE in the input stream will affect
// the endianness used for decoding, and will instead be output as their
// standard UTF-8 encodings: "\xef\xbb\xbf" and "\xef\xbf\xbe". If the BOMPolicy
// is UseBOM or ExpectBOM a starting BOM is not written to the UTF-8 output.
// Instead, it overrides the default endianness e for the remainder of the
// transformation. Any subsequent BOMs U+FEFF or noncharacters U+FFFE will not
// affect the endianness used, and will instead be output as their standard
// UTF-8 encodings. For UseBOM, if there is no starting BOM, it will proceed
// with the default Endianness. For ExpectBOM, in that case, the transformation
// will return early with an ErrMissingBOM error.
//
// When encoding from UTF-8 to UTF-16, a BOM will be inserted at the start of
// the output if the BOMPolicy is UseBOM or ExpectBOM. Otherwise, a BOM will not
// be inserted. The UTF-8 input does not need to contain a BOM.
//
// There is no concept of a 'native' endianness. If the UTF-16 data is produced
// and consumed in a greater context that implies a certain endianness, use
// IgnoreBOM. Otherwise, use ExpectBOM and always produce and consume a BOM.
//
// In the language of http://www.unicode.org/faq/utf_bom.html#bom10, IgnoreBOM
// corresponds to "Where the precise type of the data stream is known... the
// BOM should not be used" and ExpectBOM corresponds to "A particular
// protocol... may require use of the BOM".
func UTF16(e Endianness, b BOMPolicy) encoding.Encoding {
	return utf16Encoding{config{e, b}, mibValue[e][b&bomMask]}
}

// mibValue maps Endianness and BOMPolicy settings to MIB constants. Note that
// some configurations map to the same MIB identifier. RFC 2781 has requirements
// and recommendations. Some of the "configurations" are merely recommendations,
// so multiple configurations could match.
var mibValue = map[Endianness][numBOMValues]identifier.MIB{
	BigEndian: [numBOMValues]identifier.MIB{
		IgnoreBOM: identifier.UTF16BE,
		UseBOM:    identifier.UTF16, // BigEnding default is preferred by RFC 2781.
		// TODO: acceptBOM | strictBOM would map to UTF16BE as well.
	},
	LittleEndian: [numBOMValues]identifier.MIB{
		IgnoreBOM: identifier.UTF16LE,
		UseBOM:    identifier.UTF16, // LittleEndian default is allowed and preferred on Windows.
		// TODO: acceptBOM | strictBOM would map to UTF16LE as well.
	},
	// ExpectBOM is not widely used and has no valid MIB identifier.
}

// All lists a configuration for each IANA-defined UTF-16 variant.
var All = []encoding.Encoding{
	UTF8,
	UTF16(BigEndian, UseBOM),
	UTF16(BigEndian, IgnoreBOM),
	UTF16(LittleEndian, IgnoreBOM),
}

// BOMPolicy is a UTF-16 encoding's byte order mark policy.
type BOMPolicy uint8

const (
	writeBOM   BOMPolicy = 0x01
	acceptBOM  BOMPolicy = 0x02
	requireBOM BOMPolicy = 0x04
	bomMask    BOMPolicy = 0x07

	// HACK: numBOMValues == 8 triggers a bug in the 1.4 compiler (cannot have a
	// map of an array of length 8 of a type that is also used as a key or value
	// in another map). See golang.org/issue/11354.
	// TODO: consider changing this value back to 8 if the use of 1.4.* has
	// been minimized.
	numBOMValues = 8 + 1

	// IgnoreBOM means to ignore any byte order marks.
	IgnoreBOM BOMPolicy = 0
	// Common and RFC 2781-compliant interpretation for UTF-16BE/LE.

	// UseBOM means that the UTF-16 form may start with a byte order mark, which
	// will be used to override the default encoding.
	UseBOM BOMPolicy = writeBOM | acceptBOM
	// Common and RFC 2781-compliant interpretation for UTF-16.

	// ExpectBOM means that the UTF-16 form must start with a byte order mark,
	// which will be used to override the default encoding.
	ExpectBOM BOMPolicy = writeBOM | acceptBOM | requireBOM
	// Used in Java as Unicode (not to be confused with Java's UTF-16) and
	// ICU's UTF-16,version=1. Not compliant with RFC 2781.

	// TODO (maybe): strictBOM: BOM must match Endianness. This would allow:
	// - UTF-16(B|L)E,version=1: writeBOM | acceptBOM | requireBOM | strictBOM
	//   (UnicodeBig and UnicodeLittle in Java)
	// - RFC 2781-compliant, but less common interpretation for UTF-16(B|L)E:
	//   acceptBOM | strictBOM (e.g. assigned to CheckBOM).
	// This addition would be consistent with supporting ExpectBOM.
)

// Endianness is a UTF-16 encoding's default endianness.
type Endianness bool

const (
	// BigEndian is UTF-16BE.
	BigEndian Endianness = false
	// LittleEndian is UTF-16LE.
	LittleEndian Endianness = true
)

// ErrMissingBOM means that decoding UTF-16 input with ExpectBOM did not find a
// starting byte order mark.
var ErrMissingBOM = errors.New("encoding: missing byte order mark")
// utf16Encoding pairs a UTF-16 configuration with its MIB identifier.
type utf16Encoding struct {
	config
	mib identifier.MIB
}

// config holds a UTF-16 default endianness and BOM policy.
type config struct {
	endianness Endianness
	bomPolicy  BOMPolicy
}

func (u utf16Encoding) NewDecoder() *encoding.Decoder {
	return &encoding.Decoder{Transformer: &utf16Decoder{
		initial: u.config,
		current: u.config,
	}}
}

func (u utf16Encoding) NewEncoder() *encoding.Encoder {
	return &encoding.Encoder{Transformer: &utf16Encoder{
		endianness:       u.endianness,
		initialBOMPolicy: u.bomPolicy,
		currentBOMPolicy: u.bomPolicy,
	}}
}

func (u utf16Encoding) ID() (mib identifier.MIB, other string) {
	return u.mib, ""
}

// String renders the configuration, e.g. "UTF-16BE (Use BOM)".
func (u utf16Encoding) String() string {
	e, b := "B", ""
	if u.endianness == LittleEndian {
		e = "L"
	}
	switch u.bomPolicy {
	case ExpectBOM:
		b = "Expect"
	case UseBOM:
		b = "Use"
	case IgnoreBOM:
		b = "Ignore"
	}
	return "UTF-16" + e + "E (" + b + " BOM)"
}

// utf16Decoder keeps the initial config (restored on Reset) plus the current
// config, whose endianness may have been overridden by a BOM.
type utf16Decoder struct {
	initial config
	current config
}

func (u *utf16Decoder) Reset() {
	u.current = u.initial
}
func (u *utf16Decoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	if len(src) == 0 {
		if atEOF && u.current.bomPolicy&requireBOM != 0 {
			return 0, 0, ErrMissingBOM
		}
		return 0, 0, nil
	}
	if u.current.bomPolicy&acceptBOM != 0 {
		if len(src) < 2 {
			return 0, 0, transform.ErrShortSrc
		}
		switch {
		case src[0] == 0xfe && src[1] == 0xff:
			u.current.endianness = BigEndian
			nSrc = 2
		case src[0] == 0xff && src[1] == 0xfe:
			u.current.endianness = LittleEndian
			nSrc = 2
		default:
			if u.current.bomPolicy&requireBOM != 0 {
				return 0, 0, ErrMissingBOM
			}
		}
		// Only the very first code unit may be treated as a BOM.
		u.current.bomPolicy = IgnoreBOM
	}

	var r rune
	var dSize, sSize int
	for nSrc < len(src) {
		if nSrc+1 < len(src) {
			x := uint16(src[nSrc+0])<<8 | uint16(src[nSrc+1])
			if u.current.endianness == LittleEndian {
				x = x>>8 | x<<8
			}
			r, sSize = rune(x), 2
			if utf16.IsSurrogate(r) {
				if nSrc+3 < len(src) {
					x = uint16(src[nSrc+2])<<8 | uint16(src[nSrc+3])
					if u.current.endianness == LittleEndian {
						x = x>>8 | x<<8
					}
					// Save for next iteration if it is not a high surrogate.
					if isHighSurrogate(rune(x)) {
						r, sSize = utf16.DecodeRune(r, rune(x)), 4
					}
				} else if !atEOF {
					err = transform.ErrShortSrc
					break
				}
			}
			// An unpaired surrogate has RuneLen < 0; emit U+FFFD instead.
			if dSize = utf8.RuneLen(r); dSize < 0 {
				r, dSize = utf8.RuneError, 3
			}
		} else if atEOF {
			// Single trailing byte.
			r, dSize, sSize = utf8.RuneError, 3, 1
		} else {
			err = transform.ErrShortSrc
			break
		}
		if nDst+dSize > len(dst) {
			err = transform.ErrShortDst
			break
		}
		nDst += utf8.EncodeRune(dst[nDst:], r)
		nSrc += sSize
	}
	return nDst, nSrc, err
}

// isHighSurrogate reports whether r is in the range 0xDC00-0xDFFF.
// NOTE(review): despite its name, that range is the *low* (trailing)
// surrogate half — exactly what the caller needs to accept rune(x) as the
// second unit of a pair before calling utf16.DecodeRune. The name is
// misleading but the range is correct; do not "fix" it to 0xD800-0xDBFF.
func isHighSurrogate(r rune) bool {
	return 0xDC00 <= r && r <= 0xDFFF
}
// utf16Encoder writes UTF-16 big-endian into dst, then byte-swaps the output
// in place when the configured endianness is little-endian.
type utf16Encoder struct {
	endianness       Endianness
	initialBOMPolicy BOMPolicy
	currentBOMPolicy BOMPolicy
}

func (u *utf16Encoder) Reset() {
	u.currentBOMPolicy = u.initialBOMPolicy
}

func (u *utf16Encoder) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	// Emit the BOM (big-endian here; swapped below if needed) exactly once.
	if u.currentBOMPolicy&writeBOM != 0 {
		if len(dst) < 2 {
			return 0, 0, transform.ErrShortDst
		}
		dst[0], dst[1] = 0xfe, 0xff
		u.currentBOMPolicy = IgnoreBOM
		nDst = 2
	}

	r, size := rune(0), 0
	for nSrc < len(src) {
		r = rune(src[nSrc])

		// Decode a 1-byte rune.
		if r < utf8.RuneSelf {
			size = 1
		} else {
			// Decode a multi-byte rune.
			r, size = utf8.DecodeRune(src[nSrc:])
			if size == 1 {
				// All valid runes of size 1 (those below utf8.RuneSelf) were
				// handled above. We have invalid UTF-8 or we haven't seen the
				// full character yet.
				if !atEOF && !utf8.FullRune(src[nSrc:]) {
					err = transform.ErrShortSrc
					break
				}
			}
		}

		if r <= 0xffff {
			if nDst+2 > len(dst) {
				err = transform.ErrShortDst
				break
			}
			dst[nDst+0] = uint8(r >> 8)
			dst[nDst+1] = uint8(r)
			nDst += 2
		} else {
			if nDst+4 > len(dst) {
				err = transform.ErrShortDst
				break
			}
			r1, r2 := utf16.EncodeRune(r)
			dst[nDst+0] = uint8(r1 >> 8)
			dst[nDst+1] = uint8(r1)
			dst[nDst+2] = uint8(r2 >> 8)
			dst[nDst+3] = uint8(r2)
			nDst += 4
		}
		nSrc += size
	}

	// Everything above was written big-endian; swap byte pairs for LE output.
	if u.endianness == LittleEndian {
		for i := 0; i < nDst; i += 2 {
			dst[i], dst[i+1] = dst[i+1], dst[i]
		}
	}
	return nDst, nSrc, err
}

View File

@ -0,0 +1,87 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package utf8internal contains low-level utf8-related constants, tables, etc.
// that are used internally by the text package.
package utf8internal

// The default lowest and highest continuation byte. A continuation byte of a
// well-formed multi-byte UTF-8 sequence has the bit pattern 10xxxxxx.
const (
	LoCB = 0x80 // 1000 0000
	HiCB = 0xBF // 1011 1111
)
// Constants related to getting information of first bytes of UTF-8 sequences.
const (
	// ASCII identifies a UTF-8 byte as ASCII.
	ASCII = as

	// FirstInvalid indicates a byte is invalid as a first byte of a UTF-8
	// sequence.
	FirstInvalid = xx

	// SizeMask is a mask for the size bits. Use x&SizeMask to get the size.
	SizeMask = 7

	// AcceptShift is the right-shift count for the first byte info byte to get
	// the index into the AcceptRanges table. See AcceptRanges.
	AcceptShift = 4

	// The names of these constants are chosen to give nice alignment in the
	// table below. The first nibble is an index into acceptRanges or F for
	// special one-byte cases. The second nibble is the Rune length or the
	// Status for the special one-byte case.
	xx = 0xF1 // invalid: size 1
	as = 0xF0 // ASCII: size 1
	s1 = 0x02 // accept 0, size 2
	s2 = 0x13 // accept 1, size 3
	s3 = 0x03 // accept 0, size 3
	s4 = 0x23 // accept 2, size 3
	s5 = 0x34 // accept 3, size 4
	s6 = 0x04 // accept 0, size 4
	s7 = 0x44 // accept 4, size 4
)
// First is information about the first byte in a UTF-8 sequence, indexed by
// that byte's value. Each entry packs two nibbles (see the s* constants
// above): the low nibble is the sequence size in bytes (or the special
// one-byte status for the 0xF* codes), and the high nibble is an index into
// AcceptRanges giving the valid range for the second byte.
var First = [256]uint8{
	//   1  2  3  4  5  6  7  8  9  A  B  C  D  E  F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x00-0x0F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x10-0x1F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x20-0x2F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x30-0x3F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x40-0x4F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x50-0x5F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x60-0x6F
	as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, as, // 0x70-0x7F
	//   1  2  3  4  5  6  7  8  9  A  B  C  D  E  F
	xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x80-0x8F
	xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0x90-0x9F
	xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xA0-0xAF
	xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xB0-0xBF
	xx, xx, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xC0-0xCF
	s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, s1, // 0xD0-0xDF
	s2, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s3, s4, s3, s3, // 0xE0-0xEF
	s5, s6, s6, s6, s7, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, xx, // 0xF0-0xFF
}
// AcceptRange gives the range of valid values for the second byte in a UTF-8
// sequence for any value for First that is not ASCII or FirstInvalid.
type AcceptRange struct {
	Lo uint8 // lowest value for second byte.
	Hi uint8 // highest value for second byte.
}

// AcceptRanges is a slice of AcceptRange values. For a given byte sequence b
//
//	AcceptRanges[First[b[0]]>>AcceptShift]
//
// will give the value of AcceptRange for the multi-byte UTF-8 sequence starting
// at b[0]. The non-default entries encode the standard UTF-8 validity rules
// that exclude overlong encodings, surrogates, and values above U+10FFFF.
var AcceptRanges = [...]AcceptRange{
	0: {LoCB, HiCB}, // default: full continuation range
	1: {0xA0, HiCB}, // first byte 0xE0 (s2): rejects overlong 3-byte forms
	2: {LoCB, 0x9F}, // first byte 0xED (s4): rejects surrogates U+D800..U+DFFF
	3: {0x90, HiCB}, // first byte 0xF0 (s5): rejects overlong 4-byte forms
	4: {LoCB, 0x8F}, // first byte 0xF4 (s7): rejects values above U+10FFFF
}

187
vendor/golang.org/x/text/runes/cond.go generated vendored Normal file
View File

@ -0,0 +1,187 @@
// Copyright 2015 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package runes
import (
"unicode/utf8"
"golang.org/x/text/transform"
)
// Note: below we pass invalid UTF-8 to the tIn and tNotIn transformers as is.
// This is done for various reasons:
// - To retain the semantics of the Nop transformer: if input is passed to a Nop
// one would expect it to be unchanged.
// - It would be very expensive to pass a converted RuneError to a transformer:
// a transformer might need more source bytes after RuneError, meaning that
// the only way to pass it safely is to create a new buffer and manage the
// intermingling of RuneErrors and normal input.
// - Many transformers leave ill-formed UTF-8 as is, so this is not
// inconsistent. Generally ill-formed UTF-8 is only replaced if it is a
// logical consequence of the operation (as for Map) or if it otherwise would
// pose security concerns (as for Remove).
// - An alternative would be to return an error on ill-formed UTF-8, but this
// would be inconsistent with other operations.
// If returns a transformer that applies tIn to consecutive runes for which
// s.Contains(r) and tNotIn to consecutive runes for which !s.Contains(r). Reset
// is called on tIn and tNotIn at the start of each run. A Nop transformer will
// substitute a nil value passed to tIn or tNotIn. Invalid UTF-8 is translated
// to RuneError to determine which transformer to apply, but is passed as is to
// the respective transformer.
func If(s Set, tIn, tNotIn transform.Transformer) Transformer {
	if tIn == nil && tNotIn == nil {
		return Transformer{transform.Nop}
	}

	// Normalize either side to a non-nil SpanningTransformer, wrapping plain
	// Transformers in dummySpan so cond can treat both uniformly.
	toSpanner := func(t transform.Transformer) transform.SpanningTransformer {
		if t == nil {
			t = transform.Nop
		}
		if st, ok := t.(transform.SpanningTransformer); ok {
			return st
		}
		return dummySpan{t}
	}

	c := &cond{
		tIn:    toSpanner(tIn),
		tNotIn: toSpanner(tNotIn),
		f:      s.Contains,
	}
	c.Reset()
	return Transformer{c}
}
// dummySpan wraps a plain transform.Transformer so that it satisfies
// transform.SpanningTransformer. Its Span never makes progress, which forces
// cond to fall back to calling Transform on the wrapped transformer.
type dummySpan struct{ transform.Transformer }

// Span always reports zero unchanged bytes (see type comment).
func (d dummySpan) Span(src []byte, atEOF bool) (n int, err error) {
	return 0, transform.ErrEndOfSpan
}
// cond is the transformer built by If. It alternates between two
// sub-transformers, tracking which one applies to the current run of runes.
type cond struct {
	tIn, tNotIn transform.SpanningTransformer
	f func(rune) bool               // set membership predicate (s.Contains)
	check func(rune) bool // current check to perform
	t transform.SpanningTransformer // current transformer to use
}

// Reset implements transform.Transformer.
func (t *cond) Reset() {
	t.check = t.is
	t.t = t.tIn
	t.t.Reset() // notIn will be reset on first usage.
}
// is is the check used while inside a run of set members. On the first rune
// outside the set it flips state to the "not in" transformer (resetting it)
// and reports false, ending the current run.
func (t *cond) is(r rune) bool {
	if !t.f(r) {
		t.check, t.t = t.isNot, t.tNotIn
		t.tNotIn.Reset()
		return false
	}
	return true
}

// isNot mirrors is: it runs while runes are outside the set and flips back to
// the "in" transformer (resetting it) on the first set member.
func (t *cond) isNot(r rune) bool {
	if t.f(r) {
		t.check, t.t = t.is, t.tIn
		t.tIn.Reset()
		return false
	}
	return true
}
// This implementation of Span doesn't help all too much, but it needs to be
// there to satisfy this package's Transformer interface.
//
// TODO: there is certainly room for improvement, though. For example, if
// t.t == transform.Nop (which will be a common occurrence) it will save a
// bundle to special-case that loop.
func (t *cond) Span(src []byte, atEOF bool) (n int, err error) {
	p := 0
	for n < len(src) && err == nil {
		// Don't process too much at a time as the Spanner that will be
		// called on this block may terminate early.
		const maxChunk = 4096
		max := len(src)
		if v := n + maxChunk; v < max {
			max = v
		}
		atEnd := false
		size := 0
		current := t.t
		// Advance p to the end of the current run: the first rune for which
		// t.check flips state, or the end of the chunk. [n, p) is then a run
		// handled entirely by the current sub-transformer.
		for ; p < max; p += size {
			r := rune(src[p])
			if r < utf8.RuneSelf {
				size = 1
			} else if r, size = utf8.DecodeRune(src[p:]); size == 1 {
				// Ill-formed byte: ask for more input if the rune may merely
				// be truncated; otherwise check RuneError (the bytes are
				// still passed to the sub-transformer as is).
				if !atEOF && !utf8.FullRune(src[p:]) {
					err = transform.ErrShortSrc
					break
				}
			}
			if !t.check(r) {
				// The next rune will be the start of a new run.
				atEnd = true
				break
			}
		}
		n2, err2 := current.Span(src[n:p], atEnd || (atEOF && p == len(src)))
		n += n2
		if err2 != nil {
			return n, err2
		}
		// At this point either err != nil or t.check will pass for the rune at p.
		p = n + size
	}
	return n, err
}
// Transform implements transform.Transformer. It partitions src into maximal
// runs of runes that all pass (or all fail) the current check and feeds each
// run to the matching sub-transformer, switching state between runs.
func (t *cond) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	p := 0
	for nSrc < len(src) && err == nil {
		// Don't process too much at a time, as the work might be wasted if the
		// destination buffer isn't large enough to hold the result or a
		// transform returns an error early.
		const maxChunk = 4096
		max := len(src)
		if n := nSrc + maxChunk; n < len(src) {
			max = n
		}
		atEnd := false
		size := 0
		current := t.t
		// Advance p to the end of the current run: the first rune for which
		// t.check flips state, or the end of the chunk.
		for ; p < max; p += size {
			r := rune(src[p])
			if r < utf8.RuneSelf {
				size = 1
			} else if r, size = utf8.DecodeRune(src[p:]); size == 1 {
				// Ill-formed byte: ask for more input if the rune may merely
				// be truncated; otherwise check RuneError (the bytes are
				// still passed to the sub-transformer as is).
				if !atEOF && !utf8.FullRune(src[p:]) {
					err = transform.ErrShortSrc
					break
				}
			}
			if !t.check(r) {
				// The next rune will be the start of a new run.
				atEnd = true
				break
			}
		}
		nDst2, nSrc2, err2 := current.Transform(dst[nDst:], src[nSrc:p], atEnd || (atEOF && p == len(src)))
		nDst += nDst2
		nSrc += nSrc2
		if err2 != nil {
			return nDst, nSrc, err2
		}
		// At this point either err != nil or t.check will pass for the rune at p.
		p = nSrc + size
	}
	return nDst, nSrc, err
}

355
vendor/golang.org/x/text/runes/runes.go generated vendored Normal file
View File

@ -0,0 +1,355 @@
// Copyright 2014 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Package runes provide transforms for UTF-8 encoded text.
package runes // import "golang.org/x/text/runes"
import (
"unicode"
"unicode/utf8"
"golang.org/x/text/transform"
)
// A Set is a collection of runes.
type Set interface {
	// Contains returns true if r is contained in the set.
	Contains(r rune) bool
}

// setFunc adapts an ordinary rune predicate to the Set interface.
type setFunc func(rune) bool

// Contains reports whether r is in the set by evaluating the predicate.
func (s setFunc) Contains(r rune) bool {
	return s(r)
}
// Note: using funcs here instead of wrapping types results in cleaner
// documentation and a smaller API.

// In creates a Set with a Contains method that returns true for all runes in
// the given RangeTable.
func In(rt *unicode.RangeTable) Set {
	return setFunc(func(r rune) bool { return unicode.Is(rt, r) })
}

// NotIn creates a Set with a Contains method that returns true for all runes
// not in the given RangeTable.
func NotIn(rt *unicode.RangeTable) Set {
	return setFunc(func(r rune) bool { return !unicode.Is(rt, r) })
}

// Predicate creates a Set with a Contains method that returns f(r).
func Predicate(f func(rune) bool) Set {
	return setFunc(f)
}
// Transformer implements the transform.Transformer interface.
type Transformer struct {
	t transform.SpanningTransformer
}

// Transform delegates to the wrapped transformer.
func (t Transformer) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	return t.t.Transform(dst, src, atEOF)
}

// Span delegates to the wrapped transformer.
func (t Transformer) Span(b []byte, atEOF bool) (n int, err error) {
	return t.t.Span(b, atEOF)
}

// Reset delegates to the wrapped transformer.
func (t Transformer) Reset() { t.t.Reset() }
// Bytes returns a new byte slice with the result of converting b using t. It
// calls Reset on t. It returns nil if any error was found. This can only happen
// if an error-producing Transformer is passed to If.
func (t Transformer) Bytes(b []byte) []byte {
	out, _, err := transform.Bytes(t, b)
	if err != nil {
		return nil
	}
	return out
}

// String returns a string with the result of converting s using t. It calls
// Reset on t. It returns the empty string if any error was found. This can only
// happen if an error-producing Transformer is passed to If.
func (t Transformer) String(s string) string {
	out, _, err := transform.String(t, s)
	if err != nil {
		return ""
	}
	return out
}
// TODO:
// - Copy: copying strings and bytes in whole-rune units.
// - Validation (maybe)
// - Well-formed-ness (maybe)

// runeErrorString is the 3-byte UTF-8 encoding of the replacement character.
const runeErrorString = string(utf8.RuneError)

// Remove returns a Transformer that removes runes r for which s.Contains(r).
// Illegal input bytes are replaced by RuneError before being passed to f.
func Remove(s Set) Transformer {
	contains := s.Contains
	if f, ok := s.(setFunc); ok {
		// Calling the underlying func directly rather than through the
		// interface cuts the running time of BenchmarkRemove roughly in half
		// for sets created by Predicate.
		// TODO: special-case RangeTables as well.
		contains = f
	}
	return Transformer{remove(contains)}
}
// TODO: remove transform.RemoveFunc.

// remove is a Transformer that drops every rune for which the underlying
// predicate returns true.
type remove func(r rune) bool

// Reset is a no-op; remove carries no state.
func (remove) Reset() {}

// Span implements transform.Spanner. It reports the length of the leading
// prefix of src that Transform would pass through unchanged.
func (t remove) Span(src []byte, atEOF bool) (n int, err error) {
	for r, size := rune(0), 0; n < len(src); {
		if r = rune(src[n]); r < utf8.RuneSelf {
			size = 1
		} else if r, size = utf8.DecodeRune(src[n:]); size == 1 {
			// Invalid rune.
			if !atEOF && !utf8.FullRune(src[n:]) {
				// Possibly a truncated rune at the end of input; ask for more.
				err = transform.ErrShortSrc
			} else {
				// Ill-formed byte: Transform would rewrite it, so span ends.
				err = transform.ErrEndOfSpan
			}
			break
		}
		if t(r) {
			// This rune would be removed: the unchanged span ends here.
			err = transform.ErrEndOfSpan
			break
		}
		n += size
	}
	return
}
// Transform implements transform.Transformer. Runes for which t(r) is true
// are dropped; all others are copied through. Ill-formed input is rewritten
// to utf8.RuneError (unless RuneError itself is removed) so that invalid
// fragments cannot recombine into valid UTF-8 downstream.
func (t remove) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	for r, size := rune(0), 0; nSrc < len(src); {
		if r = rune(src[nSrc]); r < utf8.RuneSelf {
			size = 1
		} else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 {
			// Invalid rune.
			if !atEOF && !utf8.FullRune(src[nSrc:]) {
				err = transform.ErrShortSrc
				break
			}
			// We replace illegal bytes with RuneError. Not doing so might
			// otherwise turn a sequence of invalid UTF-8 into valid UTF-8.
			// The resulting byte sequence may subsequently contain runes
			// for which t(r) is true that were passed unnoticed.
			if !t(utf8.RuneError) {
				if nDst+3 > len(dst) {
					err = transform.ErrShortDst
					break
				}
				dst[nDst+0] = runeErrorString[0]
				dst[nDst+1] = runeErrorString[1]
				dst[nDst+2] = runeErrorString[2]
				nDst += 3
			}
			nSrc++
			continue
		}
		if t(r) {
			// Drop this rune: skip it in src, write nothing.
			nSrc += size
			continue
		}
		if nDst+size > len(dst) {
			err = transform.ErrShortDst
			break
		}
		// Copy the rune's bytes through verbatim.
		for i := 0; i < size; i++ {
			dst[nDst] = src[nSrc]
			nDst++
			nSrc++
		}
	}
	return
}
// Map returns a Transformer that maps the runes in the input using the given
// mapping. Illegal bytes in the input are converted to utf8.RuneError before
// being passed to the mapping func.
func Map(mapping func(rune) rune) Transformer {
	return Transformer{mapper(mapping)}
}

// mapper applies a rune-to-rune mapping function to its input.
type mapper func(rune) rune

// Reset is a no-op; mapper carries no state.
func (mapper) Reset() {}
// Span implements transform.Spanner. It reports the length of the leading
// prefix of src that the mapping leaves unchanged.
func (t mapper) Span(src []byte, atEOF bool) (n int, err error) {
	for r, size := rune(0), 0; n < len(src); n += size {
		if r = rune(src[n]); r < utf8.RuneSelf {
			size = 1
		} else if r, size = utf8.DecodeRune(src[n:]); size == 1 {
			// Invalid rune.
			if !atEOF && !utf8.FullRune(src[n:]) {
				// Possibly a truncated rune at the end of input; ask for more.
				err = transform.ErrShortSrc
			} else {
				// Ill-formed byte: Transform would rewrite it, so span ends.
				err = transform.ErrEndOfSpan
			}
			break
		}
		if t(r) != r {
			// The mapping changes this rune: the unchanged span ends here.
			err = transform.ErrEndOfSpan
			break
		}
	}
	return n, err
}
// Transform implements transform.Transformer. Each rune r in src is replaced
// by t(r) in dst; ill-formed bytes are mapped through t(utf8.RuneError).
func (t mapper) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	var replacement rune
	var b [utf8.UTFMax]byte // scratch buffer for encoding replacements
	for r, size := rune(0), 0; nSrc < len(src); {
		if r = rune(src[nSrc]); r < utf8.RuneSelf {
			// ASCII fast path: if the replacement is also ASCII, copy one byte.
			if replacement = t(r); replacement < utf8.RuneSelf {
				if nDst == len(dst) {
					err = transform.ErrShortDst
					break
				}
				dst[nDst] = byte(replacement)
				nDst++
				nSrc++
				continue
			}
			// ASCII rune mapped to a multi-byte rune: fall through to the
			// general encoding path below.
			size = 1
		} else if r, size = utf8.DecodeRune(src[nSrc:]); size == 1 {
			// Invalid rune.
			if !atEOF && !utf8.FullRune(src[nSrc:]) {
				err = transform.ErrShortSrc
				break
			}
			// If RuneError maps to itself, emit its 3-byte encoding for the
			// single invalid byte; otherwise fall through and encode the
			// replacement in the general path below.
			if replacement = t(utf8.RuneError); replacement == utf8.RuneError {
				if nDst+3 > len(dst) {
					err = transform.ErrShortDst
					break
				}
				dst[nDst+0] = runeErrorString[0]
				dst[nDst+1] = runeErrorString[1]
				dst[nDst+2] = runeErrorString[2]
				nDst += 3
				nSrc++
				continue
			}
		} else if replacement = t(r); replacement == r {
			// Unchanged multi-byte rune: copy its bytes through verbatim.
			if nDst+size > len(dst) {
				err = transform.ErrShortDst
				break
			}
			for i := 0; i < size; i++ {
				dst[nDst] = src[nSrc]
				nDst++
				nSrc++
			}
			continue
		}
		// General case: encode the replacement rune into the scratch buffer
		// and copy it out.
		n := utf8.EncodeRune(b[:], replacement)
		if nDst+n > len(dst) {
			err = transform.ErrShortDst
			break
		}
		for i := 0; i < n; i++ {
			dst[nDst] = b[i]
			nDst++
		}
		nSrc += size
	}
	return
}
// ReplaceIllFormed returns a transformer that replaces all input bytes that are
// not part of a well-formed UTF-8 code sequence with utf8.RuneError.
func ReplaceIllFormed() Transformer {
	return Transformer{&replaceIllFormed{}}
}

// replaceIllFormed is the stateless transformer behind ReplaceIllFormed.
type replaceIllFormed struct{ transform.NopResetter }
// Span implements transform.Spanner. It reports the length of the leading
// well-formed prefix of src that Transform would copy through unchanged.
func (t replaceIllFormed) Span(src []byte, atEOF bool) (n int, err error) {
	for n < len(src) {
		// ASCII fast path.
		if src[n] < utf8.RuneSelf {
			n++
			continue
		}
		// A well-formed non-ASCII rune: keep spanning.
		if r, size := utf8.DecodeRune(src[n:]); size != 1 || r != utf8.RuneError {
			n += size
			continue
		}
		if !atEOF && !utf8.FullRune(src[n:]) {
			// Possibly a truncated rune at the end of input; ask for more.
			err = transform.ErrShortSrc
		} else {
			// Genuinely ill-formed byte: Transform would rewrite it.
			err = transform.ErrEndOfSpan
		}
		break
	}
	return n, err
}
// Transform implements transform.Transformer. Well-formed runes are copied
// through unchanged; each ill-formed byte is replaced by the 3-byte UTF-8
// encoding of utf8.RuneError.
func (t replaceIllFormed) Transform(dst, src []byte, atEOF bool) (nDst, nSrc int, err error) {
	for nSrc < len(src) {
		// ASCII fast path.
		if r := src[nSrc]; r < utf8.RuneSelf {
			if nDst == len(dst) {
				err = transform.ErrShortDst
				break
			}
			dst[nDst] = r
			nDst++
			nSrc++
			continue
		}
		// Look for a valid non-ASCII rune.
		if _, size := utf8.DecodeRune(src[nSrc:]); size != 1 {
			if size != copy(dst[nDst:], src[nSrc:nSrc+size]) {
				// Partial copy into dst; nDst is not advanced, so the next
				// call will overwrite the partial bytes.
				err = transform.ErrShortDst
				break
			}
			nDst += size
			nSrc += size
			continue
		}
		// Look for short source data.
		if !atEOF && !utf8.FullRune(src[nSrc:]) {
			err = transform.ErrShortSrc
			break
		}
		// We have an invalid rune.
		if nDst+3 > len(dst) {
			err = transform.ErrShortDst
			break
		}
		dst[nDst+0] = runeErrorString[0]
		dst[nDst+1] = runeErrorString[1]
		dst[nDst+2] = runeErrorString[2]
		nDst += 3
		nSrc++
	}
	return nDst, nSrc, err
}

18
vendor/vendor.json vendored
View File

@ -1320,6 +1320,24 @@
"revision": "d3c2f16719dedd34911cd626a98bd5879e1caaff",
"revisionTime": "2018-04-03T19:54:48Z"
},
{
"checksumSHA1": "B8KN0npDVBBnSDoL8htTSBpFgZ0=",
"path": "github.com/hashicorp/vault-plugin-secrets-ad/plugin",
"revision": "321ea9aa40719a982e9ad39fecd911a212d0d7c0",
"revisionTime": "2018-05-24T23:02:05Z"
},
{
"checksumSHA1": "qHGmA9y3hKMBSLRWLifD37EaHP4=",
"path": "github.com/hashicorp/vault-plugin-secrets-ad/plugin/client",
"revision": "321ea9aa40719a982e9ad39fecd911a212d0d7c0",
"revisionTime": "2018-05-24T23:02:05Z"
},
{
"checksumSHA1": "/wFdQSWF1zexkefiI7j+LrREMHk=",
"path": "github.com/hashicorp/vault-plugin-secrets-ad/plugin/util",
"revision": "321ea9aa40719a982e9ad39fecd911a212d0d7c0",
"revisionTime": "2018-05-24T23:02:05Z"
},
{
"checksumSHA1": "0BXf2h4FJSUTdVK3m75a1KXnYVA=",
"path": "github.com/hashicorp/vault-plugin-secrets-gcp/plugin",

Some files were not shown because too many files have changed in this diff Show More