diff --git a/.github/workflows/test-go.yml b/.github/workflows/test-go.yml
index 24825b5272b6..63e96ec14a57 100644
--- a/.github/workflows/test-go.yml
+++ b/.github/workflows/test-go.yml
@@ -510,7 +510,7 @@ jobs:
- run: |
rm -rf test-results/go-test/logs
ls -lhR test-results/go-test
- find test-results/go-test -mindepth 1 -mtime +3 -delete
+ find test-results/go-test -mindepth 1 -type f -mtime +3 -delete
# Prune invalid timing files
find test-results/go-test -mindepth 1 -type f -name "*.json" -exec sh -c '
diff --git a/audit/types.go b/audit/types.go
index 3434ff84d840..8d8bd158c339 100644
--- a/audit/types.go
+++ b/audit/types.go
@@ -279,6 +279,11 @@ type Backend interface {
// nodes for node and pipeline registration.
event.PipelineReader
+ // IsFallback can be used to determine if this audit backend device is intended to
+ // be used as a fallback to catch all events that would otherwise go unwritten when
+ // only filtered pipelines are in use.
+ IsFallback() bool
+
// LogRequest is used to synchronously log a request. This is done after the
// request is authorized but before the request is executed. The arguments
// MUST not be modified in any way. They should be deep copied if this is
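The new interface method only exposes a flag; nothing in this diff shows how it is consumed. The sketch below is a minimal, hypothetical illustration of the kind of routing decision the flag enables. The auditDevice interface and fallbackTargets helper are names invented here for illustration, not Vault's actual broker code:

```go
package main

import "fmt"

// auditDevice is a trimmed stand-in for audit.Backend, keeping only the two
// capabilities relevant to this sketch.
type auditDevice interface {
	IsFallback() bool
	HasFiltering() bool
}

// fallbackTargets returns the devices that should receive an event which no
// filtered pipeline accepted. The policy here is purely illustrative.
func fallbackTargets(devices []auditDevice) []auditDevice {
	var out []auditDevice
	for _, d := range devices {
		if d.IsFallback() {
			out = append(out, d)
		}
	}
	return out
}

type fakeDevice struct{ fallback, filtered bool }

func (f fakeDevice) IsFallback() bool   { return f.fallback }
func (f fakeDevice) HasFiltering() bool { return f.filtered }

func main() {
	devices := []auditDevice{
		fakeDevice{filtered: true},
		fakeDevice{fallback: true},
	}
	fmt.Printf("%d fallback device(s) would catch unmatched events\n", len(fallbackTargets(devices)))
}
```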
diff --git a/builtin/audit/file/backend.go b/builtin/audit/file/backend.go
index 2681ee244e99..44fd2438e032 100644
--- a/builtin/audit/file/backend.go
+++ b/builtin/audit/file/backend.go
@@ -16,6 +16,7 @@ import (
"sync/atomic"
"github.com/hashicorp/eventlogger"
+ "github.com/hashicorp/go-secure-stdlib/parseutil"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/internal/observability/event"
"github.com/hashicorp/vault/sdk/helper/salt"
@@ -36,6 +37,7 @@ var _ audit.Backend = (*Backend)(nil)
// or reset the write cursor, this should be done in the future.
type Backend struct {
f *os.File
+ fallback bool
fileLock sync.RWMutex
formatter *audit.EntryFormatterWriter
formatConfig audit.FormatterConfig
@@ -60,6 +62,21 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
return nil, fmt.Errorf("%s: nil salt view", op)
}
+ // The config options 'fallback' and 'filter' are mutually exclusive; a fallback
+ // device catches everything, so it cannot be allowed to filter.
+ var fallback bool
+ var err error
+ if fallbackRaw, ok := conf.Config["fallback"]; ok {
+ fallback, err = parseutil.ParseBool(fallbackRaw)
+ if err != nil {
+ return nil, fmt.Errorf("%s: unable to parse 'fallback': %w", op, err)
+ }
+ }
+
+ if _, ok := conf.Config["filter"]; ok && fallback {
+ return nil, fmt.Errorf("%s: cannot configure a fallback device with a filter: %w", op, event.ErrInvalidParameter)
+ }
+
// Get file path from config or fall back to the old option name ('path') for compatibility
// (see commit bac4fe0799a372ba1245db642f3f6cd1f1d02669).
var filePath string
@@ -106,6 +123,7 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
}
b := &Backend{
+ fallback: fallback,
filePath: filePath,
formatConfig: cfg,
mode: mode,
@@ -550,3 +568,10 @@ func (b *Backend) EventType() eventlogger.EventType {
func (b *Backend) HasFiltering() bool {
return len(b.nodeIDList) > 0 && b.nodeMap[b.nodeIDList[0]].Type() == eventlogger.NodeTypeFilter
}
+
+// IsFallback can be used to determine if this audit backend device is intended to
+// be used as a fallback to catch all events that would otherwise go unwritten when
+// only filtered pipelines are in use.
+func (b *Backend) IsFallback() bool {
+ return b.fallback
+}
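In each backend's Factory, the 'fallback' option is parsed with parseutil.ParseBool rather than a plain string comparison, so users are not restricted to the literal strings "true" and "false". The stdlib-only sketch below shows the boolean spellings Go conventionally accepts; parseutil.ParseBool is broadly compatible with these for string input and additionally handles non-string types. This is an illustration, not the backend's code:

```go
package main

import (
	"fmt"
	"strconv"
)

func main() {
	// Spellings accepted by strconv.ParseBool; parseutil.ParseBool behaves
	// similarly for string values of the 'fallback' config option.
	for _, raw := range []string{"true", "T", "1", "false", "0", "not-a-bool"} {
		v, err := strconv.ParseBool(raw)
		fmt.Printf("%-12q -> %v (err: %v)\n", raw, v, err)
	}
	// In Factory, a parse error is wrapped and returned, e.g.
	// "file.Factory: unable to parse 'fallback': ...".
}
```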
diff --git a/builtin/audit/file/backend_test.go b/builtin/audit/file/backend_test.go
index 17ea7fd20365..1e47f0f2cd96 100644
--- a/builtin/audit/file/backend_test.go
+++ b/builtin/audit/file/backend_test.go
@@ -576,3 +576,129 @@ func TestBackend_configureFilterFormatterSink(t *testing.T) {
node = b.nodeMap[id]
require.Equal(t, eventlogger.NodeTypeSink, node.Type())
}
+
+// TestBackend_Factory_Conf is used to ensure that any supplied configuration
+// is validated and tested.
+func TestBackend_Factory_Conf(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+
+ tests := map[string]struct {
+ backendConfig *audit.BackendConfig
+ isErrorExpected bool
+ expectedErrorMessage string
+ }{
+ "nil-salt-config": {
+ backendConfig: &audit.BackendConfig{
+ SaltConfig: nil,
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "file.Factory: nil salt config",
+ },
+ "nil-salt-view": {
+ backendConfig: &audit.BackendConfig{
+ SaltConfig: &salt.Config{},
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "file.Factory: nil salt view",
+ },
+ "fallback-device-with-filter": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "true",
+ "file_path": discard,
+ "filter": "mount_type == kv",
+ },
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "file.Factory: cannot configure a fallback device with a filter: invalid parameter",
+ },
+ "non-fallback-device-with-filter": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "false",
+ "file_path": discard,
+ "filter": "mount_type == kv",
+ },
+ },
+ isErrorExpected: false,
+ },
+ }
+
+ for name, tc := range tests {
+ name := name
+ tc := tc
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ be, err := Factory(ctx, tc.backendConfig, true, nil)
+
+ switch {
+ case tc.isErrorExpected:
+ require.Error(t, err)
+ require.EqualError(t, err, tc.expectedErrorMessage)
+ default:
+ require.NoError(t, err)
+ require.NotNil(t, be)
+ }
+ })
+ }
+}
+
+// TestBackend_IsFallback ensures that the 'fallback' config setting is parsed
+// and set correctly, then exposed via the interface method IsFallback().
+func TestBackend_IsFallback(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+
+ tests := map[string]struct {
+ backendConfig *audit.BackendConfig
+ isFallbackExpected bool
+ }{
+ "fallback": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "true",
+ "file_path": discard,
+ },
+ },
+ isFallbackExpected: true,
+ },
+ "no-fallback": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "false",
+ "file_path": discard,
+ },
+ },
+ isFallbackExpected: false,
+ },
+ }
+
+ for name, tc := range tests {
+ name := name
+ tc := tc
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ be, err := Factory(ctx, tc.backendConfig, true, nil)
+ require.NoError(t, err)
+ require.NotNil(t, be)
+ require.Equal(t, tc.isFallbackExpected, be.IsFallback())
+ })
+ }
+}
diff --git a/builtin/audit/socket/backend.go b/builtin/audit/socket/backend.go
index 09662c2ab683..84629efe3108 100644
--- a/builtin/audit/socket/backend.go
+++ b/builtin/audit/socket/backend.go
@@ -29,6 +29,7 @@ type Backend struct {
sync.Mutex
address string
connection net.Conn
+ fallback bool
formatter *audit.EntryFormatterWriter
formatConfig audit.FormatterConfig
name string
@@ -73,12 +74,27 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
return nil, fmt.Errorf("%s: failed to parse 'write_timeout': %w", op, err)
}
+ // The config options 'fallback' and 'filter' are mutually exclusive; a fallback
+ // device catches everything, so it cannot be allowed to filter.
+ var fallback bool
+ if fallbackRaw, ok := conf.Config["fallback"]; ok {
+ fallback, err = parseutil.ParseBool(fallbackRaw)
+ if err != nil {
+ return nil, fmt.Errorf("%s: unable to parse 'fallback': %w", op, err)
+ }
+ }
+
+ if _, ok := conf.Config["filter"]; ok && fallback {
+ return nil, fmt.Errorf("%s: cannot configure a fallback device with a filter: %w", op, event.ErrInvalidParameter)
+ }
+
cfg, err := formatterConfig(conf.Config)
if err != nil {
return nil, fmt.Errorf("%s: failed to create formatter config: %w", op, err)
}
b := &Backend{
+ fallback: fallback,
address: address,
formatConfig: cfg,
name: conf.MountPath,
@@ -443,3 +459,10 @@ func (b *Backend) EventType() eventlogger.EventType {
func (b *Backend) HasFiltering() bool {
return len(b.nodeIDList) > 0 && b.nodeMap[b.nodeIDList[0]].Type() == eventlogger.NodeTypeFilter
}
+
+// IsFallback can be used to determine if this audit backend device is intended to
+// be used as a fallback to catch all events that would otherwise go unwritten when
+// only filtered pipelines are in use.
+func (b *Backend) IsFallback() bool {
+ return b.fallback
+}
diff --git a/builtin/audit/socket/backend_test.go b/builtin/audit/socket/backend_test.go
index d1dfc384720c..3693f8fdeeb4 100644
--- a/builtin/audit/socket/backend_test.go
+++ b/builtin/audit/socket/backend_test.go
@@ -4,10 +4,13 @@
package socket
import (
+ "context"
"testing"
"github.com/hashicorp/eventlogger"
"github.com/hashicorp/vault/audit"
+ "github.com/hashicorp/vault/sdk/helper/salt"
+ "github.com/hashicorp/vault/sdk/logical"
"github.com/stretchr/testify/require"
)
@@ -329,3 +332,192 @@ func TestBackend_configureFilterFormatterSink(t *testing.T) {
node = b.nodeMap[id]
require.Equal(t, eventlogger.NodeTypeSink, node.Type())
}
+
+// TestBackend_Factory_Conf is used to ensure that any supplied configuration
+// is validated and tested.
+func TestBackend_Factory_Conf(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+
+ tests := map[string]struct {
+ backendConfig *audit.BackendConfig
+ isErrorExpected bool
+ expectedErrorMessage string
+ }{
+ "nil-salt-config": {
+ backendConfig: &audit.BackendConfig{
+ SaltConfig: nil,
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "socket.Factory: nil salt config",
+ },
+ "nil-salt-view": {
+ backendConfig: &audit.BackendConfig{
+ SaltConfig: &salt.Config{},
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "socket.Factory: nil salt view",
+ },
+ "no-address": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{},
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "socket.Factory: address is required",
+ },
+ "empty-address": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "address": "",
+ },
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "socket.Factory: error configuring sink node: socket.(Backend).configureSinkNode: address is required: invalid parameter",
+ },
+ "whitespace-address": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "address": " ",
+ },
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "socket.Factory: error configuring sink node: socket.(Backend).configureSinkNode: address is required: invalid parameter",
+ },
+ "write-duration-valid": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "address": "hashicorp.com",
+ "write_timeout": "5s",
+ },
+ },
+ isErrorExpected: false,
+ },
+ "write-duration-not-valid": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "address": "hashicorp.com",
+ "write_timeout": "qwerty",
+ },
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "socket.Factory: failed to parse 'write_timeout': time: invalid duration \"qwerty\"",
+ },
+ "non-fallback-device-with-filter": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "address": "hashicorp.com",
+ "write_timeout": "5s",
+ "fallback": "false",
+ "filter": "mount_type == kv",
+ },
+ },
+ isErrorExpected: false,
+ },
+ "fallback-device-with-filter": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "address": "hashicorp.com",
+ "write_timeout": "2s",
+ "fallback": "true",
+ "filter": "mount_type == kv",
+ },
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "socket.Factory: cannot configure a fallback device with a filter: invalid parameter",
+ },
+ }
+
+ for name, tc := range tests {
+ name := name
+ tc := tc
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ be, err := Factory(ctx, tc.backendConfig, true, nil)
+
+ switch {
+ case tc.isErrorExpected:
+ require.Error(t, err)
+ require.EqualError(t, err, tc.expectedErrorMessage)
+ default:
+ require.NoError(t, err)
+ require.NotNil(t, be)
+ }
+ })
+ }
+}
+
+// TestBackend_IsFallback ensures that the 'fallback' config setting is parsed
+// and set correctly, then exposed via the interface method IsFallback().
+func TestBackend_IsFallback(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+
+ tests := map[string]struct {
+ backendConfig *audit.BackendConfig
+ isFallbackExpected bool
+ }{
+ "fallback": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "qwerty",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "true",
+ "address": "hashicorp.com",
+ "write_timeout": "5s",
+ },
+ },
+ isFallbackExpected: true,
+ },
+ "no-fallback": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "qwerty",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "false",
+ "address": "hashicorp.com",
+ "write_timeout": "5s",
+ },
+ },
+ isFallbackExpected: false,
+ },
+ }
+
+ for name, tc := range tests {
+ name := name
+ tc := tc
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ be, err := Factory(ctx, tc.backendConfig, true, nil)
+ require.NoError(t, err)
+ require.NotNil(t, be)
+ require.Equal(t, tc.isFallbackExpected, be.IsFallback())
+ })
+ }
+}
diff --git a/builtin/audit/syslog/backend.go b/builtin/audit/syslog/backend.go
index 45d6e0762daa..dff6b5800382 100644
--- a/builtin/audit/syslog/backend.go
+++ b/builtin/audit/syslog/backend.go
@@ -12,6 +12,7 @@ import (
"sync"
"github.com/hashicorp/eventlogger"
+ "github.com/hashicorp/go-secure-stdlib/parseutil"
gsyslog "github.com/hashicorp/go-syslog"
"github.com/hashicorp/vault/audit"
"github.com/hashicorp/vault/internal/observability/event"
@@ -23,6 +24,7 @@ var _ audit.Backend = (*Backend)(nil)
// Backend is the audit backend for the syslog-based audit store.
type Backend struct {
+ fallback bool
formatter *audit.EntryFormatterWriter
formatConfig audit.FormatterConfig
logger gsyslog.Syslogger
@@ -58,6 +60,21 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
tag = "vault"
}
+ // The config options 'fallback' and 'filter' are mutually exclusive; a fallback
+ // device catches everything, so it cannot be allowed to filter.
+ var fallback bool
+ var err error
+ if fallbackRaw, ok := conf.Config["fallback"]; ok {
+ fallback, err = parseutil.ParseBool(fallbackRaw)
+ if err != nil {
+ return nil, fmt.Errorf("%s: unable to parse 'fallback': %w", op, err)
+ }
+ }
+
+ if _, ok := conf.Config["filter"]; ok && fallback {
+ return nil, fmt.Errorf("%s: cannot configure a fallback device with a filter: %w", op, event.ErrInvalidParameter)
+ }
+
cfg, err := formatterConfig(conf.Config)
if err != nil {
return nil, fmt.Errorf("%s: failed to create formatter config: %w", op, err)
@@ -70,6 +87,7 @@ func Factory(_ context.Context, conf *audit.BackendConfig, useEventLogger bool,
}
b := &Backend{
+ fallback: fallback,
formatConfig: cfg,
logger: logger,
name: conf.MountPath,
@@ -347,3 +365,10 @@ func (b *Backend) EventType() eventlogger.EventType {
func (b *Backend) HasFiltering() bool {
return len(b.nodeIDList) > 0 && b.nodeMap[b.nodeIDList[0]].Type() == eventlogger.NodeTypeFilter
}
+
+// IsFallback can be used to determine if this audit backend device is intended to
+// be used as a fallback to catch all events that would otherwise go unwritten when
+// only filtered pipelines are in use.
+func (b *Backend) IsFallback() bool {
+ return b.fallback
+}
diff --git a/builtin/audit/syslog/backend_test.go b/builtin/audit/syslog/backend_test.go
index 4aeaa5d0da5c..ba0b990803ef 100644
--- a/builtin/audit/syslog/backend_test.go
+++ b/builtin/audit/syslog/backend_test.go
@@ -4,10 +4,13 @@
package syslog
import (
+ "context"
"testing"
"github.com/hashicorp/eventlogger"
"github.com/hashicorp/vault/audit"
+ "github.com/hashicorp/vault/sdk/helper/salt"
+ "github.com/hashicorp/vault/sdk/logical"
"github.com/stretchr/testify/require"
)
@@ -311,3 +314,125 @@ func TestBackend_configureFilterFormatterSink(t *testing.T) {
node = b.nodeMap[id]
require.Equal(t, eventlogger.NodeTypeSink, node.Type())
}
+
+// TestBackend_Factory_Conf is used to ensure that any supplied configuration
+// is validated and tested.
+func TestBackend_Factory_Conf(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+
+ tests := map[string]struct {
+ backendConfig *audit.BackendConfig
+ isErrorExpected bool
+ expectedErrorMessage string
+ }{
+ "nil-salt-config": {
+ backendConfig: &audit.BackendConfig{
+ SaltConfig: nil,
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "syslog.Factory: nil salt config",
+ },
+ "nil-salt-view": {
+ backendConfig: &audit.BackendConfig{
+ SaltConfig: &salt.Config{},
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "syslog.Factory: nil salt view",
+ },
+ "non-fallback-device-with-filter": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "false",
+ "filter": "mount_type == kv",
+ },
+ },
+ isErrorExpected: false,
+ },
+ "fallback-device-with-filter": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "discard",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "true",
+ "filter": "mount_type == kv",
+ },
+ },
+ isErrorExpected: true,
+ expectedErrorMessage: "syslog.Factory: cannot configure a fallback device with a filter: invalid parameter",
+ },
+ }
+
+ for name, tc := range tests {
+ name := name
+ tc := tc
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ be, err := Factory(ctx, tc.backendConfig, true, nil)
+
+ switch {
+ case tc.isErrorExpected:
+ require.Error(t, err)
+ require.EqualError(t, err, tc.expectedErrorMessage)
+ default:
+ require.NoError(t, err)
+ require.NotNil(t, be)
+ }
+ })
+ }
+}
+
+// TestBackend_IsFallback ensures that the 'fallback' config setting is parsed
+// and set correctly, then exposed via the interface method IsFallback().
+func TestBackend_IsFallback(t *testing.T) {
+ t.Parallel()
+
+ ctx := context.Background()
+
+ tests := map[string]struct {
+ backendConfig *audit.BackendConfig
+ isFallbackExpected bool
+ }{
+ "fallback": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "qwerty",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "true",
+ },
+ },
+ isFallbackExpected: true,
+ },
+ "no-fallback": {
+ backendConfig: &audit.BackendConfig{
+ MountPath: "qwerty",
+ SaltConfig: &salt.Config{},
+ SaltView: &logical.InmemStorage{},
+ Config: map[string]string{
+ "fallback": "false",
+ },
+ },
+ isFallbackExpected: false,
+ },
+ }
+
+ for name, tc := range tests {
+ name := name
+ tc := tc
+ t.Run(name, func(t *testing.T) {
+ t.Parallel()
+
+ be, err := Factory(ctx, tc.backendConfig, true, nil)
+ require.NoError(t, err)
+ require.NotNil(t, be)
+ require.Equal(t, tc.isFallbackExpected, be.IsFallback())
+ })
+ }
+}
diff --git a/builtin/logical/database/backend_test.go b/builtin/logical/database/backend_test.go
index 0554365e1c35..f330e5e5c2a6 100644
--- a/builtin/logical/database/backend_test.go
+++ b/builtin/logical/database/backend_test.go
@@ -6,12 +6,15 @@ package database
import (
"context"
"database/sql"
+ "encoding/json"
+ "errors"
"fmt"
"log"
"net/url"
"os"
"reflect"
"strings"
+ "sync"
"testing"
"time"
@@ -35,12 +38,26 @@ import (
"github.com/mitchellh/mapstructure"
)
+func getClusterPostgresDBWithFactory(t *testing.T, factory logical.Factory) (*vault.TestCluster, logical.SystemView) {
+ t.Helper()
+ cluster, sys := getClusterWithFactory(t, factory)
+ vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "postgresql-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_PostgresMultiplexed",
+ []string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)})
+ return cluster, sys
+}
+
func getClusterPostgresDB(t *testing.T) (*vault.TestCluster, logical.SystemView) {
+ t.Helper()
+ cluster, sys := getClusterPostgresDBWithFactory(t, Factory)
+ return cluster, sys
+}
+
+func getClusterWithFactory(t *testing.T, factory logical.Factory) (*vault.TestCluster, logical.SystemView) {
t.Helper()
pluginDir := corehelpers.MakeTestPluginDir(t)
coreConfig := &vault.CoreConfig{
LogicalBackends: map[string]logical.Factory{
- "database": Factory,
+ "database": factory,
},
BuiltinRegistry: builtinplugins.Registry,
PluginDirectory: pluginDir,
@@ -53,36 +70,14 @@ func getClusterPostgresDB(t *testing.T) (*vault.TestCluster, logical.SystemView)
cores := cluster.Cores
vault.TestWaitActive(t, cores[0].Core)
- os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
-
sys := vault.TestDynamicSystemView(cores[0].Core, nil)
- vault.TestAddTestPlugin(t, cores[0].Core, "postgresql-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_PostgresMultiplexed", []string{})
return cluster, sys
}
func getCluster(t *testing.T) (*vault.TestCluster, logical.SystemView) {
t.Helper()
- pluginDir := corehelpers.MakeTestPluginDir(t)
- coreConfig := &vault.CoreConfig{
- LogicalBackends: map[string]logical.Factory{
- "database": Factory,
- },
- BuiltinRegistry: builtinplugins.Registry,
- PluginDirectory: pluginDir,
- }
-
- cluster := vault.NewTestCluster(t, coreConfig, &vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- })
- cluster.Start()
- cores := cluster.Cores
- vault.TestWaitActive(t, cores[0].Core)
-
- os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
-
- sys := vault.TestDynamicSystemView(cores[0].Core, nil)
-
+ cluster, sys := getClusterWithFactory(t, Factory)
return cluster, sys
}
@@ -515,7 +510,7 @@ func TestBackend_basic(t *testing.T) {
if credsResp.Secret.TTL != 5*time.Minute {
t.Fatalf("unexpected TTL of %d", credsResp.Secret.TTL)
}
- if !testCredsExist(t, credsResp, connURL) {
+ if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -535,7 +530,7 @@ func TestBackend_basic(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
- if testCredsExist(t, credsResp, connURL) {
+ if testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should not exist")
}
}
@@ -553,7 +548,7 @@ func TestBackend_basic(t *testing.T) {
if err != nil || (credsResp != nil && credsResp.IsError()) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
- if !testCredsExist(t, credsResp, connURL) {
+ if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -586,108 +581,118 @@ func TestBackend_basic(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
- if testCredsExist(t, credsResp, connURL) {
+ if testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should not exist")
}
}
}
-func TestBackend_connectionCrud(t *testing.T) {
- cluster, sys := getClusterPostgresDB(t)
- defer cluster.Cleanup()
+// singletonDBFactory allows us to reach into the internals of a databaseBackend
+// even when it's been created by a call to the sys mount. The factory method
+// satisfies the logical.Factory type, and lazily creates the databaseBackend
+// once the SystemView has been provided because the factory method itself is an
+// input for creating the test cluster and its system view.
+type singletonDBFactory struct {
+ once sync.Once
+ db *databaseBackend
+
+ sys logical.SystemView
+}
+
+// factory satisfies the logical.Factory type.
+func (s *singletonDBFactory) factory(context.Context, *logical.BackendConfig) (logical.Backend, error) {
+ if s.sys == nil {
+ return nil, errors.New("sys is nil")
+ }
config := logical.TestBackendConfig()
config.StorageView = &logical.InmemStorage{}
- config.System = sys
+ config.System = s.sys
- b, err := Factory(context.Background(), config)
+ var err error
+ s.once.Do(func() {
+ var b logical.Backend
+ b, err = Factory(context.Background(), config)
+ s.db = b.(*databaseBackend)
+ })
if err != nil {
- t.Fatal(err)
+ return nil, err
}
- defer b.Cleanup(context.Background())
+ if s.db == nil {
+ return nil, errors.New("db is nil")
+ }
+ return s.db, nil
+}
+
+func TestBackend_connectionCrud(t *testing.T) {
+ dbFactory := &singletonDBFactory{}
+ cluster, sys := getClusterPostgresDBWithFactory(t, dbFactory.factory)
+ defer cluster.Cleanup()
+
+ dbFactory.sys = sys
+ client := cluster.Cores[0].Client.Logical()
cleanup, connURL := postgreshelper.PrepareTestContainer(t, "13.4-buster")
defer cleanup()
+ // Mount the database plugin.
+ resp, err := client.Write("sys/mounts/database", map[string]interface{}{
+ "type": "database",
+ })
+ if err != nil {
+ t.Fatalf("err:%s resp:%#v\n", err, resp)
+ }
+
// Configure a connection
- data := map[string]interface{}{
+ resp, err = client.Write("database/config/plugin-test", map[string]interface{}{
"connection_url": "test",
"plugin_name": "postgresql-database-plugin",
"verify_connection": false,
- }
- req := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "config/plugin-test",
- Storage: config.StorageView,
- Data: data,
- }
- resp, err := b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ })
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Configure a second connection to confirm below it doesn't get restarted.
- data = map[string]interface{}{
+ resp, err = client.Write("database/config/plugin-test-hana", map[string]interface{}{
"connection_url": "test",
"plugin_name": "hana-database-plugin",
"verify_connection": false,
- }
- req = &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "config/plugin-test-hana",
- Storage: config.StorageView,
- Data: data,
- }
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ })
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Create a role
- data = map[string]interface{}{
+ resp, err = client.Write("database/roles/plugin-role-test", map[string]interface{}{
"db_name": "plugin-test",
"creation_statements": testRole,
"revocation_statements": defaultRevocationSQL,
"default_ttl": "5m",
"max_ttl": "10m",
- }
- req = &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "roles/plugin-role-test",
- Storage: config.StorageView,
- Data: data,
- }
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ })
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Update the connection
- data = map[string]interface{}{
+ resp, err = client.Write("database/config/plugin-test", map[string]interface{}{
"connection_url": connURL,
"plugin_name": "postgresql-database-plugin",
"allowed_roles": []string{"plugin-role-test"},
"username": "postgres",
"password": "secret",
"private_key": "PRIVATE_KEY",
- }
- req = &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "config/plugin-test",
- Storage: config.StorageView,
- Data: data,
- }
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ })
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
if len(resp.Warnings) == 0 {
t.Fatalf("expected warning about password in url %s, resp:%#v\n", connURL, resp)
}
- req.Operation = logical.ReadOperation
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ resp, err = client.Read("database/config/plugin-test")
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
returnedConnectionDetails := resp.Data["connection_details"].(map[string]interface{})
@@ -703,11 +708,16 @@ func TestBackend_connectionCrud(t *testing.T) {
}
// Replace connection url with templated version
- req.Operation = logical.UpdateOperation
- connURL = strings.ReplaceAll(connURL, "postgres:secret", "{{username}}:{{password}}")
- data["connection_url"] = connURL
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ templatedConnURL := strings.ReplaceAll(connURL, "postgres:secret", "{{username}}:{{password}}")
+ resp, err = client.Write("database/config/plugin-test", map[string]interface{}{
+ "connection_url": templatedConnURL,
+ "plugin_name": "postgresql-database-plugin",
+ "allowed_roles": []string{"plugin-role-test"},
+ "username": "postgres",
+ "password": "secret",
+ "private_key": "PRIVATE_KEY",
+ })
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
@@ -716,36 +726,38 @@ func TestBackend_connectionCrud(t *testing.T) {
"plugin_name": "postgresql-database-plugin",
"connection_details": map[string]interface{}{
"username": "postgres",
- "connection_url": connURL,
+ "connection_url": templatedConnURL,
},
- "allowed_roles": []string{"plugin-role-test"},
- "root_credentials_rotate_statements": []string(nil),
+ "allowed_roles": []any{"plugin-role-test"},
+ "root_credentials_rotate_statements": []any{},
"password_policy": "",
"plugin_version": "",
}
- req.Operation = logical.ReadOperation
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ resp, err = client.Read("database/config/plugin-test")
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
delete(resp.Data["connection_details"].(map[string]interface{}), "name")
if diff := deep.Equal(resp.Data, expected); diff != nil {
- t.Fatal(diff)
+ t.Fatal(strings.Join(diff, "\n"))
}
// Test endpoints for reloading plugins.
- for _, reloadPath := range []string{
- "reset/plugin-test",
- "reload/postgresql-database-plugin",
+ for _, reload := range []struct {
+ path string
+ data map[string]any
+ checkCount bool
+ }{
+ {"database/reset/plugin-test", nil, false},
+ {"database/reload/postgresql-database-plugin", nil, true},
+ {"sys/plugins/reload/backend", map[string]any{
+ "plugin": "postgresql-database-plugin",
+ }, false},
} {
getConnectionID := func(name string) string {
t.Helper()
- dbBackend, ok := b.(*databaseBackend)
- if !ok {
- t.Fatal("could not convert logical.Backend to databaseBackend")
- }
- dbi := dbBackend.connections.Get(name)
+ dbi := dbFactory.db.connections.Get(name)
if dbi == nil {
t.Fatal("no plugin-test dbi")
}
@@ -753,14 +765,8 @@ func TestBackend_connectionCrud(t *testing.T) {
}
initialID := getConnectionID("plugin-test")
hanaID := getConnectionID("plugin-test-hana")
- req = &logical.Request{
- Operation: logical.UpdateOperation,
- Path: reloadPath,
- Storage: config.StorageView,
- Data: map[string]interface{}{},
- }
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ resp, err = client.Write(reload.path, reload.data)
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
if initialID == getConnectionID("plugin-test") {
@@ -769,54 +775,43 @@ func TestBackend_connectionCrud(t *testing.T) {
if hanaID != getConnectionID("plugin-test-hana") {
t.Fatal("hana plugin got restarted but shouldn't have been")
}
- if strings.HasPrefix(reloadPath, "reload/") {
- if expected := 1; expected != resp.Data["count"] {
- t.Fatalf("expected %d but got %d", expected, resp.Data["count"])
+ if reload.checkCount {
+ actual, err := resp.Data["count"].(json.Number).Int64()
+ if err != nil {
+ t.Fatal(err)
}
- if expected := []string{"plugin-test"}; !reflect.DeepEqual(expected, resp.Data["connections"]) {
+ if expected := 1; expected != int(actual) {
+ t.Fatalf("expected %d but got %d", expected, resp.Data["count"].(int))
+ }
+ if expected := []any{"plugin-test"}; !reflect.DeepEqual(expected, resp.Data["connections"]) {
t.Fatalf("expected %v but got %v", expected, resp.Data["connections"])
}
}
}
// Get creds
- data = map[string]interface{}{}
- req = &logical.Request{
- Operation: logical.ReadOperation,
- Path: "creds/plugin-role-test",
- Storage: config.StorageView,
- Data: data,
- }
- credsResp, err := b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (credsResp != nil && credsResp.IsError()) {
+ credsResp, err := client.Read("database/creds/plugin-role-test")
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
- credCheckURL := dbutil.QueryHelper(connURL, map[string]string{
+ credCheckURL := dbutil.QueryHelper(templatedConnURL, map[string]string{
"username": "postgres",
"password": "secret",
})
- if !testCredsExist(t, credsResp, credCheckURL) {
+ if !testCredsExist(t, credsResp.Data, credCheckURL) {
t.Fatalf("Creds should exist")
}
// Delete Connection
- data = map[string]interface{}{}
- req = &logical.Request{
- Operation: logical.DeleteOperation,
- Path: "config/plugin-test",
- Storage: config.StorageView,
- Data: data,
- }
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ resp, err = client.Delete("database/config/plugin-test")
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
// Read connection
- req.Operation = logical.ReadOperation
- resp, err = b.HandleRequest(namespace.RootContext(nil), req)
- if err != nil || (resp != nil && resp.IsError()) {
+ resp, err = client.Read("database/config/plugin-test")
+ if err != nil {
t.Fatalf("err:%s resp:%#v\n", err, resp)
}
@@ -1190,7 +1185,7 @@ func TestBackend_allowedRoles(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
- if !testCredsExist(t, credsResp, connURL) {
+ if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -1224,7 +1219,7 @@ func TestBackend_allowedRoles(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
- if !testCredsExist(t, credsResp, connURL) {
+ if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
@@ -1271,7 +1266,7 @@ func TestBackend_allowedRoles(t *testing.T) {
t.Fatalf("err:%s resp:%#v\n", err, credsResp)
}
- if !testCredsExist(t, credsResp, connURL) {
+ if !testCredsExist(t, credsResp.Data, connURL) {
t.Fatalf("Creds should exist")
}
}
@@ -1581,13 +1576,13 @@ func TestNewDatabaseWrapper_IgnoresBuiltinVersion(t *testing.T) {
}
}
-func testCredsExist(t *testing.T, resp *logical.Response, connURL string) bool {
+func testCredsExist(t *testing.T, data map[string]any, connURL string) bool {
t.Helper()
var d struct {
Username string `mapstructure:"username"`
Password string `mapstructure:"password"`
}
- if err := mapstructure.Decode(resp.Data, &d); err != nil {
+ if err := mapstructure.Decode(data, &d); err != nil {
t.Fatal(err)
}
log.Printf("[TRACE] Generated credentials: %v", d)
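The singletonDBFactory introduced in this test file relies on sync.Once so that, no matter how many times the test cluster's mount machinery calls the factory, only one databaseBackend is ever built and the test keeps a handle to its internals (e.g. dbFactory.db.connections). A stripped-down sketch of that lazy-singleton pattern, using generic names rather than the test's actual types:

```go
package main

import (
	"fmt"
	"sync"
)

// backend stands in for the real *databaseBackend.
type backend struct{ id int }

// singletonFactory constructs its backend at most once and returns the same
// instance on every subsequent call.
type singletonFactory struct {
	once sync.Once
	b    *backend
}

func (s *singletonFactory) New() *backend {
	s.once.Do(func() {
		s.b = &backend{id: 1}
	})
	return s.b
}

func main() {
	f := &singletonFactory{}
	first, second := f.New(), f.New()
	fmt.Println(first == second) // true: both callers share one backend
}
```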
diff --git a/builtin/logical/database/versioning_large_test.go b/builtin/logical/database/versioning_large_test.go
index bacb4a6a7123..be936c760336 100644
--- a/builtin/logical/database/versioning_large_test.go
+++ b/builtin/logical/database/versioning_large_test.go
@@ -25,9 +25,10 @@ func TestPlugin_lifecycle(t *testing.T) {
cluster, sys := getCluster(t)
defer cluster.Cleanup()
- vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v4-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV4", []string{})
- vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v5-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV5", []string{})
- vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v6-database-plugin-muxed", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV6Multiplexed", []string{})
+ env := []string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)}
+ vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v4-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV4", env)
+ vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v5-database-plugin", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV5", env)
+ vault.TestAddTestPlugin(t, cluster.Cores[0].Core, "mock-v6-database-plugin-muxed", consts.PluginTypeDatabase, "", "TestBackend_PluginMain_MockV6Multiplexed", env)
config := logical.TestBackendConfig()
config.StorageView = &logical.InmemStorage{}
diff --git a/builtin/plugin/backend_test.go b/builtin/plugin/backend_test.go
index c1b7a83d153a..713444061286 100644
--- a/builtin/plugin/backend_test.go
+++ b/builtin/plugin/backend_test.go
@@ -140,9 +140,8 @@ func testConfig(t *testing.T, pluginCmd string) (*logical.BackendConfig, func())
},
}
- os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
-
- vault.TestAddTestPlugin(t, core.Core, "mock-plugin", consts.PluginTypeSecrets, "", pluginCmd, []string{})
+ vault.TestAddTestPlugin(t, core.Core, "mock-plugin", consts.PluginTypeSecrets, "", pluginCmd,
+ []string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)})
return config, func() {
cluster.Cleanup()
diff --git a/changelog/18513.txt b/changelog/18513.txt
new file mode 100644
index 000000000000..6b3ca2fe486f
--- /dev/null
+++ b/changelog/18513.txt
@@ -0,0 +1,3 @@
+```release-note:improvement
+ui: The latest version of Chrome does not automatically redirect back to the app after authentication unless triggered by the user, so a link was added to redirect back to the app.
+```
\ No newline at end of file
diff --git a/changelog/24512.txt b/changelog/24512.txt
new file mode 100644
index 000000000000..efed04a22535
--- /dev/null
+++ b/changelog/24512.txt
@@ -0,0 +1,6 @@
+```release-note:change
+plugins: Add a warning to the response from sys/plugins/reload/backend if no plugins were reloaded.
+```
+```release-note:improvement
+secrets/database: Support reloading named database plugins using the sys/plugins/reload/backend API endpoint.
+```
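The second release note is exercised by TestBackend_connectionCrud above, which reloads the plugin through sys/plugins/reload/backend via the Logical client. A minimal standalone sketch of that call using the public Go API client; it assumes VAULT_ADDR and VAULT_TOKEN are set in the environment, and the path and plugin name mirror the test:

```go
package main

import (
	"fmt"
	"log"

	"github.com/hashicorp/vault/api"
)

func main() {
	client, err := api.NewClient(api.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Same request the test issues: reload every mounted instance of the
	// named plugin backend.
	secret, err := client.Logical().Write("sys/plugins/reload/backend", map[string]interface{}{
		"plugin": "postgresql-database-plugin",
	})
	if err != nil {
		log.Fatal(err)
	}

	// Per the first release note, the response now warns when no plugins
	// were actually reloaded.
	if secret != nil {
		fmt.Println("warnings:", secret.Warnings)
	}
}
```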
diff --git a/changelog/24660.txt b/changelog/24660.txt
new file mode 100644
index 000000000000..415944299e1a
--- /dev/null
+++ b/changelog/24660.txt
@@ -0,0 +1,3 @@
+```release-note:bug
+ui: The UI can now be used by an operator to create or update database roles without permission on the database connection.
+```
diff --git a/changelog/24686.txt b/changelog/24686.txt
new file mode 100644
index 000000000000..30ef696f491e
--- /dev/null
+++ b/changelog/24686.txt
@@ -0,0 +1,3 @@
+```release-note:bug
+ui: fix incorrectly calculated capabilities on PKI issuer endpoints
+```
diff --git a/changelog/24697.txt b/changelog/24697.txt
new file mode 100644
index 000000000000..49492d19b290
--- /dev/null
+++ b/changelog/24697.txt
@@ -0,0 +1,3 @@
+```release-note:bug
+ui: Fixes input for jwks_ca_pem when configuring a JWT auth method
+```
\ No newline at end of file
diff --git a/go.mod b/go.mod
index 957bf91a6529..01ccff572660 100644
--- a/go.mod
+++ b/go.mod
@@ -41,6 +41,7 @@ require (
github.com/aliyun/alibaba-cloud-sdk-go v1.62.521
github.com/aliyun/aliyun-oss-go-sdk v0.0.0-20190307165228-86c17b95fcd5
github.com/apple/foundationdb/bindings/go v0.0.0-20190411004307-cd5c9d91fad2
+ github.com/armon/go-metrics v0.4.1
github.com/armon/go-radix v1.0.0
github.com/asaskevich/govalidator v0.0.0-20200907205600-7a23bdc65eef
github.com/aws/aws-sdk-go v1.44.331
@@ -99,7 +100,6 @@ require (
github.com/hashicorp/go-kms-wrapping/wrappers/transit/v2 v2.0.8
github.com/hashicorp/go-memdb v1.3.4
github.com/hashicorp/go-metrics v0.5.3
- github.com/hashicorp/go-msgpack v1.1.5
github.com/hashicorp/go-multierror v1.1.1
github.com/hashicorp/go-plugin v1.6.0
github.com/hashicorp/go-raftchunking v0.6.3-0.20191002164813-7e9e8525653a
@@ -120,16 +120,16 @@ require (
github.com/hashicorp/go-syslog v1.0.0
github.com/hashicorp/go-uuid v1.0.3
github.com/hashicorp/go-version v1.6.0
- github.com/hashicorp/golang-lru v0.5.4
+ github.com/hashicorp/golang-lru v1.0.2
github.com/hashicorp/hcl v1.0.1-vault-5
github.com/hashicorp/hcl/v2 v2.16.2
github.com/hashicorp/hcp-link v0.1.0
github.com/hashicorp/hcp-scada-provider v0.2.1
github.com/hashicorp/hcp-sdk-go v0.23.0
github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf
- github.com/hashicorp/raft v1.3.10
+ github.com/hashicorp/raft v1.6.0
github.com/hashicorp/raft-autopilot v0.2.0
- github.com/hashicorp/raft-boltdb/v2 v2.0.0-20210421194847-a7e34179d62c
+ github.com/hashicorp/raft-boltdb/v2 v2.3.0
github.com/hashicorp/raft-snapshot v1.0.4
github.com/hashicorp/vault-plugin-auth-alicloud v0.16.0
github.com/hashicorp/vault-plugin-auth-azure v0.16.2
@@ -220,11 +220,11 @@ require (
golang.org/x/exp v0.0.0-20230817173708-d852ddb80c63
golang.org/x/net v0.17.0
golang.org/x/oauth2 v0.11.0
- golang.org/x/sync v0.3.0
+ golang.org/x/sync v0.4.0
golang.org/x/sys v0.15.0
golang.org/x/term v0.15.0
golang.org/x/text v0.14.0
- golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846
+ golang.org/x/tools v0.14.0
google.golang.org/api v0.139.0
google.golang.org/grpc v1.58.3
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0
@@ -280,7 +280,6 @@ require (
github.com/apache/arrow/go/v12 v12.0.1 // indirect
github.com/apache/thrift v0.16.0 // indirect
github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect
- github.com/armon/go-metrics v0.4.1 // indirect
github.com/aws/aws-sdk-go-v2 v1.17.7 // indirect
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 // indirect
github.com/aws/aws-sdk-go-v2/credentials v1.13.18 // indirect
@@ -302,6 +301,7 @@ require (
github.com/baiyubin/aliyun-sts-go-sdk v0.0.0-20180326062324-cfa1a18b161f // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bgentry/speakeasy v0.1.0 // indirect
+ github.com/boltdb/bolt v1.3.1 // indirect
github.com/boombuler/barcode v1.0.1 // indirect
github.com/cenkalti/backoff v2.2.1+incompatible // indirect
github.com/cenkalti/backoff/v4 v4.2.1 // indirect
@@ -335,7 +335,7 @@ require (
github.com/docker/go-connections v0.4.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/dsnet/compress v0.0.2-0.20210315054119-f66993602bf5 // indirect
- github.com/dvsekhvalnov/jose2go v1.5.0 // indirect
+ github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
github.com/emicklei/go-restful/v3 v3.10.1 // indirect
github.com/emirpasic/gods v1.18.1 // indirect
github.com/envoyproxy/go-control-plane v0.11.1 // indirect
@@ -387,7 +387,7 @@ require (
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed // indirect
github.com/hashicorp/cronexpr v1.1.1 // indirect
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
- github.com/hashicorp/go-msgpack/v2 v2.0.0 // indirect
+ github.com/hashicorp/go-msgpack/v2 v2.1.1 // indirect
github.com/hashicorp/go-secure-stdlib/fileutil v0.1.0 // indirect
github.com/hashicorp/go-secure-stdlib/plugincontainer v0.3.0 // indirect
github.com/hashicorp/go-slug v0.12.1 // indirect
@@ -515,7 +515,7 @@ require (
go.uber.org/multierr v1.7.0 // indirect
go.uber.org/zap v1.19.1 // indirect
golang.org/x/exp/typeparams v0.0.0-20221208152030-732eee02a75a // indirect
- golang.org/x/mod v0.12.0 // indirect
+ golang.org/x/mod v0.13.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 // indirect
google.golang.org/appengine v1.6.7 // indirect
diff --git a/go.sum b/go.sum
index 2ba2d83ddd11..cc0845f7f62e 100644
--- a/go.sum
+++ b/go.sum
@@ -1230,6 +1230,7 @@ github.com/blang/semver v3.5.1+incompatible/go.mod h1:kRBLl5iJ+tD4TcOOxsy/0fnweb
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869/go.mod h1:Ekp36dRnpXw/yCqJaO+ZrUyxD+3VXMFFr56k5XYrpB4=
+github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
github.com/boombuler/barcode v1.0.0/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
github.com/boombuler/barcode v1.0.1-0.20190219062509-6c824513bacc/go.mod h1:paBWMcWSl3LHKBqUq+rly7CNSldXjb2rDl3JlRe0mD8=
@@ -1577,8 +1578,9 @@ github.com/dustin/go-humanize v0.0.0-20171111073723-bb3d318650d4/go.mod h1:Htrtb
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY=
github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto=
-github.com/dvsekhvalnov/jose2go v1.5.0 h1:3j8ya4Z4kMCwT5nXIKFSV84YS+HdqSSO0VsTQxaLAeM=
github.com/dvsekhvalnov/jose2go v1.5.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
+github.com/dvsekhvalnov/jose2go v1.6.0 h1:Y9gnSnP4qEI0+/uQkHvFXeD2PLPJeXEL+ySMEA2EjTY=
+github.com/dvsekhvalnov/jose2go v1.6.0/go.mod h1:QsHjhyTlD/lAVqn/NSbVZmSCGeDehTB/mPZadG+mhXU=
github.com/elazarl/goproxy v0.0.0-20180725130230-947c36da3153/go.mod h1:/Zj4wYkgs4iZTTu3o/KG3Itv/qCCa8VVMlb3i9OVuzc=
github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819 h1:RIB4cRk+lBqKK3Oy0r2gRX4ui7tuhiZq2SuTtTCi0/0=
github.com/elazarl/goproxy v0.0.0-20221015165544-a0805db90819/go.mod h1:Ro8st/ElPeALwNFlcTpWmkr6IoMFfkjXAvTHpevnDsM=
@@ -2202,8 +2204,8 @@ github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iP
github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-msgpack v1.1.5 h1:9byZdVjKTe5mce63pRVNP1L7UAmdHOTEMGehn6KvJWs=
github.com/hashicorp/go-msgpack v1.1.5/go.mod h1:gWVc3sv/wbDmR3rQsj1CAktEZzoz1YNK9NfGLXJ69/4=
-github.com/hashicorp/go-msgpack/v2 v2.0.0 h1:c1fiLq1LNghmLOry1ipGhvLDi+/zEoaEP2JrE1oFJ9s=
-github.com/hashicorp/go-msgpack/v2 v2.0.0/go.mod h1:JIxYkkFJRDDRSoWQBSh7s9QAVThq+82iWmUpmE4jKak=
+github.com/hashicorp/go-msgpack/v2 v2.1.1 h1:xQEY9yB2wnHitoSzk/B9UjXWRQ67QKu5AOm8aFp8N3I=
+github.com/hashicorp/go-msgpack/v2 v2.1.1/go.mod h1:upybraOAblm4S7rx0+jeNy+CWWhzywQsSRV5033mMu4=
github.com/hashicorp/go-multierror v0.0.0-20161216184304-ed905158d874/go.mod h1:JMRHfdO9jKNzS/+BTlxCjKNQHg/jZAft8U7LloJvN7I=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=
@@ -2272,8 +2274,9 @@ github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
-github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
+github.com/hashicorp/golang-lru v1.0.2 h1:dV3g9Z/unq5DpblPpw+Oqcv4dU/1omnb4Ok8iPY6p1c=
+github.com/hashicorp/golang-lru v1.0.2/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ=
github.com/hashicorp/hcl v1.0.1-vault-5 h1:kI3hhbbyzr4dldA8UdTb7ZlVVlI2DACdCfz31RPDgJM=
github.com/hashicorp/hcl v1.0.1-vault-5/go.mod h1:XYhtn6ijBSAj6n4YqAaf7RBPS4I06AItNorpy+MoQNM=
@@ -2301,16 +2304,17 @@ github.com/hashicorp/net-rpc-msgpackrpc/v2 v2.0.0/go.mod h1:6pdNz0vo0mF0GvhwDG56
github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf h1:cKXVf1UJqwdkGiTF3idqCOLApAql0310OSmJxeiaMWg=
github.com/hashicorp/nomad/api v0.0.0-20230519153805-2275a83cbfdf/go.mod h1:rb38DqjaaIfhJRiLeCAGgIt+wV7o78rB+liyFE3mVzE=
github.com/hashicorp/raft v1.0.1/go.mod h1:DVSAWItjLjTOkVbSpWQ0j0kUADIvDaCtBxIcbNAQLkI=
-github.com/hashicorp/raft v1.1.0/go.mod h1:4Ak7FSPnuvmb0GV6vgIAJ4vYT4bek9bb6Q+7HVbyzqM=
github.com/hashicorp/raft v1.1.2-0.20191002163536-9c6bd3e3eb17/go.mod h1:vPAJM8Asw6u8LxC3eJCUZmRP/E4QmUGE1R7g7k8sG/8=
github.com/hashicorp/raft v1.2.0/go.mod h1:vPAJM8Asw6u8LxC3eJCUZmRP/E4QmUGE1R7g7k8sG/8=
-github.com/hashicorp/raft v1.3.10 h1:LR5QZX1VQd0DFWZfeCwWawyeKfpS/Tm1yjnJIY5X4Tw=
-github.com/hashicorp/raft v1.3.10/go.mod h1:J8naEwc6XaaCfts7+28whSeRvCqTd6e20BlCU3LtEO4=
+github.com/hashicorp/raft v1.6.0 h1:tkIAORZy2GbJ2Trp5eUSggLXDPOJLXC+JJLNMMqtgtM=
+github.com/hashicorp/raft v1.6.0/go.mod h1:Xil5pDgeGwRWuX4uPUmwa+7Vagg4N804dz6mhNi6S7o=
github.com/hashicorp/raft-autopilot v0.2.0 h1:2/R2RPgamgRKgNWGQioULZvjeKXQZmDuw5Ty+6c+H7Y=
github.com/hashicorp/raft-autopilot v0.2.0/go.mod h1:q6tZ8UAZ5xio2gv2JvjgmtOlh80M6ic8xQYBe2Egkg8=
github.com/hashicorp/raft-boltdb v0.0.0-20171010151810-6e5ba93211ea/go.mod h1:pNv7Wc3ycL6F5oOWn+tPGo2gWD4a5X+yp/ntwdKLjRk=
-github.com/hashicorp/raft-boltdb/v2 v2.0.0-20210421194847-a7e34179d62c h1:oiKun9QlrOz5yQxMZJ3tf1kWtFYuKSJzxzEDxDPevj4=
-github.com/hashicorp/raft-boltdb/v2 v2.0.0-20210421194847-a7e34179d62c/go.mod h1:kiPs9g148eLShc2TYagUAyKDnD+dH9U+CQKsXzlY9xo=
+github.com/hashicorp/raft-boltdb v0.0.0-20230125174641-2a8082862702 h1:RLKEcCuKcZ+qp2VlaaZsYZfLOmIiuJNpEi48Rl8u9cQ=
+github.com/hashicorp/raft-boltdb v0.0.0-20230125174641-2a8082862702/go.mod h1:nTakvJ4XYq45UXtn0DbwR4aU9ZdjlnIenpbs6Cd+FM0=
+github.com/hashicorp/raft-boltdb/v2 v2.3.0 h1:fPpQR1iGEVYjZ2OELvUHX600VAK5qmdnDEv3eXOwZUA=
+github.com/hashicorp/raft-boltdb/v2 v2.3.0/go.mod h1:YHukhB04ChJsLHLJEUD6vjFyLX2L3dsX3wPBZcX4tmc=
github.com/hashicorp/raft-snapshot v1.0.4 h1:EuDuayAJPdiDmVk1ygTDnG2zDzrs0/6/yBuma1IYSow=
github.com/hashicorp/raft-snapshot v1.0.4/go.mod h1:5sL9eUn72lH5DzsFIJ9jaysITbHksSSszImWSOTC8Ic=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
@@ -3548,8 +3552,8 @@ golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.9.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.10.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.11.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
-golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc=
-golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
+golang.org/x/mod v0.13.0 h1:I/DsJXRlw/8l/0c24sM9yb0T4z9liZTduXvdAWYiysY=
+golang.org/x/mod v0.13.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20170114055629-f2499483f923/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -3702,8 +3706,9 @@ golang.org/x/sync v0.0.0-20220819030929-7fc1605a5dde/go.mod h1:RxMgew5VJxzue5/jJ
golang.org/x/sync v0.0.0-20220929204114-8fcdb60fdcc0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.1.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.2.0/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
-golang.org/x/sync v0.3.0 h1:ftCYgMx6zT/asHUrPw8BLLscYtGznsLAnjq5RH9P66E=
golang.org/x/sync v0.3.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
+golang.org/x/sync v0.4.0 h1:zxkM55ReGkDlKSM+Fu41A+zmbZuaPVbGMzvvdUPznYQ=
+golang.org/x/sync v0.4.0/go.mod h1:FU7BRWz2tNW+3quACPkgCx/L+uEAv1htQ0V83Z9Rj+Y=
golang.org/x/sys v0.0.0-20170830134202-bb24a47a89ea/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -3948,7 +3953,6 @@ golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3
golang.org/x/tools v0.0.0-20190329151228-23e29df326fe/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190416151739-9c9e1878f421/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190420181800-aa740d480789/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
-golang.org/x/tools v0.0.0-20190424220101-1e8e1cfdf96b/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
@@ -4031,8 +4035,8 @@ golang.org/x/tools v0.7.0/go.mod h1:4pg6aUX35JBAogB10C9AtvVL+qowtN4pT3CGSQex14s=
golang.org/x/tools v0.8.0/go.mod h1:JxBZ99ISMI5ViVkT1tr6tdNmXeTrcpVSD3vZ1RsRdN4=
golang.org/x/tools v0.9.1/go.mod h1:owI94Op576fPu3cIGQeHs3joujW/2Oc6MtlxbF5dfNc=
golang.org/x/tools v0.10.0/go.mod h1:UJwyiVBsOA2uwvK/e5OY3GTpDUJriEd+/YlqAwLPmyM=
-golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846 h1:Vve/L0v7CXXuxUmaMGIEK/dEeq7uiqb5qBgQrZzIE7E=
-golang.org/x/tools v0.12.1-0.20230815132531-74c255bcf846/go.mod h1:Sc0INKfu04TlqNoRA1hgpFZbhYXHPr4V5DzpSBTPqQM=
+golang.org/x/tools v0.14.0 h1:jvNa2pY0M4r62jkRQ6RwEZZyPcymeL9XZMLBbV7U2nc=
+golang.org/x/tools v0.14.0/go.mod h1:uYBEerGOWcJyEORxN+Ek8+TT266gXkNlHdJBwexUsBg=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
diff --git a/helper/testhelpers/corehelpers/corehelpers.go b/helper/testhelpers/corehelpers/corehelpers.go
index c2d6bc8a3ce7..8c3d7cfb966f 100644
--- a/helper/testhelpers/corehelpers/corehelpers.go
+++ b/helper/testhelpers/corehelpers/corehelpers.go
@@ -535,7 +535,7 @@ func (n *NoopAudit) Invalidate(_ context.Context) {
// the audit.Backend interface.
func (n *NoopAudit) RegisterNodesAndPipeline(broker *eventlogger.Broker, name string) error {
for id, node := range n.nodeMap {
- if err := broker.RegisterNode(id, node, eventlogger.WithNodeRegistrationPolicy(eventlogger.DenyOverwrite)); err != nil {
+ if err := broker.RegisterNode(id, node); err != nil {
return err
}
}
@@ -546,7 +546,7 @@ func (n *NoopAudit) RegisterNodesAndPipeline(broker *eventlogger.Broker, name st
NodeIDs: n.nodeIDList,
}
- return broker.RegisterPipeline(pipeline, eventlogger.WithPipelineRegistrationPolicy(eventlogger.DenyOverwrite))
+ return broker.RegisterPipeline(pipeline)
}
type TestLogger struct {
@@ -632,3 +632,7 @@ func (n *NoopAudit) Nodes() map[eventlogger.NodeID]eventlogger.Node {
func (n *NoopAudit) NodeIDs() []eventlogger.NodeID {
return n.nodeIDList
}
+
+func (n *NoopAudit) IsFallback() bool {
+ return false
+}
diff --git a/http/plugin_test.go b/http/plugin_test.go
index fa67187621a8..b215a6b1c6bc 100644
--- a/http/plugin_test.go
+++ b/http/plugin_test.go
@@ -5,6 +5,7 @@ package http
import (
"encoding/json"
+ "fmt"
"io/ioutil"
"os"
"reflect"
@@ -55,10 +56,9 @@ func getPluginClusterAndCore(t *testing.T, logger log.Logger) (*vault.TestCluste
cores := cluster.Cores
core := cores[0]
- os.Setenv(pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)
-
vault.TestWaitActive(benchhelpers.TBtoT(t), core.Core)
- vault.TestAddTestPlugin(benchhelpers.TBtoT(t), core.Core, "mock-plugin", consts.PluginTypeSecrets, "", "TestPlugin_PluginMain", []string{})
+ vault.TestAddTestPlugin(benchhelpers.TBtoT(t), core.Core, "mock-plugin", consts.PluginTypeSecrets, "", "TestPlugin_PluginMain",
+ []string{fmt.Sprintf("%s=%s", pluginutil.PluginCACertPEMEnv, cluster.CACertPEMFile)})
// Mount the mock plugin
err = core.Client.Sys().Mount("mock", &api.MountInput{
diff --git a/physical/raft/msgpack.go b/physical/raft/msgpack.go
deleted file mode 100644
index b9b4f110486b..000000000000
--- a/physical/raft/msgpack.go
+++ /dev/null
@@ -1,13 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-package raft
-
-// If we downgrade msgpack from v1.1.5 to v0.5.5, everything will still
-// work, but any pre-existing raft clusters will break on upgrade.
-// This file exists so that the Vault project has an explicit dependency
-// on the library, which allows us to pin the version in go.mod.
-
-import (
- _ "github.com/hashicorp/go-msgpack/codec"
-)
diff --git a/physical/raft/raft.go b/physical/raft/raft.go
index 3a355555d075..9e9bb9100f7e 100644
--- a/physical/raft/raft.go
+++ b/physical/raft/raft.go
@@ -431,8 +431,9 @@ func NewRaftBackend(conf map[string]string, logger log.Logger) (physical.Backend
dbPath := filepath.Join(path, "raft.db")
opts := etcdboltOptions(dbPath)
raftOptions := raftboltdb.Options{
- Path: dbPath,
- BoltOptions: opts,
+ Path: dbPath,
+ BoltOptions: opts,
+ MsgpackUseNewTimeFormat: true,
}
store, err := raftboltdb.New(raftOptions)
if err != nil {
@@ -997,11 +998,12 @@ func (b *RaftBackend) SetupCluster(ctx context.Context, opts SetupOpts) error {
return err
}
transConfig := &raft.NetworkTransportConfig{
- Stream: streamLayer,
- MaxPool: 3,
- Timeout: 10 * time.Second,
- ServerAddressProvider: b.serverAddressProvider,
- Logger: b.logger.Named("raft-net"),
+ Stream: streamLayer,
+ MaxPool: 3,
+ Timeout: 10 * time.Second,
+ ServerAddressProvider: b.serverAddressProvider,
+ Logger: b.logger.Named("raft-net"),
+ MsgpackUseNewTimeFormat: true,
}
transport := raft.NewNetworkTransportWithConfig(transConfig)
diff --git a/sdk/go.mod b/sdk/go.mod
index 4bb988f9c0b3..87a465c1ff2c 100644
--- a/sdk/go.mod
+++ b/sdk/go.mod
@@ -46,9 +46,9 @@ require (
github.com/ryanuber/go-glob v1.0.0
github.com/stretchr/testify v1.8.3
go.uber.org/atomic v1.9.0
- golang.org/x/crypto v0.14.0
+ golang.org/x/crypto v0.17.0
golang.org/x/net v0.17.0
- golang.org/x/text v0.13.0
+ golang.org/x/text v0.14.0
google.golang.org/grpc v1.57.2
google.golang.org/protobuf v1.31.0
)
@@ -104,8 +104,8 @@ require (
go.opencensus.io v0.24.0 // indirect
golang.org/x/mod v0.9.0 // indirect
golang.org/x/oauth2 v0.11.0 // indirect
- golang.org/x/sys v0.13.0 // indirect
- golang.org/x/term v0.13.0 // indirect
+ golang.org/x/sys v0.15.0 // indirect
+ golang.org/x/term v0.15.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/tools v0.7.0 // indirect
google.golang.org/api v0.134.0 // indirect
diff --git a/sdk/go.sum b/sdk/go.sum
index 206cbe876e41..b33d28de43a7 100644
--- a/sdk/go.sum
+++ b/sdk/go.sum
@@ -543,8 +543,8 @@ golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20220314234659-1baeb1ce4c0b/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.0.0-20220622213112-05595931fe9d/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
-golang.org/x/crypto v0.14.0 h1:wBqGXzWJW6m1XrIKlAH0Hs1JJ7+9KBwnIO8v66Q9cHc=
-golang.org/x/crypto v0.14.0/go.mod h1:MVFd36DqK4CsrnJYDkBA3VC4m2GkXAM0PvzMCn4JQf4=
+golang.org/x/crypto v0.17.0 h1:r8bRNjWL3GshPW3gkd+RpvzWrZAwPS49OmTGZ/uhM4k=
+golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -683,14 +683,14 @@ golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.13.0 h1:Af8nKPmuFypiUBjVoU9V20FiaFXOcuZI21p0ycVYYGE=
-golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
+golang.org/x/sys v0.15.0 h1:h48lPFYpsTvQJZF4EKyI4aLHaev3CxivZmv7yZig9pc=
+golang.org/x/sys v0.15.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
-golang.org/x/term v0.13.0 h1:bb+I9cTfFazGW51MZqBVmZy7+JEJMouUHTUSKVQLBek=
-golang.org/x/term v0.13.0/go.mod h1:LTmsnFJwVN6bCy1rVCoS+qHT1HhALEFxKncY3WNNh4U=
+golang.org/x/term v0.15.0 h1:y/Oo/a/q3IXu26lQgl04j/gjuBDOBlx7X6Om1j2CPW4=
+golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -702,8 +702,8 @@ golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
-golang.org/x/text v0.13.0 h1:ablQoSUd0tRdKxZewP80B+BaqeKJuVhuRxj/dkrun3k=
-golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
+golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ=
+golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
diff --git a/ui/app/adapters/database/role.js b/ui/app/adapters/database/role.js
index 848719e37a7f..2a3002c1d1fc 100644
--- a/ui/app/adapters/database/role.js
+++ b/ui/app/adapters/database/role.js
@@ -164,7 +164,7 @@ export default ApplicationAdapter.extend({
db: db[0],
});
} catch (e) {
- throw new Error('Could not update allowed roles for selected database. Check Vault logs for details');
+ this.checkError(e);
}
return this.ajax(this.urlFor(backend, id, roleType), 'POST', { data }).then(() => {
@@ -180,12 +180,16 @@ export default ApplicationAdapter.extend({
const backend = snapshot.attr('backend');
const id = snapshot.attr('name');
const db = snapshot.attr('database');
- await this._updateAllowedRoles(store, {
- role: id,
- backend,
- db: db[0],
- type: 'remove',
- });
+ try {
+ await this._updateAllowedRoles(store, {
+ role: id,
+ backend,
+ db: db[0],
+ type: 'remove',
+ });
+ } catch (e) {
+ this.checkError(e);
+ }
return this.ajax(this.urlFor(backend, id, roleType), 'DELETE');
},
@@ -199,4 +203,14 @@ export default ApplicationAdapter.extend({
return this.ajax(this.urlFor(backend, id, roleType), 'POST', { data }).then(() => data);
},
+
+ checkError(e) {
+ if (e.httpStatus === 403) {
+      // The user does not have permission to update the connection. This
+ // can happen if their permissions are limited to the role. In that case
+ // we ignore the error and continue updating the role.
+ return;
+ }
+ throw new Error(`Could not update allowed roles for selected database: ${e.errors.join(', ')}`);
+ },
});
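
The adapter change above centralizes the 403 handling in `checkError`: a forbidden response on the connection update is tolerated, anything else surfaces to the caller. A rough standalone sketch of the same pattern follows; the error shape (`httpStatus`, `errors`) mirrors the fields referenced in the diff, and all other names are illustrative assumptions, not Vault's actual adapter API.

```typescript
// Hypothetical sketch of the "ignore 403, surface everything else" pattern used by checkError.
interface AdapterLikeError {
  httpStatus?: number;
  errors?: string[];
}

function ignoreForbidden(e: AdapterLikeError): void {
  if (e.httpStatus === 403) {
    // The caller may only have permission on the role, not the connection,
    // so a forbidden response on the connection update is tolerated.
    return;
  }
  throw new Error(`Could not update allowed roles for selected database: ${(e.errors ?? []).join(', ')}`);
}

// Usage: attempt the connection update, fall through on 403, rethrow otherwise.
async function updateRole(updateConnection: () => Promise<void>, saveRole: () => Promise<void>) {
  try {
    await updateConnection();
  } catch (e) {
    ignoreForbidden(e as AdapterLikeError);
  }
  await saveRole();
}
```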
diff --git a/ui/app/components/database-role-edit.js b/ui/app/components/database-role-edit.js
index 3379650e6e52..9672fe676034 100644
--- a/ui/app/components/database-role-edit.js
+++ b/ui/app/components/database-role-edit.js
@@ -27,9 +27,6 @@ export default class DatabaseRoleEdit extends Component {
get warningMessages() {
const warnings = {};
- if (this.args.model.canUpdateDb === false) {
- warnings.database = `You don’t have permissions to update this database connection, so this role cannot be created.`;
- }
if (
(this.args.model.type === 'dynamic' && this.args.model.canCreateDynamic === false) ||
(this.args.model.type === 'static' && this.args.model.canCreateStatic === false)
diff --git a/ui/app/models/auth-config/jwt.js b/ui/app/models/auth-config/jwt.js
index e432800c3021..f675c29c9ffe 100644
--- a/ui/app/models/auth-config/jwt.js
+++ b/ui/app/models/auth-config/jwt.js
@@ -24,12 +24,31 @@ export default AuthConfig.extend({
oidcClientSecret: attr('string', {
label: 'OIDC client secret',
}),
+
oidcDiscoveryCaPem: attr('string', {
label: 'OIDC discovery CA PEM',
editType: 'file',
helpText:
'The CA certificate or chain of certificates, in PEM format, to use to validate connections to the OIDC Discovery URL. If not set, system certificates are used',
}),
+
+ jwksCaPem: attr('string', {
+ label: 'JWKS CA PEM',
+ editType: 'file',
+ }),
+
+ jwksUrl: attr('string', {
+ label: 'JWKS URL',
+ }),
+
+ oidcResponseMode: attr('string', {
+ label: 'OIDC response mode',
+ }),
+
+ oidcResponseTypes: attr('string', {
+ label: 'OIDC response types',
+ }),
+
jwtValidationPubkeys: attr({
label: 'JWT validation public keys',
editType: 'stringArray',
@@ -38,14 +57,23 @@ export default AuthConfig.extend({
jwtSupportedAlgs: attr({
label: 'JWT supported algorithms',
}),
+
boundIssuer: attr('string', {
helpText: 'The value against which to match the iss claim in a JWT',
}),
+
fieldGroups: computed('constructor.modelName', 'newFields', function () {
const type = this.constructor.modelName.split('/')[1].toUpperCase();
let groups = [
{
- default: ['oidcDiscoveryUrl', 'defaultRole'],
+ default: [
+ 'oidcDiscoveryUrl',
+ 'defaultRole',
+ 'jwksCaPem',
+ 'jwksUrl',
+ 'oidcResponseMode',
+ 'oidcResponseTypes',
+ ],
},
{
[`${type} Options`]: [
diff --git a/ui/app/models/control-group.js b/ui/app/models/control-group.js
index be420d5dff25..b84974feed33 100644
--- a/ui/app/models/control-group.js
+++ b/ui/app/models/control-group.js
@@ -10,8 +10,8 @@ import lazyCapabilities, { apiPath } from 'vault/macros/lazy-capabilities';
export default Model.extend({
approved: attr('boolean'),
requestPath: attr('string'),
- requestEntity: belongsTo('identity/entity', { async: false }),
- authorizations: hasMany('identity/entity', { async: false }),
+ requestEntity: belongsTo('identity/entity', { async: false, inverse: null }),
+ authorizations: hasMany('identity/entity', { async: false, inverse: null }),
authorizePath: lazyCapabilities(apiPath`sys/control-group/authorize`),
canAuthorize: alias('authorizePath.canUpdate'),
diff --git a/ui/app/models/identity/entity-alias.js b/ui/app/models/identity/entity-alias.js
index a29f16084658..d6042175a723 100644
--- a/ui/app/models/identity/entity-alias.js
+++ b/ui/app/models/identity/entity-alias.js
@@ -14,7 +14,7 @@ export default IdentityModel.extend({
formFields: computed(function () {
return ['name', 'mountAccessor'];
}),
- entity: belongsTo('identity/entity', { readOnly: true, async: false }),
+ entity: belongsTo('identity/entity', { readOnly: true, async: false, inverse: 'aliases' }),
name: attr('string'),
canonicalId: attr('string'),
diff --git a/ui/app/models/identity/entity.js b/ui/app/models/identity/entity.js
index 5622ea5a65cd..bb53324d8120 100644
--- a/ui/app/models/identity/entity.js
+++ b/ui/app/models/identity/entity.js
@@ -34,7 +34,7 @@ export default IdentityModel.extend({
lastUpdateTime: attr('string', {
readOnly: true,
}),
- aliases: hasMany('identity/entity-alias', { async: false, readOnly: true }),
+ aliases: hasMany('identity/entity-alias', { async: false, readOnly: true, inverse: 'entity' }),
groupIds: attr({
readOnly: true,
}),
diff --git a/ui/app/models/identity/group.js b/ui/app/models/identity/group.js
index 14a8c8c30da0..d2be312b31c5 100644
--- a/ui/app/models/identity/group.js
+++ b/ui/app/models/identity/group.js
@@ -77,7 +77,7 @@ export default IdentityModel.extend({
),
policyPath: lazyCapabilities(apiPath`sys/policies`),
canCreatePolicies: alias('policyPath.canCreate'),
- alias: belongsTo('identity/group-alias', { async: false, readOnly: true }),
+ alias: belongsTo('identity/group-alias', { async: false, readOnly: true, inverse: 'group' }),
updatePath: identityCapabilities(),
canDelete: alias('updatePath.canDelete'),
canEdit: alias('updatePath.canUpdate'),
diff --git a/ui/app/models/kmip/ca.js b/ui/app/models/kmip/ca.js
index aea85b4cf937..c468530c634d 100644
--- a/ui/app/models/kmip/ca.js
+++ b/ui/app/models/kmip/ca.js
@@ -6,7 +6,7 @@
import Model, { belongsTo, attr } from '@ember-data/model';
export default Model.extend({
- config: belongsTo('kmip/config', { async: false }),
+ config: belongsTo('kmip/config', { async: false, inverse: 'ca' }),
caPem: attr('string', {
label: 'CA PEM',
}),
diff --git a/ui/app/models/kmip/config.js b/ui/app/models/kmip/config.js
index 336cc3db7cfc..8a9fedbb2d5b 100644
--- a/ui/app/models/kmip/config.js
+++ b/ui/app/models/kmip/config.js
@@ -10,7 +10,7 @@ import fieldToAttrs from 'vault/utils/field-to-attrs';
export default Model.extend({
useOpenAPI: true,
- ca: belongsTo('kmip/ca', { async: false }),
+ ca: belongsTo('kmip/ca', { async: false, inverse: 'config' }),
getHelpUrl(path) {
return `/v1/${path}/config?help=1`;
},
diff --git a/ui/app/models/mfa-login-enforcement.js b/ui/app/models/mfa-login-enforcement.js
index 69f307ea676e..ac1791b7f75e 100644
--- a/ui/app/models/mfa-login-enforcement.js
+++ b/ui/app/models/mfa-login-enforcement.js
@@ -37,12 +37,12 @@ const validations = {
export default class MfaLoginEnforcementModel extends Model {
@service store;
@attr('string') name;
- @hasMany('mfa-method') mfa_methods;
+ @hasMany('mfa-method', { async: true, inverse: null }) mfa_methods;
@attr('string') namespace_id;
@attr('array', { defaultValue: () => [] }) auth_method_accessors; // ["auth_approle_17a552c6"]
@attr('array', { defaultValue: () => [] }) auth_method_types; // ["userpass"]
- @hasMany('identity/entity') identity_entities;
- @hasMany('identity/group') identity_groups;
+ @hasMany('identity/entity', { async: true, inverse: null }) identity_entities;
+ @hasMany('identity/group', { async: true, inverse: null }) identity_groups;
get targets() {
return ArrayProxy.extend(PromiseProxyMixin).create({
diff --git a/ui/app/models/pki/issuer.js b/ui/app/models/pki/issuer.js
index a06ea671690b..6e51d469d5fc 100644
--- a/ui/app/models/pki/issuer.js
+++ b/ui/app/models/pki/issuer.js
@@ -135,13 +135,14 @@ export default class PkiIssuerModel extends Model {
@attr importedKeys;
@attr mapping;
- @lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}`) issuerPath;
- @lazyCapabilities(apiPath`${'backend'}/root/rotate/exported`) rotateExported;
- @lazyCapabilities(apiPath`${'backend'}/root/rotate/internal`) rotateInternal;
- @lazyCapabilities(apiPath`${'backend'}/root/rotate/existing`) rotateExisting;
+ @lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}`, 'backend', 'issuerId') issuerPath;
+ @lazyCapabilities(apiPath`${'backend'}/root/rotate/exported`, 'backend') rotateExported;
+ @lazyCapabilities(apiPath`${'backend'}/root/rotate/internal`, 'backend') rotateInternal;
+ @lazyCapabilities(apiPath`${'backend'}/root/rotate/existing`, 'backend') rotateExisting;
@lazyCapabilities(apiPath`${'backend'}/root`, 'backend') deletePath;
- @lazyCapabilities(apiPath`${'backend'}/intermediate/cross-sign`) crossSignPath;
- @lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}/sign-intermediate`) signIntermediate;
+ @lazyCapabilities(apiPath`${'backend'}/intermediate/cross-sign`, 'backend') crossSignPath;
+ @lazyCapabilities(apiPath`${'backend'}/issuer/${'issuerId'}/sign-intermediate`, 'backend', 'issuerId')
+ signIntermediate;
get canRotateIssuer() {
return (
this.rotateExported.get('canUpdate') !== false ||
diff --git a/ui/app/models/sync/destinations/azure-kv.js b/ui/app/models/sync/destinations/azure-kv.js
index 5c7dbcec894f..f1fb4c66f4d3 100644
--- a/ui/app/models/sync/destinations/azure-kv.js
+++ b/ui/app/models/sync/destinations/azure-kv.js
@@ -8,8 +8,8 @@ import { attr } from '@ember-data/model';
import { withFormFields } from 'vault/decorators/model-form-fields';
const displayFields = ['name', 'keyVaultUri', 'tenantId', 'cloud', 'clientId', 'clientSecret'];
const formFieldGroups = [
- { default: ['name', 'tenantId', 'cloud', 'clientId'] },
- { Credentials: ['keyVaultUri', 'clientSecret'] },
+ { default: ['name', 'keyVaultUri', 'tenantId', 'cloud', 'clientId'] },
+ { Credentials: ['clientSecret'] },
];
@withFormFields(displayFields, formFieldGroups)
export default class SyncDestinationsAzureKeyVaultModel extends SyncDestinationModel {
diff --git a/ui/app/routes/vault/cluster/oidc-provider.js b/ui/app/routes/vault/cluster/oidc-provider.js
index d969c1e68e29..a33a9b6ac623 100644
--- a/ui/app/routes/vault/cluster/oidc-provider.js
+++ b/ui/app/routes/vault/cluster/oidc-provider.js
@@ -94,17 +94,17 @@ export default class VaultClusterOidcProviderRoute extends Route {
_handleSuccess(response, baseUrl, state) {
const { code } = response;
const redirectUrl = this._buildUrl(baseUrl, { code, state });
- if (Ember.testing) {
- return { redirectUrl };
+ if (!Ember.testing) {
+ this.win.location.replace(redirectUrl);
}
- this.win.location.replace(redirectUrl);
+ return { redirectUrl };
}
_handleError(errorResp, baseUrl) {
const redirectUrl = this._buildUrl(baseUrl, { ...errorResp });
- if (Ember.testing) {
- return { redirectUrl };
+ if (!Ember.testing) {
+ this.win.location.replace(redirectUrl);
}
- this.win.location.replace(redirectUrl);
+ return { redirectUrl };
}
/**
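
The route change above flips the previous early-return: the redirect URL is now always returned, and the browser-level redirect only happens outside test runs. A minimal sketch of that control flow, where `win` and `isTesting` stand in for the route's injected window reference and `Ember.testing`:

```typescript
// Simplified sketch, not the actual route code: always return the computed URL so the
// template can render a fallback link, and only navigate when not under test.
interface WindowLike {
  location: { replace(url: string): void };
}

function handleRedirect(redirectUrl: string, win: WindowLike, isTesting: boolean): { redirectUrl: string } {
  if (!isTesting) {
    win.location.replace(redirectUrl);
  }
  return { redirectUrl };
}
```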
diff --git a/ui/app/styles/helper-classes/typography.scss b/ui/app/styles/helper-classes/typography.scss
index 4cde9b2d3c16..8466b0c29440 100644
--- a/ui/app/styles/helper-classes/typography.scss
+++ b/ui/app/styles/helper-classes/typography.scss
@@ -110,6 +110,9 @@
}
}
+.opacity-050 {
+ opacity: 0.5;
+}
.opacity-060 {
opacity: 0.6;
}
diff --git a/ui/app/templates/vault/cluster/oidc-provider.hbs b/ui/app/templates/vault/cluster/oidc-provider.hbs
index 24f57426bdc0..e3fcd541a83d 100644
--- a/ui/app/templates/vault/cluster/oidc-provider.hbs
+++ b/ui/app/templates/vault/cluster/oidc-provider.hbs
@@ -22,7 +22,9 @@
@onSuccess={{this._handleSuccess}}
/>
{{else if this.model.redirectUrl}}
-
{{this.model.redirectUrl}}
+
+ If you are not automatically redirected,
+ click here to go back to app.
{{else}}
{{/if}}
diff --git a/ui/lib/core/addon/components/search-select-placeholder.hbs b/ui/lib/core/addon/components/search-select-placeholder.hbs
index b168b1c9ab53..32b9adeea1ce 100644
--- a/ui/lib/core/addon/components/search-select-placeholder.hbs
+++ b/ui/lib/core/addon/components/search-select-placeholder.hbs
@@ -6,7 +6,7 @@
- {{or @placeholder "Search"}}
+ {{or @placeholder "Search"}}
diff --git a/ui/lib/kv/addon/components/page/secret/details.hbs b/ui/lib/kv/addon/components/page/secret/details.hbs
index 4a5f91ea6609..2eb6bab4e8fa 100644
--- a/ui/lib/kv/addon/components/page/secret/details.hbs
+++ b/ui/lib/kv/addon/components/page/secret/details.hbs
@@ -6,10 +6,11 @@
<:syncDetails>
{{#if this.syncStatus}}
-
+
- This secret has been synced from Vault to other destinations, updates to the secret will get automatically synced
- to destinations.
+ This secret has been synced from Vault to
+ {{pluralize this.syncStatus.length "destination"}}. Updates to this secret will automatically sync to its
+ {{if (eq this.syncStatus.length 1) "destination" "destinations"}}.
{{#each this.syncStatus as |status|}}
diff --git a/ui/lib/kv/package.json b/ui/lib/kv/package.json
index 4942e70fea86..5c76aa678d4a 100644
--- a/ui/lib/kv/package.json
+++ b/ui/lib/kv/package.json
@@ -8,7 +8,8 @@
"ember-cli-htmlbars": "*",
"ember-cli-babel": "*",
"ember-concurrency": "*",
- "@ember/test-waiters": "*"
+ "@ember/test-waiters": "*",
+ "ember-inflector": "*"
},
"ember-addon": {
"paths": [
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations.hbs b/ui/lib/sync/addon/components/secrets/page/destinations.hbs
index 40eba8842c04..a1cf7ecbea12 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations.hbs
+++ b/ui/lib/sync/addon/components/secrets/page/destinations.hbs
@@ -28,18 +28,17 @@
class="is-marginless"
data-test-filter="type"
/>
-
+
+
+
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations.ts b/ui/lib/sync/addon/components/secrets/page/destinations.ts
index d4aa6c08b5ca..77ac790f06a0 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations.ts
+++ b/ui/lib/sync/addon/components/secrets/page/destinations.ts
@@ -9,6 +9,7 @@ import { action } from '@ember/object';
import { getOwner } from '@ember/application';
import errorMessage from 'vault/utils/error-message';
import { findDestination, syncDestinations } from 'core/helpers/sync-destinations';
+import { next } from '@ember/runloop';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
import type RouterService from '@ember/routing/router-service';
@@ -16,6 +17,7 @@ import type StoreService from 'vault/services/store';
import type FlashMessageService from 'vault/services/flash-messages';
import type { EngineOwner } from 'vault/vault/app-types';
import type { SyncDestinationName, SyncDestinationType } from 'vault/vault/helpers/sync-destinations';
+import type Transition from '@ember/routing/transition';
interface Args {
  destinations: Array<SyncDestinationModel>;
@@ -28,15 +30,31 @@ export default class SyncSecretsDestinationsPageComponent extends Component<Args> {
+      next(() => document.getElementById('name-filter')?.focus());
+    }
+  }
+
// typeFilter arg comes in as destination type but we need to pass the destination display name into the SearchSelect
get typeFilterName() {
return findDestination(this.args.typeFilter)?.name;
}
- get destinationNames() {
- return this.args.destinations.map((destination) => ({ id: destination.name, name: destination.name }));
- }
-
get destinationTypes() {
return syncDestinations().map((d) => ({ id: d.name, name: d.type }));
}
@@ -65,9 +83,10 @@ export default class SyncSecretsDestinationsPageComponent extends Component<Args> {
-  onFilterChange(key: string, selectObject: Array<{ id: string; name: string }>) {
+ onFilterChange(key: string, value: { id: string; name: string }[] | string | undefined) {
+ const queryValue = Array.isArray(value) ? value[0]?.name : value;
this.router.transitionTo('vault.cluster.sync.secrets.destinations', {
- queryParams: { [key]: selectObject[0]?.name },
+ queryParams: { [key]: queryValue },
});
}
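
Because the name filter now comes from a plain text input (a string) while the type filter still comes from SearchSelect (an array of objects), `onFilterChange` normalizes the value before writing it to the query params. A small sketch of that normalization, with the type copied from the signature shown above:

```typescript
// Sketch of the value normalization in onFilterChange: SearchSelect passes an array of
// { id, name } objects, the name input passes a string, and clearing passes undefined.
type FilterValue = { id: string; name: string }[] | string | undefined;

function normalizeFilterValue(value: FilterValue): string | undefined {
  return Array.isArray(value) ? value[0]?.name : value;
}

// e.g. normalizeFilterValue([{ id: 'dest-1', name: 'dest-1' }]) === 'dest-1'
//      normalizeFilterValue('my-destination') === 'my-destination'
```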
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations/create-and-edit.ts b/ui/lib/sync/addon/components/secrets/page/destinations/create-and-edit.ts
index 04e9331bcfae..6ba75cfe0e39 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations/create-and-edit.ts
+++ b/ui/lib/sync/addon/components/secrets/page/destinations/create-and-edit.ts
@@ -59,7 +59,7 @@ export default class DestinationsCreateForm extends Component {
@waitFor
*save(event: Event) {
event.preventDefault();
-
+ this.error = '';
// clear out validation warnings
this.modelValidations = null;
const { destination } = this.args;
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations/destination/sync.hbs b/ui/lib/sync/addon/components/secrets/page/destinations/destination/sync.hbs
index be90b8acb5cc..20725c4c7896 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations/destination/sync.hbs
+++ b/ui/lib/sync/addon/components/secrets/page/destinations/destination/sync.hbs
@@ -37,7 +37,7 @@
Select a KV engine mount and path to sync a secret to the
{{@destination.typeDisplayName}}
- destination.
+ destination. Selecting a previously synced secret will re-sync that secret.
diff --git a/ui/lib/sync/addon/components/secrets/page/overview.hbs b/ui/lib/sync/addon/components/secrets/page/overview.hbs
index 3a8844fb5f1b..e114da4cf849 100644
--- a/ui/lib/sync/addon/components/secrets/page/overview.hbs
+++ b/ui/lib/sync/addon/components/secrets/page/overview.hbs
@@ -104,7 +104,7 @@
diff --git a/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts b/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts
index ddf1ca5dd570..9d24413acd00 100644
--- a/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts
+++ b/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts
@@ -8,12 +8,24 @@ import { inject as service } from '@ember/service';
import { hash } from 'rsvp';
import type StoreService from 'vault/services/store';
-import SyncDestinationModel from 'vault/vault/models/sync/destination';
+import type SyncDestinationModel from 'vault/vault/models/sync/destination';
+import type SyncAssociationModel from 'vault/vault/models/sync/association';
+import type Controller from '@ember/controller';
interface SyncDestinationSecretsRouteParams {
page: string;
}
+interface SyncDestinationSecretsRouteModel {
+ destination: SyncDestinationModel;
+ associations: SyncAssociationModel[];
+}
+
+interface SyncDestinationSecretsController extends Controller {
+ model: SyncDestinationSecretsRouteModel;
+ page: number | undefined;
+}
+
export default class SyncDestinationSecretsRoute extends Route {
@service declare readonly store: StoreService;
@@ -35,4 +47,10 @@ export default class SyncDestinationSecretsRoute extends Route {
}),
});
}
+
+ resetController(controller: SyncDestinationSecretsController, isExiting: boolean) {
+ if (isExiting) {
+ controller.set('page', undefined);
+ }
+ }
}
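
The `resetController` hook added above clears the sticky `page` query param whenever the route is exited, so pagination state does not leak into the next visit. A minimal, self-contained sketch of the same pattern; the route and controller names here are placeholders, not Vault's:

```typescript
// Minimal sketch of the query-param reset pattern, assuming a standard Ember route.
import Route from '@ember/routing/route';
import type Controller from '@ember/controller';

interface PagedController extends Controller {
  page: number | undefined;
}

export default class ExamplePagedRoute extends Route {
  resetController(controller: PagedController, isExiting: boolean) {
    if (isExiting) {
      // Clearing the controller property resets the query param to its default.
      controller.set('page', undefined);
    }
  }
}
```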
diff --git a/ui/lib/sync/addon/routes/secrets/destinations/index.ts b/ui/lib/sync/addon/routes/secrets/destinations/index.ts
index 132417095f1c..ae8d37cbcca2 100644
--- a/ui/lib/sync/addon/routes/secrets/destinations/index.ts
+++ b/ui/lib/sync/addon/routes/secrets/destinations/index.ts
@@ -11,6 +11,7 @@ import type StoreService from 'vault/services/store';
import type RouterService from '@ember/routing/router-service';
import type { ModelFrom } from 'vault/vault/route';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
+import type Controller from '@ember/controller';
interface SyncSecretsDestinationsIndexRouteParams {
name: string;
@@ -18,6 +19,19 @@ interface SyncSecretsDestinationsIndexRouteParams {
page: string;
}
+interface SyncSecretsDestinationsRouteModel {
+ destinations: SyncDestinationModel[];
+ nameFilter: string | undefined;
+ typeFilter: string | undefined;
+}
+
+interface SyncSecretsDestinationsController extends Controller {
+ model: SyncSecretsDestinationsRouteModel;
+ page: number | undefined;
+ name: number | undefined;
+ type: number | undefined;
+}
+
export default class SyncSecretsDestinationsIndexRoute extends Route {
@service declare readonly store: StoreService;
@service declare readonly router: RouterService;
@@ -35,7 +49,7 @@ export default class SyncSecretsDestinationsIndexRoute extends Route {
};
  redirect(model: ModelFrom<SyncSecretsDestinationsIndexRoute>) {
- if (model.destinations.length === 0) {
+ if (!model.destinations.meta.total) {
this.router.transitionTo('vault.cluster.sync.secrets.overview');
}
}
@@ -43,7 +57,7 @@ export default class SyncSecretsDestinationsIndexRoute extends Route {
  filterData(dataset: Array<SyncDestinationModel>, name: string, type: string): Array<SyncDestinationModel> {
let filteredDataset = dataset;
const filter = (key: keyof SyncDestinationModel, value: string) => {
- return dataset.filter((model) => {
+ return filteredDataset.filter((model) => {
return model[key].toLowerCase().includes(value.toLowerCase());
});
};
@@ -68,4 +82,14 @@ export default class SyncSecretsDestinationsIndexRoute extends Route {
typeFilter: params.type,
});
}
+
+ resetController(controller: SyncSecretsDestinationsController, isExiting: boolean) {
+ if (isExiting) {
+ controller.setProperties({
+ page: undefined,
+ name: undefined,
+ type: undefined,
+ });
+ }
+ }
}
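
The one-line change to `filterData` above is what lets the name and type filters compose: each pass has to filter the already-filtered set rather than the original dataset, otherwise the second filter silently discards the first. A standalone sketch of the composed behaviour, with the model shape simplified for illustration:

```typescript
// Sketch of composed filtering: filtering the working set (not the original dataset)
// means applying both a type and a name filter narrows the results cumulatively.
interface DestinationLike {
  name: string;
  type: string;
}

function filterDestinations(dataset: DestinationLike[], name?: string, type?: string): DestinationLike[] {
  let filtered = dataset;
  const applyFilter = (key: keyof DestinationLike, value: string) => {
    filtered = filtered.filter((d) => d[key].toLowerCase().includes(value.toLowerCase()));
  };
  if (type) applyFilter('type', type);
  if (name) applyFilter('name', name);
  return filtered;
}
```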
diff --git a/ui/package.json b/ui/package.json
index be524e0817b5..5965b81085bb 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -30,9 +30,10 @@
"start:mirage": "start () { MIRAGE_DEV_HANDLER=$1 yarn run start; }; start",
"test": "npm-run-all --print-name lint:js:quiet lint:hbs:quiet && node scripts/start-vault.js",
"test:enos": "npm-run-all lint:js:quiet lint:hbs:quiet && node scripts/enos-test-ember.js",
- "test:oss": "yarn run test -f='!enterprise'",
- "test:quick": "node scripts/start-vault.js",
- "test:quick-oss": "yarn test:quick -f='!enterprise'",
+ "test:oss": "yarn run test -f='!enterprise' --split=8 --preserve-test-name --parallel",
+ "test:quick": "node scripts/start-vault.js --split=8 --preserve-test-name --parallel",
+ "test:quick-oss": "yarn test:quick -f='!enterprise' --split=8 --preserve-test-name --parallel",
+ "test:filter": "node scripts/start-vault.js --server -f='!enterprise'",
"types:declare": "declare () { yarn tsc $1 --declaration --allowJs --emitDeclarationOnly --experimentalDecorators --outDir $2; }; declare",
"vault": "VAULT_REDIRECT_ADDR=http://127.0.0.1:8200 vault server -log-level=error -dev -dev-root-token-id=root -dev-ha -dev-transactional",
"vault:cluster": "VAULT_REDIRECT_ADDR=http://127.0.0.1:8202 vault server -log-level=error -dev -dev-root-token-id=root -dev-listen-address=127.0.0.1:8202 -dev-ha -dev-transactional"
@@ -145,6 +146,7 @@
"ember-d3": "^0.5.1",
"ember-data": "~4.11.3",
"ember-engines": "0.8.23",
+ "ember-exam": "^9.0.0",
"ember-fetch": "^8.1.2",
"ember-inflector": "4.0.2",
"ember-load-initializers": "^2.1.2",
diff --git a/ui/scripts/start-vault.js b/ui/scripts/start-vault.js
index 44c1cae5424b..8ea4398da80f 100755
--- a/ui/scripts/start-vault.js
+++ b/ui/scripts/start-vault.js
@@ -69,7 +69,9 @@ async function processLines(input, eachLine = () => {}) {
}
});
try {
- await testHelper.run('ember', ['test', ...process.argv.slice(2)]);
+ // only the test:filter command specifies --server by default
+ const verb = process.argv[2] === '--server' ? 'test' : 'exam';
+ await testHelper.run('ember', [verb, ...process.argv.slice(2)]);
} catch (error) {
console.log(error);
process.exit(1);
diff --git a/ui/testem.js b/ui/testem.js
index 3888c70fc45c..66c8bdbbb0b2 100644
--- a/ui/testem.js
+++ b/ui/testem.js
@@ -28,9 +28,10 @@ module.exports = {
},
proxies: {
'/v1': {
- target: 'http://localhost:9200',
+ target: 'http://127.0.0.1:9200',
},
},
+ parallel: process.env.EMBER_EXAM_SPLIT_COUNT || 1,
};
if (process.env.CI) {
diff --git a/ui/tests/acceptance/console-test.js b/ui/tests/acceptance/console-test.js
index 409a70a49e2d..527605da81c2 100644
--- a/ui/tests/acceptance/console-test.js
+++ b/ui/tests/acceptance/console-test.js
@@ -10,6 +10,7 @@ import { setupApplicationTest } from 'ember-qunit';
import enginesPage from 'vault/tests/pages/secrets/backends';
import authPage from 'vault/tests/pages/auth';
import consoleClass from 'vault/tests/pages/components/console/ui-panel';
+import { v4 as uuidv4 } from 'uuid';
const consoleComponent = create(consoleClass);
@@ -21,28 +22,38 @@ module('Acceptance | console', function (hooks) {
});
test("refresh reloads the current route's data", async function (assert) {
+ assert.expect(6);
await enginesPage.visit();
await settled();
- const numEngines = enginesPage.rows.length;
await consoleComponent.toggle();
await settled();
- for (const num of [1, 2, 3]) {
- const inputString = `write sys/mounts/console-route-${num} type=kv`;
+ const ids = [uuidv4(), uuidv4(), uuidv4()];
+ for (const id of ids) {
+ const inputString = `write sys/mounts/console-route-${id} type=kv`;
await consoleComponent.runCommands(inputString);
await settled();
}
await consoleComponent.runCommands('refresh');
await settled();
- assert.strictEqual(enginesPage.rows.length, numEngines + 3, 'new engines were added to the page');
+ for (const id of ids) {
+ assert.ok(
+ enginesPage.rows.findOneBy('path', `console-route-${id}/`),
+ 'new engine is shown on the page'
+ );
+ }
// Clean up
- for (const num of [1, 2, 3]) {
- const inputString = `delete sys/mounts/console-route-${num}`;
+ for (const id of ids) {
+ const inputString = `delete sys/mounts/console-route-${id}`;
await consoleComponent.runCommands(inputString);
await settled();
}
await consoleComponent.runCommands('refresh');
await settled();
- assert.strictEqual(enginesPage.rows.length, numEngines, 'engines were removed from the page');
+ for (const id of ids) {
+ assert.throws(() => {
+ enginesPage.rows.findOneBy('path', `console-route-${id}/`);
+ }, 'engine was removed');
+ }
});
test('fullscreen command expands the cli panel', async function (assert) {
diff --git a/ui/tests/acceptance/oidc-config/clients-assignments-test.js b/ui/tests/acceptance/oidc-config/clients-assignments-test.js
index c3b34996da5a..c5fa9d2858b2 100644
--- a/ui/tests/acceptance/oidc-config/clients-assignments-test.js
+++ b/ui/tests/acceptance/oidc-config/clients-assignments-test.js
@@ -75,7 +75,7 @@ module('Acceptance | oidc-config clients and assignments', function (hooks) {
});
test('it creates an assignment inline, creates a client, updates client to limit access, deletes client', async function (assert) {
- assert.expect(22);
+ assert.expect(21);
//* clear out test state
await clearRecord(this.store, 'oidc/client', 'test-app');
@@ -197,11 +197,16 @@ module('Acceptance | oidc-config clients and assignments', function (hooks) {
assert.strictEqual(currentRouteName(), 'vault.cluster.access.oidc.clients.client.details');
await click(SELECTORS.clientDeleteButton);
await click(SELECTORS.confirmActionButton);
- assert.strictEqual(
- currentRouteName(),
- 'vault.cluster.access.oidc.index',
- 'redirects to call to action if only existing client is deleted'
- );
+
+      // TODO: this part of the test has a race condition: other tests may have created
+      // clients, so there is no guarantee this is the last client in the list and that
+      // deleting it redirects to the call to action.
+ //assert.strictEqual(
+ //currentRouteName(),
+ //'vault.cluster.access.oidc.index',
+ //'redirects to call to action if only existing client is deleted'
+ //);
+
//* clean up test state
await clearRecord(this.store, 'oidc/assignment', 'assignment-inline');
});
diff --git a/ui/tests/acceptance/oidc-provider-test.js b/ui/tests/acceptance/oidc-provider-test.js
index 8ef24aa24cd4..4446afccd94c 100644
--- a/ui/tests/acceptance/oidc-provider-test.js
+++ b/ui/tests/acceptance/oidc-provider-test.js
@@ -163,10 +163,11 @@ module('Acceptance | oidc provider', function (hooks) {
await authFormComponent.login();
await settled();
assert.strictEqual(currentURL(), url, 'URL is as expected after login');
- assert.dom('[data-test-oidc-redirect]').exists('redirect text exists');
assert
.dom('[data-test-oidc-redirect]')
- .hasTextContaining(`${callback}?code=`, 'Successful redirect to callback');
+ .hasTextContaining(`click here to go back to app`, 'Shows link back to app');
+ const link = document.querySelector('[data-test-oidc-redirect]').getAttribute('href');
+ assert.ok(link.includes('/callback?code='), 'Redirects to correct url');
//* clean up test state
await clearRecord(this.store, 'oidc/client', 'my-webapp');
@@ -191,7 +192,9 @@ module('Acceptance | oidc provider', function (hooks) {
await settled();
assert
.dom('[data-test-oidc-redirect]')
- .hasTextContaining(`${callback}?code=`, 'Successful redirect to callback');
+ .hasTextContaining(`click here to go back to app`, 'Shows link back to app');
+ const link = document.querySelector('[data-test-oidc-redirect]').getAttribute('href');
+ assert.ok(link.includes('/callback?code='), 'Redirects to correct url');
//* clean up test state
await clearRecord(this.store, 'oidc/client', 'my-webapp');
diff --git a/ui/tests/acceptance/pki/pki-engine-workflow-test.js b/ui/tests/acceptance/pki/pki-engine-workflow-test.js
index 3afc02b25db9..2e47be583573 100644
--- a/ui/tests/acceptance/pki/pki-engine-workflow-test.js
+++ b/ui/tests/acceptance/pki/pki-engine-workflow-test.js
@@ -13,7 +13,7 @@ import enablePage from 'vault/tests/pages/settings/mount-secret-backend';
import { click, currentURL, fillIn, find, isSettled, visit } from '@ember/test-helpers';
import { SELECTORS } from 'vault/tests/helpers/pki/workflow';
import { adminPolicy, readerPolicy, updatePolicy } from 'vault/tests/helpers/policy-generator/pki';
-import { tokenWithPolicy, runCommands } from 'vault/tests/helpers/pki/pki-run-commands';
+import { tokenWithPolicy, runCommands, clearRecords } from 'vault/tests/helpers/pki/pki-run-commands';
import { unsupportedPem } from 'vault/tests/helpers/pki/values';
/**
@@ -25,12 +25,14 @@ module('Acceptance | pki workflow', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(async function () {
+ this.store = this.owner.lookup('service:store');
await authPage.login();
// Setup PKI engine
const mountPath = `pki-workflow-${uuidv4()}`;
await enablePage.enable('pki', mountPath);
this.mountPath = mountPath;
await logout.visit();
+ clearRecords(this.store);
});
hooks.afterEach(async function () {
@@ -40,40 +42,50 @@ module('Acceptance | pki workflow', function (hooks) {
await runCommands([`delete sys/mounts/${this.mountPath}`]);
});
- test('empty state messages are correct when PKI not configured', async function (assert) {
- assert.expect(21);
- const assertEmptyState = (assert, resource) => {
- assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/${resource}`);
- assert
- .dom(SELECTORS.emptyStateTitle)
- .hasText(
- 'PKI not configured',
- `${resource} index renders correct empty state title when PKI not configured`
- );
- assert.dom(SELECTORS.emptyStateLink).hasText('Configure PKI');
- assert
- .dom(SELECTORS.emptyStateMessage)
- .hasText(
- `This PKI mount hasn't yet been configured with a certificate issuer.`,
- `${resource} index empty state message correct when PKI not configured`
- );
- };
- await authPage.login(this.pkiAdminToken);
- await visit(`/vault/secrets/${this.mountPath}/pki/overview`);
- assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/overview`);
-
- await click(SELECTORS.rolesTab);
- assertEmptyState(assert, 'roles');
-
- await click(SELECTORS.issuersTab);
- assertEmptyState(assert, 'issuers');
-
- await click(SELECTORS.certsTab);
- assertEmptyState(assert, 'certificates');
- await click(SELECTORS.keysTab);
- assertEmptyState(assert, 'keys');
- await click(SELECTORS.tidyTab);
- assertEmptyState(assert, 'tidy');
+ module('not configured', function (hooks) {
+ hooks.beforeEach(async function () {
+ await authPage.login();
+ const pki_admin_policy = adminPolicy(this.mountPath, 'roles');
+ this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
+ await logout.visit();
+ clearRecords(this.store);
+ });
+
+ test('empty state messages are correct when PKI not configured', async function (assert) {
+ assert.expect(21);
+ const assertEmptyState = (assert, resource) => {
+ assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/${resource}`);
+ assert
+ .dom(SELECTORS.emptyStateTitle)
+ .hasText(
+ 'PKI not configured',
+ `${resource} index renders correct empty state title when PKI not configured`
+ );
+ assert.dom(SELECTORS.emptyStateLink).hasText('Configure PKI');
+ assert
+ .dom(SELECTORS.emptyStateMessage)
+ .hasText(
+ `This PKI mount hasn't yet been configured with a certificate issuer.`,
+ `${resource} index empty state message correct when PKI not configured`
+ );
+ };
+ await authPage.login(this.pkiAdminToken);
+ await visit(`/vault/secrets/${this.mountPath}/pki/overview`);
+ assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/overview`);
+
+ await click(SELECTORS.rolesTab);
+ assertEmptyState(assert, 'roles');
+
+ await click(SELECTORS.issuersTab);
+ assertEmptyState(assert, 'issuers');
+
+ await click(SELECTORS.certsTab);
+ assertEmptyState(assert, 'certificates');
+ await click(SELECTORS.keysTab);
+ assertEmptyState(assert, 'keys');
+ await click(SELECTORS.tidyTab);
+ assertEmptyState(assert, 'tidy');
+ });
});
module('roles', function (hooks) {
@@ -91,10 +103,11 @@ module('Acceptance | pki workflow', function (hooks) {
const pki_admin_policy = adminPolicy(this.mountPath, 'roles');
const pki_reader_policy = readerPolicy(this.mountPath, 'roles');
const pki_editor_policy = updatePolicy(this.mountPath, 'roles');
- this.pkiRoleReader = await tokenWithPolicy('pki-reader', pki_reader_policy);
- this.pkiRoleEditor = await tokenWithPolicy('pki-editor', pki_editor_policy);
- this.pkiAdminToken = await tokenWithPolicy('pki-admin', pki_admin_policy);
+ this.pkiRoleReader = await tokenWithPolicy(`pki-reader-${this.mountPath}`, pki_reader_policy);
+ this.pkiRoleEditor = await tokenWithPolicy(`pki-editor-${this.mountPath}`, pki_editor_policy);
+ this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
await logout.visit();
+ clearRecords(this.store);
});
test('shows correct items if user has all permissions', async function (assert) {
@@ -222,10 +235,11 @@ module('Acceptance | pki workflow', function (hooks) {
const pki_admin_policy = adminPolicy(this.mountPath);
const pki_reader_policy = readerPolicy(this.mountPath, 'keys', true);
const pki_editor_policy = updatePolicy(this.mountPath, 'keys');
- this.pkiKeyReader = await tokenWithPolicy('pki-reader', pki_reader_policy);
- this.pkiKeyEditor = await tokenWithPolicy('pki-editor', pki_editor_policy);
- this.pkiAdminToken = await tokenWithPolicy('pki-admin', pki_admin_policy);
+ this.pkiKeyReader = await tokenWithPolicy(`pki-reader-${this.mountPath}`, pki_reader_policy);
+ this.pkiKeyEditor = await tokenWithPolicy(`pki-editor-${this.mountPath}`, pki_editor_policy);
+ this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
await logout.visit();
+ clearRecords(this.store);
});
test('shows correct items if user has all permissions', async function (assert) {
@@ -339,11 +353,14 @@ module('Acceptance | pki workflow', function (hooks) {
module('issuers', function (hooks) {
hooks.beforeEach(async function () {
await authPage.login();
+ const pki_admin_policy = adminPolicy(this.mountPath);
+ this.pkiAdminToken = await tokenWithPolicy(`pki-admin-${this.mountPath}`, pki_admin_policy);
// Configure engine with a default issuer
await runCommands([
`write ${this.mountPath}/root/generate/internal common_name="Hashicorp Test" name="Hashicorp Test"`,
]);
await logout.visit();
+ clearRecords(this.store);
});
test('lists the correct issuer metadata info', async function (assert) {
assert.expect(6);
@@ -373,7 +390,10 @@ module('Acceptance | pki workflow', function (hooks) {
capabilities = ["deny"]
}
`;
- this.token = await tokenWithPolicy('pki-issuer-denied-policy', pki_issuer_denied_policy);
+ this.token = await tokenWithPolicy(
+ `pki-issuer-denied-policy-${this.mountPath}`,
+ pki_issuer_denied_policy
+ );
await logout.visit();
await authPage.login(this.token);
await visit(`/vault/secrets/${this.mountPath}/pki/overview`);
@@ -487,7 +507,10 @@ module('Acceptance | pki workflow', function (hooks) {
${adminPolicy(this.mountPath)}
${readerPolicy(this.mountPath, 'config/cluster')}
`;
- this.mixedConfigCapabilities = await tokenWithPolicy('pki-reader', mixed_config_policy);
+ this.mixedConfigCapabilities = await tokenWithPolicy(
+ `pki-reader-${this.mountPath}`,
+ mixed_config_policy
+ );
await logout.visit();
});
diff --git a/ui/tests/acceptance/pki/pki-overview-test.js b/ui/tests/acceptance/pki/pki-overview-test.js
index a8234e0d31fc..355216a53149 100644
--- a/ui/tests/acceptance/pki/pki-overview-test.js
+++ b/ui/tests/acceptance/pki/pki-overview-test.js
@@ -5,20 +5,23 @@
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
+import { v4 as uuidv4 } from 'uuid';
+
import authPage from 'vault/tests/pages/auth';
import logout from 'vault/tests/pages/logout';
import enablePage from 'vault/tests/pages/settings/mount-secret-backend';
import { click, currentURL, currentRouteName, visit } from '@ember/test-helpers';
import { SELECTORS } from 'vault/tests/helpers/pki/overview';
-import { tokenWithPolicy, runCommands } from 'vault/tests/helpers/pki/pki-run-commands';
+import { tokenWithPolicy, runCommands, clearRecords } from 'vault/tests/helpers/pki/pki-run-commands';
module('Acceptance | pki overview', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(async function () {
+ this.store = this.owner.lookup('service:store');
await authPage.login();
// Setup PKI engine
- const mountPath = `pki`;
+ const mountPath = `pki-${uuidv4()}`;
await enablePage.enable('pki', mountPath);
this.mountPath = mountPath;
await runCommands([`write ${this.mountPath}/root/generate/internal common_name="Hashicorp Test"`]);
@@ -42,6 +45,7 @@ module('Acceptance | pki overview', function (hooks) {
this.pkiIssuersList = await tokenWithPolicy('pki-issuers-list', pki_issuers_list_policy);
this.pkiAdminToken = await tokenWithPolicy('pki-admin', pki_admin_policy);
await logout.visit();
+ clearRecords(this.store);
});
hooks.afterEach(async function () {
diff --git a/ui/tests/acceptance/policies-acl-old-test.js b/ui/tests/acceptance/policies-acl-old-test.js
index 3415f0e5d63a..35eccecf4bc6 100644
--- a/ui/tests/acceptance/policies-acl-old-test.js
+++ b/ui/tests/acceptance/policies-acl-old-test.js
@@ -45,6 +45,7 @@ module('Acceptance | policies (old)', function (hooks) {
assert.dom('[data-test-policy-name]').hasText(policyLower, 'displays the policy name on the show page');
assert.dom('[data-test-flash-message].is-info').doesNotExist('no flash message is displayed on save');
await click('[data-test-policy-list-link] a');
+ await fillIn('[data-test-component="navigate-input"]', policyLower);
assert
.dom(`[data-test-policy-link="${policyLower}"]`)
.exists({ count: 1 }, 'new policy shown in the list');
@@ -63,6 +64,7 @@ module('Acceptance | policies (old)', function (hooks) {
`/vault/policies/acl`,
'navigates to policy list on successful deletion'
);
+ await fillIn('[data-test-component="navigate-input"]', policyLower);
assert
.dom(`[data-test-policy-item="${policyLower}"]`)
.doesNotExist('deleted policy is not shown in the list');
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js
index 49567b1f508b..fc121a888c2f 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js
@@ -38,7 +38,7 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('admin persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('admin', personas.admin(this.backend)));
+ const token = await runCmd(tokenWithPolicyCmd(`admin-${this.backend}`, personas.admin(this.backend)));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -282,7 +282,9 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('data-reader persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('data-reader', personas.dataReader(this.backend)));
+ const token = await runCmd(
+ tokenWithPolicyCmd(`data-reader-${this.backend}`, personas.dataReader(this.backend))
+ );
await authPage.login(token);
clearRecords(this.store);
return;
@@ -423,7 +425,7 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('data-list-reader persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
- tokenWithPolicyCmd('data-list-reader', personas.dataListReader(this.backend))
+ tokenWithPolicyCmd(`data-list-reader-${this.backend}`, personas.dataListReader(this.backend))
);
await authPage.login(token);
clearRecords(this.store);
@@ -568,7 +570,7 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('metadata-maintainer persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
- tokenWithPolicyCmd('data-list-reader', personas.metadataMaintainer(this.backend))
+ tokenWithPolicyCmd(`data-list-reader-${this.backend}`, personas.metadataMaintainer(this.backend))
);
await authPage.login(token);
clearRecords(this.store);
@@ -764,7 +766,9 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('secret-creator persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('secret-creator', personas.secretCreator(this.backend)));
+ const token = await runCmd(
+ tokenWithPolicyCmd(`secret-creator-${this.backend}`, personas.secretCreator(this.backend))
+ );
await authPage.login(token);
clearRecords(this.store);
return;
@@ -1006,7 +1010,10 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
module('secret-nested-creator persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
- tokenWithPolicyCmd('secret-nested-creator', personas.secretNestedCreator(this.backend))
+ tokenWithPolicyCmd(
+ `secret-nested-creator-${this.backend}`,
+ personas.secretNestedCreator(this.backend)
+ )
);
await authPage.login(token);
clearRecords(this.store);
@@ -1048,7 +1055,11 @@ path "${this.backend}/metadata/*" {
capabilities = ["list", "read"]
}
`;
- const { userToken } = await setupControlGroup({ userPolicy });
+
+ const { userToken } = await setupControlGroup({
+ userPolicy,
+ backend: this.backend,
+ });
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);
@@ -1089,11 +1100,12 @@ path "${this.backend}/metadata/*" {
'shows control group error'
);
await grantAccessForWrite({
- accessor: tokenToUnwrap.accessor,
token: tokenToUnwrap.token,
+ accessor: tokenToUnwrap.accessor,
creation_path: `${backend}/data/${secretPath}`,
originUrl: `/vault/secrets/${backend}/kv/create`,
userToken: this.userToken,
+ backend: this.backend,
});
// In a real scenario the user would stay on page, but in the test
// we fill in the same info and try again
@@ -1161,6 +1173,7 @@ path "${this.backend}/metadata/*" {
creation_path: `${backend}/data/${secretPath}`,
originUrl: `/vault/secrets/${backend}/kv/${secretPath}/details/edit`,
userToken: this.userToken,
+ backend: this.backend,
});
// Remark for unwrap as if we never left the page.
this.controlGroup.markTokenForUnwrap(tokenToUnwrap.accessor);
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-delete-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-delete-test.js
index c96e70f061c1..6a5f5be9c982 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-delete-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-delete-test.js
@@ -28,6 +28,11 @@ const assertDeleteActions = (assert, expected = ['delete', 'destroy']) => {
}
});
};
+
+const makeToken = (name, mountPath, policyGenerator) => {
+ return tokenWithPolicyCmd(`${name}-${mountPath}`, policyGenerator(mountPath));
+};
+
/**
* This test set is for testing delete, undelete, destroy flows
* Letter(s) in parenthesis at the end are shorthand for the persona,
@@ -58,7 +63,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('admin persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('admin', personas.admin(this.backend)));
+ const token = await runCmd(makeToken('admin', this.backend, personas.admin));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -159,7 +164,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('data-reader persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('data-reader', personas.dataReader(this.backend)));
+ const token = await runCmd(makeToken('data-reader', this.backend, personas.dataReader));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -206,9 +211,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('data-list-reader persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(
- tokenWithPolicyCmd('data-list-reader', personas.dataListReader(this.backend))
- );
+ const token = await runCmd(makeToken('data-list-reader', this.backend, personas.dataListReader));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -266,9 +269,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('metadata-maintainer persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(
- tokenWithPolicyCmd('metadata-maintainer', personas.metadataMaintainer(this.backend))
- );
+ const token = await runCmd(makeToken('metadata-maintainer', this.backend, personas.metadataMaintainer));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -358,7 +359,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('secret-nested-creator persona', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd(
- tokenWithPolicyCmd('secret-nested-creator', personas.secretNestedCreator(this.backend))
+ makeToken('secret-nested-creator', this.backend, personas.secretNestedCreator)
);
await authPage.login(token);
clearRecords(this.store);
@@ -384,7 +385,7 @@ module('Acceptance | kv-v2 workflow | delete, undelete, destroy', function (hook
module('secret-creator persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('secret-creator', personas.secretCreator(this.backend)));
+ const token = await runCmd(makeToken('secret-creator', this.backend, personas.secretCreator));
await authPage.login(token);
clearRecords(this.store);
return;
@@ -468,7 +469,7 @@ path "sys/control-group/request" {
}
`;
- const { userToken } = await setupControlGroup({ userPolicy });
+ const { userToken } = await setupControlGroup({ userPolicy, backend: this.backend });
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js
index 1c1b0d433a74..87de5a20d715 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js
@@ -59,11 +59,11 @@ module('Acceptance | kv-v2 workflow | edge cases', function (hooks) {
const backend = this.backend;
const token = await runCmd([
createPolicyCmd(
- 'nested-secret-list-reader',
+ `nested-secret-list-reader-${this.backend}`,
metadataPolicy({ backend, secretPath, capabilities }) +
dataPolicy({ backend, secretPath, capabilities })
),
- createTokenCmd('nested-secret-list-reader'),
+ createTokenCmd(`nested-secret-list-reader-${this.backend}`),
]);
await authPage.login(token);
});
@@ -191,14 +191,14 @@ module('Acceptance | kv-v2 workflow | edge cases', function (hooks) {
// user has different permissions for each secret path
const token = await runCmd([
createPolicyCmd(
- 'destruction-no-read',
+ `destruction-no-read-${this.backend}`,
dataPolicy({ backend, secretPath: 'data-delete-only', capabilities: ['delete'] }) +
deleteVersionsPolicy({ backend, secretPath: 'delete-version-only' }) +
destroyVersionsPolicy({ backend, secretPath: 'destroy-version-only' }) +
metadataPolicy({ backend, secretPath: 'destroy-metadata-only', capabilities: ['delete'] }) +
metadataListPolicy(backend)
),
- createTokenCmd('destruction-no-read'),
+ createTokenCmd(`destruction-no-read-${this.backend}`),
]);
for (const secret of testSecrets) {
await writeVersionedSecret(backend, secret, 'foo', 'bar', 2);
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js
index 914d8820c9d5..b1369e35e1e4 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js
@@ -360,10 +360,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
- 'data-reader',
+ `data-reader-${this.backend}`,
personas.dataReader(this.backend) + personas.dataReader(this.emptyBackend)
),
- createTokenCmd('data-reader'),
+ createTokenCmd(`data-reader-${this.backend}`),
]);
await authPage.login(token);
clearRecords(this.store);
@@ -540,10 +540,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
- 'data-reader-list',
+ `data-reader-list-${this.backend}`,
personas.dataListReader(this.backend) + personas.dataListReader(this.emptyBackend)
),
- createTokenCmd('data-reader-list'),
+ createTokenCmd(`data-reader-list-${this.backend}`),
]);
await authPage.login(token);
@@ -727,10 +727,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
- 'metadata-maintainer',
+ `metadata-maintainer-${this.backend}`,
personas.metadataMaintainer(this.backend) + personas.metadataMaintainer(this.emptyBackend)
),
- createTokenCmd('metadata-maintainer'),
+ createTokenCmd(`metadata-maintainer-${this.backend}`),
]);
await authPage.login(token);
clearRecords(this.store);
@@ -945,10 +945,10 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
hooks.beforeEach(async function () {
const token = await runCmd([
createPolicyCmd(
- 'secret-creator',
+ `secret-creator-${this.backend}`,
personas.secretCreator(this.backend) + personas.secretCreator(this.emptyBackend)
),
- createTokenCmd('secret-creator'),
+ createTokenCmd(`secret-creator-${this.backend}`),
]);
await authPage.login(token);
clearRecords(this.store);
@@ -1163,7 +1163,7 @@ path "${this.backend}/*" {
capabilities = ["list"]
}
`;
- const { userToken } = await setupControlGroup({ userPolicy });
+ const { userToken } = await setupControlGroup({ userPolicy, backend: this.backend });
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);
@@ -1204,6 +1204,7 @@ path "${this.backend}/*" {
apiPath: `${backend}/data/app/nested/secret`,
originUrl: `/vault/secrets/${backend}/kv/list/app/nested/`,
userToken: this.userToken,
+ backend: this.backend,
});
assert.strictEqual(
currentURL(),
@@ -1254,11 +1255,11 @@ path "${this.backend}/*" {
await waitUntil(() => currentRouteName() === 'vault.cluster.access.control-group-accessor'),
'redirects to access control group route'
);
-
await grantAccess({
apiPath: `${backend}/data/${encodeURIComponent(secretPath)}`,
originUrl: `/vault/secrets/${backend}/kv/list`,
userToken: this.userToken,
+ backend: this.backend,
});
assert.strictEqual(
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-version-history-paths-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-version-history-paths-test.js
index eed313cde5b0..82d0461ed64d 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-version-history-paths-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-version-history-paths-test.js
@@ -20,6 +20,9 @@ import { PAGE } from 'vault/tests/helpers/kv/kv-selectors';
import { click, currentRouteName, currentURL, visit, waitUntil } from '@ember/test-helpers';
import { grantAccess, setupControlGroup } from 'vault/tests/helpers/control-groups';
+const makeToken = (name, mountPath, policyGenerator) => {
+ return tokenWithPolicyCmd(`${name}-${mountPath}`, policyGenerator(mountPath));
+};
/**
* This test set is for testing version history & path pages for secret.
* Letter(s) in parenthesis at the end are shorthand for the persona,
@@ -52,7 +55,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('admin persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('admin', personas.admin(this.backend)));
+ const token = await runCmd(makeToken('admin', this.backend, personas.admin));
await authPage.login(token);
clearRecords(this.store);
});
@@ -101,7 +104,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('data-reader persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('data-reader', personas.dataReader(this.backend)));
+ const token = await runCmd(makeToken('data-reader', this.backend, personas.dataReader));
await authPage.login(token);
clearRecords(this.store);
});
@@ -128,9 +131,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('data-list-reader persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(
- tokenWithPolicyCmd('data-list-reader', personas.dataListReader(this.backend))
- );
+ const token = await runCmd(makeToken('data-list-reader', this.backend, personas.dataListReader));
await authPage.login(token);
clearRecords(this.store);
});
@@ -157,9 +158,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('metadata-maintainer persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(
- tokenWithPolicyCmd('metadata-maintainer', personas.metadataMaintainer(this.backend))
- );
+ const token = await runCmd(makeToken('metadata-maintainer', this.backend, personas.metadataMaintainer));
await authPage.login(token);
clearRecords(this.store);
});
@@ -208,7 +207,7 @@ module('Acceptance | kv-v2 workflow | version history, paths', function (hooks)
module('secret-creator persona', function (hooks) {
hooks.beforeEach(async function () {
- const token = await runCmd(tokenWithPolicyCmd('secret-creator', personas.secretCreator(this.backend)));
+ const token = await runCmd(makeToken('secret-creator', this.backend, personas.secretCreator));
await authPage.login(token);
clearRecords(this.store);
});
@@ -254,7 +253,7 @@ path "${this.backend}/*" {
capabilities = ["list"]
}
`;
- const { userToken } = await setupControlGroup({ userPolicy });
+ const { userToken } = await setupControlGroup({ userPolicy, backend: this.backend });
this.userToken = userToken;
await authPage.login(userToken);
clearRecords(this.store);
@@ -270,6 +269,7 @@ path "${this.backend}/*" {
apiPath: `${this.backend}/metadata/${this.secretPath}`,
originUrl: `/vault/secrets/${this.urlPath}/details`,
userToken: this.userToken,
+ backend: this.backend,
});
assert.strictEqual(
currentURL(),
diff --git a/ui/tests/acceptance/sync/secrets/destinations-test.js b/ui/tests/acceptance/sync/secrets/destinations-test.js
index 017460621ff1..715d8ab1f5df 100644
--- a/ui/tests/acceptance/sync/secrets/destinations-test.js
+++ b/ui/tests/acceptance/sync/secrets/destinations-test.js
@@ -9,7 +9,7 @@ import { setupMirage } from 'ember-cli-mirage/test-support';
import syncScenario from 'vault/mirage/scenarios/sync';
import syncHandlers from 'vault/mirage/handlers/sync';
import authPage from 'vault/tests/pages/auth';
-import { click, visit } from '@ember/test-helpers';
+import { click, visit, fillIn } from '@ember/test-helpers';
import { PAGE } from 'vault/tests/helpers/sync/sync-selectors';
const { searchSelect, filter, listItem } = PAGE;
@@ -29,6 +29,11 @@ module('Acceptance | sync | destinations', function (hooks) {
assert.dom(listItem).exists({ count: 6 }, 'All destinations render');
await click(`${filter('type')} .ember-basic-dropdown-trigger`);
await click(searchSelect.option());
- assert.dom(listItem).exists({ count: 2 }, 'Filtered destinations render');
+ assert.dom(listItem).exists({ count: 2 }, 'Destinations are filtered by type');
+ await fillIn(filter('name'), 'new');
+ assert.dom(listItem).exists({ count: 1 }, 'Destinations are filtered by type and name');
+ await click(searchSelect.removeSelected);
+ await fillIn(filter('name'), 'gcp');
+ assert.dom(listItem).exists({ count: 1 }, 'Destinations are filtered by name');
});
});
diff --git a/ui/tests/acceptance/unseal-test.js b/ui/tests/acceptance/unseal-test.js
index 26fb12d768a5..9385dbaac636 100644
--- a/ui/tests/acceptance/unseal-test.js
+++ b/ui/tests/acceptance/unseal-test.js
@@ -12,7 +12,7 @@ import { pollCluster } from 'vault/tests/helpers/poll-cluster';
const { unsealKeys } = VAULT_KEYS;
-module('Acceptance | unseal', function (hooks) {
+module.skip('Acceptance | unseal', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(function () {
diff --git a/ui/tests/helpers/control-groups.js b/ui/tests/helpers/control-groups.js
index abe42ee550e7..c226ba158762 100644
--- a/ui/tests/helpers/control-groups.js
+++ b/ui/tests/helpers/control-groups.js
@@ -5,8 +5,8 @@
import { click, visit } from '@ember/test-helpers';
import { create } from 'ember-cli-page-object';
-import { CONTROL_GROUP_PREFIX, TOKEN_SEPARATOR } from 'vault/services/control-group';
+import { CONTROL_GROUP_PREFIX, TOKEN_SEPARATOR } from 'vault/services/control-group';
import authPage from 'vault/tests/pages/auth';
import controlGroup from 'vault/tests/pages/components/control-group';
import { createPolicyCmd, createTokenCmd, mountAuthCmd, runCmd } from './commands';
@@ -16,13 +16,22 @@ const storageKey = (accessor, path) => {
return `${CONTROL_GROUP_PREFIX}${accessor}${TOKEN_SEPARATOR}${path}`;
};
+// This function is used to set up a control group for testing.
+// It creates a userpass auth mount, an authorizing user, and a
+// token constrained by the given user policy. The auth mount and
+// policy names are suffixed with the backend path.
export const setupControlGroup = async ({
userPolicy,
+ backend,
adminUser = 'authorizer',
- adminPassword = 'password',
- userpassMount = 'userpass',
+ adminPassword = 'testing-xyz',
}) => {
- const userPolicyName = 'kv-control-group';
+ if (!backend || !userPolicy) {
+ throw new Error('missing required fields for setupControlGroup');
+ }
+ const userpassMount = `userpass-${backend}`;
+ const userPolicyName = `kv-control-group-${backend}`;
+ const authorizerPolicyName = `authorizer-${backend}`;
const authorizerPolicy = `
path "sys/control-group/authorize" {
capabilities = ["update"]
@@ -35,7 +44,7 @@ export const setupControlGroup = async ({
const userpassAccessor = await runCmd([
// write policies for control group + authorization
createPolicyCmd(userPolicyName, userPolicy),
- createPolicyCmd('authorizer', authorizerPolicy),
+ createPolicyCmd(authorizerPolicyName, authorizerPolicy),
// enable userpass, create admin user
mountAuthCmd('userpass', userpassMount),
// read out mount to get the accessor
@@ -50,14 +59,13 @@ export const setupControlGroup = async ({
const userToken = await runCmd([
// create alias for authorizor and add them to the managers group
`write identity/alias mount_accessor=${userpassAccessor} entity_id=${authorizerEntityId} name=${adminUser}`,
- `write identity/group name=managers member_entity_ids=${authorizerEntityId} policies=authorizer`,
+ `write identity/group name=managers member_entity_ids=${authorizerEntityId} policies=${authorizerPolicyName}`,
// create a token to request access to kv/foo
createTokenCmd(userPolicyName),
]);
return {
userToken,
userPolicyName,
- userPolicy,
adminUser,
adminPassword,
userpassMount,
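
A minimal usage sketch of the updated `setupControlGroup` helper (the policy body, mount path, and hook shape are illustrative placeholders, not values from this change); the new `backend` option is required and drives the per-backend mount and policy names:

```js
import authPage from 'vault/tests/pages/auth';
import { setupControlGroup } from 'vault/tests/helpers/control-groups';

hooks.beforeEach(async function () {
  // Assumes an outer hook already set this.backend to the test's KV mount path.
  const userPolicy = `
path "${this.backend}/data/*" {
  capabilities = ["read", "list"]
}
`;
  const { userToken, userpassMount } = await setupControlGroup({
    userPolicy,
    backend: this.backend, // omitting backend or userPolicy now throws
  });
  this.userToken = userToken;
  this.userpassMount = userpassMount; // resolves to `userpass-${this.backend}`
  await authPage.login(userToken);
});
```
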
@@ -70,10 +78,15 @@ export async function grantAccessForWrite({
creation_path,
originUrl,
userToken,
+ backend,
authorizerUser = 'authorizer',
- authorizerPassword = 'password',
+ authorizerPassword = 'testing-xyz',
}) {
- await authPage.loginUsername(authorizerUser, authorizerPassword);
+ if (!token || !accessor || !creation_path || !originUrl || !userToken || !backend) {
+ throw new Error('missing required fields for grantAccessForWrite');
+ }
+ const userpassMount = `userpass-${backend}`;
+ await authPage.loginUsername(authorizerUser, authorizerPassword, userpassMount);
await visit(`/vault/access/control-groups/${accessor}`);
await controlGroupComponent.authorize();
await authPage.login(userToken);
@@ -91,21 +104,26 @@ export async function grantAccessForWrite({
await visit(originUrl);
}
+/*
+ * Control group grant access flow
+ * Assumes start on route 'vault.cluster.access.control-group-accessor'
+ * and authorizer login is via userpass
+ */
export async function grantAccess({
apiPath,
originUrl,
userToken,
+ backend,
authorizerUser = 'authorizer',
- authorizerPassword = 'password',
+ authorizerPassword = 'testing-xyz',
}) {
- /*
- * Control group grant access flow
- * Assumes start on route 'vault.cluster.access.control-group-accessor'
- * and authorizer login is via userpass
- */
+ if (!apiPath || !originUrl || !userToken || !backend) {
+ throw new Error('missing required fields for grantAccess');
+ }
+ const userpassMount = `userpass-${backend}`;
const accessor = controlGroupComponent.accessor;
const controlGroupToken = controlGroupComponent.token;
- await authPage.loginUsername(authorizerUser, authorizerPassword);
+ await authPage.loginUsername(authorizerUser, authorizerPassword, userpassMount);
await visit(`/vault/access/control-groups/${accessor}`);
await controlGroupComponent.authorize();
await authPage.login(userToken);
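
A companion sketch for the grant flow above, assuming the test has just triggered a control group and landed on the accessor route (the secret paths and URLs are placeholders):

```js
import { currentRouteName, waitUntil } from '@ember/test-helpers';
import { grantAccess } from 'vault/tests/helpers/control-groups';

// Inside a test body, after visiting a path wrapped by a control group:
assert.ok(
  await waitUntil(() => currentRouteName() === 'vault.cluster.access.control-group-accessor'),
  'redirects to the control group accessor route'
);
await grantAccess({
  apiPath: `${this.backend}/data/app/secret`, // placeholder API path
  originUrl: `/vault/secrets/${this.backend}/kv/app%2Fsecret/details`, // placeholder origin
  userToken: this.userToken,
  backend: this.backend, // required; used to find the `userpass-${backend}` auth mount
});
```
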
diff --git a/ui/tests/helpers/general-selectors.js b/ui/tests/helpers/general-selectors.js
index 670fda1f4c01..36ee70c5f9bf 100644
--- a/ui/tests/helpers/general-selectors.js
+++ b/ui/tests/helpers/general-selectors.js
@@ -14,6 +14,7 @@ export const SELECTORS = {
icon: (name) => `[data-test-icon="${name}"]`,
tab: (name) => `[data-test-tab="${name}"]`,
filter: (name) => `[data-test-filter="${name}"]`,
+ filterInput: '[data-test-filter-input]',
confirmModalInput: '[data-test-confirmation-modal-input]',
confirmButton: '[data-test-confirm-button]',
emptyStateTitle: '[data-test-empty-state-title]',
diff --git a/ui/tests/helpers/pki/pki-run-commands.js b/ui/tests/helpers/pki/pki-run-commands.js
index ac60ec513efd..291aab176fdd 100644
--- a/ui/tests/helpers/pki/pki-run-commands.js
+++ b/ui/tests/helpers/pki/pki-run-commands.js
@@ -34,3 +34,21 @@ export const runCommands = async function (commands) {
throw error;
}
};
+
+// Clears PKI-related records and capabilities so that admin
+// capabilities granted during setup don't roll over into later tests
+export function clearRecords(store) {
+ store.unloadAll('pki/action');
+ store.unloadAll('pki/issuer');
+ store.unloadAll('pki/key');
+ store.unloadAll('pki/role');
+ store.unloadAll('pki/sign-intermediate');
+ store.unloadAll('pki/tidy');
+ store.unloadAll('pki/config/urls');
+ store.unloadAll('pki/config/crl');
+ store.unloadAll('pki/config/cluster');
+ store.unloadAll('pki/config/acme');
+ store.unloadAll('pki/certificate/generate');
+ store.unloadAll('pki/certificate/sign');
+ store.unloadAll('capabilities');
+}
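
A short usage sketch for the new PKI `clearRecords` helper (the persona token and store lookup are assumptions about the calling test, not part of this change):

```js
import { clearRecords } from 'vault/tests/helpers/pki/pki-run-commands';
import authPage from 'vault/tests/pages/auth';

hooks.beforeEach(async function () {
  this.store = this.owner.lookup('service:store');
  await authPage.login(this.personaToken); // placeholder token created during setup
  // Drop models and capabilities cached while the admin token was active.
  clearRecords(this.store);
});
```
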
diff --git a/ui/tests/integration/components/info-table-row-test.js b/ui/tests/integration/components/info-table-row-test.js
index d6ba44a25ee0..37b53e60e3ec 100644
--- a/ui/tests/integration/components/info-table-row-test.js
+++ b/ui/tests/integration/components/info-table-row-test.js
@@ -197,10 +197,12 @@ module('Integration | Component | InfoTableRow', function (hooks) {
});
test('Truncates the label if too long', async function (assert) {
this.set('label', 'abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyz');
- await render(hbs`
+ `);
+ />
+ `);
assert.dom('[data-test-component="info-table-row"]').exists('Row renders');
assert.dom('[data-test-label-div].label-overflow').exists('Label has class label-overflow');
await triggerEvent('[data-test-row-label]', 'mouseenter');
diff --git a/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js b/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js
index d01abb56abee..28327e13a699 100644
--- a/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js
+++ b/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js
@@ -240,8 +240,8 @@ module('Integration | Component | kv-v2 | Page::Secret::Details', function (hook
.exists('renders current version icon');
});
- test('it renders sync status page alert', async function (assert) {
- assert.expect(5); // assert count important because confirms request made to fetch sync status twice
+ test('it renders sync status page alert and refreshes', async function (assert) {
+    assert.expect(6); // assert count is important because it confirms the request to fetch sync status is made twice
const destinationName = 'my-destination';
this.server.create('sync-association', {
type: 'aws-sm',
@@ -250,7 +250,7 @@ module('Integration | Component | kv-v2 | Page::Secret::Details', function (hook
secret_name: this.path,
});
this.server.get(`sys/sync/associations/destinations`, (schema, req) => {
- // this assertion should be hit twice, once on init and again when the 'Refresh' button is clicked
+ // these assertions should be hit twice, once on init and again when the 'Refresh' button is clicked
assert.ok(true, 'request made to fetch sync status');
assert.propEqual(
req.queryParams,
@@ -281,8 +281,56 @@ module('Integration | Component | kv-v2 | Page::Secret::Details', function (hook
'Synced my-destination - last updated September',
'renders sync status alert banner'
);
-
+ assert
+ .dom(PAGE.detail.syncAlert())
+ .hasTextContaining(
+ 'This secret has been synced from Vault to 1 destination. Updates to this secret will automatically sync to its destination.',
+ 'renders alert header referring to singular destination'
+ );
// sync status refresh button
await click(`${PAGE.detail.syncAlert()} button`);
});
+
+ test('it renders sync status page alert for multiple destinations', async function (assert) {
+    assert.expect(3); // assert count is important to catch any unexpected assertions
+ this.server.create('sync-association', {
+ type: 'aws-sm',
+ name: 'aws-dest',
+ mount: this.backend,
+ secret_name: this.path,
+ });
+ this.server.create('sync-association', {
+ type: 'gh',
+ name: 'gh-dest',
+ mount: this.backend,
+ secret_name: this.path,
+ });
+ this.server.get(`sys/sync/associations/destinations`, (schema, req) => {
+ return syncStatusResponse(schema, req);
+ });
+
+ await render(
+ hbs`
+
+ `,
+ { owner: this.engine }
+ );
+ assert
+ .dom(PAGE.detail.syncAlert('aws-dest'))
+ .hasTextContaining('Synced aws-dest - last updated September', 'renders status for aws destination');
+ assert
+ .dom(PAGE.detail.syncAlert('gh-dest'))
+ .hasTextContaining('Syncing gh-dest - last updated September', 'renders status for gh destination');
+ assert
+ .dom(PAGE.detail.syncAlert())
+ .hasTextContaining(
+ 'This secret has been synced from Vault to 2 destinations. Updates to this secret will automatically sync to its destinations.',
+ 'renders alert title referring to plural destinations'
+ );
+ });
});
diff --git a/ui/tests/integration/components/sync/secrets/page/destinations-test.js b/ui/tests/integration/components/sync/secrets/page/destinations-test.js
index 4366bb379b43..66bf310a79bf 100644
--- a/ui/tests/integration/components/sync/secrets/page/destinations-test.js
+++ b/ui/tests/integration/components/sync/secrets/page/destinations-test.js
@@ -7,7 +7,7 @@ import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import { setupEngine } from 'ember-engines/test-support';
import { setupMirage } from 'ember-cli-mirage/test-support';
-import { render, click } from '@ember/test-helpers';
+import { render, click, fillIn } from '@ember/test-helpers';
import hbs from 'htmlbars-inline-precompile';
import { allowAllCapabilitiesStub } from 'vault/tests/helpers/stubs';
import sinon from 'sinon';
@@ -97,8 +97,7 @@ module('Integration | Component | sync | Page::Destinations', function (hooks) {
);
// NAME FILTER
- await click(`${filter('name')} .ember-basic-dropdown-trigger`);
- await click(searchSelect.option(searchSelect.optionIndex('destination-aws')));
+ await fillIn(filter('name'), 'destination-aws');
assert.deepEqual(
this.transitionStub.lastCall.args,
['vault.cluster.sync.secrets.destinations', { queryParams: { name: 'destination-aws' } }],
diff --git a/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js b/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js
index aa2ff5bd281b..600fee2c3ded 100644
--- a/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js
+++ b/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js
@@ -256,11 +256,11 @@ module('Integration | Component | sync | Secrets::Page::Destinations::CreateAndE
assert.dom(PAGE.title).hasTextContaining(`Edit ${this.model.name}`);
for (const attr of this.model.formFields) {
- // Enable inputs with sensitive values
- if (maskedParams.includes(attr.name)) {
- await click(PAGE.form.enableInput(attr.name));
- }
if (editable.includes(attr.name)) {
+ if (maskedParams.includes(attr.name)) {
+ // Enable inputs with sensitive values
+ await click(PAGE.form.enableInput(attr.name));
+ }
await PAGE.form.fillInByAttr(attr.name, `new-${decamelize(attr.name)}-value`);
} else {
assert.dom(PAGE.inputByAttr(attr.name)).isDisabled(`${attr.name} is disabled`);
diff --git a/ui/tests/integration/components/sync/secrets/page/overview-test.js b/ui/tests/integration/components/sync/secrets/page/overview-test.js
index 33cdff977d04..860a185a1cc2 100644
--- a/ui/tests/integration/components/sync/secrets/page/overview-test.js
+++ b/ui/tests/integration/components/sync/secrets/page/overview-test.js
@@ -105,7 +105,7 @@ module('Integration | Component | sync | Page::Overview', function (hooks) {
await click(actionToggle(0));
assert.dom(action('sync')).hasText('Sync secrets', 'Sync action renders');
- assert.dom(action('details')).hasText('Details', 'Details action renders');
+ assert.dom(action('details')).hasText('View synced secrets', 'View synced secrets action renders');
});
test('it should paginate secrets by destination table', async function (assert) {
diff --git a/ui/tests/test-helper.js b/ui/tests/test-helper.js
index bc3ba1386c50..5213d456fd98 100644
--- a/ui/tests/test-helper.js
+++ b/ui/tests/test-helper.js
@@ -8,7 +8,7 @@ import config from 'vault/config/environment';
import * as QUnit from 'qunit';
import { setApplication } from '@ember/test-helpers';
import { setup } from 'qunit-dom';
-import { start } from 'ember-qunit';
+import start from 'ember-exam/test-support/start';
import './helpers/flash-message';
import preloadAssets from 'ember-asset-loader/test-support/preload-assets';
import { setupGlobalA11yHooks, setRunOptions } from 'ember-a11y-testing/test-support';
diff --git a/ui/yarn.lock b/ui/yarn.lock
index d586f0e0e1c8..f8ec5a61b05f 100644
--- a/ui/yarn.lock
+++ b/ui/yarn.lock
@@ -67,7 +67,7 @@ __metadata:
languageName: node
linkType: hard
-"@babel/code-frame@npm:^7.22.13":
+"@babel/code-frame@npm:^7.22.13, @babel/code-frame@npm:^7.23.5":
version: 7.23.5
resolution: "@babel/code-frame@npm:7.23.5"
dependencies:
@@ -266,6 +266,29 @@ __metadata:
languageName: node
linkType: hard
+"@babel/core@npm:^7.23.6":
+ version: 7.23.7
+ resolution: "@babel/core@npm:7.23.7"
+ dependencies:
+ "@ampproject/remapping": ^2.2.0
+ "@babel/code-frame": ^7.23.5
+ "@babel/generator": ^7.23.6
+ "@babel/helper-compilation-targets": ^7.23.6
+ "@babel/helper-module-transforms": ^7.23.3
+ "@babel/helpers": ^7.23.7
+ "@babel/parser": ^7.23.6
+ "@babel/template": ^7.22.15
+ "@babel/traverse": ^7.23.7
+ "@babel/types": ^7.23.6
+ convert-source-map: ^2.0.0
+ debug: ^4.1.0
+ gensync: ^1.0.0-beta.2
+ json5: ^2.2.3
+ semver: ^6.3.1
+ checksum: 32d5bf73372a47429afaae9adb0af39e47bcea6a831c4b5dcbb4791380cda6949cb8cb1a2fea8b60bb1ebe189209c80e333903df1fa8e9dcb04798c0ce5bf59e
+ languageName: node
+ linkType: hard
+
"@babel/eslint-parser@npm:^7.21.3":
version: 7.22.9
resolution: "@babel/eslint-parser@npm:7.22.9"
@@ -336,6 +359,18 @@ __metadata:
languageName: node
linkType: hard
+"@babel/generator@npm:^7.23.6":
+ version: 7.23.6
+ resolution: "@babel/generator@npm:7.23.6"
+ dependencies:
+ "@babel/types": ^7.23.6
+ "@jridgewell/gen-mapping": ^0.3.2
+ "@jridgewell/trace-mapping": ^0.3.17
+ jsesc: ^2.5.1
+ checksum: 1a1a1c4eac210f174cd108d479464d053930a812798e09fee069377de39a893422df5b5b146199ead7239ae6d3a04697b45fc9ac6e38e0f6b76374390f91fc6c
+ languageName: node
+ linkType: hard
+
"@babel/helper-annotate-as-pure@npm:^7.12.13, @babel/helper-annotate-as-pure@npm:^7.16.0":
version: 7.16.0
resolution: "@babel/helper-annotate-as-pure@npm:7.16.0"
@@ -485,6 +520,19 @@ __metadata:
languageName: node
linkType: hard
+"@babel/helper-compilation-targets@npm:^7.23.6":
+ version: 7.23.6
+ resolution: "@babel/helper-compilation-targets@npm:7.23.6"
+ dependencies:
+ "@babel/compat-data": ^7.23.5
+ "@babel/helper-validator-option": ^7.23.5
+ browserslist: ^4.22.2
+ lru-cache: ^5.1.1
+ semver: ^6.3.1
+ checksum: c630b98d4527ac8fe2c58d9a06e785dfb2b73ec71b7c4f2ddf90f814b5f75b547f3c015f110a010fd31f76e3864daaf09f3adcd2f6acdbfb18a8de3a48717590
+ languageName: node
+ linkType: hard
+
"@babel/helper-create-class-features-plugin@npm:^7.13.0, @babel/helper-create-class-features-plugin@npm:^7.16.0, @babel/helper-create-class-features-plugin@npm:^7.8.3":
version: 7.16.0
resolution: "@babel/helper-create-class-features-plugin@npm:7.16.0"
@@ -1574,6 +1622,17 @@ __metadata:
languageName: node
linkType: hard
+"@babel/helpers@npm:^7.23.7":
+ version: 7.23.7
+ resolution: "@babel/helpers@npm:7.23.7"
+ dependencies:
+ "@babel/template": ^7.22.15
+ "@babel/traverse": ^7.23.7
+ "@babel/types": ^7.23.6
+ checksum: 4f3bdf35fb54ff79107c6020ba1e36a38213a15b05ca0fa06c553b65f566e185fba6339fb3344be04593ebc244ed0bbb0c6087e73effe0d053a30bcd2db3a013
+ languageName: node
+ linkType: hard
+
"@babel/highlight@npm:^7.12.13, @babel/highlight@npm:^7.16.0":
version: 7.16.0
resolution: "@babel/highlight@npm:7.16.0"
@@ -1692,6 +1751,15 @@ __metadata:
languageName: node
linkType: hard
+"@babel/parser@npm:^7.23.6":
+ version: 7.23.6
+ resolution: "@babel/parser@npm:7.23.6"
+ bin:
+ parser: ./bin/babel-parser.js
+ checksum: 140801c43731a6c41fd193f5c02bc71fd647a0360ca616b23d2db8be4b9739b9f951a03fc7c2db4f9b9214f4b27c1074db0f18bc3fa653783082d5af7c8860d5
+ languageName: node
+ linkType: hard
+
"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:^7.16.7":
version: 7.16.7
resolution: "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:7.16.7"
@@ -5088,6 +5156,24 @@ __metadata:
languageName: node
linkType: hard
+"@babel/traverse@npm:^7.23.7":
+ version: 7.23.7
+ resolution: "@babel/traverse@npm:7.23.7"
+ dependencies:
+ "@babel/code-frame": ^7.23.5
+ "@babel/generator": ^7.23.6
+ "@babel/helper-environment-visitor": ^7.22.20
+ "@babel/helper-function-name": ^7.23.0
+ "@babel/helper-hoist-variables": ^7.22.5
+ "@babel/helper-split-export-declaration": ^7.22.6
+ "@babel/parser": ^7.23.6
+ "@babel/types": ^7.23.6
+ debug: ^4.3.1
+ globals: ^11.1.0
+ checksum: d4a7afb922361f710efc97b1e25ec343fab8b2a4ddc81ca84f9a153f22d4482112cba8f263774be8d297918b6c4767c7a98988ab4e53ac73686c986711dd002e
+ languageName: node
+ linkType: hard
+
"@babel/types@npm:^7.1.6, @babel/types@npm:^7.7.2":
version: 7.14.0
resolution: "@babel/types@npm:7.14.0"
@@ -5172,6 +5258,17 @@ __metadata:
languageName: node
linkType: hard
+"@babel/types@npm:^7.23.6":
+ version: 7.23.6
+ resolution: "@babel/types@npm:7.23.6"
+ dependencies:
+ "@babel/helper-string-parser": ^7.23.4
+ "@babel/helper-validator-identifier": ^7.22.20
+ to-fast-properties: ^2.0.0
+ checksum: 68187dbec0d637f79bc96263ac95ec8b06d424396678e7e225492be866414ce28ebc918a75354d4c28659be6efe30020b4f0f6df81cc418a2d30645b690a8de0
+ languageName: node
+ linkType: hard
+
"@babel/types@npm:^7.8.3":
version: 7.21.4
resolution: "@babel/types@npm:7.21.4"
@@ -5195,6 +5292,13 @@ __metadata:
languageName: node
linkType: hard
+"@colors/colors@npm:1.5.0":
+ version: 1.5.0
+ resolution: "@colors/colors@npm:1.5.0"
+ checksum: d64d5260bed1d5012ae3fc617d38d1afc0329fec05342f4e6b838f46998855ba56e0a73833f4a80fa8378c84810da254f76a8a19c39d038260dc06dc4e007425
+ languageName: node
+ linkType: hard
+
"@csstools/css-parser-algorithms@npm:^2.3.0":
version: 2.3.0
resolution: "@csstools/css-parser-algorithms@npm:2.3.0"
@@ -6207,6 +6311,20 @@ __metadata:
languageName: node
linkType: hard
+"@isaacs/cliui@npm:^8.0.2":
+ version: 8.0.2
+ resolution: "@isaacs/cliui@npm:8.0.2"
+ dependencies:
+ string-width: ^5.1.2
+ string-width-cjs: "npm:string-width@^4.2.0"
+ strip-ansi: ^7.0.1
+ strip-ansi-cjs: "npm:strip-ansi@^6.0.1"
+ wrap-ansi: ^8.1.0
+ wrap-ansi-cjs: "npm:wrap-ansi@^7.0.0"
+ checksum: 4a473b9b32a7d4d3cfb7a614226e555091ff0c5a29a1734c28c72a182c2f6699b26fc6b5c2131dfd841e86b185aea714c72201d7c98c2fba5f17709333a67aeb
+ languageName: node
+ linkType: hard
+
"@jridgewell/gen-mapping@npm:^0.3.0":
version: 0.3.1
resolution: "@jridgewell/gen-mapping@npm:0.3.1"
@@ -6483,6 +6601,13 @@ __metadata:
languageName: node
linkType: hard
+"@pkgjs/parseargs@npm:^0.11.0":
+ version: 0.11.0
+ resolution: "@pkgjs/parseargs@npm:0.11.0"
+ checksum: 6ad6a00fc4f2f2cfc6bff76fb1d88b8ee20bc0601e18ebb01b6d4be583733a860239a521a7fbca73b612e66705078809483549d2b18f370eb346c5155c8e4a0f
+ languageName: node
+ linkType: hard
+
"@popperjs/core@npm:^2.9.0":
version: 2.11.8
resolution: "@popperjs/core@npm:2.11.8"
@@ -7844,6 +7969,15 @@ __metadata:
languageName: node
linkType: hard
+"abort-controller@npm:^3.0.0":
+ version: 3.0.0
+ resolution: "abort-controller@npm:3.0.0"
+ dependencies:
+ event-target-shim: ^5.0.0
+ checksum: 170bdba9b47b7e65906a28c8ce4f38a7a369d78e2271706f020849c1bfe0ee2067d4261df8bbb66eb84f79208fd5b710df759d64191db58cfba7ce8ef9c54b75
+ languageName: node
+ linkType: hard
+
"abortcontroller-polyfill@npm:^1.7.3":
version: 1.7.3
resolution: "abortcontroller-polyfill@npm:1.7.3"
@@ -8216,6 +8350,13 @@ __metadata:
languageName: node
linkType: hard
+"ansi-regex@npm:^6.0.1":
+ version: 6.0.1
+ resolution: "ansi-regex@npm:6.0.1"
+ checksum: 1ff8b7667cded1de4fa2c9ae283e979fc87036864317da86a2e546725f96406746411d0d85e87a2d12fa5abd715d90006de7fa4fa0477c92321ad3b4c7d4e169
+ languageName: node
+ linkType: hard
+
"ansi-styles@npm:^2.2.1":
version: 2.2.1
resolution: "ansi-styles@npm:2.2.1"
@@ -8241,6 +8382,13 @@ __metadata:
languageName: node
linkType: hard
+"ansi-styles@npm:^6.1.0":
+ version: 6.2.1
+ resolution: "ansi-styles@npm:6.2.1"
+ checksum: ef940f2f0ced1a6347398da88a91da7930c33ecac3c77b72c5905f8b8fe402c52e6fde304ff5347f616e27a742da3f1dc76de98f6866c69251ad0b07a66776d9
+ languageName: node
+ linkType: hard
+
"ansi-styles@npm:~1.0.0":
version: 1.0.0
resolution: "ansi-styles@npm:1.0.0"
@@ -8320,6 +8468,16 @@ __metadata:
languageName: node
linkType: hard
+"are-we-there-yet@npm:^4.0.0":
+ version: 4.0.1
+ resolution: "are-we-there-yet@npm:4.0.1"
+ dependencies:
+ delegates: ^1.0.0
+ readable-stream: ^4.1.0
+ checksum: 16871ee259e138bfab60800ae5b53406fb1b72b5d356f98b13c1b222bb2a13d9bc4292d79f4521fb0eca10874eb3838ae0d9f721f3bb34ddd37ee8f949831800
+ languageName: node
+ linkType: hard
+
"argparse@npm:^1.0.7":
version: 1.0.10
resolution: "argparse@npm:1.0.10"
@@ -11082,7 +11240,7 @@ __metadata:
languageName: node
linkType: hard
-"browserslist@npm:^4.22.1":
+"browserslist@npm:^4.22.1, browserslist@npm:^4.22.2":
version: 4.22.2
resolution: "browserslist@npm:4.22.2"
dependencies:
@@ -11140,6 +11298,16 @@ __metadata:
languageName: node
linkType: hard
+"buffer@npm:^6.0.3":
+ version: 6.0.3
+ resolution: "buffer@npm:6.0.3"
+ dependencies:
+ base64-js: ^1.3.1
+ ieee754: ^1.2.1
+ checksum: 5ad23293d9a731e4318e420025800b42bf0d264004c0286c8cc010af7a270c7a0f6522e84f54b9ad65cbd6db20b8badbfd8d2ebf4f80fa03dab093b89e68c3f9
+ languageName: node
+ linkType: hard
+
"builtin-status-codes@npm:^3.0.0":
version: 3.0.0
resolution: "builtin-status-codes@npm:3.0.0"
@@ -11432,7 +11600,7 @@ __metadata:
languageName: node
linkType: hard
-"chalk@npm:^5.2.0":
+"chalk@npm:^5.2.0, chalk@npm:^5.3.0":
version: 5.3.0
resolution: "chalk@npm:5.3.0"
checksum: 623922e077b7d1e9dedaea6f8b9e9352921f8ae3afe739132e0e00c275971bdd331268183b2628cf4ab1727c45ea1f28d7e24ac23ce1db1eb653c414ca8a5a80
@@ -11703,6 +11871,19 @@ __metadata:
languageName: node
linkType: hard
+"cli-table3@npm:^0.6.0":
+ version: 0.6.3
+ resolution: "cli-table3@npm:0.6.3"
+ dependencies:
+ "@colors/colors": 1.5.0
+ string-width: ^4.2.0
+ dependenciesMeta:
+ "@colors/colors":
+ optional: true
+ checksum: 09897f68467973f827c04e7eaadf13b55f8aec49ecd6647cc276386ea660059322e2dd8020a8b6b84d422dbdd619597046fa89cbbbdc95b2cea149a2df7c096c
+ languageName: node
+ linkType: hard
+
"cli-table@npm:^0.3.1":
version: 0.3.6
resolution: "cli-table@npm:0.3.6"
@@ -12120,6 +12301,13 @@ __metadata:
languageName: node
linkType: hard
+"convert-source-map@npm:^2.0.0":
+ version: 2.0.0
+ resolution: "convert-source-map@npm:2.0.0"
+ checksum: 63ae9933be5a2b8d4509daca5124e20c14d023c820258e484e32dc324d34c2754e71297c94a05784064ad27615037ef677e3f0c00469fb55f409d2bb21261035
+ languageName: node
+ linkType: hard
+
"cookie-signature@npm:1.0.6":
version: 1.0.6
resolution: "cookie-signature@npm:1.0.6"
@@ -13496,6 +13684,13 @@ __metadata:
languageName: node
linkType: hard
+"eastasianwidth@npm:^0.2.0":
+ version: 0.2.0
+ resolution: "eastasianwidth@npm:0.2.0"
+ checksum: 7d00d7cd8e49b9afa762a813faac332dee781932d6f2c848dc348939c4253f1d4564341b7af1d041853bc3f32c2ef141b58e0a4d9862c17a7f08f68df1e0f1ed
+ languageName: node
+ linkType: hard
+
"ecc-jsbn@npm:~0.1.1":
version: 0.1.2
resolution: "ecc-jsbn@npm:0.1.2"
@@ -13862,6 +14057,48 @@ __metadata:
languageName: node
linkType: hard
+"ember-auto-import@npm:^2.7.0":
+ version: 2.7.2
+ resolution: "ember-auto-import@npm:2.7.2"
+ dependencies:
+ "@babel/core": ^7.16.7
+ "@babel/plugin-proposal-class-properties": ^7.16.7
+ "@babel/plugin-proposal-decorators": ^7.16.7
+ "@babel/plugin-proposal-private-methods": ^7.16.7
+ "@babel/plugin-transform-class-static-block": ^7.16.7
+ "@babel/preset-env": ^7.16.7
+ "@embroider/macros": ^1.0.0
+ "@embroider/shared-internals": ^2.0.0
+ babel-loader: ^8.0.6
+ babel-plugin-ember-modules-api-polyfill: ^3.5.0
+ babel-plugin-ember-template-compilation: ^2.0.1
+ babel-plugin-htmlbars-inline-precompile: ^5.2.1
+ babel-plugin-syntax-dynamic-import: ^6.18.0
+ broccoli-debug: ^0.6.4
+ broccoli-funnel: ^3.0.8
+ broccoli-merge-trees: ^4.2.0
+ broccoli-plugin: ^4.0.0
+ broccoli-source: ^3.0.0
+ css-loader: ^5.2.0
+ debug: ^4.3.1
+ fs-extra: ^10.0.0
+ fs-tree-diff: ^2.0.0
+ handlebars: ^4.3.1
+ js-string-escape: ^1.0.1
+ lodash: ^4.17.19
+ mini-css-extract-plugin: ^2.5.2
+ minimatch: ^3.0.0
+ parse5: ^6.0.1
+ resolve: ^1.20.0
+ resolve-package-path: ^4.0.3
+ semver: ^7.3.4
+ style-loader: ^2.0.0
+ typescript-memoize: ^1.0.0-alpha.3
+ walk-sync: ^3.0.0
+ checksum: c998028f7330ecb9ffa5242b60b55f7e6722c3b45959b1b0910cdb9b82920abdefd36f393a80e79bdfd0ff6532641ff1a3fd5b668bfdecd883ecafb00620c15f
+ languageName: node
+ linkType: hard
+
"ember-basic-dropdown@npm:6.0.1":
version: 6.0.1
resolution: "ember-basic-dropdown@npm:6.0.1"
@@ -14923,6 +15160,31 @@ __metadata:
languageName: node
linkType: hard
+"ember-exam@npm:^9.0.0":
+ version: 9.0.0
+ resolution: "ember-exam@npm:9.0.0"
+ dependencies:
+ "@babel/core": ^7.23.6
+ chalk: ^5.3.0
+ cli-table3: ^0.6.0
+ debug: ^4.2.0
+ ember-auto-import: ^2.7.0
+ ember-cli-babel: ^8.2.0
+ execa: ^8.0.1
+ fs-extra: ^11.2.0
+ js-yaml: ^4.0.0
+ npmlog: ^7.0.0
+ rimraf: ^5.0.0
+ semver: ^7.3.2
+ silent-error: ^1.1.1
+ peerDependencies:
+ ember-qunit: "*"
+ ember-source: ">= 4.0.0"
+ qunit: "*"
+ checksum: 5ff9dd11d89d96d6ca018e145eefad558f38b93ee95d39108bb919514c66c731713b00c68725e2377da738c1cf4854ce89b6132d2df44be93a4d99900f589a7b
+ languageName: node
+ linkType: hard
+
"ember-fetch@npm:^8.1.2":
version: 8.1.2
resolution: "ember-fetch@npm:8.1.2"
@@ -15538,6 +15800,13 @@ __metadata:
languageName: node
linkType: hard
+"emoji-regex@npm:^9.2.2":
+ version: 9.2.2
+ resolution: "emoji-regex@npm:9.2.2"
+ checksum: 8487182da74aabd810ac6d6f1994111dfc0e331b01271ae01ec1eb0ad7b5ecc2bbbbd2f053c05cb55a1ac30449527d819bbfbf0e3de1023db308cbcb47f86601
+ languageName: node
+ linkType: hard
+
"emoji-regex@npm:~6.1.0":
version: 6.1.3
resolution: "emoji-regex@npm:6.1.3"
@@ -16328,6 +16597,13 @@ __metadata:
languageName: node
linkType: hard
+"event-target-shim@npm:^5.0.0":
+ version: 5.0.1
+ resolution: "event-target-shim@npm:5.0.1"
+ checksum: 1ffe3bb22a6d51bdeb6bf6f7cf97d2ff4a74b017ad12284cc9e6a279e727dc30a5de6bb613e5596ff4dc3e517841339ad09a7eec44266eccb1aa201a30448166
+ languageName: node
+ linkType: hard
+
"eventemitter3@npm:^4.0.0":
version: 4.0.7
resolution: "eventemitter3@npm:4.0.7"
@@ -16342,7 +16618,7 @@ __metadata:
languageName: node
linkType: hard
-"events@npm:^3.0.0, events@npm:^3.2.0":
+"events@npm:^3.0.0, events@npm:^3.2.0, events@npm:^3.3.0":
version: 3.3.0
resolution: "events@npm:3.3.0"
checksum: f6f487ad2198aa41d878fa31452f1a3c00958f46e9019286ff4787c84aac329332ab45c9cdc8c445928fc6d7ded294b9e005a7fce9426488518017831b272780
@@ -16451,6 +16727,23 @@ __metadata:
languageName: node
linkType: hard
+"execa@npm:^8.0.1":
+ version: 8.0.1
+ resolution: "execa@npm:8.0.1"
+ dependencies:
+ cross-spawn: ^7.0.3
+ get-stream: ^8.0.1
+ human-signals: ^5.0.0
+ is-stream: ^3.0.0
+ merge-stream: ^2.0.0
+ npm-run-path: ^5.1.0
+ onetime: ^6.0.0
+ signal-exit: ^4.1.0
+ strip-final-newline: ^3.0.0
+ checksum: cac1bf86589d1d9b73bdc5dda65c52012d1a9619c44c526891956745f7b366ca2603d29fe3f7460bacc2b48c6eab5d6a4f7afe0534b31473d3708d1265545e1f
+ languageName: node
+ linkType: hard
+
"exists-sync@npm:0.0.4":
version: 0.0.4
resolution: "exists-sync@npm:0.0.4"
@@ -17187,6 +17480,16 @@ __metadata:
languageName: node
linkType: hard
+"foreground-child@npm:^3.1.0":
+ version: 3.1.1
+ resolution: "foreground-child@npm:3.1.1"
+ dependencies:
+ cross-spawn: ^7.0.0
+ signal-exit: ^4.0.1
+ checksum: 139d270bc82dc9e6f8bc045fe2aae4001dc2472157044fdfad376d0a3457f77857fa883c1c8b21b491c6caade9a926a4bed3d3d2e8d3c9202b151a4cbbd0bcd5
+ languageName: node
+ linkType: hard
+
"forever-agent@npm:~0.6.1":
version: 0.6.1
resolution: "forever-agent@npm:0.6.1"
@@ -17299,6 +17602,17 @@ __metadata:
languageName: node
linkType: hard
+"fs-extra@npm:^11.2.0":
+ version: 11.2.0
+ resolution: "fs-extra@npm:11.2.0"
+ dependencies:
+ graceful-fs: ^4.2.0
+ jsonfile: ^6.0.1
+ universalify: ^2.0.0
+ checksum: b12e42fa40ba47104202f57b8480dd098aa931c2724565e5e70779ab87605665594e76ee5fb00545f772ab9ace167fe06d2ab009c416dc8c842c5ae6df7aa7e8
+ languageName: node
+ linkType: hard
+
"fs-extra@npm:^3.0.1":
version: 3.0.1
resolution: "fs-extra@npm:3.0.1"
@@ -17545,6 +17859,22 @@ __metadata:
languageName: node
linkType: hard
+"gauge@npm:^5.0.0":
+ version: 5.0.1
+ resolution: "gauge@npm:5.0.1"
+ dependencies:
+ aproba: ^1.0.3 || ^2.0.0
+ color-support: ^1.1.3
+ console-control-strings: ^1.1.0
+ has-unicode: ^2.0.1
+ signal-exit: ^4.0.1
+ string-width: ^4.2.3
+ strip-ansi: ^6.0.1
+ wide-align: ^1.1.5
+ checksum: 09b1eb8d8c850df7e4e2822feef27427afc845d4839fa13a08ddad74f882caf668dd1e77ac5e059d3e9a7b0cef59b706d28be40e1dc5fd326da32965e1f206a6
+ languageName: node
+ linkType: hard
+
"gensync@npm:^1.0.0-beta.2":
version: 1.0.0-beta.2
resolution: "gensync@npm:1.0.0-beta.2"
@@ -17635,6 +17965,13 @@ __metadata:
languageName: node
linkType: hard
+"get-stream@npm:^8.0.1":
+ version: 8.0.1
+ resolution: "get-stream@npm:8.0.1"
+ checksum: 01e3d3cf29e1393f05f44d2f00445c5f9ec3d1c49e8179b31795484b9c117f4c695e5e07b88b50785d5c8248a788c85d9913a79266fc77e3ef11f78f10f1b974
+ languageName: node
+ linkType: hard
+
"get-symbol-description@npm:^1.0.0":
version: 1.0.0
resolution: "get-symbol-description@npm:1.0.0"
@@ -17710,6 +18047,21 @@ __metadata:
languageName: node
linkType: hard
+"glob@npm:^10.3.7":
+ version: 10.3.10
+ resolution: "glob@npm:10.3.10"
+ dependencies:
+ foreground-child: ^3.1.0
+ jackspeak: ^2.3.5
+ minimatch: ^9.0.1
+ minipass: ^5.0.0 || ^6.0.2 || ^7.0.0
+ path-scurry: ^1.10.1
+ bin:
+ glob: dist/esm/bin.mjs
+ checksum: 4f2fe2511e157b5a3f525a54092169a5f92405f24d2aed3142f4411df328baca13059f4182f1db1bf933e2c69c0bd89e57ae87edd8950cba8c7ccbe84f721cf3
+ languageName: node
+ linkType: hard
+
"glob@npm:^5.0.10":
version: 5.0.15
resolution: "glob@npm:5.0.15"
@@ -18507,6 +18859,13 @@ __metadata:
languageName: node
linkType: hard
+"human-signals@npm:^5.0.0":
+ version: 5.0.0
+ resolution: "human-signals@npm:5.0.0"
+ checksum: 6504560d5ed91444f16bea3bd9dfc66110a339442084e56c3e7fa7bbdf3f406426d6563d662bdce67064b165eac31eeabfc0857ed170aaa612cf14ec9f9a464c
+ languageName: node
+ linkType: hard
+
"humanize-ms@npm:^1.2.1":
version: 1.2.1
resolution: "humanize-ms@npm:1.2.1"
@@ -18543,7 +18902,7 @@ __metadata:
languageName: node
linkType: hard
-"ieee754@npm:^1.1.13, ieee754@npm:^1.1.4":
+"ieee754@npm:^1.1.13, ieee754@npm:^1.1.4, ieee754@npm:^1.2.1":
version: 1.2.1
resolution: "ieee754@npm:1.2.1"
checksum: 5144c0c9815e54ada181d80a0b810221a253562422e7c6c3a60b1901154184f49326ec239d618c416c1c5945a2e197107aee8d986a3dd836b53dffefd99b5e7e
@@ -19316,6 +19675,13 @@ __metadata:
languageName: node
linkType: hard
+"is-stream@npm:^3.0.0":
+ version: 3.0.0
+ resolution: "is-stream@npm:3.0.0"
+ checksum: 172093fe99119ffd07611ab6d1bcccfe8bc4aa80d864b15f43e63e54b7abc71e779acd69afdb854c4e2a67fdc16ae710e370eda40088d1cfc956a50ed82d8f16
+ languageName: node
+ linkType: hard
+
"is-string@npm:^1.0.5, is-string@npm:^1.0.7":
version: 1.0.7
resolution: "is-string@npm:1.0.7"
@@ -19508,6 +19874,19 @@ __metadata:
languageName: node
linkType: hard
+"jackspeak@npm:^2.3.5":
+ version: 2.3.6
+ resolution: "jackspeak@npm:2.3.6"
+ dependencies:
+ "@isaacs/cliui": ^8.0.2
+ "@pkgjs/parseargs": ^0.11.0
+ dependenciesMeta:
+ "@pkgjs/parseargs":
+ optional: true
+ checksum: 57d43ad11eadc98cdfe7496612f6bbb5255ea69fe51ea431162db302c2a11011642f50cfad57288bd0aea78384a0612b16e131944ad8ecd09d619041c8531b54
+ languageName: node
+ linkType: hard
+
"jest-worker@npm:^27.4.5":
version: 27.5.1
resolution: "jest-worker@npm:27.5.1"
@@ -19566,7 +19945,7 @@ __metadata:
languageName: node
linkType: hard
-"js-yaml@npm:^4.1.0":
+"js-yaml@npm:^4.0.0, js-yaml@npm:^4.1.0":
version: 4.1.0
resolution: "js-yaml@npm:4.1.0"
dependencies:
@@ -19737,7 +20116,7 @@ __metadata:
languageName: node
linkType: hard
-"json5@npm:^2.1.1, json5@npm:^2.2.2":
+"json5@npm:^2.1.1, json5@npm:^2.2.2, json5@npm:^2.2.3":
version: 2.2.3
resolution: "json5@npm:2.2.3"
bin:
@@ -20641,6 +21020,13 @@ __metadata:
languageName: node
linkType: hard
+"lru-cache@npm:^9.1.1 || ^10.0.0":
+ version: 10.1.0
+ resolution: "lru-cache@npm:10.1.0"
+ checksum: 58056d33e2500fbedce92f8c542e7c11b50d7d086578f14b7074d8c241422004af0718e08a6eaae8705cee09c77e39a61c1c79e9370ba689b7010c152e6a76ab
+ languageName: node
+ linkType: hard
+
"magic-string@npm:^0.24.0":
version: 0.24.1
resolution: "magic-string@npm:0.24.1"
@@ -21327,6 +21713,13 @@ __metadata:
languageName: node
linkType: hard
+"mimic-fn@npm:^4.0.0":
+ version: 4.0.0
+ resolution: "mimic-fn@npm:4.0.0"
+ checksum: 995dcece15ee29aa16e188de6633d43a3db4611bcf93620e7e62109ec41c79c0f34277165b8ce5e361205049766e371851264c21ac64ca35499acb5421c2ba56
+ languageName: node
+ linkType: hard
+
"min-indent@npm:^1.0.1":
version: 1.0.1
resolution: "min-indent@npm:1.0.1"
@@ -21470,6 +21863,13 @@ __metadata:
languageName: node
linkType: hard
+"minipass@npm:^5.0.0 || ^6.0.2 || ^7.0.0":
+ version: 7.0.4
+ resolution: "minipass@npm:7.0.4"
+ checksum: 87585e258b9488caf2e7acea242fd7856bbe9a2c84a7807643513a338d66f368c7d518200ad7b70a508664d408aa000517647b2930c259a8b1f9f0984f344a21
+ languageName: node
+ linkType: hard
+
"minizlib@npm:^2.1.1, minizlib@npm:^2.1.2":
version: 2.1.2
resolution: "minizlib@npm:2.1.2"
@@ -22089,6 +22489,15 @@ __metadata:
languageName: node
linkType: hard
+"npm-run-path@npm:^5.1.0":
+ version: 5.2.0
+ resolution: "npm-run-path@npm:5.2.0"
+ dependencies:
+ path-key: ^4.0.0
+ checksum: c5325e016014e715689c4014f7e0be16cc4cbf529f32a1723e511bc4689b5f823b704d2bca61ac152ce2bda65e0205dc8b3ba0ec0f5e4c3e162d302f6f5b9efb
+ languageName: node
+ linkType: hard
+
"npmlog@npm:^6.0.0":
version: 6.0.2
resolution: "npmlog@npm:6.0.2"
@@ -22101,6 +22510,18 @@ __metadata:
languageName: node
linkType: hard
+"npmlog@npm:^7.0.0":
+ version: 7.0.1
+ resolution: "npmlog@npm:7.0.1"
+ dependencies:
+ are-we-there-yet: ^4.0.0
+ console-control-strings: ^1.1.0
+ gauge: ^5.0.0
+ set-blocking: ^2.0.0
+ checksum: caabeb1f557c1094ad7ed3275b968b83ccbaefc133f17366ebb9fe8eb44e1aace28c31419d6244bfc0422aede1202875d555fe6661978bf04386f6cf617f43a4
+ languageName: node
+ linkType: hard
+
"nth-check@npm:^1.0.2":
version: 1.0.2
resolution: "nth-check@npm:1.0.2"
@@ -22320,6 +22741,15 @@ __metadata:
languageName: node
linkType: hard
+"onetime@npm:^6.0.0":
+ version: 6.0.0
+ resolution: "onetime@npm:6.0.0"
+ dependencies:
+ mimic-fn: ^4.0.0
+ checksum: 0846ce78e440841335d4e9182ef69d5762e9f38aa7499b19f42ea1c4cd40f0b4446094c455c713f9adac3f4ae86f613bb5e30c99e52652764d06a89f709b3788
+ languageName: node
+ linkType: hard
+
"optionator@npm:^0.8.1":
version: 0.8.3
resolution: "optionator@npm:0.8.3"
@@ -22767,6 +23197,13 @@ __metadata:
languageName: node
linkType: hard
+"path-key@npm:^4.0.0":
+ version: 4.0.0
+ resolution: "path-key@npm:4.0.0"
+ checksum: 8e6c314ae6d16b83e93032c61020129f6f4484590a777eed709c4a01b50e498822b00f76ceaf94bc64dbd90b327df56ceadce27da3d83393790f1219e07721d7
+ languageName: node
+ linkType: hard
+
"path-parse@npm:^1.0.6, path-parse@npm:^1.0.7":
version: 1.0.7
resolution: "path-parse@npm:1.0.7"
@@ -22797,6 +23234,16 @@ __metadata:
languageName: node
linkType: hard
+"path-scurry@npm:^1.10.1":
+ version: 1.10.1
+ resolution: "path-scurry@npm:1.10.1"
+ dependencies:
+ lru-cache: ^9.1.1 || ^10.0.0
+ minipass: ^5.0.0 || ^6.0.2 || ^7.0.0
+ checksum: e2557cff3a8fb8bc07afdd6ab163a92587884f9969b05bbbaf6fe7379348bfb09af9ed292af12ed32398b15fb443e81692047b786d1eeb6d898a51eb17ed7d90
+ languageName: node
+ linkType: hard
+
"path-to-regexp@npm:0.1.7":
version: 0.1.7
resolution: "path-to-regexp@npm:0.1.7"
@@ -23662,6 +24109,19 @@ __metadata:
languageName: node
linkType: hard
+"readable-stream@npm:^4.1.0":
+ version: 4.5.2
+ resolution: "readable-stream@npm:4.5.2"
+ dependencies:
+ abort-controller: ^3.0.0
+ buffer: ^6.0.3
+ events: ^3.3.0
+ process: ^0.11.10
+ string_decoder: ^1.3.0
+ checksum: c4030ccff010b83e4f33289c535f7830190773e274b3fcb6e2541475070bdfd69c98001c3b0cb78763fc00c8b62f514d96c2b10a8bd35d5ce45203a25fa1d33a
+ languageName: node
+ linkType: hard
+
"readable-stream@npm:~1.0.2":
version: 1.0.34
resolution: "readable-stream@npm:1.0.34"
@@ -24415,6 +24875,17 @@ __metadata:
languageName: node
linkType: hard
+"rimraf@npm:^5.0.0":
+ version: 5.0.5
+ resolution: "rimraf@npm:5.0.5"
+ dependencies:
+ glob: ^10.3.7
+ bin:
+ rimraf: dist/esm/bin.mjs
+ checksum: d66eef829b2e23b16445f34e73d75c7b7cf4cbc8834b04720def1c8f298eb0753c3d76df77325fad79d0a2c60470525d95f89c2475283ad985fd7441c32732d1
+ languageName: node
+ linkType: hard
+
"rimraf@npm:~2.6.2":
version: 2.6.3
resolution: "rimraf@npm:2.6.3"
@@ -25096,6 +25567,13 @@ __metadata:
languageName: node
linkType: hard
+"signal-exit@npm:^4.1.0":
+ version: 4.1.0
+ resolution: "signal-exit@npm:4.1.0"
+ checksum: 64c757b498cb8629ffa5f75485340594d2f8189e9b08700e69199069c8e3070fb3e255f7ab873c05dc0b3cec412aea7402e10a5990cb6a050bd33ba062a6c549
+ languageName: node
+ linkType: hard
+
"silent-error@npm:^1.0.0, silent-error@npm:^1.0.1, silent-error@npm:^1.1.0, silent-error@npm:^1.1.1":
version: 1.1.1
resolution: "silent-error@npm:1.1.1"
@@ -25646,6 +26124,17 @@ __metadata:
languageName: node
linkType: hard
+"string-width-cjs@npm:string-width@^4.2.0, string-width@npm:^4.1.0, string-width@npm:^4.2.0":
+ version: 4.2.2
+ resolution: "string-width@npm:4.2.2"
+ dependencies:
+ emoji-regex: ^8.0.0
+ is-fullwidth-code-point: ^3.0.0
+ strip-ansi: ^6.0.0
+ checksum: 343e089b0e66e0f72aab4ad1d9b6f2c9cc5255844b0c83fd9b53f2a3b3fd0421bdd6cb05be96a73117eb012db0887a6c1d64ca95aaa50c518e48980483fea0ab
+ languageName: node
+ linkType: hard
+
"string-width@npm:^1.0.2 || 2 || 3 || 4, string-width@npm:^4.2.3":
version: 4.2.3
resolution: "string-width@npm:4.2.3"
@@ -25678,14 +26167,14 @@ __metadata:
languageName: node
linkType: hard
-"string-width@npm:^4.1.0, string-width@npm:^4.2.0":
- version: 4.2.2
- resolution: "string-width@npm:4.2.2"
+"string-width@npm:^5.0.1, string-width@npm:^5.1.2":
+ version: 5.1.2
+ resolution: "string-width@npm:5.1.2"
dependencies:
- emoji-regex: ^8.0.0
- is-fullwidth-code-point: ^3.0.0
- strip-ansi: ^6.0.0
- checksum: 343e089b0e66e0f72aab4ad1d9b6f2c9cc5255844b0c83fd9b53f2a3b3fd0421bdd6cb05be96a73117eb012db0887a6c1d64ca95aaa50c518e48980483fea0ab
+ eastasianwidth: ^0.2.0
+ emoji-regex: ^9.2.2
+ strip-ansi: ^7.0.1
+ checksum: 7369deaa29f21dda9a438686154b62c2c5f661f8dda60449088f9f980196f7908fc39fdd1803e3e01541970287cf5deae336798337e9319a7055af89dafa7193
languageName: node
linkType: hard
@@ -25806,7 +26295,7 @@ __metadata:
languageName: node
linkType: hard
-"string_decoder@npm:^1.0.0, string_decoder@npm:^1.1.1":
+"string_decoder@npm:^1.0.0, string_decoder@npm:^1.1.1, string_decoder@npm:^1.3.0":
version: 1.3.0
resolution: "string_decoder@npm:1.3.0"
dependencies:
@@ -25835,6 +26324,15 @@ __metadata:
languageName: node
linkType: hard
+"strip-ansi-cjs@npm:strip-ansi@^6.0.1, strip-ansi@npm:^6.0.1":
+ version: 6.0.1
+ resolution: "strip-ansi@npm:6.0.1"
+ dependencies:
+ ansi-regex: ^5.0.1
+ checksum: f3cd25890aef3ba6e1a74e20896c21a46f482e93df4a06567cebf2b57edabb15133f1f94e57434e0a958d61186087b1008e89c94875d019910a213181a14fc8c
+ languageName: node
+ linkType: hard
+
"strip-ansi@npm:^3.0.0":
version: 3.0.1
resolution: "strip-ansi@npm:3.0.1"
@@ -25871,12 +26369,12 @@ __metadata:
languageName: node
linkType: hard
-"strip-ansi@npm:^6.0.1":
- version: 6.0.1
- resolution: "strip-ansi@npm:6.0.1"
+"strip-ansi@npm:^7.0.1":
+ version: 7.1.0
+ resolution: "strip-ansi@npm:7.1.0"
dependencies:
- ansi-regex: ^5.0.1
- checksum: f3cd25890aef3ba6e1a74e20896c21a46f482e93df4a06567cebf2b57edabb15133f1f94e57434e0a958d61186087b1008e89c94875d019910a213181a14fc8c
+ ansi-regex: ^6.0.1
+ checksum: 859c73fcf27869c22a4e4d8c6acfe690064659e84bef9458aa6d13719d09ca88dcfd40cbf31fd0be63518ea1a643fe070b4827d353e09533a5b0b9fd4553d64d
languageName: node
linkType: hard
@@ -25917,6 +26415,13 @@ __metadata:
languageName: node
linkType: hard
+"strip-final-newline@npm:^3.0.0":
+ version: 3.0.0
+ resolution: "strip-final-newline@npm:3.0.0"
+ checksum: 23ee263adfa2070cd0f23d1ac14e2ed2f000c9b44229aec9c799f1367ec001478469560abefd00c5c99ee6f0b31c137d53ec6029c53e9f32a93804e18c201050
+ languageName: node
+ linkType: hard
+
"strip-indent@npm:^4.0.0":
version: 4.0.0
resolution: "strip-indent@npm:4.0.0"
@@ -27425,6 +27930,7 @@ __metadata:
ember-d3: ^0.5.1
ember-data: ~4.11.3
ember-engines: 0.8.23
+ ember-exam: ^9.0.0
ember-fetch: ^8.1.2
ember-inflector: 4.0.2
ember-load-initializers: ^2.1.2
@@ -28025,6 +28531,17 @@ __metadata:
languageName: node
linkType: hard
+"wrap-ansi-cjs@npm:wrap-ansi@^7.0.0, wrap-ansi@npm:^7.0.0":
+ version: 7.0.0
+ resolution: "wrap-ansi@npm:7.0.0"
+ dependencies:
+ ansi-styles: ^4.0.0
+ string-width: ^4.1.0
+ strip-ansi: ^6.0.0
+ checksum: a790b846fd4505de962ba728a21aaeda189b8ee1c7568ca5e817d85930e06ef8d1689d49dbf0e881e8ef84436af3a88bc49115c2e2788d841ff1b8b5b51a608b
+ languageName: node
+ linkType: hard
+
"wrap-ansi@npm:^5.1.0":
version: 5.1.0
resolution: "wrap-ansi@npm:5.1.0"
@@ -28047,14 +28564,14 @@ __metadata:
languageName: node
linkType: hard
-"wrap-ansi@npm:^7.0.0":
- version: 7.0.0
- resolution: "wrap-ansi@npm:7.0.0"
+"wrap-ansi@npm:^8.1.0":
+ version: 8.1.0
+ resolution: "wrap-ansi@npm:8.1.0"
dependencies:
- ansi-styles: ^4.0.0
- string-width: ^4.1.0
- strip-ansi: ^6.0.0
- checksum: a790b846fd4505de962ba728a21aaeda189b8ee1c7568ca5e817d85930e06ef8d1689d49dbf0e881e8ef84436af3a88bc49115c2e2788d841ff1b8b5b51a608b
+ ansi-styles: ^6.1.0
+ string-width: ^5.0.1
+ strip-ansi: ^7.0.1
+ checksum: 371733296dc2d616900ce15a0049dca0ef67597d6394c57347ba334393599e800bab03c41d4d45221b6bc967b8c453ec3ae4749eff3894202d16800fdfe0e238
languageName: node
linkType: hard
diff --git a/vault/activity_log_util_common.go b/vault/activity_log_util_common.go
index f0f413e9aadd..eca59e3c0a55 100644
--- a/vault/activity_log_util_common.go
+++ b/vault/activity_log_util_common.go
@@ -110,59 +110,61 @@ func (a *ActivityLog) computeCurrentMonthForBillingPeriodInternal(ctx context.Co
}
hllMonthlyTimestamp = timeutil.StartOfNextMonth(hllMonthlyTimestamp)
}
-
- // Now we will add the clients for the current month to a copy of the billing period's hll to
- // see how the cardinality grows.
- billingPeriodHLLWithCurrentMonthEntityClients := billingPeriodHLL.Clone()
- billingPeriodHLLWithCurrentMonthNonEntityClients := billingPeriodHLL.Clone()
-
// There's at most one month of data here. We should validate this assumption explicitly
if len(byMonth) > 1 {
return nil, errors.New(fmt.Sprintf("multiple months of data found in partial month's client count breakdowns: %+v\n", byMonth))
}
- totalEntities := 0
- totalNonEntities := 0
- for _, month := range byMonth {
+ activityTypes := []string{entityActivityType, nonEntityTokenActivityType, secretSyncActivityType}
+
+ // Now we will add the clients for the current month to a copy of the billing period's hll to
+ // see how the cardinality grows.
+ hllByType := make(map[string]*hyperloglog.Sketch, len(activityTypes))
+ totalByType := make(map[string]int, len(activityTypes))
+ for _, typ := range activityTypes {
+ hllByType[typ] = billingPeriodHLL.Clone()
+ }
+ for _, month := range byMonth {
if month.NewClients == nil || month.NewClients.Counts == nil || month.Counts == nil {
return nil, errors.New("malformed current month used to calculate current month's activity")
}
- // Note that the following calculations assume that all clients seen are currently in
- // the NewClients section of byMonth. It is best to explicitly check this, just verify
- // our assumptions about the passed in byMonth argument.
- if month.Counts.countByType(entityActivityType) != month.NewClients.Counts.countByType(entityActivityType) ||
- month.Counts.countByType(nonEntityTokenActivityType) != month.NewClients.Counts.countByType(nonEntityTokenActivityType) {
- return nil, errors.New("current month clients cache assumes billing period")
- }
-
- // All the clients for the current month are in the newClients section, initially.
- // We need to deduplicate these clients across the billing period by adding them
- // into the billing period hyperloglogs.
- entities := month.NewClients.Counts.clientsByType(entityActivityType)
- nonEntities := month.NewClients.Counts.clientsByType(nonEntityTokenActivityType)
- if entities != nil {
- for entityID := range entities {
- billingPeriodHLLWithCurrentMonthEntityClients.Insert([]byte(entityID))
- totalEntities += 1
+ for _, typ := range activityTypes {
+ // Note that the following calculations assume that all clients seen are currently in
+			// the NewClients section of byMonth, so we check this explicitly to verify
+			// our assumptions about the passed-in byMonth argument.
+ if month.Counts.countByType(typ) != month.NewClients.Counts.countByType(typ) {
+ return nil, errors.New("current month clients cache assumes billing period")
}
- }
- if nonEntities != nil {
- for nonEntityID := range nonEntities {
- billingPeriodHLLWithCurrentMonthNonEntityClients.Insert([]byte(nonEntityID))
- totalNonEntities += 1
+ for clientID := range month.NewClients.Counts.clientsByType(typ) {
+ // All the clients for the current month are in the newClients section, initially.
+ // We need to deduplicate these clients across the billing period by adding them
+ // into the billing period hyperloglogs.
+ hllByType[typ].Insert([]byte(clientID))
+ totalByType[typ] += 1
}
}
}
- // The number of new entities for the current month is approximately the size of the hll with
- // the current month's entities minus the size of the initial billing period hll.
- currentMonthNewEntities := billingPeriodHLLWithCurrentMonthEntityClients.Estimate() - billingPeriodHLL.Estimate()
- currentMonthNewNonEntities := billingPeriodHLLWithCurrentMonthNonEntityClients.Estimate() - billingPeriodHLL.Estimate()
+ currentMonthNewByType := make(map[string]int, len(activityTypes))
+ for _, typ := range activityTypes {
+		// The number of new clients of each type for the current month is approximately the size of the
+		// hll with that type's current-month clients minus the size of the initial billing period hll.
+ currentMonthNewByType[typ] = int(hllByType[typ].Estimate() - billingPeriodHLL.Estimate())
+ }
+
return &activity.MonthRecord{
- Timestamp: timeutil.StartOfMonth(endTime).UTC().Unix(),
- NewClients: &activity.NewClientRecord{Counts: &activity.CountsRecord{EntityClients: int(currentMonthNewEntities), NonEntityClients: int(currentMonthNewNonEntities)}},
- Counts: &activity.CountsRecord{EntityClients: totalEntities, NonEntityClients: totalNonEntities},
+ Timestamp: timeutil.StartOfMonth(endTime).UTC().Unix(),
+ NewClients: &activity.NewClientRecord{Counts: &activity.CountsRecord{
+ EntityClients: currentMonthNewByType[entityActivityType],
+ NonEntityClients: currentMonthNewByType[nonEntityTokenActivityType],
+ SecretSyncs: currentMonthNewByType[secretSyncActivityType],
+ }},
+ Counts: &activity.CountsRecord{
+ EntityClients: totalByType[entityActivityType],
+ NonEntityClients: totalByType[nonEntityTokenActivityType],
+ SecretSyncs: totalByType[secretSyncActivityType],
+ },
}, nil
}
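
To make the refactored counting loop easier to follow, here is a rough JavaScript analogue of the per-type logic, using exact Sets in place of hyperloglog sketches (the real code's `Estimate()` delta is an approximation; all client IDs below are made up):

```js
// Clone the billing-period membership per activity type, add the current
// month's clients, and treat the growth in cardinality as that type's new
// clients for the month.
const activityTypes = ['entity', 'non-entity-token', 'secret-sync'];
const billingPeriod = new Set(['client_a', 'client_b']); // prior-months clients

const currentMonthByType = {
  entity: ['client_a', 'client_c'], // client_a repeats, client_c is new
  'non-entity-token': ['client_d'],
  'secret-sync': [],
};

const totalByType = {};
const newByType = {};
for (const typ of activityTypes) {
  const withCurrentMonth = new Set(billingPeriod); // like billingPeriodHLL.Clone()
  for (const clientID of currentMonthByType[typ]) withCurrentMonth.add(clientID);
  totalByType[typ] = currentMonthByType[typ].length;
  // With Sets this difference is exact; with HLLs it is only an estimate.
  newByType[typ] = withCurrentMonth.size - billingPeriod.size;
}
// newByType.entity === 1, totalByType.entity === 2
```
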
diff --git a/vault/activity_log_util_common_test.go b/vault/activity_log_util_common_test.go
index af8d8b49951b..91b064aa4c35 100644
--- a/vault/activity_log_util_common_test.go
+++ b/vault/activity_log_util_common_test.go
@@ -18,28 +18,31 @@ import (
"google.golang.org/protobuf/proto"
)
-// Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal creates 3 months of hyperloglogs and fills them with
-// overlapping clients. The test calls computeCurrentMonthForBillingPeriodInternal with the current month map having
-// some overlap with the previous months. The test then verifies that the results have the correct number of entity and
-// non-entity clients. The test also calls computeCurrentMonthForBillingPeriodInternal with an empty current month map,
+// Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal creates 3 months
+// of hyperloglogs and fills them with overlapping clients. The test calls
+// computeCurrentMonthForBillingPeriodInternal with the current month map having
+// some overlap with the previous months. The test then verifies that the
+// results have the correct number of entity, non-entity, and secret sync
+// association clients. The test also calls
+// computeCurrentMonthForBillingPeriodInternal with an empty current month map,
// and verifies that the results are all 0.
func Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal(t *testing.T) {
- // populate the first month with clients 1-10
+ // populate the first month with clients 1-20
monthOneHLL := hyperloglog.New()
- // populate the second month with clients 5-15
+ // populate the second month with clients 10-30
monthTwoHLL := hyperloglog.New()
- // populate the third month with clients 10-20
+ // populate the third month with clients 20-40
monthThreeHLL := hyperloglog.New()
- for i := 0; i < 20; i++ {
+ for i := 0; i < 40; i++ {
clientID := []byte(fmt.Sprintf("client_%d", i))
- if i < 10 {
+ if i < 20 {
monthOneHLL.Insert(clientID)
}
- if 5 <= i && i < 15 {
+ if 10 <= i && i < 20 {
monthTwoHLL.Insert(clientID)
}
- if 10 <= i && i < 20 {
+ if 20 <= i && i < 40 {
monthThreeHLL.Insert(clientID)
}
}
@@ -57,51 +60,72 @@ func Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal(t *testing.T)
return nil, fmt.Errorf("bad start time")
}
+ // Below we register the entity, non-entity, and secret sync clients that
+ // are seen in the current month
+
// Let's add 2 entities exclusive to month 1 (clients 0,1),
- // 2 entities shared by month 1 and 2 (clients 5,6),
- // 2 entities shared by month 2 and 3 (clients 10,11), and
- // 2 entities exclusive to month 3 (15,16). Furthermore, we can add
- // 3 new entities (clients 20,21, and 22).
- entitiesStruct := make(map[string]struct{}, 0)
- entitiesStruct["client_0"] = struct{}{}
- entitiesStruct["client_1"] = struct{}{}
- entitiesStruct["client_5"] = struct{}{}
- entitiesStruct["client_6"] = struct{}{}
- entitiesStruct["client_10"] = struct{}{}
- entitiesStruct["client_11"] = struct{}{}
- entitiesStruct["client_15"] = struct{}{}
- entitiesStruct["client_16"] = struct{}{}
- entitiesStruct["client_20"] = struct{}{}
- entitiesStruct["client_21"] = struct{}{}
- entitiesStruct["client_22"] = struct{}{}
+ // 2 entities shared by month 1 and 2 (clients 10,11),
+ // 2 entities shared by month 2 and 3 (clients 20,21), and
+ // 2 entities exclusive to month 3 (30,31). Furthermore, we can add
+ // 3 new entities (clients 40,41,42).
+ entitiesStruct := map[string]struct{}{
+ "client_0": {},
+ "client_1": {},
+ "client_10": {},
+ "client_11": {},
+ "client_20": {},
+ "client_21": {},
+ "client_30": {},
+ "client_31": {},
+ "client_40": {},
+ "client_41": {},
+ "client_42": {},
+ }
// We will add 3 nonentity clients from month 1 (clients 2,3,4),
- // 3 shared by months 1 and 2 (7,8,9),
- // 3 shared by months 2 and 3 (12,13,14), and
- // 3 exclusive to month 3 (17,18,19). We will also
- // add 4 new nonentity clients.
- nonEntitiesStruct := make(map[string]struct{}, 0)
- nonEntitiesStruct["client_2"] = struct{}{}
- nonEntitiesStruct["client_3"] = struct{}{}
- nonEntitiesStruct["client_4"] = struct{}{}
- nonEntitiesStruct["client_7"] = struct{}{}
- nonEntitiesStruct["client_8"] = struct{}{}
- nonEntitiesStruct["client_9"] = struct{}{}
- nonEntitiesStruct["client_12"] = struct{}{}
- nonEntitiesStruct["client_13"] = struct{}{}
- nonEntitiesStruct["client_14"] = struct{}{}
- nonEntitiesStruct["client_17"] = struct{}{}
- nonEntitiesStruct["client_18"] = struct{}{}
- nonEntitiesStruct["client_19"] = struct{}{}
- nonEntitiesStruct["client_23"] = struct{}{}
- nonEntitiesStruct["client_24"] = struct{}{}
- nonEntitiesStruct["client_25"] = struct{}{}
- nonEntitiesStruct["client_26"] = struct{}{}
+ // 3 shared by months 1 and 2 (12,13,14),
+ // 3 shared by months 2 and 3 (22,23,24), and
+ // 3 exclusive to month 3 (32,33,34). We will also
+ // add 4 new nonentity clients (43,44,45,46)
+ nonEntitiesStruct := map[string]struct{}{
+ "client_2": {},
+ "client_3": {},
+ "client_4": {},
+ "client_12": {},
+ "client_13": {},
+ "client_14": {},
+ "client_22": {},
+ "client_23": {},
+ "client_24": {},
+ "client_32": {},
+ "client_33": {},
+ "client_34": {},
+ "client_43": {},
+ "client_44": {},
+ "client_45": {},
+ "client_46": {},
+ }
+
+ // secret syncs have 1 client from month 1 (5)
+ // 1 shared by months 1 and 2 (15)
+ // 1 shared by months 2 and 3 (25)
+ // 2 exclusive to month 3 (35,36)
+ // and 2 new clients (47,48)
+ secretSyncStruct := map[string]struct{}{
+ "client_5": {},
+ "client_15": {},
+ "client_25": {},
+ "client_35": {},
+ "client_36": {},
+ "client_47": {},
+ "client_48": {},
+ }
counts := &processCounts{
ClientsByType: map[string]clientIDSet{
entityActivityType: entitiesStruct,
nonEntityTokenActivityType: nonEntitiesStruct,
+ secretSyncActivityType: secretSyncStruct,
},
}
@@ -122,48 +146,29 @@ func Test_ActivityLog_ComputeCurrentMonthForBillingPeriodInternal(t *testing.T)
startTime := timeutil.MonthsPreviousTo(3, endTime)
monthRecord, err := a.computeCurrentMonthForBillingPeriodInternal(context.Background(), currentMonthClientsMap, mockHLLGetFunc, startTime, endTime)
- if err != nil {
- t.Fatal(err)
- }
+ require.NoError(t, err)
- // We should have 11 entity clients and 16 nonentity clients, and 3 new entity clients
- // and 4 new nonentity clients
- if monthRecord.Counts.EntityClients != 11 {
- t.Fatalf("wrong number of entity clients. Expected 11, got %d", monthRecord.Counts.EntityClients)
- }
- if monthRecord.Counts.NonEntityClients != 16 {
- t.Fatalf("wrong number of non entity clients. Expected 16, got %d", monthRecord.Counts.NonEntityClients)
- }
- if monthRecord.NewClients.Counts.EntityClients != 3 {
- t.Fatalf("wrong number of new entity clients. Expected 3, got %d", monthRecord.NewClients.Counts.EntityClients)
- }
- if monthRecord.NewClients.Counts.NonEntityClients != 4 {
- t.Fatalf("wrong number of new non entity clients. Expected 4, got %d", monthRecord.NewClients.Counts.NonEntityClients)
- }
+ require.Equal(t, &activity.CountsRecord{
+ EntityClients: 11,
+ NonEntityClients: 16,
+ SecretSyncs: 7,
+ }, monthRecord.Counts)
+
+ require.Equal(t, &activity.CountsRecord{
+ EntityClients: 3,
+ NonEntityClients: 4,
+ SecretSyncs: 2,
+ }, monthRecord.NewClients.Counts)
// Attempt to compute current month when no records exist
endTime = time.Now().UTC()
startTime = timeutil.StartOfMonth(endTime)
emptyClientsMap := make(map[int64]*processMonth, 0)
monthRecord, err = a.computeCurrentMonthForBillingPeriodInternal(context.Background(), emptyClientsMap, mockHLLGetFunc, startTime, endTime)
- if err != nil {
- t.Fatalf("failed to compute empty current month, err: %v", err)
- }
+ require.NoError(t, err)
- // We should have 0 entity clients, nonentity clients,new entity clients
- // and new nonentity clients
- if monthRecord.Counts.EntityClients != 0 {
- t.Fatalf("wrong number of entity clients. Expected 0, got %d", monthRecord.Counts.EntityClients)
- }
- if monthRecord.Counts.NonEntityClients != 0 {
- t.Fatalf("wrong number of non entity clients. Expected 0, got %d", monthRecord.Counts.NonEntityClients)
- }
- if monthRecord.NewClients.Counts.EntityClients != 0 {
- t.Fatalf("wrong number of new entity clients. Expected 0, got %d", monthRecord.NewClients.Counts.EntityClients)
- }
- if monthRecord.NewClients.Counts.NonEntityClients != 0 {
- t.Fatalf("wrong number of new non entity clients. Expected 0, got %d", monthRecord.NewClients.Counts.NonEntityClients)
- }
+ require.Equal(t, &activity.CountsRecord{}, monthRecord.Counts)
+ require.Equal(t, &activity.CountsRecord{}, monthRecord.NewClients.Counts)
}
// writeEntitySegment writes a single segment file with the given time and index for an entity
diff --git a/vault/audit.go b/vault/audit.go
index a3d6fcfab9b8..7f2e5cac47af 100644
--- a/vault/audit.go
+++ b/vault/audit.go
@@ -9,6 +9,7 @@ import (
"errors"
"fmt"
"os"
+ "strconv"
"strings"
"time"
@@ -81,6 +82,17 @@ func (c *Core) enableAudit(ctx context.Context, entry *MountEntry, updateStorage
return fmt.Errorf("backend path must be specified")
}
+ if fallbackRaw, ok := entry.Options["fallback"]; ok {
+ fallback, err := parseutil.ParseBool(fallbackRaw)
+ if err != nil {
+ return fmt.Errorf("unable to enable audit device '%s', cannot parse supplied 'fallback' setting: %w", entry.Path, err)
+ }
+
+ // Reassigning the fallback value means we can ensure that the formatting
+ // of it as a string is consistent for future comparisons.
+ entry.Options["fallback"] = strconv.FormatBool(fallback)
+ }
+
// Update the audit table
c.auditLock.Lock()
defer c.auditLock.Unlock()
@@ -88,6 +100,8 @@ func (c *Core) enableAudit(ctx context.Context, entry *MountEntry, updateStorage
// Look for matching name
for _, ent := range c.audit.Entries {
switch {
+ case entry.Options["fallback"] == "true" && ent.Options["fallback"] == "true":
+ return fmt.Errorf("unable to enable audit device '%s', a fallback device already exists '%s'", entry.Path, ent.Path)
// Existing is sql/mysql/ new is sql/ or
// existing is sql/ and new is sql/mysql/
case strings.HasPrefix(ent.Path, entry.Path):
@@ -531,7 +545,7 @@ func (c *Core) newAuditBackend(ctx context.Context, entry *MountEntry, view logi
!disableEventLogger,
c.auditedHeaders)
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("unable to create new audit backend: %w", err)
}
if be == nil {
return nil, fmt.Errorf("nil backend returned from %q factory function", entry.Type)
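
The 'fallback' option handling in enableAudit above accepts any spelling parseutil recognises ("1", "TRUE", "t", ...) and rewrites it into a canonical string, so the later check against existing mount entries can be a plain string equality. A small sketch of that normalization step in isolation (the function name is illustrative):

```go
package main

import (
	"fmt"
	"strconv"

	"github.com/hashicorp/go-secure-stdlib/parseutil"
)

// normalizeFallback parses an operator-supplied 'fallback' option and rewrites
// it as "true" or "false" so later string comparisons between mount entries
// are reliable regardless of how the value was originally spelled.
func normalizeFallback(options map[string]string) error {
	raw, ok := options["fallback"]
	if !ok {
		return nil
	}
	fallback, err := parseutil.ParseBool(raw)
	if err != nil {
		return fmt.Errorf("cannot parse supplied 'fallback' setting: %w", err)
	}
	options["fallback"] = strconv.FormatBool(fallback)
	return nil
}

func main() {
	opts := map[string]string{"fallback": "TRUE"}
	if err := normalizeFallback(opts); err != nil {
		panic(err)
	}
	fmt.Println(opts["fallback"]) // true
}
```
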
diff --git a/vault/audit_broker.go b/vault/audit_broker.go
index 82e19de508ac..fbfa0d276915 100644
--- a/vault/audit_broker.go
+++ b/vault/audit_broker.go
@@ -32,32 +32,52 @@ type backendEntry struct {
type AuditBroker struct {
sync.RWMutex
backends map[string]backendEntry
- logger hclog.Logger
+ // broker is used to register pipelines for all devices except a fallback device.
broker *eventlogger.Broker
+
+ // fallbackBroker is used to register a pipeline to be used as a fallback
+ // in situations where we cannot use the eventlogger.Broker to guarantee that
+ // the required number of sinks were successfully written to. This situation
+ // occurs when all the audit devices registered with the broker use filtering.
+ // NOTE: there should only ever be a single device registered on the fallbackBroker.
+ fallbackBroker *eventlogger.Broker
+
+ // fallbackName stores the name (path) of the audit device which has been configured
+ // as the fallback pipeline (its eventlogger.PipelineID).
+ fallbackName string
+ logger hclog.Logger
}
// NewAuditBroker creates a new audit broker
func NewAuditBroker(log hclog.Logger, useEventLogger bool) (*AuditBroker, error) {
var eventBroker *eventlogger.Broker
+ var fallbackBroker *eventlogger.Broker
var err error
// The reason for this check is due to 1.15.x supporting the env var:
// 'VAULT_AUDIT_DISABLE_EVENTLOGGER'
// When NewAuditBroker is called, it is supplied a bool to determine whether
- // we initialize the broker, which are left nil otherwise.
+ // we initialize the broker (and fallback broker), which are left nil otherwise.
// In 1.16.x this check should go away and the env var removed.
if useEventLogger {
eventBroker, err = eventlogger.NewBroker()
if err != nil {
return nil, fmt.Errorf("error creating event broker for audit events: %w", err)
}
+
+ // Set up the broker that will support a single fallback device.
+ fallbackBroker, err = eventlogger.NewBroker()
+ if err != nil {
+ return nil, fmt.Errorf("error creating event fallback broker for audit event: %w", err)
+ }
}
b := &AuditBroker{
- backends: make(map[string]backendEntry),
- logger: log,
- broker: eventBroker,
+ backends: make(map[string]backendEntry),
+ logger: log,
+ broker: eventBroker,
+ fallbackBroker: fallbackBroker,
}
return b, nil
}
@@ -74,19 +94,42 @@ func (a *AuditBroker) Register(name string, b audit.Backend, local bool) error {
return fmt.Errorf("%s: name is required: %w", op, event.ErrInvalidParameter)
}
+ // If the backend is already registered, we cannot re-register it.
+ if a.isRegistered(name) {
+ return fmt.Errorf("%s: backend already registered '%s'", op, name)
+ }
+
+ // Fallback devices are singleton instances; we cannot register more than one or overwrite the existing one.
+ if b.IsFallback() && a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
+ existing, err := a.existingFallbackName()
+ if err != nil {
+ return fmt.Errorf("%s: existing fallback device already registered: %w", op, err)
+ }
+
+ return fmt.Errorf("%s: existing fallback device already registered: %q", op, existing)
+ }
+
// The reason for this check is due to 1.15.x supporting the env var:
// 'VAULT_AUDIT_DISABLE_EVENTLOGGER'
// When NewAuditBroker is called, it is supplied a bool to determine whether
- // we initialize the broker, which are left nil otherwise.
+ // we initialize the broker (and fallback broker), which are left nil otherwise.
// In 1.16.x this check should go away and the env var removed.
if a.broker != nil {
if name != b.Name() {
return fmt.Errorf("%s: audit registration failed due to device name mismatch: %q, %q", op, name, b.Name())
}
- err := a.register(name, b)
- if err != nil {
- return fmt.Errorf("%s: unable to register device for %q: %w", op, name, err)
+ switch {
+ case b.IsFallback():
+ err := a.registerFallback(name, b)
+ if err != nil {
+ return fmt.Errorf("%s: unable to register fallback device for %q: %w", op, name, err)
+ }
+ default:
+ err := a.register(name, b)
+ if err != nil {
+ return fmt.Errorf("%s: unable to register device for %q: %w", op, name, err)
+ }
}
}
@@ -110,6 +153,12 @@ func (a *AuditBroker) Deregister(ctx context.Context, name string) error {
return fmt.Errorf("%s: name is required: %w", op, event.ErrInvalidParameter)
}
+ // If the backend isn't actually registered, then there's nothing to do.
+ // We don't return any error so that Deregister can be idempotent.
+ if !a.isRegistered(name) {
+ return nil
+ }
+
// Remove the Backend from the map first, so that if an error occurs while
// removing the pipeline and nodes, we can quickly exit this method with
// the error.
@@ -118,23 +167,37 @@ func (a *AuditBroker) Deregister(ctx context.Context, name string) error {
// The reason for this check is due to 1.15.x supporting the env var:
// 'VAULT_AUDIT_DISABLE_EVENTLOGGER'
// When NewAuditBroker is called, it is supplied a bool to determine whether
- // we initialize the broker, which are left nil otherwise.
+ // we initialize the broker (and fallback broker), which are left nil otherwise.
// In 1.16.x this check should go away and the env var removed.
if a.broker != nil {
- err := a.deregister(ctx, name)
- if err != nil {
- return fmt.Errorf("%s: deregistration failed for audit device %q: %w", op, name, err)
+ switch {
+ case name == a.fallbackName:
+ err := a.deregisterFallback(ctx, name)
+ if err != nil {
+ return fmt.Errorf("%s: deregistration failed for fallback audit device %q: %w", op, name, err)
+ }
+ default:
+ err := a.deregister(ctx, name)
+ if err != nil {
+ return fmt.Errorf("%s: deregistration failed for audit device %q: %w", op, name, err)
+ }
}
}
return nil
}
-// IsRegistered is used to check if a given audit backend is registered
+// IsRegistered is used to check if a given audit backend is registered.
func (a *AuditBroker) IsRegistered(name string) bool {
a.RLock()
defer a.RUnlock()
+ return a.isRegistered(name)
+}
+
+// isRegistered is used to check if a given audit backend is registered.
+// This method should be used within the AuditBroker to prevent locking issues.
+func (a *AuditBroker) isRegistered(name string) bool {
_, ok := a.backends[name]
return ok
}
@@ -236,6 +299,9 @@ func (a *AuditBroker) LogRequest(ctx context.Context, in *logical.LogInput, head
e.Data = in
+ // There may be cases where only the fallback device was added but no other
+ // normal audit devices, so check if the broker has an audit-based pipeline
+ // registered before trying to send to it.
var status eventlogger.Status
if a.broker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
status, err = a.broker.Send(ctx, eventlogger.EventType(event.AuditType.String()), e)
@@ -255,6 +321,15 @@ func (a *AuditBroker) LogRequest(ctx context.Context, in *logical.LogInput, head
retErr = multierror.Append(retErr, multierror.Append(errors.New("error during audit pipeline processing"), status.Warnings...))
return retErr.ErrorOrNil()
}
+
+ // If a fallback device is registered we can rely on it to 'catch all'
+ // events, and on the fallback broker's own guarantee that the sink completed.
+ if a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
+ status, err = a.fallbackBroker.Send(ctx, eventlogger.EventType(event.AuditType.String()), e)
+ if err != nil {
+ retErr = multierror.Append(retErr, multierror.Append(fmt.Errorf("auditing request to fallback device failed: %w", err), status.Warnings...))
+ }
+ }
}
}
@@ -349,6 +424,9 @@ func (a *AuditBroker) LogResponse(ctx context.Context, in *logical.LogInput, hea
defer auditCancel()
auditContext = namespace.ContextWithNamespace(auditContext, ns)
+ // There may be cases where only the fallback device was added but no other
+ // normal audit devices, so check if the broker has an audit-based pipeline
+ // registered before trying to send to it.
var status eventlogger.Status
if a.broker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
status, err = a.broker.Send(auditContext, eventlogger.EventType(event.AuditType.String()), e)
@@ -368,6 +446,15 @@ func (a *AuditBroker) LogResponse(ctx context.Context, in *logical.LogInput, hea
retErr = multierror.Append(retErr, multierror.Append(errors.New("error during audit pipeline processing"), status.Warnings...))
return retErr.ErrorOrNil()
}
+
+ // If a fallback device is registered we can rely on it to 'catch all'
+ // events, and on the fallback broker's own guarantee that the sink completed.
+ if a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())) {
+ status, err = a.fallbackBroker.Send(auditContext, eventlogger.EventType(event.AuditType.String()), e)
+ if err != nil {
+ retErr = multierror.Append(retErr, multierror.Append(fmt.Errorf("auditing response to fallback device failed: %w", err), status.Warnings...))
+ }
+ }
}
}
@@ -391,13 +478,19 @@ func (a *AuditBroker) Invalidate(ctx context.Context, key string) {
// guarantee provided by setting the threshold to 1, and must set it to 0.
// If you are registering an audit device, you should first check if that backend
// does not have filtering before querying the backends via requiredSuccessThresholdSinks.
+// backends may also contain a fallback device, which should be ignored as it is
+// handled by the fallbackBroker.
func (a *AuditBroker) requiredSuccessThresholdSinks() int {
threshold := 0
// We might need to check over all the existing backends to discover if any
// don't use filtering.
for _, be := range a.backends {
- if !be.backend.HasFiltering() {
+ switch {
+ case be.backend.IsFallback():
+ // Ignore fallback devices as they're handled by a separate broker.
+ continue
+ case !be.backend.HasFiltering():
threshold = 1
break
}
@@ -432,6 +525,65 @@ func registerNodesAndPipeline(broker *eventlogger.Broker, b audit.Backend) error
return nil
}
+// existingFallbackName returns the name of the fallback device which is registered
+// with the AuditBroker.
+func (a *AuditBroker) existingFallbackName() (string, error) {
+ const op = "vault.(AuditBroker).existingFallbackName"
+
+ for _, be := range a.backends {
+ if be.backend.IsFallback() {
+ return be.backend.Name(), nil
+ }
+ }
+
+ return "", fmt.Errorf("%s: existing fallback device name is missing", op)
+}
+
+// registerFallback can be used to register a fallback device; it also
+// configures the success threshold required for sinks.
+func (a *AuditBroker) registerFallback(name string, backend audit.Backend) error {
+ const op = "vault.(AuditBroker).registerFallback"
+
+ err := registerNodesAndPipeline(a.fallbackBroker, backend)
+ if err != nil {
+ return fmt.Errorf("%s: fallback device pipeline registration error: %w", op, err)
+ }
+
+ // Store the name of the fallback audit device so that we can check when
+ // deregistering if the device is the single fallback one.
+ a.fallbackName = backend.Name()
+
+ // We need to turn on the threshold for the fallback broker, so we can
+ // guarantee the audit event ends up being written somewhere
+ err = a.fallbackBroker.SetSuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()), 1)
+ if err != nil {
+ return fmt.Errorf("%s: unable to configure fallback sink success threshold (1) for %q: %w", op, name, err)
+ }
+
+ return nil
+}
+
+// deregisterFallback can be used to deregister a fallback audit device; it also
+// configures the success threshold required for sinks.
+func (a *AuditBroker) deregisterFallback(ctx context.Context, name string) error {
+ const op = "vault.(AuditBroker).deregisterFallback"
+
+ err := a.fallbackBroker.SetSuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()), 0)
+ if err != nil {
+ return fmt.Errorf("%s: unable to configure fallback sink success threshold (0) for %q: %w", op, name, err)
+ }
+
+ _, err = a.fallbackBroker.RemovePipelineAndNodes(ctx, eventlogger.EventType(event.AuditType.String()), eventlogger.PipelineID(name))
+ if err != nil {
+ return fmt.Errorf("%s: unable to deregister fallback device %q: %w", op, name, err)
+ }
+
+ // Clear the fallback device name now we've deregistered.
+ a.fallbackName = ""
+
+ return nil
+}
+
// register can be used to register a normal audit device, it will also calculate
// and configure the success threshold required for sinks.
func (a *AuditBroker) register(name string, backend audit.Backend) error {
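
requiredSuccessThresholdSinks above decides whether the main broker must guarantee at least one successful sink write: fallback devices are skipped (they live on their own broker with a fixed threshold of 1), and a single unfiltered device is enough to demand a threshold of 1. A self-contained sketch of that decision, with the backend reduced to a plain struct:

```go
package main

import "fmt"

// device captures the two backend properties the broker inspects when working
// out the success threshold for the main (non-fallback) broker.
type device struct {
	name      string
	fallback  bool
	filtering bool
}

// requiredThreshold mirrors requiredSuccessThresholdSinks: ignore fallback
// devices, and require one guaranteed sink write as soon as any remaining
// device does not filter its events.
func requiredThreshold(devices []device) int {
	for _, d := range devices {
		if d.fallback {
			continue
		}
		if !d.filtering {
			return 1
		}
	}
	return 0
}

func main() {
	// Only filtered devices plus a fallback: the main broker cannot promise a
	// write, so the threshold stays 0 and the fallback broker catches the rest.
	fmt.Println(requiredThreshold([]device{
		{name: "file-filtered", filtering: true},
		{name: "catch-all", fallback: true},
	}))

	// An unfiltered device is present, so at least one sink must succeed.
	fmt.Println(requiredThreshold([]device{
		{name: "file-filtered", filtering: true},
		{name: "syslog"},
	}))
}
```
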
diff --git a/vault/audit_broker_test.go b/vault/audit_broker_test.go
index a7fa891fcfdb..18efaa560102 100644
--- a/vault/audit_broker_test.go
+++ b/vault/audit_broker_test.go
@@ -141,3 +141,118 @@ func TestAuditBroker_Deregister_SuccessThresholdSinks(t *testing.T) {
require.True(t, ok)
require.Equal(t, 1, res)
}
+
+// TestAuditBroker_Register_Fallback ensures we can register a fallback device.
+func TestAuditBroker_Register_Fallback(t *testing.T) {
+ t.Parallel()
+
+ l := corehelpers.NewTestLogger(t)
+ a, err := NewAuditBroker(l, true)
+ require.NoError(t, err)
+ require.NotNil(t, a)
+
+ path := "juan/"
+ fallbackBackend := testAuditBackend(t, path, map[string]string{"fallback": "true"})
+ err = a.Register(path, fallbackBackend, false)
+ require.NoError(t, err)
+ require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
+ require.Equal(t, path, a.fallbackName)
+ threshold, found := a.fallbackBroker.SuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()))
+ require.True(t, found)
+ require.Equal(t, 1, threshold)
+}
+
+// TestAuditBroker_Register_FallbackMultiple tests that trying to register more
+// than a single fallback device results in the correct error.
+func TestAuditBroker_Register_FallbackMultiple(t *testing.T) {
+ t.Parallel()
+
+ l := corehelpers.NewTestLogger(t)
+ a, err := NewAuditBroker(l, true)
+ require.NoError(t, err)
+ require.NotNil(t, a)
+
+ path1 := "juan1/"
+ fallbackBackend1 := testAuditBackend(t, path1, map[string]string{"fallback": "true"})
+ err = a.Register(path1, fallbackBackend1, false)
+ require.NoError(t, err)
+ require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
+ require.Equal(t, path1, a.fallbackName)
+
+ path2 := "juan2/"
+ fallbackBackend2 := testAuditBackend(t, path2, map[string]string{"fallback": "true"})
+ err = a.Register(path2, fallbackBackend2, false)
+ require.Error(t, err)
+ require.EqualError(t, err, "vault.(AuditBroker).Register: existing fallback device already registered: \"juan1/\"")
+ require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
+ require.Equal(t, path1, a.fallbackName)
+}
+
+// TestAuditBroker_Deregister_Fallback ensures that we can deregister a fallback
+// device successfully.
+func TestAuditBroker_Deregister_Fallback(t *testing.T) {
+ t.Parallel()
+
+ l := corehelpers.NewTestLogger(t)
+ a, err := NewAuditBroker(l, true)
+ require.NoError(t, err)
+ require.NotNil(t, a)
+
+ path := "juan/"
+ fallbackBackend := testAuditBackend(t, path, map[string]string{"fallback": "true"})
+ err = a.Register(path, fallbackBackend, false)
+ require.NoError(t, err)
+ require.True(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
+ require.Equal(t, path, a.fallbackName)
+
+ threshold, found := a.fallbackBroker.SuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()))
+ require.True(t, found)
+ require.Equal(t, 1, threshold)
+
+ err = a.Deregister(context.Background(), path)
+ require.NoError(t, err)
+ require.False(t, a.fallbackBroker.IsAnyPipelineRegistered(eventlogger.EventType(event.AuditType.String())))
+ require.Equal(t, "", a.fallbackName)
+
+ threshold, found = a.fallbackBroker.SuccessThresholdSinks(eventlogger.EventType(event.AuditType.String()))
+ require.True(t, found)
+ require.Equal(t, 0, threshold)
+}
+
+// TestAuditBroker_Deregister_Multiple ensures that we can call deregister multiple
+// times without issue when there is no matching backend registered.
+func TestAuditBroker_Deregister_Multiple(t *testing.T) {
+ t.Parallel()
+
+ l := corehelpers.NewTestLogger(t)
+ a, err := NewAuditBroker(l, true)
+ require.NoError(t, err)
+ require.NotNil(t, a)
+
+ err = a.Deregister(context.Background(), "foo")
+ require.NoError(t, err)
+
+ err = a.Deregister(context.Background(), "foo2")
+ require.NoError(t, err)
+}
+
+// TestAuditBroker_Register_MultipleFails checks for failure when we try to
+// re-register an audit backend.
+func TestAuditBroker_Register_MultipleFails(t *testing.T) {
+ t.Parallel()
+
+ l := corehelpers.NewTestLogger(t)
+ a, err := NewAuditBroker(l, true)
+ require.NoError(t, err)
+ require.NotNil(t, a)
+
+ path := "b2-no-filter"
+ noFilterBackend := testAuditBackend(t, path, map[string]string{})
+
+ err = a.Register(path, noFilterBackend, false)
+ require.NoError(t, err)
+
+ err = a.Register(path, noFilterBackend, false)
+ require.Error(t, err)
+ require.EqualError(t, err, "vault.(AuditBroker).Register: backend already registered 'b2-no-filter'")
+}
diff --git a/vault/audit_test.go b/vault/audit_test.go
index afecafaea245..87f06f4a5731 100644
--- a/vault/audit_test.go
+++ b/vault/audit_test.go
@@ -237,6 +237,69 @@ func TestCore_EnableAudit_Local(t *testing.T) {
}
}
+// TestAudit_enableAudit_fallback_invalid ensures that supplying a bad value for
+// 'fallback' in options gives us the correct error.
+func TestAudit_enableAudit_fallback_invalid(t *testing.T) {
+ entry := &MountEntry{
+ Path: "noop/",
+ Options: map[string]string{
+ "fallback": "juan",
+ },
+ }
+
+ cluster := NewTestCluster(t, nil, nil)
+ cluster.Start()
+ defer cluster.Cleanup()
+ core := cluster.Cores[0]
+ core.auditBackends["noop"] = corehelpers.NoopAuditFactory(nil)
+ err := core.enableAudit(context.Background(), entry, false)
+ require.Error(t, err)
+ require.EqualError(t, err, "unable to enable audit device 'noop/', cannot parse supplied 'fallback' setting: cannot parse '' as bool: strconv.ParseBool: parsing \"juan\": invalid syntax")
+}
+
+// TestAudit_enableAudit_fallback_two ensures trying to enable a second fallback
+// device returns the correct error.
+func TestAudit_enableAudit_fallback_two(t *testing.T) {
+ entry1 := &MountEntry{
+ Table: auditTableType,
+ Path: "noop1/",
+ Type: "noop",
+ UUID: "abcd",
+ Accessor: "noop1-abcd",
+ NamespaceID: namespace.RootNamespaceID,
+ Options: map[string]string{
+ "fallback": "TRUE",
+ },
+ namespace: namespace.RootNamespace,
+ }
+
+ entry2 := &MountEntry{
+ Table: auditTableType,
+ Path: "noop2/",
+ Type: "noop",
+ UUID: "abcd",
+ Accessor: "noop2-abcd",
+ NamespaceID: namespace.RootNamespaceID,
+ Options: map[string]string{
+ "fallback": "1",
+ },
+ namespace: namespace.RootNamespace,
+ }
+
+ cluster := NewTestCluster(t, nil, nil)
+ cluster.Start()
+ defer cluster.Cleanup()
+ core := cluster.Cores[0]
+ core.auditBackends["noop"] = corehelpers.NoopAuditFactory(nil)
+ ctx := namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace)
+ err := core.enableAudit(ctx, entry1, false)
+ require.NoError(t, err)
+
+ err = core.enableAudit(ctx, entry2, false)
+ require.Error(t, err)
+ require.EqualError(t, err, "unable to enable audit device 'noop2/', a fallback device already exists 'noop1/'")
+}
+
func TestCore_DisableAudit(t *testing.T) {
c, keys, _ := TestCoreUnsealed(t)
c.auditBackends["noop"] = corehelpers.NoopAuditFactory(nil)
diff --git a/vault/cluster/cluster.go b/vault/cluster/cluster.go
index 00967ac3c7a8..ed77acba1e72 100644
--- a/vault/cluster/cluster.go
+++ b/vault/cluster/cluster.go
@@ -270,7 +270,8 @@ func (cl *Listener) TLSConfig(ctx context.Context) (*tls.Config, error) {
}
// Run starts the tcp listeners and will accept connections until stop is
-// called. This function blocks so should be called in a goroutine.
+// called. This function does not block and will start the listeners in
+// separate goroutines.
func (cl *Listener) Run(ctx context.Context) error {
// Get our TLS config
tlsConfig, err := cl.TLSConfig(ctx)
diff --git a/vault/core.go b/vault/core.go
index 9d89d49e0cd4..c0c1f695c418 100644
--- a/vault/core.go
+++ b/vault/core.go
@@ -525,7 +525,7 @@ type Core struct {
// uiConfig contains UI configuration
uiConfig *UIConfig
- customMessageManager *uicustommessages.Manager
+ customMessageManager CustomMessagesManager
// rawEnabled indicates whether the Raw endpoint is enabled
rawEnabled bool
diff --git a/vault/custom_messages_manager.go b/vault/custom_messages_manager.go
new file mode 100644
index 000000000000..ca7c7cd3a806
--- /dev/null
+++ b/vault/custom_messages_manager.go
@@ -0,0 +1,20 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: BUSL-1.1
+
+package vault
+
+import (
+ "context"
+
+ uicustommessages "github.com/hashicorp/vault/vault/ui_custom_messages"
+)
+
+// CustomMessagesManager is the interface used by the vault package when
+// interacting with a uicustommessages.Manager instance.
+type CustomMessagesManager interface {
+ FindMessages(context.Context, uicustommessages.FindFilter) ([]uicustommessages.Message, error)
+ AddMessage(context.Context, uicustommessages.Message) (*uicustommessages.Message, error)
+ ReadMessage(context.Context, string) (*uicustommessages.Message, error)
+ UpdateMessage(context.Context, uicustommessages.Message) (*uicustommessages.Message, error)
+ DeleteMessage(context.Context, string) error
+}
diff --git a/vault/external_tests/plugin/plugin_test.go b/vault/external_tests/plugin/plugin_test.go
index e29340a17ccc..affe8fa6b888 100644
--- a/vault/external_tests/plugin/plugin_test.go
+++ b/vault/external_tests/plugin/plugin_test.go
@@ -560,6 +560,9 @@ func testSystemBackend_PluginReload(t *testing.T, reqData map[string]interface{}
if resp.Data["reload_id"] == nil {
t.Fatal("no reload_id in response")
}
+ if len(resp.Warnings) != 0 {
+ t.Fatal(resp.Warnings)
+ }
for i := 0; i < 2; i++ {
// Ensure internal backed value is reset
@@ -578,6 +581,35 @@ func testSystemBackend_PluginReload(t *testing.T, reqData map[string]interface{}
}
}
+func TestSystemBackend_PluginReload_WarningIfNoneReloaded(t *testing.T) {
+ cluster := testSystemBackendMock(t, 1, 2, logical.TypeLogical, "v5")
+ defer cluster.Cleanup()
+
+ core := cluster.Cores[0]
+ client := core.Client
+
+ for _, backendType := range []logical.BackendType{logical.TypeLogical, logical.TypeCredential} {
+ t.Run(backendType.String(), func(t *testing.T) {
+ // Perform plugin reload
+ resp, err := client.Logical().Write("sys/plugins/reload/backend", map[string]any{
+ "plugin": "does-not-exist",
+ })
+ if err != nil {
+ t.Fatalf("err: %v", err)
+ }
+ if resp == nil {
+ t.Fatalf("bad: %v", resp)
+ }
+ if resp.Data["reload_id"] == nil {
+ t.Fatal("no reload_id in response")
+ }
+ if len(resp.Warnings) == 0 {
+ t.Fatal("expected warning")
+ }
+ })
+ }
+}
+
// testSystemBackendMock returns a systemBackend with the desired number
// of mounted mock plugin backends. numMounts alternates between different
// ways of providing the plugin_name.
diff --git a/vault/logical_system.go b/vault/logical_system.go
index dcf5f0b97f0e..621595485004 100644
--- a/vault/logical_system.go
+++ b/vault/logical_system.go
@@ -50,6 +50,7 @@ import (
"github.com/hashicorp/vault/sdk/helper/wrapping"
"github.com/hashicorp/vault/sdk/logical"
"github.com/hashicorp/vault/vault/plugincatalog"
+ uicustommessages "github.com/hashicorp/vault/vault/ui_custom_messages"
"github.com/hashicorp/vault/version"
"github.com/mitchellh/mapstructure"
"golang.org/x/crypto/sha3"
@@ -142,8 +143,8 @@ func NewSystemBackend(core *Core, logger log.Logger, config *logical.BackendConf
"wrapping/pubkey",
"replication/status",
"internal/specs/openapi",
- "internal/ui/custom-messages",
- "internal/ui/custom-messages/*",
+ "internal/ui/authenticated-messages",
+ "internal/ui/unauthenticated-messages",
"internal/ui/mounts",
"internal/ui/mounts/*",
"internal/ui/namespaces",
@@ -737,11 +738,24 @@ func (b *SystemBackend) handlePluginReloadUpdate(ctx context.Context, req *logic
return logical.ErrorResponse("plugin or mounts must be provided"), nil
}
+ resp := logical.Response{
+ Data: map[string]interface{}{
+ "reload_id": req.ID,
+ },
+ }
+
if pluginName != "" {
- err := b.Core.reloadMatchingPlugin(ctx, pluginName)
+ reloaded, err := b.Core.reloadMatchingPlugin(ctx, pluginName)
if err != nil {
return nil, err
}
+ if reloaded == 0 {
+ if scope == globalScope {
+ resp.AddWarning("no plugins were reloaded locally (but they may be reloaded on other nodes)")
+ } else {
+ resp.AddWarning("no plugins were reloaded")
+ }
+ }
} else if len(pluginMounts) > 0 {
err := b.Core.reloadMatchingPluginMounts(ctx, pluginMounts)
if err != nil {
@@ -749,20 +763,14 @@ func (b *SystemBackend) handlePluginReloadUpdate(ctx context.Context, req *logic
}
}
- r := logical.Response{
- Data: map[string]interface{}{
- "reload_id": req.ID,
- },
- }
-
if scope == globalScope {
err := handleGlobalPluginReload(ctx, b.Core, req.ID, pluginName, pluginMounts)
if err != nil {
return nil, err
}
- return logical.RespondWithStatusCode(&r, req, http.StatusAccepted)
+ return logical.RespondWithStatusCode(&resp, req, http.StatusAccepted)
}
- return &r, nil
+ return &resp, nil
}
func (b *SystemBackend) handlePluginRuntimeCatalogUpdate(ctx context.Context, _ *logical.Request, d *framework.FieldData) (*logical.Response, error) {
@@ -4428,6 +4436,87 @@ func hasMountAccess(ctx context.Context, acl *ACL, path string) bool {
return aclCapabilitiesGiven
}
+// pathInternalUIAuthenticatedMessages finds all of the active messages whose
+// Authenticated property is set to true in the current namespace (based on the
+// provided context.Context) or in any ancestor namespace all the way up to the
+// root namespace.
+func (b *SystemBackend) pathInternalUIAuthenticatedMessages(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
+ // Make sure that the request includes a Vault token.
+ var tokenEntry *logical.TokenEntry
+ if token := req.ClientToken; token != "" {
+ tokenEntry, _ = b.Core.LookupToken(ctx, token)
+ }
+
+ if tokenEntry == nil {
+ return logical.ListResponseWithInfo([]string{}, map[string]any{}), nil
+ }
+
+ filter := uicustommessages.FindFilter{
+ IncludeAncestors: true,
+ }
+ filter.Active(true)
+ filter.Authenticated(true)
+
+ return b.pathInternalUICustomMessagesCommon(ctx, filter)
+}
+
+// pathInternalUIUnauthenticatedMessages finds all of the active messages whose
+// Authenticated property is set to false in the current namespace (based on the
+// provided context.Context) or in any ancestor namespace all the way up to the
+// root namespace.
+func (b *SystemBackend) pathInternalUIUnauthenticatedMessages(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
+ filter := uicustommessages.FindFilter{
+ IncludeAncestors: true,
+ }
+ filter.Active(true)
+ filter.Authenticated(false)
+
+ return b.pathInternalUICustomMessagesCommon(ctx, filter)
+}
+
+// pathInternalUICustomMessagesCommon takes care of finding the custom messages
+// that meet the criteria set in the provided uicustommessages.FindFilter.
+func (b *SystemBackend) pathInternalUICustomMessagesCommon(ctx context.Context, filter uicustommessages.FindFilter) (*logical.Response, error) {
+ messages, err := b.Core.customMessageManager.FindMessages(ctx, filter)
+ if err != nil {
+ return logical.ErrorResponse("failed to retrieve custom messages: %s", err), nil
+ }
+
+ keys := []string{}
+ keyInfo := map[string]any{}
+
+ for _, message := range messages {
+ keys = append(keys, message.ID)
+
+ var endTimeFormatted any
+
+ if message.EndTime != nil {
+ endTimeFormatted = message.EndTime.Format(time.RFC3339Nano)
+ }
+
+ var linkFormatted map[string]string = nil
+
+ if message.Link != nil {
+ linkFormatted = make(map[string]string)
+
+ linkFormatted[message.Link.Title] = message.Link.Href
+ }
+
+ keyInfo[message.ID] = map[string]any{
+ "title": message.Title,
+ "message": message.Message,
+ "authenticated": message.Authenticated,
+ "type": message.Type,
+ "start_time": message.StartTime.Format(time.RFC3339Nano),
+ "end_time": endTimeFormatted,
+ "link": linkFormatted,
+ "options": message.Options,
+ }
+ }
+
+ return logical.ListResponseWithInfo(keys, keyInfo), nil
+}
+
func (b *SystemBackend) pathInternalUIMountsRead(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
ns, err := namespace.FromContext(ctx)
if err != nil {
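
pathInternalUICustomMessagesCommon above returns a list-style response: keys holds the message IDs and key_info maps each ID to its display fields. A sketch of a single key_info entry shaped the way that handler builds it (the values themselves are made up):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	start := time.Now().Add(-time.Hour)

	// One key_info entry, keyed by the message ID. end_time stays nil for a
	// message without an expiry, and link collapses to a single title->href pair.
	entry := map[string]any{
		"title":         "Scheduled maintenance",
		"message":       "Vault will be unavailable on Saturday.",
		"authenticated": false,
		"type":          "banner",
		"start_time":    start.Format(time.RFC3339Nano),
		"end_time":      nil,
		"link":          map[string]string{"Details": "https://example.com/maintenance"},
		"options":       map[string]any{},
	}
	fmt.Println(entry)
}
```
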
diff --git a/vault/logical_system_custom_messages.go b/vault/logical_system_custom_messages.go
index 6bba7a0db802..cddf33e5735a 100644
--- a/vault/logical_system_custom_messages.go
+++ b/vault/logical_system_custom_messages.go
@@ -422,28 +422,24 @@ func (b *SystemBackend) handleCreateCustomMessages(ctx context.Context, req *log
return logical.ErrorResponse(err.Error()), nil
}
+ if len(linkMap) > 1 {
+ return logical.ErrorResponse("invalid number of elements in link parameter value; only a single element can be provided"), nil
+ }
+
var link *uicustommessages.MessageLink
if linkMap != nil {
link = &uicustommessages.MessageLink{}
- linkTitle, ok := linkMap["title"]
- if !ok {
- return logical.ErrorResponse("missing title in link parameter value"), nil
- }
-
- link.Title, ok = linkTitle.(string)
- if !ok {
- return logical.ErrorResponse("invalid title value in link parameter value"), nil
- }
+ for k, v := range linkMap {
+ href, ok := v.(string)
+ if !ok {
+ return logical.ErrorResponse(fmt.Sprintf("invalid url for %q key in link parameter value", k)), nil
+ }
- linkHref, ok := linkMap["href"]
- if !ok {
- return logical.ErrorResponse("missing href in link parameter value"), nil
- }
+ link.Title = k
+ link.Href = href
- link.Href, ok = linkHref.(string)
- if !ok {
- return logical.ErrorResponse("invalid href value in link parameter value"), nil
+ break
}
}
@@ -509,6 +505,13 @@ func (b *SystemBackend) handleReadCustomMessage(ctx context.Context, req *logica
endTimeResponse = message.EndTime.Format(time.RFC3339Nano)
}
+ var linkResponse map[string]string = nil
+ if message.Link != nil {
+ linkResponse = make(map[string]string)
+
+ linkResponse[message.Link.Title] = message.Link.Href
+ }
+
return &logical.Response{
Data: map[string]any{
"id": id,
@@ -517,7 +520,7 @@ func (b *SystemBackend) handleReadCustomMessage(ctx context.Context, req *logica
"message": message.Message,
"start_time": message.StartTime.Format(time.RFC3339Nano),
"end_time": endTimeResponse,
- "link": message.Link,
+ "link": linkResponse,
"options": message.Options,
"active": message.Active(),
"title": message.Title,
@@ -558,28 +561,24 @@ func (b *SystemBackend) handleUpdateCustomMessage(ctx context.Context, req *logi
return logical.ErrorResponse(err.Error()), nil
}
+ if len(linkMap) > 1 {
+ return logical.ErrorResponse("invalid number of elements in link parameter value; only a single element can be provided"), nil
+ }
+
var link *uicustommessages.MessageLink
if linkMap != nil {
link = &uicustommessages.MessageLink{}
- linkTitle, ok := linkMap["title"]
- if !ok {
- return logical.ErrorResponse("missing title in link parameter value"), nil
- }
-
- link.Title, ok = linkTitle.(string)
- if !ok {
- return logical.ErrorResponse("invalid title value in link parameter value"), nil
- }
+ for k, v := range linkMap {
+ href, ok := v.(string)
+ if !ok {
+ return logical.ErrorResponse("invalid url for %q key in link parameter value", k), nil
+ }
- linkHref, ok := linkMap["href"]
- if !ok {
- return logical.ErrorResponse("missing href in link parameter value"), nil
- }
+ link.Title = k
+ link.Href = href
- link.Href, ok = linkHref.(string)
- if !ok {
- return logical.ErrorResponse("invalid href value in link parameter value"), nil
+ break
}
}
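
Both the create and update handlers above now treat link as a single-element map whose key is the display title and whose value is the URL, rejecting anything larger before converting it into a MessageLink. A minimal sketch of that conversion, with MessageLink stood in by a local struct:

```go
package main

import (
	"errors"
	"fmt"
)

// messageLink stands in for uicustommessages.MessageLink.
type messageLink struct {
	Title string
	Href  string
}

// parseLink converts the single-element map accepted by the handlers into a
// link, rejecting multi-element maps and non-string URL values.
func parseLink(linkMap map[string]any) (*messageLink, error) {
	if linkMap == nil {
		return nil, nil
	}
	if len(linkMap) > 1 {
		return nil, errors.New("only a single element can be provided in the link parameter value")
	}
	for title, v := range linkMap {
		href, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("invalid url for %q key in link parameter value", title)
		}
		return &messageLink{Title: title, Href: href}, nil
	}
	return nil, nil
}

func main() {
	link, err := parseLink(map[string]any{"Details": "https://server.com/details"})
	fmt.Println(link, err) // &{Details https://server.com/details} <nil>
}
```
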
diff --git a/vault/logical_system_custom_messages_test.go b/vault/logical_system_custom_messages_test.go
index 234c0e8172c1..d3dadc57ed2e 100644
--- a/vault/logical_system_custom_messages_test.go
+++ b/vault/logical_system_custom_messages_test.go
@@ -302,6 +302,25 @@ func TestHandleCreateCustomMessage(t *testing.T) {
},
errorExpected: true,
},
+ {
+ name: "link-parameter-href-invalid",
+ fieldRawUpdate: map[string]any{
+ "link": map[string]any{
+ "click here": []int{},
+ },
+ },
+ errorExpected: true,
+ },
+ {
+ name: "link-parameter-multiple-links",
+ fieldRawUpdate: map[string]any{
+ "link": map[string]any{
+ "click here": "http://example.org",
+ "click here 2": "http://ping.net",
+ },
+ },
+ errorExpected: true,
+ },
{
name: "options-parameter-invalid",
fieldRawUpdate: map[string]any{
@@ -321,9 +340,8 @@ func TestHandleCreateCustomMessage(t *testing.T) {
"options": map[string]any{
"color": "red",
},
- "link": map[string]any{
- "title": "Details",
- "href": "https://server.com/details",
+ "link": map[string]string{
+ "Details": "https://server.com/details",
},
},
},
@@ -373,14 +391,24 @@ func TestHandleCreateCustomMessage(t *testing.T) {
assert.Contains(t, resp.Data, "start_time", testcase.name)
assert.Contains(t, resp.Data, "end_time", testcase.name)
assert.Contains(t, resp.Data, "id", testcase.name)
- if _, ok := testcase.fieldRawUpdate["authenticated"]; !ok {
- assert.True(t, resp.Data["authenticated"].(bool), testcase.name)
- }
+ assert.Contains(t, resp.Data, "options", testcase.name)
+ assert.Contains(t, resp.Data, "link", testcase.name)
+ _, ok := testcase.fieldRawUpdate["authenticated"]
+ assert.Equal(t, !ok, resp.Data["authenticated"].(bool), testcase.name)
if _, ok := testcase.fieldRawUpdate["type"]; !ok {
assert.Equal(t, resp.Data["type"], uicustommessages.BannerMessageType, testcase.name)
+ } else {
+ assert.Equal(t, resp.Data["type"], uicustommessages.ModalMessageType, testcase.name)
}
if _, ok := testcase.fieldRawUpdate["end_time"]; !ok {
assert.Nil(t, resp.Data["end_time"], testcase.name)
+ } else {
+ assert.NotNil(t, resp.Data["end_time"], testcase.name)
+ }
+ if _, ok := testcase.fieldRawUpdate["link"]; !ok {
+ assert.Nil(t, resp.Data["link"], testcase.name)
+ } else {
+ assert.NotNil(t, resp.Data["link"], testcase.name)
}
}
}
@@ -428,7 +456,10 @@ func TestHandleReadCustomMessage(t *testing.T) {
StartTime: earlier,
EndTime: &later,
Options: make(map[string]any),
- Link: nil,
+ Link: &uicustommessages.MessageLink{
+ Title: "Click Here",
+ Href: "www.example.com",
+ },
}
message, err := backend.Core.customMessageManager.AddMessage(nsCtx, *message)
@@ -457,9 +488,12 @@ func TestHandleReadCustomMessage(t *testing.T) {
assert.Equal(t, resp.Data["active"], true)
assert.Contains(t, resp.Data, "end_time")
assert.NotNil(t, resp.Data["end_time"])
+ assert.Contains(t, resp.Data, "link")
+ assert.Equal(t, 1, len(resp.Data["link"].(map[string]string)))
// Change the message so that it doesn't have an end time.
message.EndTime = nil
+ message.Link = nil
message, err = backend.Core.customMessageManager.UpdateMessage(nsCtx, *message)
require.NoError(t, err)
require.NotNil(t, message)
@@ -474,6 +508,8 @@ func TestHandleReadCustomMessage(t *testing.T) {
assert.Equal(t, resp.Data["active"], true)
assert.Contains(t, resp.Data, "end_time")
assert.Nil(t, resp.Data["end_time"])
+ assert.Contains(t, resp.Data, "link")
+ assert.Nil(t, resp.Data["link"])
// Check that there's an error when trying to read a non-existant custom
// message.
@@ -538,7 +574,7 @@ func TestHandleUpdateCustomMessage(t *testing.T) {
endTime := now.Add(time.Hour).Format(time.RFC3339Nano)
startTime2 := now.UTC().Add(-2 * time.Hour).Format(time.RFC3339Nano)
- storageEntryValue := fmt.Sprintf(`{"messages":{"xyz":{"id":"xyz","title":"title","message":"message","authenticated":true,"type":"%s","start_time":"%s","end_time":"%s","link":{},"options":{}}}}`, uicustommessages.ModalMessageType, startTime, endTime)
+ storageEntryValue := fmt.Sprintf(`{"messages":{"xyz":{"id":"xyz","title":"title","message":"message","authenticated":true,"type":"%s","start_time":"%s","end_time":"%s","link":null,"options":null}}}`, uicustommessages.ModalMessageType, startTime, endTime)
storageEntry := &logical.StorageEntry{
Key: "sys/config/ui/custom-messages",
@@ -595,8 +631,7 @@ func TestHandleUpdateCustomMessage(t *testing.T) {
"start_time": startTime,
"end_time": endTime,
"link": map[string]any{
- "title": "link-title",
- "href": "http://link.url.com",
+ "link-title": "http://link.url.com",
},
"options": map[string]any{},
},
@@ -704,6 +739,23 @@ func TestHandleUpdateCustomMessage(t *testing.T) {
"link": "link",
},
},
+ {
+ name: "link-parameter-url-invalid",
+ fieldRawUpdate: map[string]any{
+ "link": map[string]any{
+ "my-link": []int{},
+ },
+ },
+ },
+ {
+ name: "link-parameter-multiple-links",
+ fieldRawUpdate: map[string]any{
+ "link": map[string]any{
+ "click here": "http://example.org",
+ "click here 2": "http://ping.net",
+ },
+ },
+ },
{
name: "options-parameter-invalid",
fieldRawUpdate: map[string]any{
diff --git a/vault/logical_system_paths.go b/vault/logical_system_paths.go
index e88369d647fa..c12b83e6ecaf 100644
--- a/vault/logical_system_paths.go
+++ b/vault/logical_system_paths.go
@@ -2435,6 +2435,37 @@ func (b *SystemBackend) internalPaths() []*framework.Path {
HelpSynopsis: "Generate an OpenAPI 3 document of all mounted paths.",
},
+ {
+ Pattern: "internal/ui/authenticated-messages",
+
+ DisplayAttrs: &framework.DisplayAttributes{
+ OperationPrefix: "internal-ui",
+ OperationVerb: "read",
+ OperationSuffix: "authenticated-active-custom-messages",
+ },
+
+ Operations: map[logical.Operation]framework.OperationHandler{
+ logical.ReadOperation: &framework.PathOperation{
+ Callback: b.pathInternalUIAuthenticatedMessages,
+ Summary: "Retrieves Active post-login Custom Messages",
+ Responses: map[int][]framework.Response{
+ http.StatusOK: {{
+ Description: "OK",
+ Fields: map[string]*framework.FieldSchema{
+ "keys": {
+ Type: framework.TypeStringSlice,
+ Required: true,
+ },
+ "key_info": {
+ Type: framework.TypeMap,
+ Required: true,
+ },
+ },
+ }},
+ },
+ },
+ },
+ },
{
Pattern: "internal/ui/feature-flags",
@@ -2653,6 +2684,37 @@ func (b *SystemBackend) internalPaths() []*framework.Path {
HelpSynopsis: strings.TrimSpace(sysHelp["internal-ui-resultant-acl"][0]),
HelpDescription: strings.TrimSpace(sysHelp["internal-ui-resultant-acl"][1]),
},
+ {
+ Pattern: "internal/ui/unauthenticated-messages",
+
+ DisplayAttrs: &framework.DisplayAttributes{
+ OperationPrefix: "internal-ui",
+ OperationVerb: "read",
+ OperationSuffix: "unauthenticated-active-custom-messages",
+ },
+
+ Operations: map[logical.Operation]framework.OperationHandler{
+ logical.ReadOperation: &framework.PathOperation{
+ Callback: b.pathInternalUIUnauthenticatedMessages,
+ Summary: "Retrieves Active pre-login Custom Messages",
+ Responses: map[int][]framework.Response{
+ http.StatusOK: {{
+ Description: "OK",
+ Fields: map[string]*framework.FieldSchema{
+ "keys": {
+ Type: framework.TypeStringSlice,
+ Required: true,
+ },
+ "key_info": {
+ Type: framework.TypeMap,
+ Required: true,
+ },
+ },
+ }},
+ },
+ },
+ },
+ },
{
Pattern: "internal/ui/version",
DisplayAttrs: &framework.DisplayAttributes{
diff --git a/vault/logical_system_test.go b/vault/logical_system_test.go
index 9b28f1bfc301..3f9ead9a9533 100644
--- a/vault/logical_system_test.go
+++ b/vault/logical_system_test.go
@@ -44,8 +44,10 @@ import (
"github.com/hashicorp/vault/sdk/logical"
"github.com/hashicorp/vault/vault/plugincatalog"
"github.com/hashicorp/vault/vault/seal"
+ uicustommessages "github.com/hashicorp/vault/vault/ui_custom_messages"
"github.com/hashicorp/vault/version"
"github.com/mitchellh/mapstructure"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -6383,3 +6385,166 @@ func TestSystemBackend_pluginRuntime_CannotDeleteRuntimeWithReferencingPlugins(t
t.Fatalf("err: %v %v", err, resp.Error())
}
}
+
+type testingCustomMessageManager struct {
+ findFilters []uicustommessages.FindFilter
+}
+
+func (m *testingCustomMessageManager) FindMessages(_ context.Context, filter uicustommessages.FindFilter) ([]uicustommessages.Message, error) {
+ m.findFilters = append(m.findFilters, filter)
+
+ return []uicustommessages.Message{}, nil
+}
+
+func (m *testingCustomMessageManager) AddMessage(_ context.Context, _ uicustommessages.Message) (*uicustommessages.Message, error) {
+ return nil, nil
+}
+
+func (m *testingCustomMessageManager) ReadMessage(_ context.Context, _ string) (*uicustommessages.Message, error) {
+ return nil, nil
+}
+
+func (m *testingCustomMessageManager) UpdateMessage(_ context.Context, _ uicustommessages.Message) (*uicustommessages.Message, error) {
+ return nil, nil
+}
+
+func (m *testingCustomMessageManager) DeleteMessage(_ context.Context, _ string) error {
+ return nil
+}
+
+// TestPathInternalUIUnauthenticatedMessages verifies the correct behaviour of
+// the pathInternalUIUnauthenticatedMessages method, which is to call the
+// FindMessages method of the Core.customMessageManager field with a FindFilter
+// that has the IncludeAncestors field set to true, the active field pointing to
+// a true value, and the authenticated field pointing to a false value.
+func TestPathInternalUIUnauthenticatedMessages(t *testing.T) {
+ testingCMM := &testingCustomMessageManager{}
+ backend := &SystemBackend{
+ Core: &Core{
+ customMessageManager: testingCMM,
+ },
+ }
+
+ resp, err := backend.pathInternalUIUnauthenticatedMessages(context.Background(), &logical.Request{}, &framework.FieldData{})
+ assert.NoError(t, err)
+ assert.NotNil(t, resp)
+
+ expectedFilter := uicustommessages.FindFilter{IncludeAncestors: true}
+ expectedFilter.Active(true)
+ expectedFilter.Authenticated(false)
+
+ assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
+}
+
+// TestPathInternalUIAuthenticatedMessages verifies the correct behaviour of the
+// pathInternalUIAuthenticatedMessages method, which is to first check if the
+// request has a valid token included, then call the FindMessages method of the
+// Core.customMessageManager field with a FindFilter that has the
+// IncludeAncestors field set to true, the active field pointing to a true
+// value, and the authenticated field pointing to a true value. If the request
+// does not have a valid token, the method behaves as if no messages meet the
+// criteria.
+func TestPathInternalUIAuthenticatedMessages(t *testing.T) {
+ testingCMM := &testingCustomMessageManager{}
+ testCore := TestCoreRaw(t)
+ _, _, token := testCoreUnsealed(t, testCore)
+ testCore.customMessageManager = testingCMM
+
+ backend := &SystemBackend{
+ Core: testCore,
+ }
+
+ nsCtx := namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace)
+
+ // Check with a request that includes a valid token
+ resp, err := backend.pathInternalUIAuthenticatedMessages(nsCtx, &logical.Request{
+ ClientToken: token,
+ }, &framework.FieldData{})
+ assert.NoError(t, err)
+ assert.NotNil(t, resp)
+
+ expectedFilter := uicustommessages.FindFilter{
+ IncludeAncestors: true,
+ }
+ expectedFilter.Active(true)
+ expectedFilter.Authenticated(true)
+
+ assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
+
+ // Now, check with a request that has no token: expecting no new filter
+ // in the testingCMM.
+ resp, err = backend.pathInternalUIAuthenticatedMessages(nsCtx, &logical.Request{}, &framework.FieldData{})
+ assert.NoError(t, err)
+ assert.NotNil(t, resp)
+ assert.NotContains(t, resp.Data, "keys")
+ assert.NotContains(t, resp.Data, "key_info")
+
+ assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
+
+ // Finally, check with an invalid token in the request: again, expecting no
+ // new filter in the testingCMM.
+ resp, err = backend.pathInternalUIAuthenticatedMessages(nsCtx, &logical.Request{ClientToken: "invalid"}, &framework.FieldData{})
+ assert.NoError(t, err)
+ assert.NotNil(t, resp)
+ assert.NotContains(t, resp.Data, "keys")
+ assert.NotContains(t, resp.Data, "key_info")
+
+ assert.ElementsMatch(t, testingCMM.findFilters, []uicustommessages.FindFilter{expectedFilter})
+}
+
+// TestPathInternalUICustomMessagesCommon verifies the correct behaviour of the
+// (*SystemBackend).pathInternalUICustomMessagesCommon method.
+func TestPathInternalUICustomMessagesCommon(t *testing.T) {
+ var storage logical.Storage = &testingStorage{getFails: true}
+ testingCMM := uicustommessages.NewManager(storage)
+ backend := &SystemBackend{
+ Core: &Core{
+ customMessageManager: testingCMM,
+ },
+ }
+
+ // First, check that when an error occurs in the FindMessages method, it's
+ // handled correctly.
+ filter := uicustommessages.FindFilter{
+ IncludeAncestors: true,
+ }
+ filter.Active(true)
+ filter.Authenticated(false)
+
+ resp, err := backend.pathInternalUICustomMessagesCommon(context.Background(), filter)
+ assert.NoError(t, err)
+ assert.NotNil(t, resp)
+ assert.Contains(t, resp.Data, "error")
+ assert.Contains(t, resp.Data["error"], "failed to retrieve custom messages")
+
+ // Next, check that when no error occurs and messages are returned by
+ // FindMessages, they are correctly translated.
+ storage = &logical.InmemStorage{}
+ backend.Core.customMessageManager = uicustommessages.NewManager(storage)
+
+ // Load some messages for the root namespace and a testNS namespace.
+ startTime := time.Now().Add(-1 * time.Hour).Format(time.RFC3339Nano)
+ endTime := time.Now().Add(time.Hour).Format(time.RFC3339Nano)
+
+ messagesTemplate := `{"messages":{"%[1]d01":{"id":"%[1]d01","title":"Title-%[1]d01","message":"Message of Title-%[1]d01","authenticated":false,"type":"banner","start_time":"%[2]s"},"%[1]d02":{"id":"%[1]d02","title":"Title-%[1]d02","message":"Message of Title-%[1]d02","authenticated":false,"type":"modal","start_time":"%[2]s","end_time":"%[3]s"},"%[1]d03":{"id":"%[1]d03","title":"Title-%[1]d03","message":"Message of Title-%[1]d03","authenticated":false,"type":"banner","start_time":"%[2]s","link":{"Link":"www.example.com"}}}}`
+
+ cmStorageEntry := &logical.StorageEntry{
+ Key: "sys/config/ui/custom-messages",
+ Value: []byte(fmt.Sprintf(messagesTemplate, 0, startTime, endTime)),
+ }
+ storage.Put(context.Background(), cmStorageEntry)
+
+ cmStorageEntry = &logical.StorageEntry{
+ Key: "namespaces/testNS/sys/config/ui/custom-messages",
+ Value: []byte(fmt.Sprintf(messagesTemplate, 1, startTime, endTime)),
+ }
+ storage.Put(context.Background(), cmStorageEntry)
+
+ resp, err = backend.pathInternalUICustomMessagesCommon(namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace), filter)
+ assert.NoError(t, err)
+ assert.NotNil(t, resp)
+ assert.Contains(t, resp.Data, "keys")
+ assert.Equal(t, 3, len(resp.Data["keys"].([]string)))
+ assert.Contains(t, resp.Data, "key_info")
+ assert.Equal(t, 3, len(resp.Data["key_info"].(map[string]any)))
+}
diff --git a/vault/mount.go b/vault/mount.go
index 663409b4c128..976bfb0bf023 100644
--- a/vault/mount.go
+++ b/vault/mount.go
@@ -322,7 +322,7 @@ const mountStateUnmounting = "unmounting"
// MountEntry is used to represent a mount table entry
type MountEntry struct {
Table string `json:"table"` // The table it belongs to
- Path string `json:"path"` // Mount Path
+ Path string `json:"path"` // Mount Path, as provided in the mount API call but with a trailing slash, i.e. no auth/ or namespace prefix.
Type string `json:"type"` // Logical backend Type. NB: This is the plugin name, e.g. my-vault-plugin, NOT plugin type (e.g. auth).
Description string `json:"description"` // User-provided description
UUID string `json:"uuid"` // Barrier view UUID
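As a hedged illustration of the documented Path format (values are hypothetical, not taken from this patch), two entries might look like this inside package vault:

```go
// Hypothetical illustration of how Path is stored, per the comment above:
// a KV engine enabled at "secret" and a userpass method enabled at "userpass".
kvEntry := MountEntry{Path: "secret/", Type: "kv"}                // trailing slash, no namespace prefix
userpassEntry := MountEntry{Path: "userpass/", Type: "userpass"}  // no "auth/" prefix; that comes from the table
_, _ = kvEntry, userpassEntry
```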
diff --git a/vault/plugin_reload.go b/vault/plugin_reload.go
index 7fa6e936e60e..938c47eb34bf 100644
--- a/vault/plugin_reload.go
+++ b/vault/plugin_reload.go
@@ -70,10 +70,10 @@ func (c *Core) reloadMatchingPluginMounts(ctx context.Context, mounts []string)
return errors
}
-// reloadPlugin reloads all mounted backends that are of
-// plugin pluginName (name of the plugin as registered in
-// the plugin catalog).
-func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) error {
+// reloadMatchingPlugin reloads all mounted backends that are named pluginName
+// (name of the plugin as registered in the plugin catalog). It returns the
+// number of plugins that were reloaded and an error if any.
+func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) (reloaded int, err error) {
c.mountsLock.RLock()
defer c.mountsLock.RUnlock()
c.authLock.RLock()
@@ -81,25 +81,49 @@ func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) erro
ns, err := namespace.FromContext(ctx)
if err != nil {
- return err
+ return reloaded, err
}
- // Filter mount entries that only matches the plugin name
for _, entry := range c.mounts.Entries {
// We dont reload mounts that are not in the same namespace
if ns.ID != entry.Namespace().ID {
continue
}
+
if entry.Type == pluginName || (entry.Type == "plugin" && entry.Config.PluginName == pluginName) {
err := c.reloadBackendCommon(ctx, entry, false)
if err != nil {
- return err
+ return reloaded, err
+ }
+ reloaded++
+ c.logger.Info("successfully reloaded plugin", "plugin", pluginName, "namespace", entry.Namespace(), "path", entry.Path, "version", entry.Version)
+ } else if entry.Type == "database" {
+ // The combined database plugin is itself a secrets engine, but
+ // knowledge of whether a database plugin is in use within a particular
+ // mount is internal to the combined database plugin's storage, so
+ // we delegate the reload request via an internally routed request.
+ req := &logical.Request{
+ Operation: logical.UpdateOperation,
+ Path: entry.Path + "reload/" + pluginName,
+ }
+ resp, err := c.router.Route(ctx, req)
+ if err != nil {
+ return reloaded, err
+ }
+ if resp == nil {
+ return reloaded, fmt.Errorf("failed to reload %q database plugin(s) mounted under %s", pluginName, entry.Path)
+ }
+ if resp.IsError() {
+ return reloaded, fmt.Errorf("failed to reload %q database plugin(s) mounted under %s: %s", pluginName, entry.Path, resp.Error())
+ }
+
+ if count, ok := resp.Data["count"].(int); ok && count > 0 {
+ c.logger.Info("successfully reloaded database plugin(s)", "plugin", pluginName, "namespace", entry.Namespace(), "path", entry.Path, "connections", resp.Data["connections"])
+ reloaded += count
}
- c.logger.Info("successfully reloaded plugin", "plugin", pluginName, "path", entry.Path, "version", entry.Version)
}
}
- // Filter auth mount entries that ony matches the plugin name
for _, entry := range c.auth.Entries {
// We dont reload mounts that are not in the same namespace
if ns.ID != entry.Namespace().ID {
@@ -109,13 +133,14 @@ func (c *Core) reloadMatchingPlugin(ctx context.Context, pluginName string) erro
if entry.Type == pluginName || (entry.Type == "plugin" && entry.Config.PluginName == pluginName) {
err := c.reloadBackendCommon(ctx, entry, true)
if err != nil {
- return err
+ return reloaded, err
}
+ reloaded++
c.logger.Info("successfully reloaded plugin", "plugin", entry.Accessor, "path", entry.Path, "version", entry.Version)
}
}
- return nil
+ return reloaded, nil
}
// reloadBackendCommon is a generic method to reload a backend provided a
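As a hedged sketch of the new signature in use, a caller could use the count to distinguish a no-op reload from a successful one (hypothetical snippet; `c`, `ctx`, and `pluginName` are assumed to be in scope on Core):

```go
// Hypothetical usage of the (count, error) return from reloadMatchingPlugin.
reloaded, err := c.reloadMatchingPlugin(ctx, pluginName)
if err != nil {
	return err
}
if reloaded == 0 {
	// No mounts matched the plugin name; report it rather than silently succeeding.
	c.logger.Warn("no mounts reloaded for plugin", "plugin", pluginName)
}
```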
diff --git a/vault/ui_custom_messages/manager.go b/vault/ui_custom_messages/manager.go
index 105d41a1ac18..47439e620b47 100644
--- a/vault/ui_custom_messages/manager.go
+++ b/vault/ui_custom_messages/manager.go
@@ -26,6 +26,12 @@ const (
MaximumMessageCountPerNamespace int = 100
)
+// nsManager is the NamespaceManager instance used to determine the set of
+// Namespaces to consider when retrieving active Custom Messages. This
+// variable is re-assigned to point to a real NamespaceManager in the
+// enterprise edition.
+var nsManager NamespaceManager = &CommunityEditionNamespaceManager{}
+
// Manager is a struct that provides methods to manage messages stored in a
// logical.Storage.
type Manager struct {
@@ -223,10 +229,13 @@ func getNamespacesToSearch(ctx context.Context, filters FindFilter) ([]*namespac
// Add the current namespace based on the context.Context to nsList.
nsList = append(nsList, ns)
- //if filters.IncludeAncestors {
- // Add the parent, grand-parent, etc... namespaces all the way back up
- // to the root namespace to nsList.
- //}
+ if filters.IncludeAncestors {
+ parentNs := nsManager.GetParentNamespace(ns.Path)
+ for ; parentNs.ID != ns.ID; parentNs = nsManager.GetParentNamespace(ns.Path) {
+ ns = parentNs
+ nsList = append(nsList, ns)
+ }
+ }
return nsList, nil
}
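For context, a minimal sketch of how a downstream (e.g. enterprise) build might re-assign `nsManager`, assuming a build-tagged file in this package; the type name and build tag are hypothetical, and the real enterprise implementation is not part of this patch:

```go
//go:build vault_enterprise

package uicustommessages

import "github.com/hashicorp/vault/helper/namespace"

// entNamespaceManager is a hypothetical manager that would resolve real parent
// namespaces from the namespace store.
type entNamespaceManager struct{}

func (m *entNamespaceManager) GetParentNamespace(path string) *namespace.Namespace {
	// Real logic would look the parent up; this placeholder returns root.
	return namespace.RootNamespace
}

func init() {
	// Swap the community placeholder for the richer implementation.
	nsManager = &entNamespaceManager{}
}
```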
diff --git a/vault/ui_custom_messages/manager_test.go b/vault/ui_custom_messages/manager_test.go
index 0774fa7ae0d6..e0d8e43ddf9d 100644
--- a/vault/ui_custom_messages/manager_test.go
+++ b/vault/ui_custom_messages/manager_test.go
@@ -220,9 +220,41 @@ func TestGetNamespacesToSearch(t *testing.T) {
list, err = getNamespacesToSearch(namespace.ContextWithNamespace(context.Background(), namespace.RootNamespace), FindFilter{})
assert.NoError(t, err)
- assert.NotNil(t, list)
- assert.Equal(t, 1, len(list))
+ assert.Len(t, list, 1)
assert.Equal(t, namespace.RootNamespace, list[0])
+
+ // Verify with nsManager set to an instance of testNamespaceManager to
+ // ensure that it is used to calculate the list of namespaces.
+ currentNsManager := nsManager
+ defer func() {
+ nsManager = currentNsManager
+ }()
+
+ nsManager = &testNamespaceManager{
+ results: []namespace.Namespace{
+ {
+ ID: "ccc",
+ Path: "c/",
+ },
+ {
+ ID: "bbb",
+ Path: "b/",
+ },
+ {
+ ID: "aaa",
+ Path: "a/",
+ },
+ },
+ }
+
+ list, err = getNamespacesToSearch(namespace.ContextWithNamespace(context.Background(), &namespace.Namespace{ID: "ddd", Path: "d/"}), FindFilter{IncludeAncestors: true})
+ assert.NoError(t, err)
+ assert.Len(t, list, 5)
+ assert.Equal(t, list[0].Path, "d/")
+ assert.Equal(t, list[1].Path, "c/")
+ assert.Equal(t, list[2].Path, "b/")
+ assert.Equal(t, list[3].Path, "a/")
+ assert.Equal(t, list[4].Path, "")
}
// TestStorageKeyForNamespace verifies that the storageKeyForNamespace function
@@ -633,3 +665,24 @@ func (s *testingStorage) Put(_ context.Context, _ *logical.StorageEntry) error {
return nil
}
+
+// testNamespaceManager is a peculiar type of NamespaceManager that can be
+// instantiated with the results that successive calls to its GetParentNamespace
+// method will return.
+type testNamespaceManager struct {
+ results []namespace.Namespace
+}
+
+// GetParentNamespace effectively pops namespaces from the results field in the
+// receiver testNamespaceManager struct and returns them. Once all namespaces
+// have been returned, it returns namespace.RootNamespace.
+func (n *testNamespaceManager) GetParentNamespace(_ string) *namespace.Namespace {
+ if len(n.results) == 0 {
+ return namespace.RootNamespace
+ }
+
+ ns := n.results[0]
+ n.results = n.results[1:]
+
+ return &ns
+}
diff --git a/vault/ui_custom_messages/namespace.go b/vault/ui_custom_messages/namespace.go
new file mode 100644
index 000000000000..57af486dab43
--- /dev/null
+++ b/vault/ui_custom_messages/namespace.go
@@ -0,0 +1,24 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: BUSL-1.1
+
+package uicustommessages
+
+import "github.com/hashicorp/vault/helper/namespace"
+
+// NamespaceManager is the interface needed of a NamespaceManager by this
+// package. This interface allows setting a dummy NamespaceManager in the
+// community edition that can be replaced with the real
+// namespace.NamespaceManager in the enterprise edition.
+type NamespaceManager interface {
+ GetParentNamespace(string) *namespace.Namespace
+}
+
+// CommunityEditionNamespaceManager is a struct that implements the
+// NamespaceManager interface. This struct is used as a placeholder in the
+// community edition.
+type CommunityEditionNamespaceManager struct{}
+
+// GetParentNamespace always returns namespace.RootNamespace.
+func (n *CommunityEditionNamespaceManager) GetParentNamespace(_ string) *namespace.Namespace {
+ return namespace.RootNamespace
+}
diff --git a/vault/ui_custom_messages/namespace_test.go b/vault/ui_custom_messages/namespace_test.go
new file mode 100644
index 000000000000..aa0fa680b681
--- /dev/null
+++ b/vault/ui_custom_messages/namespace_test.go
@@ -0,0 +1,31 @@
+// Copyright (c) HashiCorp, Inc.
+// SPDX-License-Identifier: BUSL-1.1
+
+package uicustommessages
+
+import (
+ "testing"
+
+ "github.com/hashicorp/vault/helper/namespace"
+ "github.com/stretchr/testify/assert"
+)
+
+// TestCommunityEditionNamespaceManagerGetParentNamespace verifies that the
+// (*CommunityEditionNamespaceManager).GetParentNamespace behaves as intended,
+// which is to always return namespace.RootNamespace, regardless of the input.
+func TestCommunityEditionNamespaceManagerGetParentNamespace(t *testing.T) {
+ testNsManager := &CommunityEditionNamespaceManager{}
+
+ // Verify root namespace
+ assert.Equal(t, namespace.RootNamespace, testNsManager.GetParentNamespace(namespace.RootNamespace.Path))
+
+ // Verify a different namespace
+ testNamespace := namespace.Namespace{
+ ID: "abc123",
+ Path: "test/",
+ }
+ assert.Equal(t, namespace.RootNamespace, testNsManager.GetParentNamespace(testNamespace.Path))
+
+ // Verify that even a random string results in the root namespace
+ assert.Equal(t, namespace.RootNamespace, testNsManager.GetParentNamespace("blah"))
+}
diff --git a/website/content/api-docs/system/lease-count-quotas.mdx b/website/content/api-docs/system/lease-count-quotas.mdx
index 1e95b2d108f2..dc2c513c50eb 100644
--- a/website/content/api-docs/system/lease-count-quotas.mdx
+++ b/website/content/api-docs/system/lease-count-quotas.mdx
@@ -8,7 +8,7 @@ description: The `/sys/quotas/lease-count` endpoint is used to create, edit and
@include 'alerts/enterprise-only.mdx'
-@include 'alerts/restricted-root.mdx'
+@include 'alerts/restricted-admin.mdx'
The `/sys/quotas/lease-count` endpoint is used to create, edit and delete lease count quotas.
diff --git a/website/content/api-docs/system/quotas-config.mdx b/website/content/api-docs/system/quotas-config.mdx
index f1cb2700c214..d0d021dddae9 100644
--- a/website/content/api-docs/system/quotas-config.mdx
+++ b/website/content/api-docs/system/quotas-config.mdx
@@ -6,7 +6,8 @@ description: The `/sys/quotas/config` endpoint is used to configure rate limit q
# `/sys/quotas/config`
-@include 'alerts/restricted-root.mdx'
+@include 'alerts/restricted-admin.mdx'
+
The `/sys/quotas/config` endpoint is used to configure rate limit quotas.
@@ -19,7 +20,15 @@ The `/sys/quotas/config` endpoint is used to configure rate limit quotas.
### Parameters
- `rate_limit_exempt_paths` `([]string: [])` - Specifies the list of exempt paths
- from all rate limit quotas. If empty no paths will be exempt.
+ from all rate limit quotas. Exempt paths are relative and apply to all
+ namespaces. When `rate_limit_exempt_paths` is empty, Vault applies quotas to
+ all relative paths. Access to exemption data is read-only from the admin namespace.
+ **You cannot update `rate_limit_exempt_paths` from the admin namespace.**
+- `global_rate_limit_exempt_paths` `([]string: [])` - Specifies the list of
+ exempt paths from all rate limit quotas. Global exempt paths are absolute and
+ do not apply across namespaces. When `global_rate_limit_exempt_paths` is empty,
+ Vault applies quotas to all absolute paths. You can only add, update, or delete
+ global paths within the scope of the calling namespace. See the example below.
- `enable_rate_limit_audit_logging` `(bool: false)` - If set, starts audit logging
of requests that get rejected due to rate limit quota rule violations.
- `enable_rate_limit_response_headers` `(bool: false)` - If set, additional rate
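For illustration, a hedged sketch of setting both exemption lists with the official Go client (`github.com/hashicorp/vault/api`); the exempt paths shown are examples only:

```go
package main

import (
	"fmt"
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	// DefaultConfig honours VAULT_ADDR, and NewClient picks up VAULT_TOKEN.
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Relative exempt paths apply in every namespace; global paths are absolute.
	_, err = client.Logical().Write("sys/quotas/config", map[string]interface{}{
		"rate_limit_exempt_paths":         []string{"sys/health"},        // example relative path
		"global_rate_limit_exempt_paths":  []string{"admin/sys/metrics"}, // example absolute path
		"enable_rate_limit_audit_logging": true,
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("rate limit quota config updated")
}
```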
diff --git a/website/content/api-docs/system/rate-limit-quotas.mdx b/website/content/api-docs/system/rate-limit-quotas.mdx
index a85a18a75724..e479781fea20 100644
--- a/website/content/api-docs/system/rate-limit-quotas.mdx
+++ b/website/content/api-docs/system/rate-limit-quotas.mdx
@@ -6,7 +6,7 @@ description: The `/sys/quotas/rate-limit` endpoint is used to create, edit and d
# `/sys/quotas/rate-limit`
-@include 'alerts/restricted-root.mdx'
+@include 'alerts/restricted-admin.mdx'
The `/sys/quotas/rate-limit` endpoint is used to create, edit and delete rate limit quotas.
diff --git a/website/content/docs/auth/jwt/oidc-providers/kubernetes.mdx b/website/content/docs/auth/jwt/oidc-providers/kubernetes.mdx
index 383b12592e9a..55220b7ecd14 100644
--- a/website/content/docs/auth/jwt/oidc-providers/kubernetes.mdx
+++ b/website/content/docs/auth/jwt/oidc-providers/kubernetes.mdx
@@ -81,6 +81,11 @@ This method can be useful if Kubernetes' API is not reachable from Vault or if
you would like a single JWT auth mount to service multiple Kubernetes clusters
by chaining their public signing keys.
+
+ If the JWT signing key used by Kubernetes is rotated, repeat this process
+ with the new key.
+
+
Kubernetes cluster requirements:
* [`ServiceAccountIssuerDiscovery`][k8s-sa-issuer-discovery] feature enabled.
diff --git a/website/content/docs/concepts/client-count/index.mdx b/website/content/docs/concepts/client-count/index.mdx
index d698884b96c0..109d10a229c2 100644
--- a/website/content/docs/concepts/client-count/index.mdx
+++ b/website/content/docs/concepts/client-count/index.mdx
@@ -71,7 +71,7 @@ For example:
- ACME client requests (from the same server or separate servers) for the same
certificate identifier (a unique combination of CN,DNS, SANS and IP SANS)
- are treated as the same entities.
+ are treated as the same entity.
- If an ACME client makes a request for `a.test.com`, and subsequently makes a new
request for `b.test.com` and `*.test.com` then two distinct entities will be created,
one for `a.test.com` and another for the combination of `b.test.com` and `*.test.com`.
diff --git a/website/content/docs/platform/k8s/vso/examples.mdx b/website/content/docs/platform/k8s/vso/examples.mdx
index d2a02c7be081..62a02e3e3a3c 100644
--- a/website/content/docs/platform/k8s/vso/examples.mdx
+++ b/website/content/docs/platform/k8s/vso/examples.mdx
@@ -11,6 +11,13 @@ The Operator project provides the following examples:
- Sample use-cases are documented [here](https://github.com/hashicorp/vault-secrets-operator#samples)
- A Terraform based demo can be found [here](https://github.com/hashicorp/vault-secrets-operator/tree/main/demo)
+## JWT auth for Kubernetes clusters in private networks
+
+Vault Secrets Operator supports using the [JWT auth method](/vault/docs/platform/k8s/vso/api-reference#vaultauthconfigjwt).
+JWT auth [verifies tokens](/vault/docs/auth/jwt#jwt-verification) using the issuer's public signing key.
+Vault supports fetching this public key from the Kubernetes API, but if users can't expose the Kubernetes API to Vault, the public key can be provided directly using [`jwt_validation_pubkeys`](/vault/api-docs/auth/jwt#jwt_validation_pubkeys).
+To configure this, follow the steps outlined in [Using JWT validation public keys](/vault/docs/auth/jwt/oidc-providers/kubernetes#using-jwt-validation-public-keys).
+
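A minimal sketch of the Vault-side setup this section refers to, using the official Go client; the mount path and placeholder PEM are assumptions, not part of the docs change:

```go
package main

import (
	"log"

	vault "github.com/hashicorp/vault/api"
)

func main() {
	client, err := vault.NewClient(vault.DefaultConfig())
	if err != nil {
		log.Fatal(err)
	}

	// Enable a JWT auth mount (the path "jwt" is an example).
	if err := client.Sys().EnableAuthWithOptions("jwt", &vault.EnableAuthOptions{Type: "jwt"}); err != nil {
		log.Fatal(err)
	}

	// Provide the cluster's public signing key directly, so Vault never needs
	// network access to the Kubernetes API. The PEM below is a placeholder.
	pubKeyPEM := "-----BEGIN PUBLIC KEY-----\n...\n-----END PUBLIC KEY-----"

	if _, err := client.Logical().Write("auth/jwt/config", map[string]interface{}{
		"jwt_validation_pubkeys": []string{pubKeyPEM},
	}); err != nil {
		log.Fatal(err)
	}
}
```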
## Using VaultStaticSecrets for imagePullSecrets
Vault Secret Operator supports Kubernetes' templating of Secrets based on their
diff --git a/website/content/partials/api/restricted-endpoints.mdx b/website/content/partials/api/restricted-endpoints.mdx
index 6d2d63e152cf..1e94d400a8fb 100644
--- a/website/content/partials/api/restricted-endpoints.mdx
+++ b/website/content/partials/api/restricted-endpoints.mdx
@@ -34,9 +34,9 @@ API path | Root | Admin
`sys/mfa/method/*` | YES | NO
`sys/monitor` | YES | YES
`sys/pprof/*` | YES | NO
-`sys/quotas/config` | YES | NO
-`sys/quotas/lease-count` | YES | NO
-`sys/quotas/rate-limit` | YES | NO
+`sys/quotas/config` | YES | YES
+`sys/quotas/lease-count` | YES | YES
+`sys/quotas/rate-limit` | YES | YES
`sys/raw` | YES | NO
`sys/rekey/*` | YES | NO
`sys/rekey-recovery-key` | YES | NO