rebase: Bump github.com/hashicorp/vault from 1.4.2 to 1.9.9

Bumps [github.com/hashicorp/vault](https://github.com/hashicorp/vault) from 1.4.2 to 1.9.9.
- [Release notes](https://github.com/hashicorp/vault/releases)
- [Changelog](https://github.com/hashicorp/vault/blob/main/CHANGELOG.md)
- [Commits](https://github.com/hashicorp/vault/compare/v1.4.2...v1.9.9)

---
updated-dependencies:
- dependency-name: github.com/hashicorp/vault
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Author: dependabot[bot]
Date: 2023-03-07 00:32:05 +00:00
Committed by: mergify[bot]
Parent: 37c8f07ed5
Commit: ba40da7e36
52 changed files with 2577 additions and 248 deletions

View File

@ -17,11 +17,8 @@ JSON output mode for production.
## Stability Note
While this library is fully open source and HashiCorp will be maintaining it
(since we are and will be making extensive use of it), the API and output
format is subject to minor changes as we fully bake and vet it in our projects.
This notice will be removed once it's fully integrated into our major projects
and no further changes are anticipated.
This library has reached 1.0 stability. Its API can be considered solidified
and promised through future versions.
## Installation and Docs
@ -102,7 +99,7 @@ into all the callers.
### Using `hclog.Fmt()`
```go
var int totalBandwidth = 200
totalBandwidth := 200
appLogger.Info("total bandwidth exceeded", "bandwidth", hclog.Fmt("%d GB/s", totalBandwidth))
```
@ -146,3 +143,6 @@ log.Printf("[DEBUG] %d", 42)
Notice that if `appLogger` is initialized with the `INFO` log level _and_ you
specify `InferLevels: true`, you will not see any output here. You must change
`appLogger` to `DEBUG` to see output. See the docs for more information.
If the log lines start with a timestamp you can use the
`InferLevelsWithTimestamp` option to try and ignore them.
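
The new `InferLevelsWithTimestamp` option is meant to be combined with `InferLevels` when the wrapped standard-library logger emits lines that already carry a timestamp. A minimal sketch of such a setup, assuming only the hclog API shown in this diff (the logger name and message are illustrative):

```go
package main

import (
	"github.com/hashicorp/go-hclog"
)

func main() {
	appLogger := hclog.New(&hclog.LoggerOptions{
		Name:  "my-app",
		Level: hclog.Debug,
	})

	// Bridge into a *log.Logger; the leading timestamp on each line is
	// trimmed before the [LEVEL] prefix is inferred and reapplied.
	stdLogger := appLogger.StandardLogger(&hclog.StandardLoggerOptions{
		InferLevels:              true,
		InferLevelsWithTimestamp: true,
	})

	stdLogger.Println("2023/03/07 00:32:05 [DEBUG] cache refreshed")
}
```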

View File

@ -2,6 +2,7 @@ package hclog
import (
"sync"
"time"
)
var (
@ -14,6 +15,7 @@ var (
DefaultOptions = &LoggerOptions{
Level: DefaultLevel,
Output: DefaultOutput,
TimeFn: time.Now,
}
)

View File

@ -180,9 +180,10 @@ func (i *interceptLogger) StandardWriterIntercept(opts *StandardLoggerOptions) i
func (i *interceptLogger) StandardWriter(opts *StandardLoggerOptions) io.Writer {
return &stdlogAdapter{
log: i,
inferLevels: opts.InferLevels,
forceLevel: opts.ForceLevel,
log: i,
inferLevels: opts.InferLevels,
inferLevelsWithTimestamp: opts.InferLevelsWithTimestamp,
forceLevel: opts.ForceLevel,
}
}

View File

@ -60,6 +60,7 @@ type intLogger struct {
callerOffset int
name string
timeFormat string
timeFn TimeFunction
disableTime bool
// This is an interface so that it's shared by any derived loggers, since
@ -116,6 +117,7 @@ func newLogger(opts *LoggerOptions) *intLogger {
json: opts.JSONFormat,
name: opts.Name,
timeFormat: TimeFormat,
timeFn: time.Now,
disableTime: opts.DisableTime,
mutex: mutex,
writer: newWriter(output, opts.Color),
@ -130,6 +132,9 @@ func newLogger(opts *LoggerOptions) *intLogger {
if l.json {
l.timeFormat = TimeFormatJSON
}
if opts.TimeFn != nil {
l.timeFn = opts.TimeFn
}
if opts.TimeFormat != "" {
l.timeFormat = opts.TimeFormat
}
@ -152,7 +157,7 @@ func (l *intLogger) log(name string, level Level, msg string, args ...interface{
return
}
t := time.Now()
t := l.timeFn()
l.mutex.Lock()
defer l.mutex.Unlock()
@ -199,6 +204,24 @@ func trimCallerPath(path string) string {
return path[idx+1:]
}
// isNormal indicates if the rune is one allowed to exist as an unquoted
// string value. This is a subset of ASCII, `-` through `~`.
func isNormal(r rune) bool {
return 0x2D <= r && r <= 0x7E // - through ~
}
// needsQuoting returns false if all the runes in string are normal, according
// to isNormal
func needsQuoting(str string) bool {
for _, r := range str {
if !isNormal(r) {
return true
}
}
return false
}
// Non-JSON logging format function
func (l *intLogger) logPlain(t time.Time, name string, level Level, msg string, args ...interface{}) {
@ -263,6 +286,7 @@ func (l *intLogger) logPlain(t time.Time, name string, level Level, msg string,
val = st
if st == "" {
val = `""`
raw = true
}
case int:
val = strconv.FormatInt(int64(st), 10)
@ -323,13 +347,11 @@ func (l *intLogger) logPlain(t time.Time, name string, level Level, msg string,
l.writer.WriteString("=\n")
writeIndent(l.writer, val, " | ")
l.writer.WriteString(" ")
} else if !raw && strings.ContainsAny(val, " \t") {
} else if !raw && needsQuoting(val) {
l.writer.WriteByte(' ')
l.writer.WriteString(key)
l.writer.WriteByte('=')
l.writer.WriteByte('"')
l.writer.WriteString(val)
l.writer.WriteByte('"')
l.writer.WriteString(strconv.Quote(val))
} else {
l.writer.WriteByte(' ')
l.writer.WriteString(key)
@ -687,9 +709,10 @@ func (l *intLogger) StandardWriter(opts *StandardLoggerOptions) io.Writer {
newLog.callerOffset = l.callerOffset + 4
}
return &stdlogAdapter{
log: &newLog,
inferLevels: opts.InferLevels,
forceLevel: opts.ForceLevel,
log: &newLog,
inferLevels: opts.InferLevels,
inferLevelsWithTimestamp: opts.InferLevelsWithTimestamp,
forceLevel: opts.ForceLevel,
}
}
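
The quoting change above is broader than the old space/tab check: any value containing a rune outside the ASCII range `-` through `~` (newlines, control characters, non-ASCII) is now emitted through `strconv.Quote`. A self-contained sketch of the same check, mirroring the helper names from the diff rather than any exported hclog API:

```go
package main

import (
	"fmt"
	"strconv"
)

// isNormal mirrors the diff: runes from '-' (0x2D) through '~' (0x7E)
// may appear in an unquoted value.
func isNormal(r rune) bool {
	return 0x2D <= r && r <= 0x7E
}

func needsQuoting(str string) bool {
	for _, r := range str {
		if !isNormal(r) {
			return true
		}
	}
	return false
}

func main() {
	for _, v := range []string{"plain-value", "has space", "line\nbreak", "héllo"} {
		if needsQuoting(v) {
			fmt.Println(strconv.Quote(v)) // e.g. "line\nbreak"
		} else {
			fmt.Println(v)
		}
	}
}
```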

View File

@ -5,6 +5,7 @@ import (
"log"
"os"
"strings"
"time"
)
var (
@ -212,6 +213,15 @@ type StandardLoggerOptions struct {
// [DEBUG] and strip it off before reapplying it.
InferLevels bool
// Indicate that some minimal parsing should be done on strings to try
// and detect their level and re-emit them while ignoring possible
// timestamp values in the beginning of the string.
// This supports the strings like [ERROR], [ERR] [TRACE], [WARN], [INFO],
// [DEBUG] and strip it off before reapplying it.
// The timestamp detection may result in false positives and incomplete
// string outputs.
InferLevelsWithTimestamp bool
// ForceLevel is used to force all output from the standard logger to be at
// the specified level. Similar to InferLevels, this will strip any level
// prefix contained in the logged string before applying the forced level.
@ -219,6 +229,8 @@ type StandardLoggerOptions struct {
ForceLevel Level
}
type TimeFunction = func() time.Time
// LoggerOptions can be used to configure a new logger.
type LoggerOptions struct {
// Name of the subsystem to prefix logs with
@ -248,6 +260,9 @@ type LoggerOptions struct {
// The time format to use instead of the default
TimeFormat string
// A function which is called to get the time object that is formatted using `TimeFormat`
TimeFn TimeFunction
// Control whether or not to display the time at all. This is required
// because setting TimeFormat to empty assumes the default format.
DisableTime bool
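
The new `TimeFn` hook makes the timestamp source injectable, which is mostly useful for deterministic log output in tests. A minimal sketch, assuming only the `TimeFn`/`TimeFunction` additions shown in this diff (the pinned time is illustrative):

```go
package main

import (
	"time"

	"github.com/hashicorp/go-hclog"
)

func main() {
	// Pin the clock so every line carries the same timestamp, e.g. when
	// asserting on log output in tests.
	fixed := time.Date(2023, 3, 7, 0, 0, 0, 0, time.UTC)

	logger := hclog.New(&hclog.LoggerOptions{
		Name:   "test",
		TimeFn: func() time.Time { return fixed },
	})

	logger.Info("cache refreshed", "entries", 42)
}
```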

View File

@ -3,16 +3,22 @@ package hclog
import (
"bytes"
"log"
"regexp"
"strings"
)
// Regex to ignore characters commonly found in timestamp formats from the
// beginning of inputs.
var logTimestampRegexp = regexp.MustCompile(`^[\d\s\:\/\.\+-TZ]*`)
// Provides a io.Writer to shim the data out of *log.Logger
// and back into our Logger. This is basically the only way to
// build upon *log.Logger.
type stdlogAdapter struct {
log Logger
inferLevels bool
forceLevel Level
log Logger
inferLevels bool
inferLevelsWithTimestamp bool
forceLevel Level
}
// Take the data, infer the levels if configured, and send it through
@ -28,6 +34,10 @@ func (s *stdlogAdapter) Write(data []byte) (int, error) {
// Log at the forced level
s.dispatch(str, s.forceLevel)
} else if s.inferLevels {
if s.inferLevelsWithTimestamp {
str = s.trimTimestamp(str)
}
level, str := s.pickLevel(str)
s.dispatch(str, level)
} else {
@ -74,6 +84,11 @@ func (s *stdlogAdapter) pickLevel(str string) (Level, string) {
}
}
func (s *stdlogAdapter) trimTimestamp(str string) string {
idx := logTimestampRegexp.FindStringIndex(str)
return str[idx[1]:]
}
type logWriter struct {
l *log.Logger
}
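
The trimming regexp is deliberately loose: it strips any leading run of digits, whitespace, and the separator characters common in timestamp formats, which is why the `InferLevelsWithTimestamp` documentation warns about false positives. A standalone illustration using the same pattern:

```go
package main

import (
	"fmt"
	"regexp"
)

// Same pattern as logTimestampRegexp in the diff above.
var logTimestampRegexp = regexp.MustCompile(`^[\d\s\:\/\.\+-TZ]*`)

func trimTimestamp(str string) string {
	idx := logTimestampRegexp.FindStringIndex(str)
	return str[idx[1]:]
}

func main() {
	// The leading timestamp is removed before level inference.
	fmt.Println(trimTimestamp("2023-03-07T00:32:05Z [DEBUG] starting up"))
	// -> [DEBUG] starting up

	// False positive: a message that merely begins with digits is
	// trimmed as well, the caveat noted in the option's comment.
	fmt.Println(trimTimestamp("42 items processed"))
	// -> items processed
}
```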

View File

@ -505,7 +505,7 @@ func expandObject(node ast.Node, result reflect.Value) ast.Node {
// we need to un-flatten the ast enough to decode
newNode := &ast.ObjectItem{
Keys: []*ast.ObjectKey{
&ast.ObjectKey{
{
Token: keyToken,
},
},
@ -628,6 +628,20 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
decodedFields := make([]string, 0, len(fields))
decodedFieldsVal := make([]reflect.Value, 0)
unusedKeysVal := make([]reflect.Value, 0)
// fill unusedNodeKeys with keys from the AST
// a slice because we have to do equals case fold to match Filter
unusedNodeKeys := make(map[string][]token.Pos, 0)
for _, item := range list.Items {
for _, k := range item.Keys {
if k.Token.JSON || k.Token.Type == token.IDENT {
fn := k.Token.Value().(string)
sl := unusedNodeKeys[fn]
unusedNodeKeys[fn] = append(sl, k.Token.Pos)
}
}
}
for _, f := range fields {
field, fieldValue := f.field, f.val
if !fieldValue.IsValid() {
@ -661,7 +675,7 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
fieldValue.SetString(item.Keys[0].Token.Value().(string))
continue
case "unusedKeys":
case "unusedKeyPositions":
unusedKeysVal = append(unusedKeysVal, fieldValue)
continue
}
@ -682,8 +696,9 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
continue
}
// Track the used key
// Track the used keys
usedKeys[fieldName] = struct{}{}
unusedNodeKeys = removeCaseFold(unusedNodeKeys, fieldName)
// Create the field name and decode. We range over the elements
// because we actually want the value.
@ -716,6 +731,13 @@ func (d *decoder) decodeStruct(name string, node ast.Node, result reflect.Value)
}
}
if len(unusedNodeKeys) > 0 {
// like decodedFields, populate the unusedKeys field(s)
for _, v := range unusedKeysVal {
v.Set(reflect.ValueOf(unusedNodeKeys))
}
}
return nil
}
@ -727,3 +749,17 @@ func findNodeType() reflect.Type {
value := reflect.ValueOf(nodeContainer).FieldByName("Node")
return value.Type()
}
func removeCaseFold(xs map[string][]token.Pos, y string) map[string][]token.Pos {
var toDel []string
for i := range xs {
if strings.EqualFold(i, y) {
toDel = append(toDel, i)
}
}
for _, i := range toDel {
delete(xs, i)
}
return xs
}
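
Taken together, the decoder changes above let a struct collect the keys that were present in the input but never decoded, along with their positions. A sketch of how that might be used, assuming the tag spelling follows the `unusedKeyPositions` case added above and the usual `hcl:",..."` special-field convention:

```go
package main

import (
	"fmt"

	"github.com/hashicorp/hcl"
	"github.com/hashicorp/hcl/hcl/token"
)

type Config struct {
	Name string `hcl:"name"`

	// Filled with every input key that did not map to a struct field,
	// plus the position(s) where it appeared.
	Unused map[string][]token.Pos `hcl:",unusedKeyPositions"`
}

func main() {
	input := `
name = "example"
typo_key = "never decoded"
`
	var c Config
	if err := hcl.Decode(&c, input); err != nil {
		panic(err)
	}
	for key, positions := range c.Unused {
		fmt.Printf("unused key %q at %v\n", key, positions)
	}
}
```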

View File

@ -25,6 +25,8 @@ func (ObjectType) node() {}
func (LiteralType) node() {}
func (ListType) node() {}
var unknownPos token.Pos
// File represents a single HCL file
type File struct {
Node Node // usually a *ObjectList
@ -108,7 +110,12 @@ func (o *ObjectList) Elem() *ObjectList {
}
func (o *ObjectList) Pos() token.Pos {
// always returns the uninitiliazed position
// If an Object has no members, it won't have a first item
// to use as position
if len(o.Items) == 0 {
return unknownPos
}
// Return the uninitialized position
return o.Items[0].Pos()
}
@ -133,10 +140,10 @@ type ObjectItem struct {
}
func (o *ObjectItem) Pos() token.Pos {
// I'm not entirely sure what causes this, but removing this causes
// a test failure. We should investigate at some point.
// If a parsed object has no keys, there is no position
// for its first element.
if len(o.Keys) == 0 {
return token.Pos{}
return unknownPos
}
return o.Keys[0].Pos()

View File

@ -2,15 +2,24 @@ package auth
import (
"context"
"encoding/json"
"errors"
"math/rand"
"net/http"
"time"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/sdk/helper/jsonutil"
)
const (
initialBackoff = 1 * time.Second
defaultMaxBackoff = 5 * time.Minute
)
// AuthMethod is the interface that auto-auth methods implement for the agent
// to use.
type AuthMethod interface {
// Authenticate returns a mount path, header, request body, and error.
// The header may be nil if no special header is needed.
@ -20,6 +29,13 @@ type AuthMethod interface {
Shutdown()
}
// AuthMethodWithClient is an extended interface that can return an API client
// for use during the authentication call.
type AuthMethodWithClient interface {
AuthMethod
AuthClient(client *api.Client) (*api.Client, error)
}
type AuthConfig struct {
Logger hclog.Logger
MountPath string
@ -30,13 +46,14 @@ type AuthConfig struct {
// AuthHandler is responsible for keeping a token alive and renewed and passing
// new tokens to the sink server
type AuthHandler struct {
DoneCh chan struct{}
OutputCh chan string
TemplateTokenCh chan string
token string
logger hclog.Logger
client *api.Client
random *rand.Rand
wrapTTL time.Duration
maxBackoff time.Duration
enableReauthOnNewCredentials bool
enableTemplateTokenCh bool
}
@ -45,21 +62,24 @@ type AuthHandlerConfig struct {
Logger hclog.Logger
Client *api.Client
WrapTTL time.Duration
MaxBackoff time.Duration
Token string
EnableReauthOnNewCredentials bool
EnableTemplateTokenCh bool
}
func NewAuthHandler(conf *AuthHandlerConfig) *AuthHandler {
ah := &AuthHandler{
DoneCh: make(chan struct{}),
// This is buffered so that if we try to output after the sink server
// has been shut down, during agent shutdown, we won't block
OutputCh: make(chan string, 1),
TemplateTokenCh: make(chan string, 1),
token: conf.Token,
logger: conf.Logger,
client: conf.Client,
random: rand.New(rand.NewSource(int64(time.Now().Nanosecond()))),
wrapTTL: conf.WrapTTL,
maxBackoff: conf.MaxBackoff,
enableReauthOnNewCredentials: conf.EnableReauthOnNewCredentials,
enableTemplateTokenCh: conf.EnableTemplateTokenCh,
}
@ -67,23 +87,28 @@ func NewAuthHandler(conf *AuthHandlerConfig) *AuthHandler {
return ah
}
func backoffOrQuit(ctx context.Context, backoff time.Duration) {
func backoffOrQuit(ctx context.Context, backoff *agentBackoff) {
select {
case <-time.After(backoff):
case <-time.After(backoff.current):
case <-ctx.Done():
}
// Increase exponential backoff for the next time if we don't
// successfully auth/renew/etc.
backoff.next()
}
func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) {
func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) error {
if am == nil {
panic("nil auth method")
return errors.New("auth handler: nil auth method")
}
backoff := newAgentBackoff(ah.maxBackoff)
ah.logger.Info("starting auth handler")
defer func() {
am.Shutdown()
close(ah.OutputCh)
close(ah.DoneCh)
close(ah.TemplateTokenCh)
ah.logger.Info("auth handler stopped")
}()
@ -109,31 +134,70 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) {
}
var watcher *api.LifetimeWatcher
first := true
for {
select {
case <-ctx.Done():
return
return nil
default:
}
// Create a fresh backoff value
backoff := 2*time.Second + time.Duration(ah.random.Int63()%int64(time.Second*2)-int64(time.Second))
var clientToUse *api.Client
var err error
var path string
var data map[string]interface{}
var header http.Header
ah.logger.Info("authenticating")
path, header, data, err := am.Authenticate(ctx, ah.client)
if err != nil {
ah.logger.Error("error getting path or data from method", "error", err, "backoff", backoff.Seconds())
backoffOrQuit(ctx, backoff)
continue
switch am.(type) {
case AuthMethodWithClient:
clientToUse, err = am.(AuthMethodWithClient).AuthClient(ah.client)
if err != nil {
ah.logger.Error("error creating client for authentication call", "error", err, "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
default:
clientToUse = ah.client
}
clientToUse := ah.client
if ah.wrapTTL > 0 {
wrapClient, err := ah.client.Clone()
var secret *api.Secret = new(api.Secret)
if first && ah.token != "" {
ah.logger.Debug("using preloaded token")
first = false
ah.logger.Debug("lookup-self with preloaded token")
clientToUse.SetToken(ah.token)
secret, err = clientToUse.Logical().Read("auth/token/lookup-self")
if err != nil {
ah.logger.Error("error creating client for wrapped call", "error", err, "backoff", backoff.Seconds())
ah.logger.Error("could not look up token", "err", err, "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
duration, _ := secret.Data["ttl"].(json.Number).Int64()
secret.Auth = &api.SecretAuth{
ClientToken: secret.Data["id"].(string),
LeaseDuration: int(duration),
Renewable: secret.Data["renewable"].(bool),
}
} else {
ah.logger.Info("authenticating")
path, header, data, err = am.Authenticate(ctx, ah.client)
if err != nil {
ah.logger.Error("error getting path or data from method", "error", err, "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
}
if ah.wrapTTL > 0 {
wrapClient, err := clientToUse.Clone()
if err != nil {
ah.logger.Error("error creating client for wrapped call", "error", err, "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
@ -148,29 +212,33 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) {
}
}
secret, err := clientToUse.Logical().Write(path, data)
// Check errors/sanity
if err != nil {
ah.logger.Error("error authenticating", "error", err, "backoff", backoff.Seconds())
backoffOrQuit(ctx, backoff)
continue
// This should only happen if there's no preloaded token (regular auto-auth login)
// or if a preloaded token has expired and is now switching to auto-auth.
if secret.Auth == nil {
secret, err = clientToUse.Logical().Write(path, data)
// Check errors/sanity
if err != nil {
ah.logger.Error("error authenticating", "error", err, "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
}
switch {
case ah.wrapTTL > 0:
if secret.WrapInfo == nil {
ah.logger.Error("authentication returned nil wrap info", "backoff", backoff.Seconds())
ah.logger.Error("authentication returned nil wrap info", "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
if secret.WrapInfo.Token == "" {
ah.logger.Error("authentication returned empty wrapped client token", "backoff", backoff.Seconds())
ah.logger.Error("authentication returned empty wrapped client token", "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
wrappedResp, err := jsonutil.EncodeJSON(secret.WrapInfo)
if err != nil {
ah.logger.Error("failed to encode wrapinfo", "error", err, "backoff", backoff.Seconds())
ah.logger.Error("failed to encode wrapinfo", "error", err, "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
@ -181,6 +249,7 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) {
}
am.CredSuccess()
backoff.reset()
select {
case <-ctx.Done():
@ -194,12 +263,12 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) {
default:
if secret == nil || secret.Auth == nil {
ah.logger.Error("authentication returned nil auth info", "backoff", backoff.Seconds())
ah.logger.Error("authentication returned nil auth info", "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
if secret.Auth.ClientToken == "" {
ah.logger.Error("authentication returned empty client token", "backoff", backoff.Seconds())
ah.logger.Error("authentication returned empty client token", "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
@ -210,17 +279,18 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) {
}
am.CredSuccess()
backoff.reset()
}
if watcher != nil {
watcher.Stop()
}
watcher, err = ah.client.NewLifetimeWatcher(&api.LifetimeWatcherInput{
watcher, err = clientToUse.NewLifetimeWatcher(&api.LifetimeWatcherInput{
Secret: secret,
})
if err != nil {
ah.logger.Error("error creating lifetime watcher, backing off and retrying", "error", err, "backoff", backoff.Seconds())
ah.logger.Error("error creating lifetime watcher, backing off and retrying", "error", err, "backoff", backoff)
backoffOrQuit(ctx, backoff)
continue
}
@ -254,3 +324,42 @@ func (ah *AuthHandler) Run(ctx context.Context, am AuthMethod) {
}
}
}
// agentBackoff tracks exponential backoff state.
type agentBackoff struct {
max time.Duration
current time.Duration
}
func newAgentBackoff(max time.Duration) *agentBackoff {
if max <= 0 {
max = defaultMaxBackoff
}
return &agentBackoff{
max: max,
current: initialBackoff,
}
}
// next determines the next backoff duration that is roughly twice
// the current value, capped to a max value, with a measure of randomness.
func (b *agentBackoff) next() {
maxBackoff := 2 * b.current
if maxBackoff > b.max {
maxBackoff = b.max
}
// Trim a random amount (0-25%) off the doubled duration
trim := rand.Int63n(int64(maxBackoff) / 4)
b.current = maxBackoff - time.Duration(trim)
}
func (b *agentBackoff) reset() {
b.current = initialBackoff
}
func (b agentBackoff) String() string {
return b.current.Truncate(10 * time.Millisecond).String()
}
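
The `agentBackoff` helper replaces the old fixed wait (2s plus or minus up to 1s of jitter, recomputed each loop) with true exponential backoff: each failure roughly doubles the wait, trims up to 25% at random, and caps at `MaxBackoff` (5 minutes by default). A standalone sketch of the same arithmetic, mirroring the unexported types above rather than any Vault API:

```go
package main

import (
	"fmt"
	"math/rand"
	"time"
)

const (
	initialBackoff    = 1 * time.Second
	defaultMaxBackoff = 5 * time.Minute
)

// agentBackoff mirrors the helper added above.
type agentBackoff struct {
	max     time.Duration
	current time.Duration
}

func newAgentBackoff(max time.Duration) *agentBackoff {
	if max <= 0 {
		max = defaultMaxBackoff
	}
	return &agentBackoff{max: max, current: initialBackoff}
}

// next doubles the wait, caps it at max, then trims a random 0-25%.
func (b *agentBackoff) next() {
	next := 2 * b.current
	if next > b.max {
		next = b.max
	}
	trim := rand.Int63n(int64(next) / 4)
	b.current = next - time.Duration(trim)
}

func main() {
	// Waits grow from 1s, roughly doubling per failed attempt, capped at 30s here.
	b := newAgentBackoff(30 * time.Second)
	for attempt := 1; attempt <= 8; attempt++ {
		fmt.Printf("attempt %d failed, waiting %s\n", attempt, b.current.Truncate(10*time.Millisecond))
		b.next()
	}
}
```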

View File

@ -10,7 +10,6 @@ import (
"os"
"strings"
"github.com/hashicorp/errwrap"
hclog "github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/command/agent/auth"
@ -78,7 +77,7 @@ func (k *kubernetesMethod) Authenticate(ctx context.Context, client *api.Client)
jwtString, err := k.readJWT()
if err != nil {
return "", nil, nil, errwrap.Wrapf("error reading JWT with Kubernetes Auth: {{err}}", err)
return "", nil, nil, fmt.Errorf("error reading JWT with Kubernetes Auth: %w", err)
}
return fmt.Sprintf("%s/login", k.mountPath), nil, map[string]interface{}{