Merge pull request #562 from mattmoyer/add-cluster-credential-caching

Add cluster-specific credential caching to login subcommands.
Matt Moyer 2021-04-08 12:59:23 -07:00 committed by GitHub
commit 043cefcd9f
13 changed files with 1019 additions and 2 deletions
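Both login subcommands gain the same flow: derive a cache key from every CLI argument, consult the cluster-specific credential cache before doing any real login, and write the freshly issued credential back afterward. The sketch below condenses that flow; it is illustrative and not part of the diff. The execcredcache calls are the ones added in this commit, while the example package, loginWithCache, and doLogin are hypothetical stand-ins for the real Cobra command functions.

package example

import (
	"encoding/json"
	"os"

	clientauthenticationv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"

	"go.pinniped.dev/internal/execcredcache"
)

// loginWithCache is a hypothetical wrapper showing how the login commands use the cache.
func loginWithCache(credentialCachePath string, doLogin func() (*clientauthenticationv1beta1.ExecCredential, error)) error {
	// Key the cache on every CLI argument, so each cluster/flag combination gets its own entry.
	cacheKey := struct {
		Args []string `json:"args"`
	}{Args: os.Args[1:]}

	var credCache *execcredcache.Cache
	if credentialCachePath != "" {
		credCache = execcredcache.New(credentialCachePath)
		// Cache hit: emit the cached ExecCredential and skip the login entirely.
		if cred := credCache.Get(cacheKey); cred != nil {
			return json.NewEncoder(os.Stdout).Encode(cred)
		}
	}

	// Cache miss: perform the real login / Concierge credential exchange.
	cred, err := doLogin()
	if err != nil {
		return err
	}

	// Save the new credential for future invocations, then emit it.
	if credCache != nil {
		credCache.Put(cacheKey, cred)
	}
	return json.NewEncoder(os.Stdout).Encode(cred)
}

In the static-token command the key additionally includes the token value, and the credential is only cached when it came from a Concierge exchange (see login_static.go below).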

View File

@@ -22,6 +22,7 @@ import (
clientauthv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
"k8s.io/klog/v2/klogr"
"go.pinniped.dev/internal/execcredcache"
"go.pinniped.dev/internal/groupsuffix"
"go.pinniped.dev/pkg/conciergeclient"
"go.pinniped.dev/pkg/oidcclient"
@@ -65,6 +66,7 @@ type oidcLoginFlags struct {
conciergeEndpoint string
conciergeCABundle string
conciergeAPIGroupSuffix string
credentialCachePath string
}
func oidcLoginCommand(deps oidcLoginCommandDeps) *cobra.Command {
@@ -95,6 +97,7 @@ func oidcLoginCommand(deps oidcLoginCommandDeps) *cobra.Command {
cmd.Flags().StringVar(&flags.conciergeEndpoint, "concierge-endpoint", "", "API base for the Concierge endpoint")
cmd.Flags().StringVar(&flags.conciergeCABundle, "concierge-ca-bundle-data", "", "CA bundle to use when connecting to the Concierge")
cmd.Flags().StringVar(&flags.conciergeAPIGroupSuffix, "concierge-api-group-suffix", groupsuffix.PinnipedDefaultSuffix, "Concierge API group suffix")
cmd.Flags().StringVar(&flags.credentialCachePath, "credential-cache", filepath.Join(mustGetConfigDir(), "credentials.yaml"), "Cluster-specific credentials cache path (\"\" disables the cache)")
mustMarkHidden(cmd, "debug-session-cache")
mustMarkRequired(cmd, "issuer")
@@ -164,6 +167,20 @@ func runOIDCLogin(cmd *cobra.Command, deps oidcLoginCommandDeps, flags oidcLogin
opts = append(opts, oidcclient.WithClient(client))
}
// Look up cached credentials based on a hash of all the CLI arguments.
cacheKey := struct {
Args []string `json:"args"`
}{
Args: os.Args[1:],
}
var credCache *execcredcache.Cache
if flags.credentialCachePath != "" {
credCache = execcredcache.New(flags.credentialCachePath)
if cred := credCache.Get(cacheKey); cred != nil {
return json.NewEncoder(cmd.OutOrStdout()).Encode(cred)
}
}
// Do the basic login to get an OIDC token.
token, err := deps.login(flags.issuer, flags.clientID, opts...)
if err != nil {
@@ -181,6 +198,11 @@ func runOIDCLogin(cmd *cobra.Command, deps oidcLoginCommandDeps, flags oidcLogin
return fmt.Errorf("could not complete Concierge credential exchange: %w", err)
}
}
// If there was a credential cache, save the resulting credential for future use.
if credCache != nil {
credCache.Put(cacheKey, cred)
}
return json.NewEncoder(cmd.OutOrStdout()).Encode(cred)
}

View File

@@ -64,6 +64,7 @@ func TestLoginOIDCCommand(t *testing.T) {
--concierge-authenticator-type string Concierge authenticator type (e.g., 'webhook', 'jwt')
--concierge-ca-bundle-data string CA bundle to use when connecting to the Concierge
--concierge-endpoint string API base for the Concierge endpoint
--credential-cache string Cluster-specific credentials cache path ("" disables the cache) (default "` + cfgDir + `/credentials.yaml")
--enable-concierge Use the Concierge to login
-h, --help help for oidc
--issuer string OpenID Connect issuer URL
@@ -189,6 +190,7 @@ func TestLoginOIDCCommand(t *testing.T) {
"--concierge-endpoint", "https://127.0.0.1:1234/",
"--concierge-ca-bundle-data", base64.StdEncoding.EncodeToString(testCA.Bundle()),
"--concierge-api-group-suffix", "some.suffix.com",
"--credential-cache", testutil.TempDir(t) + "/credentials.yaml",
},
wantOptionsCount: 7,
wantStdout: `{"kind":"ExecCredential","apiVersion":"client.authentication.k8s.io/v1beta1","spec":{},"status":{"token":"exchanged-token"}}` + "\n",

View File

@@ -9,11 +9,13 @@ import (
"fmt"
"io"
"os"
"path/filepath"
"time"
"github.com/spf13/cobra"
clientauthv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
"go.pinniped.dev/internal/execcredcache"
"go.pinniped.dev/internal/groupsuffix"
"go.pinniped.dev/pkg/conciergeclient"
"go.pinniped.dev/pkg/oidcclient/oidctypes"
@@ -47,6 +49,7 @@ type staticLoginParams struct {
conciergeEndpoint string
conciergeCABundle string
conciergeAPIGroupSuffix string
credentialCachePath string
}
func staticLoginCommand(deps staticLoginDeps) *cobra.Command {
@@ -69,6 +72,7 @@ func staticLoginCommand(deps staticLoginDeps) *cobra.Command {
cmd.Flags().StringVar(&flags.conciergeEndpoint, "concierge-endpoint", "", "API base for the Concierge endpoint")
cmd.Flags().StringVar(&flags.conciergeCABundle, "concierge-ca-bundle-data", "", "CA bundle to use when connecting to the Concierge")
cmd.Flags().StringVar(&flags.conciergeAPIGroupSuffix, "concierge-api-group-suffix", groupsuffix.PinnipedDefaultSuffix, "Concierge API group suffix")
cmd.Flags().StringVar(&flags.credentialCachePath, "credential-cache", filepath.Join(mustGetConfigDir(), "credentials.yaml"), "Cluster-specific credentials cache path (\"\" disables the cache)")
cmd.RunE = func(cmd *cobra.Command, args []string) error { return runStaticLogin(cmd.OutOrStdout(), deps, flags) }
@@ -113,6 +117,22 @@ func runStaticLogin(out io.Writer, deps staticLoginDeps, flags staticLoginParams
}
cred := tokenCredential(&oidctypes.Token{IDToken: &oidctypes.IDToken{Token: token}})
// Look up cached credentials based on a hash of all the CLI arguments and the current token value.
cacheKey := struct {
Args []string `json:"args"`
Token string `json:"token"`
}{
Args: os.Args[1:],
Token: token,
}
var credCache *execcredcache.Cache
if flags.credentialCachePath != "" {
credCache = execcredcache.New(flags.credentialCachePath)
if cred := credCache.Get(cacheKey); cred != nil {
return json.NewEncoder(out).Encode(cred)
}
}
// If the concierge was configured, exchange the credential for a separate short-lived, cluster-specific credential.
if concierge != nil {
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
@@ -124,5 +144,12 @@ func runStaticLogin(out io.Writer, deps staticLoginDeps, flags staticLoginParams
return fmt.Errorf("could not complete Concierge credential exchange: %w", err)
}
}
// If there was a credential cache, save the resulting credential for future use. We only save to the cache if
// the credential came from the concierge, since that's the only static token case where the cache is useful.
if credCache != nil && concierge != nil {
credCache.Put(cacheKey, cred)
}
return json.NewEncoder(out).Encode(cred)
}

View File

@@ -23,6 +23,8 @@ import (
)
func TestLoginStaticCommand(t *testing.T) {
cfgDir := mustGetConfigDir()
testCA, err := certauthority.New("Test CA", 1*time.Hour)
require.NoError(t, err)
tmpdir := testutil.TempDir(t)
@@ -55,6 +57,7 @@ func TestLoginStaticCommand(t *testing.T) {
--concierge-authenticator-type string Concierge authenticator type (e.g., 'webhook', 'jwt')
--concierge-ca-bundle-data string CA bundle to use when connecting to the Concierge
--concierge-endpoint string API base for the Concierge endpoint
--credential-cache string Cluster-specific credentials cache path ("" disables the cache) (default "` + cfgDir + `/credentials.yaml")
--enable-concierge Use the Concierge to login
-h, --help help for static
--token string Static token to present during login

View File

@@ -0,0 +1,127 @@
// Copyright 2021 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package execcredcache
import (
"errors"
"fmt"
"io/ioutil"
"os"
"sort"
"time"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthenticationv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
"sigs.k8s.io/yaml"
)
var (
// errUnsupportedVersion is returned (internally) when we encounter a version of the cache file that we
// don't understand how to handle (such as one produced by a future version of Pinniped).
errUnsupportedVersion = fmt.Errorf("unsupported credential cache version")
)
const (
// apiVersion is the Kubernetes-style API version of the credential cache file object.
apiVersion = "config.supervisor.pinniped.dev/v1alpha1"
// apiKind is the Kubernetes-style Kind of the credential cache file object.
apiKind = "CredentialCache"
// maxCacheDuration is how long a credential can remain in the cache even if it's still otherwise valid.
maxCacheDuration = 1 * time.Hour
)
type (
// credCache is the object which is YAML-serialized to form the contents of the cache file.
credCache struct {
metav1.TypeMeta
Entries []entry `json:"credentials"`
}
// entry is a single credential in the cache file.
entry struct {
Key string `json:"key"`
CreationTimestamp metav1.Time `json:"creationTimestamp"`
LastUsedTimestamp metav1.Time `json:"lastUsedTimestamp"`
Credential *clientauthenticationv1beta1.ExecCredentialStatus `json:"credential"`
}
)
// readCache loads a credCache from a path on disk. If the requested path does not exist, it returns an empty cache.
func readCache(path string) (*credCache, error) {
cacheYAML, err := ioutil.ReadFile(path)
if err != nil {
if errors.Is(err, os.ErrNotExist) {
// If the file was not found, generate a freshly initialized empty cache.
return emptyCache(), nil
}
// Otherwise bubble up the error.
return nil, fmt.Errorf("could not read cache file: %w", err)
}
// If we read the file successfully, unmarshal it from YAML.
var cache credCache
if err := yaml.Unmarshal(cacheYAML, &cache); err != nil {
return nil, fmt.Errorf("invalid cache file: %w", err)
}
// Validate that we're reading a version of the config we understand how to parse.
if !(cache.TypeMeta.APIVersion == apiVersion && cache.TypeMeta.Kind == apiKind) {
return nil, fmt.Errorf("%w: %#v", errUnsupportedVersion, cache.TypeMeta)
}
return &cache, nil
}
// emptyCache returns an empty, initialized credCache.
func emptyCache() *credCache {
return &credCache{
TypeMeta: metav1.TypeMeta{APIVersion: apiVersion, Kind: apiKind},
Entries: make([]entry, 0, 1),
}
}
// writeTo writes the cache to the specified file path.
func (c *credCache) writeTo(path string) error {
// Marshal the cache back to YAML and save it to the file.
cacheYAML, err := yaml.Marshal(c)
if err == nil {
err = ioutil.WriteFile(path, cacheYAML, 0600)
}
return err
}
// normalized returns a copy of the credCache with stale entries removed and entries sorted in a canonical order.
func (c *credCache) normalized() *credCache {
result := emptyCache()
// Clean up expired/invalid tokens.
now := time.Now()
result.Entries = make([]entry, 0, len(c.Entries))
for _, e := range c.Entries {
// Eliminate any cache entries that are missing a credential or an expiration timestamp.
if e.Credential == nil || e.Credential.ExpirationTimestamp == nil {
continue
}
// Eliminate any expired credentials.
if e.Credential.ExpirationTimestamp.Time.Before(time.Now()) {
continue
}
// Eliminate any entries older than maxCacheDuration.
if e.CreationTimestamp.Time.Before(now.Add(-maxCacheDuration)) {
continue
}
result.Entries = append(result.Entries, e)
}
// Sort the entries by creation time.
sort.SliceStable(result.Entries, func(i, j int) bool {
return result.Entries[i].CreationTimestamp.Before(&result.Entries[j].CreationTimestamp)
})
return result
}

View File

@@ -0,0 +1,207 @@
// Copyright 2021 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package execcredcache
import (
"os"
"testing"
"time"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthenticationv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
"go.pinniped.dev/internal/testutil"
)
var (
// validCache should be the same data as `testdata/valid.yaml`.
validCache = credCache{
TypeMeta: metav1.TypeMeta{APIVersion: "config.supervisor.pinniped.dev/v1alpha1", Kind: "CredentialCache"},
Entries: []entry{
{
Key: "test-key",
CreationTimestamp: metav1.NewTime(time.Date(2020, 10, 20, 18, 42, 7, 0, time.UTC).Local()),
LastUsedTimestamp: metav1.NewTime(time.Date(2020, 10, 20, 18, 45, 31, 0, time.UTC).Local()),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
Token: "test-token",
ExpirationTimestamp: &expTime,
},
},
},
}
expTime = metav1.NewTime(time.Date(2020, 10, 20, 19, 46, 30, 0, time.UTC).Local())
)
func TestReadCache(t *testing.T) {
t.Parallel()
tests := []struct {
name string
path string
want *credCache
wantErr string
}{
{
name: "does not exist",
path: "./testdata/does-not-exist.yaml",
want: &credCache{
TypeMeta: metav1.TypeMeta{APIVersion: "config.supervisor.pinniped.dev/v1alpha1", Kind: "CredentialCache"},
Entries: []entry{},
},
},
{
name: "other file error",
path: "./testdata/",
wantErr: "could not read cache file: read ./testdata/: is a directory",
},
{
name: "invalid YAML",
path: "./testdata/invalid.yaml",
wantErr: "invalid cache file: error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type execcredcache.credCache",
},
{
name: "wrong version",
path: "./testdata/wrong-version.yaml",
wantErr: `unsupported credential cache version: v1.TypeMeta{Kind:"NotACredentialCache", APIVersion:"config.supervisor.pinniped.dev/v2alpha6"}`,
},
{
name: "valid",
path: "./testdata/valid.yaml",
want: &validCache,
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
got, err := readCache(tt.path)
if tt.wantErr != "" {
require.EqualError(t, err, tt.wantErr)
require.Nil(t, got)
return
}
require.NoError(t, err)
require.NotNil(t, got)
require.Equal(t, tt.want, got)
})
}
}
func TestEmptyCache(t *testing.T) {
t.Parallel()
got := emptyCache()
require.Equal(t, metav1.TypeMeta{APIVersion: "config.supervisor.pinniped.dev/v1alpha1", Kind: "CredentialCache"}, got.TypeMeta)
require.Equal(t, 0, len(got.Entries))
require.Equal(t, 1, cap(got.Entries))
}
func TestWriteTo(t *testing.T) {
t.Parallel()
t.Run("io error", func(t *testing.T) {
t.Parallel()
tmp := testutil.TempDir(t) + "/credentials.yaml"
require.NoError(t, os.Mkdir(tmp, 0700))
err := validCache.writeTo(tmp)
require.EqualError(t, err, "open "+tmp+": is a directory")
})
t.Run("success", func(t *testing.T) {
t.Parallel()
require.NoError(t, validCache.writeTo(testutil.TempDir(t)+"/credentials.yaml"))
})
}
func TestNormalized(t *testing.T) {
t.Parallel()
t.Run("empty", func(t *testing.T) {
t.Parallel()
require.Equal(t, emptyCache(), emptyCache().normalized())
})
t.Run("nonempty", func(t *testing.T) {
t.Parallel()
input := emptyCache()
now := time.Now()
oneMinuteAgo := metav1.NewTime(now.Add(-1 * time.Minute))
oneHourFromNow := metav1.NewTime(now.Add(1 * time.Hour))
input.Entries = []entry{
// Credential is nil.
{
Key: "nil-credential-key",
LastUsedTimestamp: metav1.NewTime(now),
Credential: nil,
},
// Credential's expiration is nil.
{
Key: "nil-expiration-key",
LastUsedTimestamp: metav1.NewTime(now),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{},
},
// Credential is expired.
{
Key: "expired-key",
LastUsedTimestamp: metav1.NewTime(now),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: &oneMinuteAgo,
Token: "expired-token",
},
},
// Credential is still valid but is older than maxCacheDuration.
{
Key: "too-old-key",
LastUsedTimestamp: metav1.NewTime(now),
CreationTimestamp: metav1.NewTime(now.Add(-3 * time.Hour)),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: &oneHourFromNow,
Token: "too-old-token",
},
},
// Two entries that are still valid but are out of order.
{
Key: "key-two",
CreationTimestamp: metav1.NewTime(now.Add(-1 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: &oneHourFromNow,
Token: "token-two",
},
},
{
Key: "key-one",
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: &oneHourFromNow,
Token: "token-one",
},
},
}
// Expect that everything except the final two valid entries is pruned, and that those two are sorted by creation time.
require.Equal(t, &credCache{
TypeMeta: metav1.TypeMeta{APIVersion: "config.supervisor.pinniped.dev/v1alpha1", Kind: "CredentialCache"},
Entries: []entry{
{
Key: "key-one",
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: &oneHourFromNow,
Token: "token-one",
},
},
{
Key: "key-two",
CreationTimestamp: metav1.NewTime(now.Add(-1 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: &oneHourFromNow,
Token: "token-two",
},
},
},
}, input.normalized())
})
}

View File

@@ -0,0 +1,159 @@
// Copyright 2021 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Package execcredcache implements a cache for Kubernetes ExecCredential data.
package execcredcache
import (
"context"
"crypto/sha256"
"encoding/hex"
"encoding/json"
"errors"
"fmt"
"os"
"path/filepath"
"time"
"github.com/gofrs/flock"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthenticationv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
)
const (
// defaultFileLockTimeout is how long we will wait trying to acquire the file lock on the cache file before timing out.
defaultFileLockTimeout = 10 * time.Second
// defaultFileLockRetryInterval is how often we will poll while waiting for the file lock to become available.
defaultFileLockRetryInterval = 10 * time.Millisecond
)
type Cache struct {
path string
errReporter func(error)
trylockFunc func() error
unlockFunc func() error
}
func New(path string) *Cache {
lock := flock.New(path + ".lock")
return &Cache{
path: path,
trylockFunc: func() error {
ctx, cancel := context.WithTimeout(context.Background(), defaultFileLockTimeout)
defer cancel()
_, err := lock.TryLockContext(ctx, defaultFileLockRetryInterval)
return err
},
unlockFunc: lock.Unlock,
errReporter: func(_ error) {},
}
}
func (c *Cache) Get(key interface{}) *clientauthenticationv1beta1.ExecCredential {
// If the cache file does not exist, exit immediately with no error log
if _, err := os.Stat(c.path); errors.Is(err, os.ErrNotExist) {
return nil
}
// Read the cache and look up the matching entry. If one exists, update its last-used timestamp and return it.
var result *clientauthenticationv1beta1.ExecCredential
cacheKey := jsonSHA256Hex(key)
c.withCache(func(cache *credCache) {
// Find the existing entry, if one exists
for i := range cache.Entries {
if cache.Entries[i].Key == cacheKey {
result = &clientauthenticationv1beta1.ExecCredential{
TypeMeta: metav1.TypeMeta{
Kind: "ExecCredential",
APIVersion: "client.authentication.k8s.io/v1beta1",
},
Status: cache.Entries[i].Credential,
}
// Update the last-used timestamp.
cache.Entries[i].LastUsedTimestamp = metav1.Now()
break
}
}
})
return result
}
func (c *Cache) Put(key interface{}, cred *clientauthenticationv1beta1.ExecCredential) {
// Create the cache directory if it does not exist.
if err := os.MkdirAll(filepath.Dir(c.path), 0700); err != nil && !errors.Is(err, os.ErrExist) {
c.errReporter(fmt.Errorf("could not create credential cache directory: %w", err))
return
}
// Mutate the cache to upsert the new entry.
cacheKey := jsonSHA256Hex(key)
c.withCache(func(cache *credCache) {
// Find the existing entry, if one exists
for i := range cache.Entries {
if cache.Entries[i].Key == cacheKey {
// Update the stored entry and return.
cache.Entries[i].Credential = cred.Status
cache.Entries[i].LastUsedTimestamp = metav1.Now()
return
}
}
// If there's not an entry for this key, insert one.
now := metav1.Now()
cache.Entries = append(cache.Entries, entry{
Key: cacheKey,
CreationTimestamp: now,
LastUsedTimestamp: now,
Credential: cred.Status,
})
})
}
func jsonSHA256Hex(key interface{}) string {
hash := sha256.New()
if err := json.NewEncoder(hash).Encode(key); err != nil {
panic(err)
}
return hex.EncodeToString(hash.Sum(nil))
}
// withCache is an internal helper which locks, reads the cache, processes/mutates it with the provided function, then
// saves it back to the file.
func (c *Cache) withCache(transact func(*credCache)) {
// Grab the file lock so we have exclusive access to read the file.
if err := c.trylockFunc(); err != nil {
c.errReporter(fmt.Errorf("could not lock cache file: %w", err))
return
}
// Unlock the file at the end of this call, reporting any unlock error through the error reporter.
defer func() {
if err := c.unlockFunc(); err != nil {
c.errReporter(fmt.Errorf("could not unlock cache file: %w", err))
}
}()
// Try to read the existing cache.
cache, err := readCache(c.path)
if err != nil {
// If that fails, fall back to resetting to a blank slate.
c.errReporter(fmt.Errorf("failed to read cache, resetting: %w", err))
cache = emptyCache()
}
// Normalize the cache before modifying it, to remove any entries that have already expired.
cache = cache.normalized()
// Process/mutate the cache using the provided function.
transact(cache)
// Normalize again to put everything into a known order.
cache = cache.normalized()
// Marshal the cache back to YAML and save it to the file.
if err := cache.writeTo(c.path); err != nil {
c.errReporter(fmt.Errorf("could not write cache: %w", err))
}
}
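Note that the key written to the cache file is never the raw key material: both Get and Put pass the key through jsonSHA256Hex, so credentials.yaml stores only a hex digest in its key field. A minimal standalone sketch of that derivation follows; the example package, hashKey, and demoKeys names are illustrative and not from this commit.

package example

import (
	"crypto/sha256"
	"encoding/hex"
	"encoding/json"
	"fmt"
)

// hashKey mirrors jsonSHA256Hex: JSON-encode the key, then hex-encode its SHA-256 digest.
func hashKey(key interface{}) string {
	hash := sha256.New()
	if err := json.NewEncoder(hash).Encode(key); err != nil {
		// Encoding can only fail for keys that cannot be marshaled to JSON.
		panic(err)
	}
	return hex.EncodeToString(hash.Sum(nil))
}

func demoKeys() {
	type cacheKey struct {
		Args []string `json:"args"`
	}
	// Identical arguments always hash to the same key; any change yields a different key,
	// so each cluster/flag combination gets its own cache entry.
	fmt.Println(hashKey(cacheKey{Args: []string{"login", "oidc", "--issuer", "https://example.com"}}))
	fmt.Println(hashKey(cacheKey{Args: []string{"login", "oidc", "--issuer", "https://other.example.com"}}))
}

Because the digest is stable for identical JSON encodings, a repeated invocation with the same arguments hits the same entry, while any change in arguments (or, for static logins, the token) produces a new one.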

View File

@@ -0,0 +1,389 @@
// Copyright 2021 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package execcredcache
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"strings"
"testing"
"time"
"github.com/stretchr/testify/require"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthenticationv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
"go.pinniped.dev/internal/testutil"
)
func TestNew(t *testing.T) {
t.Parallel()
tmp := testutil.TempDir(t) + "/credentials.yaml"
c := New(tmp)
require.NotNil(t, c)
require.Equal(t, tmp, c.path)
require.NotNil(t, c.errReporter)
c.errReporter(fmt.Errorf("some error"))
}
func TestGet(t *testing.T) {
t.Parallel()
now := time.Now().Round(1 * time.Second)
oneHourFromNow := metav1.NewTime(now.Add(1 * time.Hour))
type testKey struct{ K1, K2 string }
tests := []struct {
name string
makeTestFile func(t *testing.T, tmp string)
trylockFunc func(*testing.T) error
unlockFunc func(*testing.T) error
key testKey
want *clientauthenticationv1beta1.ExecCredential
wantErrors []string
wantTestFile func(t *testing.T, tmp string)
}{
{
name: "not found",
key: testKey{},
},
{
name: "file lock error",
makeTestFile: func(t *testing.T, tmp string) { require.NoError(t, ioutil.WriteFile(tmp, []byte(""), 0600)) },
trylockFunc: func(t *testing.T) error { return fmt.Errorf("some lock error") },
unlockFunc: func(t *testing.T) error { require.Fail(t, "should not be called"); return nil },
key: testKey{},
wantErrors: []string{"could not lock cache file: some lock error"},
},
{
name: "invalid file",
makeTestFile: func(t *testing.T, tmp string) {
require.NoError(t, ioutil.WriteFile(tmp, []byte("invalid yaml"), 0600))
},
key: testKey{},
wantErrors: []string{
"failed to read cache, resetting: invalid cache file: error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type execcredcache.credCache",
},
},
{
name: "invalid file, fail to unlock",
makeTestFile: func(t *testing.T, tmp string) { require.NoError(t, ioutil.WriteFile(tmp, []byte("invalid"), 0600)) },
trylockFunc: func(t *testing.T) error { return nil },
unlockFunc: func(t *testing.T) error { return fmt.Errorf("some unlock error") },
key: testKey{},
wantErrors: []string{
"failed to read cache, resetting: invalid cache file: error unmarshaling JSON: while decoding JSON: json: cannot unmarshal string into Go value of type execcredcache.credCache",
"could not unlock cache file: some unlock error",
},
},
{
name: "unreadable file",
makeTestFile: func(t *testing.T, tmp string) {
require.NoError(t, os.Mkdir(tmp, 0700))
},
key: testKey{},
wantErrors: []string{
"failed to read cache, resetting: could not read cache file: read TEMPFILE: is a directory",
"could not write cache: open TEMPFILE: is a directory",
},
},
{
name: "valid file but cache miss",
makeTestFile: func(t *testing.T, tmp string) {
validCache := emptyCache()
validCache.Entries = []entry{{
Key: jsonSHA256Hex(testKey{K1: "v3", K2: "v4"}),
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now.Add(-1 * time.Minute)),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
Token: "test-token",
ExpirationTimestamp: &oneHourFromNow,
},
}}
require.NoError(t, validCache.writeTo(tmp))
},
key: testKey{K1: "v1", K2: "v2"},
wantErrors: []string{},
},
{
name: "valid file but expired cache hit",
makeTestFile: func(t *testing.T, tmp string) {
validCache := emptyCache()
oneMinuteAgo := metav1.NewTime(now.Add(-1 * time.Minute))
validCache.Entries = []entry{{
Key: jsonSHA256Hex(testKey{K1: "v1", K2: "v2"}),
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now.Add(-1 * time.Minute)),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
Token: "test-token",
ExpirationTimestamp: &oneMinuteAgo,
},
}}
require.NoError(t, validCache.writeTo(tmp))
},
key: testKey{K1: "v1", K2: "v2"},
wantErrors: []string{},
},
{
name: "valid file with cache hit",
makeTestFile: func(t *testing.T, tmp string) {
validCache := emptyCache()
validCache.Entries = []entry{{
Key: jsonSHA256Hex(testKey{K1: "v1", K2: "v2"}),
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now.Add(-1 * time.Minute)),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
Token: "test-token",
ExpirationTimestamp: &oneHourFromNow,
},
}}
require.NoError(t, validCache.writeTo(tmp))
},
key: testKey{K1: "v1", K2: "v2"},
wantErrors: []string{},
want: &clientauthenticationv1beta1.ExecCredential{
TypeMeta: metav1.TypeMeta{
Kind: "ExecCredential",
APIVersion: "client.authentication.k8s.io/v1beta1",
},
Spec: clientauthenticationv1beta1.ExecCredentialSpec{},
Status: &clientauthenticationv1beta1.ExecCredentialStatus{
Token: "test-token",
ExpirationTimestamp: &oneHourFromNow,
},
},
wantTestFile: func(t *testing.T, tmp string) {
cache, err := readCache(tmp)
require.NoError(t, err)
require.Len(t, cache.Entries, 1)
require.Less(t, time.Since(cache.Entries[0].LastUsedTimestamp.Time).Nanoseconds(), (5 * time.Second).Nanoseconds())
},
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
tmp := testutil.TempDir(t) + "/sessions.yaml"
if tt.makeTestFile != nil {
tt.makeTestFile(t, tmp)
}
// Initialize a cache with a reporter that collects errors
errors := errorCollector{t: t}
c := New(tmp)
c.errReporter = errors.report
if tt.trylockFunc != nil {
c.trylockFunc = func() error { return tt.trylockFunc(t) }
}
if tt.unlockFunc != nil {
c.unlockFunc = func() error { return tt.unlockFunc(t) }
}
got := c.Get(tt.key)
require.Equal(t, tt.want, got)
errors.require(tt.wantErrors, "TEMPFILE", tmp)
if tt.wantTestFile != nil {
tt.wantTestFile(t, tmp)
}
})
}
}
func TestPutToken(t *testing.T) {
t.Parallel()
now := time.Now().Round(1 * time.Second)
type testKey struct{ K1, K2 string }
tests := []struct {
name string
makeTestFile func(t *testing.T, tmp string)
key testKey
cred *clientauthenticationv1beta1.ExecCredential
wantErrors []string
wantTestFile func(t *testing.T, tmp string)
}{
{
name: "fail to create directory",
makeTestFile: func(t *testing.T, tmp string) {
require.NoError(t, ioutil.WriteFile(filepath.Dir(tmp), []byte{}, 0600))
},
wantErrors: []string{
"could not create credential cache directory: mkdir TEMPDIR: not a directory",
},
},
{
name: "update to existing entry",
makeTestFile: func(t *testing.T, tmp string) {
validCache := emptyCache()
validCache.Entries = []entry{
{
Key: jsonSHA256Hex(testKey{K1: "v1", K2: "v2"}),
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now.Add(-1 * time.Minute)),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour)),
Token: "token-one",
},
},
// A second entry that was created more than maxCacheDuration ago, so normalized() will prune it.
{
Key: jsonSHA256Hex(testKey{K1: "v3", K2: "v4"}),
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Hour)),
LastUsedTimestamp: metav1.NewTime(now.Add(-1 * time.Hour)),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour)),
Token: "token-two",
},
},
}
require.NoError(t, os.MkdirAll(filepath.Dir(tmp), 0700))
require.NoError(t, validCache.writeTo(tmp))
},
key: testKey{K1: "v1", K2: "v2"},
cred: &clientauthenticationv1beta1.ExecCredential{
TypeMeta: metav1.TypeMeta{
Kind: "ExecCredential",
APIVersion: "client.authentication.k8s.io/v1beta1",
},
Status: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour)),
Token: "token-one",
},
},
wantTestFile: func(t *testing.T, tmp string) {
cache, err := readCache(tmp)
require.NoError(t, err)
require.Len(t, cache.Entries, 1)
require.Less(t, time.Since(cache.Entries[0].LastUsedTimestamp.Time).Nanoseconds(), (5 * time.Second).Nanoseconds())
require.Equal(t, &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour).Local()),
Token: "token-one",
}, cache.Entries[0].Credential)
},
},
{
name: "new entry",
makeTestFile: func(t *testing.T, tmp string) {
validCache := emptyCache()
validCache.Entries = []entry{
{
Key: jsonSHA256Hex(testKey{K1: "v3", K2: "v4"}),
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Minute)),
LastUsedTimestamp: metav1.NewTime(now.Add(-1 * time.Minute)),
Credential: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour)),
Token: "other-token",
},
},
}
require.NoError(t, os.MkdirAll(filepath.Dir(tmp), 0700))
require.NoError(t, validCache.writeTo(tmp))
},
key: testKey{K1: "v1", K2: "v2"},
cred: &clientauthenticationv1beta1.ExecCredential{
TypeMeta: metav1.TypeMeta{
Kind: "ExecCredential",
APIVersion: "client.authentication.k8s.io/v1beta1",
},
Status: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour)),
Token: "token-one",
},
},
wantTestFile: func(t *testing.T, tmp string) {
cache, err := readCache(tmp)
require.NoError(t, err)
require.Len(t, cache.Entries, 2)
require.Less(t, time.Since(cache.Entries[1].LastUsedTimestamp.Time).Nanoseconds(), (5 * time.Second).Nanoseconds())
require.Equal(t, &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour).Local()),
Token: "token-one",
}, cache.Entries[1].Credential)
},
},
{
name: "error writing cache",
makeTestFile: func(t *testing.T, tmp string) {
require.NoError(t, os.MkdirAll(tmp, 0700))
},
key: testKey{K1: "v1", K2: "v2"},
cred: &clientauthenticationv1beta1.ExecCredential{
TypeMeta: metav1.TypeMeta{
Kind: "ExecCredential",
APIVersion: "client.authentication.k8s.io/v1beta1",
},
Status: &clientauthenticationv1beta1.ExecCredentialStatus{
ExpirationTimestamp: timePtr(now.Add(1 * time.Hour)),
Token: "token-one",
},
},
wantErrors: []string{
"failed to read cache, resetting: could not read cache file: read TEMPFILE: is a directory",
"could not write cache: open TEMPFILE: is a directory",
},
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()
tmp := testutil.TempDir(t) + "/cachedir/credentials.yaml"
if tt.makeTestFile != nil {
tt.makeTestFile(t, tmp)
}
// Initialize a cache with a reporter that collects errors
errors := errorCollector{t: t}
c := New(tmp)
c.errReporter = errors.report
c.Put(tt.key, tt.cred)
errors.require(tt.wantErrors, "TEMPFILE", tmp, "TEMPDIR", filepath.Dir(tmp))
if tt.wantTestFile != nil {
tt.wantTestFile(t, tmp)
}
})
}
}
func TestHashing(t *testing.T) {
type testKey struct{ K1, K2 string }
require.Equal(t, "38e0b9de817f645c4bec37c0d4a3e58baecccb040f5718dc069a72c7385a0bed", jsonSHA256Hex(nil))
require.Equal(t, "625bb1f93dc90a1bda400fdaceb8c96328e567a0c6aaf81e7fccc68958b4565d", jsonSHA256Hex([]string{"k1", "k2"}))
require.Equal(t, "8fb659f5dd266ffd8d0c96116db1d96fe10e3879f9cb6f7e9ace016696ff69f6", jsonSHA256Hex(testKey{K1: "v1", K2: "v2"}))
require.Equal(t, "42c783a2c29f91127b064df368bda61788181d2dd1709b417f9506102ea8da67", jsonSHA256Hex(testKey{K1: "v3", K2: "v4"}))
require.Panics(t, func() { jsonSHA256Hex(&unmarshalable{}) })
}
type errorCollector struct {
t *testing.T
saw []error
}
func (e *errorCollector) report(err error) {
e.saw = append(e.saw, err)
}
func (e *errorCollector) require(want []string, subs ...string) {
require.Len(e.t, e.saw, len(want))
for i, w := range want {
for i := 0; i < len(subs); i += 2 {
w = strings.ReplaceAll(w, subs[i], subs[i+1])
}
require.EqualError(e.t, e.saw[i], w)
}
}
func timePtr(from time.Time) *metav1.Time {
t := metav1.NewTime(from)
return &t
}
type unmarshalable struct{}
func (*unmarshalable) MarshalJSON() ([]byte, error) { return nil, fmt.Errorf("some MarshalJSON error") }

View File

@@ -0,0 +1 @@
invalid YAML

View File

@@ -0,0 +1,9 @@
apiVersion: config.supervisor.pinniped.dev/v1alpha1
kind: CredentialCache
credentials:
- key: "test-key"
creationTimestamp: "2020-10-20T18:42:07Z"
lastUsedTimestamp: "2020-10-20T18:45:31Z"
credential:
expirationTimestamp: "2020-10-20T19:46:30Z"
token: "test-token"

View File

@@ -0,0 +1,3 @@
apiVersion: config.supervisor.pinniped.dev/v2alpha6
kind: NotACredentialCache
credentials: []

View File

@@ -1,4 +1,4 @@
// Copyright 2020 the Pinniped contributors. All Rights Reserved.
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
// Package filesession implements a simple YAML file-based login.sessionCache.
@@ -137,9 +137,15 @@ func (c *Cache) withCache(transact func(*sessionCache)) {
cache = emptySessionCache()
}
// Normalize the cache before modifying it, to remove any entries that have already expired.
cache = cache.normalized()
// Process/mutate the session using the provided function.
transact(cache)
// Normalize again to put everything into a known order.
cache = cache.normalized()
// Marshal the session back to YAML and save it to the file.
if err := cache.writeTo(c.path); err != nil {
c.errReporter(fmt.Errorf("could not write session cache: %w", err))

View File

@@ -1,4 +1,4 @@
// Copyright 2020 the Pinniped contributors. All Rights Reserved.
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package filesession
@@ -125,6 +125,41 @@ func TestGetToken(t *testing.T) {
},
wantErrors: []string{},
},
{
name: "valid file but expired cache hit",
makeTestFile: func(t *testing.T, tmp string) {
validCache := emptySessionCache()
validCache.insert(sessionEntry{
Key: oidcclient.SessionCacheKey{
Issuer: "test-issuer",
ClientID: "test-client-id",
Scopes: []string{"email", "offline_access", "openid", "profile"},
RedirectURI: "http://localhost:0/callback",
},
CreationTimestamp: metav1.NewTime(now.Add(-2 * time.Hour)),
LastUsedTimestamp: metav1.NewTime(now.Add(-1 * time.Hour)),
Tokens: oidctypes.Token{
AccessToken: &oidctypes.AccessToken{
Token: "test-access-token",
Type: "Bearer",
Expiry: metav1.NewTime(now.Add(-1 * time.Hour)),
},
IDToken: &oidctypes.IDToken{
Token: "test-id-token",
Expiry: metav1.NewTime(now.Add(-1 * time.Hour)),
},
},
})
require.NoError(t, validCache.writeTo(tmp))
},
key: oidcclient.SessionCacheKey{
Issuer: "test-issuer",
ClientID: "test-client-id",
Scopes: []string{"email", "offline_access", "openid", "profile"},
RedirectURI: "http://localhost:0/callback",
},
wantErrors: []string{},
},
{
name: "valid file with cache hit",
makeTestFile: func(t *testing.T, tmp string) {
@@ -261,6 +296,33 @@ func TestPutToken(t *testing.T) {
},
},
})
// Insert another entry that hasn't been used for over 90 days.
validCache.insert(sessionEntry{
Key: oidcclient.SessionCacheKey{
Issuer: "test-issuer-2",
ClientID: "test-client-id-2",
Scopes: []string{"email", "offline_access", "openid", "profile"},
RedirectURI: "http://localhost:0/callback",
},
CreationTimestamp: metav1.NewTime(now.Add(-95 * 24 * time.Hour)),
LastUsedTimestamp: metav1.NewTime(now.Add(-91 * 24 * time.Hour)),
Tokens: oidctypes.Token{
AccessToken: &oidctypes.AccessToken{
Token: "old-access-token2",
Type: "Bearer",
Expiry: metav1.NewTime(now.Add(-1 * time.Hour)),
},
IDToken: &oidctypes.IDToken{
Token: "old-id-token2",
Expiry: metav1.NewTime(now.Add(-1 * time.Hour)),
},
RefreshToken: &oidctypes.RefreshToken{
Token: "old-refresh-token2",
},
},
})
require.NoError(t, os.MkdirAll(filepath.Dir(tmp), 0700))
require.NoError(t, validCache.writeTo(tmp))
},