Merge branch 'main' into main
commit 225bbdd36b
@@ -8,22 +8,23 @@
package main
import (
"fmt"
"os"
"path/filepath"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/klog/v2"
concierge "go.pinniped.dev/internal/concierge/server"
// this side effect import ensures that we use fipsonly crypto in fips_strict mode.
concierge "go.pinniped.dev/internal/concierge/server"
_ "go.pinniped.dev/internal/crypto/ptls"
lua "go.pinniped.dev/internal/localuserauthenticator"
"go.pinniped.dev/internal/plog"
supervisor "go.pinniped.dev/internal/supervisor/server"
)
// nolint: gochecknoglobals // these are swapped during unit tests.
var (
fail = klog.Fatalf
fail = plog.Fatal
subcommands = map[string]func(){
"pinniped-concierge": concierge.Main,
"pinniped-supervisor": supervisor.Main,
@@ -33,11 +34,11 @@ var (
func main() {
if len(os.Args) == 0 {
fail("missing os.Args")
fail(fmt.Errorf("missing os.Args"))
}
binary := filepath.Base(os.Args[0])
if subcommands[binary] == nil {
fail("must be invoked as one of %v, not %q", sets.StringKeySet(subcommands).List(), binary)
fail(fmt.Errorf("must be invoked as one of %v, not %q", sets.StringKeySet(subcommands).List(), binary))
}
subcommands[binary]()
}
@@ -43,8 +43,11 @@ func TestEntrypoint(t *testing.T) {
var logBuf bytes.Buffer
testLog := log.New(&logBuf, "", 0)
exited := "exiting via fatal"
fail = func(format string, v ...interface{}) {
testLog.Printf(format, v...)
fail = func(err error, keysAndValues ...interface{}) {
testLog.Print(err)
if len(keysAndValues) > 0 {
testLog.Print(keysAndValues...)
}
panic(exited)
}
@@ -11,7 +11,6 @@ import (
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"os"
"strconv"
@@ -19,8 +18,6 @@ import (
"time"
"github.com/coreos/go-oidc/v3/oidc"
"github.com/go-logr/logr"
"github.com/go-logr/stdr"
"github.com/spf13/cobra"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthenticationv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
@@ -34,19 +31,20 @@ import (
conciergeclientset "go.pinniped.dev/generated/latest/client/concierge/clientset/versioned"
"go.pinniped.dev/internal/groupsuffix"
"go.pinniped.dev/internal/net/phttp"
"go.pinniped.dev/internal/plog"
)
type kubeconfigDeps struct {
getPathToSelf func() (string, error)
getClientset getConciergeClientsetFunc
log logr.Logger
log plog.MinLogger
}
func kubeconfigRealDeps() kubeconfigDeps {
return kubeconfigDeps{
getPathToSelf: os.Executable,
getClientset: getRealConciergeClientset,
log: stdr.New(log.New(os.Stderr, "", 0)),
log: plog.New(),
}
}
@@ -175,6 +173,11 @@ func runGetKubeconfig(ctx context.Context, out io.Writer, deps kubeconfigDeps, f
ctx, cancel := context.WithTimeout(ctx, flags.timeout)
defer cancel()
// the log statements in this file assume that Info logs are unconditionally printed so we set the global level to info
if err := plog.ValidateAndSetLogLevelAndFormatGlobally(ctx, plog.LogSpec{Level: plog.LevelInfo, Format: plog.FormatCLI}); err != nil {
return err
}
// Validate api group suffix and immediately return an error if it is invalid.
if err := groupsuffix.Validate(flags.concierge.apiGroupSuffix); err != nil {
return fmt.Errorf("invalid API group suffix: %w", err)
@@ -233,7 +236,7 @@ func runGetKubeconfig(ctx context.Context, out io.Writer, deps kubeconfigDeps, f
// When all the upstream IDP flags are set by the user, then skip discovery and don't validate their input. Maybe they know something
// that we can't know, like the name of an IDP that they are going to define in the future.
if len(flags.oidc.issuer) > 0 && (flags.oidc.upstreamIDPType == "" || flags.oidc.upstreamIDPName == "" || flags.oidc.upstreamIDPFlow == "") {
if err := discoverSupervisorUpstreamIDP(ctx, &flags); err != nil {
if err := discoverSupervisorUpstreamIDP(ctx, &flags, deps.log); err != nil {
return err
}
}
@@ -398,7 +401,7 @@ func waitForCredentialIssuer(ctx context.Context, clientset conciergeclientset.I
return credentialIssuer, nil
}
func discoverConciergeParams(credentialIssuer *configv1alpha1.CredentialIssuer, flags *getKubeconfigParams, v1Cluster *clientcmdapi.Cluster, log logr.Logger) error {
func discoverConciergeParams(credentialIssuer *configv1alpha1.CredentialIssuer, flags *getKubeconfigParams, v1Cluster *clientcmdapi.Cluster, log plog.MinLogger) error {
// Autodiscover the --concierge-mode.
frontend, err := getConciergeFrontend(credentialIssuer, flags.concierge.mode)
if err != nil {
@@ -446,7 +449,7 @@ func discoverConciergeParams(credentialIssuer *configv1alpha1.CredentialIssuer,
return nil
}
func logStrategies(credentialIssuer *configv1alpha1.CredentialIssuer, log logr.Logger) {
func logStrategies(credentialIssuer *configv1alpha1.CredentialIssuer, log plog.MinLogger) {
for _, strategy := range credentialIssuer.Status.Strategies {
log.Info("found CredentialIssuer strategy",
"type", strategy.Type,
@@ -457,7 +460,7 @@ func logStrategies(credentialIssuer *configv1alpha1.CredentialIssuer, log logr.L
}
}
func discoverAuthenticatorParams(authenticator metav1.Object, flags *getKubeconfigParams, log logr.Logger) error {
func discoverAuthenticatorParams(authenticator metav1.Object, flags *getKubeconfigParams, log plog.MinLogger) error {
switch auth := authenticator.(type) {
case *conciergev1alpha1.WebhookAuthenticator:
// If the --concierge-authenticator-type/--concierge-authenticator-name flags were not set explicitly, set
@@ -556,7 +559,7 @@ func newExecKubeconfig(cluster *clientcmdapi.Cluster, execConfig *clientcmdapi.E
}
}
func lookupCredentialIssuer(clientset conciergeclientset.Interface, name string, log logr.Logger) (*configv1alpha1.CredentialIssuer, error) {
func lookupCredentialIssuer(clientset conciergeclientset.Interface, name string, log plog.MinLogger) (*configv1alpha1.CredentialIssuer, error) {
ctx, cancelFunc := context.WithTimeout(context.Background(), time.Second*20)
defer cancelFunc()
@@ -582,7 +585,7 @@ func lookupCredentialIssuer(clientset conciergeclientset.Interface, name string,
return result, nil
}
func lookupAuthenticator(clientset conciergeclientset.Interface, authType, authName string, log logr.Logger) (metav1.Object, error) {
func lookupAuthenticator(clientset conciergeclientset.Interface, authType, authName string, log plog.MinLogger) (metav1.Object, error) {
ctx, cancelFunc := context.WithTimeout(context.Background(), time.Second*20)
defer cancelFunc()
@@ -643,7 +646,7 @@ func writeConfigAsYAML(out io.Writer, config clientcmdapi.Config) error {
return nil
}
func validateKubeconfig(ctx context.Context, flags getKubeconfigParams, kubeconfig clientcmdapi.Config, log logr.Logger) error {
func validateKubeconfig(ctx context.Context, flags getKubeconfigParams, kubeconfig clientcmdapi.Config, log plog.MinLogger) error {
if flags.skipValidate {
return nil
}
@@ -706,7 +709,7 @@ func validateKubeconfig(ctx context.Context, flags getKubeconfigParams, kubeconf
log.Info("validated connection to the cluster", "attempts", attempts)
return nil
}
log.Error(err, "could not connect to cluster, retrying...", "attempts", attempts, "remaining", time.Until(deadline).Round(time.Second).String())
log.Info("could not connect to cluster, retrying...", "error", err, "attempts", attempts, "remaining", time.Until(deadline).Round(time.Second).String())
}
}
}
@@ -726,7 +729,7 @@ func hasPendingStrategy(credentialIssuer *configv1alpha1.CredentialIssuer) bool
return false
}
func discoverSupervisorUpstreamIDP(ctx context.Context, flags *getKubeconfigParams) error {
func discoverSupervisorUpstreamIDP(ctx context.Context, flags *getKubeconfigParams, log plog.MinLogger) error {
httpClient, err := newDiscoveryHTTPClient(flags.oidc.caBundle)
if err != nil {
return err
@@ -758,7 +761,7 @@ func discoverSupervisorUpstreamIDP(ctx context.Context, flags *getKubeconfigPara
return err
}
selectedIDPFlow, err := selectUpstreamIDPFlow(discoveredIDPFlows, selectedIDPName, selectedIDPType, flags.oidc.upstreamIDPFlow)
selectedIDPFlow, err := selectUpstreamIDPFlow(discoveredIDPFlows, selectedIDPName, selectedIDPType, flags.oidc.upstreamIDPFlow, log)
if err != nil {
return err
}
@@ -898,7 +901,7 @@ func selectUpstreamIDPNameAndType(pinnipedIDPs []idpdiscoveryv1alpha1.PinnipedID
}
}
func selectUpstreamIDPFlow(discoveredIDPFlows []idpdiscoveryv1alpha1.IDPFlow, selectedIDPName string, selectedIDPType idpdiscoveryv1alpha1.IDPType, specifiedFlow string) (idpdiscoveryv1alpha1.IDPFlow, error) {
func selectUpstreamIDPFlow(discoveredIDPFlows []idpdiscoveryv1alpha1.IDPFlow, selectedIDPName string, selectedIDPType idpdiscoveryv1alpha1.IDPType, specifiedFlow string, log plog.MinLogger) (idpdiscoveryv1alpha1.IDPFlow, error) {
switch {
case len(discoveredIDPFlows) == 0:
// No flows listed by discovery means that we are talking to an old Supervisor from before this feature existed.
@@ -922,10 +925,9 @@ func selectUpstreamIDPFlow(discoveredIDPFlows []idpdiscoveryv1alpha1.IDPFlow, se
return discoveredIDPFlows[0], nil
default:
// The user did not specify a flow, and more than one was found.
return "", fmt.Errorf(
"multiple client flows for Supervisor upstream identity provider %q of type %q were found, "+
"so the --upstream-identity-provider-flow flag must be specified. "+
"Found these flows: %v",
selectedIDPName, selectedIDPType, discoveredIDPFlows)
log.Info("multiple client flows found, selecting first value as default",
"idpName", selectedIDPName, "idpType", selectedIDPType,
"selectedFlow", discoveredIDPFlows[0].String(), "availableFlows", discoveredIDPFlows)
return discoveredIDPFlows[0], nil
}
}
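Throughout this file the logr.Logger dependency is narrowed to plog.MinLogger, so the kubeconfig command only depends on the logging calls it actually makes and can be handed the project's own logger. A rough sketch of what such a minimal interface could look like, inferred only from the log.Info call sites in this diff (hypothetical; the real plog.MinLogger definition may declare more methods):

// MinLogger sketch, inferred from usage above; not copied from the Pinniped source.
type MinLogger interface {
	Info(msg string, keysAndValues ...interface{})
}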
@@ -1261,13 +1261,52 @@ func TestGetKubeconfig(t *testing.T) {
oidcDiscoveryResponse: happyOIDCDiscoveryResponse,
idpsDiscoveryResponse: here.Docf(`{
"pinniped_identity_providers": [
{"name": "some-oidc-idp", "type": "oidc", "flows": ["flow1", "flow2"]}
{"name": "some-ldap-idp", "type": "ldap", "flows": ["cli_password", "flow2"]}
]
}`),
wantError: true,
wantStderr: func(issuerCABundle string, issuerURL string) string {
return `Error: multiple client flows for Supervisor upstream identity provider "some-oidc-idp" of type "oidc" were found, so the --upstream-identity-provider-flow flag must be specified.` +
` Found these flows: [flow1 flow2]` + "\n"
wantStdout: func(issuerCABundle string, issuerURL string) string {
return here.Docf(`
apiVersion: v1
clusters:
- cluster:
certificate-authority-data: ZmFrZS1jZXJ0aWZpY2F0ZS1hdXRob3JpdHktZGF0YS12YWx1ZQ==
server: https://fake-server-url-value
name: kind-cluster-pinniped
contexts:
- context:
cluster: kind-cluster-pinniped
user: kind-user-pinniped
name: kind-context-pinniped
current-context: kind-context-pinniped
kind: Config
preferences: {}
users:
- name: kind-user-pinniped
user:
exec:
apiVersion: client.authentication.k8s.io/v1beta1
args:
- login
- oidc
- --issuer=%s
- --client-id=pinniped-cli
- --scopes=offline_access,openid,pinniped:request-audience
- --ca-bundle-data=%s
- --upstream-identity-provider-name=some-ldap-idp
- --upstream-identity-provider-type=ldap
- --upstream-identity-provider-flow=cli_password
command: '.../path/to/pinniped'
env: []
installHint: The pinniped CLI does not appear to be installed. See https://get.pinniped.dev/cli
for more details
provideClusterInfo: true
`,
issuerURL,
base64.StdEncoding.EncodeToString([]byte(issuerCABundle)))
},
wantLogs: func(_ string, _ string) []string {
return []string{`"level"=0 "msg"="multiple client flows found, selecting first value as default" ` +
`"availableFlows"=["cli_password","flow2"] "idpName"="some-ldap-idp" "idpType"="ldap" "selectedFlow"="cli_password"`}
},
},
{
@@ -1,4 +1,4 @@
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0
package cmd
@@ -20,7 +20,6 @@ import (
"github.com/spf13/cobra"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
"k8s.io/klog/v2/klogr"
idpdiscoveryv1alpha1 "go.pinniped.dev/generated/latest/apis/supervisor/idpdiscovery/v1alpha1"
"go.pinniped.dev/internal/execcredcache"
@@ -125,7 +124,7 @@ func oidcLoginCommand(deps oidcLoginCommandDeps) *cobra.Command {
}
func runOIDCLogin(cmd *cobra.Command, deps oidcLoginCommandDeps, flags oidcLoginFlags) error { //nolint:funlen
pLogger, err := SetLogLevel(deps.lookupEnv)
pLogger, err := SetLogLevel(cmd.Context(), deps.lookupEnv)
if err != nil {
plog.WarningErr("Received error while setting log level", err)
}
@@ -133,11 +132,11 @@ func runOIDCLogin(cmd *cobra.Command, deps oidcLoginCommandDeps, flags oidcLogin
// Initialize the session cache.
var sessionOptions []filesession.Option
// If the hidden --debug-session-cache option is passed, log all the errors from the session cache with klog.
// If the hidden --debug-session-cache option is passed, log all the errors from the session cache.
if flags.debugSessionCache {
logger := klogr.New().WithName("session")
logger := plog.WithName("session")
sessionOptions = append(sessionOptions, filesession.WithErrorReporter(func(err error) {
logger.Error(err, "error during session cache operation")
logger.Error("error during session cache operation", err)
}))
}
sessionCache := filesession.New(flags.sessionCachePath, sessionOptions...)
@@ -145,7 +144,7 @@ func runOIDCLogin(cmd *cobra.Command, deps oidcLoginCommandDeps, flags oidcLogin
// Initialize the login handler.
opts := []oidcclient.Option{
oidcclient.WithContext(cmd.Context()),
oidcclient.WithLogger(klogr.New()),
oidcclient.WithLogger(plog.Logr()), // nolint: staticcheck // old code with lots of log statements
oidcclient.WithScopes(flags.scopes),
oidcclient.WithSessionCache(sessionCache),
}
@@ -271,11 +270,11 @@ func flowOptions(requestedIDPType idpdiscoveryv1alpha1.IDPType, requestedFlow id
case idpdiscoveryv1alpha1.IDPFlowCLIPassword, "":
return useCLIFlow, nil
case idpdiscoveryv1alpha1.IDPFlowBrowserAuthcode:
fallthrough // not supported for LDAP providers, so fallthrough to error case
return nil, nil
default:
return nil, fmt.Errorf(
"--upstream-identity-provider-flow value not recognized for identity provider type %q: %s (supported values: %s)",
requestedIDPType, requestedFlow, []string{idpdiscoveryv1alpha1.IDPFlowCLIPassword.String()})
requestedIDPType, requestedFlow, strings.Join([]string{idpdiscoveryv1alpha1.IDPFlowCLIPassword.String(), idpdiscoveryv1alpha1.IDPFlowBrowserAuthcode.String()}, ", "))
}
default:
// Surprisingly cobra does not support this kind of flag validation. See https://github.com/spf13/pflag/issues/236
@@ -326,15 +325,15 @@ func tokenCredential(token *oidctypes.Token) *clientauthv1beta1.ExecCredential {
return &cred
}
func SetLogLevel(lookupEnv func(string) (string, bool)) (plog.Logger, error) {
func SetLogLevel(ctx context.Context, lookupEnv func(string) (string, bool)) (plog.Logger, error) {
debug, _ := lookupEnv("PINNIPED_DEBUG")
if debug == "true" {
err := plog.ValidateAndSetLogLevelGlobally(plog.LevelDebug)
err := plog.ValidateAndSetLogLevelAndFormatGlobally(ctx, plog.LogSpec{Level: plog.LevelDebug, Format: plog.FormatCLI})
if err != nil {
return nil, err
}
}
logger := plog.New("Pinniped login: ")
logger := plog.New().WithName("pinniped-login")
return logger, nil
}
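SetLogLevel now configures the global plog logger with plog.FormatCLI, so CLI log lines are rendered in a human-readable form (timestamp, logger name, caller location, message, key/value pairs) rather than the klog-style "level"=0 "msg"=... encoding; the updated wantLogs expectations in the tests below reflect that new format.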
@@ -10,18 +10,20 @@ import (
"fmt"
"io/ioutil"
"path/filepath"
"strings"
"testing"
"time"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
"k8s.io/klog/v2"
clocktesting "k8s.io/utils/clock/testing"
"go.pinniped.dev/internal/certauthority"
"go.pinniped.dev/internal/here"
"go.pinniped.dev/internal/plog"
"go.pinniped.dev/internal/testutil"
"go.pinniped.dev/internal/testutil/testlogger"
"go.pinniped.dev/pkg/conciergeclient"
"go.pinniped.dev/pkg/oidcclient"
"go.pinniped.dev/pkg/oidcclient/oidctypes"
@@ -38,6 +40,10 @@ func TestLoginOIDCCommand(t *testing.T) {
time1 := time.Date(3020, 10, 12, 13, 14, 15, 16, time.UTC)
now, err := time.Parse(time.RFC3339Nano, "2028-10-11T23:37:26.953313745Z")
require.NoError(t, err)
nowStr := now.Local().Format(time.RFC1123)
tests := []struct {
name string
args []string
@@ -235,18 +241,30 @@ func TestLoginOIDCCommand(t *testing.T) {
wantOptionsCount: 5,
wantStdout: `{"kind":"ExecCredential","apiVersion":"client.authentication.k8s.io/v1beta1","spec":{"interactive":false},"status":{"expirationTimestamp":"3020-10-12T13:14:15Z","token":"test-id-token"}}` + "\n",
},
{
name: "ldap upstream type with browser_authcode flow is allowed",
args: []string{
"--issuer", "test-issuer",
"--client-id", "test-client-id",
"--upstream-identity-provider-type", "ldap",
"--upstream-identity-provider-flow", "browser_authcode",
"--credential-cache", "", // must specify --credential-cache or else the cache file on disk causes test pollution
},
wantOptionsCount: 4,
wantStdout: `{"kind":"ExecCredential","apiVersion":"client.authentication.k8s.io/v1beta1","spec":{"interactive":false},"status":{"expirationTimestamp":"3020-10-12T13:14:15Z","token":"test-id-token"}}` + "\n",
},
{
name: "ldap upstream type with unsupported flow is an error",
args: []string{
"--issuer", "test-issuer",
"--client-id", "test-client-id",
"--upstream-identity-provider-type", "ldap",
"--upstream-identity-provider-flow", "browser_authcode", // "browser_authcode" is only supported for OIDC upstreams
"--upstream-identity-provider-flow", "foo",
"--credential-cache", "", // must specify --credential-cache or else the cache file on disk causes test pollution
},
wantError: true,
wantStderr: here.Doc(`
Error: --upstream-identity-provider-flow value not recognized for identity provider type "ldap": browser_authcode (supported values: [cli_password])
Error: --upstream-identity-provider-flow value not recognized for identity provider type "ldap": foo (supported values: cli_password, browser_authcode)
`),
},
{
@@ -261,18 +279,30 @@ func TestLoginOIDCCommand(t *testing.T) {
wantOptionsCount: 5,
wantStdout: `{"kind":"ExecCredential","apiVersion":"client.authentication.k8s.io/v1beta1","spec":{"interactive":false},"status":{"expirationTimestamp":"3020-10-12T13:14:15Z","token":"test-id-token"}}` + "\n",
},
{
name: "active directory upstream type with browser_authcode is allowed",
args: []string{
"--issuer", "test-issuer",
"--client-id", "test-client-id",
"--upstream-identity-provider-type", "activedirectory",
"--upstream-identity-provider-flow", "browser_authcode",
"--credential-cache", "", // must specify --credential-cache or else the cache file on disk causes test pollution
},
wantOptionsCount: 4,
wantStdout: `{"kind":"ExecCredential","apiVersion":"client.authentication.k8s.io/v1beta1","spec":{"interactive":false},"status":{"expirationTimestamp":"3020-10-12T13:14:15Z","token":"test-id-token"}}` + "\n",
},
{
name: "active directory upstream type with unsupported flow is an error",
args: []string{
"--issuer", "test-issuer",
"--client-id", "test-client-id",
"--upstream-identity-provider-type", "activedirectory",
"--upstream-identity-provider-flow", "browser_authcode", // "browser_authcode" is only supported for OIDC upstreams
"--upstream-identity-provider-flow", "foo",
"--credential-cache", "", // must specify --credential-cache or else the cache file on disk causes test pollution
},
wantError: true,
wantStderr: here.Doc(`
Error: --upstream-identity-provider-flow value not recognized for identity provider type "activedirectory": browser_authcode (supported values: [cli_password])
Error: --upstream-identity-provider-flow value not recognized for identity provider type "activedirectory": foo (supported values: cli_password, browser_authcode)
`),
},
{
@@ -318,8 +348,8 @@ func TestLoginOIDCCommand(t *testing.T) {
wantOptionsCount: 4,
wantStdout: `{"kind":"ExecCredential","apiVersion":"client.authentication.k8s.io/v1beta1","spec":{"interactive":false},"status":{"expirationTimestamp":"3020-10-12T13:14:15Z","token":"test-id-token"}}` + "\n",
wantLogs: []string{
"\"level\"=0 \"msg\"=\"Pinniped login: Performing OIDC login\" \"client id\"=\"test-client-id\" \"issuer\"=\"test-issuer\"",
"\"level\"=0 \"msg\"=\"Pinniped login: No concierge configured, skipping token credential exchange\"",
nowStr + ` pinniped-login cmd/login_oidc.go:222 Performing OIDC login {"issuer": "test-issuer", "client id": "test-client-id"}`,
nowStr + ` pinniped-login cmd/login_oidc.go:242 No concierge configured, skipping token credential exchange`,
},
},
{
@@ -348,18 +378,20 @@ func TestLoginOIDCCommand(t *testing.T) {
wantOptionsCount: 11,
wantStdout: `{"kind":"ExecCredential","apiVersion":"client.authentication.k8s.io/v1beta1","spec":{"interactive":false},"status":{"token":"exchanged-token"}}` + "\n",
wantLogs: []string{
"\"level\"=0 \"msg\"=\"Pinniped login: Performing OIDC login\" \"client id\"=\"test-client-id\" \"issuer\"=\"test-issuer\"",
"\"level\"=0 \"msg\"=\"Pinniped login: Exchanging token for cluster credential\" \"authenticator name\"=\"test-authenticator\" \"authenticator type\"=\"webhook\" \"endpoint\"=\"https://127.0.0.1:1234/\"",
"\"level\"=0 \"msg\"=\"Pinniped login: Successfully exchanged token for cluster credential.\"",
"\"level\"=0 \"msg\"=\"Pinniped login: caching cluster credential for future use.\"",
nowStr + ` pinniped-login cmd/login_oidc.go:222 Performing OIDC login {"issuer": "test-issuer", "client id": "test-client-id"}`,
nowStr + ` pinniped-login cmd/login_oidc.go:232 Exchanging token for cluster credential {"endpoint": "https://127.0.0.1:1234/", "authenticator type": "webhook", "authenticator name": "test-authenticator"}`,
nowStr + ` pinniped-login cmd/login_oidc.go:240 Successfully exchanged token for cluster credential.`,
nowStr + ` pinniped-login cmd/login_oidc.go:247 caching cluster credential for future use.`,
},
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
testLogger := testlogger.NewLegacy(t) //nolint: staticcheck // old test with lots of log statements
klog.SetLogger(testLogger.Logger)
var buf bytes.Buffer
fakeClock := clocktesting.NewFakeClock(now)
ctx := plog.TestZapOverrides(context.Background(), t, &buf, nil, zap.WithClock(plog.ZapClock(fakeClock)))
var (
gotOptions []oidcclient.Option
)
@@ -404,7 +436,7 @@ func TestLoginOIDCCommand(t *testing.T) {
cmd.SetOut(&stdout)
cmd.SetErr(&stderr)
cmd.SetArgs(tt.args)
err := cmd.Execute()
err = cmd.ExecuteContext(ctx)
if tt.wantError {
require.Error(t, err)
} else {
@@ -414,7 +446,15 @@ func TestLoginOIDCCommand(t *testing.T) {
require.Equal(t, tt.wantStderr, stderr.String(), "unexpected stderr")
require.Len(t, gotOptions, tt.wantOptionsCount)
require.Equal(t, tt.wantLogs, testLogger.Lines())
require.Equal(t, tt.wantLogs, logLines(buf.String()))
})
}
}
func logLines(logs string) []string {
if len(logs) == 0 {
return nil
}
return strings.Split(strings.TrimSpace(logs), "\n")
}
@@ -7,7 +7,6 @@ import (
"context"
"encoding/json"
"fmt"
"io"
"os"
"path/filepath"
"time"
@@ -75,7 +74,7 @@ func staticLoginCommand(deps staticLoginDeps) *cobra.Command {
cmd.Flags().StringVar(&flags.conciergeAPIGroupSuffix, "concierge-api-group-suffix", groupsuffix.PinnipedDefaultSuffix, "Concierge API group suffix")
cmd.Flags().StringVar(&flags.credentialCachePath, "credential-cache", filepath.Join(mustGetConfigDir(), "credentials.yaml"), "Path to cluster-specific credentials cache (\"\" disables the cache)")
cmd.RunE = func(cmd *cobra.Command, args []string) error { return runStaticLogin(cmd.OutOrStdout(), deps, flags) }
cmd.RunE = func(cmd *cobra.Command, args []string) error { return runStaticLogin(cmd, deps, flags) }
mustMarkDeprecated(cmd, "concierge-namespace", "not needed anymore")
mustMarkHidden(cmd, "concierge-namespace")
@@ -83,8 +82,9 @@ func staticLoginCommand(deps staticLoginDeps) *cobra.Command {
return cmd
}
func runStaticLogin(out io.Writer, deps staticLoginDeps, flags staticLoginParams) error {
pLogger, err := SetLogLevel(deps.lookupEnv)
func runStaticLogin(cmd *cobra.Command, deps staticLoginDeps, flags staticLoginParams) error {
out := cmd.OutOrStdout()
pLogger, err := SetLogLevel(cmd.Context(), deps.lookupEnv)
if err != nil {
plog.WarningErr("Received error while setting log level", err)
}
@@ -12,16 +12,15 @@ import (
"testing"
"time"
"k8s.io/klog/v2"
"go.pinniped.dev/internal/testutil/testlogger"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
clientauthv1beta1 "k8s.io/client-go/pkg/apis/clientauthentication/v1beta1"
clocktesting "k8s.io/utils/clock/testing"
"go.pinniped.dev/internal/certauthority"
"go.pinniped.dev/internal/here"
"go.pinniped.dev/internal/plog"
"go.pinniped.dev/internal/testutil"
"go.pinniped.dev/pkg/conciergeclient"
)
@@ -35,6 +34,10 @@ func TestLoginStaticCommand(t *testing.T) {
testCABundlePath := filepath.Join(tmpdir, "testca.pem")
require.NoError(t, ioutil.WriteFile(testCABundlePath, testCA.Bundle(), 0600))
now, err := time.Parse(time.RFC3339Nano, "2038-12-07T23:37:26.953313745Z")
require.NoError(t, err)
nowStr := now.Local().Format(time.RFC1123)
tests := []struct {
name string
args []string
@@ -136,7 +139,9 @@ func TestLoginStaticCommand(t *testing.T) {
wantStderr: here.Doc(`
Error: could not complete Concierge credential exchange: some concierge error
`),
wantLogs: []string{"\"level\"=0 \"msg\"=\"Pinniped login: exchanging static token for cluster credential\" \"authenticator name\"=\"test-authenticator\" \"authenticator type\"=\"webhook\" \"endpoint\"=\"https://127.0.0.1/\""},
wantLogs: []string{
nowStr + ` pinniped-login cmd/login_static.go:147 exchanging static token for cluster credential {"endpoint": "https://127.0.0.1/", "authenticator type": "webhook", "authenticator name": "test-authenticator"}`,
},
},
{
name: "invalid API group suffix",
@@ -165,8 +170,10 @@ func TestLoginStaticCommand(t *testing.T) {
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
testLogger := testlogger.NewLegacy(t) //nolint: staticcheck // old test with lots of log statements
klog.SetLogger(testLogger.Logger)
var buf bytes.Buffer
fakeClock := clocktesting.NewFakeClock(now)
ctx := plog.TestZapOverrides(context.Background(), t, &buf, nil, zap.WithClock(plog.ZapClock(fakeClock)))
cmd := staticLoginCommand(staticLoginDeps{
lookupEnv: func(s string) (string, bool) {
v, ok := tt.env[s]
@@ -194,7 +201,7 @@ func TestLoginStaticCommand(t *testing.T) {
cmd.SetOut(&stdout)
cmd.SetErr(&stderr)
cmd.SetArgs(tt.args)
err := cmd.Execute()
err := cmd.ExecuteContext(ctx)
if tt.wantError {
require.Error(t, err)
} else {
@@ -203,7 +210,7 @@ func TestLoginStaticCommand(t *testing.T) {
require.Equal(t, tt.wantStdout, stdout.String(), "unexpected stdout")
require.Equal(t, tt.wantStderr, stderr.String(), "unexpected stderr")
require.Equal(t, tt.wantLogs, testLogger.Lines())
require.Equal(t, tt.wantLogs, logLines(buf.String()))
})
}
}
@@ -4,9 +4,11 @@
package cmd
import (
"os"
"context"
"github.com/spf13/cobra"
"go.pinniped.dev/internal/plog"
)
// nolint: gochecknoglobals
@@ -19,8 +21,11 @@ var rootCmd = &cobra.Command{
// Execute adds all child commands to the root command and sets flags appropriately.
// This is called by main.main(). It only needs to happen once to the rootCmd.
func Execute() {
if err := rootCmd.Execute(); err != nil {
os.Exit(1)
func Execute() error {
defer plog.Setup()()
// the context does not matter here because it is unused when CLI formatting is provided
if err := plog.ValidateAndSetLogLevelAndFormatGlobally(context.Background(), plog.LogSpec{Format: plog.FormatCLI}); err != nil {
return err
}
return rootCmd.Execute()
}
@@ -21,5 +21,7 @@ func init() {
}
func main() {
cmd.Execute()
if err := cmd.Execute(); err != nil {
os.Exit(1)
}
}
@@ -86,8 +86,14 @@ data:
imagePullSecrets:
- image-pull-secret
(@ end @)
(@ if data.values.log_level or data.values.deprecated_log_format: @)
log:
(@ if data.values.log_level: @)
logLevel: (@= getAndValidateLogLevel() @)
level: (@= getAndValidateLogLevel() @)
(@ end @)
(@ if data.values.deprecated_log_format: @)
format: (@= data.values.deprecated_log_format @)
(@ end @)
(@ end @)
---
#@ if data.values.image_pull_dockerconfigjson and data.values.image_pull_dockerconfigjson != "":
@@ -54,6 +54,10 @@ api_serving_certificate_renew_before_seconds: 2160000
#! Specify the verbosity of logging: info ("nice to know" information), debug (developer
#! information), trace (timing information), all (kitchen sink).
log_level: #! By default, when this value is left unset, only warnings and errors are printed. There is no way to suppress warning and error logs.
#! Specify the format of logging: json (for machine parsable logs) and text (for legacy klog formatted logs).
#! By default, when this value is left unset, logs are formatted in json.
#! This configuration is deprecated and will be removed in a future release at which point logs will always be formatted as json.
deprecated_log_format:
run_as_user: 65532 #! run_as_user specifies the user ID that will own the process, see the Dockerfile for the reasoning behind this choice
run_as_group: 65532 #! run_as_group specifies the group ID that will own the process, see the Dockerfile for the reasoning behind this choice
@@ -54,8 +54,14 @@ _: #@ template.replace(data.values.custom_labels)
#@ "labels": labels(),
#@ "insecureAcceptExternalUnencryptedHttpRequests": data.values.deprecated_insecure_accept_external_unencrypted_http_requests
#@ }
#@ if data.values.log_level or data.values.deprecated_log_format:
#@ config["log"] = {}
#@ end
#@ if data.values.log_level:
#@ config["logLevel"] = getAndValidateLogLevel()
#@ config["log"]["level"] = getAndValidateLogLevel()
#@ end
#@ if data.values.deprecated_log_format:
#@ config["log"]["format"] = data.values.deprecated_log_format
#@ end
#@ if data.values.endpoints:
#@ config["endpoints"] = data.values.endpoints
@@ -57,6 +57,10 @@ service_loadbalancer_ip: #! e.g. 1.2.3.4
#! Specify the verbosity of logging: info ("nice to know" information), debug (developer information), trace (timing information),
#! or all (kitchen sink). Do not use trace or all on production systems, as credentials may get logged.
log_level: #! By default, when this value is left unset, only warnings and errors are printed. There is no way to suppress warning and error logs.
#! Specify the format of logging: json (for machine parsable logs) and text (for legacy klog formatted logs).
#! By default, when this value is left unset, logs are formatted in json.
#! This configuration is deprecated and will be removed in a future release at which point logs will always be formatted as json.
deprecated_log_format:
run_as_user: 65532 #! run_as_user specifies the user ID that will own the process, see the Dockerfile for the reasoning behind this choice
run_as_group: 65532 #! run_as_group specifies the group ID that will own the process, see the Dockerfile for the reasoning behind this choice
go.mod
@@ -45,6 +45,7 @@ require (
github.com/go-ldap/ldap/v3 v3.4.3
github.com/go-logr/logr v1.2.3
github.com/go-logr/stdr v1.2.2
github.com/go-logr/zapr v1.2.3
github.com/gofrs/flock v0.8.1
github.com/golang/mock v1.6.0
github.com/google/go-cmp v0.5.8
@@ -64,6 +65,7 @@ require (
github.com/stretchr/testify v1.7.1
github.com/tdewolff/minify/v2 v2.11.2
go.uber.org/atomic v1.9.0
go.uber.org/zap v1.21.0
golang.org/x/crypto v0.0.0-20220427172511-eb4f295cb31f
golang.org/x/net v0.0.0-20220425223048-2871e0cb64e4
golang.org/x/oauth2 v0.0.0-20220411215720-9780585627b5
@@ -168,7 +170,6 @@ require (
go.opentelemetry.io/otel/trace v1.6.3 // indirect
go.opentelemetry.io/proto/otlp v0.15.0 // indirect
go.uber.org/multierr v1.8.0 // indirect
go.uber.org/zap v1.21.0 // indirect
golang.org/x/mod v0.6.0-dev.0.20220106191415-9b9b3d81d5e3 // indirect
golang.org/x/sys v0.0.0-20220422013727-9388b58f7150 // indirect
golang.org/x/time v0.0.0-20220411224347-583f2d630306 // indirect
go.sum
@@ -299,6 +299,8 @@ github.com/go-logr/logr v1.2.3/go.mod h1:jdQByPbusPIv2/zmleS9BjJVeZ6kBagPoEUsqbV
github.com/go-logr/stdr v1.2.2 h1:hSWxHoqTgW2S2qGc0LTAI563KZ5YKYRhT3MFKZMbjag=
github.com/go-logr/stdr v1.2.2/go.mod h1:mMo/vtBO5dYbehREoey6XUKy/eSumjCCveDpRre4VKE=
github.com/go-logr/zapr v1.2.0/go.mod h1:Qa4Bsj2Vb+FAVeAKsLD8RLQ+YRJB8YDmOAKxaBQf7Ro=
github.com/go-logr/zapr v1.2.3 h1:a9vnzlIBPQBBkeaR9IuMUfmVOrQlkoC4YfPoFkX3T7A=
github.com/go-logr/zapr v1.2.3/go.mod h1:eIauM6P8qSvTw5o2ez6UEAfGjQKrxQTl5EoK+Qa2oG4=
github.com/go-openapi/analysis v0.0.0-20180825180245-b006789cd277/go.mod h1:k70tL6pCuVxPJOHXQ+wIac1FUrvNkHolPie/cLEU6hI=
github.com/go-openapi/analysis v0.17.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
github.com/go-openapi/analysis v0.18.0/go.mod h1:IowGgpVeD0vNm45So8nr+IcQ3pxVtpRoBWb8PVZO0ik=
@@ -25,11 +25,36 @@ set -euo pipefail
ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "$ROOT"
function log_error() {
RED='\033[0;31m'
NC='\033[0m'
if [[ ${COLORTERM:-unknown} =~ ^(truecolor|24bit)$ ]]; then
echo -e "🙁${RED} Error: $* ${NC}"
else
echo ":( Error: $*"
fi
}
use_oidc_upstream=no
use_ldap_upstream=no
use_ad_upstream=no
use_flow=""
while (("$#")); do
case "$1" in
--flow)
shift
# If there are no more command line arguments, or there is another command line argument but it starts with a dash, then error
if [[ "$#" == "0" || "$1" == -* ]]; then
log_error "--flow requires a flow name to be specified (e.g. cli_password or browser_authcode"
exit 1
fi
if [[ "$1" != "browser_authcode" && "$1" != "cli_password" ]]; then
log_error "--flow must be cli_password or browser_authcode"
exit 1
fi
use_flow=$1
shift
;;
--ldap)
use_ldap_upstream=yes
shift
@@ -56,7 +81,7 @@ while (("$#")); do
done
if [[ "$use_oidc_upstream" == "no" && "$use_ldap_upstream" == "no" && "$use_ad_upstream" == "no" ]]; then
echo "Error: Please use --oidc, --ldap, or --ad to specify which type of upstream identity provider(s) you would like"
log_error "Error: Please use --oidc, --ldap, or --ad to specify which type of upstream identity provider(s) you would like"
exit 1
fi
@@ -127,6 +152,7 @@ spec:
certificateAuthorityData: "$PINNIPED_TEST_SUPERVISOR_UPSTREAM_OIDC_ISSUER_CA_BUNDLE"
authorizationConfig:
additionalScopes: [ ${PINNIPED_TEST_SUPERVISOR_UPSTREAM_OIDC_ADDITIONAL_SCOPES} ]
allowPasswordGrant: true
claims:
username: "$PINNIPED_TEST_SUPERVISOR_UPSTREAM_OIDC_USERNAME_CLAIM"
groups: "$PINNIPED_TEST_SUPERVISOR_UPSTREAM_OIDC_GROUPS_CLAIM"
@@ -196,7 +222,7 @@ EOF
fi
if [[ "$use_ad_upstream" == "yes" ]]; then
# Make an ActiveDirectoryIdentityProvider.
# Make an ActiveDirectoryIdentityProvider. Needs to be pointed to a real AD server by env vars.
cat <<EOF | kubectl apply --namespace "$PINNIPED_TEST_SUPERVISOR_NAMESPACE" -f -
apiVersion: idp.supervisor.pinniped.dev/v1alpha1
kind: ActiveDirectoryIdentityProvider
@@ -256,7 +282,11 @@ while [[ -z "$(kubectl get credentialissuer pinniped-concierge-config -o=jsonpat
done
# Use the CLI to get the kubeconfig. Tell it that you don't want the browser to automatically open for logins.
https_proxy="$PINNIPED_TEST_PROXY" no_proxy="127.0.0.1" ./pinniped get kubeconfig --oidc-skip-browser >kubeconfig
flow_arg=""
if [[ -n "$use_flow" ]]; then
flow_arg="--upstream-identity-provider-flow $use_flow"
fi
https_proxy="$PINNIPED_TEST_PROXY" no_proxy="127.0.0.1" ./pinniped get kubeconfig --oidc-skip-browser $flow_arg >kubeconfig
# Clear the local CLI cache to ensure that the kubectl command below will need to perform a fresh login.
rm -f "$HOME/.config/pinniped/sessions.yaml"
@@ -265,25 +295,27 @@ rm -f "$HOME/.config/pinniped/credentials.yaml"
echo
echo "Ready! 🚀"
if [[ "$use_oidc_upstream" == "yes" ]]; then
if [[ "$use_oidc_upstream" == "yes" || "$use_flow" == "browser_authcode" ]]; then
echo
echo "To be able to access the login URL shown below, start Chrome like this:"
echo "  open -a \"Google Chrome\" --args --proxy-server=\"$PINNIPED_TEST_PROXY\""
echo "Then use these credentials at the Dex login page:"
echo "Note that Chrome must be fully quit before being started with --proxy-server."
echo "Then open the login URL shown below in that new Chrome window."
echo
echo "When prompted for username and password, use these values:"
fi
if [[ "$use_oidc_upstream" == "yes" ]]; then
echo "  Username: $PINNIPED_TEST_SUPERVISOR_UPSTREAM_OIDC_USERNAME"
echo "  Password: $PINNIPED_TEST_SUPERVISOR_UPSTREAM_OIDC_PASSWORD"
fi
if [[ "$use_ldap_upstream" == "yes" ]]; then
echo
echo "When prompted for username and password by the CLI, use these values:"
echo "  Username: $PINNIPED_TEST_LDAP_USER_CN"
echo "  Password: $PINNIPED_TEST_LDAP_USER_PASSWORD"
fi
if [[ "$use_ad_upstream" == "yes" ]]; then
echo
echo "When prompted for username and password by the CLI, use these values:"
echo "  Username: $PINNIPED_TEST_AD_USER_USER_PRINCIPAL_NAME"
echo "  Password: $PINNIPED_TEST_AD_USER_PASSWORD"
fi
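With the new --flow option this hack script can request a specific client flow for the generated kubeconfig, for example passing --ldap --flow browser_authcode makes it append --upstream-identity-provider-flow browser_authcode to the pinniped get kubeconfig invocation above.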
@@ -15,12 +15,11 @@ import (
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/apimachinery/pkg/runtime/serializer"
apimachineryversion "k8s.io/apimachinery/pkg/version"
genericapiserver "k8s.io/apiserver/pkg/server"
genericoptions "k8s.io/apiserver/pkg/server/options"
"k8s.io/client-go/pkg/version"
"k8s.io/client-go/rest"
"k8s.io/component-base/logs"
"k8s.io/klog/v2"
"go.pinniped.dev/internal/certauthority/dynamiccertauthority"
"go.pinniped.dev/internal/concierge/apiserver"
@@ -35,6 +34,7 @@ import (
"go.pinniped.dev/internal/here"
"go.pinniped.dev/internal/issuer"
"go.pinniped.dev/internal/kubeclient"
"go.pinniped.dev/internal/plog"
"go.pinniped.dev/internal/registry/credentialrequest"
)
@@ -100,7 +100,7 @@ func addCommandlineFlagsToCommand(cmd *cobra.Command, app *App) {
// Boot the aggregated API server, which will in turn boot the controllers.
func (a *App) runServer(ctx context.Context) error {
// Read the server config file.
cfg, err := concierge.FromPath(a.configPath)
cfg, err := concierge.FromPath(ctx, a.configPath)
if err != nil {
return fmt.Errorf("could not load config: %w", err)
}
@@ -250,16 +250,20 @@ func getAggregatedAPIServerConfig(
return apiServerConfig, nil
}
func main() error { // return an error instead of klog.Fatal to allow defer statements to run
logs.InitLogs()
defer logs.FlushLogs()
func main() error { // return an error instead of plog.Fatal to allow defer statements to run
defer plog.Setup()()
// Dump out the time since compile (mostly useful for benchmarking our local development cycle latency).
var timeSinceCompile time.Duration
if buildDate, err := time.Parse(time.RFC3339, version.Get().BuildDate); err == nil {
timeSinceCompile = time.Since(buildDate).Round(time.Second)
}
klog.Infof("Running %s at %#v (%s since build)", rest.DefaultKubernetesUserAgent(), version.Get(), timeSinceCompile)
plog.Always("Running concierge",
"user-agent", rest.DefaultKubernetesUserAgent(),
"version", versionInfo(version.Get()),
"time-since-build", timeSinceCompile,
)
ctx := genericapiserver.SetupSignalContext()
@@ -268,6 +272,8 @@ func main() error { // return an error instead of klog.Fatal to allow defer stat
func Main() {
if err := main(); err != nil {
klog.Fatal(err)
plog.Fatal(err)
}
}
type versionInfo apimachineryversion.Info // hide .String() method from plog
@@ -6,6 +6,7 @@
package concierge
import (
"context"
"fmt"
"io/ioutil"
"strings"
@@ -41,7 +42,7 @@ const (
// Note! The Config file should contain base64-encoded WebhookCABundle data.
// This function will decode that base64-encoded data to PEM bytes to be stored
// in the Config.
func FromPath(path string) (*Config, error) {
func FromPath(ctx context.Context, path string) (*Config, error) {
data, err := ioutil.ReadFile(path)
if err != nil {
return nil, fmt.Errorf("read file: %w", err)
@@ -78,7 +79,8 @@ func FromPath(path string) (*Config, error) {
return nil, fmt.Errorf("validate names: %w", err)
}
if err := plog.ValidateAndSetLogLevelGlobally(config.LogLevel); err != nil {
plog.MaybeSetDeprecatedLogLevel(config.LogLevel, &config.Log)
if err := plog.ValidateAndSetLogLevelAndFormatGlobally(ctx, config.Log); err != nil {
return nil, fmt.Errorf("validate log level: %w", err)
}
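The new plog.MaybeSetDeprecatedLogLevel call appears to bridge the old and new config shapes: when the legacy top-level logLevel value is set, it is presumably copied into the new Log spec before ValidateAndSetLogLevelAndFormatGlobally runs, which is consistent with the config tests below where logLevel: debug alone still yields Log.Level = plog.LevelDebug. (This reading is inferred from the call sites in this diff, not from the plog source.)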
@@ -4,6 +4,7 @@
package concierge
import (
"context"
"io/ioutil"
"os"
"testing"
@@ -90,9 +91,186 @@ func TestFromPath(t *testing.T) {
Image: pointer.StringPtr("kube-cert-agent-image"),
ImagePullSecrets: []string{"kube-cert-agent-image-pull-secret"},
},
LogLevel: plog.LevelDebug,
LogLevel: func(level plog.LogLevel) *plog.LogLevel { return &level }(plog.LevelDebug),
Log: plog.LogSpec{
Level: plog.LevelDebug,
},
},
},
{
name: "Fully filled out new log struct",
yaml: here.Doc(`
---
discovery:
url: https://some.discovery/url
api:
servingCertificate:
durationSeconds: 3600
renewBeforeSeconds: 2400
apiGroupSuffix: some.suffix.com
aggregatedAPIServerPort: 12345
impersonationProxyServerPort: 4242
names:
servingCertificateSecret: pinniped-concierge-api-tls-serving-certificate
credentialIssuer: pinniped-config
apiService: pinniped-api
kubeCertAgentPrefix: kube-cert-agent-prefix
impersonationLoadBalancerService: impersonationLoadBalancerService-value
impersonationClusterIPService: impersonationClusterIPService-value
impersonationTLSCertificateSecret: impersonationTLSCertificateSecret-value
impersonationCACertificateSecret: impersonationCACertificateSecret-value
impersonationSignerSecret: impersonationSignerSecret-value
impersonationSignerSecret: impersonationSignerSecret-value
agentServiceAccount: agentServiceAccount-value
extraName: extraName-value
labels:
myLabelKey1: myLabelValue1
myLabelKey2: myLabelValue2
kubeCertAgent:
namePrefix: kube-cert-agent-name-prefix-
image: kube-cert-agent-image
imagePullSecrets: [kube-cert-agent-image-pull-secret]
log:
level: all
format: json
`),
wantConfig: &Config{
DiscoveryInfo: DiscoveryInfoSpec{
URL: pointer.StringPtr("https://some.discovery/url"),
},
APIConfig: APIConfigSpec{
ServingCertificateConfig: ServingCertificateConfigSpec{
DurationSeconds: pointer.Int64Ptr(3600),
RenewBeforeSeconds: pointer.Int64Ptr(2400),
},
},
APIGroupSuffix: pointer.StringPtr("some.suffix.com"),
AggregatedAPIServerPort: pointer.Int64Ptr(12345),
ImpersonationProxyServerPort: pointer.Int64Ptr(4242),
NamesConfig: NamesConfigSpec{
ServingCertificateSecret: "pinniped-concierge-api-tls-serving-certificate",
CredentialIssuer: "pinniped-config",
APIService: "pinniped-api",
ImpersonationLoadBalancerService: "impersonationLoadBalancerService-value",
ImpersonationClusterIPService: "impersonationClusterIPService-value",
ImpersonationTLSCertificateSecret: "impersonationTLSCertificateSecret-value",
ImpersonationCACertificateSecret: "impersonationCACertificateSecret-value",
ImpersonationSignerSecret: "impersonationSignerSecret-value",
AgentServiceAccount: "agentServiceAccount-value",
},
Labels: map[string]string{
"myLabelKey1": "myLabelValue1",
"myLabelKey2": "myLabelValue2",
},
KubeCertAgentConfig: KubeCertAgentSpec{
NamePrefix: pointer.StringPtr("kube-cert-agent-name-prefix-"),
Image: pointer.StringPtr("kube-cert-agent-image"),
ImagePullSecrets: []string{"kube-cert-agent-image-pull-secret"},
},
Log: plog.LogSpec{
Level: plog.LevelAll,
Format: plog.FormatJSON,
},
},
},
{
name: "Fully filled out old log and new log struct",
yaml: here.Doc(`
---
discovery:
url: https://some.discovery/url
api:
servingCertificate:
durationSeconds: 3600
renewBeforeSeconds: 2400
apiGroupSuffix: some.suffix.com
aggregatedAPIServerPort: 12345
impersonationProxyServerPort: 4242
names:
servingCertificateSecret: pinniped-concierge-api-tls-serving-certificate
credentialIssuer: pinniped-config
apiService: pinniped-api
kubeCertAgentPrefix: kube-cert-agent-prefix
impersonationLoadBalancerService: impersonationLoadBalancerService-value
impersonationClusterIPService: impersonationClusterIPService-value
impersonationTLSCertificateSecret: impersonationTLSCertificateSecret-value
impersonationCACertificateSecret: impersonationCACertificateSecret-value
impersonationSignerSecret: impersonationSignerSecret-value
impersonationSignerSecret: impersonationSignerSecret-value
agentServiceAccount: agentServiceAccount-value
extraName: extraName-value
labels:
myLabelKey1: myLabelValue1
myLabelKey2: myLabelValue2
kubeCertAgent:
namePrefix: kube-cert-agent-name-prefix-
image: kube-cert-agent-image
imagePullSecrets: [kube-cert-agent-image-pull-secret]
logLevel: debug
log:
level: all
format: json
`),
wantConfig: &Config{
DiscoveryInfo: DiscoveryInfoSpec{
URL: pointer.StringPtr("https://some.discovery/url"),
},
APIConfig: APIConfigSpec{
ServingCertificateConfig: ServingCertificateConfigSpec{
DurationSeconds: pointer.Int64Ptr(3600),
RenewBeforeSeconds: pointer.Int64Ptr(2400),
},
},
APIGroupSuffix: pointer.StringPtr("some.suffix.com"),
AggregatedAPIServerPort: pointer.Int64Ptr(12345),
ImpersonationProxyServerPort: pointer.Int64Ptr(4242),
NamesConfig: NamesConfigSpec{
ServingCertificateSecret: "pinniped-concierge-api-tls-serving-certificate",
CredentialIssuer: "pinniped-config",
APIService: "pinniped-api",
ImpersonationLoadBalancerService: "impersonationLoadBalancerService-value",
ImpersonationClusterIPService: "impersonationClusterIPService-value",
ImpersonationTLSCertificateSecret: "impersonationTLSCertificateSecret-value",
ImpersonationCACertificateSecret: "impersonationCACertificateSecret-value",
ImpersonationSignerSecret: "impersonationSignerSecret-value",
AgentServiceAccount: "agentServiceAccount-value",
},
Labels: map[string]string{
"myLabelKey1": "myLabelValue1",
"myLabelKey2": "myLabelValue2",
},
KubeCertAgentConfig: KubeCertAgentSpec{
NamePrefix: pointer.StringPtr("kube-cert-agent-name-prefix-"),
Image: pointer.StringPtr("kube-cert-agent-image"),
ImagePullSecrets: []string{"kube-cert-agent-image-pull-secret"},
},
LogLevel: func(level plog.LogLevel) *plog.LogLevel { return &level }(plog.LevelDebug),
Log: plog.LogSpec{
Level: plog.LevelDebug,
Format: plog.FormatJSON,
},
},
},
{
name: "invalid log format",
yaml: here.Doc(`
---
names:
servingCertificateSecret: pinniped-concierge-api-tls-serving-certificate
credentialIssuer: pinniped-config
apiService: pinniped-api
impersonationLoadBalancerService: impersonationLoadBalancerService-value
impersonationClusterIPService: impersonationClusterIPService-value
impersonationTLSCertificateSecret: impersonationTLSCertificateSecret-value
impersonationCACertificateSecret: impersonationCACertificateSecret-value
impersonationSignerSecret: impersonationSignerSecret-value
agentServiceAccount: agentServiceAccount-value
log:
level: all
format: snorlax
`),
wantError: "decode yaml: error unmarshaling JSON: while decoding JSON: invalid log format, valid choices are the empty string, json and text",
},
{
name: "When only the required fields are present, causes other fields to be defaulted",
yaml: here.Doc(`
@@ -404,6 +582,8 @@ func TestFromPath(t *testing.T) {
for _, test := range tests {
test := test
t.Run(test.name, func(t *testing.T) {
// this is a serial test because it sets the global logger
// Write yaml to temp file
f, err := ioutil.TempFile("", "pinniped-test-config-yaml-*")
require.NoError(t, err)
@@ -417,7 +597,9 @@ func TestFromPath(t *testing.T) {
require.NoError(t, err)
// Test FromPath()
config, err := FromPath(f.Name())
ctx, cancel := context.WithCancel(context.Background())
t.Cleanup(cancel)
config, err := FromPath(ctx, f.Name())
if test.wantError != "" {
require.EqualError(t, err, test.wantError)
@@ -15,7 +15,9 @@ type Config struct {
NamesConfig NamesConfigSpec `json:"names"`
KubeCertAgentConfig KubeCertAgentSpec `json:"kubeCertAgent"`
Labels map[string]string `json:"labels"`
LogLevel plog.LogLevel `json:"logLevel"`
// Deprecated: use log.level instead
LogLevel *plog.LogLevel `json:"logLevel"`
Log plog.LogSpec `json:"log"`
}
// DiscoveryInfoSpec contains configuration knobs specific to
@@ -6,6 +6,7 @@
package supervisor
import (
"context"
"fmt"
"io/ioutil"
"net"
@@ -28,7 +29,7 @@ const (
// FromPath loads an Config from a provided local file path, inserts any
// defaults (from the Config documentation), and verifies that the config is
// valid (Config documentation).
func FromPath(path string) (*Config, error) {
func FromPath(ctx context.Context, path string) (*Config, error) {
data, err := ioutil.ReadFile(path)
if err != nil {
return nil, fmt.Errorf("read file: %w", err)
@@ -53,7 +54,8 @@ func FromPath(path string) (*Config, error) {
return nil, fmt.Errorf("validate names: %w", err)
}
if err := plog.ValidateAndSetLogLevelGlobally(config.LogLevel); err != nil {
plog.MaybeSetDeprecatedLogLevel(config.LogLevel, &config.Log)
if err := plog.ValidateAndSetLogLevelAndFormatGlobally(ctx, config.Log); err != nil {
return nil, fmt.Errorf("validate log level: %w", err)
}
@ -4,6 +4,7 @@
package supervisor

import (
"context"
"fmt"
"io/ioutil"
"os"
@ -13,6 +14,7 @@ import (
"k8s.io/utils/pointer"

"go.pinniped.dev/internal/here"
"go.pinniped.dev/internal/plog"
)

func TestFromPath(t *testing.T) {
@ -40,6 +42,7 @@ func TestFromPath(t *testing.T) {
|
||||
network: tcp
|
||||
address: 127.0.0.1:1234
|
||||
insecureAcceptExternalUnencryptedHttpRequests: false
|
||||
logLevel: trace
|
||||
`),
|
||||
wantConfig: &Config{
|
||||
APIGroupSuffix: pointer.StringPtr("some.suffix.com"),
|
||||
@ -61,8 +64,122 @@ func TestFromPath(t *testing.T) {
|
||||
},
|
||||
},
|
||||
AllowExternalHTTP: false,
|
||||
LogLevel: func(level plog.LogLevel) *plog.LogLevel { return &level }(plog.LevelTrace),
|
||||
Log: plog.LogSpec{
|
||||
Level: plog.LevelTrace,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Happy with new log field",
|
||||
yaml: here.Doc(`
|
||||
---
|
||||
apiGroupSuffix: some.suffix.com
|
||||
labels:
|
||||
myLabelKey1: myLabelValue1
|
||||
myLabelKey2: myLabelValue2
|
||||
names:
|
||||
defaultTLSCertificateSecret: my-secret-name
|
||||
endpoints:
|
||||
https:
|
||||
network: unix
|
||||
address: :1234
|
||||
http:
|
||||
network: tcp
|
||||
address: 127.0.0.1:1234
|
||||
insecureAcceptExternalUnencryptedHttpRequests: false
|
||||
log:
|
||||
level: info
|
||||
format: text
|
||||
`),
|
||||
wantConfig: &Config{
|
||||
APIGroupSuffix: pointer.StringPtr("some.suffix.com"),
|
||||
Labels: map[string]string{
|
||||
"myLabelKey1": "myLabelValue1",
|
||||
"myLabelKey2": "myLabelValue2",
|
||||
},
|
||||
NamesConfig: NamesConfigSpec{
|
||||
DefaultTLSCertificateSecret: "my-secret-name",
|
||||
},
|
||||
Endpoints: &Endpoints{
|
||||
HTTPS: &Endpoint{
|
||||
Network: "unix",
|
||||
Address: ":1234",
|
||||
},
|
||||
HTTP: &Endpoint{
|
||||
Network: "tcp",
|
||||
Address: "127.0.0.1:1234",
|
||||
},
|
||||
},
|
||||
AllowExternalHTTP: false,
|
||||
Log: plog.LogSpec{
|
||||
Level: plog.LevelInfo,
|
||||
Format: plog.FormatText,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "Happy with old and new log field",
|
||||
yaml: here.Doc(`
|
||||
---
|
||||
apiGroupSuffix: some.suffix.com
|
||||
labels:
|
||||
myLabelKey1: myLabelValue1
|
||||
myLabelKey2: myLabelValue2
|
||||
names:
|
||||
defaultTLSCertificateSecret: my-secret-name
|
||||
endpoints:
|
||||
https:
|
||||
network: unix
|
||||
address: :1234
|
||||
http:
|
||||
network: tcp
|
||||
address: 127.0.0.1:1234
|
||||
insecureAcceptExternalUnencryptedHttpRequests: false
|
||||
logLevel: trace
|
||||
log:
|
||||
level: info
|
||||
format: text
|
||||
`),
|
||||
wantConfig: &Config{
|
||||
APIGroupSuffix: pointer.StringPtr("some.suffix.com"),
|
||||
Labels: map[string]string{
|
||||
"myLabelKey1": "myLabelValue1",
|
||||
"myLabelKey2": "myLabelValue2",
|
||||
},
|
||||
NamesConfig: NamesConfigSpec{
|
||||
DefaultTLSCertificateSecret: "my-secret-name",
|
||||
},
|
||||
Endpoints: &Endpoints{
|
||||
HTTPS: &Endpoint{
|
||||
Network: "unix",
|
||||
Address: ":1234",
|
||||
},
|
||||
HTTP: &Endpoint{
|
||||
Network: "tcp",
|
||||
Address: "127.0.0.1:1234",
|
||||
},
|
||||
},
|
||||
AllowExternalHTTP: false,
|
||||
LogLevel: func(level plog.LogLevel) *plog.LogLevel { return &level }(plog.LevelTrace),
|
||||
Log: plog.LogSpec{
|
||||
Level: plog.LevelTrace,
|
||||
Format: plog.FormatText,
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
name: "bad log format",
|
||||
yaml: here.Doc(`
|
||||
---
|
||||
names:
|
||||
defaultTLSCertificateSecret: my-secret-name
|
||||
log:
|
||||
level: info
|
||||
format: cli
|
||||
`),
|
||||
wantError: "decode yaml: error unmarshaling JSON: while decoding JSON: invalid log format, valid choices are the empty string, json and text",
|
||||
},
|
||||
{
|
||||
name: "When only the required fields are present, causes other fields to be defaulted",
|
||||
yaml: here.Doc(`
|
||||
@ -307,7 +424,7 @@ func TestFromPath(t *testing.T) {
|
||||
for _, test := range tests {
|
||||
test := test
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
// this is a serial test because it sets the global logger
|
||||
|
||||
// Write yaml to temp file
|
||||
f, err := ioutil.TempFile("", "pinniped-test-config-yaml-*")
|
||||
@ -322,7 +439,9 @@ func TestFromPath(t *testing.T) {
|
||||
require.NoError(t, err)
|
||||
|
||||
// Test FromPath()
|
||||
config, err := FromPath(f.Name())
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
t.Cleanup(cancel)
|
||||
config, err := FromPath(ctx, f.Name())
|
||||
|
||||
if test.wantError != "" {
|
||||
require.EqualError(t, err, test.wantError)
|
||||
|
@ -14,7 +14,9 @@ type Config struct {
|
||||
APIGroupSuffix *string `json:"apiGroupSuffix,omitempty"`
|
||||
Labels map[string]string `json:"labels"`
|
||||
NamesConfig NamesConfigSpec `json:"names"`
|
||||
LogLevel plog.LogLevel `json:"logLevel"`
|
||||
// Deprecated: use log.level instead
|
||||
LogLevel *plog.LogLevel `json:"logLevel"`
|
||||
Log plog.LogSpec `json:"log"`
|
||||
Endpoints *Endpoints `json:"endpoints"`
|
||||
AllowExternalHTTP stringOrBoolAsBool `json:"insecureAcceptExternalUnencryptedHttpRequests"`
|
||||
}
|
||||
|
@ -32,6 +32,8 @@ type certsExpirerController struct {
|
||||
renewBefore time.Duration
|
||||
|
||||
secretKey string
|
||||
|
||||
logger plog.Logger
|
||||
}
|
||||
|
||||
// NewCertsExpirerController returns a controllerlib.Controller that will delete a
|
||||
@ -45,10 +47,12 @@ func NewCertsExpirerController(
|
||||
withInformer pinnipedcontroller.WithInformerOptionFunc,
|
||||
renewBefore time.Duration,
|
||||
secretKey string,
|
||||
logger plog.Logger,
|
||||
) controllerlib.Controller {
|
||||
const name = "certs-expirer-controller"
|
||||
return controllerlib.New(
|
||||
controllerlib.Config{
|
||||
Name: "certs-expirer-controller",
|
||||
Name: name,
|
||||
Syncer: &certsExpirerController{
|
||||
namespace: namespace,
|
||||
certsSecretResourceName: certsSecretResourceName,
|
||||
@ -56,6 +60,7 @@ func NewCertsExpirerController(
|
||||
secretInformer: secretInformer,
|
||||
renewBefore: renewBefore,
|
||||
secretKey: secretKey,
|
||||
logger: logger.WithName(name),
|
||||
},
|
||||
},
|
||||
withInformer(
|
||||
@ -74,7 +79,7 @@ func (c *certsExpirerController) Sync(ctx controllerlib.Context) error {
|
||||
return fmt.Errorf("failed to get %s/%s secret: %w", c.namespace, c.certsSecretResourceName, err)
|
||||
}
|
||||
if notFound {
|
||||
plog.Info("secret does not exist yet or was deleted",
|
||||
c.logger.Info("secret does not exist yet or was deleted",
|
||||
"controller", ctx.Name,
|
||||
"namespace", c.namespace,
|
||||
"name", c.certsSecretResourceName,
|
||||
@ -91,7 +96,7 @@ func (c *certsExpirerController) Sync(ctx controllerlib.Context) error {
|
||||
|
||||
certAge := time.Since(notBefore)
|
||||
renewDelta := certAge - c.renewBefore
|
||||
plog.Debug("found renew delta",
|
||||
c.logger.Debug("found renew delta",
|
||||
"controller", ctx.Name,
|
||||
"namespace", c.namespace,
|
||||
"name", c.certsSecretResourceName,
|
||||
|
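The controller now stores a named child logger (logger.WithName(name) above), which is what produces the "logger":"certs-expirer-controller" field asserted by the JSON wantLog expectations in the test hunk below. The following is a small illustration of that pattern using only the plog calls visible in this diff (TestLogger, WithName, Info); the imports it needs (bytes, testing, plog) match the ones the test file already adds, and the exact JSON output shape is whatever plog's encoder emits.

// Illustrative only; not part of the real test file.
func testNamedLoggerSketch(t *testing.T) {
	var buf bytes.Buffer
	logger := plog.TestLogger(t, &buf).WithName("certs-expirer-controller")
	logger.Info("secret does not exist yet or was deleted", "namespace", "some-namespace")
	t.Log(buf.String()) // each line carries a "logger":"certs-expirer-controller" field
}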
@ -4,12 +4,15 @@
|
||||
package apicerts
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/ecdsa"
|
||||
"crypto/elliptic"
|
||||
"crypto/rand"
|
||||
"crypto/x509"
|
||||
"errors"
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@ -24,6 +27,7 @@ import (
|
||||
kubetesting "k8s.io/client-go/testing"
|
||||
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
)
|
||||
|
||||
@ -101,6 +105,7 @@ func TestExpirerControllerFilters(t *testing.T) {
|
||||
withInformer.WithInformer,
|
||||
0, // renewBefore, not needed
|
||||
"", // not needed
|
||||
plog.TestLogger(t, io.Discard),
|
||||
)
|
||||
|
||||
unrelated := corev1.Secret{}
|
||||
@ -125,10 +130,12 @@ func TestExpirerControllerSync(t *testing.T) {
|
||||
fillSecretData func(*testing.T, map[string][]byte)
|
||||
configKubeAPIClient func(*kubernetesfake.Clientset)
|
||||
wantDelete bool
|
||||
wantLog string
|
||||
wantError string
|
||||
}{
|
||||
{
|
||||
name: "secret does not exist",
|
||||
wantLog: `{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"certs-expirer-controller","caller":"apicerts/certs_expirer.go:<line>$apicerts.(*certsExpirerController).Sync","message":"secret does not exist yet or was deleted","controller":"","namespace":"some-namespace","name":"some-resource-name","key":"some-awesome-key","renewBefore":"0s"}`,
|
||||
wantDelete: false,
|
||||
},
|
||||
{
|
||||
@ -251,6 +258,8 @@ func TestExpirerControllerSync(t *testing.T) {
|
||||
0,
|
||||
)
|
||||
|
||||
var log bytes.Buffer
|
||||
|
||||
c := NewCertsExpirerController(
|
||||
namespace,
|
||||
certsSecretResourceName,
|
||||
@ -259,6 +268,7 @@ func TestExpirerControllerSync(t *testing.T) {
|
||||
controllerlib.WithInformer,
|
||||
test.renewBefore,
|
||||
fakeTestKey,
|
||||
plog.TestLogger(t, &log),
|
||||
)
|
||||
|
||||
// Must start informers before calling TestRunSynchronously().
|
||||
@ -268,6 +278,9 @@ func TestExpirerControllerSync(t *testing.T) {
|
||||
err := controllerlib.TestSync(t, c, controllerlib.Context{
|
||||
Context: ctx,
|
||||
})
|
||||
if len(test.wantLog) > 0 {
|
||||
require.Equal(t, test.wantLog, strings.TrimSpace(log.String()))
|
||||
}
|
||||
if test.wantError != "" {
|
||||
require.EqualError(t, err, test.wantError)
|
||||
return
|
||||
|
@ -12,11 +12,11 @@ import (
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
"k8s.io/client-go/kubernetes"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/certauthority"
|
||||
pinnipedcontroller "go.pinniped.dev/internal/controller"
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -139,6 +139,6 @@ func (c *certsManagerController) Sync(ctx controllerlib.Context) error {
|
||||
return fmt.Errorf("could not create secret: %w", err)
|
||||
}
|
||||
|
||||
klog.Info("certsManagerController Sync successfully created secret")
|
||||
plog.Info("certsManagerController Sync successfully created secret")
|
||||
return nil
|
||||
}
|
||||
|
@ -8,11 +8,11 @@ import (
|
||||
|
||||
k8serrors "k8s.io/apimachinery/pkg/api/errors"
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
pinnipedcontroller "go.pinniped.dev/internal/controller"
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/dynamiccert"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
type certsObserverController struct {
|
||||
@ -55,7 +55,7 @@ func (c *certsObserverController) Sync(_ controllerlib.Context) error {
|
||||
return fmt.Errorf("failed to get %s/%s secret: %w", c.namespace, c.certsSecretResourceName, err)
|
||||
}
|
||||
if notFound {
|
||||
klog.Info("certsObserverController Sync found that the secret does not exist yet or was deleted")
|
||||
plog.Info("certsObserverController Sync found that the secret does not exist yet or was deleted")
|
||||
// The secret does not exist yet or was deleted.
|
||||
c.dynamicCertProvider.UnsetCertKeyContent()
|
||||
return nil
|
||||
@ -66,6 +66,6 @@ func (c *certsObserverController) Sync(_ controllerlib.Context) error {
|
||||
return fmt.Errorf("failed to set serving cert/key content from secret %s/%s: %w", c.namespace, c.certsSecretResourceName, err)
|
||||
}
|
||||
|
||||
klog.Info("certsObserverController Sync updated certs in the dynamic cert provider")
|
||||
plog.Info("certsObserverController Sync updated certs in the dynamic cert provider")
|
||||
return nil
|
||||
}
|
||||
|
@ -6,15 +6,15 @@ package conditionsutil
|
||||
import (
|
||||
"sort"
|
||||
|
||||
"github.com/go-logr/logr"
|
||||
"k8s.io/apimachinery/pkg/api/equality"
|
||||
v1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
|
||||
"go.pinniped.dev/generated/latest/apis/supervisor/idp/v1alpha1"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
// Merge merges conditions into conditionsToUpdate. It returns true if it merged any error conditions.
|
||||
func Merge(conditions []*v1alpha1.Condition, observedGeneration int64, conditionsToUpdate *[]v1alpha1.Condition, log logr.Logger) bool {
|
||||
func Merge(conditions []*v1alpha1.Condition, observedGeneration int64, conditionsToUpdate *[]v1alpha1.Condition, log plog.MinLogger) bool {
|
||||
hadErrorCondition := false
|
||||
for i := range conditions {
|
||||
cond := conditions[i].DeepCopy()
|
||||
|
@ -45,6 +45,7 @@ import (
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/dynamiccert"
|
||||
"go.pinniped.dev/internal/endpointaddr"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -132,8 +133,8 @@ func NewImpersonatorConfigController(
|
||||
impersonationSigningCertProvider: impersonationSigningCertProvider,
|
||||
impersonatorFunc: impersonatorFunc,
|
||||
tlsServingCertDynamicCertProvider: dynamiccert.NewServingCert("impersonation-proxy-serving-cert"),
|
||||
infoLog: log.V(2),
|
||||
debugLog: log.V(4),
|
||||
infoLog: log.V(plog.KlogLevelInfo),
|
||||
debugLog: log.V(plog.KlogLevelDebug),
|
||||
},
|
||||
},
|
||||
withInformer(credentialIssuerInformer,
|
||||
|
@ -43,8 +43,8 @@ import (
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/dynamiccert"
|
||||
"go.pinniped.dev/internal/kubeclient"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
"go.pinniped.dev/internal/testutil/testlogger"
|
||||
)
|
||||
|
||||
func TestImpersonatorConfigControllerOptions(t *testing.T) {
|
||||
@ -63,7 +63,6 @@ func TestImpersonatorConfigControllerOptions(t *testing.T) {
|
||||
var credIssuerInformerFilter controllerlib.Filter
|
||||
var servicesInformerFilter controllerlib.Filter
|
||||
var secretsInformerFilter controllerlib.Filter
|
||||
var testLog *testlogger.Logger
|
||||
|
||||
it.Before(func() {
|
||||
r = require.New(t)
|
||||
@ -73,7 +72,6 @@ func TestImpersonatorConfigControllerOptions(t *testing.T) {
|
||||
credIssuerInformer := pinnipedInformerFactory.Config().V1alpha1().CredentialIssuers()
|
||||
servicesInformer := sharedInformerFactory.Core().V1().Services()
|
||||
secretsInformer := sharedInformerFactory.Core().V1().Secrets()
|
||||
testLog = testlogger.New(t)
|
||||
|
||||
_ = NewImpersonatorConfigController(
|
||||
installedInNamespace,
|
||||
@ -94,7 +92,7 @@ func TestImpersonatorConfigControllerOptions(t *testing.T) {
|
||||
nil,
|
||||
caSignerName,
|
||||
nil,
|
||||
testLog.Logger,
|
||||
plog.Logr(), // nolint: staticcheck // old test with no log assertions
|
||||
)
|
||||
credIssuerInformerFilter = observableWithInformerOption.GetFilterForInformer(credIssuerInformer)
|
||||
servicesInformerFilter = observableWithInformerOption.GetFilterForInformer(servicesInformer)
|
||||
@ -292,7 +290,6 @@ func TestImpersonatorConfigControllerSync(t *testing.T) {
|
||||
var testHTTPServerInterruptCh chan struct{}
|
||||
var queue *testQueue
|
||||
var validClientCert *tls.Certificate
|
||||
var testLog *testlogger.Logger
|
||||
|
||||
var impersonatorFunc = func(
|
||||
port int,
|
||||
@ -563,7 +560,7 @@ func TestImpersonatorConfigControllerSync(t *testing.T) {
|
||||
impersonatorFunc,
|
||||
caSignerName,
|
||||
signingCertProvider,
|
||||
testLog.Logger,
|
||||
plog.Logr(), // nolint: staticcheck // old test with no log assertions
|
||||
)
|
||||
controllerlib.TestWrap(t, subject, func(syncer controllerlib.Syncer) controllerlib.Syncer {
|
||||
tlsServingCertDynamicCertProvider = syncer.(*impersonatorConfigController).tlsServingCertDynamicCertProvider
|
||||
@ -1120,7 +1117,6 @@ func TestImpersonatorConfigControllerSync(t *testing.T) {
|
||||
signingCASecret = newSigningKeySecret(caSignerName, signingCACertPEM, signingCAKeyPEM)
|
||||
validClientCert, err = ca.IssueClientCert("username", nil, time.Hour)
|
||||
r.NoError(err)
|
||||
testLog = testlogger.New(t)
|
||||
})
|
||||
|
||||
it.After(func() {
|
||||
|
@ -28,7 +28,6 @@ import (
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
"k8s.io/client-go/tools/clientcmd"
|
||||
"k8s.io/klog/v2"
|
||||
"k8s.io/klog/v2/klogr"
|
||||
"k8s.io/utils/clock"
|
||||
"k8s.io/utils/pointer"
|
||||
|
||||
@ -39,6 +38,7 @@ import (
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/dynamiccert"
|
||||
"go.pinniped.dev/internal/kubeclient"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -179,7 +179,7 @@ func NewAgentController(
|
||||
dynamicCertProvider,
|
||||
&clock.RealClock{},
|
||||
cache.NewExpiring(),
|
||||
klogr.New(),
|
||||
plog.Logr(), // nolint: staticcheck // old controller with lots of log statements
|
||||
)
|
||||
}
|
||||
|
||||
|
@ -4,8 +4,10 @@
|
||||
package kubecertagent
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
@ -35,8 +37,8 @@ import (
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/here"
|
||||
"go.pinniped.dev/internal/kubeclient"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
"go.pinniped.dev/internal/testutil/testlogger"
|
||||
"go.pinniped.dev/test/testlib"
|
||||
)
|
||||
|
||||
@ -339,7 +341,7 @@ func TestAgentController(t *testing.T) {
|
||||
"could not ensure agent deployment: some creation error",
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="creating new deployment" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"creating new deployment","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
wantStrategy: &configv1alpha1.CredentialIssuerStrategy{
|
||||
Type: configv1alpha1.KubeClusterSigningCertificateStrategyType,
|
||||
@ -386,7 +388,7 @@ func TestAgentController(t *testing.T) {
|
||||
`could not ensure agent deployment: deployments.apps "pinniped-concierge-kube-cert-agent" already exists`,
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="creating new deployment" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"creating new deployment","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
wantAgentDeployment: healthyAgentDeployment,
|
||||
wantDeploymentActionVerbs: []string{"list", "watch", "create"},
|
||||
@ -435,7 +437,7 @@ func TestAgentController(t *testing.T) {
|
||||
`could not ensure agent deployment: deployments.apps "pinniped-concierge-kube-cert-agent" already exists`,
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="creating new deployment" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"creating new deployment","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
wantAgentDeployment: healthyAgentDeploymentWithDefaultedPaths,
|
||||
wantDeploymentActionVerbs: []string{"list", "watch", "create"},
|
||||
@ -461,8 +463,8 @@ func TestAgentController(t *testing.T) {
|
||||
"could not find a healthy agent pod (1 candidate)",
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="deleting deployment to update immutable Selector field" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`kube-cert-agent-controller "level"=0 "msg"="creating new deployment to update immutable Selector field" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"deleting deployment to update immutable Selector field","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"creating new deployment to update immutable Selector field","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
wantAgentDeployment: healthyAgentDeployment,
|
||||
wantDeploymentActionVerbs: []string{"list", "watch", "delete", "create"}, // must recreate deployment when Selector field changes
|
||||
@ -496,7 +498,7 @@ func TestAgentController(t *testing.T) {
|
||||
"could not ensure agent deployment: some delete error",
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="deleting deployment to update immutable Selector field" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"deleting deployment to update immutable Selector field","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
wantAgentDeployment: healthyAgentDeploymentWithOldStyleSelector, // couldn't be deleted, so it didn't change
|
||||
// delete to try to recreate deployment when Selector field changes, but delete always fails, so keeps trying to delete
|
||||
@ -532,9 +534,9 @@ func TestAgentController(t *testing.T) {
|
||||
"could not ensure agent deployment: some create error",
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="deleting deployment to update immutable Selector field" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`kube-cert-agent-controller "level"=0 "msg"="creating new deployment to update immutable Selector field" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`kube-cert-agent-controller "level"=0 "msg"="creating new deployment" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"deleting deployment to update immutable Selector field","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"creating new deployment to update immutable Selector field","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"creating new deployment","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
wantAgentDeployment: nil, // was deleted, but couldn't be recreated
|
||||
// delete to try to recreate deployment when Selector field changes, but create always fails, so keeps trying to recreate
|
||||
@ -584,7 +586,7 @@ func TestAgentController(t *testing.T) {
|
||||
"could not find a healthy agent pod (1 candidate)",
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="updating existing deployment" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"updating existing deployment","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
wantAgentDeployment: healthyAgentDeploymentWithExtraLabels,
|
||||
wantDeploymentActionVerbs: []string{"list", "watch", "update"},
|
||||
@ -619,7 +621,7 @@ func TestAgentController(t *testing.T) {
|
||||
LastUpdateTime: metav1.NewTime(now),
|
||||
},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="updating existing deployment" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"updating existing deployment","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
},
|
||||
},
|
||||
{
|
||||
@ -931,8 +933,8 @@ func TestAgentController(t *testing.T) {
|
||||
// delete to try to recreate deployment when Selector field changes, but delete always fails, so keeps trying to delete
|
||||
wantDeploymentActionVerbs: []string{"list", "watch", "delete", "delete"},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="deleting deployment to update immutable Selector field" "deployment"={"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"} "templatePod"={"name":"kube-controller-manager-1","namespace":"kube-system"}`,
|
||||
`kube-cert-agent-controller "level"=0 "msg"="successfully loaded signing key from agent pod into cache"`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).createOrUpdateDeployment","message":"deleting deployment to update immutable Selector field","deployment":{"name":"pinniped-concierge-kube-cert-agent","namespace":"concierge"},"templatePod":{"name":"kube-controller-manager-1","namespace":"kube-system"}}`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).loadSigningKey","message":"successfully loaded signing key from agent pod into cache"}`,
|
||||
},
|
||||
wantDeploymentDeleteActionOpts: []metav1.DeleteOptions{
|
||||
testutil.NewPreconditions(healthyAgentDeploymentWithOldStyleSelector.UID, healthyAgentDeploymentWithOldStyleSelector.ResourceVersion),
|
||||
@ -962,7 +964,7 @@ func TestAgentController(t *testing.T) {
|
||||
wantAgentDeployment: healthyAgentDeployment,
|
||||
wantDeploymentActionVerbs: []string{"list", "watch"},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="successfully loaded signing key from agent pod into cache"`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).loadSigningKey","message":"successfully loaded signing key from agent pod into cache"}`,
|
||||
},
|
||||
wantStrategy: &configv1alpha1.CredentialIssuerStrategy{
|
||||
Type: configv1alpha1.KubeClusterSigningCertificateStrategyType,
|
||||
@ -996,7 +998,7 @@ func TestAgentController(t *testing.T) {
|
||||
wantAgentDeployment: healthyAgentDeployment,
|
||||
wantDeploymentActionVerbs: []string{"list", "watch"},
|
||||
wantDistinctLogs: []string{
|
||||
`kube-cert-agent-controller "level"=0 "msg"="successfully loaded signing key from agent pod into cache"`,
|
||||
`{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"kube-cert-agent-controller","caller":"kubecertagent/kubecertagent.go:<line>$kubecertagent.(*agentController).loadSigningKey","message":"successfully loaded signing key from agent pod into cache"}`,
|
||||
},
|
||||
wantStrategy: &configv1alpha1.CredentialIssuerStrategy{
|
||||
Type: configv1alpha1.KubeClusterSigningCertificateStrategyType,
|
||||
@ -1028,7 +1030,9 @@ func TestAgentController(t *testing.T) {
|
||||
}
|
||||
|
||||
kubeInformers := informers.NewSharedInformerFactory(kubeClientset, 0)
|
||||
log := testlogger.NewLegacy(t) // nolint: staticcheck // old test with lots of log statements
|
||||
|
||||
var buf bytes.Buffer
|
||||
log := plog.TestZapr(t, &buf)
|
||||
|
||||
ctrl := gomock.NewController(t)
|
||||
defer ctrl.Finish()
|
||||
@ -1066,7 +1070,7 @@ func TestAgentController(t *testing.T) {
|
||||
mockDynamicCert,
|
||||
fakeClock,
|
||||
execCache,
|
||||
log.Logger,
|
||||
log,
|
||||
)
|
||||
|
||||
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
|
||||
@ -1081,7 +1085,7 @@ func TestAgentController(t *testing.T) {
|
||||
allAllowedErrors = append(allAllowedErrors, tt.alsoAllowUndesiredDistinctErrors...)
|
||||
assert.Subsetf(t, allAllowedErrors, actualErrors, "actual errors contained additional error(s) which is not expected by the test")
|
||||
|
||||
assert.Equal(t, tt.wantDistinctLogs, deduplicate(log.Lines()), "unexpected logs")
|
||||
assert.Equal(t, tt.wantDistinctLogs, deduplicate(logLines(buf.String())), "unexpected logs")
|
||||
|
||||
// Assert on all actions that happened to deployments.
|
||||
var actualDeploymentActionVerbs []string
|
||||
@ -1124,6 +1128,14 @@ func TestAgentController(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func logLines(logs string) []string {
|
||||
if len(logs) == 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
return strings.Split(strings.TrimSpace(logs), "\n")
|
||||
}
|
||||
|
||||
func TestMergeLabelsAndAnnotations(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
|
@ -13,12 +13,10 @@ import (
|
||||
|
||||
"github.com/go-ldap/ldap/v3"
|
||||
"github.com/google/uuid"
|
||||
|
||||
"k8s.io/apimachinery/pkg/api/equality"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/labels"
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
"k8s.io/klog/v2/klogr"
|
||||
|
||||
"go.pinniped.dev/generated/latest/apis/supervisor/idp/v1alpha1"
|
||||
pinnipedclientset "go.pinniped.dev/generated/latest/client/supervisor/clientset/versioned"
|
||||
@ -28,6 +26,7 @@ import (
|
||||
"go.pinniped.dev/internal/controller/supervisorconfig/upstreamwatchers"
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/upstreamldap"
|
||||
)
|
||||
|
||||
@ -360,7 +359,7 @@ func (c *activeDirectoryWatcherController) validateUpstream(ctx context.Context,
|
||||
}
|
||||
|
||||
func (c *activeDirectoryWatcherController) updateStatus(ctx context.Context, upstream *v1alpha1.ActiveDirectoryIdentityProvider, conditions []*v1alpha1.Condition) {
|
||||
log := klogr.New().WithValues("namespace", upstream.Namespace, "name", upstream.Name)
|
||||
log := plog.WithValues("namespace", upstream.Namespace, "name", upstream.Name)
|
||||
updated := upstream.DeepCopy()
|
||||
|
||||
hadErrorCondition := conditionsutil.Merge(conditions, upstream.Generation, &updated.Status.Conditions, log)
|
||||
@ -379,7 +378,7 @@ func (c *activeDirectoryWatcherController) updateStatus(ctx context.Context, ups
|
||||
ActiveDirectoryIdentityProviders(upstream.Namespace).
|
||||
UpdateStatus(ctx, updated, metav1.UpdateOptions{})
|
||||
if err != nil {
|
||||
log.Error(err, "failed to update status")
|
||||
log.Error("failed to update status", err)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -12,7 +12,6 @@ import (
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/labels"
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
"k8s.io/klog/v2/klogr"
|
||||
|
||||
"go.pinniped.dev/generated/latest/apis/supervisor/idp/v1alpha1"
|
||||
pinnipedclientset "go.pinniped.dev/generated/latest/client/supervisor/clientset/versioned"
|
||||
@ -22,6 +21,7 @@ import (
|
||||
"go.pinniped.dev/internal/controller/supervisorconfig/upstreamwatchers"
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/upstreamldap"
|
||||
)
|
||||
|
||||
@ -252,7 +252,7 @@ func (c *ldapWatcherController) validateUpstream(ctx context.Context, upstream *
|
||||
}
|
||||
|
||||
func (c *ldapWatcherController) updateStatus(ctx context.Context, upstream *v1alpha1.LDAPIdentityProvider, conditions []*v1alpha1.Condition) {
|
||||
log := klogr.New().WithValues("namespace", upstream.Namespace, "name", upstream.Name)
|
||||
log := plog.WithValues("namespace", upstream.Namespace, "name", upstream.Name)
|
||||
updated := upstream.DeepCopy()
|
||||
|
||||
hadErrorCondition := conditionsutil.Merge(conditions, upstream.Generation, &updated.Status.Conditions, log)
|
||||
@ -271,6 +271,6 @@ func (c *ldapWatcherController) updateStatus(ctx context.Context, upstream *v1al
|
||||
LDAPIdentityProviders(upstream.Namespace).
|
||||
UpdateStatus(ctx, updated, metav1.UpdateOptions{})
|
||||
if err != nil {
|
||||
log.Error(err, "failed to update status")
|
||||
log.Error("failed to update status", err)
|
||||
}
|
||||
}
|
||||
|
@ -14,8 +14,6 @@ import (
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"k8s.io/apimachinery/pkg/util/sets"
|
||||
|
||||
"github.com/coreos/go-oidc/v3/oidc"
|
||||
"github.com/go-logr/logr"
|
||||
"golang.org/x/oauth2"
|
||||
@ -24,6 +22,7 @@ import (
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
"k8s.io/apimachinery/pkg/labels"
|
||||
"k8s.io/apimachinery/pkg/util/cache"
|
||||
"k8s.io/apimachinery/pkg/util/sets"
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
|
||||
"go.pinniped.dev/generated/latest/apis/supervisor/idp/v1alpha1"
|
||||
@ -36,6 +35,7 @@ import (
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/net/phttp"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/upstreamoidc"
|
||||
)
|
||||
|
||||
@ -331,8 +331,7 @@ func (c *oidcWatcherController) validateIssuer(ctx context.Context, upstream *v1
|
||||
|
||||
discoveredProvider, err = oidc.NewProvider(oidc.ClientContext(ctx, httpClient), upstream.Spec.Issuer)
|
||||
if err != nil {
|
||||
const klogLevelTrace = 6
|
||||
c.log.V(klogLevelTrace).WithValues(
|
||||
c.log.V(plog.KlogLevelTrace).WithValues(
|
||||
"namespace", upstream.Namespace,
|
||||
"name", upstream.Name,
|
||||
"issuer", upstream.Spec.Issuer,
|
||||
|
@ -29,6 +29,7 @@ import (
|
||||
"go.pinniped.dev/internal/certauthority"
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
"go.pinniped.dev/internal/testutil/oidctestutil"
|
||||
"go.pinniped.dev/internal/testutil/testlogger"
|
||||
@ -78,7 +79,6 @@ func TestOIDCUpstreamWatcherControllerFilterSecret(t *testing.T) {
|
||||
pinnipedInformers := pinnipedinformers.NewSharedInformerFactory(fakePinnipedClient, 0)
|
||||
fakeKubeClient := fake.NewSimpleClientset()
|
||||
kubeInformers := informers.NewSharedInformerFactory(fakeKubeClient, 0)
|
||||
testLog := testlogger.New(t)
|
||||
cache := provider.NewDynamicUpstreamIDPProvider()
|
||||
cache.SetOIDCIdentityProviders([]provider.UpstreamOIDCIdentityProviderI{
|
||||
&upstreamoidc.ProviderConfig{Name: "initial-entry"},
|
||||
@ -91,7 +91,7 @@ func TestOIDCUpstreamWatcherControllerFilterSecret(t *testing.T) {
|
||||
nil,
|
||||
pinnipedInformers.IDP().V1alpha1().OIDCIdentityProviders(),
|
||||
secretInformer,
|
||||
testLog.Logger,
|
||||
plog.Logr(), // nolint: staticcheck // old test with no log assertions
|
||||
withInformer.WithInformer,
|
||||
)
|
||||
|
||||
|
@ -15,7 +15,6 @@ import (
|
||||
"k8s.io/client-go/tools/cache"
|
||||
"k8s.io/client-go/tools/events"
|
||||
"k8s.io/client-go/util/workqueue"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
@ -215,7 +214,7 @@ func (c *controller) handleKey(key Key, err error) {

if errors.Is(err, ErrSyntheticRequeue) {
// logging this helps detect wedged controllers with missing prerequisites
klog.V(4).InfoS("requested synthetic requeue", "controller", c.Name(), "key", key)
plog.Debug("requested synthetic requeue", "controller", c.Name(), "key", key)
} else {
utilruntime.HandleError(fmt.Errorf("%s: %v failed with: %w", c.Name(), key, err))
}

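This hunk, together with the impersonator-config and OIDC-watcher hunks elsewhere in this diff, replaces raw klog verbosity calls with plog helpers. The level mapping implied by the replacements (V(2), V(4), and the former local const 6) is summarized below; the constant names mirror plog.KlogLevelInfo/Debug/Trace as used in this diff, but the numeric values are inferred from the replaced call sites, not copied from the plog source.

package logsketch

// Inferred from the replacements shown in this diff.
const (
	klogLevelInfo  = 2 // klog.V(2).InfoS(...) -> log.V(plog.KlogLevelInfo)  / plog.Info(...)
	klogLevelDebug = 4 // klog.V(4).InfoS(...) -> log.V(plog.KlogLevelDebug) / plog.Debug(...)
	klogLevelTrace = 6 // former local const 6 -> log.V(plog.KlogLevelTrace)
)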
@ -13,7 +13,8 @@ import (
|
||||
"k8s.io/client-go/tools/cache"
|
||||
"k8s.io/client-go/tools/events"
|
||||
"k8s.io/client-go/util/workqueue"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
type Option func(*controller)
|
||||
@ -62,7 +63,7 @@ func WithInformer(getter InformerGetter, filter Filter, opt InformerOption) Opti
|
||||
AddFunc: func(obj interface{}) {
|
||||
object := metaOrDie(obj)
|
||||
if filter.Add(object) {
|
||||
klog.V(4).InfoS("handling add",
|
||||
plog.Debug("handling add",
|
||||
"controller", c.Name(),
|
||||
"namespace", object.GetNamespace(),
|
||||
"name", object.GetName(),
|
||||
@ -76,7 +77,7 @@ func WithInformer(getter InformerGetter, filter Filter, opt InformerOption) Opti
|
||||
oldObject := metaOrDie(oldObj)
|
||||
newObject := metaOrDie(newObj)
|
||||
if filter.Update(oldObject, newObject) {
|
||||
klog.V(4).InfoS("handling update",
|
||||
plog.Debug("handling update",
|
||||
"controller", c.Name(),
|
||||
"namespace", newObject.GetNamespace(),
|
||||
"name", newObject.GetName(),
|
||||
@ -101,7 +102,7 @@ func WithInformer(getter InformerGetter, filter Filter, opt InformerOption) Opti
|
||||
}
|
||||
}
|
||||
if filter.Delete(accessor) {
|
||||
klog.V(4).InfoS("handling delete",
|
||||
plog.Debug("handling delete",
|
||||
"controller", c.Name(),
|
||||
"namespace", accessor.GetNamespace(),
|
||||
"name", accessor.GetName(),
|
||||
|
@ -8,7 +8,8 @@ import (
|
||||
|
||||
"k8s.io/apimachinery/pkg/runtime"
|
||||
"k8s.io/client-go/tools/events"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
var _ events.EventRecorder = klogRecorder{}
|
||||
@ -16,7 +17,7 @@ var _ events.EventRecorder = klogRecorder{}
|
||||
type klogRecorder struct{}
|
||||
|
||||
func (n klogRecorder) Eventf(regarding runtime.Object, related runtime.Object, eventtype, reason, action, note string, args ...interface{}) {
|
||||
klog.V(4).InfoS("recording event",
|
||||
plog.Debug("recording event",
|
||||
"regarding", regarding,
|
||||
"related", related,
|
||||
"eventtype", eventtype,
|
||||
|
@ -14,10 +14,10 @@ import (
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
corev1client "k8s.io/client-go/kubernetes/typed/core/v1"
|
||||
"k8s.io/client-go/tools/events"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/controllerlib"
|
||||
"go.pinniped.dev/internal/controllerlib/test/integration/examplecontroller/api"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
//nolint:funlen
|
||||
@ -59,7 +59,7 @@ func NewExampleCreatingController(
|
||||
}
|
||||
|
||||
generateSecret := func(service *corev1.Service) error {
|
||||
klog.V(4).InfoS("generating new secret for service", "namespace", service.Namespace, "name", service.Name)
|
||||
plog.Debug("generating new secret for service", "namespace", service.Namespace, "name", service.Name)
|
||||
|
||||
secret := &corev1.Secret{
|
||||
ObjectMeta: metav1.ObjectMeta{
|
||||
@ -97,7 +97,7 @@ func NewExampleCreatingController(
|
||||
return nil // drop from queue because we cannot safely update this secret
|
||||
}
|
||||
|
||||
klog.V(4).InfoS("updating data in existing secret", "namespace", secret.Namespace, "name", secret.Name)
|
||||
plog.Debug("updating data in existing secret", "namespace", secret.Namespace, "name", secret.Name)
|
||||
// Actually update the secret in the regeneration case (the secret already exists but we want to update to new secretData).
|
||||
_, updateErr := secretClient.Secrets(secret.Namespace).Update(context.TODO(), secret, metav1.UpdateOptions{})
|
||||
return updateErr
|
||||
@ -169,7 +169,7 @@ func NewExampleCreatingController(
|
||||
utilruntime.HandleError(fmt.Errorf("unable to get service %s/%s: %w", secret.Namespace, serviceName, err))
|
||||
return false
|
||||
}
|
||||
klog.V(4).InfoS("recreating secret", "namespace", service.Namespace, "name", service.Name)
|
||||
plog.Debug("recreating secret", "namespace", service.Namespace, "name", service.Name)
|
||||
return true
|
||||
},
|
||||
}, controllerlib.InformerOption{}),
|
||||
|
@ -11,7 +11,6 @@ import (
|
||||
|
||||
k8sinformers "k8s.io/client-go/informers"
|
||||
"k8s.io/client-go/kubernetes"
|
||||
"k8s.io/klog/v2/klogr"
|
||||
"k8s.io/utils/clock"
|
||||
|
||||
pinnipedclientset "go.pinniped.dev/generated/latest/client/concierge/clientset/versioned"
|
||||
@ -34,6 +33,7 @@ import (
|
||||
"go.pinniped.dev/internal/groupsuffix"
|
||||
"go.pinniped.dev/internal/kubeclient"
|
||||
"go.pinniped.dev/internal/leaderelection"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -197,6 +197,7 @@ func PrepareControllers(c *Config) (controllerinit.RunnerBuilder, error) {
|
||||
controllerlib.WithInformer,
|
||||
c.ServingCertRenewBefore,
|
||||
apicerts.TLSCertificateChainSecretKey,
|
||||
plog.New(),
|
||||
),
|
||||
singletonWorker,
|
||||
).
|
||||
@ -222,7 +223,7 @@ func PrepareControllers(c *Config) (controllerinit.RunnerBuilder, error) {
|
||||
agentConfig,
|
||||
client,
|
||||
informers.installationNamespaceK8s.Core().V1().Pods(),
|
||||
klogr.New(),
|
||||
plog.Logr(), // nolint: staticcheck // old controller with lots of log statements
|
||||
),
|
||||
singletonWorker,
|
||||
).
|
||||
@ -232,7 +233,7 @@ func PrepareControllers(c *Config) (controllerinit.RunnerBuilder, error) {
|
||||
webhookcachefiller.New(
|
||||
c.AuthenticatorCache,
|
||||
informers.pinniped.Authentication().V1alpha1().WebhookAuthenticators(),
|
||||
klogr.New(),
|
||||
plog.Logr(), // nolint: staticcheck // old controller with lots of log statements
|
||||
),
|
||||
singletonWorker,
|
||||
).
|
||||
@ -240,7 +241,7 @@ func PrepareControllers(c *Config) (controllerinit.RunnerBuilder, error) {
|
||||
jwtcachefiller.New(
|
||||
c.AuthenticatorCache,
|
||||
informers.pinniped.Authentication().V1alpha1().JWTAuthenticators(),
|
||||
klogr.New(),
|
||||
plog.Logr(), // nolint: staticcheck // old controller with lots of log statements
|
||||
),
|
||||
singletonWorker,
|
||||
).
|
||||
@ -249,7 +250,7 @@ func PrepareControllers(c *Config) (controllerinit.RunnerBuilder, error) {
|
||||
c.AuthenticatorCache,
|
||||
informers.pinniped.Authentication().V1alpha1().WebhookAuthenticators(),
|
||||
informers.pinniped.Authentication().V1alpha1().JWTAuthenticators(),
|
||||
klogr.New(),
|
||||
plog.Logr(), // nolint: staticcheck // old controller with lots of log statements
|
||||
),
|
||||
singletonWorker,
|
||||
).
|
||||
@ -275,7 +276,7 @@ func PrepareControllers(c *Config) (controllerinit.RunnerBuilder, error) {
|
||||
impersonator.New,
|
||||
c.NamesConfig.ImpersonationSignerSecret,
|
||||
c.ImpersonationSigningCertProvider,
|
||||
klogr.New(),
|
||||
plog.Logr(), // nolint: staticcheck // old controller with lots of log statements
|
||||
),
|
||||
singletonWorker,
|
||||
).
|
||||
@ -303,6 +304,7 @@ func PrepareControllers(c *Config) (controllerinit.RunnerBuilder, error) {
|
||||
controllerlib.WithInformer,
|
||||
365*24*time.Hour-time.Hour, // 1 year minus 1 hour hard coded value (i.e. wait until the last moment to break the signer)
|
||||
apicerts.CACertificateSecretKey,
|
||||
plog.New(),
|
||||
),
|
||||
singletonWorker,
|
||||
)
|
||||
|
@ -20,7 +20,8 @@ import (
|
||||
_ "crypto/tls/fipsonly" // restricts all TLS configuration to FIPS-approved settings.
|
||||
|
||||
"k8s.io/apiserver/pkg/server/options"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
// Always use TLS 1.2 for FIPS
|
||||
@ -36,9 +37,7 @@ func init() {
|
||||
|
||||
// this init runs before we have parsed our config to determine our log level
|
||||
// thus we must use a log statement that will always print instead of conditionally print
|
||||
// for plog, that is only error and warning logs, neither of which seem appropriate here
|
||||
// therefore, just use klog directly with no V level requirement
|
||||
klog.InfoS("using boring crypto in fips only mode", "go version", runtime.Version())
|
||||
plog.Always("using boring crypto in fips only mode", "go version", runtime.Version())
|
||||
}
|
||||
|
||||
func Default(rootCAs *x509.CertPool) *tls.Config {
|
||||
|
@ -16,7 +16,6 @@ import (
|
||||
loginv1alpha1 "go.pinniped.dev/generated/latest/apis/concierge/login/v1alpha1"
|
||||
"go.pinniped.dev/internal/constable"
|
||||
"go.pinniped.dev/internal/kubeclient"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
const (
|
||||
@ -86,8 +85,8 @@ func New(apiGroupSuffix string) kubeclient.Middleware {
|
||||
// want our middleware to be opinionated so that it can be really good at a specific task
|
||||
// and give us specific feedback when it can't do that specific task
|
||||
return fmt.Errorf(
|
||||
"cannot replace token credential request %q without authenticator API group",
|
||||
plog.KObj(obj),
|
||||
"cannot replace token credential request %s/%s without authenticator API group",
|
||||
obj.GetNamespace(), obj.GetName(),
|
||||
)
|
||||
}
|
||||
|
||||
@ -95,8 +94,8 @@ func New(apiGroupSuffix string) kubeclient.Middleware {
|
||||
if !ok {
|
||||
// see comment above about specificity of middleware
|
||||
return fmt.Errorf(
|
||||
"cannot replace token credential request %q authenticator API group %q with group suffix %q",
|
||||
plog.KObj(obj),
|
||||
"cannot replace token credential request %s/%s authenticator API group %q with group suffix %q",
|
||||
obj.GetNamespace(), obj.GetName(),
|
||||
*tokenCredentialRequest.Spec.Authenticator.APIGroup,
|
||||
apiGroupSuffix,
|
||||
)
|
||||
|
@ -435,7 +435,7 @@ func TestMiddlware(t *testing.T) {
|
||||
responseObj: tokenCredentialRequestWithNewGroup, // a token credential response does not contain a spec
|
||||
wantMutateRequests: 3,
|
||||
wantMutateResponses: 1,
|
||||
wantMutateRequestErrors: []string{`cannot replace token credential request "/" authenticator API group "authentication.concierge.some.suffix.com" with group suffix "some.suffix.com"`},
|
||||
wantMutateRequestErrors: []string{`cannot replace token credential request / authenticator API group "authentication.concierge.some.suffix.com" with group suffix "some.suffix.com"`},
|
||||
wantResponseObj: tokenCredentialRequestWithNewGroup, // the middleware will reset object GVK for us
|
||||
},
|
||||
{
|
||||
@ -475,7 +475,7 @@ func TestMiddlware(t *testing.T) {
|
||||
responseObj: tokenCredentialRequestWithNewGroup, // a token credential response does not contain a spec
|
||||
wantMutateRequests: 3,
|
||||
wantMutateResponses: 1,
|
||||
wantMutateRequestErrors: []string{`cannot replace token credential request "/" without authenticator API group`},
|
||||
wantMutateRequestErrors: []string{`cannot replace token credential request / without authenticator API group`},
|
||||
wantResponseObj: tokenCredentialRequestWithNewGroup, // the middleware will reset object GVK for us
|
||||
},
|
||||
{
|
||||
|
@ -39,9 +39,9 @@ func glogBody(prefix string, body []byte) {
|
||||
if bytes.IndexFunc(body, func(r rune) bool {
|
||||
return r < 0x0a
|
||||
}) != -1 {
|
||||
plog.Debug(prefix, "body", hex.Dump(body))
|
||||
plog.All(prefix, "body", hex.Dump(body))
|
||||
} else {
|
||||
plog.Debug(prefix, "body", string(body))
|
||||
plog.All(prefix, "body", string(body))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -22,6 +22,7 @@ import (
|
||||
"os"
|
||||
"os/signal"
|
||||
"strings"
|
||||
"syscall"
|
||||
"time"
|
||||
|
||||
"golang.org/x/crypto/bcrypt"
|
||||
@ -31,7 +32,6 @@ import (
|
||||
kubeinformers "k8s.io/client-go/informers"
|
||||
corev1informers "k8s.io/client-go/informers/core/v1"
|
||||
"k8s.io/client-go/kubernetes"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/constable"
|
||||
"go.pinniped.dev/internal/controller/apicerts"
|
||||
@ -339,10 +339,7 @@ func waitForSignal() os.Signal {
|
||||
return <-signalCh
|
||||
}
|
||||
|
||||
func run() error {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
defer cancel()
|
||||
|
||||
func run(ctx context.Context) error {
|
||||
client, err := kubeclient.New()
|
||||
if err != nil {
|
||||
return fmt.Errorf("cannot create k8s client: %w", err)
|
||||
@ -378,13 +375,35 @@ func run() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func Main() {
|
||||
func main() error { // return an error instead of plog.Fatal to allow defer statements to run
|
||||
ctx := signalCtx()
|
||||
|
||||
// Hardcode the logging level to debug, since this is a test app and it is very helpful to have
|
||||
// verbose logs to debug test failures.
|
||||
if err := plog.ValidateAndSetLogLevelGlobally(plog.LevelDebug); err != nil {
|
||||
klog.Fatal(err)
|
||||
if err := plog.ValidateAndSetLogLevelAndFormatGlobally(ctx, plog.LogSpec{Level: plog.LevelDebug}); err != nil {
|
||||
plog.Fatal(err)
|
||||
}
|
||||
if err := run(); err != nil {
|
||||
klog.Fatal(err)
|
||||
|
||||
return run(ctx)
|
||||
}
|
||||
|
||||
func Main() {
|
||||
if err := main(); err != nil {
|
||||
plog.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func signalCtx() context.Context {
|
||||
signalCh := make(chan os.Signal, 1)
|
||||
signal.Notify(signalCh, os.Interrupt, syscall.SIGTERM)
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
go func() {
|
||||
defer cancel()
|
||||
|
||||
s := <-signalCh
|
||||
plog.Debug("saw signal", "signal", s)
|
||||
}()
|
||||
|
||||
return ctx
|
||||
}
|
||||
|
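The hand-rolled signalCtx above is roughly equivalent to the standard library's signal.NotifyContext (Go 1.16+), except that it also logs which signal arrived. For comparison, a sketch of the stdlib form using the same imports this file already has; this is a comparison only, not a suggested change to the diff.

// Comparison sketch only; the version above is kept so the received signal can be logged.
func signalCtxViaStdlib() context.Context {
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	_ = stop // a real caller would defer stop() to release the signal registration
	return ctx
}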
@ -7,23 +7,22 @@ package auth
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/url"
|
||||
"time"
|
||||
|
||||
coreosoidc "github.com/coreos/go-oidc/v3/oidc"
|
||||
"github.com/felixge/httpsnoop"
|
||||
"github.com/ory/fosite"
|
||||
"github.com/ory/fosite/handler/openid"
|
||||
"github.com/ory/fosite/token/jwt"
|
||||
"github.com/pkg/errors"
|
||||
"golang.org/x/oauth2"
|
||||
|
||||
supervisoroidc "go.pinniped.dev/generated/latest/apis/supervisor/oidc"
|
||||
"go.pinniped.dev/internal/authenticators"
|
||||
"go.pinniped.dev/internal/httputil/httperr"
|
||||
"go.pinniped.dev/internal/httputil/securityheader"
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/csrftoken"
|
||||
"go.pinniped.dev/internal/oidc/downstreamsession"
|
||||
"go.pinniped.dev/internal/oidc/login"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/psession"
|
||||
@ -55,6 +54,12 @@ func NewHandler(
|
||||
return httperr.Newf(http.StatusMethodNotAllowed, "%s (try GET or POST)", r.Method)
|
||||
}
|
||||
|
||||
// Note that the client might have used supervisoroidc.AuthorizeUpstreamIDPNameParamName and
|
||||
// supervisoroidc.AuthorizeUpstreamIDPTypeParamName query params to request a certain upstream IDP.
|
||||
// The Pinniped CLI has been sending these params since v0.9.0.
|
||||
// Currently, these are ignored because the Supervisor does not yet support logins when multiple IDPs
|
||||
// are configured. However, these params should be honored in the future when choosing an upstream
|
||||
// here, e.g. by calling supervisoroidc.FindUpstreamIDPByNameAndType() when the params are present.
|
||||
oidcUpstream, ldapUpstream, idpType, err := chooseUpstreamIDP(idpLister)
|
||||
if err != nil {
|
||||
plog.WarningErr("authorize upstream config", err)
|
||||
@ -62,11 +67,12 @@ func NewHandler(
|
||||
}
|
||||
|
||||
if idpType == psession.ProviderTypeOIDC {
|
||||
if len(r.Header.Values(supervisoroidc.AuthorizeUsernameHeaderName)) > 0 {
|
||||
if len(r.Header.Values(supervisoroidc.AuthorizeUsernameHeaderName)) > 0 ||
|
||||
len(r.Header.Values(supervisoroidc.AuthorizePasswordHeaderName)) > 0 {
|
||||
// The client set a username header, so they are trying to log in with a username/password.
|
||||
return handleAuthRequestForOIDCUpstreamPasswordGrant(r, w, oauthHelperWithStorage, oidcUpstream)
|
||||
}
|
||||
return handleAuthRequestForOIDCUpstreamAuthcodeGrant(r, w,
|
||||
return handleAuthRequestForOIDCUpstreamBrowserFlow(r, w,
|
||||
oauthHelperWithoutStorage,
|
||||
generateCSRF, generateNonce, generatePKCE,
|
||||
oidcUpstream,
|
||||
@ -75,15 +81,34 @@ func NewHandler(
|
||||
cookieCodec,
|
||||
)
|
||||
}
|
||||
return handleAuthRequestForLDAPUpstream(r, w,
|
||||
|
||||
// We know it's an AD/LDAP upstream.
|
||||
if len(r.Header.Values(supervisoroidc.AuthorizeUsernameHeaderName)) > 0 ||
|
||||
len(r.Header.Values(supervisoroidc.AuthorizePasswordHeaderName)) > 0 {
|
||||
// The client set a username header, so they are trying to log in with a username/password.
|
||||
return handleAuthRequestForLDAPUpstreamCLIFlow(r, w,
|
||||
oauthHelperWithStorage,
|
||||
ldapUpstream,
|
||||
idpType,
|
||||
)
|
||||
}
|
||||
return handleAuthRequestForLDAPUpstreamBrowserFlow(
|
||||
r,
|
||||
w,
|
||||
oauthHelperWithoutStorage,
|
||||
generateCSRF,
|
||||
generateNonce,
|
||||
generatePKCE,
|
||||
ldapUpstream,
|
||||
idpType,
|
||||
downstreamIssuer,
|
||||
upstreamStateEncoder,
|
||||
cookieCodec,
|
||||
)
|
||||
}))
|
||||
}
|
||||
|
||||
func handleAuthRequestForLDAPUpstream(
|
||||
func handleAuthRequestForLDAPUpstreamCLIFlow(
|
||||
r *http.Request,
|
||||
w http.ResponseWriter,
|
||||
oauthHelper fosite.OAuth2Provider,
|
||||
@ -106,36 +131,55 @@ func handleAuthRequestForLDAPUpstream(
|
||||
return httperr.New(http.StatusBadGateway, "unexpected error during upstream authentication")
|
||||
}
|
||||
if !authenticated {
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
fosite.ErrAccessDenied.WithHintf("Username/password not accepted by LDAP provider."), true)
|
||||
return nil
|
||||
}
|
||||
|
||||
subject := downstreamSubjectFromUpstreamLDAP(ldapUpstream, authenticateResponse)
|
||||
subject := downstreamsession.DownstreamSubjectFromUpstreamLDAP(ldapUpstream, authenticateResponse)
|
||||
username = authenticateResponse.User.GetName()
|
||||
groups := authenticateResponse.User.GetGroups()
|
||||
dn := authenticateResponse.DN
|
||||
customSessionData := downstreamsession.MakeDownstreamLDAPOrADCustomSessionData(ldapUpstream, idpType, authenticateResponse)
|
||||
openIDSession := downstreamsession.MakeDownstreamSession(subject, username, groups, customSessionData)
|
||||
oidc.PerformAuthcodeRedirect(r, w, oauthHelper, authorizeRequester, openIDSession, true)
|
||||
|
||||
customSessionData := &psession.CustomSessionData{
|
||||
ProviderUID: ldapUpstream.GetResourceUID(),
|
||||
ProviderName: ldapUpstream.GetName(),
|
||||
ProviderType: idpType,
|
||||
return nil
|
||||
}
|
||||
|
||||
if idpType == psession.ProviderTypeLDAP {
|
||||
customSessionData.LDAP = &psession.LDAPSessionData{
|
||||
UserDN: dn,
|
||||
ExtraRefreshAttributes: authenticateResponse.ExtraRefreshAttributes,
|
||||
}
|
||||
}
|
||||
if idpType == psession.ProviderTypeActiveDirectory {
|
||||
customSessionData.ActiveDirectory = &psession.ActiveDirectorySessionData{
|
||||
UserDN: dn,
|
||||
ExtraRefreshAttributes: authenticateResponse.ExtraRefreshAttributes,
|
||||
func handleAuthRequestForLDAPUpstreamBrowserFlow(
|
||||
r *http.Request,
|
||||
w http.ResponseWriter,
|
||||
oauthHelper fosite.OAuth2Provider,
|
||||
generateCSRF func() (csrftoken.CSRFToken, error),
|
||||
generateNonce func() (nonce.Nonce, error),
|
||||
generatePKCE func() (pkce.Code, error),
|
||||
ldapUpstream provider.UpstreamLDAPIdentityProviderI,
|
||||
idpType psession.ProviderType,
|
||||
downstreamIssuer string,
|
||||
upstreamStateEncoder oidc.Encoder,
|
||||
cookieCodec oidc.Codec,
|
||||
) error {
|
||||
authRequestState, err := handleBrowserFlowAuthRequest(
|
||||
r,
|
||||
w,
|
||||
oauthHelper,
|
||||
generateCSRF,
|
||||
generateNonce,
|
||||
generatePKCE,
|
||||
ldapUpstream.GetName(),
|
||||
idpType,
|
||||
cookieCodec,
|
||||
upstreamStateEncoder,
|
||||
)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if authRequestState == nil {
|
||||
// There was an error but handleBrowserFlowAuthRequest() already took care of writing the response for it.
|
||||
return nil
|
||||
}
|
||||
|
||||
return makeDownstreamSessionAndReturnAuthcodeRedirect(r, w,
|
||||
oauthHelper, authorizeRequester, subject, username, groups, customSessionData)
|
||||
return login.RedirectToLoginPage(r, w, downstreamIssuer, authRequestState.encodedStateParam, login.ShowNoError)
|
||||
}
|
||||
|
||||
func handleAuthRequestForOIDCUpstreamPasswordGrant(
|
||||
@ -156,9 +200,10 @@ func handleAuthRequestForOIDCUpstreamPasswordGrant(
|
||||
|
||||
if !oidcUpstream.AllowsPasswordGrant() {
|
||||
// Return a user-friendly error for this case which is entirely within our control.
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
fosite.ErrAccessDenied.WithHint(
|
||||
"Resource owner password credentials grant is not allowed for this upstream provider according to its configuration."), true)
|
||||
return nil
|
||||
}
|
||||
|
||||
token, err := oidcUpstream.PasswordCredentialsGrantAndValidateTokens(r.Context(), username, password)
|
||||
@ -170,29 +215,36 @@ func handleAuthRequestForOIDCUpstreamPasswordGrant(
|
||||
// However, the exact response is undefined in the sense that there is no such thing as a password grant in
|
||||
// the OIDC spec, so we don't try too hard to read the upstream errors in this case. (E.g. Dex departs from the
|
||||
// spec and returns something other than an "invalid_grant" error for bad resource owner credentials.)
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
fosite.ErrAccessDenied.WithDebug(err.Error()), true) // WithDebug hides the error from the client
|
||||
return nil
|
||||
}
|
||||
|
||||
subject, username, groups, err := downstreamsession.GetDownstreamIdentityFromUpstreamIDToken(oidcUpstream, token.IDToken.Claims)
|
||||
if err != nil {
|
||||
// Return a user-friendly error for this case which is entirely within our control.
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
fosite.ErrAccessDenied.WithHintf("Reason: %s.", err.Error()), true,
|
||||
)
|
||||
return nil
|
||||
}
|
||||
|
||||
customSessionData, err := downstreamsession.MakeDownstreamOIDCCustomSessionData(oidcUpstream, token)
|
||||
if err != nil {
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester,
|
||||
fosite.ErrAccessDenied.WithHintf("Reason: %s.", err.Error()), true,
|
||||
)
|
||||
return nil
|
||||
}
|
||||
|
||||
return makeDownstreamSessionAndReturnAuthcodeRedirect(r, w, oauthHelper, authorizeRequester, subject, username, groups, customSessionData)
|
||||
openIDSession := downstreamsession.MakeDownstreamSession(subject, username, groups, customSessionData)
|
||||
|
||||
oidc.PerformAuthcodeRedirect(r, w, oauthHelper, authorizeRequester, openIDSession, true)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func handleAuthRequestForOIDCUpstreamAuthcodeGrant(
|
||||
func handleAuthRequestForOIDCUpstreamBrowserFlow(
|
||||
r *http.Request,
|
||||
w http.ResponseWriter,
|
||||
oauthHelper fosite.OAuth2Provider,
|
||||
@ -204,34 +256,24 @@ func handleAuthRequestForOIDCUpstreamAuthcodeGrant(
|
||||
upstreamStateEncoder oidc.Encoder,
|
||||
cookieCodec oidc.Codec,
|
||||
) error {
|
||||
authorizeRequester, created := newAuthorizeRequest(r, w, oauthHelper, false)
|
||||
if !created {
|
||||
return nil
|
||||
}
|
||||
|
||||
now := time.Now()
|
||||
_, err := oauthHelper.NewAuthorizeResponse(r.Context(), authorizeRequester, &psession.PinnipedSession{
|
||||
Fosite: &openid.DefaultSession{
|
||||
Claims: &jwt.IDTokenClaims{
|
||||
// Temporary claim values to allow `NewAuthorizeResponse` to perform other OIDC validations.
|
||||
Subject: "none",
|
||||
AuthTime: now,
|
||||
RequestedAt: now,
|
||||
},
|
||||
},
|
||||
})
|
||||
authRequestState, err := handleBrowserFlowAuthRequest(
|
||||
r,
|
||||
w,
|
||||
oauthHelper,
|
||||
generateCSRF,
|
||||
generateNonce,
|
||||
generatePKCE,
|
||||
oidcUpstream.GetName(),
|
||||
psession.ProviderTypeOIDC,
|
||||
cookieCodec,
|
||||
upstreamStateEncoder,
|
||||
)
|
||||
if err != nil {
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester, err, false)
|
||||
}
|
||||
|
||||
csrfValue, nonceValue, pkceValue, err := generateValues(generateCSRF, generateNonce, generatePKCE)
|
||||
if err != nil {
|
||||
plog.Error("authorize generate error", err)
|
||||
return err
|
||||
}
|
||||
csrfFromCookie := readCSRFCookie(r, cookieCodec)
|
||||
if csrfFromCookie != "" {
|
||||
csrfValue = csrfFromCookie
|
||||
if authRequestState == nil {
|
||||
// There was an error but handleBrowserFlowAuthRequest() already took care of writing the response for it.
|
||||
return nil
|
||||
}
|
||||
|
||||
upstreamOAuthConfig := oauth2.Config{
|
||||
@ -243,46 +285,19 @@ func handleAuthRequestForOIDCUpstreamAuthcodeGrant(
|
||||
Scopes: oidcUpstream.GetScopes(),
|
||||
}
|
||||
|
||||
encodedStateParamValue, err := upstreamStateParam(
|
||||
authorizeRequester,
|
||||
oidcUpstream.GetName(),
|
||||
nonceValue,
|
||||
csrfValue,
|
||||
pkceValue,
|
||||
upstreamStateEncoder,
|
||||
)
|
||||
if err != nil {
|
||||
plog.Error("authorize upstream state param error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
authCodeOptions := []oauth2.AuthCodeOption{
|
||||
nonceValue.Param(),
|
||||
pkceValue.Challenge(),
|
||||
pkceValue.Method(),
|
||||
}
|
||||
|
||||
promptParam := r.Form.Get(promptParamName)
|
||||
if promptParam == promptParamNone && oidc.ScopeWasRequested(authorizeRequester, coreosoidc.ScopeOpenID) {
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester, fosite.ErrLoginRequired, false)
|
||||
authRequestState.nonce.Param(),
|
||||
authRequestState.pkce.Challenge(),
|
||||
authRequestState.pkce.Method(),
|
||||
}
|
||||
|
||||
for key, val := range oidcUpstream.GetAdditionalAuthcodeParams() {
|
||||
authCodeOptions = append(authCodeOptions, oauth2.SetAuthURLParam(key, val))
|
||||
}
|
||||
|
||||
if csrfFromCookie == "" {
|
||||
// We did not receive an incoming CSRF cookie, so write a new one.
|
||||
err := addCSRFSetCookieHeader(w, csrfValue, cookieCodec)
|
||||
if err != nil {
|
||||
plog.Error("error setting CSRF cookie", err)
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
http.Redirect(w, r,
|
||||
upstreamOAuthConfig.AuthCodeURL(
|
||||
encodedStateParamValue,
|
||||
authRequestState.encodedStateParam,
|
||||
authCodeOptions...,
|
||||
),
|
||||
http.StatusSeeOther, // match fosite and https://tools.ietf.org/id/draft-ietf-oauth-security-topics-18.html#section-4.11
|
||||
@ -291,78 +306,11 @@ func handleAuthRequestForOIDCUpstreamAuthcodeGrant(
|
||||
return nil
|
||||
}
|
||||
|
||||
func writeAuthorizeError(w http.ResponseWriter, oauthHelper fosite.OAuth2Provider, authorizeRequester fosite.AuthorizeRequester, err error, isBrowserless bool) error {
|
||||
if plog.Enabled(plog.LevelTrace) {
|
||||
// When trace level logging is enabled, include the stack trace in the log message.
|
||||
keysAndValues := oidc.FositeErrorForLog(err)
|
||||
errWithStack := errors.WithStack(err)
|
||||
keysAndValues = append(keysAndValues, "errWithStack")
|
||||
// klog always prints error values using %s, which does not include stack traces,
|
||||
// so convert the error to a string which includes the stack trace here.
|
||||
keysAndValues = append(keysAndValues, fmt.Sprintf("%+v", errWithStack))
|
||||
plog.Trace("authorize response error", keysAndValues...)
|
||||
} else {
|
||||
plog.Info("authorize response error", oidc.FositeErrorForLog(err)...)
|
||||
}
|
||||
if isBrowserless {
|
||||
w = rewriteStatusSeeOtherToStatusFoundForBrowserless(w)
|
||||
}
|
||||
// Return an error according to OIDC spec 3.1.2.6 (second paragraph).
|
||||
oauthHelper.WriteAuthorizeError(w, authorizeRequester, err)
|
||||
return nil
|
||||
}
|
||||
|
||||
func makeDownstreamSessionAndReturnAuthcodeRedirect(
|
||||
r *http.Request,
|
||||
w http.ResponseWriter,
|
||||
oauthHelper fosite.OAuth2Provider,
|
||||
authorizeRequester fosite.AuthorizeRequester,
|
||||
subject string,
|
||||
username string,
|
||||
groups []string,
|
||||
customSessionData *psession.CustomSessionData,
|
||||
) error {
|
||||
openIDSession := downstreamsession.MakeDownstreamSession(subject, username, groups, customSessionData)
|
||||
|
||||
authorizeResponder, err := oauthHelper.NewAuthorizeResponse(r.Context(), authorizeRequester, openIDSession)
|
||||
if err != nil {
|
||||
return writeAuthorizeError(w, oauthHelper, authorizeRequester, err, true)
|
||||
}
|
||||
|
||||
w = rewriteStatusSeeOtherToStatusFoundForBrowserless(w)
|
||||
oauthHelper.WriteAuthorizeResponse(w, authorizeRequester, authorizeResponder)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func rewriteStatusSeeOtherToStatusFoundForBrowserless(w http.ResponseWriter) http.ResponseWriter {
// rewrite http.StatusSeeOther to http.StatusFound for backwards compatibility with old pinniped CLIs.
// we can drop this in a few releases once we feel enough time has passed for users to update.
//
// WriteAuthorizeResponse/WriteAuthorizeError calls used to result in http.StatusFound until
// https://github.com/ory/fosite/pull/636 changed it to http.StatusSeeOther to address
// https://tools.ietf.org/id/draft-ietf-oauth-security-topics-18.html#section-4.11
// Safari has the bad behavior in the case of http.StatusFound and not just http.StatusTemporaryRedirect.
//
// in the browserless flows, the OAuth client is the pinniped CLI and it already has access to the user's
// password. Thus there is no security issue with using http.StatusFound vs. http.StatusSeeOther.
return httpsnoop.Wrap(w, httpsnoop.Hooks{
WriteHeader: func(delegate httpsnoop.WriteHeaderFunc) httpsnoop.WriteHeaderFunc {
return func(code int) {
if code == http.StatusSeeOther {
code = http.StatusFound
}
delegate(code)
}
},
})
}
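A minimal sketch (not part of the diff) of the wrapper's effect: a 303 written through it is observed as a 302, which is what older Pinniped CLIs expect on the browserless flows. It uses only the function above and the standard library:

recorder := httptest.NewRecorder() // net/http/httptest
w := rewriteStatusSeeOtherToStatusFoundForBrowserless(recorder)
w.WriteHeader(http.StatusSeeOther) // fosite writes 303 here
fmt.Println(recorder.Code)         // prints 302, i.e. http.StatusFound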

func requireNonEmptyUsernameAndPasswordHeaders(r *http.Request, w http.ResponseWriter, oauthHelper fosite.OAuth2Provider, authorizeRequester fosite.AuthorizeRequester) (string, string, bool) {
username := r.Header.Get(supervisoroidc.AuthorizeUsernameHeaderName)
password := r.Header.Get(supervisoroidc.AuthorizePasswordHeaderName)
if username == "" || password == "" {
_ = writeAuthorizeError(w, oauthHelper, authorizeRequester,
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester,
fosite.ErrAccessDenied.WithHintf("Missing or blank username or password."), true)
return "", "", false
}
@ -372,7 +320,7 @@ func requireNonEmptyUsernameAndPasswordHeaders(r *http.Request, w http.ResponseW
func newAuthorizeRequest(r *http.Request, w http.ResponseWriter, oauthHelper fosite.OAuth2Provider, isBrowserless bool) (fosite.AuthorizeRequester, bool) {
authorizeRequester, err := oauthHelper.NewAuthorizeRequest(r.Context(), r)
if err != nil {
_ = writeAuthorizeError(w, oauthHelper, authorizeRequester, err, isBrowserless)
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester, err, isBrowserless)
return nil, false
}

@ -404,7 +352,8 @@ func readCSRFCookie(r *http.Request, codec oidc.Decoder) csrftoken.CSRFToken {
return csrfFromCookie
}

// Select either an OIDC, an LDAP or an AD IDP, or return an error.
// chooseUpstreamIDP selects either an OIDC, an LDAP, or an AD IDP, or returns an error.
// Note that AD and LDAP IDPs both return the same interface type, but different ProviderTypes values.
func chooseUpstreamIDP(idpLister oidc.UpstreamIdentityProvidersLister) (provider.UpstreamOIDCIdentityProviderI, provider.UpstreamLDAPIdentityProviderI, psession.ProviderType, error) {
oidcUpstreams := idpLister.GetOIDCIdentityProviders()
ldapUpstreams := idpLister.GetLDAPIdentityProviders()
@ -440,6 +389,99 @@ func chooseUpstreamIDP(idpLister oidc.UpstreamIdentityProvidersLister) (provider
}
}

type browserFlowAuthRequestState struct {
encodedStateParam string
pkce pkce.Code
nonce nonce.Nonce
}

// handleBrowserFlowAuthRequest performs the shared validations and setup between browser based
// auth requests regardless of IDP type-- LDAP, Active Directory and OIDC.
// It generates the state param, sets the CSRF cookie, and validates the prompt param.
// It returns an error when it encounters an error without handling it, leaving it to
// the caller to decide how to handle it.
// It returns nil with no error when it encounters an error and also has already handled writing
// the error response to the ResponseWriter, in which case the caller should not also try to
// write the error response.
func handleBrowserFlowAuthRequest(
r *http.Request,
w http.ResponseWriter,
oauthHelper fosite.OAuth2Provider,
generateCSRF func() (csrftoken.CSRFToken, error),
generateNonce func() (nonce.Nonce, error),
generatePKCE func() (pkce.Code, error),
upstreamName string,
idpType psession.ProviderType,
cookieCodec oidc.Codec,
upstreamStateEncoder oidc.Encoder,
) (*browserFlowAuthRequestState, error) {
authorizeRequester, created := newAuthorizeRequest(r, w, oauthHelper, false)
if !created {
return nil, nil // already wrote the error response, don't return error
}

now := time.Now()
_, err := oauthHelper.NewAuthorizeResponse(r.Context(), authorizeRequester, &psession.PinnipedSession{
Fosite: &openid.DefaultSession{
Claims: &jwt.IDTokenClaims{
// Temporary claim values to allow `NewAuthorizeResponse` to perform other OIDC validations.
Subject: "none",
AuthTime: now,
RequestedAt: now,
},
},
})
if err != nil {
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester, err, false)
return nil, nil // already wrote the error response, don't return error
}

csrfValue, nonceValue, pkceValue, err := generateValues(generateCSRF, generateNonce, generatePKCE)
if err != nil {
plog.Error("authorize generate error", err)
return nil, err
}
csrfFromCookie := readCSRFCookie(r, cookieCodec)
if csrfFromCookie != "" {
csrfValue = csrfFromCookie
}

encodedStateParamValue, err := upstreamStateParam(
authorizeRequester,
upstreamName,
string(idpType),
nonceValue,
csrfValue,
pkceValue,
upstreamStateEncoder,
)
if err != nil {
plog.Error("authorize upstream state param error", err)
return nil, err
}

promptParam := r.Form.Get(promptParamName)
if promptParam == promptParamNone && oidc.ScopeWasRequested(authorizeRequester, coreosoidc.ScopeOpenID) {
oidc.WriteAuthorizeError(w, oauthHelper, authorizeRequester, fosite.ErrLoginRequired, false)
return nil, nil // already wrote the error response, don't return error
}

if csrfFromCookie == "" {
// We did not receive an incoming CSRF cookie, so write a new one.
err = addCSRFSetCookieHeader(w, csrfValue, cookieCodec)
if err != nil {
plog.Error("error setting CSRF cookie", err)
return nil, err
}
}

return &browserFlowAuthRequestState{
encodedStateParam: encodedStateParamValue,
pkce: pkceValue,
nonce: nonceValue,
}, nil
}
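The doc comment above describes a two-level error convention: an error return that the caller still has to handle versus a nil state with no error after the helper already wrote the response. A short caller-side sketch of how the two callers in this diff read it (variable names mirror the code above; this restates existing behavior rather than adding any):

authRequestState, err := handleBrowserFlowAuthRequest(r, w, oauthHelper,
	generateCSRF, generateNonce, generatePKCE,
	upstreamName, idpType, cookieCodec, upstreamStateEncoder)
if err != nil {
	return err // nothing was written to w yet; the caller reports the error
}
if authRequestState == nil {
	return nil // an error response was already written to w by the helper
}
// Success: authRequestState.encodedStateParam, .nonce, and .pkce drive the upstream redirect.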

func generateValues(
generateCSRF func() (csrftoken.CSRFToken, error),
generateNonce func() (nonce.Nonce, error),
@ -463,14 +505,21 @@ func generateValues(
func upstreamStateParam(
authorizeRequester fosite.AuthorizeRequester,
upstreamName string,
upstreamType string,
nonceValue nonce.Nonce,
csrfValue csrftoken.CSRFToken,
pkceValue pkce.Code,
encoder oidc.Encoder,
) (string, error) {
stateParamData := oidc.UpstreamStateParamData{
AuthParams: authorizeRequester.GetRequestForm().Encode(),
// The auth params might have included supervisoroidc.AuthorizeUpstreamIDPNameParamName and
// supervisoroidc.AuthorizeUpstreamIDPTypeParamName, but those can be ignored by other handlers
// that are reading from the encoded upstream state param being built here.
// The UpstreamName and UpstreamType struct fields can be used instead.
// Remove those params here to avoid potential confusion about which should be used later.
AuthParams: removeCustomIDPParams(authorizeRequester.GetRequestForm()).Encode(),
UpstreamName: upstreamName,
UpstreamType: upstreamType,
Nonce: nonceValue,
CSRFToken: csrfValue,
PKCECode: pkceValue,
@ -483,6 +532,18 @@ func upstreamStateParam(
return encodedStateParamValue, nil
}

func removeCustomIDPParams(params url.Values) url.Values {
p := url.Values{}
// Copy all params.
for k, v := range params {
p[k] = v
}
// Remove the unnecessary params.
delete(p, supervisoroidc.AuthorizeUpstreamIDPNameParamName)
delete(p, supervisoroidc.AuthorizeUpstreamIDPTypeParamName)
return p
}
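A small sketch (not from the diff) of removeCustomIDPParams in action; the literal "pinniped_idp_name" and "pinniped_idp_type" strings match the query params exercised by the tests below:

params := url.Values{
	"scope":             []string{"openid"},
	"pinniped_idp_name": []string{"my-idp"}, // supervisoroidc.AuthorizeUpstreamIDPNameParamName
	"pinniped_idp_type": []string{"oidc"},   // supervisoroidc.AuthorizeUpstreamIDPTypeParamName
}
cleaned := removeCustomIDPParams(params)
// cleaned keeps only "scope"; params itself is left unmodified because the function
// copies the map before deleting the two custom keys.
fmt.Println(cleaned.Encode()) // prints "scope=openid"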
|
||||
|
||||
func addCSRFSetCookieHeader(w http.ResponseWriter, csrfValue csrftoken.CSRFToken, codec oidc.Encoder) error {
|
||||
encodedCSRFValue, err := codec.Encode(oidc.CSRFCookieEncodingName, csrfValue)
|
||||
if err != nil {
|
||||
@ -500,8 +561,3 @@ func addCSRFSetCookieHeader(w http.ResponseWriter, csrfValue csrftoken.CSRFToken
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func downstreamSubjectFromUpstreamLDAP(ldapUpstream provider.UpstreamLDAPIdentityProviderI, authenticateResponse *authenticators.Response) string {
|
||||
ldapURL := *ldapUpstream.GetURL()
|
||||
return downstreamsession.DownstreamLDAPSubject(authenticateResponse.User.GetUID(), ldapURL)
|
||||
}
|
||||
|
@ -70,6 +70,8 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
downstreamClientID = "pinniped-cli"
|
||||
upstreamLDAPURL = "ldaps://some-ldap-host:123?base=ou%3Dusers%2Cdc%3Dpinniped%2Cdc%3Ddev"
|
||||
htmlContentType = "text/html; charset=utf-8"
|
||||
jsonContentType = "application/json; charset=utf-8"
|
||||
formContentType = "application/x-www-form-urlencoded"
|
||||
)
|
||||
|
||||
require.Len(t, happyState, 8, "we expect fosite to allow 8 byte state params, so we want to test that boundary case")
|
||||
@ -409,23 +411,20 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
return pathWithQuery("/some/path", modifiedHappyGetRequestQueryMap(queryOverrides))
|
||||
}
|
||||
|
||||
expectedUpstreamStateParam := func(queryOverrides map[string]string, csrfValueOverride, upstreamNameOverride string) string {
expectedUpstreamStateParam := func(queryOverrides map[string]string, csrfValueOverride, upstreamName, upstreamType string) string {
csrf := happyCSRF
if csrfValueOverride != "" {
csrf = csrfValueOverride
}
upstreamName := oidcUpstreamName
if upstreamNameOverride != "" {
upstreamName = upstreamNameOverride
}
encoded, err := happyStateEncoder.Encode("s",
oidctestutil.ExpectedUpstreamStateParamFormat{
P: encodeQuery(modifiedHappyGetRequestQueryMap(queryOverrides)),
U: upstreamName,
T: upstreamType,
N: happyNonce,
C: csrf,
K: happyPKCE,
V: "1",
V: "2",
},
)
require.NoError(t, err)
|
||||
@ -558,7 +557,41 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, "", ""), nil),
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, "", oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
{
|
||||
name: "LDAP upstream browser flow happy path using GET without a CSRF cookie",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
generateCSRF: happyCSRFGenerator,
|
||||
generatePKCE: happyPKCEGenerator,
|
||||
generateNonce: happyNonceGenerator,
|
||||
stateEncoder: happyStateEncoder,
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: urlWithQuery(downstreamIssuer+"/login", map[string]string{"state": expectedUpstreamStateParam(nil, "", ldapUpstreamName, "ldap")}),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
{
|
||||
name: "Active Directory upstream browser flow happy path using GET without a CSRF cookie",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
generateCSRF: happyCSRFGenerator,
|
||||
generatePKCE: happyPKCEGenerator,
|
||||
generateNonce: happyNonceGenerator,
|
||||
stateEncoder: happyStateEncoder,
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: urlWithQuery(downstreamIssuer+"/login", map[string]string{"state": expectedUpstreamStateParam(nil, "", activeDirectoryUpstreamName, "activedirectory")}),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
@ -585,7 +618,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantDownstreamCustomSessionData: expectedHappyOIDCPasswordGrantCustomSession,
|
||||
},
|
||||
{
|
||||
name: "LDAP upstream happy path using GET",
|
||||
name: "LDAP cli upstream happy path using GET",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
@ -606,7 +639,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantDownstreamCustomSessionData: expectedHappyLDAPUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "ActiveDirectory upstream happy path using GET",
|
||||
name: "ActiveDirectory cli upstream happy path using GET",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
@ -639,7 +672,41 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
csrfCookie: "__Host-pinniped-csrf=" + encodedIncomingCookieCSRFValue + " ",
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, incomingCookieCSRFValue, ""), nil),
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, incomingCookieCSRFValue, oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
{
|
||||
name: "LDAP upstream browser flow happy path using GET with a CSRF cookie",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
generateCSRF: happyCSRFGenerator,
|
||||
generatePKCE: happyPKCEGenerator,
|
||||
generateNonce: happyNonceGenerator,
|
||||
stateEncoder: happyStateEncoder,
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
csrfCookie: "__Host-pinniped-csrf=" + encodedIncomingCookieCSRFValue + " ",
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamIssuer+"/login", map[string]string{"state": expectedUpstreamStateParam(nil, incomingCookieCSRFValue, ldapUpstreamName, "ldap")}),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
{
|
||||
name: "Active Directory upstream browser flow happy path using GET with a CSRF cookie",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
generateCSRF: happyCSRFGenerator,
|
||||
generatePKCE: happyPKCEGenerator,
|
||||
generateNonce: happyNonceGenerator,
|
||||
stateEncoder: happyStateEncoder,
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
csrfCookie: "__Host-pinniped-csrf=" + encodedIncomingCookieCSRFValue + " ",
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamIssuer+"/login", map[string]string{"state": expectedUpstreamStateParam(nil, incomingCookieCSRFValue, activeDirectoryUpstreamName, "activedirectory")}),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
@ -653,13 +720,51 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodPost,
|
||||
path: "/some/path",
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
contentType: formContentType,
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "",
|
||||
wantBodyString: "",
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, "", ""), nil),
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, "", oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
},
|
||||
{
|
||||
name: "LDAP upstream browser flow happy path using POST",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
generateCSRF: happyCSRFGenerator,
|
||||
generatePKCE: happyPKCEGenerator,
|
||||
generateNonce: happyNonceGenerator,
|
||||
stateEncoder: happyStateEncoder,
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodPost,
|
||||
path: "/some/path",
|
||||
contentType: formContentType,
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "",
|
||||
wantBodyString: "",
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: urlWithQuery(downstreamIssuer+"/login", map[string]string{"state": expectedUpstreamStateParam(nil, "", ldapUpstreamName, "ldap")}),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
},
|
||||
{
|
||||
name: "Active Directory upstream browser flow happy path using POST",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
generateCSRF: happyCSRFGenerator,
|
||||
generatePKCE: happyPKCEGenerator,
|
||||
generateNonce: happyNonceGenerator,
|
||||
stateEncoder: happyStateEncoder,
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodPost,
|
||||
path: "/some/path",
|
||||
contentType: formContentType,
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "",
|
||||
wantBodyString: "",
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: urlWithQuery(downstreamIssuer+"/login", map[string]string{"state": expectedUpstreamStateParam(nil, "", activeDirectoryUpstreamName, "activedirectory")}),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
},
|
||||
{
|
||||
@ -667,7 +772,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithOIDC(passwordGrantUpstreamOIDCIdentityProviderBuilder().Build()),
|
||||
method: http.MethodPost,
|
||||
path: "/some/path",
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
contentType: formContentType,
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
@ -687,11 +792,11 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantDownstreamCustomSessionData: expectedHappyOIDCPasswordGrantCustomSession,
|
||||
},
|
||||
{
|
||||
name: "LDAP upstream happy path using POST",
|
||||
name: "LDAP cli upstream happy path using POST",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
method: http.MethodPost,
|
||||
path: "/some/path",
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
contentType: formContentType,
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
@ -710,11 +815,11 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantDownstreamCustomSessionData: expectedHappyLDAPUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "Active Directory upstream happy path using POST",
|
||||
name: "Active Directory cli upstream happy path using POST",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
method: http.MethodPost,
|
||||
path: "/some/path",
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
contentType: formContentType,
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
@ -742,13 +847,29 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"prompt": "login"}),
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(map[string]string{"prompt": "login"}, "", ""), nil),
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(map[string]string{"prompt": "login"}, "", oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
},
|
||||
{
|
||||
name: "OIDC upstream browser flow happy path with custom IDP name and type query params, which are excluded from the query params in the upstream state",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithOIDC(upstreamOIDCIdentityProviderBuilder().Build()),
|
||||
generateCSRF: happyCSRFGenerator,
|
||||
generatePKCE: happyPKCEGenerator,
|
||||
generateNonce: happyNonceGenerator,
|
||||
stateEncoder: happyStateEncoder,
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"pinniped_idp_name": "currently-ignored", "pinniped_idp_type": "oidc"}),
|
||||
contentType: formContentType,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, "", oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
},
|
||||
{
|
||||
@ -761,13 +882,11 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"prompt": "login"}),
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(map[string]string{"prompt": "login"}, "", ""), map[string]string{"prompt": "consent", "abc": "123", "def": "456"}),
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(map[string]string{"prompt": "login"}, "", oidcUpstreamName, "oidc"), map[string]string{"prompt": "consent", "abc": "123", "def": "456"}),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
},
|
||||
{
|
||||
@ -780,10 +899,8 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
cookieEncoder: happyCookieEncoder,
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"prompt": "none"}),
|
||||
contentType: "application/x-www-form-urlencoded",
|
||||
body: encodeQuery(happyGetRequestQueryMap),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeLoginRequiredErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -802,7 +919,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantContentType: htmlContentType,
|
||||
// Generated a new CSRF cookie and set it in the response.
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, "", ""), nil),
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(nil, "", oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
@ -823,7 +940,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(map[string]string{
|
||||
"redirect_uri": downstreamRedirectURIWithDifferentPort, // not the same port number that is registered for the client
|
||||
}, "", ""), nil),
|
||||
}, "", oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
@ -889,7 +1006,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(map[string]string{
|
||||
"scope": "openid offline_access",
|
||||
}, "", ""), nil),
|
||||
}, "", oidcUpstreamName, "oidc"), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
},
|
||||
@ -1010,7 +1127,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
Password: "wrong-password",
|
||||
}},
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1022,7 +1139,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr("wrong-password"),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithBadUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1034,7 +1151,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr("wrong-password"),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithBadUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1046,7 +1163,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr("wrong-username"),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithBadUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1058,19 +1175,31 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr("wrong-username"),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithBadUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "missing upstream username on request for LDAP authentication",
|
||||
name: "missing upstream username but has password on request for OIDC password grant",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithOIDC(passwordGrantUpstreamOIDCIdentityProviderBuilder().Build()),
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
customUsernameHeader: nil, // do not send header
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "missing upstream username but has password on request for LDAP authentication",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: happyGetRequestPath,
|
||||
customUsernameHeader: nil, // do not send header
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1082,7 +1211,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: nil, // do not send header
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1094,7 +1223,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: nil, // do not send header
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1106,7 +1235,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: nil, // do not send header
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1119,7 +1248,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUserInfoEndpointErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1132,7 +1261,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUserInfoEndpointErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1145,7 +1274,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingAccessTokenErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1158,7 +1287,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingAccessTokenErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1171,7 +1300,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingAccessTokenErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1184,7 +1313,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingAccessTokenErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1196,7 +1325,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: nil, // do not send header
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithMissingUsernamePasswordHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1208,7 +1337,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithPasswordGrantDisallowedHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1225,7 +1354,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
"redirect_uri": "http://127.0.0.1/does-not-match-what-is-configured-for-pinniped-cli-client",
|
||||
}),
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidRedirectURIErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1238,7 +1367,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidRedirectURIErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1251,7 +1380,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidRedirectURIErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1264,7 +1393,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidRedirectURIErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1278,7 +1407,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"client_id": "invalid-client"}),
|
||||
wantStatus: http.StatusUnauthorized,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidClientErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1289,7 +1418,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusUnauthorized,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidClientErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1298,7 +1427,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"client_id": "invalid-client"}),
|
||||
wantStatus: http.StatusUnauthorized,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidClientErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1307,7 +1436,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"client_id": "invalid-client"}),
|
||||
wantStatus: http.StatusUnauthorized,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidClientErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1321,7 +1450,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": "unsupported"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeUnsupportedResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1333,27 +1462,51 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeUnsupportedResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "response type is unsupported when using LDAP upstream",
|
||||
name: "response type is unsupported when using LDAP cli upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": "unsupported"}),
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeUnsupportedResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "response type is unsupported when using active directory upstream",
|
||||
name: "response type is unsupported when using LDAP browser upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": "unsupported"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeUnsupportedResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "response type is unsupported when using active directory cli upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": "unsupported"}),
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeUnsupportedResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "response type is unsupported when using active directory browser upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": "unsupported"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeUnsupportedResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1368,7 +1521,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"scope": "openid profile email tuna"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidScopeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1380,7 +1533,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidScopeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1392,7 +1545,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidScopeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1404,7 +1557,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidScopeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1419,7 +1572,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": ""}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1431,27 +1584,51 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "missing response type in request using LDAP upstream",
|
||||
name: "missing response type in request using LDAP cli upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": ""}),
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "missing response type in request using Active Directory upstream",
|
||||
name: "missing response type in request using LDAP browser upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": ""}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "missing response type in request using Active Directory cli upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": ""}),
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
{
|
||||
name: "missing response type in request using Active Directory browser upstream",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider),
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"response_type": ""}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingResponseTypeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1466,7 +1643,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"client_id": ""}),
|
||||
wantStatus: http.StatusUnauthorized,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidClientErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1477,7 +1654,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusUnauthorized,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidClientErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1486,7 +1663,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"client_id": ""}),
|
||||
wantStatus: http.StatusUnauthorized,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantBodyJSON: fositeInvalidClientErrorBody,
|
||||
},
|
||||
{
|
||||
@ -1500,7 +1677,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"code_challenge": ""}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1513,7 +1690,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1526,7 +1703,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1542,7 +1719,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"code_challenge_method": "this-is-not-a-valid-pkce-alg"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidCodeChallengeErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1555,7 +1732,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidCodeChallengeErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1568,7 +1745,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidCodeChallengeErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1584,7 +1761,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"code_challenge_method": "plain"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1597,7 +1774,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1610,7 +1787,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1626,7 +1803,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"code_challenge_method": ""}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1639,7 +1816,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1652,7 +1829,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
@ -1670,7 +1847,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"prompt": "none login"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositePromptHasNoneAndOtherValueErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1685,7 +1862,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositePromptHasNoneAndOtherValueErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 1, // fosite already stored the authcode before it noticed the error
|
||||
@ -1700,7 +1877,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositePromptHasNoneAndOtherValueErrorQuery),
|
||||
wantBodyString: "",
|
||||
wantUnnecessaryStoredRecords: 1, // fosite already stored the authcode before it noticed the error
|
||||
@ -1720,7 +1897,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
wantContentType: htmlContentType,
|
||||
wantCSRFValueInCookieHeader: happyCSRF,
|
||||
wantLocationHeader: expectedRedirectLocationForUpstreamOIDC(expectedUpstreamStateParam(
|
||||
map[string]string{"prompt": "none login", "scope": "email"}, "", "",
|
||||
map[string]string{"prompt": "none login", "scope": "email"}, "", oidcUpstreamName, "oidc",
|
||||
), nil),
|
||||
wantUpstreamStateParamInLocationHeader: true,
|
||||
wantBodyStringWithLocationInHref: true,
|
||||
@ -1889,7 +2066,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithInvalidEmailVerifiedHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1907,7 +2084,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithFalseEmailVerifiedHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -1996,7 +2173,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimMissingHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2035,7 +2212,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimInvalidFormatHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2050,7 +2227,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimEmptyHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2065,7 +2242,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimMissingHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2080,7 +2257,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimEmptyHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2095,7 +2272,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimInvalidFormatHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2110,7 +2287,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimMissingHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2125,7 +2302,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimEmptyHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2140,7 +2317,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimInvalidFormatHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2155,7 +2332,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimInvalidFormatHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2170,7 +2347,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimInvalidFormatHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2185,7 +2362,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantPasswordGrantCall: happyUpstreamPasswordGrantMockExpectation,
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeAccessDeniedWithRequiredClaimInvalidFormatHintErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2200,7 +2377,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
method: http.MethodGet,
|
||||
path: modifiedHappyGetRequestPath(map[string]string{"state": "short"}),
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidStateErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2212,7 +2389,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(oidcUpstreamUsername),
|
||||
customPasswordHeader: pointer.StringPtr(oidcUpstreamPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidStateErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2224,7 +2401,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
customUsernameHeader: pointer.StringPtr(happyLDAPUsername),
|
||||
customPasswordHeader: pointer.StringPtr(happyLDAPPassword),
|
||||
wantStatus: http.StatusFound,
|
||||
wantContentType: "application/json; charset=utf-8",
|
||||
wantContentType: jsonContentType,
|
||||
wantLocationHeader: urlWithQuery(downstreamRedirectURI, fositeInvalidStateErrorQuery),
|
||||
wantBodyString: "",
|
||||
},
|
||||
@ -2401,7 +2578,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
|
||||
require.Equal(t, test.wantStatus, rsp.Code)
|
||||
testutil.RequireEqualContentType(t, rsp.Header().Get("Content-Type"), test.wantContentType)
|
||||
testutil.RequireSecurityHeaders(t, rsp)
|
||||
testutil.RequireSecurityHeadersWithoutCustomCSPs(t, rsp)
|
||||
|
||||
if test.wantPasswordGrantCall != nil {
|
||||
test.wantPasswordGrantCall.args.Ctx = reqContext
|
||||
@ -2543,7 +2720,7 @@ func TestAuthorizationEndpoint(t *testing.T) {
|
||||
"scope": "some-other-new-scope1 some-other-new-scope2", // updated expectation
|
||||
"client_id": "some-other-new-client-id", // updated expectation
|
||||
"state": expectedUpstreamStateParam(
|
||||
nil, "", "some-other-new-idp-name",
|
||||
nil, "", "some-other-new-idp-name", "oidc",
|
||||
), // updated expectation
|
||||
"nonce": happyNonce,
|
||||
"code_challenge": expectedUpstreamCodeChallenge,
|
||||
|
@ -5,7 +5,6 @@
|
||||
package callback
|
||||
|
||||
import (
|
||||
"crypto/subtle"
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
@ -14,7 +13,6 @@ import (
|
||||
"go.pinniped.dev/internal/httputil/httperr"
|
||||
"go.pinniped.dev/internal/httputil/securityheader"
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/csrftoken"
|
||||
"go.pinniped.dev/internal/oidc/downstreamsession"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/oidc/provider/formposthtml"
|
||||
@ -102,9 +100,9 @@ func validateRequest(r *http.Request, stateDecoder, cookieDecoder oidc.Decoder)
|
||||
return nil, httperr.Newf(http.StatusMethodNotAllowed, "%s (try GET)", r.Method)
|
||||
}
|
||||
|
||||
csrfValue, err := readCSRFCookie(r, cookieDecoder)
|
||||
_, decodedState, err := oidc.ReadStateParamAndValidateCSRFCookie(r, cookieDecoder, stateDecoder)
|
||||
if err != nil {
|
||||
plog.InfoErr("error reading CSRF cookie", err)
|
||||
plog.InfoErr("state or CSRF error", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
@ -113,23 +111,7 @@ func validateRequest(r *http.Request, stateDecoder, cookieDecoder oidc.Decoder)
|
||||
return nil, httperr.New(http.StatusBadRequest, "code param not found")
|
||||
}
|
||||
|
||||
if r.FormValue("state") == "" {
|
||||
plog.Info("state param not found")
|
||||
return nil, httperr.New(http.StatusBadRequest, "state param not found")
|
||||
}
|
||||
|
||||
state, err := readState(r, stateDecoder)
|
||||
if err != nil {
|
||||
plog.InfoErr("error reading state", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if subtle.ConstantTimeCompare([]byte(state.CSRFToken), []byte(csrfValue)) != 1 {
|
||||
plog.InfoErr("CSRF value does not match", err)
|
||||
return nil, httperr.Wrap(http.StatusForbidden, "CSRF value does not match", err)
|
||||
}
|
||||
|
||||
return state, nil
|
||||
return decodedState, nil
|
||||
}
|
||||
|
||||
func findUpstreamIDPConfig(upstreamName string, upstreamIDPs oidc.UpstreamOIDCIdentityProvidersLister) provider.UpstreamOIDCIdentityProviderI {
|
||||
@ -140,36 +122,3 @@ func findUpstreamIDPConfig(upstreamName string, upstreamIDPs oidc.UpstreamOIDCId
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func readCSRFCookie(r *http.Request, cookieDecoder oidc.Decoder) (csrftoken.CSRFToken, error) {
|
||||
receivedCSRFCookie, err := r.Cookie(oidc.CSRFCookieName)
|
||||
if err != nil {
|
||||
// Error means that the cookie was not found
|
||||
return "", httperr.Wrap(http.StatusForbidden, "CSRF cookie is missing", err)
|
||||
}
|
||||
|
||||
var csrfFromCookie csrftoken.CSRFToken
|
||||
err = cookieDecoder.Decode(oidc.CSRFCookieEncodingName, receivedCSRFCookie.Value, &csrfFromCookie)
|
||||
if err != nil {
|
||||
return "", httperr.Wrap(http.StatusForbidden, "error reading CSRF cookie", err)
|
||||
}
|
||||
|
||||
return csrfFromCookie, nil
|
||||
}
|
||||
|
||||
func readState(r *http.Request, stateDecoder oidc.Decoder) (*oidc.UpstreamStateParamData, error) {
|
||||
var state oidc.UpstreamStateParamData
|
||||
if err := stateDecoder.Decode(
|
||||
oidc.UpstreamStateParamEncodingName,
|
||||
r.FormValue("state"),
|
||||
&state,
|
||||
); err != nil {
|
||||
return nil, httperr.New(http.StatusBadRequest, "error reading state")
|
||||
}
|
||||
|
||||
if state.FormatVersion != oidc.UpstreamStateParamFormatVersion {
|
||||
return nil, httperr.New(http.StatusUnprocessableEntity, "state format version is invalid")
|
||||
}
|
||||
|
||||
return &state, nil
|
||||
}
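The readCSRFCookie and readState helpers removed above, together with the constant-time CSRF comparison, are consolidated into the shared oidc.ReadStateParamAndValidateCSRFCookie helper that validateRequest now calls (and that the new login endpoint below reuses). That helper itself is not part of this diff; the following is only a sketch of what it plausibly looks like, reconstructed from the removed logic, and the real implementation in the oidc package may differ.

// Sketch only: a plausible reconstruction of the shared helper that replaces the
// two helpers removed above; the actual implementation in the oidc package may differ.
package oidc

import (
	"crypto/subtle"
	"net/http"

	"go.pinniped.dev/internal/httputil/httperr"
	"go.pinniped.dev/internal/oidc/csrftoken"
)

func ReadStateParamAndValidateCSRFCookie(r *http.Request, cookieDecoder Decoder, stateDecoder Decoder) (string, *UpstreamStateParamData, error) {
	// Read and decode the CSRF cookie (previously readCSRFCookie).
	receivedCSRFCookie, err := r.Cookie(CSRFCookieName)
	if err != nil {
		return "", nil, httperr.Wrap(http.StatusForbidden, "CSRF cookie is missing", err)
	}
	var csrfFromCookie csrftoken.CSRFToken
	if err := cookieDecoder.Decode(CSRFCookieEncodingName, receivedCSRFCookie.Value, &csrfFromCookie); err != nil {
		return "", nil, httperr.Wrap(http.StatusForbidden, "error reading CSRF cookie", err)
	}

	// Read, decode, and version-check the state param (previously readState).
	encodedState := r.FormValue("state")
	if encodedState == "" {
		return "", nil, httperr.New(http.StatusBadRequest, "state param not found")
	}
	var decodedState UpstreamStateParamData
	if err := stateDecoder.Decode(UpstreamStateParamEncodingName, encodedState, &decodedState); err != nil {
		return "", nil, httperr.New(http.StatusBadRequest, "error reading state")
	}
	if decodedState.FormatVersion != UpstreamStateParamFormatVersion {
		return "", nil, httperr.New(http.StatusUnprocessableEntity, "state format version is invalid")
	}

	// Compare the CSRF token carried in the state param against the cookie value in constant time.
	if subtle.ConstantTimeCompare([]byte(decodedState.CSRFToken), []byte(csrfFromCookie)) != 1 {
		return "", nil, httperr.New(http.StatusForbidden, "CSRF value does not match")
	}

	return encodedState, &decodedState, nil
}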
|
||||
|
@ -48,7 +48,7 @@ const (
|
||||
happyDownstreamCSRF = "test-csrf"
|
||||
happyDownstreamPKCE = "test-pkce"
|
||||
happyDownstreamNonce = "test-nonce"
|
||||
happyDownstreamStateVersion = "1"
|
||||
happyDownstreamStateVersion = "2"
|
||||
|
||||
downstreamIssuer = "https://my-downstream-issuer.com/path"
|
||||
downstreamRedirectURI = "http://127.0.0.1/callback"
|
||||
@ -1034,7 +1034,7 @@ func TestCallbackEndpoint(t *testing.T) {
|
||||
t.Logf("response: %#v", rsp)
|
||||
t.Logf("response body: %q", rsp.Body.String())
|
||||
|
||||
testutil.RequireSecurityHeaders(t, rsp)
|
||||
testutil.RequireSecurityHeadersWithFormPostPageCSPs(t, rsp)
|
||||
|
||||
if test.wantAuthcodeExchangeCall != nil {
|
||||
test.wantAuthcodeExchangeCall.args.Ctx = reqContext
|
||||
@ -1156,12 +1156,11 @@ func (r *requestPath) String() string {
|
||||
return path + params.Encode()
|
||||
}
|
||||
|
||||
type upstreamStateParamBuilder oidctestutil.ExpectedUpstreamStateParamFormat
|
||||
|
||||
func happyUpstreamStateParam() *upstreamStateParamBuilder {
|
||||
return &upstreamStateParamBuilder{
|
||||
func happyUpstreamStateParam() *oidctestutil.UpstreamStateParamBuilder {
|
||||
return &oidctestutil.UpstreamStateParamBuilder{
|
||||
U: happyUpstreamIDPName,
|
||||
P: happyDownstreamRequestParams,
|
||||
T: "oidc",
|
||||
N: happyDownstreamNonce,
|
||||
C: happyDownstreamCSRF,
|
||||
K: happyDownstreamPKCE,
|
||||
@ -1169,37 +1168,6 @@ func happyUpstreamStateParam() *upstreamStateParamBuilder {
|
||||
}
|
||||
}
|
||||
|
||||
func (b upstreamStateParamBuilder) Build(t *testing.T, stateEncoder *securecookie.SecureCookie) string {
|
||||
state, err := stateEncoder.Encode("s", b)
|
||||
require.NoError(t, err)
|
||||
return state
|
||||
}
|
||||
|
||||
func (b *upstreamStateParamBuilder) WithAuthorizeRequestParams(params string) *upstreamStateParamBuilder {
|
||||
b.P = params
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *upstreamStateParamBuilder) WithNonce(nonce string) *upstreamStateParamBuilder {
|
||||
b.N = nonce
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *upstreamStateParamBuilder) WithCSRF(csrf string) *upstreamStateParamBuilder {
|
||||
b.C = csrf
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *upstreamStateParamBuilder) WithPKCVE(pkce string) *upstreamStateParamBuilder {
|
||||
b.K = pkce
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *upstreamStateParamBuilder) WithStateVersion(version string) *upstreamStateParamBuilder {
|
||||
b.V = version
|
||||
return b
|
||||
}
|
||||
|
||||
func happyUpstream() *oidctestutil.TestUpstreamOIDCIdentityProviderBuilder {
|
||||
return oidctestutil.NewTestUpstreamOIDCIdentityProviderBuilder().
|
||||
WithName(happyUpstreamIDPName).
|
||||
|
@ -16,6 +16,7 @@ import (
|
||||
"github.com/ory/fosite/token/jwt"
|
||||
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
|
||||
|
||||
"go.pinniped.dev/internal/authenticators"
|
||||
"go.pinniped.dev/internal/constable"
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
@ -61,6 +62,34 @@ func MakeDownstreamSession(subject string, username string, groups []string, cus
|
||||
return openIDSession
|
||||
}
|
||||
|
||||
func MakeDownstreamLDAPOrADCustomSessionData(
|
||||
ldapUpstream provider.UpstreamLDAPIdentityProviderI,
|
||||
idpType psession.ProviderType,
|
||||
authenticateResponse *authenticators.Response,
|
||||
) *psession.CustomSessionData {
|
||||
customSessionData := &psession.CustomSessionData{
|
||||
ProviderUID: ldapUpstream.GetResourceUID(),
|
||||
ProviderName: ldapUpstream.GetName(),
|
||||
ProviderType: idpType,
|
||||
}
|
||||
|
||||
if idpType == psession.ProviderTypeLDAP {
|
||||
customSessionData.LDAP = &psession.LDAPSessionData{
|
||||
UserDN: authenticateResponse.DN,
|
||||
ExtraRefreshAttributes: authenticateResponse.ExtraRefreshAttributes,
|
||||
}
|
||||
}
|
||||
|
||||
if idpType == psession.ProviderTypeActiveDirectory {
|
||||
customSessionData.ActiveDirectory = &psession.ActiveDirectorySessionData{
|
||||
UserDN: authenticateResponse.DN,
|
||||
ExtraRefreshAttributes: authenticateResponse.ExtraRefreshAttributes,
|
||||
}
|
||||
}
|
||||
|
||||
return customSessionData
|
||||
}
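Together with the existing MakeDownstreamSession and the DownstreamSubjectFromUpstreamLDAP helper added further down in this file, this gives the authorization and login endpoints one path for turning a successful LDAP or Active Directory bind into a downstream session. A hypothetical in-package call site might look like the following; the variables and the user.Info accessors are assumptions for illustration, not part of this change.

// Illustrative fragment only: ldapUpstream and authenticateResponse are assumed to
// come from the upstream authenticator that just validated the user's credentials.
subject := DownstreamSubjectFromUpstreamLDAP(ldapUpstream, authenticateResponse)
customSessionData := MakeDownstreamLDAPOrADCustomSessionData(ldapUpstream, psession.ProviderTypeLDAP, authenticateResponse)
// The username and group accessors on the returned user.Info are assumptions here.
openIDSession := MakeDownstreamSession(subject, authenticateResponse.User.GetName(), authenticateResponse.User.GetGroups(), customSessionData)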
|
||||
|
||||
func MakeDownstreamOIDCCustomSessionData(oidcUpstream provider.UpstreamOIDCIdentityProviderI, token *oidctypes.Token) (*psession.CustomSessionData, error) {
|
||||
upstreamSubject, err := ExtractStringClaimValue(oidc.IDTokenSubjectClaim, oidcUpstream.GetName(), token.IDToken.Claims)
|
||||
if err != nil {
|
||||
@ -228,6 +257,11 @@ func ExtractStringClaimValue(claimName string, upstreamIDPName string, idTokenCl
|
||||
return valueAsString, nil
|
||||
}
|
||||
|
||||
func DownstreamSubjectFromUpstreamLDAP(ldapUpstream provider.UpstreamLDAPIdentityProviderI, authenticateResponse *authenticators.Response) string {
|
||||
ldapURL := *ldapUpstream.GetURL()
|
||||
return DownstreamLDAPSubject(authenticateResponse.User.GetUID(), ldapURL)
|
||||
}
|
||||
|
||||
func DownstreamLDAPSubject(uid string, ldapURL url.URL) string {
|
||||
q := ldapURL.Query()
|
||||
q.Set(oidc.IDTokenSubjectClaim, uid)
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2021 the Pinniped contributors. All Rights Reserved.
|
||||
// Copyright 2021-2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Package idpdiscovery provides a handler for the upstream IDP discovery endpoint.
|
||||
@ -44,14 +44,14 @@ func responseAsJSON(upstreamIDPs oidc.UpstreamIdentityProvidersLister) ([]byte,
|
||||
r.PinnipedIDPs = append(r.PinnipedIDPs, v1alpha1.PinnipedIDP{
|
||||
Name: provider.GetName(),
|
||||
Type: v1alpha1.IDPTypeLDAP,
|
||||
Flows: []v1alpha1.IDPFlow{v1alpha1.IDPFlowCLIPassword},
|
||||
Flows: []v1alpha1.IDPFlow{v1alpha1.IDPFlowCLIPassword, v1alpha1.IDPFlowBrowserAuthcode},
|
||||
})
|
||||
}
|
||||
for _, provider := range upstreamIDPs.GetActiveDirectoryIdentityProviders() {
|
||||
r.PinnipedIDPs = append(r.PinnipedIDPs, v1alpha1.PinnipedIDP{
|
||||
Name: provider.GetName(),
|
||||
Type: v1alpha1.IDPTypeActiveDirectory,
|
||||
Flows: []v1alpha1.IDPFlow{v1alpha1.IDPFlowCLIPassword},
|
||||
Flows: []v1alpha1.IDPFlow{v1alpha1.IDPFlowCLIPassword, v1alpha1.IDPFlowBrowserAuthcode},
|
||||
})
|
||||
}
|
||||
for _, provider := range upstreamIDPs.GetOIDCIdentityProviders() {
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2021 the Pinniped contributors. All Rights Reserved.
|
||||
// Copyright 2021-2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package idpdiscovery
|
||||
@ -37,22 +37,22 @@ func TestIDPDiscovery(t *testing.T) {
|
||||
wantContentType: "application/json",
|
||||
wantFirstResponseBodyJSON: here.Doc(`{
|
||||
"pinniped_identity_providers": [
|
||||
{"name": "a-some-ldap-idp", "type": "ldap", "flows": ["cli_password"]},
|
||||
{"name": "a-some-ldap-idp", "type": "ldap", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "a-some-oidc-idp", "type": "oidc", "flows": ["browser_authcode"]},
|
||||
{"name": "x-some-idp", "type": "ldap", "flows": ["cli_password"]},
|
||||
{"name": "x-some-idp", "type": "ldap", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "x-some-idp", "type": "oidc", "flows": ["browser_authcode"]},
|
||||
{"name": "y-some-ad-idp", "type": "activedirectory", "flows": ["cli_password"]},
|
||||
{"name": "z-some-ad-idp", "type": "activedirectory", "flows": ["cli_password"]},
|
||||
{"name": "z-some-ldap-idp", "type": "ldap", "flows": ["cli_password"]},
|
||||
{"name": "y-some-ad-idp", "type": "activedirectory", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "z-some-ad-idp", "type": "activedirectory", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "z-some-ldap-idp", "type": "ldap", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "z-some-oidc-idp", "type": "oidc", "flows": ["browser_authcode", "cli_password"]}
|
||||
]
|
||||
}`),
|
||||
wantSecondResponseBodyJSON: here.Doc(`{
|
||||
"pinniped_identity_providers": [
|
||||
{"name": "some-other-ad-idp-1", "type": "activedirectory", "flows": ["cli_password"]},
|
||||
{"name": "some-other-ad-idp-2", "type": "activedirectory", "flows": ["cli_password"]},
|
||||
{"name": "some-other-ldap-idp-1", "type": "ldap", "flows": ["cli_password"]},
|
||||
{"name": "some-other-ldap-idp-2", "type": "ldap", "flows": ["cli_password"]},
|
||||
{"name": "some-other-ad-idp-1", "type": "activedirectory", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "some-other-ad-idp-2", "type": "activedirectory", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "some-other-ldap-idp-1", "type": "ldap", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "some-other-ldap-idp-2", "type": "ldap", "flows": ["cli_password", "browser_authcode"]},
|
||||
{"name": "some-other-oidc-idp-1", "type": "oidc", "flows": ["browser_authcode", "cli_password"]},
|
||||
{"name": "some-other-oidc-idp-2", "type": "oidc", "flows": ["browser_authcode"]}
|
||||
]
|
||||
|
42
internal/oidc/login/get_login_handler.go
Normal file

@ -0,0 +1,42 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package login
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/login/loginhtml"
|
||||
)
|
||||
|
||||
const (
|
||||
internalErrorMessage = "An internal error occurred. Please contact your administrator for help."
|
||||
incorrectUsernameOrPasswordErrorMessage = "Incorrect username or password."
|
||||
)
|
||||
|
||||
func NewGetHandler(loginPath string) HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request, encodedState string, decodedState *oidc.UpstreamStateParamData) error {
|
||||
alertMessage, hasAlert := getAlert(r)
|
||||
|
||||
pageInputs := &loginhtml.PageData{
|
||||
PostPath: loginPath,
|
||||
State: encodedState,
|
||||
IDPName: decodedState.UpstreamName,
|
||||
HasAlertError: hasAlert,
|
||||
AlertMessage: alertMessage,
|
||||
}
|
||||
return loginhtml.Template().Execute(w, pageInputs)
|
||||
}
|
||||
}
|
||||
|
||||
func getAlert(r *http.Request) (string, bool) {
|
||||
errorParamValue := r.URL.Query().Get(errParamName)
|
||||
|
||||
message := internalErrorMessage
|
||||
if errorParamValue == string(ShowBadUserPassErr) {
|
||||
message = incorrectUsernameOrPasswordErrorMessage
|
||||
}
|
||||
|
||||
return message, errorParamValue != ""
|
||||
}
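The err query parameter read by getAlert above is produced by RedirectToLoginPage, added in login_handler.go below: after a failed password submission the POST handler can redirect the browser back to this GET page with err=login_error, which renders the "Incorrect username or password." banner, while any other non-empty value falls back to the generic internal-error message. A minimal illustration follows; the issuer and the request/response variables are placeholders, not values from this diff.

// Illustrative fragment only: r, w, and encodedState are assumed to be in scope.
// Bounce the browser back to the GET login page after a bad username/password attempt.
err := RedirectToLoginPage(r, w, "https://issuer.example.com/some/path", encodedState, ShowBadUserPassErr)
// On success this writes a 303 See Other whose Location header looks roughly like:
//   https://issuer.example.com/some/path/login?err=login_error&state=<encodedState>
_ = err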
|
116
internal/oidc/login/get_login_handler_test.go
Normal file
@ -0,0 +1,116 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package login
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/login/loginhtml"
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
)
|
||||
|
||||
func TestGetLogin(t *testing.T) {
|
||||
const (
|
||||
testPath = "/some/path/login"
|
||||
testUpstreamName = "some-ldap-idp"
|
||||
testUpstreamType = "ldap"
|
||||
testEncodedState = "fake-encoded-state-value"
|
||||
)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
decodedState *oidc.UpstreamStateParamData
|
||||
encodedState string
|
||||
errParam string
|
||||
idps oidc.UpstreamIdentityProvidersLister
|
||||
wantStatus int
|
||||
wantContentType string
|
||||
wantBody string
|
||||
}{
|
||||
{
|
||||
name: "Happy path ldap",
|
||||
decodedState: &oidc.UpstreamStateParamData{
|
||||
UpstreamName: testUpstreamName,
|
||||
UpstreamType: testUpstreamType,
|
||||
},
|
||||
encodedState: testEncodedState, // the encoded and decoded state don't match, but that verification is handled one level up.
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: testutil.ExpectedLoginPageHTML(loginhtml.CSS(), testUpstreamName, testPath, testEncodedState, ""), // no alert message
|
||||
},
|
||||
{
|
||||
name: "displays error banner when err=login_error param is sent",
|
||||
decodedState: &oidc.UpstreamStateParamData{
|
||||
UpstreamName: testUpstreamName,
|
||||
UpstreamType: testUpstreamType,
|
||||
},
|
||||
encodedState: testEncodedState,
|
||||
errParam: "login_error",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: testutil.ExpectedLoginPageHTML(loginhtml.CSS(), testUpstreamName, testPath, testEncodedState,
|
||||
"Incorrect username or password.",
|
||||
),
|
||||
},
|
||||
{
|
||||
name: "displays error banner when err=internal_error param is sent",
|
||||
decodedState: &oidc.UpstreamStateParamData{
|
||||
UpstreamName: testUpstreamName,
|
||||
UpstreamType: testUpstreamType,
|
||||
},
|
||||
encodedState: testEncodedState,
|
||||
errParam: "internal_error",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: testutil.ExpectedLoginPageHTML(loginhtml.CSS(), testUpstreamName, testPath, testEncodedState,
|
||||
"An internal error occurred. Please contact your administrator for help.",
|
||||
),
|
||||
},
|
||||
{
|
||||
// If we get an error that we don't recognize, that's also an error, so we
|
||||
// should probably just tell you to contact your administrator...
|
||||
name: "displays generic error banner when unrecognized err param is sent",
|
||||
decodedState: &oidc.UpstreamStateParamData{
|
||||
UpstreamName: testUpstreamName,
|
||||
UpstreamType: testUpstreamType,
|
||||
},
|
||||
encodedState: testEncodedState,
|
||||
errParam: "some_other_error",
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: testutil.ExpectedLoginPageHTML(loginhtml.CSS(), testUpstreamName, testPath, testEncodedState,
|
||||
"An internal error occurred. Please contact your administrator for help.",
|
||||
),
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
tt := test
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
handler := NewGetHandler(testPath)
|
||||
target := testPath + "?state=" + tt.encodedState
|
||||
if tt.errParam != "" {
|
||||
target += "&err=" + tt.errParam
|
||||
}
|
||||
req := httptest.NewRequest(http.MethodGet, target, nil)
|
||||
rsp := httptest.NewRecorder()
|
||||
err := handler(rsp, req, tt.encodedState, tt.decodedState)
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, tt.wantStatus, rsp.Code)
|
||||
testutil.RequireEqualContentType(t, rsp.Header().Get("Content-Type"), tt.wantContentType)
|
||||
body := rsp.Body.String()
|
||||
// t.Log("actual body:", body) // useful when updating expected values
|
||||
require.Equal(t, tt.wantBody, body)
|
||||
})
|
||||
}
|
||||
}
|
125
internal/oidc/login/login_handler.go
Normal file
@ -0,0 +1,125 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package login
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
idpdiscoveryv1alpha1 "go.pinniped.dev/generated/latest/apis/supervisor/idpdiscovery/v1alpha1"
|
||||
"go.pinniped.dev/internal/httputil/httperr"
|
||||
"go.pinniped.dev/internal/httputil/securityheader"
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/login/loginhtml"
|
||||
"go.pinniped.dev/internal/oidc/provider/formposthtml"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
type ErrorParamValue string
|
||||
|
||||
const (
|
||||
usernameParamName = "username"
|
||||
passwordParamName = "password"
|
||||
stateParamName = "state"
|
||||
errParamName = "err"
|
||||
|
||||
ShowNoError ErrorParamValue = ""
|
||||
ShowInternalError ErrorParamValue = "internal_error"
|
||||
ShowBadUserPassErr ErrorParamValue = "login_error"
|
||||
)
|
||||
|
||||
// HandlerFunc is a function that can handle either a GET or POST request for the login endpoint.
|
||||
type HandlerFunc func(
|
||||
w http.ResponseWriter,
|
||||
r *http.Request,
|
||||
encodedState string,
|
||||
decodedState *oidc.UpstreamStateParamData,
|
||||
) error
|
||||
|
||||
// NewHandler returns a http.Handler that serves the login endpoint for IDPs that don't have their own web UI for login.
|
||||
//
|
||||
// This handler takes care of the shared concerns between the GET and POST methods of the login endpoint:
|
||||
// checking the method, checking the CSRF cookie, decoding the state param, and adding security headers.
|
||||
// Then it defers the rest of the handling to the passed in handler functions for GET and POST requests.
|
||||
// Note that CSRF protection isn't needed on GET requests, but it doesn't hurt. Putting it here
|
||||
// keeps the implementations and tests of HandlerFunc simpler since they won't need to deal with any decoders.
|
||||
// Users should always initially get redirected to this page from the authorization endpoint, and never need
|
||||
// to navigate directly to this page in their browser without going through the authorization endpoint first.
|
||||
// Once their browser has landed on this page, it should be okay for the user to refresh the browser.
|
||||
func NewHandler(
|
||||
stateDecoder oidc.Decoder,
|
||||
cookieDecoder oidc.Decoder,
|
||||
getHandler HandlerFunc, // use NewGetHandler() for production
|
||||
postHandler HandlerFunc, // use NewPostHandler() for production
|
||||
) http.Handler {
|
||||
loginHandler := httperr.HandlerFunc(func(w http.ResponseWriter, r *http.Request) error {
|
||||
var handler HandlerFunc
|
||||
switch r.Method {
|
||||
case http.MethodGet:
|
||||
handler = getHandler
|
||||
case http.MethodPost:
|
||||
handler = postHandler
|
||||
default:
|
||||
return httperr.Newf(http.StatusMethodNotAllowed, "%s (try GET or POST)", r.Method)
|
||||
}
|
||||
|
||||
encodedState, decodedState, err := oidc.ReadStateParamAndValidateCSRFCookie(r, cookieDecoder, stateDecoder)
|
||||
if err != nil {
|
||||
plog.InfoErr("state or CSRF error", err)
|
||||
return err
|
||||
}
|
||||
|
||||
switch decodedState.UpstreamType {
|
||||
case string(idpdiscoveryv1alpha1.IDPTypeLDAP), string(idpdiscoveryv1alpha1.IDPTypeActiveDirectory):
|
||||
// these are the types supported by this endpoint, so no error here
|
||||
default:
|
||||
return httperr.Newf(http.StatusBadRequest, "not a supported upstream IDP type for this endpoint: %q", decodedState.UpstreamType)
|
||||
}
|
||||
|
||||
return handler(w, r, encodedState, decodedState)
|
||||
})
|
||||
|
||||
return wrapSecurityHeaders(loginHandler)
|
||||
}
|
||||
|
||||
func wrapSecurityHeaders(handler http.Handler) http.Handler {
|
||||
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
|
||||
wrapped := securityheader.WrapWithCustomCSP(handler, loginhtml.ContentSecurityPolicy())
|
||||
if r.Method == http.MethodPost {
|
||||
// POST requests can result in the form_post html page, so allow it with CSP headers.
|
||||
wrapped = securityheader.WrapWithCustomCSP(handler, formposthtml.ContentSecurityPolicy())
|
||||
}
|
||||
wrapped.ServeHTTP(w, r)
|
||||
})
|
||||
}
|
||||
|
||||
// RedirectToLoginPage redirects to the GET /login page of the specified issuer.
|
||||
// The specified issuer should never end with a "/", which is validated by
|
||||
// provider.FederationDomainIssuer when the issuer string comes from that type.
|
||||
func RedirectToLoginPage(
|
||||
r *http.Request,
|
||||
w http.ResponseWriter,
|
||||
downstreamIssuer string,
|
||||
encodedStateParamValue string,
|
||||
errToDisplay ErrorParamValue,
|
||||
) error {
|
||||
loginURL, err := url.Parse(downstreamIssuer + oidc.PinnipedLoginPath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
q := loginURL.Query()
|
||||
q.Set(stateParamName, encodedStateParamValue)
|
||||
if errToDisplay != ShowNoError {
|
||||
q.Set(errParamName, string(errToDisplay))
|
||||
}
|
||||
loginURL.RawQuery = q.Encode()
|
||||
|
||||
http.Redirect(w, r,
|
||||
loginURL.String(),
|
||||
http.StatusSeeOther, // match fosite and https://tools.ietf.org/id/draft-ietf-oauth-security-topics-18.html#section-4.11
|
||||
)
|
||||
|
||||
return nil
|
||||
}
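For context, a minimal sketch of how the new handler might be wired up. The mux, the decoders, and the POST handler below are stand-ins: the real route registration lives in the Supervisor's OIDC manager (not part of this diff), and the production POST handler comes from NewPostHandler, whose implementation is not shown in this section.

// Sketch only: illustrative wiring of the login endpoint; not the Supervisor's real setup.
package main

import (
	"net/http"

	"github.com/gorilla/securecookie"

	"go.pinniped.dev/internal/oidc"
	"go.pinniped.dev/internal/oidc/login"
)

func newLoginMux(stateCodec, cookieCodec *securecookie.SecureCookie) *http.ServeMux {
	// The GET handler renders the login form; using oidc.PinnipedLoginPath here is an assumption.
	getHandler := login.NewGetHandler(oidc.PinnipedLoginPath)

	// Stand-in POST handler; in production, NewPostHandler authenticates the submitted
	// username/password against the LDAP or Active Directory upstream named in the state.
	postHandler := login.HandlerFunc(func(w http.ResponseWriter, r *http.Request, encodedState string, decodedState *oidc.UpstreamStateParamData) error {
		_, err := w.Write([]byte("stand-in POST handler for upstream " + decodedState.UpstreamName))
		return err
	})

	mux := http.NewServeMux()
	// NewHandler enforces GET/POST, validates the CSRF cookie and state param, checks the
	// upstream type, and adds security headers before delegating to the two handlers above.
	// Assumption: the securecookie codecs satisfy oidc.Decoder, as they do in the handler tests.
	mux.Handle(oidc.PinnipedLoginPath, login.NewHandler(stateCodec, cookieCodec, getHandler, postHandler))
	return mux
}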
|
457
internal/oidc/login/login_handler_test.go
Normal file
@ -0,0 +1,457 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package login
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/gorilla/securecookie"
|
||||
"github.com/stretchr/testify/require"
|
||||
|
||||
"go.pinniped.dev/internal/httputil/httperr"
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
"go.pinniped.dev/internal/testutil/oidctestutil"
|
||||
)
|
||||
|
||||
const (
|
||||
htmlContentType = "text/html; charset=utf-8"
|
||||
)
|
||||
|
||||
func TestLoginEndpoint(t *testing.T) {
|
||||
const (
|
||||
happyGetResult = "<p>get handler result</p>"
|
||||
happyPostResult = "<p>post handler result</p>"
|
||||
|
||||
happyUpstreamIDPName = "upstream-idp-name"
|
||||
happyUpstreamIDPType = "ldap"
|
||||
happyDownstreamCSRF = "test-csrf"
|
||||
happyDownstreamPKCE = "test-pkce"
|
||||
happyDownstreamNonce = "test-nonce"
|
||||
happyDownstreamStateVersion = "2"
|
||||
|
||||
downstreamClientID = "pinniped-cli"
|
||||
happyDownstreamState = "8b-state"
|
||||
downstreamNonce = "some-nonce-value"
|
||||
downstreamPKCEChallenge = "some-challenge"
|
||||
downstreamPKCEChallengeMethod = "S256"
|
||||
downstreamRedirectURI = "http://127.0.0.1/callback"
|
||||
)
|
||||
|
||||
happyDownstreamScopesRequested := []string{"openid"}
|
||||
happyDownstreamRequestParamsQuery := url.Values{
|
||||
"response_type": []string{"code"},
|
||||
"scope": []string{strings.Join(happyDownstreamScopesRequested, " ")},
|
||||
"client_id": []string{downstreamClientID},
|
||||
"state": []string{happyDownstreamState},
|
||||
"nonce": []string{downstreamNonce},
|
||||
"code_challenge": []string{downstreamPKCEChallenge},
|
||||
"code_challenge_method": []string{downstreamPKCEChallengeMethod},
|
||||
"redirect_uri": []string{downstreamRedirectURI},
|
||||
}
|
||||
happyDownstreamRequestParams := happyDownstreamRequestParamsQuery.Encode()
|
||||
|
||||
expectedHappyDecodedUpstreamStateParam := func() *oidc.UpstreamStateParamData {
|
||||
return &oidc.UpstreamStateParamData{
|
||||
UpstreamName: happyUpstreamIDPName,
|
||||
UpstreamType: happyUpstreamIDPType,
|
||||
AuthParams: happyDownstreamRequestParams,
|
||||
Nonce: happyDownstreamNonce,
|
||||
CSRFToken: happyDownstreamCSRF,
|
||||
PKCECode: happyDownstreamPKCE,
|
||||
FormatVersion: happyDownstreamStateVersion,
|
||||
}
|
||||
}
|
||||
|
||||
expectedHappyDecodedUpstreamStateParamForActiveDirectory := func() *oidc.UpstreamStateParamData {
|
||||
s := expectedHappyDecodedUpstreamStateParam()
|
||||
s.UpstreamType = "activedirectory"
|
||||
return s
|
||||
}
|
||||
|
||||
happyUpstreamStateParam := func() *oidctestutil.UpstreamStateParamBuilder {
|
||||
return &oidctestutil.UpstreamStateParamBuilder{
|
||||
U: happyUpstreamIDPName,
|
||||
T: happyUpstreamIDPType,
|
||||
P: happyDownstreamRequestParams,
|
||||
N: happyDownstreamNonce,
|
||||
C: happyDownstreamCSRF,
|
||||
K: happyDownstreamPKCE,
|
||||
V: happyDownstreamStateVersion,
|
||||
}
|
||||
}
|
||||
|
||||
stateEncoderHashKey := []byte("fake-hash-secret")
|
||||
stateEncoderBlockKey := []byte("0123456789ABCDEF") // block encryption requires 16/24/32 bytes for AES
|
||||
cookieEncoderHashKey := []byte("fake-hash-secret2")
|
||||
cookieEncoderBlockKey := []byte("0123456789ABCDE2") // block encryption requires 16/24/32 bytes for AES
|
||||
require.NotEqual(t, stateEncoderHashKey, cookieEncoderHashKey)
|
||||
require.NotEqual(t, stateEncoderBlockKey, cookieEncoderBlockKey)
|
||||
|
||||
happyStateCodec := securecookie.New(stateEncoderHashKey, stateEncoderBlockKey)
|
||||
happyStateCodec.SetSerializer(securecookie.JSONEncoder{})
|
||||
happyCookieCodec := securecookie.New(cookieEncoderHashKey, cookieEncoderBlockKey)
|
||||
happyCookieCodec.SetSerializer(securecookie.JSONEncoder{})
|
||||
|
||||
happyState := happyUpstreamStateParam().Build(t, happyStateCodec)
|
||||
happyPathWithState := newRequestPath().WithState(happyState).String()
|
||||
|
||||
happyActiveDirectoryState := happyUpstreamStateParam().WithUpstreamIDPType("activedirectory").Build(t, happyStateCodec)
|
||||
|
||||
encodedIncomingCookieCSRFValue, err := happyCookieCodec.Encode("csrf", happyDownstreamCSRF)
|
||||
require.NoError(t, err)
|
||||
happyCSRFCookie := "__Host-pinniped-csrf=" + encodedIncomingCookieCSRFValue
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
method string
|
||||
path string
|
||||
csrfCookie string
|
||||
getHandlerErr error
|
||||
postHandlerErr error
|
||||
|
||||
wantStatus int
|
||||
wantContentType string
|
||||
wantBody string
|
||||
wantEncodedState string
|
||||
wantDecodedState *oidc.UpstreamStateParamData
|
||||
}{
|
||||
{
|
||||
name: "PUT method is invalid",
|
||||
method: http.MethodPut,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusMethodNotAllowed,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Method Not Allowed: PUT (try GET or POST)\n",
|
||||
},
|
||||
{
|
||||
name: "PATCH method is invalid",
|
||||
method: http.MethodPatch,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusMethodNotAllowed,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Method Not Allowed: PATCH (try GET or POST)\n",
|
||||
},
|
||||
{
|
||||
name: "DELETE method is invalid",
|
||||
method: http.MethodDelete,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusMethodNotAllowed,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Method Not Allowed: DELETE (try GET or POST)\n",
|
||||
},
|
||||
{
|
||||
name: "HEAD method is invalid",
|
||||
method: http.MethodHead,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusMethodNotAllowed,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Method Not Allowed: HEAD (try GET or POST)\n",
|
||||
},
|
||||
{
|
||||
name: "CONNECT method is invalid",
|
||||
method: http.MethodConnect,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusMethodNotAllowed,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Method Not Allowed: CONNECT (try GET or POST)\n",
|
||||
},
|
||||
{
|
||||
name: "OPTIONS method is invalid",
|
||||
method: http.MethodOptions,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusMethodNotAllowed,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Method Not Allowed: OPTIONS (try GET or POST)\n",
|
||||
},
|
||||
{
|
||||
name: "TRACE method is invalid",
|
||||
method: http.MethodTrace,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusMethodNotAllowed,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Method Not Allowed: TRACE (try GET or POST)\n",
|
||||
},
|
||||
{
|
||||
name: "state param was not included on GET request",
|
||||
method: http.MethodGet,
|
||||
path: newRequestPath().WithoutState().String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Bad Request: state param not found\n",
|
||||
},
|
||||
{
|
||||
name: "state param was not included on POST request",
|
||||
method: http.MethodPost,
|
||||
path: newRequestPath().WithoutState().String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Bad Request: state param not found\n",
|
||||
},
|
||||
{
|
||||
name: "state param was not signed correctly, has expired, or otherwise cannot be decoded for any reason on GET request",
|
||||
method: http.MethodGet,
|
||||
path: newRequestPath().WithState("this-will-not-decode").String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Bad Request: error reading state\n",
|
||||
},
|
||||
{
|
||||
name: "state param was not signed correctly, has expired, or otherwise cannot be decoded for any reason on POST request",
|
||||
method: http.MethodPost,
|
||||
path: newRequestPath().WithState("this-will-not-decode").String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Bad Request: error reading state\n",
|
||||
},
|
||||
{
|
||||
name: "the CSRF cookie does not exist on GET request",
|
||||
method: http.MethodGet,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: "",
|
||||
wantStatus: http.StatusForbidden,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Forbidden: CSRF cookie is missing\n",
|
||||
},
|
||||
{
|
||||
name: "the CSRF cookie does not exist on POST request",
|
||||
method: http.MethodPost,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: "",
|
||||
wantStatus: http.StatusForbidden,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Forbidden: CSRF cookie is missing\n",
|
||||
},
|
||||
{
|
||||
name: "the CSRF cookie was not signed correctly, has expired, or otherwise cannot be decoded for any reason on GET request",
|
||||
method: http.MethodGet,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: "__Host-pinniped-csrf=this-value-was-not-signed-by-pinniped",
|
||||
wantStatus: http.StatusForbidden,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Forbidden: error reading CSRF cookie\n",
|
||||
},
|
||||
{
|
||||
name: "the CSRF cookie was not signed correctly, has expired, or otherwise cannot be decoded for any reason on POST request",
|
||||
method: http.MethodPost,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: "__Host-pinniped-csrf=this-value-was-not-signed-by-pinniped",
|
||||
wantStatus: http.StatusForbidden,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Forbidden: error reading CSRF cookie\n",
|
||||
},
|
||||
{
|
||||
name: "cookie csrf value does not match state csrf value on GET request",
|
||||
method: http.MethodGet,
|
||||
path: newRequestPath().WithState(happyUpstreamStateParam().WithCSRF("wrong-csrf-value").Build(t, happyStateCodec)).String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusForbidden,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Forbidden: CSRF value does not match\n",
|
||||
},
|
||||
{
|
||||
name: "cookie csrf value does not match state csrf value on POST request",
|
||||
method: http.MethodPost,
|
||||
path: newRequestPath().WithState(happyUpstreamStateParam().WithCSRF("wrong-csrf-value").Build(t, happyStateCodec)).String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusForbidden,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Forbidden: CSRF value does not match\n",
|
||||
},
|
||||
{
|
||||
name: "GET request when upstream IDP type in state param is not supported by this endpoint",
|
||||
method: http.MethodGet,
|
||||
path: newRequestPath().WithState(
|
||||
happyUpstreamStateParam().WithUpstreamIDPType("oidc").Build(t, happyStateCodec),
|
||||
).String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Bad Request: not a supported upstream IDP type for this endpoint: \"oidc\"\n",
|
||||
},
|
||||
{
|
||||
name: "POST request when upstream IDP type in state param is not supported by this endpoint",
|
||||
method: http.MethodPost,
|
||||
path: newRequestPath().WithState(
|
||||
happyUpstreamStateParam().WithUpstreamIDPType("oidc").Build(t, happyStateCodec),
|
||||
).String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusBadRequest,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Bad Request: not a supported upstream IDP type for this endpoint: \"oidc\"\n",
|
||||
},
|
||||
{
|
||||
name: "valid GET request when GET endpoint handler returns an error",
|
||||
method: http.MethodGet,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
getHandlerErr: httperr.Newf(http.StatusInternalServerError, "some get error"),
|
||||
wantStatus: http.StatusInternalServerError,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Internal Server Error: some get error\n",
|
||||
wantEncodedState: happyState,
|
||||
wantDecodedState: expectedHappyDecodedUpstreamStateParam(),
|
||||
},
|
||||
{
|
||||
name: "valid POST request when POST endpoint handler returns an error",
|
||||
method: http.MethodPost,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
postHandlerErr: httperr.Newf(http.StatusInternalServerError, "some post error"),
|
||||
wantStatus: http.StatusInternalServerError,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: "Internal Server Error: some post error\n",
|
||||
wantEncodedState: happyState,
|
||||
wantDecodedState: expectedHappyDecodedUpstreamStateParam(),
|
||||
},
|
||||
{
|
||||
name: "happy GET request for LDAP upstream",
|
||||
method: http.MethodGet,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: happyGetResult,
|
||||
wantEncodedState: happyState,
|
||||
wantDecodedState: expectedHappyDecodedUpstreamStateParam(),
|
||||
},
|
||||
{
|
||||
name: "happy POST request for LDAP upstream",
|
||||
method: http.MethodPost,
|
||||
path: happyPathWithState,
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: happyPostResult,
|
||||
wantEncodedState: happyState,
|
||||
wantDecodedState: expectedHappyDecodedUpstreamStateParam(),
|
||||
},
|
||||
{
|
||||
name: "happy GET request for ActiveDirectory upstream",
|
||||
method: http.MethodGet,
|
||||
path: newRequestPath().WithState(happyActiveDirectoryState).String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: happyGetResult,
|
||||
wantEncodedState: happyActiveDirectoryState,
|
||||
wantDecodedState: expectedHappyDecodedUpstreamStateParamForActiveDirectory(),
|
||||
},
|
||||
{
|
||||
name: "happy POST request for ActiveDirectory upstream",
|
||||
method: http.MethodPost,
|
||||
path: newRequestPath().WithState(happyActiveDirectoryState).String(),
|
||||
csrfCookie: happyCSRFCookie,
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBody: happyPostResult,
|
||||
wantEncodedState: happyActiveDirectoryState,
|
||||
wantDecodedState: expectedHappyDecodedUpstreamStateParamForActiveDirectory(),
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
tt := test
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
req := httptest.NewRequest(tt.method, tt.path, nil)
|
||||
if tt.csrfCookie != "" {
|
||||
req.Header.Set("Cookie", tt.csrfCookie)
|
||||
}
|
||||
rsp := httptest.NewRecorder()
|
||||
|
||||
testGetHandler := func(
|
||||
w http.ResponseWriter,
|
||||
r *http.Request,
|
||||
encodedState string,
|
||||
decodedState *oidc.UpstreamStateParamData,
|
||||
) error {
|
||||
require.Equal(t, req, r)
|
||||
require.Equal(t, rsp, w)
|
||||
require.Equal(t, tt.wantEncodedState, encodedState)
|
||||
require.Equal(t, tt.wantDecodedState, decodedState)
|
||||
if tt.getHandlerErr == nil {
|
||||
_, err := w.Write([]byte(happyGetResult))
|
||||
require.NoError(t, err)
|
||||
}
|
||||
return tt.getHandlerErr
|
||||
}
|
||||
|
||||
testPostHandler := func(
|
||||
w http.ResponseWriter,
|
||||
r *http.Request,
|
||||
encodedState string,
|
||||
decodedState *oidc.UpstreamStateParamData,
|
||||
) error {
|
||||
require.Equal(t, req, r)
|
||||
require.Equal(t, rsp, w)
|
||||
require.Equal(t, tt.wantEncodedState, encodedState)
|
||||
require.Equal(t, tt.wantDecodedState, decodedState)
|
||||
if tt.postHandlerErr == nil {
|
||||
_, err := w.Write([]byte(happyPostResult))
|
||||
require.NoError(t, err)
|
||||
}
|
||||
return tt.postHandlerErr
|
||||
}
|
||||
|
||||
subject := NewHandler(happyStateCodec, happyCookieCodec, testGetHandler, testPostHandler)
|
||||
|
||||
subject.ServeHTTP(rsp, req)
|
||||
|
||||
if tt.method == http.MethodPost {
|
||||
testutil.RequireSecurityHeadersWithFormPostPageCSPs(t, rsp)
|
||||
} else {
|
||||
testutil.RequireSecurityHeadersWithLoginPageCSPs(t, rsp)
|
||||
}
|
||||
|
||||
require.Equal(t, tt.wantStatus, rsp.Code)
|
||||
testutil.RequireEqualContentType(t, rsp.Header().Get("Content-Type"), tt.wantContentType)
|
||||
require.Equal(t, tt.wantBody, rsp.Body.String())
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
type requestPath struct {
|
||||
state *string
|
||||
}
|
||||
|
||||
func newRequestPath() *requestPath {
|
||||
return &requestPath{}
|
||||
}
|
||||
|
||||
func (r *requestPath) WithState(state string) *requestPath {
|
||||
r.state = &state
|
||||
return r
|
||||
}
|
||||
|
||||
func (r *requestPath) WithoutState() *requestPath {
|
||||
r.state = nil
|
||||
return r
|
||||
}
|
||||
|
||||
func (r *requestPath) String() string {
|
||||
path := "/login?"
|
||||
params := url.Values{}
|
||||
if r.state != nil {
|
||||
params.Add("state", *r.state)
|
||||
}
|
||||
return path + params.Encode()
|
||||
}
|
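The handler under test (NewHandler) is not itself included in this diff, but the table above implies the order of its checks. Below is a minimal sketch reconstructed from the expected statuses and body strings; every name besides those error strings is an assumption, not the actual implementation.

package login

import "net/http"

// sketchOfWrapperChecks mirrors the validation order implied by the test table above.
// Illustrative only; the real handler also decodes the state and CSRF cookie with the
// codecs passed to NewHandler before dispatching to the GET or POST handler.
func sketchOfWrapperChecks(w http.ResponseWriter, r *http.Request) {
	if r.Method != http.MethodGet && r.Method != http.MethodPost {
		http.Error(w, "Method Not Allowed: "+r.Method+" (try GET or POST)", http.StatusMethodNotAllowed)
		return
	}
	if r.URL.Query().Get("state") == "" {
		http.Error(w, "Bad Request: state param not found", http.StatusBadRequest)
		return
	}
	// Remaining checks, in order, per the cases above:
	//   decode/verify the state param                 -> 400 "error reading state"
	//   require the __Host-pinniped-csrf cookie       -> 403 "CSRF cookie is missing"
	//   decode/verify the CSRF cookie                 -> 403 "error reading CSRF cookie"
	//   compare cookie CSRF value to state CSRF value -> 403 "CSRF value does not match"
	//   reject unsupported IDP types (e.g. "oidc")    -> 400, otherwise dispatch GET/POST
}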
94
internal/oidc/login/loginhtml/login_form.css
Normal file
@ -0,0 +1,94 @@
|
||||
/* Copyright 2022 the Pinniped contributors. All Rights Reserved. */
|
||||
/* SPDX-License-Identifier: Apache-2.0 */
|
||||
|
||||
html {
|
||||
height: 100%;
|
||||
}
|
||||
|
||||
body {
|
||||
font-family: "Metropolis-Light", Helvetica, sans-serif;
|
||||
display: flex;
|
||||
flex-flow: column wrap;
|
||||
justify-content: flex-start;
|
||||
align-items: center;
|
||||
/* subtle gradient make the login box stand out */
|
||||
background: linear-gradient(to top, #f8f8f8, white);
|
||||
min-height: 100%;
|
||||
}
|
||||
|
||||
h1 {
|
||||
font-size: 20px;
|
||||
margin: 0;
|
||||
}
|
||||
|
||||
.box {
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
flex-wrap: nowrap;
|
||||
border-radius: 4px;
|
||||
border-color: #ddd;
|
||||
border-width: 1px;
|
||||
border-style: solid;
|
||||
width: 400px;
|
||||
padding:30px 30px 0;
|
||||
margin: 60px 20px 0;
|
||||
background: white;
|
||||
font-size: 14px;
|
||||
}
|
||||
|
||||
input {
|
||||
color: inherit;
|
||||
font: inherit;
|
||||
border: 0;
|
||||
margin: 0;
|
||||
outline: 0;
|
||||
padding: 0;
|
||||
}
|
||||
|
||||
.form-field {
|
||||
display: flex;
|
||||
margin-bottom: 30px;
|
||||
}
|
||||
|
||||
.form-field input[type="password"], .form-field input[type="text"], .form-field input[type="submit"] {
|
||||
width: 100%;
|
||||
padding: 1em;
|
||||
}
|
||||
|
||||
.form-field input[type="password"], .form-field input[type="text"] {
|
||||
border-radius: 3px;
|
||||
border-width: 1px;
|
||||
border-style: solid;
|
||||
border-color: #a6a6a6;
|
||||
}
|
||||
|
||||
.form-field input[type="submit"] {
|
||||
background-color: #218fcf; /* this is a color from the Pinniped logo :) */
|
||||
color: #eee;
|
||||
font-weight: bold;
|
||||
cursor: pointer;
|
||||
transition: all .3s;
|
||||
}
|
||||
|
||||
.form-field input[type="submit"]:focus, .form-field input[type="submit"]:hover {
|
||||
background-color: #1abfd3; /* this is a color from the Pinniped logo :) */
|
||||
}
|
||||
|
||||
.form-field input[type="submit"]:active {
|
||||
transform: scale(.99);
|
||||
}
|
||||
|
||||
.hidden {
|
||||
border: 0;
|
||||
clip: rect(0 0 0 0);
|
||||
height: 1px;
|
||||
margin: -1px;
|
||||
overflow: hidden;
|
||||
padding: 0;
|
||||
position: absolute;
|
||||
width: 1px;
|
||||
}
|
||||
|
||||
.alert {
|
||||
color: crimson;
|
||||
}
|
50
internal/oidc/login/loginhtml/login_form.gohtml
Normal file
File diff suppressed because one or more lines are too long
65
internal/oidc/login/loginhtml/loginhtml.go
Normal file
@ -0,0 +1,65 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Package loginhtml defines HTML templates used by the Supervisor.
|
||||
//nolint: gochecknoglobals // This package uses globals to ensure that all parsing and minifying happens at init.
|
||||
package loginhtml
|
||||
|
||||
import (
|
||||
_ "embed" // Needed to trigger //go:embed directives below.
|
||||
"html/template"
|
||||
"strings"
|
||||
|
||||
"github.com/tdewolff/minify/v2/minify"
|
||||
|
||||
"go.pinniped.dev/internal/oidc/provider/csp"
|
||||
)
|
||||
|
||||
var (
|
||||
//go:embed login_form.css
|
||||
rawCSS string
|
||||
minifiedCSS = panicOnError(minify.CSS(rawCSS))
|
||||
|
||||
//go:embed login_form.gohtml
|
||||
rawHTMLTemplate string
|
||||
)
|
||||
|
||||
// Parse the Go templated HTML and inject a function providing the minified inline CSS.
|
||||
var parsedHTMLTemplate = template.Must(template.New("login_form.gohtml").Funcs(template.FuncMap{
|
||||
"minifiedCSS": func() template.CSS { return template.CSS(CSS()) },
|
||||
}).Parse(rawHTMLTemplate))
|
||||
|
||||
// Generate the CSP header value once since it's effectively constant.
|
||||
var cspValue = strings.Join([]string{
|
||||
`default-src 'none'`,
|
||||
`style-src '` + csp.Hash(minifiedCSS) + `'`,
|
||||
`frame-ancestors 'none'`,
|
||||
}, "; ")
|
||||
|
||||
func panicOnError(s string, err error) string {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// ContentSecurityPolicy returns the Content-Security-Policy header value that allows the page rendered by Template() to operate correctly.
|
||||
//
|
||||
// See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy.
|
||||
func ContentSecurityPolicy() string { return cspValue }
|
||||
|
||||
// Template returns the html/template.Template for rendering the login page.
|
||||
func Template() *template.Template { return parsedHTMLTemplate }
|
||||
|
||||
// CSS returns the minified CSS that will be embedded into the page template.
|
||||
func CSS() string { return minifiedCSS }
|
||||
|
||||
// PageData represents the inputs to the template.
|
||||
type PageData struct {
|
||||
State string
|
||||
IDPName string
|
||||
HasAlertError bool
|
||||
AlertMessage string
|
||||
MinifiedCSS template.CSS
|
||||
PostPath string
|
||||
}
|
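csp.Hash is imported above but is not part of this diff. Judging by the style-src value asserted in the tests that follow ('sha256-...='), it presumably returns the standard CSP hash-source for the given inline text: "sha256-" plus the standard-base64 SHA-256 digest. The following is a sketch of that computation, an assumption about what csp.Hash does rather than its actual source.

package main

import (
	"crypto/sha256"
	"encoding/base64"
	"fmt"
)

// cspSHA256Source computes a CSP hash-source ("sha256-<base64 digest>") for inline content,
// which is how a style-src entry like the one in cspValue above is typically produced.
func cspSHA256Source(inline string) string {
	sum := sha256.Sum256([]byte(inline))
	return "sha256-" + base64.StdEncoding.EncodeToString(sum[:])
}

func main() {
	minified := "html{height:100%}" // stand-in; the real input would be the minified login_form.css
	fmt.Printf("style-src '%s'\n", cspSHA256Source(minified))
}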
68
internal/oidc/login/loginhtml/loginhtml_test.go
Normal file
@ -0,0 +1,68 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package loginhtml
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"testing"
|
||||
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
var (
|
||||
testExpectedCSS = `html{height:100%}body{font-family:metropolis-light,Helvetica,sans-serif;display:flex;flex-flow:column wrap;justify-content:flex-start;align-items:center;background:linear-gradient(to top,#f8f8f8,white);min-height:100%}h1{font-size:20px;margin:0}.box{display:flex;flex-direction:column;flex-wrap:nowrap;border-radius:4px;border-color:#ddd;border-width:1px;border-style:solid;width:400px;padding:30px 30px 0;margin:60px 20px 0;background:#fff;font-size:14px}input{color:inherit;font:inherit;border:0;margin:0;outline:0;padding:0}.form-field{display:flex;margin-bottom:30px}.form-field input[type=password],.form-field input[type=text],.form-field input[type=submit]{width:100%;padding:1em}.form-field input[type=password],.form-field input[type=text]{border-radius:3px;border-width:1px;border-style:solid;border-color:#a6a6a6}.form-field input[type=submit]{background-color:#218fcf;color:#eee;font-weight:700;cursor:pointer;transition:all .3s}.form-field input[type=submit]:focus,.form-field input[type=submit]:hover{background-color:#1abfd3}.form-field input[type=submit]:active{transform:scale(.99)}.hidden{border:0;clip:rect(0 0 0 0);height:1px;margin:-1px;overflow:hidden;padding:0;position:absolute;width:1px}.alert{color:crimson}`
|
||||
|
||||
// It's okay if this changes in the future, but this gives us a chance to eyeball the formatting.
|
||||
// Our browser-based integration tests should find any incompatibilities.
|
||||
testExpectedCSP = `default-src 'none'; ` +
|
||||
`style-src 'sha256-QC9ckaUFAdcN0Ysmu8q8iqCazYFgrJSQDJPa/przPXU='; ` +
|
||||
`frame-ancestors 'none'`
|
||||
)
|
||||
|
||||
func TestTemplate(t *testing.T) {
|
||||
const (
|
||||
testUpstreamName = "test-idp-name"
|
||||
testPath = "test-post-path"
|
||||
testEncodedState = "test-encoded-state"
|
||||
testAlert = "test-alert-message"
|
||||
)
|
||||
|
||||
var buf bytes.Buffer
|
||||
pageInputs := &PageData{
|
||||
PostPath: testPath,
|
||||
State: testEncodedState,
|
||||
IDPName: testUpstreamName,
|
||||
HasAlertError: true,
|
||||
AlertMessage: testAlert,
|
||||
}
|
||||
|
||||
// Render with an alert.
|
||||
expectedHTMLWithAlert := testutil.ExpectedLoginPageHTML(testExpectedCSS, testUpstreamName, testPath, testEncodedState, testAlert)
|
||||
require.NoError(t, Template().Execute(&buf, pageInputs))
|
||||
// t.Logf("actual value:\n%s", buf.String()) // useful when updating minify library causes new output
|
||||
require.Equal(t, expectedHTMLWithAlert, buf.String())
|
||||
|
||||
// Render again without an alert.
|
||||
pageInputs.HasAlertError = false
|
||||
expectedHTMLWithoutAlert := testutil.ExpectedLoginPageHTML(testExpectedCSS, testUpstreamName, testPath, testEncodedState, "")
|
||||
buf = bytes.Buffer{} // clear previous result from buffer
|
||||
require.NoError(t, Template().Execute(&buf, pageInputs))
|
||||
require.Equal(t, expectedHTMLWithoutAlert, buf.String())
|
||||
}
|
||||
|
||||
func TestContentSecurityPolicy(t *testing.T) {
|
||||
require.Equal(t, testExpectedCSP, ContentSecurityPolicy())
|
||||
}
|
||||
|
||||
func TestCSS(t *testing.T) {
|
||||
require.Equal(t, testExpectedCSS, CSS())
|
||||
}
|
||||
|
||||
func TestHelpers(t *testing.T) {
|
||||
require.Equal(t, "test", panicOnError("test", nil))
|
||||
require.PanicsWithError(t, "some error", func() { panicOnError("", fmt.Errorf("some error")) })
|
||||
}
|
88
internal/oidc/login/post_login_handler.go
Normal file
@ -0,0 +1,88 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package login
|
||||
|
||||
import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
|
||||
"github.com/ory/fosite"
|
||||
|
||||
"go.pinniped.dev/internal/httputil/httperr"
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/downstreamsession"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
)
|
||||
|
||||
func NewPostHandler(issuerURL string, upstreamIDPs oidc.UpstreamIdentityProvidersLister, oauthHelper fosite.OAuth2Provider) HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request, encodedState string, decodedState *oidc.UpstreamStateParamData) error {
|
||||
// Note that the login handler prevents this handler from being called with OIDC upstreams.
|
||||
_, ldapUpstream, idpType, err := oidc.FindUpstreamIDPByNameAndType(upstreamIDPs, decodedState.UpstreamName, decodedState.UpstreamType)
|
||||
if err != nil {
|
||||
// This shouldn't normally happen because the authorization endpoint ensured that this provider existed
|
||||
// at that time. It could only happen in the unlikely event that the provider was deleted while the login was in progress.
|
||||
plog.Error("error finding upstream provider", err)
|
||||
return httperr.Wrap(http.StatusUnprocessableEntity, "error finding upstream provider", err)
|
||||
}
|
||||
|
||||
// Get the original params that were used at the authorization endpoint.
|
||||
downstreamAuthParams, err := url.ParseQuery(decodedState.AuthParams)
|
||||
if err != nil {
|
||||
// This shouldn't really happen because the authorization endpoint encoded these query params correctly.
|
||||
plog.Error("error reading state downstream auth params", err)
|
||||
return httperr.New(http.StatusBadRequest, "error reading state downstream auth params")
|
||||
}
|
||||
|
||||
// Recreate enough of the original authorize request so we can pass it to NewAuthorizeRequest().
|
||||
reconstitutedAuthRequest := &http.Request{Form: downstreamAuthParams}
|
||||
authorizeRequester, err := oauthHelper.NewAuthorizeRequest(r.Context(), reconstitutedAuthRequest)
|
||||
if err != nil {
|
||||
// This shouldn't really happen because the authorization endpoint has already validated these params
|
||||
// by calling NewAuthorizeRequest() itself.
|
||||
plog.Error("error using state downstream auth params", err)
|
||||
return httperr.New(http.StatusBadRequest, "error using state downstream auth params")
|
||||
}
|
||||
|
||||
// Automatically grant the openid, offline_access, and pinniped:request-audience scopes, but only if they were requested.
|
||||
downstreamsession.GrantScopesIfRequested(authorizeRequester)
|
||||
|
||||
// Get the username and password form params from the POST body.
|
||||
username := r.PostFormValue(usernameParamName)
|
||||
password := r.PostFormValue(passwordParamName)
|
||||
|
||||
// Treat blank username or password as a bad username/password combination, as opposed to an internal error.
|
||||
if username == "" || password == "" {
|
||||
// User forgot to enter one of the required fields.
|
||||
// The user may try to log in again if they'd like, so redirect back to the login page with an error.
|
||||
return RedirectToLoginPage(r, w, issuerURL, encodedState, ShowBadUserPassErr)
|
||||
}
|
||||
|
||||
// Attempt to authenticate the user with the upstream IDP.
|
||||
authenticateResponse, authenticated, err := ldapUpstream.AuthenticateUser(r.Context(), username, password)
|
||||
if err != nil {
|
||||
plog.WarningErr("unexpected error during upstream LDAP authentication", err, "upstreamName", ldapUpstream.GetName())
|
||||
// There was some problem during authentication with the upstream, aside from bad username/password.
|
||||
// The user may try to log in again if they'd like, so redirect back to the login page with an error.
|
||||
return RedirectToLoginPage(r, w, issuerURL, encodedState, ShowInternalError)
|
||||
}
|
||||
if !authenticated {
|
||||
// The upstream did not accept the username/password combination.
|
||||
// The user may try to log in again if they'd like, so redirect back to the login page with an error.
|
||||
return RedirectToLoginPage(r, w, issuerURL, encodedState, ShowBadUserPassErr)
|
||||
}
|
||||
|
||||
// We had previously interrupted the regular steps of the OIDC authcode flow to show the login page UI.
|
||||
// Now that the upstream IDP has authenticated the user, we're back into the regular OIDC authcode flow steps.
|
||||
// Both success and error responses from this point onwards should look like the usual fosite redirect
|
||||
// responses, and a happy redirect response will include a downstream authcode.
|
||||
subject := downstreamsession.DownstreamSubjectFromUpstreamLDAP(ldapUpstream, authenticateResponse)
|
||||
username = authenticateResponse.User.GetName()
|
||||
groups := authenticateResponse.User.GetGroups()
|
||||
customSessionData := downstreamsession.MakeDownstreamLDAPOrADCustomSessionData(ldapUpstream, idpType, authenticateResponse)
|
||||
openIDSession := downstreamsession.MakeDownstreamSession(subject, username, groups, customSessionData)
|
||||
oidc.PerformAuthcodeRedirect(r, w, oauthHelper, authorizeRequester, openIDSession, false)
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
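RedirectToLoginPage and the Show*Err values used above are defined elsewhere and are not included in this diff. Based on the expectations in the tests that follow (a 303 See Other back to the issuer's /login path with err and state query params), it likely behaves roughly like the sketch below; the exact signature and encoding are assumptions.

package login

import (
	"net/http"
	"net/url"
)

// redirectToLoginPageSketch approximates the redirect asserted in the tests below:
// <issuer>/login?err=<error-code>&state=<still-encoded-upstream-state>, sent as 303 See Other.
func redirectToLoginPageSketch(w http.ResponseWriter, r *http.Request, issuerURL, encodedState, errCode string) {
	v := url.Values{"err": []string{errCode}, "state": []string{encodedState}}
	http.Redirect(w, r, issuerURL+"/login?"+v.Encode(), http.StatusSeeOther)
}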
748
internal/oidc/login/post_login_handler_test.go
Normal file
@ -0,0 +1,748 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package login
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"net/http/httptest"
|
||||
"net/url"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"k8s.io/apiserver/pkg/authentication/user"
|
||||
"k8s.io/client-go/kubernetes/fake"
|
||||
|
||||
"go.pinniped.dev/internal/authenticators"
|
||||
"go.pinniped.dev/internal/oidc"
|
||||
"go.pinniped.dev/internal/oidc/jwks"
|
||||
"go.pinniped.dev/internal/psession"
|
||||
"go.pinniped.dev/internal/testutil"
|
||||
"go.pinniped.dev/internal/testutil/oidctestutil"
|
||||
)
|
||||
|
||||
func TestPostLoginEndpoint(t *testing.T) {
|
||||
const (
|
||||
htmlContentType = "text/html; charset=utf-8"
|
||||
|
||||
happyDownstreamCSRF = "test-csrf"
|
||||
happyDownstreamPKCE = "test-pkce"
|
||||
happyDownstreamNonce = "test-nonce"
|
||||
happyDownstreamStateVersion = "2"
|
||||
happyEncodedUpstreamState = "fake-encoded-state-param-value"
|
||||
|
||||
downstreamIssuer = "https://my-downstream-issuer.com/path"
|
||||
downstreamRedirectURI = "http://127.0.0.1/callback"
|
||||
downstreamClientID = "pinniped-cli"
|
||||
happyDownstreamState = "8b-state"
|
||||
downstreamNonce = "some-nonce-value"
|
||||
downstreamPKCEChallenge = "some-challenge"
|
||||
downstreamPKCEChallengeMethod = "S256"
|
||||
|
||||
ldapUpstreamName = "some-ldap-idp"
|
||||
ldapUpstreamType = "ldap"
|
||||
ldapUpstreamResourceUID = "ldap-resource-uid"
|
||||
activeDirectoryUpstreamName = "some-active-directory-idp"
|
||||
activeDirectoryUpstreamType = "activedirectory"
|
||||
activeDirectoryUpstreamResourceUID = "active-directory-resource-uid"
|
||||
upstreamLDAPURL = "ldaps://some-ldap-host:123?base=ou%3Dusers%2Cdc%3Dpinniped%2Cdc%3Ddev"
|
||||
|
||||
userParam = "username"
|
||||
passParam = "password"
|
||||
badUserPassErrParamValue = "login_error"
|
||||
internalErrParamValue = "internal_error"
|
||||
)
|
||||
|
||||
var (
|
||||
fositeMissingCodeChallengeErrorQuery = map[string]string{
|
||||
"error": "invalid_request",
|
||||
"error_description": "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. Clients must include a code_challenge when performing the authorize code flow, but it is missing.",
|
||||
"state": happyDownstreamState,
|
||||
}
|
||||
|
||||
fositeInvalidCodeChallengeErrorQuery = map[string]string{
|
||||
"error": "invalid_request",
|
||||
"error_description": "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. The code_challenge_method is not supported, use S256 instead.",
|
||||
"state": happyDownstreamState,
|
||||
}
|
||||
|
||||
fositeMissingCodeChallengeMethodErrorQuery = map[string]string{
|
||||
"error": "invalid_request",
|
||||
"error_description": "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. Clients must use code_challenge_method=S256, plain is not allowed.",
|
||||
"state": happyDownstreamState,
|
||||
}
|
||||
|
||||
fositePromptHasNoneAndOtherValueErrorQuery = map[string]string{
|
||||
"error": "invalid_request",
|
||||
"error_description": "The request is missing a required parameter, includes an invalid parameter value, includes a parameter more than once, or is otherwise malformed. Parameter 'prompt' was set to 'none', but contains other values as well which is not allowed.",
|
||||
"state": happyDownstreamState,
|
||||
}
|
||||
)
|
||||
|
||||
happyDownstreamScopesRequested := []string{"openid"}
|
||||
happyDownstreamScopesGranted := []string{"openid"}
|
||||
|
||||
happyDownstreamRequestParamsQuery := url.Values{
|
||||
"response_type": []string{"code"},
|
||||
"scope": []string{strings.Join(happyDownstreamScopesRequested, " ")},
|
||||
"client_id": []string{downstreamClientID},
|
||||
"state": []string{happyDownstreamState},
|
||||
"nonce": []string{downstreamNonce},
|
||||
"code_challenge": []string{downstreamPKCEChallenge},
|
||||
"code_challenge_method": []string{downstreamPKCEChallengeMethod},
|
||||
"redirect_uri": []string{downstreamRedirectURI},
|
||||
}
|
||||
happyDownstreamRequestParams := happyDownstreamRequestParamsQuery.Encode()
|
||||
|
||||
copyOfHappyDownstreamRequestParamsQuery := func() url.Values {
|
||||
params := url.Values{}
|
||||
for k, v := range happyDownstreamRequestParamsQuery {
|
||||
params[k] = make([]string, len(v))
|
||||
copy(params[k], v)
|
||||
}
|
||||
return params
|
||||
}
|
||||
|
||||
happyLDAPDecodedState := &oidc.UpstreamStateParamData{
|
||||
AuthParams: happyDownstreamRequestParams,
|
||||
UpstreamName: ldapUpstreamName,
|
||||
UpstreamType: ldapUpstreamType,
|
||||
Nonce: happyDownstreamNonce,
|
||||
CSRFToken: happyDownstreamCSRF,
|
||||
PKCECode: happyDownstreamPKCE,
|
||||
FormatVersion: happyDownstreamStateVersion,
|
||||
}
|
||||
|
||||
modifyHappyLDAPDecodedState := func(edit func(*oidc.UpstreamStateParamData)) *oidc.UpstreamStateParamData {
|
||||
copyOfHappyLDAPDecodedState := *happyLDAPDecodedState
|
||||
edit(&copyOfHappyLDAPDecodedState)
|
||||
return &copyOfHappyLDAPDecodedState
|
||||
}
|
||||
|
||||
happyActiveDirectoryDecodedState := &oidc.UpstreamStateParamData{
|
||||
AuthParams: happyDownstreamRequestParams,
|
||||
UpstreamName: activeDirectoryUpstreamName,
|
||||
UpstreamType: activeDirectoryUpstreamType,
|
||||
Nonce: happyDownstreamNonce,
|
||||
CSRFToken: happyDownstreamCSRF,
|
||||
PKCECode: happyDownstreamPKCE,
|
||||
FormatVersion: happyDownstreamStateVersion,
|
||||
}
|
||||
|
||||
happyLDAPUsername := "some-ldap-user"
|
||||
happyLDAPUsernameFromAuthenticator := "some-mapped-ldap-username"
|
||||
happyLDAPPassword := "some-ldap-password" //nolint:gosec
|
||||
happyLDAPUID := "some-ldap-uid"
|
||||
happyLDAPUserDN := "cn=foo,dn=bar"
|
||||
happyLDAPGroups := []string{"group1", "group2", "group3"}
|
||||
happyLDAPExtraRefreshAttribute := "some-refresh-attribute"
|
||||
happyLDAPExtraRefreshValue := "some-refresh-attribute-value"
|
||||
|
||||
parsedUpstreamLDAPURL, err := url.Parse(upstreamLDAPURL)
|
||||
require.NoError(t, err)
|
||||
|
||||
ldapAuthenticateFunc := func(ctx context.Context, username, password string) (*authenticators.Response, bool, error) {
|
||||
if username == "" || password == "" {
|
||||
return nil, false, fmt.Errorf("should not have passed empty username or password to the authenticator")
|
||||
}
|
||||
if username == happyLDAPUsername && password == happyLDAPPassword {
|
||||
return &authenticators.Response{
|
||||
User: &user.DefaultInfo{
|
||||
Name: happyLDAPUsernameFromAuthenticator,
|
||||
UID: happyLDAPUID,
|
||||
Groups: happyLDAPGroups,
|
||||
},
|
||||
DN: happyLDAPUserDN,
|
||||
ExtraRefreshAttributes: map[string]string{
|
||||
happyLDAPExtraRefreshAttribute: happyLDAPExtraRefreshValue,
|
||||
},
|
||||
}, true, nil
|
||||
}
|
||||
return nil, false, nil
|
||||
}
|
||||
|
||||
upstreamLDAPIdentityProvider := oidctestutil.TestUpstreamLDAPIdentityProvider{
|
||||
Name: ldapUpstreamName,
|
||||
ResourceUID: ldapUpstreamResourceUID,
|
||||
URL: parsedUpstreamLDAPURL,
|
||||
AuthenticateFunc: ldapAuthenticateFunc,
|
||||
}
|
||||
|
||||
upstreamActiveDirectoryIdentityProvider := oidctestutil.TestUpstreamLDAPIdentityProvider{
|
||||
Name: activeDirectoryUpstreamName,
|
||||
ResourceUID: activeDirectoryUpstreamResourceUID,
|
||||
URL: parsedUpstreamLDAPURL,
|
||||
AuthenticateFunc: ldapAuthenticateFunc,
|
||||
}
|
||||
|
||||
erroringUpstreamLDAPIdentityProvider := oidctestutil.TestUpstreamLDAPIdentityProvider{
|
||||
Name: ldapUpstreamName,
|
||||
ResourceUID: ldapUpstreamResourceUID,
|
||||
AuthenticateFunc: func(ctx context.Context, username, password string) (*authenticators.Response, bool, error) {
|
||||
return nil, false, fmt.Errorf("some ldap upstream auth error")
|
||||
},
|
||||
}
|
||||
|
||||
expectedHappyActiveDirectoryUpstreamCustomSession := &psession.CustomSessionData{
|
||||
ProviderUID: activeDirectoryUpstreamResourceUID,
|
||||
ProviderName: activeDirectoryUpstreamName,
|
||||
ProviderType: psession.ProviderTypeActiveDirectory,
|
||||
OIDC: nil,
|
||||
LDAP: nil,
|
||||
ActiveDirectory: &psession.ActiveDirectorySessionData{
|
||||
UserDN: happyLDAPUserDN,
|
||||
ExtraRefreshAttributes: map[string]string{happyLDAPExtraRefreshAttribute: happyLDAPExtraRefreshValue},
|
||||
},
|
||||
}
|
||||
|
||||
expectedHappyLDAPUpstreamCustomSession := &psession.CustomSessionData{
|
||||
ProviderUID: ldapUpstreamResourceUID,
|
||||
ProviderName: ldapUpstreamName,
|
||||
ProviderType: psession.ProviderTypeLDAP,
|
||||
OIDC: nil,
|
||||
LDAP: &psession.LDAPSessionData{
|
||||
UserDN: happyLDAPUserDN,
|
||||
ExtraRefreshAttributes: map[string]string{happyLDAPExtraRefreshAttribute: happyLDAPExtraRefreshValue},
|
||||
},
|
||||
ActiveDirectory: nil,
|
||||
}
|
||||
|
||||
// Note that fosite puts the granted scopes as a param in the redirect URI even though the spec doesn't seem to require it
|
||||
happyAuthcodeDownstreamRedirectLocationRegexp := downstreamRedirectURI + `\?code=([^&]+)&scope=openid&state=` + happyDownstreamState
|
||||
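// Illustrative example only: with the constants above, this regexp would match a Location
// header such as http://127.0.0.1/callback?code=<opaque-fosite-authcode>&scope=openid&state=8b-state,
// capturing the downstream authorization code in the first group.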
|
||||
happyUsernamePasswordFormParams := url.Values{userParam: []string{happyLDAPUsername}, passParam: []string{happyLDAPPassword}}
|
||||
|
||||
encodeQuery := func(query map[string]string) string {
|
||||
values := url.Values{}
|
||||
for k, v := range query {
|
||||
values[k] = []string{v}
|
||||
}
|
||||
return values.Encode()
|
||||
}
|
||||
|
||||
urlWithQuery := func(baseURL string, query map[string]string) string {
|
||||
urlToReturn := fmt.Sprintf("%s?%s", baseURL, encodeQuery(query))
|
||||
_, err := url.Parse(urlToReturn)
|
||||
require.NoError(t, err, "urlWithQuery helper was used to create an illegal URL")
|
||||
return urlToReturn
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
idps *oidctestutil.UpstreamIDPListerBuilder
|
||||
decodedState *oidc.UpstreamStateParamData
|
||||
formParams url.Values
|
||||
reqURIQuery url.Values
|
||||
|
||||
wantStatus int
|
||||
wantContentType string
|
||||
wantBodyString string
|
||||
wantErr string
|
||||
|
||||
// Assertion that the response should be a redirect to the login page with an error param.
|
||||
wantRedirectToLoginPageError string
|
||||
|
||||
// Assertions for when an authcode should be returned, i.e. the request was authenticated by an
|
||||
// upstream LDAP or AD provider.
|
||||
wantRedirectLocationRegexp string // for loose matching
|
||||
wantRedirectLocationString string // for exact matching instead
|
||||
wantBodyFormResponseRegexp string // for form_post html page matching instead
|
||||
wantDownstreamRedirectURI string
|
||||
wantDownstreamGrantedScopes []string
|
||||
wantDownstreamIDTokenSubject string
|
||||
wantDownstreamIDTokenUsername string
|
||||
wantDownstreamIDTokenGroups []string
|
||||
wantDownstreamRequestedScopes []string
|
||||
wantDownstreamPKCEChallenge string
|
||||
wantDownstreamPKCEChallengeMethod string
|
||||
wantDownstreamNonce string
|
||||
wantDownstreamCustomSessionData *psession.CustomSessionData
|
||||
|
||||
// Authorization requests for either a successful OIDC upstream or for an error with any upstream
|
||||
// should never use Kube storage. There is only one exception to this rule, which is that certain
|
||||
// OIDC validations are checked in fosite after the OAuth authcode (and sometimes the OIDC session)
|
||||
// is stored, so it is possible with an LDAP upstream to store objects and then return an error to
|
||||
// the client anyway (which makes the stored objects useless, but oh well).
|
||||
wantUnnecessaryStoredRecords int
|
||||
}{
|
||||
{
|
||||
name: "happy LDAP login",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().
|
||||
WithLDAP(&upstreamLDAPIdentityProvider). // should pick this one
|
||||
WithActiveDirectory(&erroringUpstreamLDAPIdentityProvider),
|
||||
decodedState: happyLDAPDecodedState,
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationRegexp: happyAuthcodeDownstreamRedirectLocationRegexp,
|
||||
wantDownstreamIDTokenSubject: upstreamLDAPURL + "&sub=" + happyLDAPUID,
|
||||
wantDownstreamIDTokenUsername: happyLDAPUsernameFromAuthenticator,
|
||||
wantDownstreamIDTokenGroups: happyLDAPGroups,
|
||||
wantDownstreamRequestedScopes: happyDownstreamScopesRequested,
|
||||
wantDownstreamRedirectURI: downstreamRedirectURI,
|
||||
wantDownstreamGrantedScopes: happyDownstreamScopesGranted,
|
||||
wantDownstreamNonce: downstreamNonce,
|
||||
wantDownstreamPKCEChallenge: downstreamPKCEChallenge,
|
||||
wantDownstreamPKCEChallengeMethod: downstreamPKCEChallengeMethod,
|
||||
wantDownstreamCustomSessionData: expectedHappyLDAPUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "happy AD login",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().
|
||||
WithLDAP(&erroringUpstreamLDAPIdentityProvider).
|
||||
WithActiveDirectory(&upstreamActiveDirectoryIdentityProvider), // should pick this one
|
||||
decodedState: happyActiveDirectoryDecodedState,
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationRegexp: happyAuthcodeDownstreamRedirectLocationRegexp,
|
||||
wantDownstreamIDTokenSubject: upstreamLDAPURL + "&sub=" + happyLDAPUID,
|
||||
wantDownstreamIDTokenUsername: happyLDAPUsernameFromAuthenticator,
|
||||
wantDownstreamIDTokenGroups: happyLDAPGroups,
|
||||
wantDownstreamRequestedScopes: happyDownstreamScopesRequested,
|
||||
wantDownstreamRedirectURI: downstreamRedirectURI,
|
||||
wantDownstreamGrantedScopes: happyDownstreamScopesGranted,
|
||||
wantDownstreamNonce: downstreamNonce,
|
||||
wantDownstreamPKCEChallenge: downstreamPKCEChallenge,
|
||||
wantDownstreamPKCEChallengeMethod: downstreamPKCEChallengeMethod,
|
||||
wantDownstreamCustomSessionData: expectedHappyActiveDirectoryUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "happy LDAP login when downstream response_mode=form_post returns 200 with HTML+JS form",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["response_mode"] = []string{"form_post"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusOK,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyFormResponseRegexp: `(?s)<html.*<script>.*To finish logging in, paste this authorization code` +
|
||||
`.*<form>.*<code id="manual-auth-code">(.+)</code>.*</html>`, // "(?s)" means match "." across newlines
|
||||
wantDownstreamIDTokenSubject: upstreamLDAPURL + "&sub=" + happyLDAPUID,
|
||||
wantDownstreamIDTokenUsername: happyLDAPUsernameFromAuthenticator,
|
||||
wantDownstreamIDTokenGroups: happyLDAPGroups,
|
||||
wantDownstreamRequestedScopes: happyDownstreamScopesRequested,
|
||||
wantDownstreamRedirectURI: downstreamRedirectURI,
|
||||
wantDownstreamGrantedScopes: happyDownstreamScopesGranted,
|
||||
wantDownstreamNonce: downstreamNonce,
|
||||
wantDownstreamPKCEChallenge: downstreamPKCEChallenge,
|
||||
wantDownstreamPKCEChallengeMethod: downstreamPKCEChallengeMethod,
|
||||
wantDownstreamCustomSessionData: expectedHappyLDAPUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "happy LDAP login when downstream redirect uri matches what is configured for client except for the port number",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["redirect_uri"] = []string{"http://127.0.0.1:4242/callback"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationRegexp: "http://127.0.0.1:4242/callback" + `\?code=([^&]+)&scope=openid&state=` + happyDownstreamState,
|
||||
wantDownstreamIDTokenSubject: upstreamLDAPURL + "&sub=" + happyLDAPUID,
|
||||
wantDownstreamIDTokenUsername: happyLDAPUsernameFromAuthenticator,
|
||||
wantDownstreamIDTokenGroups: happyLDAPGroups,
|
||||
wantDownstreamRequestedScopes: happyDownstreamScopesRequested,
|
||||
wantDownstreamRedirectURI: "http://127.0.0.1:4242/callback",
|
||||
wantDownstreamGrantedScopes: happyDownstreamScopesGranted,
|
||||
wantDownstreamNonce: downstreamNonce,
|
||||
wantDownstreamPKCEChallenge: downstreamPKCEChallenge,
|
||||
wantDownstreamPKCEChallengeMethod: downstreamPKCEChallengeMethod,
|
||||
wantDownstreamCustomSessionData: expectedHappyLDAPUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "happy LDAP login when there are additional allowed downstream requested scopes",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["scope"] = []string{"openid offline_access pinniped:request-audience"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationRegexp: downstreamRedirectURI + `\?code=([^&]+)&scope=openid\+offline_access\+pinniped%3Arequest-audience&state=` + happyDownstreamState,
|
||||
wantDownstreamIDTokenSubject: upstreamLDAPURL + "&sub=" + happyLDAPUID,
|
||||
wantDownstreamIDTokenUsername: happyLDAPUsernameFromAuthenticator,
|
||||
wantDownstreamIDTokenGroups: happyLDAPGroups,
|
||||
wantDownstreamRequestedScopes: []string{"openid", "offline_access", "pinniped:request-audience"},
|
||||
wantDownstreamRedirectURI: downstreamRedirectURI,
|
||||
wantDownstreamGrantedScopes: []string{"openid", "offline_access", "pinniped:request-audience"},
|
||||
wantDownstreamNonce: downstreamNonce,
|
||||
wantDownstreamPKCEChallenge: downstreamPKCEChallenge,
|
||||
wantDownstreamPKCEChallengeMethod: downstreamPKCEChallengeMethod,
|
||||
wantDownstreamCustomSessionData: expectedHappyLDAPUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "happy LDAP when downstream OIDC validations are skipped because the openid scope was not requested",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["scope"] = []string{"email"}
|
||||
// The following prompt value is illegal when openid is requested, but note that openid is not requested.
|
||||
query["prompt"] = []string{"none login"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationRegexp: downstreamRedirectURI + `\?code=([^&]+)&scope=&state=` + happyDownstreamState, // no scopes granted
|
||||
wantDownstreamIDTokenSubject: upstreamLDAPURL + "&sub=" + happyLDAPUID,
|
||||
wantDownstreamIDTokenUsername: happyLDAPUsernameFromAuthenticator,
|
||||
wantDownstreamIDTokenGroups: happyLDAPGroups,
|
||||
wantDownstreamRequestedScopes: []string{"email"}, // only email was requested
|
||||
wantDownstreamRedirectURI: downstreamRedirectURI,
|
||||
wantDownstreamGrantedScopes: []string{}, // no scopes granted
|
||||
wantDownstreamNonce: downstreamNonce,
|
||||
wantDownstreamPKCEChallenge: downstreamPKCEChallenge,
|
||||
wantDownstreamPKCEChallengeMethod: downstreamPKCEChallengeMethod,
|
||||
wantDownstreamCustomSessionData: expectedHappyLDAPUpstreamCustomSession,
|
||||
},
|
||||
{
|
||||
name: "bad username LDAP login",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: happyLDAPDecodedState,
|
||||
formParams: url.Values{userParam: []string{"wrong!"}, passParam: []string{happyLDAPPassword}},
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectToLoginPageError: badUserPassErrParamValue,
|
||||
},
|
||||
{
|
||||
name: "bad password LDAP login",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: happyLDAPDecodedState,
|
||||
formParams: url.Values{userParam: []string{happyLDAPUsername}, passParam: []string{"wrong!"}},
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectToLoginPageError: badUserPassErrParamValue,
|
||||
},
|
||||
{
|
||||
name: "blank username LDAP login",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: happyLDAPDecodedState,
|
||||
formParams: url.Values{userParam: []string{""}, passParam: []string{happyLDAPPassword}},
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectToLoginPageError: badUserPassErrParamValue,
|
||||
},
|
||||
{
|
||||
name: "blank password LDAP login",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: happyLDAPDecodedState,
|
||||
formParams: url.Values{userParam: []string{happyLDAPUsername}, passParam: []string{""}},
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectToLoginPageError: badUserPassErrParamValue,
|
||||
},
|
||||
{
|
||||
name: "username and password sent as URI query params should be ignored since they are expected in form post body",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: happyLDAPDecodedState,
|
||||
reqURIQuery: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectToLoginPageError: badUserPassErrParamValue,
|
||||
},
|
||||
{
|
||||
name: "error during upstream LDAP authentication",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&erroringUpstreamLDAPIdentityProvider),
|
||||
decodedState: happyLDAPDecodedState,
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectToLoginPageError: internalErrParamValue,
|
||||
},
|
||||
{
|
||||
name: "downstream redirect uri does not match what is configured for client",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["redirect_uri"] = []string{"http://127.0.0.1/wrong_callback"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error using state downstream auth params",
|
||||
},
|
||||
{
|
||||
name: "downstream client does not exist",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["client_id"] = []string{"wrong_client_id"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error using state downstream auth params",
|
||||
},
|
||||
{
|
||||
name: "downstream client is missing",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
delete(query, "client_id")
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error using state downstream auth params",
|
||||
},
|
||||
{
|
||||
name: "response type is unsupported",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["response_type"] = []string{"unsupported"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error using state downstream auth params",
|
||||
},
|
||||
{
|
||||
name: "response type is missing",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
delete(query, "response_type")
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error using state downstream auth params",
|
||||
},
|
||||
{
|
||||
name: "PKCE code_challenge is missing",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
delete(query, "code_challenge")
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationString: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeErrorQuery),
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
},
|
||||
{
|
||||
name: "PKCE code_challenge_method is invalid",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["code_challenge_method"] = []string{"this-is-not-a-valid-pkce-alg"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationString: urlWithQuery(downstreamRedirectURI, fositeInvalidCodeChallengeErrorQuery),
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
},
|
||||
{
|
||||
name: "PKCE code_challenge_method is `plain`",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["code_challenge_method"] = []string{"plain"} // plain is not allowed
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationString: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
},
|
||||
{
|
||||
name: "PKCE code_challenge_method is missing",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
delete(query, "code_challenge_method")
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationString: urlWithQuery(downstreamRedirectURI, fositeMissingCodeChallengeMethodErrorQuery),
|
||||
wantUnnecessaryStoredRecords: 2, // fosite already stored the authcode and oidc session before it noticed the error
|
||||
},
|
||||
{
|
||||
name: "prompt param is not allowed to have none and another legal value at the same time",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["prompt"] = []string{"none login"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantStatus: http.StatusSeeOther,
|
||||
wantContentType: htmlContentType,
|
||||
wantBodyString: "",
|
||||
wantRedirectLocationString: urlWithQuery(downstreamRedirectURI, fositePromptHasNoneAndOtherValueErrorQuery),
|
||||
wantUnnecessaryStoredRecords: 1, // fosite already stored the authcode before it noticed the error
|
||||
},
|
||||
{
|
||||
name: "downstream state does not have enough entropy",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["state"] = []string{"short"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error using state downstream auth params",
|
||||
},
|
||||
{
|
||||
name: "downstream scopes do not match what is configured for client",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: modifyHappyLDAPDecodedState(func(data *oidc.UpstreamStateParamData) {
|
||||
query := copyOfHappyDownstreamRequestParamsQuery()
|
||||
query["scope"] = []string{"openid offline_access pinniped:request-audience scope_not_allowed"}
|
||||
data.AuthParams = query.Encode()
|
||||
}),
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error using state downstream auth params",
|
||||
},
|
||||
{
|
||||
name: "no upstream providers are configured or provider cannot be found by name",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder(), // empty
|
||||
decodedState: happyLDAPDecodedState,
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error finding upstream provider: provider not found",
|
||||
},
|
||||
{
|
||||
name: "upstream provider cannot be found by name and type",
|
||||
idps: oidctestutil.NewUpstreamIDPListerBuilder().WithLDAP(&upstreamLDAPIdentityProvider),
|
||||
decodedState: happyActiveDirectoryDecodedState, // correct upstream IDP name, but wrong upstream IDP type
|
||||
formParams: happyUsernamePasswordFormParams,
|
||||
wantErr: "error finding upstream provider: provider not found",
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
tt := test
|
||||
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
kubeClient := fake.NewSimpleClientset()
|
||||
secretsClient := kubeClient.CoreV1().Secrets("some-namespace")
|
||||
|
||||
// Configure fosite the same way that the production code would.
|
||||
// Inject this into our test subject at the last second so we get a fresh storage for every test.
|
||||
timeoutsConfiguration := oidc.DefaultOIDCTimeoutsConfiguration()
|
||||
kubeOauthStore := oidc.NewKubeStorage(secretsClient, timeoutsConfiguration)
|
||||
hmacSecretFunc := func() []byte { return []byte("some secret - must have at least 32 bytes") }
|
||||
require.GreaterOrEqual(t, len(hmacSecretFunc()), 32, "fosite requires that hmac secrets have at least 32 bytes")
|
||||
jwksProviderIsUnused := jwks.NewDynamicJWKSProvider()
|
||||
oauthHelper := oidc.FositeOauth2Helper(kubeOauthStore, downstreamIssuer, hmacSecretFunc, jwksProviderIsUnused, timeoutsConfiguration)
|
||||
|
||||
req := httptest.NewRequest(http.MethodPost, "/ignored", strings.NewReader(tt.formParams.Encode()))
|
||||
req.Header.Set("Content-Type", "application/x-www-form-urlencoded")
|
||||
if tt.reqURIQuery != nil {
|
||||
req.URL.RawQuery = tt.reqURIQuery.Encode()
|
||||
}
|
||||
|
||||
rsp := httptest.NewRecorder()
|
||||
|
||||
subject := NewPostHandler(downstreamIssuer, tt.idps.Build(), oauthHelper)
|
||||
|
||||
err := subject(rsp, req, happyEncodedUpstreamState, tt.decodedState)
|
||||
if tt.wantErr != "" {
|
||||
require.EqualError(t, err, tt.wantErr)
|
||||
require.Empty(t, kubeClient.Actions())
|
||||
return // the http response doesn't matter when the function returns an error, because the caller should handle the error
|
||||
}
|
||||
// Otherwise, expect no error.
|
||||
require.NoError(t, err)
|
||||
|
||||
require.Equal(t, tt.wantStatus, rsp.Code)
|
||||
testutil.RequireEqualContentType(t, rsp.Header().Get("Content-Type"), tt.wantContentType)
|
||||
|
||||
actualLocation := rsp.Header().Get("Location")
|
||||
|
||||
switch {
|
||||
case tt.wantRedirectLocationRegexp != "":
|
||||
// Expecting a success redirect to the client.
|
||||
require.Equal(t, tt.wantBodyString, rsp.Body.String())
|
||||
require.Len(t, rsp.Header().Values("Location"), 1)
|
||||
oidctestutil.RequireAuthCodeRegexpMatch(
|
||||
t,
|
||||
actualLocation,
|
||||
tt.wantRedirectLocationRegexp,
|
||||
kubeClient,
|
||||
secretsClient,
|
||||
kubeOauthStore,
|
||||
tt.wantDownstreamGrantedScopes,
|
||||
tt.wantDownstreamIDTokenSubject,
|
||||
tt.wantDownstreamIDTokenUsername,
|
||||
tt.wantDownstreamIDTokenGroups,
|
||||
tt.wantDownstreamRequestedScopes,
|
||||
tt.wantDownstreamPKCEChallenge,
|
||||
tt.wantDownstreamPKCEChallengeMethod,
|
||||
tt.wantDownstreamNonce,
|
||||
downstreamClientID,
|
||||
tt.wantDownstreamRedirectURI,
|
||||
tt.wantDownstreamCustomSessionData,
|
||||
)
|
||||
case tt.wantRedirectToLoginPageError != "":
|
||||
// Expecting an error redirect to the login UI page.
|
||||
require.Equal(t, tt.wantBodyString, rsp.Body.String())
|
||||
expectedLocation := downstreamIssuer + oidc.PinnipedLoginPath +
|
||||
"?err=" + tt.wantRedirectToLoginPageError + "&state=" + happyEncodedUpstreamState
|
||||
require.Equal(t, expectedLocation, actualLocation)
|
||||
require.Len(t, kubeClient.Actions(), tt.wantUnnecessaryStoredRecords)
|
||||
case tt.wantRedirectLocationString != "":
|
||||
// Expecting an error redirect to the client.
|
||||
require.Equal(t, tt.wantBodyString, rsp.Body.String())
|
||||
require.Equal(t, tt.wantRedirectLocationString, actualLocation)
|
||||
require.Len(t, kubeClient.Actions(), tt.wantUnnecessaryStoredRecords)
|
||||
case tt.wantBodyFormResponseRegexp != "":
|
||||
// Expecting the body of the response to be a html page with a form (for "response_mode=form_post").
|
||||
_, hasLocationHeader := rsp.Header()["Location"]
|
||||
require.False(t, hasLocationHeader)
|
||||
oidctestutil.RequireAuthCodeRegexpMatch(
|
||||
t,
|
||||
rsp.Body.String(),
|
||||
tt.wantBodyFormResponseRegexp,
|
||||
kubeClient,
|
||||
secretsClient,
|
||||
kubeOauthStore,
|
||||
tt.wantDownstreamGrantedScopes,
|
||||
tt.wantDownstreamIDTokenSubject,
|
||||
tt.wantDownstreamIDTokenUsername,
|
||||
tt.wantDownstreamIDTokenGroups,
|
||||
tt.wantDownstreamRequestedScopes,
|
||||
tt.wantDownstreamPKCEChallenge,
|
||||
tt.wantDownstreamPKCEChallengeMethod,
|
||||
tt.wantDownstreamNonce,
|
||||
downstreamClientID,
|
||||
tt.wantDownstreamRedirectURI,
|
||||
tt.wantDownstreamCustomSessionData,
|
||||
)
|
||||
default:
|
||||
require.Failf(t, "test should have expected a redirect or form body",
|
||||
"actual location was %q", actualLocation)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
@ -1,20 +1,30 @@
|
||||
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
|
||||
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Package oidc contains common OIDC functionality needed by Pinniped.
|
||||
package oidc
|
||||
|
||||
import (
|
||||
"crypto/subtle"
|
||||
"errors"
|
||||
"fmt"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
coreosoidc "github.com/coreos/go-oidc/v3/oidc"
|
||||
"github.com/felixge/httpsnoop"
|
||||
"github.com/ory/fosite"
|
||||
"github.com/ory/fosite/compose"
|
||||
errorsx "github.com/pkg/errors"
|
||||
|
||||
"go.pinniped.dev/generated/latest/apis/supervisor/idpdiscovery/v1alpha1"
|
||||
"go.pinniped.dev/internal/httputil/httperr"
|
||||
"go.pinniped.dev/internal/oidc/csrftoken"
|
||||
"go.pinniped.dev/internal/oidc/jwks"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/oidc/provider/formposthtml"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/psession"
|
||||
"go.pinniped.dev/pkg/oidcclient/nonce"
|
||||
"go.pinniped.dev/pkg/oidcclient/pkce"
|
||||
)
|
||||
@ -26,13 +36,17 @@ const (
|
||||
CallbackEndpointPath = "/callback"
|
||||
JWKSEndpointPath = "/jwks.json"
|
||||
PinnipedIDPsPathV1Alpha1 = "/v1alpha1/pinniped_identity_providers"
|
||||
PinnipedLoginPath = "/login"
|
||||
)
|
||||
|
||||
const (
|
||||
// Just in case we need to make a breaking change to the format of the upstream state param,
|
||||
// we are including a format version number. This gives the opportunity for a future version of Pinniped
|
||||
// to have the consumer of this format decide to reject versions that it doesn't understand.
|
||||
UpstreamStateParamFormatVersion = "1"
|
||||
//
|
||||
// Version 1 was the original version.
|
||||
// Version 2 added the UpstreamType field to the UpstreamStateParamData struct.
|
||||
UpstreamStateParamFormatVersion = "2"
|
||||
|
||||
// The `name` passed to the encoder for encoding the upstream state param value. This name is short
|
||||
// because it will be encoded into the upstream state param value and we're trying to keep that small.
|
||||
@ -93,6 +107,7 @@ type Codec interface {
|
||||
type UpstreamStateParamData struct {
|
||||
AuthParams string `json:"p"`
|
||||
UpstreamName string `json:"u"`
|
||||
UpstreamType string `json:"t"`
|
||||
Nonce nonce.Nonce `json:"n"`
|
||||
CSRFToken csrftoken.CSRFToken `json:"c"`
|
||||
PKCECode pkce.Code `json:"k"`
|
||||
@ -295,3 +310,171 @@ func ScopeWasRequested(authorizeRequester fosite.AuthorizeRequester, scopeName s
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func ReadStateParamAndValidateCSRFCookie(r *http.Request, cookieDecoder Decoder, stateDecoder Decoder) (string, *UpstreamStateParamData, error) {
|
||||
csrfValue, err := readCSRFCookie(r, cookieDecoder)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
encodedState, decodedState, err := readStateParam(r, stateDecoder)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
err = validateCSRFValue(decodedState, csrfValue)
|
||||
if err != nil {
|
||||
return "", nil, err
|
||||
}
|
||||
|
||||
return encodedState, decodedState, nil
|
||||
}
|
||||
|
||||
func readCSRFCookie(r *http.Request, cookieDecoder Decoder) (csrftoken.CSRFToken, error) {
|
||||
receivedCSRFCookie, err := r.Cookie(CSRFCookieName)
|
||||
if err != nil {
|
||||
// Error means that the cookie was not found
|
||||
return "", httperr.Wrap(http.StatusForbidden, "CSRF cookie is missing", err)
|
||||
}
|
||||
|
||||
var csrfFromCookie csrftoken.CSRFToken
|
||||
err = cookieDecoder.Decode(CSRFCookieEncodingName, receivedCSRFCookie.Value, &csrfFromCookie)
|
||||
if err != nil {
|
||||
return "", httperr.Wrap(http.StatusForbidden, "error reading CSRF cookie", err)
|
||||
}
|
||||
|
||||
return csrfFromCookie, nil
|
||||
}
|
||||
|
||||
func readStateParam(r *http.Request, stateDecoder Decoder) (string, *UpstreamStateParamData, error) {
|
||||
encodedState := r.FormValue("state")
|
||||
|
||||
if encodedState == "" {
|
||||
return "", nil, httperr.New(http.StatusBadRequest, "state param not found")
|
||||
}
|
||||
|
||||
var state UpstreamStateParamData
|
||||
if err := stateDecoder.Decode(
|
||||
UpstreamStateParamEncodingName,
|
||||
r.FormValue("state"),
|
||||
&state,
|
||||
); err != nil {
|
||||
return "", nil, httperr.New(http.StatusBadRequest, "error reading state")
|
||||
}
|
||||
|
||||
if state.FormatVersion != UpstreamStateParamFormatVersion {
|
||||
return "", nil, httperr.New(http.StatusUnprocessableEntity, "state format version is invalid")
|
||||
}
|
||||
|
||||
return encodedState, &state, nil
|
||||
}
|
||||
|
||||
func validateCSRFValue(state *UpstreamStateParamData, csrfCookieValue csrftoken.CSRFToken) error {
|
||||
if subtle.ConstantTimeCompare([]byte(state.CSRFToken), []byte(csrfCookieValue)) != 1 {
|
||||
return httperr.New(http.StatusForbidden, "CSRF value does not match")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
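// A minimal usage sketch (not part of this change, and assuming it lives in this same package):
// a callback-style handler would run the combined CSRF/state check first and bail out on failure,
// letting the httperr status bubble up to whatever wraps the handler.
func handleCallbackSketch(r *http.Request, cookieDecoder Decoder, stateDecoder Decoder) error {
	encodedState, decodedState, err := ReadStateParamAndValidateCSRFCookie(r, cookieDecoder, stateDecoder)
	if err != nil {
		plog.InfoErr("state or CSRF cookie validation failed", err)
		return err // an httperr carrying the HTTP status to write
	}
	_ = encodedState // would be echoed back on any redirect to the login UI
	_ = decodedState // carries the upstream IDP name/type chosen at authorization time
	return nil
}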
|
||||
// FindUpstreamIDPByNameAndType finds the requested IDP by name and type, or returns an error.
|
||||
// Note that AD and LDAP IDPs both return the same interface type, but different ProviderTypes values.
|
||||
func FindUpstreamIDPByNameAndType(
|
||||
idpLister UpstreamIdentityProvidersLister,
|
||||
upstreamName string,
|
||||
upstreamType string,
|
||||
) (
|
||||
provider.UpstreamOIDCIdentityProviderI,
|
||||
provider.UpstreamLDAPIdentityProviderI,
|
||||
psession.ProviderType,
|
||||
error,
|
||||
) {
|
||||
switch upstreamType {
|
||||
case string(v1alpha1.IDPTypeOIDC):
|
||||
for _, p := range idpLister.GetOIDCIdentityProviders() {
|
||||
if p.GetName() == upstreamName {
|
||||
return p, nil, psession.ProviderTypeOIDC, nil
|
||||
}
|
||||
}
|
||||
case string(v1alpha1.IDPTypeLDAP):
|
||||
for _, p := range idpLister.GetLDAPIdentityProviders() {
|
||||
if p.GetName() == upstreamName {
|
||||
return nil, p, psession.ProviderTypeLDAP, nil
|
||||
}
|
||||
}
|
||||
case string(v1alpha1.IDPTypeActiveDirectory):
|
||||
for _, p := range idpLister.GetActiveDirectoryIdentityProviders() {
|
||||
if p.GetName() == upstreamName {
|
||||
return nil, p, psession.ProviderTypeActiveDirectory, nil
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil, nil, "", errors.New("provider not found")
|
||||
}
|
||||
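// Illustrative only (the helper name is hypothetical): a caller is expected to dispatch on the
// ProviderType returned by FindUpstreamIDPByNameAndType, since LDAP and Active Directory share
// an interface but OIDC does not.
func dispatchToUpstreamSketch(idpLister UpstreamIdentityProvidersLister, state *UpstreamStateParamData) error {
	oidcUpstream, ldapUpstream, providerType, err := FindUpstreamIDPByNameAndType(idpLister, state.UpstreamName, state.UpstreamType)
	if err != nil {
		return httperr.Wrap(http.StatusUnprocessableEntity, "error finding upstream provider", err)
	}
	switch providerType {
	case psession.ProviderTypeOIDC:
		_ = oidcUpstream // continue the OIDC callback flow
	case psession.ProviderTypeLDAP, psession.ProviderTypeActiveDirectory:
		_ = ldapUpstream // continue the username/password flow
	}
	return nil
}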
|
||||
// WriteAuthorizeError writes an authorization error as it should be returned by the authorization endpoint and other
|
||||
// similar endpoints that are the end of the downstream authcode flow. Error responses are written in the usual fosite style.
|
||||
func WriteAuthorizeError(w http.ResponseWriter, oauthHelper fosite.OAuth2Provider, authorizeRequester fosite.AuthorizeRequester, err error, isBrowserless bool) {
|
||||
if plog.Enabled(plog.LevelTrace) {
|
||||
// When trace level logging is enabled, include the stack trace in the log message.
|
||||
keysAndValues := FositeErrorForLog(err)
|
||||
errWithStack := errorsx.WithStack(err)
|
||||
keysAndValues = append(keysAndValues, "errWithStack")
|
||||
// klog always prints error values using %s, which does not include stack traces,
|
||||
// so convert the error to a string which includes the stack trace here.
|
||||
keysAndValues = append(keysAndValues, fmt.Sprintf("%+v", errWithStack))
|
||||
plog.Trace("authorize response error", keysAndValues...)
|
||||
} else {
|
||||
plog.Info("authorize response error", FositeErrorForLog(err)...)
|
||||
}
|
||||
if isBrowserless {
|
||||
w = rewriteStatusSeeOtherToStatusFoundForBrowserless(w)
|
||||
}
|
||||
// Return an error according to OIDC spec 3.1.2.6 (second paragraph).
|
||||
oauthHelper.WriteAuthorizeError(w, authorizeRequester, err)
|
||||
}
|
||||
|
||||
// PerformAuthcodeRedirect successfully completes a downstream login by creating a session and
|
||||
// writing the authcode redirect response as it should be returned by the authorization endpoint and other
|
||||
// similar endpoints that are the end of the downstream authcode flow.
|
||||
func PerformAuthcodeRedirect(
|
||||
r *http.Request,
|
||||
w http.ResponseWriter,
|
||||
oauthHelper fosite.OAuth2Provider,
|
||||
authorizeRequester fosite.AuthorizeRequester,
|
||||
openIDSession *psession.PinnipedSession,
|
||||
isBrowserless bool,
|
||||
) {
|
||||
authorizeResponder, err := oauthHelper.NewAuthorizeResponse(r.Context(), authorizeRequester, openIDSession)
|
||||
if err != nil {
|
||||
plog.WarningErr("error while generating and saving authcode", err)
|
||||
WriteAuthorizeError(w, oauthHelper, authorizeRequester, err, isBrowserless)
|
||||
return
|
||||
}
|
||||
if isBrowserless {
|
||||
w = rewriteStatusSeeOtherToStatusFoundForBrowserless(w)
|
||||
}
|
||||
oauthHelper.WriteAuthorizeResponse(w, authorizeRequester, authorizeResponder)
|
||||
}
|
||||
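// Rough sketch of the intended call pattern for the two helpers above (the names below are
// hypothetical): an endpoint at the end of the downstream authcode flow either writes a
// fosite-style error or completes the login with an authcode redirect, never both.
func finishDownstreamAuthSketch(
	r *http.Request,
	w http.ResponseWriter,
	oauthHelper fosite.OAuth2Provider,
	authorizeRequester fosite.AuthorizeRequester,
	session *psession.PinnipedSession,
	loginErr error,
	isBrowserless bool,
) {
	if loginErr != nil {
		WriteAuthorizeError(w, oauthHelper, authorizeRequester, loginErr, isBrowserless)
		return
	}
	PerformAuthcodeRedirect(r, w, oauthHelper, authorizeRequester, session, isBrowserless)
}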
|
||||
func rewriteStatusSeeOtherToStatusFoundForBrowserless(w http.ResponseWriter) http.ResponseWriter {
|
||||
// rewrite http.StatusSeeOther to http.StatusFound for backwards compatibility with old pinniped CLIs.
|
||||
// we can drop this in a few releases once we feel enough time has passed for users to update.
|
||||
//
|
||||
// WriteAuthorizeResponse/WriteAuthorizeError calls used to result in http.StatusFound until
|
||||
// https://github.com/ory/fosite/pull/636 changed it to http.StatusSeeOther to address
|
||||
// https://tools.ietf.org/id/draft-ietf-oauth-security-topics-18.html#section-4.11
|
||||
// Safari has the bad behavior in the case of http.StatusFound and not just http.StatusTemporaryRedirect.
|
||||
//
|
||||
// in the browserless flows, the OAuth client is the pinniped CLI and it already has access to the user's
|
||||
// password. Thus there is no security issue with using http.StatusFound vs. http.StatusSeeOther.
|
||||
return httpsnoop.Wrap(w, httpsnoop.Hooks{
|
||||
WriteHeader: func(delegate httpsnoop.WriteHeaderFunc) httpsnoop.WriteHeaderFunc {
|
||||
return func(code int) {
|
||||
if code == http.StatusSeeOther {
|
||||
code = http.StatusFound
|
||||
}
|
||||
delegate(code)
|
||||
}
|
||||
},
|
||||
})
|
||||
}
|
||||
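// A tiny check that could live in a test (assumes net/http/httptest is imported): the wrapper
// above should turn fosite's 303 See Other into a 302 Found for old CLIs.
func exampleStatusRewrite() bool {
	rec := httptest.NewRecorder()
	w := rewriteStatusSeeOtherToStatusFoundForBrowserless(rec)
	w.WriteHeader(http.StatusSeeOther)
	return rec.Code == http.StatusFound // expected to be true
}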
|
15 internal/oidc/provider/csp/csp.go Normal file
@ -0,0 +1,15 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Package csp defines helpers related to HTML Content Security Policies.
|
||||
package csp
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
"encoding/base64"
|
||||
)
|
||||
|
||||
func Hash(s string) string {
|
||||
hashBytes := sha256.Sum256([]byte(s))
|
||||
return "sha256-" + base64.StdEncoding.EncodeToString(hashBytes[:])
|
||||
}
|
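// Illustrative usage (hypothetical function, not part of this change): Hash produces the
// 'sha256-<base64>' token that a CSP script-src directive expects for an inline script.
func exampleScriptSrcDirective() string {
	// The expected value matches the test vector used in csp_test.go below:
	// "script-src 'sha256-RFWPLDbv2BY+rCkDzsE+0fr8ylGr2R2faWMhq4lfEQc='"
	return "script-src '" + Hash("doSomething();") + "'"
}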
15 internal/oidc/provider/csp/csp_test.go Normal file
@ -0,0 +1,15 @@
|
||||
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package csp
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestHash(t *testing.T) {
|
||||
// Example test vector from https://content-security-policy.com/hash/.
|
||||
require.Equal(t, "sha256-RFWPLDbv2BY+rCkDzsE+0fr8ylGr2R2faWMhq4lfEQc=", Hash("doSomething();"))
|
||||
}
|
@ -6,23 +6,23 @@
|
||||
package formposthtml
|
||||
|
||||
import (
|
||||
"crypto/sha256"
|
||||
_ "embed" // Needed to trigger //go:embed directives below.
|
||||
"encoding/base64"
|
||||
"html/template"
|
||||
"strings"
|
||||
|
||||
"github.com/tdewolff/minify/v2/minify"
|
||||
|
||||
"go.pinniped.dev/internal/oidc/provider/csp"
|
||||
)
|
||||
|
||||
var (
|
||||
//go:embed form_post.css
|
||||
rawCSS string
|
||||
minifiedCSS = mustMinify(minify.CSS(rawCSS))
|
||||
minifiedCSS = panicOnError(minify.CSS(rawCSS))
|
||||
|
||||
//go:embed form_post.js
|
||||
rawJS string
|
||||
minifiedJS = mustMinify(minify.JS(rawJS))
|
||||
minifiedJS = panicOnError(minify.JS(rawJS))
|
||||
|
||||
//go:embed form_post.gohtml
|
||||
rawHTMLTemplate string
|
||||
@ -37,28 +37,23 @@ var parsedHTMLTemplate = template.Must(template.New("form_post.gohtml").Funcs(te
|
||||
// Generate the CSP header value once since it's effectively constant.
|
||||
var cspValue = strings.Join([]string{
|
||||
`default-src 'none'`,
|
||||
`script-src '` + cspHash(minifiedJS) + `'`,
|
||||
`style-src '` + cspHash(minifiedCSS) + `'`,
|
||||
`script-src '` + csp.Hash(minifiedJS) + `'`,
|
||||
`style-src '` + csp.Hash(minifiedCSS) + `'`,
|
||||
`img-src data:`,
|
||||
`connect-src *`,
|
||||
`frame-ancestors 'none'`,
|
||||
}, "; ")
|
||||
|
||||
func mustMinify(s string, err error) string {
|
||||
func panicOnError(s string, err error) string {
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
func cspHash(s string) string {
|
||||
hashBytes := sha256.Sum256([]byte(s))
|
||||
return "sha256-" + base64.StdEncoding.EncodeToString(hashBytes[:])
|
||||
}
|
||||
|
||||
// ContentSecurityPolicy returns the Content-Security-Policy header value to make the Template() operate correctly.
|
||||
//
|
||||
// See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy/default-src#:~:text=%27%3Chash-algorithm%3E-%3Cbase64-value%3E%27.
|
||||
// See https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Security-Policy.
|
||||
func ContentSecurityPolicy() string { return cspValue }
|
||||
|
||||
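// A sketch of the intended call pattern (assumes a net/http import here and that Template()
// returns the parsed *template.Template as described below): the handler must send the matching
// CSP header or the hash-allowlisted inline script and style in the page will be blocked.
func serveFormPostPageSketch(w http.ResponseWriter, pageData interface{}) error {
	w.Header().Set("Content-Security-Policy", ContentSecurityPolicy())
	return Template().Execute(w, pageData)
}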
// Template returns the html/template.Template for rendering the response_type=form_post response page.
|
||||
|
@ -93,10 +93,6 @@ func TestContentSecurityPolicyHashes(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestHelpers(t *testing.T) {
|
||||
// These are silly tests, but they are easy to write so we might as well have them.
|
||||
require.Equal(t, "test", mustMinify("test", nil))
|
||||
require.PanicsWithError(t, "some error", func() { mustMinify("", fmt.Errorf("some error")) })
|
||||
|
||||
// Example test vector from https://content-security-policy.com/hash/.
|
||||
require.Equal(t, "sha256-RFWPLDbv2BY+rCkDzsE+0fr8ylGr2R2faWMhq4lfEQc=", cspHash("doSomething();"))
|
||||
require.Equal(t, "test", panicOnError("test", nil))
|
||||
require.PanicsWithError(t, "some error", func() { panicOnError("", fmt.Errorf("some error")) })
|
||||
}
|
||||
|
@ -1,4 +1,4 @@
|
||||
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
|
||||
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package manager
|
||||
@ -18,6 +18,7 @@ import (
|
||||
"go.pinniped.dev/internal/oidc/dynamiccodec"
|
||||
"go.pinniped.dev/internal/oidc/idpdiscovery"
|
||||
"go.pinniped.dev/internal/oidc/jwks"
|
||||
"go.pinniped.dev/internal/oidc/login"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/oidc/token"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
@ -134,6 +135,13 @@ func (m *Manager) SetProviders(federationDomains ...*provider.FederationDomainIs
|
||||
oauthHelperWithKubeStorage,
|
||||
)
|
||||
|
||||
m.providerHandlers[(issuerHostWithPath + oidc.PinnipedLoginPath)] = login.NewHandler(
|
||||
upstreamStateEncoder,
|
||||
csrfCookieEncoder,
|
||||
login.NewGetHandler(incomingProvider.IssuerPath()+oidc.PinnipedLoginPath),
|
||||
login.NewPostHandler(issuer, m.upstreamIDPs, oauthHelperWithKubeStorage),
|
||||
)
|
||||
|
||||
plog.Debug("oidc provider manager added or updated issuer", "issuer", issuer)
|
||||
}
|
||||
}
|
||||
|
107 internal/plog/config.go Normal file
@ -0,0 +1,107 @@
|
||||
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package plog
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/json"
|
||||
"strconv"
|
||||
"time"
|
||||
|
||||
"go.uber.org/zap/zapcore"
|
||||
"k8s.io/apimachinery/pkg/util/wait"
|
||||
"k8s.io/component-base/logs"
|
||||
|
||||
"go.pinniped.dev/internal/constable"
|
||||
)
|
||||
|
||||
type LogFormat string
|
||||
|
||||
func (l *LogFormat) UnmarshalJSON(b []byte) error {
|
||||
switch string(b) {
|
||||
case `""`, `"json"`:
|
||||
*l = FormatJSON
|
||||
case `"text"`:
|
||||
*l = FormatText
|
||||
// there is no "cli" case because it is not a supported option via our config
|
||||
default:
|
||||
return errInvalidLogFormat
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
const (
|
||||
FormatJSON LogFormat = "json"
|
||||
FormatText LogFormat = "text"
|
||||
FormatCLI LogFormat = "cli" // only used by the pinniped CLI and not the server components
|
||||
|
||||
errInvalidLogLevel = constable.Error("invalid log level, valid choices are the empty string, info, debug, trace and all")
|
||||
errInvalidLogFormat = constable.Error("invalid log format, valid choices are the empty string, json and text")
|
||||
)
|
||||
|
||||
var _ json.Unmarshaler = func() *LogFormat {
|
||||
var f LogFormat
|
||||
return &f
|
||||
}()
|
||||
|
||||
type LogSpec struct {
|
||||
Level LogLevel `json:"level,omitempty"`
|
||||
Format LogFormat `json:"format,omitempty"`
|
||||
}
|
||||
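// Example decode (illustrative only): an empty or "json" format selects JSON output, "text"
// selects the deprecated klog text output, and anything else fails with errInvalidLogFormat.
func exampleDecodeLogSpec() (LogSpec, error) {
	var spec LogSpec
	err := json.Unmarshal([]byte(`{"level":"debug","format":"json"}`), &spec)
	// on success: spec.Level == LevelDebug and spec.Format == FormatJSON
	return spec, err
}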
|
||||
func MaybeSetDeprecatedLogLevel(level *LogLevel, log *LogSpec) {
|
||||
if level != nil {
|
||||
Warning("logLevel is deprecated, set log.level instead")
|
||||
log.Level = *level
|
||||
}
|
||||
}
|
||||
|
||||
func ValidateAndSetLogLevelAndFormatGlobally(ctx context.Context, spec LogSpec) error {
|
||||
klogLevel := klogLevelForPlogLevel(spec.Level)
|
||||
if klogLevel < 0 {
|
||||
return errInvalidLogLevel
|
||||
}
|
||||
|
||||
// set the global log levels used by our code and the kube code underneath us
|
||||
if _, err := logs.GlogSetter(strconv.Itoa(int(klogLevel))); err != nil {
|
||||
panic(err) // programmer error
|
||||
}
|
||||
globalLevel.SetLevel(zapcore.Level(-klogLevel)) // klog levels are inverted when zap handles them
|
||||
|
||||
var encoding string
|
||||
switch spec.Format {
|
||||
case "", FormatJSON:
|
||||
encoding = "json"
|
||||
case FormatCLI:
|
||||
encoding = "console"
|
||||
case FormatText:
|
||||
encoding = "text"
|
||||
default:
|
||||
return errInvalidLogFormat
|
||||
}
|
||||
|
||||
log, flush, err := newLogr(ctx, encoding, klogLevel)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
setGlobalLoggers(log, flush)
|
||||
|
||||
// nolint: exhaustive // the switch above is exhaustive for format already
|
||||
switch spec.Format {
|
||||
case FormatCLI:
|
||||
return nil // do not spawn goroutines on the CLI to allow the CLI to call this more than once
|
||||
case FormatText:
|
||||
Warning("setting log.format to 'text' is deprecated - this option will be removed in a future release")
|
||||
}
|
||||
|
||||
// do spawn goroutines on the server
|
||||
go wait.UntilWithContext(ctx, func(_ context.Context) { flush() }, time.Minute)
|
||||
go func() {
|
||||
<-ctx.Done()
|
||||
flush() // best effort flush before shutdown as this is not coordinated with a wait group
|
||||
}()
|
||||
|
||||
return nil
|
||||
}
|
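// Hypothetical startup wiring (a sketch, not code from this change): a server component would
// validate the configured level and format once at boot, after which the goroutines spawned
// above keep flushing the zap logger until the context is cancelled.
func configureServerLoggingSketch(ctx context.Context, spec LogSpec) error {
	if err := ValidateAndSetLogLevelAndFormatGlobally(ctx, spec); err != nil {
		return err
	}
	Info("logging configured", "level", spec.Level, "format", spec.Format)
	return nil
}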
359 internal/plog/config_test.go Normal file
@ -0,0 +1,359 @@
|
||||
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package plog
|
||||
|
||||
import (
|
||||
"bufio"
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"runtime"
|
||||
"strconv"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"go.uber.org/zap"
|
||||
"k8s.io/component-base/logs"
|
||||
"k8s.io/klog/v2"
|
||||
"k8s.io/klog/v2/textlogger"
|
||||
clocktesting "k8s.io/utils/clock/testing"
|
||||
)
|
||||
|
||||
func TestFormat(t *testing.T) {
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
t.Cleanup(cancel)
|
||||
|
||||
var buf bytes.Buffer
|
||||
|
||||
scanner := bufio.NewScanner(&buf)
|
||||
|
||||
now, err := time.Parse(time.RFC3339Nano, "2022-11-21T23:37:26.953313745Z")
|
||||
require.NoError(t, err)
|
||||
fakeClock := clocktesting.NewFakeClock(now)
|
||||
nowStr := now.Local().Format(time.RFC1123)
|
||||
|
||||
ctx = TestZapOverrides(ctx, t, &buf, nil, zap.WithClock(ZapClock(fakeClock)))
|
||||
|
||||
err = ValidateAndSetLogLevelAndFormatGlobally(ctx, LogSpec{Level: LevelDebug})
|
||||
require.NoError(t, err)
|
||||
|
||||
wd, err := os.Getwd()
|
||||
require.NoError(t, err)
|
||||
|
||||
const startLogLine = 46 // make this match the current line number
|
||||
|
||||
Info("hello", "happy", "day", "duration", time.Hour+time.Minute)
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.JSONEq(t, fmt.Sprintf(`
|
||||
{
|
||||
"level": "info",
|
||||
"timestamp": "2022-11-21T23:37:26.953313Z",
|
||||
"caller": "%s/config_test.go:%d$plog.TestFormat",
|
||||
"message": "hello",
|
||||
"happy": "day",
|
||||
"duration": "1h1m0s"
|
||||
}`, wd, startLogLine+2), scanner.Text())
|
||||
|
||||
Logr().WithName("burrito").Error(errInvalidLogLevel, "wee", "a", "b")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.JSONEq(t, fmt.Sprintf(`
|
||||
{
|
||||
"level": "error",
|
||||
"timestamp": "2022-11-21T23:37:26.953313Z",
|
||||
"caller": "%s/config_test.go:%d$plog.TestFormat",
|
||||
"message": "wee",
|
||||
"a": "b",
|
||||
"error": "invalid log level, valid choices are the empty string, info, debug, trace and all",
|
||||
"logger": "burrito"
|
||||
}`, wd, startLogLine+2+13), scanner.Text())
|
||||
|
||||
Logr().V(klogLevelWarning).Info("hey") // note that this fails to set the custom warning key because it is not via plog
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.JSONEq(t, fmt.Sprintf(`
|
||||
{
|
||||
"level": "info",
|
||||
"timestamp": "2022-11-21T23:37:26.953313Z",
|
||||
"caller": "%s/config_test.go:%d$plog.TestFormat",
|
||||
"message": "hey"
|
||||
}`, wd, startLogLine+2+13+14), scanner.Text())
|
||||
|
||||
Warning("bad stuff") // note that this sets the custom warning key because it is via plog
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.JSONEq(t, fmt.Sprintf(`
|
||||
{
|
||||
"level": "info",
|
||||
"timestamp": "2022-11-21T23:37:26.953313Z",
|
||||
"caller": "%s/config_test.go:%d$plog.TestFormat",
|
||||
"message": "bad stuff",
|
||||
"warning": true
|
||||
}`, wd, startLogLine+2+13+14+11), scanner.Text())
|
||||
|
||||
func() { DebugErr("something happened", errInvalidLogFormat, "an", "item") }()
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.JSONEq(t, fmt.Sprintf(`
|
||||
{
|
||||
"level": "debug",
|
||||
"timestamp": "2022-11-21T23:37:26.953313Z",
|
||||
"caller": "%s/config_test.go:%d$plog.TestFormat.func1",
|
||||
"message": "something happened",
|
||||
"error": "invalid log format, valid choices are the empty string, json and text",
|
||||
"an": "item"
|
||||
}`, wd, startLogLine+2+13+14+11+12), scanner.Text())
|
||||
|
||||
Trace("should not be logged", "for", "sure")
|
||||
require.Empty(t, buf.String())
|
||||
|
||||
Logr().V(klogLevelAll).Info("also should not be logged", "open", "close")
|
||||
require.Empty(t, buf.String())
|
||||
|
||||
ctx = TestZapOverrides(ctx, t, &buf, nil, zap.WithClock(ZapClock(fakeClock)), zap.AddStacktrace(LevelInfo))
|
||||
|
||||
err = ValidateAndSetLogLevelAndFormatGlobally(ctx, LogSpec{Level: LevelDebug})
|
||||
require.NoError(t, err)
|
||||
|
||||
WithName("stacky").WithName("does").Info("has a stack trace!")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.JSONEq(t, fmt.Sprintf(`
|
||||
{
|
||||
"level": "info",
|
||||
"timestamp": "2022-11-21T23:37:26.953313Z",
|
||||
"caller": "%s/config_test.go:%d$plog.TestFormat",
|
||||
"message": "has a stack trace!",
|
||||
"logger": "stacky.does",
|
||||
"stacktrace": %s
|
||||
}`, wd, startLogLine+2+13+14+11+12+24,
|
||||
strconv.Quote(
|
||||
fmt.Sprintf(
|
||||
`go.pinniped.dev/internal/plog.TestFormat
|
||||
%s/config_test.go:%d
|
||||
testing.tRunner
|
||||
%s/src/testing/testing.go:1439`,
|
||||
wd, startLogLine+2+13+14+11+12+24, runtime.GOROOT(),
|
||||
),
|
||||
),
|
||||
), scanner.Text())
|
||||
|
||||
ctx = TestZapOverrides(ctx, t, &buf, nil, zap.WithClock(ZapClock(fakeClock)))
|
||||
|
||||
err = ValidateAndSetLogLevelAndFormatGlobally(ctx, LogSpec{Level: LevelDebug, Format: FormatCLI})
|
||||
require.NoError(t, err)
|
||||
|
||||
DebugErr("something happened", errInvalidLogFormat, "an", "item")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(nowStr+` plog/config_test.go:%d something happened {"error": "invalid log format, valid choices are the empty string, json and text", "an": "item"}`,
|
||||
startLogLine+2+13+14+11+12+24+28), scanner.Text())
|
||||
|
||||
Logr().WithName("burrito").Error(errInvalidLogLevel, "wee", "a", "b", "slightly less than a year", 363*24*time.Hour, "slightly more than 2 years", 2*367*24*time.Hour)
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(nowStr+` burrito plog/config_test.go:%d wee {"a": "b", "slightly less than a year": "363d", "slightly more than 2 years": "2y4d", "error": "invalid log level, valid choices are the empty string, info, debug, trace and all"}`,
|
||||
startLogLine+2+13+14+11+12+24+28+6), scanner.Text())
|
||||
|
||||
origTimeNow := textlogger.TimeNow
|
||||
t.Cleanup(func() {
|
||||
textlogger.TimeNow = origTimeNow
|
||||
})
|
||||
textlogger.TimeNow = func() time.Time {
|
||||
return now
|
||||
}
|
||||
|
||||
old := New().WithName("created before mode change").WithValues("is", "old")
|
||||
|
||||
err = ValidateAndSetLogLevelAndFormatGlobally(ctx, LogSpec{Level: LevelDebug, Format: FormatText})
|
||||
require.NoError(t, err)
|
||||
pid := os.Getpid()
|
||||
|
||||
// check for the deprecation warning
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config.go:96] "setting log.format to 'text' is deprecated - this option will be removed in a future release" warning=true`,
|
||||
pid), scanner.Text())
|
||||
|
||||
Debug("what is happening", "does klog", "work?")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "what is happening" does klog="work?"`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26), scanner.Text())
|
||||
|
||||
Logr().WithName("panda").V(KlogLevelDebug).Info("are the best", "yes?", "yes.")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "panda: are the best" yes?="yes."`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26+6), scanner.Text())
|
||||
|
||||
New().WithName("hi").WithName("there").WithValues("a", 1, "b", 2).Always("do it")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "hi/there: do it" a=1 b=2`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26+6+6), scanner.Text())
|
||||
|
||||
l := WithValues("x", 33, "z", 22)
|
||||
l.Debug("what to do")
|
||||
l.Debug("and why")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "what to do" x=33 z=22`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26+6+6+7), scanner.Text())
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "and why" x=33 z=22`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26+6+6+7+1), scanner.Text())
|
||||
|
||||
old.Always("should be klog text format", "for", "sure")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "created before mode change: should be klog text format" is="old" for="sure"`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26+6+6+7+1+10), scanner.Text())
|
||||
|
||||
// make sure child loggers do not share state
|
||||
old1 := old.WithValues("i am", "old1")
|
||||
old2 := old.WithName("old2")
|
||||
old1.Warning("warn")
|
||||
old2.Info("info")
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "created before mode change: warn" is="old" i am="old1" warning=true`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26+6+6+7+1+10+9), scanner.Text())
|
||||
require.True(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Equal(t, fmt.Sprintf(`I1121 23:37:26.953313%8d config_test.go:%d] "created before mode change/old2: info" is="old"`,
|
||||
pid, startLogLine+2+13+14+11+12+24+28+6+26+6+6+7+1+10+9+1), scanner.Text())
|
||||
|
||||
Trace("should not be logged", "for", "sure")
|
||||
require.Empty(t, buf.String())
|
||||
|
||||
Logr().V(klogLevelAll).Info("also should not be logged", "open", "close")
|
||||
require.Empty(t, buf.String())
|
||||
|
||||
require.False(t, scanner.Scan())
|
||||
require.NoError(t, scanner.Err())
|
||||
require.Empty(t, scanner.Text())
|
||||
require.Empty(t, buf.String())
|
||||
}
|
||||
|
||||
func TestValidateAndSetLogLevelGlobally(t *testing.T) {
|
||||
originalLogLevel := getKlogLevel()
|
||||
require.GreaterOrEqual(t, int(originalLogLevel), int(klog.Level(0)), "cannot get klog level")
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
level LogLevel
|
||||
wantLevel klog.Level
|
||||
wantEnabled []LogLevel
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "unset",
|
||||
wantLevel: 0,
|
||||
wantEnabled: []LogLevel{LevelWarning},
|
||||
},
|
||||
{
|
||||
name: "warning",
|
||||
level: LevelWarning,
|
||||
wantLevel: 0,
|
||||
wantEnabled: []LogLevel{LevelWarning},
|
||||
},
|
||||
{
|
||||
name: "info",
|
||||
level: LevelInfo,
|
||||
wantLevel: 2,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo},
|
||||
},
|
||||
{
|
||||
name: "debug",
|
||||
level: LevelDebug,
|
||||
wantLevel: 4,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo, LevelDebug},
|
||||
},
|
||||
{
|
||||
name: "trace",
|
||||
level: LevelTrace,
|
||||
wantLevel: 6,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo, LevelDebug, LevelTrace},
|
||||
},
|
||||
{
|
||||
name: "all",
|
||||
level: LevelAll,
|
||||
wantLevel: 108,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo, LevelDebug, LevelTrace, LevelAll},
|
||||
},
|
||||
{
|
||||
name: "invalid level",
|
||||
level: "panda",
|
||||
wantLevel: originalLogLevel,
|
||||
wantErr: errInvalidLogLevel.Error(),
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
tt := tt // capture range variable
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
defer func() {
|
||||
undoGlobalLogLevelChanges(t, originalLogLevel)
|
||||
}()
|
||||
|
||||
ctx, cancel := context.WithCancel(context.Background())
|
||||
t.Cleanup(cancel)
|
||||
err := ValidateAndSetLogLevelAndFormatGlobally(ctx, LogSpec{Level: tt.level})
|
||||
require.Equal(t, tt.wantErr, errString(err))
|
||||
require.Equal(t, tt.wantLevel, getKlogLevel())
|
||||
|
||||
if tt.wantEnabled != nil {
|
||||
allLevels := []LogLevel{LevelWarning, LevelInfo, LevelDebug, LevelTrace, LevelAll}
|
||||
for _, level := range allLevels {
|
||||
if contains(tt.wantEnabled, level) {
|
||||
require.Truef(t, Enabled(level), "wanted %q to be enabled", level)
|
||||
} else {
|
||||
require.False(t, Enabled(level), "did not want %q to be enabled", level)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
require.Equal(t, originalLogLevel, getKlogLevel())
|
||||
}
|
||||
|
||||
func contains(haystack []LogLevel, needle LogLevel) bool {
|
||||
for _, hay := range haystack {
|
||||
if hay == needle {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func errString(err error) string {
|
||||
if err == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
return err.Error()
|
||||
}
|
||||
|
||||
func undoGlobalLogLevelChanges(t *testing.T, originalLogLevel klog.Level) {
|
||||
t.Helper()
|
||||
_, err := logs.GlogSetter(strconv.Itoa(int(originalLogLevel)))
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
func getKlogLevel() klog.Level {
|
||||
// hack around klog not exposing a Get method
|
||||
for i := klog.Level(0); i < 256; i++ {
|
||||
if klog.V(i).Enabled() {
|
||||
continue
|
||||
}
|
||||
return i - 1
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
77 internal/plog/global.go Normal file
@ -0,0 +1,77 @@
|
||||
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package plog
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/url"
|
||||
"sync"
|
||||
|
||||
"github.com/go-logr/logr"
|
||||
"go.uber.org/zap"
|
||||
"k8s.io/component-base/logs"
|
||||
"k8s.io/klog/v2"
|
||||
)
|
||||
|
||||
// nolint: gochecknoglobals
|
||||
var (
|
||||
// note that these globals have no locks on purpose - they are expected to be set at init and then again after config parsing.
|
||||
globalLevel zap.AtomicLevel
|
||||
globalLogger logr.Logger
|
||||
globalFlush func()
|
||||
|
||||
// used as a temporary storage for a buffer per call of newLogr. see the init function below for more details.
|
||||
sinkMap sync.Map
|
||||
)
|
||||
|
||||
// nolint: gochecknoinits
|
||||
func init() {
|
||||
// make sure we always have a functional global logger
|
||||
globalLevel = zap.NewAtomicLevelAt(0) // log at the 0 verbosity level to start with, i.e. the "always" logs
|
||||
// use json encoding to start with
|
||||
// the context here is just used for test injection and thus can be ignored
|
||||
log, flush, err := newLogr(context.Background(), "json", 0)
|
||||
if err != nil {
|
||||
panic(err) // default logging config must always work
|
||||
}
|
||||
setGlobalLoggers(log, flush)
|
||||
|
||||
// this is a little crazy but zap's builder code does not allow us to directly specify what
|
||||
// writer we want to use as our log sink. to get around this limitation in tests, we use a
|
||||
// global map to temporarily hold the writer (the key is a random string that is generated
|
||||
// per invocation of newLogr). we register a fake "pinniped" scheme so that we can lookup
|
||||
// the writer via pinniped:///<per newLogr invocation random string>.
|
||||
if err := zap.RegisterSink("pinniped", func(u *url.URL) (zap.Sink, error) {
|
||||
value, ok := sinkMap.Load(u.Path)
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("key %q not in global sink", u.Path)
|
||||
}
|
||||
return value.(zap.Sink), nil
|
||||
}); err != nil {
|
||||
panic(err) // custom sink must always work
|
||||
}
|
||||
}
|
||||
|
||||
// Deprecated: Use New instead. This is meant for old code only.
|
||||
// New provides a more ergonomic API and correctly responds to global log config change.
|
||||
func Logr() logr.Logger {
|
||||
return globalLogger
|
||||
}
|
||||
|
||||
func Setup() func() {
|
||||
logs.InitLogs()
|
||||
return func() {
|
||||
logs.FlushLogs()
|
||||
globalFlush()
|
||||
}
|
||||
}
|
||||
|
||||
// setGlobalLoggers sets the plog and klog global loggers. it is *not* go routine safe.
|
||||
func setGlobalLoggers(log logr.Logger, flush func()) {
|
||||
// a contextual logger does its own level based enablement checks, which is true for all of our loggers
|
||||
klog.SetLoggerWithOptions(log, klog.ContextualLogger(true), klog.FlushLogger(flush))
|
||||
globalLogger = log
|
||||
globalFlush = flush
|
||||
}
|
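// A sketch of the expected lifecycle (hypothetical helper, not part of this change): an
// entrypoint installs the klog/zap plumbing once via Setup and flushes on the way out.
func exampleEntrypointLoggingSketch(run func()) {
	defer Setup()() // Setup returns the flush func, so defer the returned closure
	Always("starting up")
	run()
}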
@ -1,32 +0,0 @@
|
||||
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package plog
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"k8s.io/klog/v2"
|
||||
)
|
||||
|
||||
// KObj is (mostly) copied from klog - it is a standard way to represent a metav1.Object in logs.
|
||||
func KObj(obj klog.KMetadata) string {
|
||||
return fmt.Sprintf("%s/%s", obj.GetNamespace(), obj.GetName())
|
||||
}
|
||||
|
||||
func klogLevelForPlogLevel(plogLevel LogLevel) klog.Level {
|
||||
switch plogLevel {
|
||||
case LevelWarning:
|
||||
return klogLevelWarning // unset means minimal logs (Error and Warning)
|
||||
case LevelInfo:
|
||||
return klogLevelInfo
|
||||
case LevelDebug:
|
||||
return klogLevelDebug
|
||||
case LevelTrace:
|
||||
return klogLevelTrace
|
||||
case LevelAll:
|
||||
return klogLevelAll + 100 // make all really mean all
|
||||
default:
|
||||
return -1
|
||||
}
|
||||
}
|
@ -4,18 +4,18 @@
|
||||
package plog
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
|
||||
"k8s.io/component-base/logs"
|
||||
"go.uber.org/zap/zapcore"
|
||||
"k8s.io/klog/v2"
|
||||
|
||||
"go.pinniped.dev/internal/constable"
|
||||
)
|
||||
|
||||
// LogLevel is an enum that controls verbosity of logs.
|
||||
// Valid values in order of increasing verbosity are leaving it unset, info, debug, trace and all.
|
||||
type LogLevel string
|
||||
|
||||
func (l LogLevel) Enabled(_ zapcore.Level) bool {
|
||||
return Enabled(l) // this basically says "log if the global plog level is l or greater"
|
||||
}
|
||||
|
||||
const (
|
||||
// LevelWarning (i.e. leaving the log level unset) maps to klog log level 0.
|
||||
LevelWarning LogLevel = ""
|
||||
@ -27,33 +27,40 @@ const (
|
||||
LevelTrace LogLevel = "trace"
|
||||
// LevelAll maps to klog log level 100 (conceptually it is log level 8).
|
||||
LevelAll LogLevel = "all"
|
||||
|
||||
errInvalidLogLevel = constable.Error("invalid log level, valid choices are the empty string, info, debug, trace and all")
|
||||
)
|
||||
|
||||
var _ zapcore.LevelEnabler = LevelWarning
|
||||
|
||||
const (
|
||||
klogLevelWarning = iota * 2
|
||||
klogLevelInfo
|
||||
klogLevelDebug
|
||||
klogLevelTrace
|
||||
KlogLevelInfo
|
||||
KlogLevelDebug
|
||||
KlogLevelTrace
|
||||
klogLevelAll
|
||||
)
|
||||
|
||||
func ValidateAndSetLogLevelGlobally(level LogLevel) error {
|
||||
klogLevel := klogLevelForPlogLevel(level)
|
||||
if klogLevel < 0 {
|
||||
return errInvalidLogLevel
|
||||
}
|
||||
|
||||
if _, err := logs.GlogSetter(strconv.Itoa(int(klogLevel))); err != nil {
|
||||
panic(err) // programmer error
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// Enabled returns whether the provided plog level is enabled, i.e., whether print statements at the
|
||||
// provided level will show up.
|
||||
func Enabled(level LogLevel) bool {
|
||||
return klog.V(klogLevelForPlogLevel(level)).Enabled()
|
||||
l := klogLevelForPlogLevel(level)
|
||||
// check that both our global level and the klog global level agree that the plog level is enabled
|
||||
// klog levels are inverted when zap handles them
|
||||
return globalLevel.Enabled(zapcore.Level(-l)) && klog.V(l).Enabled()
|
||||
}
|
||||
|
||||
func klogLevelForPlogLevel(plogLevel LogLevel) klog.Level {
|
||||
switch plogLevel {
|
||||
case LevelWarning:
|
||||
return klogLevelWarning // unset means minimal logs (Error and Warning)
|
||||
case LevelInfo:
|
||||
return KlogLevelInfo
|
||||
case LevelDebug:
|
||||
return KlogLevelDebug
|
||||
case LevelTrace:
|
||||
return KlogLevelTrace
|
||||
case LevelAll:
|
||||
return klogLevelAll + 100 // make all really mean all
|
||||
default:
|
||||
return -1
|
||||
}
|
||||
}
|
||||
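// Illustrative only: Enabled is meant to guard expensive log construction so that the
// key/value set is only built when both the zap global level and klog agree it will be emitted.
func exampleTraceGuardSketch(buildExpensiveKVs func() []interface{}) {
	if Enabled(LevelTrace) {
		Trace("verbose details", buildExpensiveKVs()...)
	}
}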
|
@ -1,128 +0,0 @@
|
||||
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
package plog
|
||||
|
||||
import (
|
||||
"strconv"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/require"
|
||||
"k8s.io/component-base/logs"
|
||||
"k8s.io/klog/v2"
|
||||
)
|
||||
|
||||
func TestValidateAndSetLogLevelGlobally(t *testing.T) {
|
||||
originalLogLevel := getKlogLevel()
|
||||
require.GreaterOrEqual(t, int(originalLogLevel), int(klog.Level(0)), "cannot get klog level")
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
level LogLevel
|
||||
wantLevel klog.Level
|
||||
wantEnabled []LogLevel
|
||||
wantErr string
|
||||
}{
|
||||
{
|
||||
name: "unset",
|
||||
wantLevel: 0,
|
||||
wantEnabled: []LogLevel{LevelWarning},
|
||||
},
|
||||
{
|
||||
name: "warning",
|
||||
level: LevelWarning,
|
||||
wantLevel: 0,
|
||||
wantEnabled: []LogLevel{LevelWarning},
|
||||
},
|
||||
{
|
||||
name: "info",
|
||||
level: LevelInfo,
|
||||
wantLevel: 2,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo},
|
||||
},
|
||||
{
|
||||
name: "debug",
|
||||
level: LevelDebug,
|
||||
wantLevel: 4,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo, LevelDebug},
|
||||
},
|
||||
{
|
||||
name: "trace",
|
||||
level: LevelTrace,
|
||||
wantLevel: 6,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo, LevelDebug, LevelTrace},
|
||||
},
|
||||
{
|
||||
name: "all",
|
||||
level: LevelAll,
|
||||
wantLevel: 108,
|
||||
wantEnabled: []LogLevel{LevelWarning, LevelInfo, LevelDebug, LevelTrace, LevelAll},
|
||||
},
|
||||
{
|
||||
name: "invalid level",
|
||||
level: "panda",
|
||||
wantLevel: originalLogLevel,
|
||||
wantErr: errInvalidLogLevel.Error(),
|
||||
},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
tt := tt // capture range variable
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
defer func() {
|
||||
undoGlobalLogLevelChanges(t, originalLogLevel)
|
||||
}()
|
||||
|
||||
err := ValidateAndSetLogLevelGlobally(tt.level)
|
||||
require.Equal(t, tt.wantErr, errString(err))
|
||||
require.Equal(t, tt.wantLevel, getKlogLevel())
|
||||
|
||||
if tt.wantEnabled != nil {
|
||||
allLevels := []LogLevel{LevelWarning, LevelInfo, LevelDebug, LevelTrace, LevelAll}
|
||||
for _, level := range allLevels {
|
||||
if contains(tt.wantEnabled, level) {
|
||||
require.Truef(t, Enabled(level), "wanted %q to be enabled", level)
|
||||
} else {
|
||||
require.False(t, Enabled(level), "did not want %q to be enabled", level)
|
||||
}
|
||||
}
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
require.Equal(t, originalLogLevel, getKlogLevel())
|
||||
}
|
||||
|
||||
func contains(haystack []LogLevel, needle LogLevel) bool {
|
||||
for _, hay := range haystack {
|
||||
if hay == needle {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func errString(err error) string {
|
||||
if err == nil {
|
||||
return ""
|
||||
}
|
||||
|
||||
return err.Error()
|
||||
}
|
||||
|
||||
func undoGlobalLogLevelChanges(t *testing.T, originalLogLevel klog.Level) {
|
||||
t.Helper()
|
||||
_, err := logs.GlogSetter(strconv.Itoa(int(originalLogLevel)))
|
||||
require.NoError(t, err)
|
||||
}
|
||||
|
||||
func getKlogLevel() klog.Level {
|
||||
// hack around klog not exposing a Get method
|
||||
for i := klog.Level(0); i < 256; i++ {
|
||||
if klog.V(i).Enabled() {
|
||||
continue
|
||||
}
|
||||
return i - 1
|
||||
}
|
||||
|
||||
return -1
|
||||
}
|
@ -1,7 +1,7 @@
|
||||
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
|
||||
// SPDX-License-Identifier: Apache-2.0
|
||||
|
||||
// Package plog implements a thin layer over klog to help enforce pinniped's logging convention.
|
||||
// Package plog implements a thin layer over logr to help enforce pinniped's logging convention.
|
||||
// Logs are always structured as a constant message with key and value pairs of related metadata.
|
||||
//
|
||||
// The logging levels in order of increasing verbosity are:
|
||||
@ -26,10 +26,18 @@
|
||||
// act of desperation to determine why the system is broken.
|
||||
package plog
|
||||
|
||||
import "k8s.io/klog/v2"
|
||||
import (
|
||||
"os"
|
||||
|
||||
const errorKey = "error"
|
||||
"github.com/go-logr/logr"
|
||||
)
|
||||
|
||||
const errorKey = "error" // this matches zapr's default for .Error calls (which is asserted via tests)
|
||||
|
||||
// Logger implements the plog logging convention described above. The global functions in this package
|
||||
// such as Info should be used when one does not intend to write tests assertions for specific log messages.
|
||||
// If test assertions are desired, Logger should be passed in as an input. New should be used as the
|
||||
// production implementation and TestLogger should be used to write test assertions.
|
||||
type Logger interface {
|
||||
Error(msg string, err error, keysAndValues ...interface{})
|
||||
Warning(msg string, keysAndValues ...interface{})
|
||||
@ -41,100 +49,155 @@ type Logger interface {
|
||||
Trace(msg string, keysAndValues ...interface{})
|
||||
TraceErr(msg string, err error, keysAndValues ...interface{})
|
||||
All(msg string, keysAndValues ...interface{})
|
||||
Always(msg string, keysAndValues ...interface{})
|
||||
WithValues(keysAndValues ...interface{}) Logger
|
||||
WithName(name string) Logger
|
||||
|
||||
// does not include Fatal on purpose because that is not a method you should be using
|
||||
|
||||
// for internal and test use only
|
||||
withDepth(d int) Logger
|
||||
withLogrMod(mod func(logr.Logger) logr.Logger) Logger
|
||||
}
|
||||
|
||||
// MinLogger is the overlap between Logger and logr.Logger.
|
||||
type MinLogger interface {
|
||||
Info(msg string, keysAndValues ...interface{})
|
||||
}
|
||||
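// A sketch of why MinLogger exists (hypothetical function): code that only needs Info can
// accept either a plog Logger or a logr.Logger without caring which implementation it gets.
func logStartupSketch(log MinLogger, version string) {
	log.Info("starting", "version", version)
}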
|
||||
var _ Logger = pLogger{}
|
||||
var _, _, _ MinLogger = pLogger{}, logr.Logger{}, Logger(nil)
|
||||
|
||||
type pLogger struct {
|
||||
prefix string
|
||||
mods []func(logr.Logger) logr.Logger
|
||||
depth int
|
||||
}
|
||||
|
||||
func New(prefix string) Logger {
|
||||
return &pLogger{
|
||||
depth: 0,
|
||||
prefix: prefix,
|
||||
}
|
||||
func New() Logger {
|
||||
return pLogger{}
|
||||
}
|
||||
|
||||
func (p *pLogger) Error(msg string, err error, keysAndValues ...interface{}) {
|
||||
klog.ErrorSDepth(p.depth+1, err, p.prefix+msg, keysAndValues...)
|
||||
func (p pLogger) Error(msg string, err error, keysAndValues ...interface{}) {
|
||||
p.logr().WithCallDepth(p.depth+1).Error(err, msg, keysAndValues...)
|
||||
}
|
||||
|
||||
func (p *pLogger) warningDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
func (p pLogger) warningDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
if p.logr().V(klogLevelWarning).Enabled() {
|
||||
// klog's structured logging has no concept of a warning (i.e. no WarningS function)
|
||||
// Thus we use info at log level zero as a proxy
|
||||
// klog's info logs have an I prefix and its warning logs have a W prefix
|
||||
// Since we lose the W prefix by using InfoS, just add a key to make these easier to find
|
||||
keysAndValues = append([]interface{}{"warning", "true"}, keysAndValues...)
|
||||
if klog.V(klogLevelWarning).Enabled() {
|
||||
klog.InfoSDepth(depth+1, p.prefix+msg, keysAndValues...)
|
||||
keysAndValues = append([]interface{}{"warning", true}, keysAndValues...)
|
||||
p.logr().V(klogLevelWarning).WithCallDepth(depth+1).Info(msg, keysAndValues...)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *pLogger) Warning(msg string, keysAndValues ...interface{}) {
|
||||
func (p pLogger) Warning(msg string, keysAndValues ...interface{}) {
|
||||
p.warningDepth(msg, p.depth+1, keysAndValues...)
|
||||
}
|
||||
|
||||
// Use WarningErr to issue a Warning message with an error object as part of the message.
|
||||
func (p *pLogger) WarningErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
func (p pLogger) WarningErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
p.warningDepth(msg, p.depth+1, append([]interface{}{errorKey, err}, keysAndValues...)...)
|
||||
}
|
||||
|
||||
func (p *pLogger) infoDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
if klog.V(klogLevelInfo).Enabled() {
|
||||
klog.InfoSDepth(depth+1, p.prefix+msg, keysAndValues...)
|
||||
func (p pLogger) infoDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
if p.logr().V(KlogLevelInfo).Enabled() {
|
||||
p.logr().V(KlogLevelInfo).WithCallDepth(depth+1).Info(msg, keysAndValues...)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *pLogger) Info(msg string, keysAndValues ...interface{}) {
|
||||
func (p pLogger) Info(msg string, keysAndValues ...interface{}) {
|
||||
p.infoDepth(msg, p.depth+1, keysAndValues...)
|
||||
}
|
||||
|
||||
// Use InfoErr to log an expected error, e.g. validation failure of an http parameter.
|
||||
func (p *pLogger) InfoErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
func (p pLogger) InfoErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
p.infoDepth(msg, p.depth+1, append([]interface{}{errorKey, err}, keysAndValues...)...)
|
||||
}
|
||||
|
||||
func (p *pLogger) debugDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
if klog.V(klogLevelDebug).Enabled() {
|
||||
klog.InfoSDepth(depth+1, p.prefix+msg, keysAndValues...)
|
||||
func (p pLogger) debugDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
if p.logr().V(KlogLevelDebug).Enabled() {
|
||||
p.logr().V(KlogLevelDebug).WithCallDepth(depth+1).Info(msg, keysAndValues...)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *pLogger) Debug(msg string, keysAndValues ...interface{}) {
|
||||
func (p pLogger) Debug(msg string, keysAndValues ...interface{}) {
|
||||
p.debugDepth(msg, p.depth+1, keysAndValues...)
|
||||
}
|
||||
|
||||
// Use DebugErr to issue a Debug message with an error object as part of the message.
|
||||
func (p *pLogger) DebugErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
func (p pLogger) DebugErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
p.debugDepth(msg, p.depth+1, append([]interface{}{errorKey, err}, keysAndValues...)...)
|
||||
}
|
||||
|
||||
func (p *pLogger) traceDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
if klog.V(klogLevelTrace).Enabled() {
|
||||
klog.InfoSDepth(depth+1, p.prefix+msg, keysAndValues...)
|
||||
func (p pLogger) traceDepth(msg string, depth int, keysAndValues ...interface{}) {
|
||||
if p.logr().V(KlogLevelTrace).Enabled() {
|
||||
p.logr().V(KlogLevelTrace).WithCallDepth(depth+1).Info(msg, keysAndValues...)
|
||||
}
|
||||
}
|
||||
|
||||
func (p *pLogger) Trace(msg string, keysAndValues ...interface{}) {
|
||||
func (p pLogger) Trace(msg string, keysAndValues ...interface{}) {
|
||||
p.traceDepth(msg, p.depth+1, keysAndValues...)
|
||||
}
|
||||
|
||||
// Use TraceErr to issue a Trace message with an error object as part of the message.
|
||||
func (p *pLogger) TraceErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
func (p pLogger) TraceErr(msg string, err error, keysAndValues ...interface{}) {
|
||||
p.traceDepth(msg, p.depth+1, append([]interface{}{errorKey, err}, keysAndValues...)...)
|
||||
}
|
||||
|
||||
func (p *pLogger) All(msg string, keysAndValues ...interface{}) {
|
||||
if klog.V(klogLevelAll).Enabled() {
|
||||
klog.InfoSDepth(p.depth+1, p.prefix+msg, keysAndValues...)
|
||||
func (p pLogger) All(msg string, keysAndValues ...interface{}) {
|
||||
if p.logr().V(klogLevelAll).Enabled() {
|
||||
p.logr().V(klogLevelAll).WithCallDepth(p.depth+1).Info(msg, keysAndValues...)
|
||||
}
|
||||
}
|
||||
|
||||
var logger Logger = &pLogger{ //nolint:gochecknoglobals
|
||||
depth: 1,
|
||||
func (p pLogger) Always(msg string, keysAndValues ...interface{}) {
|
||||
p.logr().WithCallDepth(p.depth+1).Info(msg, keysAndValues...)
|
||||
}
|
||||
|
||||
// Use Error to log an unexpected system error.
|
||||
func (p pLogger) WithValues(keysAndValues ...interface{}) Logger {
|
||||
if len(keysAndValues) == 0 {
|
||||
return p
|
||||
}
|
||||
|
||||
return p.withLogrMod(func(l logr.Logger) logr.Logger {
|
||||
return l.WithValues(keysAndValues...)
|
||||
})
|
||||
}
|
||||
|
||||
func (p pLogger) WithName(name string) Logger {
if len(name) == 0 {
return p
}

return p.withLogrMod(func(l logr.Logger) logr.Logger {
return l.WithName(name)
})
}

func (p pLogger) withDepth(d int) Logger {
out := p
out.depth += d // out is a copy so this does not mutate p
return out
}

func (p pLogger) withLogrMod(mod func(logr.Logger) logr.Logger) Logger {
out := p // make a copy and carefully avoid mutating the mods slice
mods := make([]func(logr.Logger) logr.Logger, 0, len(out.mods)+1)
mods = append(mods, out.mods...)
mods = append(mods, mod)
out.mods = mods
return out
}

func (p pLogger) logr() logr.Logger {
l := Logr() // grab the current global logger and its current config
for _, mod := range p.mods {
mod := mod
l = mod(l) // and then update it with all modifications
}
return l // this logger is guaranteed to have the latest config and all modifications
}

var logger = New().withDepth(1) //nolint:gochecknoglobals

func Error(msg string, err error, keysAndValues ...interface{}) {
logger.Error(msg, err, keysAndValues...)
}
@ -143,7 +206,6 @@ func Warning(msg string, keysAndValues ...interface{}) {
logger.Warning(msg, keysAndValues...)
}

// Use WarningErr to issue a Warning message with an error object as part of the message.
func WarningErr(msg string, err error, keysAndValues ...interface{}) {
logger.WarningErr(msg, err, keysAndValues...)
}
@ -152,7 +214,6 @@ func Info(msg string, keysAndValues ...interface{}) {
logger.Info(msg, keysAndValues...)
}

// Use InfoErr to log an expected error, e.g. validation failure of an http parameter.
func InfoErr(msg string, err error, keysAndValues ...interface{}) {
logger.InfoErr(msg, err, keysAndValues...)
}
@ -161,7 +222,6 @@ func Debug(msg string, keysAndValues ...interface{}) {
logger.Debug(msg, keysAndValues...)
}

// Use DebugErr to issue a Debug message with an error object as part of the message.
func DebugErr(msg string, err error, keysAndValues ...interface{}) {
logger.DebugErr(msg, err, keysAndValues...)
}
@ -170,7 +230,6 @@ func Trace(msg string, keysAndValues ...interface{}) {
logger.Trace(msg, keysAndValues...)
}

// Use TraceErr to issue a Trace message with an error object as part of the message.
func TraceErr(msg string, err error, keysAndValues ...interface{}) {
logger.TraceErr(msg, err, keysAndValues...)
}
@ -178,3 +237,23 @@ func TraceErr(msg string, err error, keysAndValues ...interface{}) {
func All(msg string, keysAndValues ...interface{}) {
logger.All(msg, keysAndValues...)
}

func Always(msg string, keysAndValues ...interface{}) {
logger.Always(msg, keysAndValues...)
}

func WithValues(keysAndValues ...interface{}) Logger {
// this looks weird but it is the same as New().WithValues(keysAndValues...) because it returns a new logger rooted at the call site
return logger.withDepth(-1).WithValues(keysAndValues...)
}

func WithName(name string) Logger {
// this looks weird but it is the same as New().WithName(name) because it returns a new logger rooted at the call site
return logger.withDepth(-1).WithName(name)
}

func Fatal(err error, keysAndValues ...interface{}) {
logger.Error("unrecoverable error encountered", err, keysAndValues...)
globalFlush()
os.Exit(1)
}
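For orientation, a minimal sketch of how another package might use these package-level helpers; the messages and key/value pairs below are invented for illustration, and it assumes the global logger has been wired up via Setup as done elsewhere in this change:

package example

import (
    "fmt"

    "go.pinniped.dev/internal/plog"
)

func run() {
    defer plog.Setup()() // install the global logger and flush it on exit

    plog.Info("starting example", "version", "v0.0.1")
    plog.Debug("verbose detail", "attempt", 1)

    logger := plog.WithName("example").WithValues("requestID", "abc123")
    logger.Warning("something looks off", "retries", 3)

    if err := fmt.Errorf("example failure"); err != nil {
        plog.Fatal(err, "stage", "shutdown") // logs the error, flushes, then calls os.Exit(1)
    }
}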
366
internal/plog/plog_test.go
Normal file
@ -0,0 +1,366 @@
// Copyright 2022 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

package plog

import (
"bytes"
"fmt"
"strings"
"testing"
"time"

"github.com/stretchr/testify/require"
)

func TestPlog(t *testing.T) {
tests := []struct {
name string
run func(Logger)
want string
}{
{
name: "basic",
run: testAllPlogMethods,
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"always","panda":2}
`,
},
{
name: "with values",
run: func(l Logger) {
testAllPlogMethods(l.WithValues("hi", 42))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"e","hi":42,"panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"w","hi":42,"warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"we","hi":42,"warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"i","hi":42,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"ie","hi":42,"error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"d","hi":42,"panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"de","hi":42,"error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"t","hi":42,"panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"te","hi":42,"error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"all","hi":42,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"always","hi":42,"panda":2}
`,
},
{
name: "with values conflict", // duplicate key is included twice ...
run: func(l Logger) {
testAllPlogMethods(l.WithValues("panda", false))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"e","panda":false,"panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"w","panda":false,"warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"we","panda":false,"warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"i","panda":false,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"ie","panda":false,"error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"d","panda":false,"panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"de","panda":false,"error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"t","panda":false,"panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"te","panda":false,"error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"all","panda":false,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"always","panda":false,"panda":2}
`,
},
{
name: "with values nested",
run: func(l Logger) {
testAllPlogMethods(l.WithValues("hi", 42).WithValues("not", time.Hour))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"e","hi":42,"not":"1h0m0s","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"w","hi":42,"not":"1h0m0s","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"we","hi":42,"not":"1h0m0s","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"i","hi":42,"not":"1h0m0s","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"ie","hi":42,"not":"1h0m0s","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"d","hi":42,"not":"1h0m0s","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"de","hi":42,"not":"1h0m0s","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"t","hi":42,"not":"1h0m0s","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"te","hi":42,"not":"1h0m0s","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"all","hi":42,"not":"1h0m0s","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"always","hi":42,"not":"1h0m0s","panda":2}
`,
},
{
name: "with name",
run: func(l Logger) {
testAllPlogMethods(l.WithName("yoyo"))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"always","panda":2}
`,
},
{
name: "with name nested",
run: func(l Logger) {
testAllPlogMethods(l.WithName("yoyo").WithName("gold"))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","logger":"yoyo.gold","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"always","panda":2}
`,
},
{
name: "depth 3",
run: func(l Logger) {
testAllPlogMethods(l.withDepth(3))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"testing/testing.go:<line>$testing.tRunner","message":"always","panda":2}
`,
},
{
name: "depth 2",
run: func(l Logger) {
testAllPlogMethods(l.withDepth(2))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func15","message":"always","panda":2}
`,
},
{
name: "depth 1",
run: func(l Logger) {
testAllPlogMethods(l.withDepth(1))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func8","message":"always","panda":2}
`,
},
{
name: "depth 0",
run: func(l Logger) {
testAllPlogMethods(l.withDepth(0))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.testAllPlogMethods","message":"always","panda":2}
`,
},
{
name: "depth -1",
run: func(l Logger) {
testAllPlogMethods(l.withDepth(-1))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Error","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Warning","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.WarningErr","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Info","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.InfoErr","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Debug","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.DebugErr","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Trace","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.TraceErr","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.All","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Always","message":"always","panda":2}
`,
},
{
name: "depth -2",
run: func(l Logger) {
testAllPlogMethods(l.withDepth(-2))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Error","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.warningDepth","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.warningDepth","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.infoDepth","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.infoDepth","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.debugDepth","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.debugDepth","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.traceDepth","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.traceDepth","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"always","panda":2}
`,
},
{
name: "depth -3",
run: func(l Logger) {
testAllPlogMethods(l.withDepth(-3))
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"zapr@v1.2.3/zapr.go:<line>$zapr.(*zapLogger).Error","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"logr@v1.2.3/logr.go:<line>$logr.Logger.Info","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"zapr@v1.2.3/zapr.go:<line>$zapr.(*zapLogger).Info","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"zapr@v1.2.3/zapr.go:<line>$zapr.(*zapLogger).Info","message":"always","panda":2}
`,
},
{
name: "closure",
run: func(l Logger) {
func() {
func() {
testErr := fmt.Errorf("some err")

l.Error("e", testErr, "panda", 2)
l.Warning("w", "panda", 2)
l.WarningErr("we", testErr, "panda", 2)
l.Info("i", "panda", 2)
l.InfoErr("ie", testErr, "panda", 2)
l.Debug("d", "panda", 2)
l.DebugErr("de", testErr, "panda", 2)
l.Trace("t", "panda", 2)
l.TraceErr("te", testErr, "panda", 2)
l.All("all", "panda", 2)
l.Always("always", "panda", 2)
}()
}()
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog_test.go:<line>$plog.TestPlog.func13.1.1","message":"always","panda":2}
`,
},
{
name: "closure depth -1",
run: func(l Logger) {
func() {
func() {
testErr := fmt.Errorf("some err")

l = l.withDepth(-1)
l.Error("e", testErr, "panda", 2)
l.Warning("w", "panda", 2)
l.WarningErr("we", testErr, "panda", 2)
l.Info("i", "panda", 2)
l.InfoErr("ie", testErr, "panda", 2)
l.Debug("d", "panda", 2)
l.DebugErr("de", testErr, "panda", 2)
l.Trace("t", "panda", 2)
l.TraceErr("te", testErr, "panda", 2)
l.All("all", "panda", 2)
l.Always("always", "panda", 2)
}()
}()
},
want: `
{"level":"error","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Error","message":"e","panda":2,"error":"some err"}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Warning","message":"w","warning":true,"panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.WarningErr","message":"we","warning":true,"error":"some err","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Info","message":"i","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.InfoErr","message":"ie","error":"some err","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Debug","message":"d","panda":2}
{"level":"debug","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.DebugErr","message":"de","error":"some err","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Trace","message":"t","panda":2}
{"level":"trace","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.TraceErr","message":"te","error":"some err","panda":2}
{"level":"all","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.All","message":"all","panda":2}
{"level":"info","timestamp":"2099-08-08T13:57:36.123456Z","caller":"plog/plog.go:<line>$plog.pLogger.Always","message":"always","panda":2}
`,
},
}
for _, tt := range tests {
tt := tt
t.Run(tt.name, func(t *testing.T) {
t.Parallel()

var log bytes.Buffer
tt.run(TestLogger(t, &log))

require.Equal(t, strings.TrimSpace(tt.want), strings.TrimSpace(log.String()))
})
}
}

func testAllPlogMethods(l Logger) {
testErr := fmt.Errorf("some err")

l.Error("e", testErr, "panda", 2)
l.Warning("w", "panda", 2)
l.WarningErr("we", testErr, "panda", 2)
l.Info("i", "panda", 2)
l.InfoErr("ie", testErr, "panda", 2)
l.Debug("d", "panda", 2)
l.DebugErr("de", testErr, "panda", 2)
l.Trace("t", "panda", 2)
l.TraceErr("te", testErr, "panda", 2)
l.All("all", "panda", 2)
l.Always("always", "panda", 2)
}
120
internal/plog/testing.go
Normal file
@ -0,0 +1,120 @@
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

package plog

import (
"context"
"io"
"math"
"strings"
"testing"
"time"

"github.com/go-logr/logr"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
"k8s.io/utils/clock"
clocktesting "k8s.io/utils/clock/testing"
)

// contextKey type is unexported to prevent collisions.
type contextKey int

const zapOverridesKey contextKey = iota

func TestZapOverrides(ctx context.Context, t *testing.T, w io.Writer, f func(*zap.Config), opts ...zap.Option) context.Context {
t.Helper() // discourage use outside of tests

overrides := &testOverrides{
t: t,
w: w,
f: f,
opts: opts,
}
return context.WithValue(ctx, zapOverridesKey, overrides)
}

func TestLogger(t *testing.T, w io.Writer) Logger {
t.Helper()

return New().withLogrMod(func(l logr.Logger) logr.Logger {
return l.WithSink(TestZapr(t, w).GetSink())
})
}
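As a usage note, a test in another package could capture plog output deterministically with TestLogger along these lines; the test name and assertions are illustrative only:

package example

import (
    "bytes"
    "testing"

    "github.com/stretchr/testify/require"

    "go.pinniped.dev/internal/plog"
)

func TestExampleLogging(t *testing.T) {
    var buf bytes.Buffer
    logger := plog.TestLogger(t, &buf) // JSON output with a fixed clock and stable caller formatting

    logger.Info("did something", "count", 1)

    require.Contains(t, buf.String(), `"message":"did something"`)
    require.Contains(t, buf.String(), `"count":1`)
}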

func TestZapr(t *testing.T, w io.Writer) logr.Logger {
t.Helper()

now, err := time.Parse(time.RFC3339Nano, "2099-08-08T13:57:36.123456789Z")
require.NoError(t, err)

ctx, cancel := context.WithCancel(context.Background())
t.Cleanup(cancel)

ctx = TestZapOverrides(ctx, t, w,
func(config *zap.Config) {
config.Level = zap.NewAtomicLevelAt(math.MinInt8) // log everything during tests

// make test assertions less painful to write while keeping them as close to the real thing as possible
config.EncoderConfig.EncodeCaller = func(caller zapcore.EntryCaller, enc zapcore.PrimitiveArrayEncoder) {
trimmed := caller.TrimmedPath()
if idx := strings.LastIndexByte(trimmed, ':'); idx != -1 {
trimmed = trimmed[:idx+1] + "<line>"
}
enc.AppendString(trimmed + funcEncoder(caller))
}
},
zap.WithClock(ZapClock(clocktesting.NewFakeClock(now))), // have the clock be static during tests
zap.AddStacktrace(nopLevelEnabler{}), // do not log stacktraces
)

// there is no buffering so we can ignore flush
zl, _, err := newLogr(ctx, "json", 0)
require.NoError(t, err)

return zl
}

var _ zapcore.Clock = &clockAdapter{}

type clockAdapter struct {
clock clock.Clock
}

func (c *clockAdapter) Now() time.Time {
return c.clock.Now()
}

func (c *clockAdapter) NewTicker(duration time.Duration) *time.Ticker {
return &time.Ticker{C: c.clock.Tick(duration)}
}

func ZapClock(c clock.Clock) zapcore.Clock {
return &clockAdapter{clock: c}
}

var _ zap.Sink = nopCloserSink{}

type nopCloserSink struct{ zapcore.WriteSyncer }

func (nopCloserSink) Close() error { return nil }

// newSink returns a wrapper around the input writer that is safe for concurrent use.
func newSink(w io.Writer) zap.Sink {
return nopCloserSink{WriteSyncer: zapcore.Lock(zapcore.AddSync(w))}
}

var _ zapcore.LevelEnabler = nopLevelEnabler{}

type nopLevelEnabler struct{}

func (nopLevelEnabler) Enabled(_ zapcore.Level) bool { return false }

type testOverrides struct {
t *testing.T
w io.Writer
f func(*zap.Config)
opts []zap.Option
}
187
internal/plog/zap.go
Normal file
@ -0,0 +1,187 @@
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

package plog

import (
"context"
"encoding/base64"
"fmt"
"io"
"os"
"strings"
"time"

"github.com/go-logr/logr"
"github.com/go-logr/zapr"
"github.com/stretchr/testify/require"
"go.uber.org/zap"
"go.uber.org/zap/zapcore"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/util/duration"
"k8s.io/apimachinery/pkg/util/rand"
"k8s.io/klog/v2"
"k8s.io/klog/v2/textlogger"
)

func newLogr(ctx context.Context, encoding string, klogLevel klog.Level) (logr.Logger, func(), error) {
if encoding == "text" {
var w io.Writer = os.Stderr
flush := func() { _ = os.Stderr.Sync() }

// allow tests to override klog config (but cheat and re-use the zap override key)
if overrides, ok := ctx.Value(zapOverridesKey).(*testOverrides); ok {
if overrides.w != nil {
w = newSink(overrides.w) // make sure the value is safe for concurrent use
flush = func() {}
}
}

return textlogger.NewLogger(textlogger.NewConfig(textlogger.Verbosity(int(klogLevel)), textlogger.Output(w))), flush, nil
}

path := "stderr" // this is how zap refers to os.Stderr
f := func(config *zap.Config) {
if encoding == "console" {
config.EncoderConfig.LevelKey = zapcore.OmitKey
config.EncoderConfig.EncodeCaller = zapcore.ShortCallerEncoder
config.EncoderConfig.EncodeTime = humanTimeEncoder
config.EncoderConfig.EncodeDuration = humanDurationEncoder
}
}
var opts []zap.Option

// allow tests to override zap config
if overrides, ok := ctx.Value(zapOverridesKey).(*testOverrides); ok {
if overrides.w != nil {
// use a per invocation random string as the key into the global map
testKey := "/" + base64.RawURLEncoding.EncodeToString([]byte(rand.String(32)))

// tell zap to use our custom sink registry to find the writer
path = "pinniped://" + testKey

// the registry may be called multiple times so make sure the value is safe for concurrent use
sink := newSink(overrides.w)

// store the test's buffer where we can find it globally
actual, loaded := sinkMap.LoadOrStore(testKey, sink)
require.False(overrides.t, loaded)
require.Equal(overrides.t, sink, actual)

defer func() {
// delete buffer from the global map to prevent a memory leak
value, loaded := sinkMap.LoadAndDelete(testKey)
require.True(overrides.t, loaded)
require.Equal(overrides.t, sink, value)
}()
}
if overrides.f != nil {
f = overrides.f
}
if overrides.opts != nil {
opts = overrides.opts
}
}

// when using the trace or all log levels, an error log will contain the full stack.
// this is too noisy for regular use because things like leader election conflicts
// result in transient errors and we do not want all of that noise in the logs.
// this check is performed dynamically on the global log level.
return newZapr(globalLevel, LevelTrace, encoding, path, f, opts...)
}

func newZapr(level zap.AtomicLevel, addStack zapcore.LevelEnabler, encoding, path string, f func(config *zap.Config), opts ...zap.Option) (logr.Logger, func(), error) {
opts = append([]zap.Option{zap.AddStacktrace(addStack)}, opts...)

config := zap.Config{
Level: level,
Development: false,
DisableCaller: false,
DisableStacktrace: true, // handled via the AddStacktrace call above
Sampling: nil, // keep all logs for now
Encoding: encoding,
EncoderConfig: zapcore.EncoderConfig{
MessageKey: "message",
LevelKey: "level",
TimeKey: "timestamp",
NameKey: "logger",
CallerKey: "caller",
FunctionKey: zapcore.OmitKey, // included in caller
StacktraceKey: "stacktrace",
SkipLineEnding: false,
LineEnding: zapcore.DefaultLineEnding,
EncodeLevel: levelEncoder,
// human-readable and machine parsable with microsecond precision (same as klog, kube audit event, etc)
EncodeTime: zapcore.TimeEncoderOfLayout(metav1.RFC3339Micro),
EncodeDuration: zapcore.StringDurationEncoder,
EncodeCaller: callerEncoder,
EncodeName: nil,
NewReflectedEncoder: nil,
ConsoleSeparator: " ",
},
OutputPaths: []string{path},
ErrorOutputPaths: []string{path},
InitialFields: nil,
}

f(&config)

log, err := config.Build(opts...)
if err != nil {
return logr.Logger{}, nil, fmt.Errorf("failed to build zap logger: %w", err)
}

return zapr.NewLogger(log), func() { _ = log.Sync() }, nil
}

func levelEncoder(l zapcore.Level, enc zapcore.PrimitiveArrayEncoder) {
plogLevel := zapLevelToPlogLevel(l)

if len(plogLevel) == 0 {
return // this tells zap that it should handle encoding the level itself because we do not know the mapping
}

enc.AppendString(string(plogLevel))
}

func zapLevelToPlogLevel(l zapcore.Level) LogLevel {
if l > 0 {
// best effort mapping, the zap levels do not really translate to klog
// but this is correct for "error" level which is all we need for logr
return LogLevel(l.String())
}

// klog levels are inverted when zap handles them
switch {
case -l >= klogLevelAll:
return LevelAll
case -l >= KlogLevelTrace:
return LevelTrace
case -l >= KlogLevelDebug:
return LevelDebug
case -l >= KlogLevelInfo:
return LevelInfo
default:
return "" // warning is handled via a custom key since klog level 0 is ambiguous
}
}
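To make the inversion concrete, a rough sketch of the mapping as exercised from logr; the concrete constant value is an assumption based on how KlogLevelDebug replaces the old debugLogLevel = 4 elsewhere in this change:

func exampleLevelMapping() { // illustrative only, not part of the package
    // A klog-style call such as Logr().V(KlogLevelDebug).Info("msg") reaches the zap core
    // as a negative level, so (assuming KlogLevelDebug == 4):
    fmt.Println(zapLevelToPlogLevel(zapcore.Level(-4)))  // LevelDebug — logr V-levels arrive as negative zap levels
    fmt.Println(zapLevelToPlogLevel(zapcore.ErrorLevel)) // "error" — positive zap levels fall through to l.String()
    fmt.Println(zapLevelToPlogLevel(zapcore.Level(0)))   // "" — warning is marked with a custom key instead
}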

func callerEncoder(caller zapcore.EntryCaller, enc zapcore.PrimitiveArrayEncoder) {
enc.AppendString(caller.String() + funcEncoder(caller))
}

func funcEncoder(caller zapcore.EntryCaller) string {
funcName := caller.Function
if idx := strings.LastIndexByte(funcName, '/'); idx != -1 {
funcName = funcName[idx+1:] // keep everything after the last /
}
return "$" + funcName
}

func humanDurationEncoder(d time.Duration, enc zapcore.PrimitiveArrayEncoder) {
enc.AppendString(duration.HumanDuration(d))
}

func humanTimeEncoder(t time.Time, enc zapcore.PrimitiveArrayEncoder) {
enc.AppendString(t.Local().Format(time.RFC1123))
}
@ -71,7 +71,7 @@ func TestCreate(t *testing.T) {
it.Before(func() {
r = require.New(t)
ctrl = gomock.NewController(t)
logger = testutil.NewTranscriptLogger(t)
logger = testutil.NewTranscriptLogger(t) // nolint: staticcheck // old test with lots of log statements
klog.SetLogger(logr.New(logger)) // this is unfortunately a global logger, so can't run these tests in parallel :(
})

@ -22,14 +22,12 @@ import (
"github.com/joshlf/go-acl"
appsv1 "k8s.io/api/apps/v1"
corev1 "k8s.io/api/core/v1"
apimachineryversion "k8s.io/apimachinery/pkg/version"
genericapifilters "k8s.io/apiserver/pkg/endpoints/filters"
kubeinformers "k8s.io/client-go/informers"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/pkg/version"
"k8s.io/client-go/rest"
"k8s.io/component-base/logs"
"k8s.io/klog/v2"
"k8s.io/klog/v2/klogr"
"k8s.io/utils/clock"

configv1alpha1 "go.pinniped.dev/generated/latest/apis/supervisor/config/v1alpha1"
@ -272,7 +270,7 @@ func prepareControllers(
pinnipedClient,
pinnipedInformers.IDP().V1alpha1().OIDCIdentityProviders(),
secretInformer,
klogr.New(),
plog.Logr(), // nolint: staticcheck // old controller with lots of log statements
controllerlib.WithInformer,
),
singletonWorker).
@ -315,7 +313,7 @@ func startControllers(ctx context.Context, shutdown *sync.WaitGroup, buildContro
}

//nolint:funlen
func runSupervisor(podInfo *downward.PodInfo, cfg *supervisor.Config) error {
func runSupervisor(ctx context.Context, podInfo *downward.PodInfo, cfg *supervisor.Config) error {
serverInstallationNamespace := podInfo.Namespace

dref, supervisorDeployment, supervisorPod, err := deploymentref.New(podInfo)
@ -389,7 +387,6 @@ func runSupervisor(podInfo *downward.PodInfo, cfg *supervisor.Config) error {
leaderElector,
)

ctx := signalCtx()
shutdown := &sync.WaitGroup{}

if err := startControllers(ctx, shutdown, buildControllersFunc); err != nil {
@ -504,12 +501,14 @@ func maybeSetupUnixPerms(endpoint *supervisor.Endpoint, pod *corev1.Pod) func()
}
}

func main() error { // return an error instead of klog.Fatal to allow defer statements to run
logs.InitLogs()
defer logs.FlushLogs()
func main() error { // return an error instead of plog.Fatal to allow defer statements to run
defer plog.Setup()()

klog.Infof("Running %s at %#v", rest.DefaultKubernetesUserAgent(), version.Get())
klog.Infof("Command-line arguments were: %s %s %s", os.Args[0], os.Args[1], os.Args[2])
plog.Always("Running supervisor",
"user-agent", rest.DefaultKubernetesUserAgent(),
"version", versionInfo(version.Get()),
"arguments", os.Args,
)

// Discover in which namespace we are installed.
podInfo, err := downward.Load(os.Args[1])
@ -517,17 +516,21 @@ func main() error { // return an error instead of klog.Fatal to allow defer stat
return fmt.Errorf("could not read pod metadata: %w", err)
}

ctx := signalCtx()

// Read the server config file.
cfg, err := supervisor.FromPath(os.Args[2])
cfg, err := supervisor.FromPath(ctx, os.Args[2])
if err != nil {
return fmt.Errorf("could not load config: %w", err)
}

return runSupervisor(podInfo, cfg)
return runSupervisor(ctx, podInfo, cfg)
}

func Main() {
if err := main(); err != nil {
klog.Fatal(err)
plog.Fatal(err)
}
}

type versionInfo apimachineryversion.Info // hide .String() method from plog

@ -1,4 +1,4 @@
// Copyright 2020-2021 the Pinniped contributors. All Rights Reserved.
// Copyright 2020-2022 the Pinniped contributors. All Rights Reserved.
// SPDX-License-Identifier: Apache-2.0

package testutil
@ -54,9 +54,47 @@ func RequireNumberOfSecretsMatchingLabelSelector(t *testing.T, secrets v1.Secret
require.Len(t, storedAuthcodeSecrets.Items, expectedNumberOfSecrets)
}

func RequireSecurityHeaders(t *testing.T, response *httptest.ResponseRecorder) {
// This is a more relaxed assertion rather than an exact match, so it can cover all the CSP headers we use.
require.Contains(t, response.Header().Get("Content-Security-Policy"), "default-src 'none'")
func RequireSecurityHeadersWithFormPostPageCSPs(t *testing.T, response *httptest.ResponseRecorder) {
// Loosely confirm that the unique CSPs needed for the form_post page were used.
cspHeader := response.Header().Get("Content-Security-Policy")
require.Contains(t, cspHeader, "script-src '") // loose assertion
require.Contains(t, cspHeader, "style-src '") // loose assertion
require.Contains(t, cspHeader, "img-src data:")
require.Contains(t, cspHeader, "connect-src *")

// Also require all the usual security headers.
requireSecurityHeaders(t, response)
}

func RequireSecurityHeadersWithLoginPageCSPs(t *testing.T, response *httptest.ResponseRecorder) {
// Loosely confirm that the unique CSPs needed for the login page were used.
cspHeader := response.Header().Get("Content-Security-Policy")
require.Contains(t, cspHeader, "style-src '") // loose assertion
require.NotContains(t, cspHeader, "script-src") // only needed by form_post page
require.NotContains(t, cspHeader, "img-src data:") // only needed by form_post page
require.NotContains(t, cspHeader, "connect-src *") // only needed by form_post page

// Also require all the usual security headers.
requireSecurityHeaders(t, response)
}

func RequireSecurityHeadersWithoutCustomCSPs(t *testing.T, response *httptest.ResponseRecorder) {
// Confirm that the unique CSPs needed for the form_post or login page were NOT used.
cspHeader := response.Header().Get("Content-Security-Policy")
require.NotContains(t, cspHeader, "script-src")
require.NotContains(t, cspHeader, "style-src")
require.NotContains(t, cspHeader, "img-src data:")
require.NotContains(t, cspHeader, "connect-src *")

// Also require all the usual security headers.
requireSecurityHeaders(t, response)
}

func requireSecurityHeaders(t *testing.T, response *httptest.ResponseRecorder) {
// Loosely confirm that the generic default CSPs were used.
cspHeader := response.Header().Get("Content-Security-Policy")
require.Contains(t, cspHeader, "default-src 'none'")
require.Contains(t, cspHeader, "frame-ancestors 'none'")

require.Equal(t, "DENY", response.Header().Get("X-Frame-Options"))
require.Equal(t, "1; mode=block", response.Header().Get("X-XSS-Protection"))
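A hypothetical caller of these helpers in a Supervisor endpoint test might look roughly like this; the handler variable and route below are made up for illustration:

rec := httptest.NewRecorder()
req := httptest.NewRequest(http.MethodGet, "/some/login/path", nil)
loginHandler.ServeHTTP(rec, req) // assumed handler under test

testutil.RequireSecurityHeadersWithLoginPageCSPs(t, rec) // login page: style-src allowed, no script-src/img-src/connect-src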
64
internal/testutil/loginhtml.go
Normal file
File diff suppressed because one or more lines are too long
@ -15,6 +15,7 @@ import (
"time"

coreosoidc "github.com/coreos/go-oidc/v3/oidc"
"github.com/gorilla/securecookie"
"github.com/ory/fosite"
"github.com/stretchr/testify/require"
"golang.org/x/oauth2"
@ -830,12 +831,51 @@ func NewTestUpstreamOIDCIdentityProviderBuilder() *TestUpstreamOIDCIdentityProvi
type ExpectedUpstreamStateParamFormat struct {
P string `json:"p"`
U string `json:"u"`
T string `json:"t"`
N string `json:"n"`
C string `json:"c"`
K string `json:"k"`
V string `json:"v"`
}

type UpstreamStateParamBuilder ExpectedUpstreamStateParamFormat

func (b UpstreamStateParamBuilder) Build(t *testing.T, stateEncoder *securecookie.SecureCookie) string {
state, err := stateEncoder.Encode("s", b)
require.NoError(t, err)
return state
}

func (b *UpstreamStateParamBuilder) WithAuthorizeRequestParams(params string) *UpstreamStateParamBuilder {
b.P = params
return b
}

func (b *UpstreamStateParamBuilder) WithNonce(nonce string) *UpstreamStateParamBuilder {
b.N = nonce
return b
}

func (b *UpstreamStateParamBuilder) WithCSRF(csrf string) *UpstreamStateParamBuilder {
b.C = csrf
return b
}

func (b *UpstreamStateParamBuilder) WithPKCE(pkce string) *UpstreamStateParamBuilder {
b.K = pkce
return b
}

func (b *UpstreamStateParamBuilder) WithUpstreamIDPType(upstreamIDPType string) *UpstreamStateParamBuilder {
b.T = upstreamIDPType
return b
}

func (b *UpstreamStateParamBuilder) WithStateVersion(version string) *UpstreamStateParamBuilder {
b.V = version
return b
}
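For context, a test would typically chain these setters and then encode the expected upstream state with the same securecookie encoder used by the handler under test; the parameter values and the happyStateEncoder variable below are invented for illustration:

expectedState := (&testutil.UpstreamStateParamBuilder{}).
    WithAuthorizeRequestParams("response_type=code&client_id=pinniped-cli").
    WithUpstreamIDPType("oidc").
    WithNonce("test-nonce").
    WithCSRF("test-csrf").
    WithPKCE("test-pkce").
    WithStateVersion("2").
    Build(t, happyStateEncoder) // happyStateEncoder is an assumed *securecookie.SecureCookie from test setup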
|
||||
|
||||
type staticKeySet struct {
|
||||
publicKey crypto.PublicKey
|
||||
}
|
||||
|
@ -24,14 +24,14 @@ type Logger struct {
|
||||
buffer syncBuffer
|
||||
}
|
||||
|
||||
// New returns a new test Logger. Use this for all new tests.
|
||||
// Deprecated: Use plog.TestLogger or plog.TestZapr instead. This is meant for old tests only.
|
||||
func New(t *testing.T) *Logger {
|
||||
res := Logger{t: t}
|
||||
res.Logger = stdr.New(log.New(&res.buffer, "", 0))
|
||||
return &res
|
||||
}
|
||||
|
||||
// Deprecated: NewLegacy returns a new test Logger. Use this for old tests if necessary.
|
||||
// Deprecated: Use plog.TestLogger or plog.TestZapr instead. This is meant for old tests only.
|
||||
func NewLegacy(t *testing.T) *Logger {
|
||||
res := New(t)
|
||||
res.Logger = newStdLogger(log.New(&res.buffer, "", 0))
|
||||
|
@ -24,6 +24,7 @@ type TranscriptLogMessage struct {
|
||||
Message string
|
||||
}
|
||||
|
||||
// Deprecated: Use plog.TestLogger or plog.TestZapr instead. This is meant for old tests only.
|
||||
func NewTranscriptLogger(t *testing.T) *TranscriptLogger {
|
||||
return &TranscriptLogger{t: t}
|
||||
}
|
||||
|
@ -411,13 +411,13 @@ func (p *ProviderConfig) maybeFetchUserInfo(ctx context.Context, tok *oauth2.Tok
|
||||
func maybeLogClaims(msg, name string, claims map[string]interface{}) {
|
||||
if plog.Enabled(plog.LevelAll) { // log keys and values at all level
|
||||
data, _ := json.Marshal(claims) // nothing we can do if it fails, but it really never should
|
||||
plog.Info(msg, "providerName", name, "claims", string(data))
|
||||
plog.All(msg, "providerName", name, "claims", string(data))
|
||||
return
|
||||
}
|
||||
|
||||
if plog.Enabled(plog.LevelDebug) { // log keys at debug level
|
||||
keys := sets.StringKeySet(claims).List() // note: this is only safe because the compiler asserts that claims is a map[string]<anything>
|
||||
plog.Info(msg, "providerName", name, "keys", keys)
|
||||
plog.Debug(msg, "providerName", name, "keys", keys)
|
||||
return
|
||||
}
|
||||
}
|
||||
|
@ -33,6 +33,7 @@ import (
|
||||
"go.pinniped.dev/internal/httputil/securityheader"
|
||||
"go.pinniped.dev/internal/net/phttp"
|
||||
"go.pinniped.dev/internal/oidc/provider"
|
||||
"go.pinniped.dev/internal/plog"
|
||||
"go.pinniped.dev/internal/upstreamoidc"
|
||||
"go.pinniped.dev/pkg/oidcclient/nonce"
|
||||
"go.pinniped.dev/pkg/oidcclient/oidctypes"
|
||||
@ -63,8 +64,6 @@ const (
|
||||
defaultPasswordEnvVarName = "PINNIPED_PASSWORD" //nolint:gosec // this is not a credential
|
||||
|
||||
httpLocationHeaderName = "Location"
|
||||
|
||||
debugLogLevel = 4
|
||||
)
|
||||
|
||||
// stdin returns the file descriptor for stdin as an int.
|
||||
@ -356,7 +355,7 @@ func (h *handlerState) baseLogin() (*oidctypes.Token, error) {
|
||||
// If the ID token is still valid for a bit, return it immediately and skip the rest of the flow.
|
||||
cached := h.cache.GetToken(cacheKey)
|
||||
if cached != nil && cached.IDToken != nil && time.Until(cached.IDToken.Expiry.Time) > minIDTokenValidity {
|
||||
h.logger.V(debugLogLevel).Info("Pinniped: Found unexpired cached token.")
|
||||
h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Found unexpired cached token.")
|
||||
return cached, nil
|
||||
}
|
||||
|
||||
@ -520,7 +519,7 @@ func (h *handlerState) getUsernameAndPassword() (string, string, error) {
|
||||
return "", "", fmt.Errorf("error prompting for username: %w", err)
|
||||
}
|
||||
} else {
|
||||
h.logger.V(debugLogLevel).Info("Pinniped: Read username from environment variable", "name", defaultUsernameEnvVarName)
|
||||
h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Read username from environment variable", "name", defaultUsernameEnvVarName)
|
||||
}
|
||||
|
||||
password := h.getEnv(defaultPasswordEnvVarName)
|
||||
@ -530,7 +529,7 @@ func (h *handlerState) getUsernameAndPassword() (string, string, error) {
|
||||
return "", "", fmt.Errorf("error prompting for password: %w", err)
|
||||
}
|
||||
} else {
|
||||
h.logger.V(debugLogLevel).Info("Pinniped: Read password from environment variable", "name", defaultPasswordEnvVarName)
|
||||
h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Read password from environment variable", "name", defaultPasswordEnvVarName)
|
||||
}
|
||||
|
||||
return username, password, nil
|
||||
@ -542,7 +541,7 @@ func (h *handlerState) webBrowserBasedAuth(authorizeOptions *[]oauth2.AuthCodeOp
	// Attempt to open a local TCP listener, logging but otherwise ignoring any error.
	listener, err := h.listen("tcp", h.listenAddr)
	if err != nil {
		h.logger.V(debugLogLevel).Error(err, "could not open callback listener")
		h.logger.V(plog.KlogLevelDebug).Error(err, "could not open callback listener")
	}

	// If the listener failed to start and stdin is not a TTY, then we have no hope of succeeding,
@ -578,7 +577,7 @@ func (h *handlerState) webBrowserBasedAuth(authorizeOptions *[]oauth2.AuthCodeOp

	// Open the authorize URL in the users browser, logging but otherwise ignoring any error.
	if err := h.openURL(authorizeURL); err != nil {
		h.logger.V(debugLogLevel).Error(err, "could not open browser")
		h.logger.V(plog.KlogLevelDebug).Error(err, "could not open browser")
	}

	// Prompt the user to visit the authorize URL, and to paste a manually-copied auth code (if possible).
@ -709,7 +708,7 @@ func (h *handlerState) initOIDCDiscovery() error {
		return err
	}

	h.logger.V(debugLogLevel).Info("Pinniped: Performing OIDC discovery", "issuer", h.issuer)
	h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Performing OIDC discovery", "issuer", h.issuer)
	var err error
	h.provider, err = oidc.NewProvider(h.ctx, h.issuer)
	if err != nil {
@ -767,7 +766,7 @@ func stringSliceContains(slice []string, s string) bool {
}

func (h *handlerState) tokenExchangeRFC8693(baseToken *oidctypes.Token) (*oidctypes.Token, error) {
	h.logger.V(debugLogLevel).Info("Pinniped: Performing RFC8693 token exchange", "requestedAudience", h.requestedAudience)
	h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Performing RFC8693 token exchange", "requestedAudience", h.requestedAudience)
	// Perform OIDC discovery. This may have already been performed if there was not a cached base token.
	if err := h.initOIDCDiscovery(); err != nil {
		return nil, err
@ -838,13 +837,13 @@ func (h *handlerState) tokenExchangeRFC8693(baseToken *oidctypes.Token) (*oidcty
}

func (h *handlerState) handleRefresh(ctx context.Context, refreshToken *oidctypes.RefreshToken) (*oidctypes.Token, error) {
	h.logger.V(debugLogLevel).Info("Pinniped: Refreshing cached token.")
	h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Refreshing cached token.")
	upstreamOIDCIdentityProvider := h.getProvider(h.oauth2Config, h.provider, h.httpClient)

	refreshed, err := upstreamOIDCIdentityProvider.PerformRefresh(ctx, refreshToken.Token)
	if err != nil {
		// Ignore errors during refresh, but return nil which will trigger the full login flow.
		h.logger.V(debugLogLevel).Info("Pinniped: Refresh failed.", "error", err.Error())
		h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Refresh failed.", "error", err.Error())
		return nil, nil
	}

@ -865,7 +864,7 @@ func (h *handlerState) handleAuthCodeCallback(w http.ResponseWriter, r *http.Req
	if h.useFormPost { // nolint:nestif
		// Return HTTP 405 for anything that's not a POST or an OPTIONS request.
		if r.Method != http.MethodPost && r.Method != http.MethodOptions {
			h.logger.V(debugLogLevel).Info("Pinniped: Got unexpected request on callback listener", "method", r.Method)
			h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Got unexpected request on callback listener", "method", r.Method)
			w.WriteHeader(http.StatusMethodNotAllowed)
			return nil // keep listening for more requests
		}
@ -883,11 +882,11 @@ func (h *handlerState) handleAuthCodeCallback(w http.ResponseWriter, r *http.Req
			origin := r.Header.Get("Origin")
			if origin == "" {
				// The CORS preflight request should have an origin.
				h.logger.V(debugLogLevel).Info("Pinniped: Got OPTIONS request without origin header")
				h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Got OPTIONS request without origin header")
				w.WriteHeader(http.StatusBadRequest)
				return nil // keep listening for more requests
			}
			h.logger.V(debugLogLevel).Info("Pinniped: Got CORS preflight request from browser", "origin", origin)
			h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Got CORS preflight request from browser", "origin", origin)
			// To tell the browser that it is okay to make the real POST request, return the following response.
			w.Header().Set("Access-Control-Allow-Origin", allowOrigin)
			w.Header().Set("Vary", "*") // supposed to use Vary when Access-Control-Allow-Origin is a specific host
@ -921,7 +920,7 @@ func (h *handlerState) handleAuthCodeCallback(w http.ResponseWriter, r *http.Req
	} else {
		// Return HTTP 405 for anything that's not a GET.
		if r.Method != http.MethodGet {
			h.logger.V(debugLogLevel).Info("Pinniped: Got unexpected request on callback listener", "method", r.Method)
			h.logger.V(plog.KlogLevelDebug).Info("Pinniped: Got unexpected request on callback listener", "method", r.Method)
			w.WriteHeader(http.StatusMethodNotAllowed)
			return nil // keep listening for more requests
		}

@ -20,10 +20,6 @@ import (
	"testing"
	"time"

	"go.pinniped.dev/internal/net/phttp"

	"go.pinniped.dev/internal/testutil/tlsserver"

	"github.com/coreos/go-oidc/v3/oidc"
	"github.com/golang/mock/gomock"
	"github.com/stretchr/testify/assert"
@ -35,9 +31,12 @@ import (
	"go.pinniped.dev/internal/httputil/httperr"
	"go.pinniped.dev/internal/httputil/roundtripper"
	"go.pinniped.dev/internal/mocks/mockupstreamoidcidentityprovider"
	"go.pinniped.dev/internal/net/phttp"
	"go.pinniped.dev/internal/oidc/provider"
	"go.pinniped.dev/internal/plog"
	"go.pinniped.dev/internal/testutil"
	"go.pinniped.dev/internal/testutil/testlogger"
	"go.pinniped.dev/internal/testutil/tlsserver"
	"go.pinniped.dev/internal/upstreamoidc"
	"go.pinniped.dev/pkg/oidcclient/nonce"
	"go.pinniped.dev/pkg/oidcclient/oidctypes"
@ -2334,7 +2333,7 @@ func TestHandleAuthCodeCallback(t *testing.T) {
				state: state.State("test-state"),
				pkce: pkce.Code("test-pkce"),
				nonce: nonce.Nonce("test-nonce"),
				logger: testlogger.New(t).Logger,
				logger: plog.Logr(), // nolint: staticcheck // old test with no log assertions
				issuer: "https://valid-issuer.com/with/some/path",
			}
			if tt.opt != nil {

@ -27,8 +27,8 @@ Create an [ActiveDirectoryIdentityProvider](https://github.com/vmware-tanzu/pinn
### ActiveDirectoryIdentityProvider with default options

This ActiveDirectoryIdentityProvider uses all the default configuration options.

Learn more about the default configuration [here]({{< ref "../reference/active-directory-configuration">}})
The default configuration options are documented in the
[Active Directory configuration reference]({{< ref "../reference/active-directory-configuration">}}).

```yaml
apiVersion: idp.supervisor.pinniped.dev/v1alpha1
@ -41,14 +41,13 @@ spec:
  # Specify the host of the Active Directory server.
  host: "activedirectory.example.com:636"

  # Specify the name of the Kubernetes Secret that contains your Active Directory
  # bind account credentials. This service account will be used by the
  # Supervisor to perform LDAP user and group searches.
  # Specify the name of the Kubernetes Secret that contains your Active
  # Directory bind account credentials. This service account will be
  # used by the Supervisor to perform LDAP user and group searches.
  bind:
    secretName: "active-directory-bind-account"

---

apiVersion: v1
kind: Secret
metadata:
@ -64,6 +63,10 @@ stringData:
  password: "YOUR_PASSWORD"
```

Note that the `metadata.name` of the ActiveDirectoryIdentityProvider resource may be visible to end users at login prompts,
so choose a name which will be understood by your end users.
For example, if you work at Acme Corp, choose something like `acme-corporate-active-directory` over `my-idp`.

If you've saved this into a file `activedirectory.yaml`, then install it into your cluster using:

```sh
@ -140,13 +143,16 @@ spec:
    # successful authentication.
    groupName: "dn"

  # Specify the name of the Kubernetes Secret that contains your Active Directory
  # bind account credentials. This service account will be used by the
  # Supervisor to perform LDAP user and group searches.
  # Specify the name of the Kubernetes Secret that contains your Active
  # Directory bind account credentials. This service account will be
  # used by the Supervisor to perform LDAP user and group searches.
  bind:
    secretName: "active-directory-bind-account"
```

More information about the defaults for these configuration options can be found in
the [Active Directory configuration reference]({{< ref "../reference/active-directory-configuration">}}).

## Next steps

Next, [configure the Concierge to validate JWTs issued by the Supervisor]({{< ref "configure-concierge-supervisor-jwt" >}})!

@ -104,19 +104,21 @@ spec:
    # to the "username" claim in downstream tokens minted by the Supervisor.
    username: email

    # Specify the name of the claim in your Dex ID token that represents the groups
    # that the user belongs to. This matches what you specified above
    # Specify the name of the claim in your Dex ID token that represents the
    # groups to which the user belongs. This matches what you specified above
    # with the Groups claim filter.
    # Note that the group claims from Github are in the format of "org:team".
    # To query for the group scope, you should set the organization you want Dex to
    # search against in its configuration, otherwise your group claim would be empty.
    # An example config can be found at - https://dexidp.io/docs/connectors/github/#configuration
    # To query for the group scope, you should set the organization you
    # want Dex to search against in its configuration, otherwise your group
    # claim would be empty. An example config can be found at
    # https://dexidp.io/docs/connectors/github/#configuration
    groups: groups

  # Specify the name of the Kubernetes Secret that contains your Dex
  # application's client credentials (created below).
  client:
    secretName: dex-client-credentials

---
apiVersion: v1
kind: Secret
@ -125,13 +127,19 @@ metadata:
  name: dex-client-credentials
type: secrets.pinniped.dev/oidc-client
stringData:
  # The "Client ID" that you set in Dex. For example, in our case this is "pinniped-supervisor"
  # The "Client ID" that you set in Dex. For example, in our case
  # this is "pinniped-supervisor".
  clientID: "<your-client-id>"

  # The "Client secret" that you set in Dex. For example, in our case this is "pinniped-supervisor-secret"
  # The "Client secret" that you set in Dex. For example, in our
  # case this is "pinniped-supervisor-secret".
  clientSecret: "<your-client-secret>"
```

Note that the `metadata.name` of the OIDCIdentityProvider resource may be visible to end users at login prompts
if you choose to enable `allowPasswordGrant`, so choose a name which will be understood by your end users.
For example, if you work at Acme Corp, choose something like `acme-corporate-ldap` over `my-idp`.

Once your OIDCIdentityProvider resource has been created, you can validate your configuration by running:

```bash

@ -89,6 +89,7 @@ spec:
  # application's client credentials (created below).
  client:
    secretName: gitlab-client-credentials

---
apiVersion: v1
kind: Secret
@ -105,6 +106,10 @@ stringData:
  clientSecret: "<your-client-secret>"
```

Note that the `metadata.name` of the OIDCIdentityProvider resource may be visible to end users at login prompts
if you choose to enable `allowPasswordGrant`, so choose a name which will be understood by your end users.
For example, if you work at Acme Corp, choose something like `acme-corporate-gitlab` over `my-idp`.

Once your OIDCIdentityProvider has been created, you can validate your configuration by running:

```shell

@ -120,7 +120,6 @@ spec:
    secretName: "jumpcloudldap-bind-account"

---

apiVersion: v1
kind: Secret
metadata:
@ -138,6 +137,10 @@ stringData:
  password: "YOUR_PASSWORD"
```

Note that the `metadata.name` of the LDAPIdentityProvider resource may be visible to end users at login prompts,
so choose a name which will be understood by your end users.
For example, if you work at Acme Corp, choose something like `acme-corporate-ldap` over `my-idp`.

If you've saved this into a file `jumpcloud.yaml`, then install it into your cluster using:

```sh

@ -97,6 +97,7 @@ spec:
  # application's client credentials (created below).
  client:
    secretName: okta-client-credentials

---
apiVersion: v1
kind: Secret
@ -113,6 +114,10 @@ stringData:
  clientSecret: "<your-client-secret>"
```

Note that the `metadata.name` of the OIDCIdentityProvider resource may be visible to end users at login prompts
if you choose to enable `allowPasswordGrant`, so choose a name which will be understood by your end users.
For example, if you work at Acme Corp, choose something like `acme-corporate-okta` over `my-idp`.

Once your OIDCIdentityProvider has been created, you can validate your configuration by running:

```shell

@ -158,6 +158,7 @@ spec:
      - name: certs
        secret:
          secretName: certs

---
apiVersion: v1
kind: Service
@ -265,7 +266,6 @@ spec:
    secretName: openldap-bind-account

---

apiVersion: v1
kind: Secret
metadata:
@ -284,6 +284,10 @@ stringData:
EOF
```

Note that the `metadata.name` of the LDAPIdentityProvider resource may be visible to end users at login prompts,
so choose a name which will be understood by your end users.
For example, if you work at Acme Corp, choose something like `acme-corporate-ldap` over `my-idp`.

Once your LDAPIdentityProvider has been created, you can validate your configuration by running:

```sh

@ -76,7 +76,8 @@ spec:
    # the default claims in your token. The "openid" scope is always
    # included.
    #
    # See the example claims below to learn how to customize the claims returned.
    # See the example claims below to learn how to customize the
    # claims returned.
    additionalScopes: [group, email]

  # Specify how Workspace ONE Access claims are mapped to Kubernetes identities.
@ -85,22 +86,22 @@ spec:
    # Specify the name of the claim in your Workspace ONE Access token that
    # will be mapped to the username in your Kubernetes environment.
    #
    # User's emails can change. Use the sub claim if
    # your environment requires a stable identifier.
    # User's emails can change. Use the sub claim if your environment
    # requires a stable identifier.
    username: email

    # Specify the name of the claim in Workspace ONE Access that represents the
    # groups the user belongs to.
    # Specify the name of the claim in Workspace ONE Access that represents
    # the groups to which the user belongs.
    #
    # Group names may not be unique and can change.
    # The group_ids claim is recommended for environments
    # that want to use a more stable identifier.
    # Group names may not be unique and can change. The group_ids claim is
    # recommended for environments that want to use a more stable identifier.
    groups: group_names

  # Specify the name of the Kubernetes Secret that contains your
  # Workspace ONE Access application's client credentials (created below).
  client:
    secretName: ws1-client-credentials

---
apiVersion: v1
kind: Secret

@ -244,6 +244,6 @@ should be signed by a certificate authority that is trusted by their browsers.
## Next steps

Next, configure an OIDCIdentityProvider, ActiveDirectoryIdentityProvider, or an LDAPIdentityProvider for the Supervisor
(several examples are available in these guides),
and [configure the Concierge to use the Supervisor for authentication]({{< ref "configure-concierge-supervisor-jwt" >}})
(several examples are available in these guides). Then
[configure the Concierge to use the Supervisor for authentication]({{< ref "configure-concierge-supervisor-jwt" >}})
on each cluster!

@ -72,6 +72,9 @@ pinniped get kubeconfig \
The new Pinniped-compatible kubeconfig YAML will be output as stdout, and can be redirected to a file.

Various default behaviors of `pinniped get kubeconfig` can be overridden using [its command-line options]({{< ref "cli" >}}).
One flag of note is `--upstream-identity-provider-flow browser_authcode` to choose end-user `kubectl` login via a web browser
(the default for OIDCIdentityProviders), and `--upstream-identity-provider-flow cli_password` to choose end-user `kubectl`
login via CLI username/password prompts (the default for LDAPIdentityProviders and ActiveDirectoryIdentityProviders).
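
For example, a minimal sketch of overriding the default flow while generating a kubeconfig; the output filename below is an arbitrary placeholder:

```sh
# Sketch: choose the CLI username/password flow instead of the browser-based
# flow, and redirect the generated kubeconfig to a file (filename is arbitrary).
pinniped get kubeconfig \
  --upstream-identity-provider-flow cli_password \
  > pinniped-kubeconfig.yaml
```
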
## Use the generated kubeconfig with `kubectl` to access the cluster

@ -94,20 +97,33 @@ to authenticate the user to the cluster.
If the Pinniped Supervisor is used for authentication to that cluster, then the user's authentication experience
will depend on which type of identity provider was configured.

- For an OIDC identity provider, there are two supported client flows.
- For an OIDC identity provider, there are two supported client flows:

  When using the default browser-based flow, `kubectl` will open the user's web browser and direct it to the login page of
  1. When using the default browser-based flow, `kubectl` will open the user's web browser and direct it to the login page of
  their OIDC Provider. This login flow is controlled by the provider, so it may include two-factor authentication or
  other features provided by the OIDC Provider. If the user's browser is not available, then `kubectl` will instead
  print a URL which can be visited in a browser (potentially on a different computer) to complete the authentication.

  When using the optional CLI-based flow, `kubectl` will interactively prompt the user for their username and password at the CLI.
  2. When using the optional CLI-based flow, `kubectl` will interactively prompt the user for their username and password at the CLI.
  Alternatively, the user can set the environment variables `PINNIPED_USERNAME` and `PINNIPED_PASSWORD` for the
  `kubectl` process to avoid the interactive prompts. Note that the optional CLI-based flow must be enabled by the
  administrator in the OIDCIdentityProvider configuration before use
  (see `allowPasswordGrant` in the
  [API docs](https://github.com/vmware-tanzu/pinniped/blob/main/generated/{{< latestcodegenversion >}}/README.adoc#oidcauthorizationconfig)
  for more details).

- For LDAP and Active Directory identity providers, there are also two supported client flows:

  1. When using the default CLI-based flow, `kubectl` will interactively prompt the user for their username and password at the CLI.
  Alternatively, the user can set the environment variables `PINNIPED_USERNAME` and `PINNIPED_PASSWORD` for the
  `kubectl` process to avoid the interactive prompts.

- For an LDAP identity provider, `kubectl` will interactively prompt the user for their username and password at the CLI.
  Alternatively, the user can set the environment variables `PINNIPED_USERNAME` and `PINNIPED_PASSWORD` for the
  `kubectl` process to avoid the interactive prompts.
  2. When using the optional browser-based flow, `kubectl` will open the user's web browser and direct it to a login page
  hosted by the Pinniped Supervisor. When the user enters their username and password, the Supervisor will authenticate
  the user using the LDAP or Active Directory provider. If the user's browser is not available, then `kubectl` will instead
  print a URL which can be visited in a browser (potentially on a different computer) to complete the authentication.
  Unlike the optional flow for OIDC providers described above, this optional flow does not need to be configured in
  the LDAPIdentityProvider or ActiveDirectoryIdentityProvider resource, so it is always available for end-users.

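As a sketch of the CLI-based flows described in the list above, the interactive prompts can be skipped by exporting the documented environment variables before invoking `kubectl`; the username, password, and kubeconfig filename below are placeholders:

```sh
# Sketch: provide credentials via environment variables to avoid interactive
# prompts (values and kubeconfig filename are placeholders, not real settings).
export PINNIPED_USERNAME="jane@example.com"
export PINNIPED_PASSWORD="example-password"
kubectl --kubeconfig pinniped-kubeconfig.yaml get namespaces
```
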
Once the user completes authentication, the `kubectl` command will automatically continue and complete the user's requested command.
For the example above, `kubectl` would list the cluster's namespaces.
@ -135,8 +151,14 @@ in the upstream identity provider, for example:
  --group auditors
```

## Other notes
## Session and credential caching by the CLI

- Temporary session credentials such as ID, access, and refresh tokens are stored in:
  - `~/.config/pinniped/sessions.yaml` (macOS/Linux)
Temporary session credentials such as ID, access, and refresh tokens are stored in:
- `$HOME/.config/pinniped/sessions.yaml` (macOS/Linux)
- `%USERPROFILE%/.config/pinniped/sessions.yaml` (Windows).

Temporary cluster credentials such as mTLS client certificates are stored in:
- `$HOME/.config/pinniped/credentials.yaml` (macOS/Linux)
- `%USERPROFILE%/.config/pinniped/credentials.yaml` (Windows).

Deleting the contents of these directories is equivalent to performing a client-side logout.

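A minimal sketch of such a client-side logout, assuming the default macOS/Linux cache locations listed above:

```sh
# Sketch: client-side logout by deleting the CLI's cached session and cluster
# credentials (default macOS/Linux paths; adjust for Windows as noted above).
rm -f "$HOME/.config/pinniped/sessions.yaml"
rm -f "$HOME/.config/pinniped/credentials.yaml"
```
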
@ -206,6 +206,8 @@ The per-FederationDomain endpoints are:
  See [internal/oidc/callback/callback_handler.go](https://github.com/vmware-tanzu/pinniped/blob/main/internal/oidc/callback/callback_handler.go).
- `<issuer_path>/v1alpha1/pinniped_identity_providers` is a custom discovery endpoint for clients to learn about available upstream identity providers.
  See [internal/oidc/idpdiscovery/idp_discovery_handler.go](https://github.com/vmware-tanzu/pinniped/blob/main/internal/oidc/idpdiscovery/idp_discovery_handler.go).
- `<issuer_path>/login` is a login UI page to support the optional browser-based login flow for LDAP and Active Directory identity providers.
  See [internal/oidc/login/login_handler.go](https://github.com/vmware-tanzu/pinniped/blob/main/internal/oidc/login/login_handler.go).
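
As a sketch of how a client could call the custom discovery endpoint described above; the issuer URL is a placeholder for a FederationDomain's configured issuer:

```sh
# Sketch: list the available upstream identity providers for a FederationDomain.
# The issuer URL below is a placeholder; substitute your Supervisor's issuer.
curl -s "https://pinniped-supervisor.example.com/issuer/v1alpha1/pinniped_identity_providers"
```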

The OIDC specifications implemented by the Supervisor can be found at [openid.net](https://openid.net/connect).
Some files were not shown because too many files have changed in this diff.