index int64 | repo_id string | file_path string | content string |
|---|---|---|---|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/OAuthBearerSaslServerProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals;
import java.security.Provider;
import java.security.Security;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule;
import org.apache.kafka.common.security.oauthbearer.internals.OAuthBearerSaslServer.OAuthBearerSaslServerFactory;
/**
 * JCA security {@link Provider} that registers the SASL/OAUTHBEARER
 * {@code SaslServerFactory} implementation so the JVM's SASL machinery can
 * locate it by mechanism name.
 */
public class OAuthBearerSaslServerProvider extends Provider {
    private static final long serialVersionUID = 1L;

    protected OAuthBearerSaslServerProvider() {
        super("SASL/OAUTHBEARER Server Provider", 1.0, "SASL/OAUTHBEARER Server Provider for Kafka");
        // Map the OAUTHBEARER mechanism name to the server factory class.
        String factoryKey = "SaslServerFactory." + OAuthBearerLoginModule.OAUTHBEARER_MECHANISM;
        put(factoryKey, OAuthBearerSaslServerFactory.class.getName());
    }

    /**
     * Installs an instance of this provider into the JVM-wide security provider
     * list (a no-op if a provider with the same name is already installed).
     */
    public static void initialize() {
        Security.addProvider(new OAuthBearerSaslServerProvider());
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/expiring/ExpiringCredential.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.expiring;
/**
* A credential that expires and that can potentially be refreshed
*
* @see ExpiringCredentialRefreshingLogin
*/
/**
 * A credential that expires and that can potentially be refreshed.
 * Implementations are produced by login modules and consumed by
 * {@link ExpiringCredentialRefreshingLogin}, which uses the time values below
 * to schedule background re-login.
 *
 * @see ExpiringCredentialRefreshingLogin
 */
public interface ExpiringCredential {
    /**
     * The name of the principal to which this credential applies (used only for
     * logging)
     *
     * @return the always non-null/non-empty principal name
     */
    String principalName();

    /**
     * When the credential became valid, in terms of the number of milliseconds
     * since the epoch, if known, otherwise null. An expiring credential may not
     * necessarily indicate when it was created -- just when it expires -- so we
     * need to support a null return value here.
     *
     * @return the time when the credential became valid, in terms of the number of
     *         milliseconds since the epoch, if known, otherwise null
     */
    Long startTimeMs();

    /**
     * When the credential expires, in terms of the number of milliseconds since the
     * epoch. All expiring credentials by definition must indicate their expiration
     * time -- thus, unlike other methods, we do not support a null return value
     * here.
     *
     * @return the time when the credential expires, in terms of the number of
     *         milliseconds since the epoch
     */
    long expireTimeMs();

    /**
     * The point after which the credential can no longer be refreshed, in terms of
     * the number of milliseconds since the epoch, if any, otherwise null. Some
     * expiring credentials can be refreshed over and over again without limit, so
     * we support a null return value here.
     *
     * @return the point after which the credential can no longer be refreshed, in
     *         terms of the number of milliseconds since the epoch, if any,
     *         otherwise null
     */
    Long absoluteLastRefreshTimeMs();
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/expiring/ExpiringCredentialRefreshConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.expiring;
import java.util.Map;
import java.util.Objects;
import org.apache.kafka.common.config.SaslConfigs;
/**
* Immutable refresh-related configuration for expiring credentials that can be
* parsed from a producer/consumer/broker config.
*/
/**
 * Immutable refresh-related configuration for expiring credentials that can be
 * parsed from a producer/consumer/broker config.
 */
public class ExpiringCredentialRefreshConfig {
    private final double loginRefreshWindowFactor;
    private final double loginRefreshWindowJitter;
    private final short loginRefreshMinPeriodSeconds;
    private final short loginRefreshBufferSeconds;
    private final boolean loginRefreshReloginAllowedBeforeLogout;

    /**
     * Constructor based on producer/consumer/broker configs and the indicated value
     * for whether or not client relogin is allowed before logout
     *
     * @param configs
     *            the mandatory (but possibly empty) producer/consumer/broker
     *            configs upon which to build this instance
     * @param clientReloginAllowedBeforeLogout
     *            if the {@code LoginModule} and {@code SaslClient} implementations
     *            support multiple simultaneous login contexts on a single
     *            {@code Subject} at the same time. If true, then upon refresh,
     *            logout will only be invoked on the original {@code LoginContext}
     *            after a new one successfully logs in. This can be helpful if the
     *            original credential still has some lifetime left when an attempt
     *            to refresh the credential fails; the client will still be able to
     *            create new connections as long as the original credential remains
     *            valid. Otherwise, if logout is immediately invoked prior to
     *            relogin, a relogin failure leaves the client without the ability
     *            to connect until relogin does in fact succeed.
     * @throws NullPointerException
     *             if {@code configs} is null or any of the required refresh-related
     *             keys is absent from it
     */
    public ExpiringCredentialRefreshConfig(Map<String, ?> configs, boolean clientReloginAllowedBeforeLogout) {
        Objects.requireNonNull(configs);
        /*
         * Fail fast with the name of the missing key rather than the message-less
         * NullPointerException that auto-unboxing a null Double/Short would throw.
         */
        this.loginRefreshWindowFactor =
                (Double) requiredConfig(configs, SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_FACTOR);
        this.loginRefreshWindowJitter =
                (Double) requiredConfig(configs, SaslConfigs.SASL_LOGIN_REFRESH_WINDOW_JITTER);
        this.loginRefreshMinPeriodSeconds =
                (Short) requiredConfig(configs, SaslConfigs.SASL_LOGIN_REFRESH_MIN_PERIOD_SECONDS);
        this.loginRefreshBufferSeconds =
                (Short) requiredConfig(configs, SaslConfigs.SASL_LOGIN_REFRESH_BUFFER_SECONDS);
        this.loginRefreshReloginAllowedBeforeLogout = clientReloginAllowedBeforeLogout;
    }

    // Returns the value for the given key, failing with the key name if it is absent.
    private static Object requiredConfig(Map<String, ?> configs, String key) {
        return Objects.requireNonNull(configs.get(key), key);
    }

    /**
     * Background login refresh thread will sleep until the specified window factor
     * relative to the credential's total lifetime has been reached, at which time
     * it will try to refresh the credential.
     *
     * @return the login refresh window factor
     */
    public double loginRefreshWindowFactor() {
        return loginRefreshWindowFactor;
    }

    /**
     * Amount of random jitter added to the background login refresh thread's sleep
     * time.
     *
     * @return the login refresh window jitter
     */
    public double loginRefreshWindowJitter() {
        return loginRefreshWindowJitter;
    }

    /**
     * The desired minimum time between checks by the background login refresh
     * thread, in seconds
     *
     * @return the desired minimum refresh period, in seconds
     */
    public short loginRefreshMinPeriodSeconds() {
        return loginRefreshMinPeriodSeconds;
    }

    /**
     * The amount of buffer time before expiration to maintain when refreshing. If a
     * refresh is scheduled to occur closer to expiration than the number of seconds
     * defined here then the refresh will be moved up to maintain as much of the
     * desired buffer as possible.
     *
     * @return the refresh buffer, in seconds
     */
    public short loginRefreshBufferSeconds() {
        return loginRefreshBufferSeconds;
    }

    /**
     * If the LoginModule and SaslClient implementations support multiple
     * simultaneous login contexts on a single Subject at the same time. If true,
     * then upon refresh, logout will only be invoked on the original LoginContext
     * after a new one successfully logs in. This can be helpful if the original
     * credential still has some lifetime left when an attempt to refresh the
     * credential fails; the client will still be able to create new connections as
     * long as the original credential remains valid. Otherwise, if logout is
     * immediately invoked prior to relogin, a relogin failure leaves the client
     * without the ability to connect until relogin does in fact succeed.
     *
     * @return true if relogin is allowed prior to discarding an existing
     *         (presumably unexpired) credential, otherwise false
     */
    public boolean loginRefreshReloginAllowedBeforeLogout() {
        return loginRefreshReloginAllowedBeforeLogout;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/expiring/ExpiringCredentialRefreshingLogin.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.expiring;
import java.util.Date;
import java.util.Objects;
import java.util.Random;
import javax.security.auth.Subject;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler;
import org.apache.kafka.common.security.auth.Login;
import org.apache.kafka.common.utils.KafkaThread;
import org.apache.kafka.common.utils.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* This class is responsible for refreshing logins for both Kafka client and
* server when the login is a type that has a limited lifetime/will expire. The
* credentials for the login must implement {@link ExpiringCredential}.
*/
/**
 * This class is responsible for refreshing logins for both Kafka client and
 * server when the login is a type that has a limited lifetime/will expire. The
 * credentials for the login must implement {@link ExpiringCredential}.
 * <p>
 * A daemon refresher thread is started by {@link #login()} when an expiring
 * credential is present; the thread sleeps until a computed refresh point and
 * then performs a re-login, retrying with a fixed delay on failure.
 */
public abstract class ExpiringCredentialRefreshingLogin implements AutoCloseable {
    /**
     * Class that can be overridden for testing
     */
    static class LoginContextFactory {
        public LoginContext createLoginContext(ExpiringCredentialRefreshingLogin expiringCredentialRefreshingLogin)
                throws LoginException {
            return new LoginContext(expiringCredentialRefreshingLogin.contextName(),
                    expiringCredentialRefreshingLogin.subject(), expiringCredentialRefreshingLogin.callbackHandler(),
                    expiringCredentialRefreshingLogin.configuration());
        }

        // Test hook invoked after the refresher thread has been started; no-op here.
        public void refresherThreadStarted() {
            // empty
        }

        // Test hook invoked just before the refresher thread exits; no-op here.
        public void refresherThreadDone() {
            // empty
        }
    }

    /*
     * Internal signal used to make the refresher thread exit when the Subject is
     * in a state this class cannot repair (e.g. logout() left the old credential
     * in place).
     */
    private static class ExitRefresherThreadDueToIllegalStateException extends Exception {
        private static final long serialVersionUID = -6108495378411920380L;

        public ExitRefresherThreadDueToIllegalStateException(String message) {
            super(message);
        }
    }

    // Runnable executed on the daemon refresher thread created in login().
    private class Refresher implements Runnable {
        @Override
        public void run() {
            log.info("[Principal={}]: Expiring credential re-login thread started.", principalLogText());
            while (true) {
                /*
                 * Refresh thread's main loop. Each expiring credential lives for one iteration
                 * of the loop. Thread will exit if the loop exits from here.
                 */
                long nowMs = currentMs();
                Long nextRefreshMs = refreshMs(nowMs);
                if (nextRefreshMs == null) {
                    loginContextFactory.refresherThreadDone();
                    return;
                }
                // safety check motivated by KAFKA-7945,
                // should generally never happen except due to a bug
                if (nextRefreshMs.longValue() < nowMs) {
                    log.warn("[Principal={}]: Expiring credential re-login sleep time was calculated to be in the past! Will explicitly adjust. ({})", principalLogText(),
                            new Date(nextRefreshMs));
                    nextRefreshMs = Long.valueOf(nowMs + 10 * 1000); // refresh in 10 seconds
                }
                log.info("[Principal={}]: Expiring credential re-login sleeping until: {}", principalLogText(),
                        new Date(nextRefreshMs));
                time.sleep(nextRefreshMs - nowMs);
                // If close() interrupted us during the sleep, exit cleanly here.
                if (Thread.currentThread().isInterrupted()) {
                    log.info("[Principal={}]: Expiring credential re-login thread has been interrupted and will exit.",
                            principalLogText());
                    loginContextFactory.refresherThreadDone();
                    return;
                }
                while (true) {
                    /*
                     * Perform a re-login over and over again with some intervening delay
                     * unless/until either the refresh succeeds or we are interrupted.
                     */
                    try {
                        reLogin();
                        break; // success
                    } catch (ExitRefresherThreadDueToIllegalStateException e) {
                        // Unrecoverable Subject state; give up on refreshing.
                        log.error(e.getMessage(), e);
                        loginContextFactory.refresherThreadDone();
                        return;
                    } catch (LoginException loginException) {
                        log.warn(String.format(
                                "[Principal=%s]: LoginException during login retry; will sleep %d seconds before trying again.",
                                principalLogText(), DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS),
                                loginException);
                        // Sleep and allow loop to run/try again unless interrupted
                        time.sleep(DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS * 1000);
                        if (Thread.currentThread().isInterrupted()) {
                            log.error(
                                    "[Principal={}]: Interrupted while trying to perform a subsequent expiring credential re-login after one or more initial re-login failures: re-login thread exiting now: {}",
                                    principalLogText(), String.valueOf(loginException.getMessage()));
                            loginContextFactory.refresherThreadDone();
                            return;
                        }
                    }
                }
            }
        }
    }

    private static final Logger log = LoggerFactory.getLogger(ExpiringCredentialRefreshingLogin.class);
    private static final long DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS = 10L;
    private static final Random RNG = new Random();
    private final Time time;
    private Thread refresherThread;
    private final LoginContextFactory loginContextFactory;
    private final String contextName;
    private final Configuration configuration;
    private final ExpiringCredentialRefreshConfig expiringCredentialRefreshConfig;
    private final AuthenticateCallbackHandler callbackHandler;
    // mark volatile due to existence of public subject() method
    private volatile Subject subject = null;
    private boolean hasExpiringCredential = false;
    private String principalName = null;
    private LoginContext loginContext = null;
    private ExpiringCredential expiringCredential = null;
    private final Class<?> mandatoryClassToSynchronizeOnPriorToRefresh;

    /**
     * Convenience constructor using the default {@link LoginContextFactory} and
     * the system clock.
     */
    public ExpiringCredentialRefreshingLogin(String contextName, Configuration configuration,
            ExpiringCredentialRefreshConfig expiringCredentialRefreshConfig,
            AuthenticateCallbackHandler callbackHandler, Class<?> mandatoryClassToSynchronizeOnPriorToRefresh) {
        this(contextName, configuration, expiringCredentialRefreshConfig, callbackHandler,
                mandatoryClassToSynchronizeOnPriorToRefresh, new LoginContextFactory(), Time.SYSTEM);
    }

    /**
     * Full constructor; the {@code loginContextFactory} and {@code time}
     * parameters exist primarily so tests can inject fakes. Note that
     * {@code callbackHandler} and {@code loginContextFactory} are not
     * null-checked here, unlike the other arguments.
     */
    public ExpiringCredentialRefreshingLogin(String contextName, Configuration configuration,
            ExpiringCredentialRefreshConfig expiringCredentialRefreshConfig,
            AuthenticateCallbackHandler callbackHandler, Class<?> mandatoryClassToSynchronizeOnPriorToRefresh,
            LoginContextFactory loginContextFactory, Time time) {
        this.contextName = Objects.requireNonNull(contextName);
        this.configuration = Objects.requireNonNull(configuration);
        this.expiringCredentialRefreshConfig = Objects.requireNonNull(expiringCredentialRefreshConfig);
        this.callbackHandler = callbackHandler;
        this.mandatoryClassToSynchronizeOnPriorToRefresh = Objects
                .requireNonNull(mandatoryClassToSynchronizeOnPriorToRefresh);
        this.loginContextFactory = loginContextFactory;
        this.time = Objects.requireNonNull(time);
    }

    /** @return the Subject of the current login, or null if login() has not yet succeeded */
    public Subject subject() {
        return subject; // field requires volatile keyword
    }

    /** @return the JAAS login context name this instance was constructed with */
    public String contextName() {
        return contextName;
    }

    /** @return the JAAS configuration this instance was constructed with */
    public Configuration configuration() {
        return configuration;
    }

    /** @return the callback handler passed at construction (may be null) */
    public AuthenticateCallbackHandler callbackHandler() {
        return callbackHandler;
    }

    /** @return the fixed SASL service name, always {@code "kafka"} */
    public String serviceName() {
        return "kafka";
    }

    /**
     * Performs login for each login module specified for the login context of this
     * instance and starts the thread used to periodically re-login.
     * <p>
     * The synchronized keyword is not necessary because an implementation of
     * {@link Login} will delegate to this code (e.g. {@code OAuthBearerRefreshingLogin}),
     * and the {@code login()} method on the delegating class will itself be
     * synchronized if necessary.
     */
    public LoginContext login() throws LoginException {
        LoginContext tmpLoginContext = loginContextFactory.createLoginContext(this);
        tmpLoginContext.login();
        log.info("Successfully logged in.");
        loginContext = tmpLoginContext;
        subject = loginContext.getSubject();
        expiringCredential = expiringCredential();
        hasExpiringCredential = expiringCredential != null;
        if (!hasExpiringCredential) {
            // do not bother with re-logins.
            log.debug("No Expiring Credential");
            principalName = null;
            refresherThread = null;
            return loginContext;
        }
        principalName = expiringCredential.principalName();
        // Check for a clock skew problem
        long expireTimeMs = expiringCredential.expireTimeMs();
        long nowMs = currentMs();
        if (nowMs > expireTimeMs) {
            log.error(
                    "[Principal={}]: Current clock: {} is later than expiry {}. This may indicate a clock skew problem."
                            + " Check that this host's and remote host's clocks are in sync. Not starting refresh thread."
                            + " This process is likely unable to authenticate SASL connections (for example, it is unlikely"
                            + " to be able to authenticate a connection with a Kafka Broker).",
                    principalLogText(), new Date(nowMs), new Date(expireTimeMs));
            return loginContext;
        }
        if (log.isDebugEnabled())
            log.debug("[Principal={}]: It is an expiring credential", principalLogText());
        /*
         * Re-login periodically. How often is determined by the expiration date of the
         * credential and refresh-related configuration values.
         */
        refresherThread = KafkaThread.daemon(String.format("kafka-expiring-relogin-thread-%s", principalName),
                new Refresher());
        refresherThread.start();
        loginContextFactory.refresherThreadStarted();
        return loginContext;
    }

    /**
     * Interrupts the refresher thread (if one is running) and waits for it to
     * terminate; re-asserts this thread's interrupt status if the join itself is
     * interrupted.
     */
    public void close() {
        if (refresherThread != null && refresherThread.isAlive()) {
            refresherThread.interrupt();
            try {
                refresherThread.join();
            } catch (InterruptedException e) {
                log.warn("[Principal={}]: Interrupted while waiting for re-login thread to shutdown.",
                        principalLogText(), e);
                Thread.currentThread().interrupt();
            }
        }
    }

    /**
     * Retrieves the current expiring credential from the Subject, or null if none
     * exists; supplied by the concrete subclass.
     */
    public abstract ExpiringCredential expiringCredential();

    /**
     * Determine when to sleep until before performing a refresh
     *
     * @param relativeToMs
     *            the point (in terms of number of milliseconds since the epoch) at
     *            which to perform the calculation
     * @return null if no refresh should occur, otherwise the time to sleep until
     *         (in terms of the number of milliseconds since the epoch) before
     *         performing a refresh
     */
    private Long refreshMs(long relativeToMs) {
        if (expiringCredential == null) {
            /*
             * Re-login failed because our login() invocation did not generate a credential
             * but also did not generate an exception. Try logging in again after some delay
             * (it seems likely to be a bug, but it doesn't hurt to keep trying to refresh).
             */
            long retvalNextRefreshMs = relativeToMs + DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS * 1000L;
            log.warn("[Principal={}]: No Expiring credential found: will try again at {}", principalLogText(),
                    new Date(retvalNextRefreshMs));
            return retvalNextRefreshMs;
        }
        long expireTimeMs = expiringCredential.expireTimeMs();
        if (relativeToMs > expireTimeMs) {
            boolean logoutRequiredBeforeLoggingBackIn = isLogoutRequiredBeforeLoggingBackIn();
            if (logoutRequiredBeforeLoggingBackIn) {
                log.error(
                        "[Principal={}]: Current clock: {} is later than expiry {}. This may indicate a clock skew problem."
                                + " Check that this host's and remote host's clocks are in sync. Exiting refresh thread.",
                        principalLogText(), new Date(relativeToMs), new Date(expireTimeMs));
                return null;
            } else {
                /*
                 * Since the current soon-to-expire credential isn't logged out until we have a
                 * new credential with a refreshed lifetime, it is possible that the current
                 * credential could expire if the re-login continually fails over and over again
                 * making us unable to get the new credential. Therefore keep trying rather than
                 * exiting.
                 */
                long retvalNextRefreshMs = relativeToMs + DELAY_SECONDS_BEFORE_NEXT_RETRY_WHEN_RELOGIN_FAILS * 1000L;
                log.warn("[Principal={}]: Expiring credential already expired at {}: will try to refresh again at {}",
                        principalLogText(), new Date(expireTimeMs), new Date(retvalNextRefreshMs));
                return retvalNextRefreshMs;
            }
        }
        // If the credential can no longer be refreshed after it expires, stop refreshing.
        Long absoluteLastRefreshTimeMs = expiringCredential.absoluteLastRefreshTimeMs();
        if (absoluteLastRefreshTimeMs != null && absoluteLastRefreshTimeMs.longValue() < expireTimeMs) {
            log.warn("[Principal={}]: Expiring credential refresh thread exiting because the"
                    + " expiring credential's current expiration time ({}) exceeds the latest possible refresh time ({})."
                    + " This process will not be able to authenticate new SASL connections after that"
                    + " time (for example, it will not be able to authenticate a new connection with a Kafka Broker).",
                    principalLogText(), new Date(expireTimeMs), new Date(absoluteLastRefreshTimeMs.longValue()));
            return null;
        }
        Long optionalStartTime = expiringCredential.startTimeMs();
        long startMs = optionalStartTime != null ? optionalStartTime.longValue() : relativeToMs;
        log.info("[Principal={}]: Expiring credential valid from {} to {}", expiringCredential.principalName(),
                new java.util.Date(startMs), new java.util.Date(expireTimeMs));
        // Fraction of the credential's lifetime to wait before refreshing, with jitter.
        double pct = expiringCredentialRefreshConfig.loginRefreshWindowFactor()
                + (expiringCredentialRefreshConfig.loginRefreshWindowJitter() * RNG.nextDouble());
        /*
         * Ignore buffer times if the credential's remaining lifetime is less than their
         * sum.
         */
        long refreshMinPeriodSeconds = expiringCredentialRefreshConfig.loginRefreshMinPeriodSeconds();
        long clientRefreshBufferSeconds = expiringCredentialRefreshConfig.loginRefreshBufferSeconds();
        if (relativeToMs + 1000L * (refreshMinPeriodSeconds + clientRefreshBufferSeconds) > expireTimeMs) {
            long retvalRefreshMs = relativeToMs + (long) ((expireTimeMs - relativeToMs) * pct);
            log.warn(
                    "[Principal={}]: Expiring credential expires at {}, so buffer times of {} and {} seconds"
                            + " at the front and back, respectively, cannot be accommodated. We will refresh at {}.",
                    principalLogText(), new Date(expireTimeMs), refreshMinPeriodSeconds, clientRefreshBufferSeconds,
                    new Date(retvalRefreshMs));
            return retvalRefreshMs;
        }
        long proposedRefreshMs = startMs + (long) ((expireTimeMs - startMs) * pct);
        // Don't let it violate the requested end buffer time
        long beginningOfEndBufferTimeMs = expireTimeMs - clientRefreshBufferSeconds * 1000;
        if (proposedRefreshMs > beginningOfEndBufferTimeMs) {
            log.info(
                    "[Principal={}]: Proposed refresh time of {} extends into the desired buffer time of {} seconds before expiration, so refresh it at the desired buffer begin point, at {}",
                    expiringCredential.principalName(), new Date(proposedRefreshMs), clientRefreshBufferSeconds,
                    new Date(beginningOfEndBufferTimeMs));
            return beginningOfEndBufferTimeMs;
        }
        // Don't let it violate the minimum refresh period
        long endOfMinRefreshBufferTime = relativeToMs + 1000 * refreshMinPeriodSeconds;
        if (proposedRefreshMs < endOfMinRefreshBufferTime) {
            log.info(
                    "[Principal={}]: Expiring credential re-login thread time adjusted from {} to {} since the former is sooner "
                            + "than the minimum refresh interval ({} seconds from now).",
                    principalLogText(), new Date(proposedRefreshMs), new Date(endOfMinRefreshBufferTime),
                    refreshMinPeriodSeconds);
            return endOfMinRefreshBufferTime;
        }
        // Proposed refresh time doesn't violate any constraints
        return proposedRefreshMs;
    }

    /**
     * Performs a single re-login: optionally logs out the old context first (when
     * simultaneous logins are not allowed), logs in again, and confirms that a
     * genuinely new credential was obtained.
     *
     * @throws LoginException
     *             if the login itself fails (the caller retries in that case)
     * @throws ExitRefresherThreadDueToIllegalStateException
     *             if the Subject's credential state cannot be reconciled and the
     *             refresher thread must exit
     */
    private void reLogin() throws LoginException, ExitRefresherThreadDueToIllegalStateException {
        synchronized (mandatoryClassToSynchronizeOnPriorToRefresh) {
            // Only perform one refresh of a particular type at a time
            boolean logoutRequiredBeforeLoggingBackIn = isLogoutRequiredBeforeLoggingBackIn();
            if (hasExpiringCredential && logoutRequiredBeforeLoggingBackIn) {
                String principalLogTextPriorToLogout = principalLogText();
                log.info("Initiating logout for {}", principalLogTextPriorToLogout);
                loginContext.logout();
                // Make absolutely sure we were logged out
                expiringCredential = expiringCredential();
                hasExpiringCredential = expiringCredential != null;
                if (hasExpiringCredential)
                    // We can't force the removal because we don't know how to do it, so abort
                    throw new ExitRefresherThreadDueToIllegalStateException(String.format(
                            "Subject's private credentials still contains an instance of %s even though logout() was invoked; exiting refresh thread",
                            expiringCredential.getClass().getName()));
            }
            /*
             * Perform a login, making note of any credential that might need a logout()
             * afterwards
             */
            ExpiringCredential optionalCredentialToLogout = expiringCredential;
            LoginContext optionalLoginContextToLogout = loginContext;
            boolean cleanLogin = false; // remember to restore the original if necessary
            try {
                loginContext = loginContextFactory.createLoginContext(ExpiringCredentialRefreshingLogin.this);
                log.info("Initiating re-login for {}, logout() still needs to be called on a previous login = {}",
                        principalName, optionalCredentialToLogout != null);
                loginContext.login();
                cleanLogin = true; // no need to restore the original
                // Perform a logout() on any original credential if necessary
                if (optionalCredentialToLogout != null)
                    optionalLoginContextToLogout.logout();
            } finally {
                if (!cleanLogin)
                    // restore the original
                    loginContext = optionalLoginContextToLogout;
            }
            /*
             * Get the new credential and make sure it is not any old one that required a
             * logout() after the login()
             */
            expiringCredential = expiringCredential();
            hasExpiringCredential = expiringCredential != null;
            if (!hasExpiringCredential) {
                /*
                 * Re-login has failed because our login() invocation has not generated a
                 * credential but has also not generated an exception. We won't exit here;
                 * instead we will allow login retries in case we can somehow fix the issue (it
                 * seems likely to be a bug, but it doesn't hurt to keep trying to refresh).
                 */
                log.error("No Expiring Credential after a supposedly-successful re-login");
                principalName = null;
            } else {
                if (expiringCredential == optionalCredentialToLogout)
                    /*
                     * The login() didn't identify a new credential; we still have the old one. We
                     * don't know how to fix this, so abort.
                     */
                    throw new ExitRefresherThreadDueToIllegalStateException(String.format(
                            "Subject's private credentials still contains the previous, soon-to-expire instance of %s even though login() followed by logout() was invoked; exiting refresh thread",
                            expiringCredential.getClass().getName()));
                principalName = expiringCredential.principalName();
                if (log.isDebugEnabled())
                    log.debug("[Principal={}]: It is an expiring credential after re-login as expected",
                            principalLogText());
            }
        }
    }

    // Human-readable principal identifier for log messages.
    private String principalLogText() {
        return expiringCredential == null ? principalName
                : expiringCredential.getClass().getSimpleName() + ":" + principalName;
    }

    // Current wall-clock time from the injected Time source (mockable in tests).
    private long currentMs() {
        return time.milliseconds();
    }

    // True when simultaneous login contexts are NOT supported, i.e. we must log
    // out the old context before logging in again.
    private boolean isLogoutRequiredBeforeLoggingBackIn() {
        return !expiringCredentialRefreshConfig.loginRefreshReloginAllowedBeforeLogout();
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/AccessTokenRetriever.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.Closeable;
import java.io.IOException;
/**
* An <code>AccessTokenRetriever</code> is the internal API by which the login module will
* retrieve an access token for use in authorization by the broker. The implementation may
* involve authentication to a remote system, or it can be as simple as loading the contents
* of a file or configuration setting.
*
* <i>Retrieval</i> is a separate concern from <i>validation</i>, so it isn't necessary for
* the <code>AccessTokenRetriever</code> implementation to validate the integrity of the JWT
* access token.
*
* @see HttpAccessTokenRetriever
* @see FileTokenRetriever
*/
/**
 * An <code>AccessTokenRetriever</code> is the internal API by which the login module will
 * retrieve an access token for use in authorization by the broker. The implementation may
 * involve authentication to a remote system, or it can be as simple as loading the contents
 * of a file or configuration setting.
 *
 * <i>Retrieval</i> is a separate concern from <i>validation</i>, so it isn't necessary for
 * the <code>AccessTokenRetriever</code> implementation to validate the integrity of the JWT
 * access token.
 *
 * @see HttpAccessTokenRetriever
 * @see FileTokenRetriever
 */
public interface AccessTokenRetriever extends Initable, Closeable {
    /**
     * Retrieves a JWT access token in its serialized three-part form. The implementation
     * is free to determine how it should be retrieved but should not perform validation
     * on the result.
     *
     * <b>Note</b>: This is a blocking function and callers should be aware that the
     * implementation may be communicating over a network, with the file system, coordinating
     * threads, etc. The facility in the {@link javax.security.auth.spi.LoginModule} from
     * which this is ultimately called does not provide an asynchronous approach.
     *
     * @return Non-<code>null</code> JWT access token string
     *
     * @throws IOException Thrown on errors related to IO during retrieval
     */
    String retrieve() throws IOException;

    /**
     * Lifecycle method to perform a clean shutdown of the retriever. This must
     * be performed by the caller to ensure the correct state, freeing up and releasing any
     * resources performed in {@link #init()}.
     *
     * <p>The default implementation does nothing; implementations holding resources
     * should override it.
     *
     * @throws IOException Thrown on errors related to IO during closure
     */
    default void close() throws IOException {
        // This method left intentionally blank.
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/AccessTokenRetrieverFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_CONNECT_TIMEOUT_MS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_READ_TIMEOUT_MS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_RETRY_BACKOFF_MAX_MS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_LOGIN_RETRY_BACKOFF_MS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL;
import static org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginCallbackHandler.CLIENT_ID_CONFIG;
import static org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginCallbackHandler.CLIENT_SECRET_CONFIG;
import static org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginCallbackHandler.SCOPE_CONFIG;
import java.net.URL;
import java.util.Locale;
import java.util.Map;
import javax.net.ssl.SSLSocketFactory;
public class AccessTokenRetrieverFactory {
/**
* Create an {@link AccessTokenRetriever} from the given SASL and JAAS configuration.
*
* <b>Note</b>: the returned <code>AccessTokenRetriever</code> is <em>not</em> initialized
* here and must be done by the caller prior to use.
*
* @param configs SASL configuration
* @param jaasConfig JAAS configuration
*
* @return Non-<code>null</code> {@link AccessTokenRetriever}
*/
public static AccessTokenRetriever create(Map<String, ?> configs, Map<String, Object> jaasConfig) {
return create(configs, null, jaasConfig);
}
public static AccessTokenRetriever create(Map<String, ?> configs,
String saslMechanism,
Map<String, Object> jaasConfig) {
ConfigurationUtils cu = new ConfigurationUtils(configs, saslMechanism);
URL tokenEndpointUrl = cu.validateUrl(SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL);
if (tokenEndpointUrl.getProtocol().toLowerCase(Locale.ROOT).equals("file")) {
return new FileTokenRetriever(cu.validateFile(SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL));
} else {
JaasOptionsUtils jou = new JaasOptionsUtils(jaasConfig);
String clientId = jou.validateString(CLIENT_ID_CONFIG);
String clientSecret = jou.validateString(CLIENT_SECRET_CONFIG);
String scope = jou.validateString(SCOPE_CONFIG, false);
SSLSocketFactory sslSocketFactory = null;
if (jou.shouldCreateSSLSocketFactory(tokenEndpointUrl))
sslSocketFactory = jou.createSSLSocketFactory();
return new HttpAccessTokenRetriever(clientId,
clientSecret,
scope,
sslSocketFactory,
tokenEndpointUrl.toString(),
cu.validateLong(SASL_LOGIN_RETRY_BACKOFF_MS),
cu.validateLong(SASL_LOGIN_RETRY_BACKOFF_MAX_MS),
cu.validateInteger(SASL_LOGIN_CONNECT_TIMEOUT_MS, false),
cu.validateInteger(SASL_LOGIN_READ_TIMEOUT_MS, false));
}
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/AccessTokenValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
/**
 * An instance of <code>AccessTokenValidator</code> acts as a function object that, given an access
 * token in base-64 encoded JWT format, can parse the data, perform validation, and construct an
 * {@link OAuthBearerToken} for use by the caller.
 *
 * The primary reason for this abstraction is that client and broker may have different libraries
 * available to them to perform these operations. Additionally, the exact steps for validation may
 * differ between implementations. To put this more concretely: the implementation in the Kafka
 * client does not have bundled a robust library to perform this logic, and it is not the
 * responsibility of the client to perform vigorous validation. However, the Kafka broker ships with
 * a richer set of library dependencies that can perform more substantial validation and is also
 * expected to perform a trust-but-verify test of the access token's signature.
 *
 * See:
 *
 * <ul>
 *     <li><a href="https://datatracker.ietf.org/doc/html/rfc6749#section-1.4">RFC 6749, Section 1.4</a></li>
 *     <li><a href="https://datatracker.ietf.org/doc/html/rfc6750#section-2.1">RFC 6750, Section 2.1</a></li>
 *     <li><a href="https://datatracker.ietf.org/doc/html/draft-ietf-oauth-access-token-jwt">JWT Profile for OAuth 2.0 Access Tokens (draft; published as RFC 9068)</a></li>
 * </ul>
 *
 * @see LoginAccessTokenValidator A basic AccessTokenValidator used by client-side login
 *                                authentication
 * @see ValidatorAccessTokenValidator A more robust AccessTokenValidator that is used on the broker
 *                                    to validate the token's contents and verify the signature
 */
public interface AccessTokenValidator {

    /**
     * Accepts an OAuth JWT access token in base-64 encoded format, validates, and returns an
     * OAuthBearerToken.
     *
     * @param accessToken Non-<code>null</code> JWT access token
     *
     * @return {@link OAuthBearerToken}
     *
     * @throws ValidateException Thrown on errors performing validation of given token
     */
    OAuthBearerToken validate(String accessToken) throws ValidateException;

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/AccessTokenValidatorFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_CLOCK_SKEW_SECONDS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_EXPECTED_AUDIENCE;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_EXPECTED_ISSUER;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_SCOPE_CLAIM_NAME;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_SUB_CLAIM_NAME;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.jose4j.keys.resolvers.VerificationKeyResolver;
public class AccessTokenValidatorFactory {

    // Utility class with only static factory methods; not meant to be instantiated.
    private AccessTokenValidatorFactory() {
    }

    /**
     * Create a client-side (login) {@link AccessTokenValidator} from the given configuration.
     *
     * @param configs SASL configuration
     *
     * @return Non-<code>null</code> {@link AccessTokenValidator}
     */
    public static AccessTokenValidator create(Map<String, ?> configs) {
        return create(configs, (String) null);
    }

    /**
     * Create a client-side (login) {@link AccessTokenValidator}, honoring any
     * SASL-mechanism-prefixed configuration overrides.
     *
     * @param configs       SASL configuration
     * @param saslMechanism SASL mechanism name used to resolve prefixed overrides, or
     *                      <code>null</code> if none
     *
     * @return Non-<code>null</code> {@link AccessTokenValidator}
     */
    public static AccessTokenValidator create(Map<String, ?> configs, String saslMechanism) {
        ConfigurationUtils cu = new ConfigurationUtils(configs, saslMechanism);
        // NOTE(review): these lookups deliberately use get() rather than validateString(),
        // so null/untrimmed values flow through -- presumably LoginAccessTokenValidator
        // applies its own defaults; confirm there before tightening.
        String scopeClaimName = cu.get(SASL_OAUTHBEARER_SCOPE_CLAIM_NAME);
        String subClaimName = cu.get(SASL_OAUTHBEARER_SUB_CLAIM_NAME);
        return new LoginAccessTokenValidator(scopeClaimName, subClaimName);
    }

    /**
     * Create a broker-side {@link AccessTokenValidator} that verifies token signatures
     * via the given {@link VerificationKeyResolver}.
     *
     * @param configs                 SASL configuration
     * @param verificationKeyResolver Resolver used to look up the signature verification key
     *
     * @return Non-<code>null</code> {@link AccessTokenValidator}
     */
    public static AccessTokenValidator create(Map<String, ?> configs,
                                              VerificationKeyResolver verificationKeyResolver) {
        return create(configs, null, verificationKeyResolver);
    }

    /**
     * Create a broker-side {@link AccessTokenValidator} that verifies token signatures
     * via the given {@link VerificationKeyResolver}, honoring any SASL-mechanism-prefixed
     * configuration overrides.
     *
     * @param configs                 SASL configuration
     * @param saslMechanism           SASL mechanism name used to resolve prefixed overrides,
     *                                or <code>null</code> if none
     * @param verificationKeyResolver Resolver used to look up the signature verification key
     *
     * @return Non-<code>null</code> {@link AccessTokenValidator}
     */
    public static AccessTokenValidator create(Map<String, ?> configs,
                                              String saslMechanism,
                                              VerificationKeyResolver verificationKeyResolver) {
        ConfigurationUtils cu = new ConfigurationUtils(configs, saslMechanism);

        // The expected audience list is optional; when configured it is exposed as an
        // unmodifiable set so downstream validation cannot mutate it.
        Set<String> expectedAudiences = null;
        List<String> l = cu.get(SASL_OAUTHBEARER_EXPECTED_AUDIENCE);

        if (l != null)
            expectedAudiences = Collections.unmodifiableSet(new HashSet<>(l));

        Integer clockSkew = cu.validateInteger(SASL_OAUTHBEARER_CLOCK_SKEW_SECONDS, false);
        String expectedIssuer = cu.validateString(SASL_OAUTHBEARER_EXPECTED_ISSUER, false);
        String scopeClaimName = cu.validateString(SASL_OAUTHBEARER_SCOPE_CLAIM_NAME);
        String subClaimName = cu.validateString(SASL_OAUTHBEARER_SUB_CLAIM_NAME);

        return new ValidatorAccessTokenValidator(clockSkew,
            expectedAudiences,
            expectedIssuer,
            verificationKeyResolver,
            scopeClaimName,
            subClaimName);
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/BasicOAuthBearerToken.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
import java.util.StringJoiner;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
/**
 * An implementation of the {@link OAuthBearerToken} that fairly straightforwardly stores the values
 * given to its constructor (except the scope set which is copied to avoid modifications).
 *
 * Very little validation is applied here with respect to the validity of the given values. All
 * validation is assumed to happen by users of this class.
 *
 * @see <a href="https://tools.ietf.org/html/rfc7515">RFC 7515: JSON Web Signature (JWS)</a>
 */
public class BasicOAuthBearerToken implements OAuthBearerToken {

    private final String token;

    private final Set<String> scopes;

    private final Long lifetimeMs;

    private final String principalName;

    private final Long startTimeMs;

    /**
     * Creates a new OAuthBearerToken instance around the given values.
     *
     * @param token         Value containing the compact serialization as a base 64 string that
     *                      can be parsed, decoded, and validated as a well-formed JWS. Must be
     *                      non-<code>null</code>, non-blank, and non-whitespace only.
     * @param scopes        Set of non-<code>null</code> scopes. May contain case-sensitive
     *                      "duplicates". The given set is copied and made unmodifiable so neither
     *                      the caller of this constructor nor any downstream users can modify it.
     * @param lifetimeMs    The token's lifetime, expressed as the number of milliseconds since the
     *                      epoch. Must be non-negative.
     * @param principalName The name of the principal to which this credential applies. Must be
     *                      non-<code>null</code>, non-blank, and non-whitespace only.
     * @param startTimeMs   The token's start time, expressed as the number of milliseconds since
     *                      the epoch, if available, otherwise <code>null</code>. Must be
     *                      non-negative if a non-<code>null</code> value is provided.
     */
    public BasicOAuthBearerToken(String token,
                                 Set<String> scopes,
                                 long lifetimeMs,
                                 String principalName,
                                 Long startTimeMs) {
        this.token = token;
        // Defensively copy and wrap the scope set so that neither the caller nor any
        // downstream user can modify it, as promised by the constructor contract above.
        this.scopes = Collections.unmodifiableSet(new HashSet<>(scopes));
        this.lifetimeMs = lifetimeMs;
        this.principalName = principalName;
        this.startTimeMs = startTimeMs;
    }

    /**
     * The <code>b64token</code> value as defined in
     * <a href="https://tools.ietf.org/html/rfc6750#section-2.1">RFC 6750 Section
     * 2.1</a>
     *
     * @return <code>b64token</code> value as defined in
     *         <a href="https://tools.ietf.org/html/rfc6750#section-2.1">RFC 6750
     *         Section 2.1</a>
     */
    @Override
    public String value() {
        return token;
    }

    /**
     * The token's scope of access, as per
     * <a href="https://tools.ietf.org/html/rfc6749#section-1.4">RFC 6749 Section
     * 1.4</a>
     *
     * @return the token's (always non-null but potentially empty) scope of access,
     *         as per <a href="https://tools.ietf.org/html/rfc6749#section-1.4">RFC
     *         6749 Section 1.4</a>. Note that all values in the returned set will
     *         be trimmed of preceding and trailing whitespace, and the result will
     *         never contain the empty string.
     */
    @Override
    public Set<String> scope() {
        // The set was made unmodifiable in the constructor, so it is safe to hand out.
        return scopes;
    }

    /**
     * The token's lifetime, expressed as the number of milliseconds since the
     * epoch, as per <a href="https://tools.ietf.org/html/rfc6749#section-1.4">RFC
     * 6749 Section 1.4</a>
     *
     * @return the token's lifetime, expressed as the number of milliseconds since
     *         the epoch, as per
     *         <a href="https://tools.ietf.org/html/rfc6749#section-1.4">RFC 6749
     *         Section 1.4</a>.
     */
    @Override
    public long lifetimeMs() {
        return lifetimeMs;
    }

    /**
     * The name of the principal to which this credential applies
     *
     * @return the always non-null/non-empty principal name
     */
    @Override
    public String principalName() {
        return principalName;
    }

    /**
     * When the credential became valid, in terms of the number of milliseconds
     * since the epoch, if known, otherwise null. An expiring credential may not
     * necessarily indicate when it was created -- just when it expires -- so we
     * need to support a null return value here.
     *
     * @return the time when the credential became valid, in terms of the number of
     *         milliseconds since the epoch, if known, otherwise null
     */
    @Override
    public Long startTimeMs() {
        return startTimeMs;
    }

    @Override
    public String toString() {
        return new StringJoiner(", ", BasicOAuthBearerToken.class.getSimpleName() + "[", "]")
            .add("token='" + token + "'")
            .add("scopes=" + scopes)
            .add("lifetimeMs=" + lifetimeMs)
            .add("principalName='" + principalName + "'")
            .add("startTimeMs=" + startTimeMs)
            .toString();
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/ClaimValidationUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.util.Collection;
import java.util.Collections;
import java.util.HashSet;
import java.util.Set;
/**
 * Simple utility class to perform basic cleaning and validation on input values so that they're
 * performed consistently throughout the code base.
 */
public class ClaimValidationUtils {

    // Utility class with only static methods; not meant to be instantiated.
    private ClaimValidationUtils() {
    }

    /**
     * Validates that the scopes are valid, where <i>invalid</i> means <i>any</i> of
     * the following:
     *
     * <ul>
     *     <li>Collection is <code>null</code></li>
     *     <li>Collection has duplicates</li>
     *     <li>Any of the elements in the collection are <code>null</code></li>
     *     <li>Any of the elements in the collection are zero length</li>
     *     <li>Any of the elements in the collection are whitespace only</li>
     * </ul>
     *
     * @param scopeClaimName Name of the claim used for the scope values
     * @param scopes         Collection of String scopes
     *
     * @return Unmodifiable {@link Set} that includes the values of the original set, but with
     *         each value trimmed
     *
     * @throws ValidateException Thrown if the value is <code>null</code>, contains duplicates, or
     *                           if any of the values in the set are <code>null</code>, empty,
     *                           or whitespace only
     */
    public static Set<String> validateScopes(String scopeClaimName, Collection<String> scopes) throws ValidateException {
        if (scopes == null)
            throw new ValidateException(String.format("%s value must be non-null", scopeClaimName));

        Set<String> copy = new HashSet<>();

        for (String scope : scopes) {
            scope = validateString(scopeClaimName, scope);

            // Set.add returns false when the (trimmed) scope is already present, so a
            // single call both records the value and detects duplicates.
            if (!copy.add(scope))
                throw new ValidateException(String.format("%s value must not contain duplicates - %s already present", scopeClaimName, scope));
        }

        return Collections.unmodifiableSet(copy);
    }

    /**
     * Validates that the given lifetime is valid, where <i>invalid</i> means <i>any</i> of
     * the following:
     *
     * <ul>
     *     <li><code>null</code></li>
     *     <li>Negative</li>
     * </ul>
     *
     * @param claimName  Name of the claim
     * @param claimValue Expiration time (in milliseconds)
     *
     * @return Input parameter, as provided
     *
     * @throws ValidateException Thrown if the value is <code>null</code> or negative
     */
    public static long validateExpiration(String claimName, Long claimValue) throws ValidateException {
        if (claimValue == null)
            throw new ValidateException(String.format("%s value must be non-null", claimName));

        if (claimValue < 0)
            throw new ValidateException(String.format("%s value must be non-negative; value given was \"%s\"", claimName, claimValue));

        return claimValue;
    }

    /**
     * Validates that the given claim value is valid, where <i>invalid</i> means <i>any</i> of
     * the following:
     *
     * <ul>
     *     <li><code>null</code></li>
     *     <li>Zero length</li>
     *     <li>Whitespace only</li>
     * </ul>
     *
     * @param claimName  Name of the claim
     * @param claimValue Name of the subject
     *
     * @return Trimmed version of the <code>claimValue</code> parameter
     *
     * @throws ValidateException Thrown if the value is <code>null</code>, empty, or whitespace only
     */
    public static String validateSubject(String claimName, String claimValue) throws ValidateException {
        return validateString(claimName, claimValue);
    }

    /**
     * Validates that the given issued at claim name is valid, where <i>invalid</i> means <i>any</i> of
     * the following:
     *
     * <ul>
     *     <li>Negative</li>
     * </ul>
     *
     * @param claimName  Name of the claim
     * @param claimValue Start time (in milliseconds) or <code>null</code> if not used
     *
     * @return Input parameter, as provided
     *
     * @throws ValidateException Thrown if the value is negative
     */
    public static Long validateIssuedAt(String claimName, Long claimValue) throws ValidateException {
        if (claimValue != null && claimValue < 0)
            throw new ValidateException(String.format("%s value must be null or non-negative; value given was \"%s\"", claimName, claimValue));

        return claimValue;
    }

    /**
     * Validates that the given claim name override is valid, where <i>invalid</i> means
     * <i>any</i> of the following:
     *
     * <ul>
     *     <li><code>null</code></li>
     *     <li>Zero length</li>
     *     <li>Whitespace only</li>
     * </ul>
     *
     * @param name  "Standard" name of the claim, e.g. <code>sub</code>
     * @param value "Override" name of the claim, e.g. <code>email</code>
     *
     * @return Trimmed version of the <code>value</code> parameter
     *
     * @throws ValidateException Thrown if the value is <code>null</code>, empty, or whitespace only
     */
    public static String validateClaimNameOverride(String name, String value) throws ValidateException {
        return validateString(name, value);
    }

    // Shared null/empty/whitespace-only check; returns the trimmed value.
    private static String validateString(String name, String value) throws ValidateException {
        if (value == null)
            throw new ValidateException(String.format("%s value must be non-null", name));

        if (value.isEmpty())
            throw new ValidateException(String.format("%s value must be non-empty", name));

        value = value.trim();

        if (value.isEmpty())
            throw new ValidateException(String.format("%s value must not contain only whitespace", name));

        return value;
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/CloseableVerificationKeyResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.Closeable;
import java.io.IOException;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerValidatorCallbackHandler;
import org.jose4j.keys.resolvers.VerificationKeyResolver;
/**
 * The {@link OAuthBearerValidatorCallbackHandler} uses a {@link VerificationKeyResolver} as
 * part of its validation of the incoming JWT. Some of the <code>VerificationKeyResolver</code>
 * implementations use resources like threads, connections, etc. that should be properly closed
 * when no longer needed. Since the <code>VerificationKeyResolver</code> interface itself doesn't
 * define a <code>close</code> method, we provide a means to do that here.
 *
 * @see OAuthBearerValidatorCallbackHandler
 * @see VerificationKeyResolver
 * @see Closeable
 */
public interface CloseableVerificationKeyResolver extends Initable, Closeable, VerificationKeyResolver {

    /**
     * Lifecycle method to perform a clean shutdown of the {@link VerificationKeyResolver}.
     * This must be performed by the caller to ensure the correct state, freeing up
     * and releasing any resources performed in {@link #init()}.
     *
     * The default implementation is a no-op, suitable for resolvers that hold no resources.
     *
     * @throws IOException Thrown on errors related to IO during closure
     */
    default void close() throws IOException {
        // This method left intentionally blank.
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/ConfigurationUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.File;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.net.URL;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.network.ListenerName;
/**
 * <code>ConfigurationUtils</code> is a utility class to perform basic configuration-related
 * logic and is separated out here for easier, more direct testing.
 */
public class ConfigurationUtils {

    private final Map<String, ?> configs;

    // Optional SASL-mechanism-specific key prefix; when non-null, prefixed keys take
    // precedence over the plain key in get().
    private final String prefix;

    public ConfigurationUtils(Map<String, ?> configs) {
        this(configs, null);
    }

    public ConfigurationUtils(Map<String, ?> configs, String saslMechanism) {
        this.configs = configs;

        if (saslMechanism != null && !saslMechanism.trim().isEmpty())
            this.prefix = ListenerName.saslMechanismPrefix(saslMechanism.trim());
        else
            this.prefix = null;
    }

    /**
     * Validates that the configured value names a URL pointing at a file that:
     *
     * <ul>
     *     <li>exists</li>
     *     <li>has read permission</li>
     *     <li>points to a file, not a directory</li>
     * </ul>
     *
     * @param name Name of the configuration option to validate
     *
     * @return {@link Path} of the validated file
     *
     * @throws ConfigException Thrown if the URL is malformed, or the file doesn't exist,
     *                         isn't readable, or is a directory
     */
    public Path validateFile(String name) {
        URL url = validateUrl(name);
        File file;

        try {
            // NOTE(review): getRawPath() keeps percent-encoding (e.g. "%20"), so paths
            // with encoded characters may not resolve -- confirm intended behavior.
            file = new File(url.toURI().getRawPath()).getAbsoluteFile();
        } catch (URISyntaxException e) {
            throw new ConfigException(name, url.toString(), String.format("The OAuth configuration option %s contains a URL (%s) that is malformed: %s", name, url, e.getMessage()));
        }

        if (!file.exists())
            throw new ConfigException(name, file, String.format("The OAuth configuration option %s contains a file (%s) that doesn't exist", name, file));

        if (!file.canRead())
            throw new ConfigException(name, file, String.format("The OAuth configuration option %s contains a file (%s) that doesn't have read permission", name, file));

        if (file.isDirectory())
            throw new ConfigException(name, file, String.format("The OAuth configuration option %s references a directory (%s), not a file", name, file));

        return file.toPath();
    }

    /**
     * Validates that the configured value, if supplied, is an {@link Integer}.
     *
     * @param name       Name of the configuration option to validate
     * @param isRequired Whether a missing value is an error
     *
     * @return Configured value, or <code>null</code> if absent and not required
     *
     * @throws ConfigException Thrown if the value is required but missing
     */
    public Integer validateInteger(String name, boolean isRequired) {
        Integer value = get(name);

        if (value == null) {
            if (isRequired)
                throw new ConfigException(name, null, String.format("The OAuth configuration option %s must be non-null", name));
            else
                return null;
        }

        return value;
    }

    /**
     * Validates that the configured value is a non-<code>null</code> {@link Long}.
     *
     * @param name Name of the configuration option to validate
     *
     * @return Configured value
     *
     * @throws ConfigException Thrown if the value is missing
     */
    public Long validateLong(String name) {
        return validateLong(name, true);
    }

    public Long validateLong(String name, boolean isRequired) {
        return validateLong(name, isRequired, null);
    }

    /**
     * Validates that the configured value, if supplied, is a {@link Long} that is not less
     * than the provided minimum value (when a minimum is given).
     *
     * @param name       Name of the configuration option to validate
     * @param isRequired Whether a missing value is an error
     * @param min        Inclusive lower bound, or <code>null</code> for no bound
     *
     * @return Configured value, or <code>null</code> if absent and not required
     *
     * @throws ConfigException Thrown if the value is required but missing, or below the minimum
     */
    public Long validateLong(String name, boolean isRequired, Long min) {
        Long value = get(name);

        if (value == null) {
            if (isRequired)
                throw new ConfigException(name, null, String.format("The OAuth configuration option %s must be non-null", name));
            else
                return null;
        }

        if (min != null && value < min)
            throw new ConfigException(name, value, String.format("The OAuth configuration option %s value must be at least %s", name, min));

        return value;
    }

    /**
     * Validates that the configured URL:
     *
     * <ul>
     *     <li>is well-formed</li>
     *     <li>contains a scheme</li>
     *     <li>uses either HTTP, HTTPS, or file protocols</li>
     * </ul>
     *
     * No effort is made to connect to the URL in the validation step.
     *
     * @param name Name of the configuration option to validate
     *
     * @return Parsed {@link URL}
     *
     * @throws ConfigException Thrown if the URL is missing, malformed, or uses an
     *                         unsupported protocol
     */
    public URL validateUrl(String name) {
        String value = validateString(name);
        URL url;

        try {
            url = new URL(value);
        } catch (MalformedURLException e) {
            throw new ConfigException(name, value, String.format("The OAuth configuration option %s contains a URL (%s) that is malformed: %s", name, value, e.getMessage()));
        }

        String protocol = url.getProtocol();

        if (protocol == null || protocol.trim().isEmpty())
            throw new ConfigException(name, value, String.format("The OAuth configuration option %s contains a URL (%s) that is missing the protocol", name, value));

        protocol = protocol.toLowerCase(Locale.ROOT);

        if (!(protocol.equals("http") || protocol.equals("https") || protocol.equals("file")))
            throw new ConfigException(name, value, String.format("The OAuth configuration option %s contains a URL (%s) that contains an invalid protocol (%s); only \"http\", \"https\", and \"file\" protocol are supported", name, value, protocol));

        return url;
    }

    public String validateString(String name) throws ValidateException {
        return validateString(name, true);
    }

    /**
     * Validates that the configured value, if supplied, is a non-empty, non-whitespace-only
     * string; whitespace is trimmed off of the beginning and end.
     *
     * @param name       Name of the configuration option to validate
     * @param isRequired Whether a missing/blank value is an error
     *
     * @return Trimmed value, or <code>null</code> if absent/blank and not required
     *
     * @throws ConfigException Thrown if the value is required but missing or whitespace only.
     *                         (The declared {@link ValidateException} is retained for
     *                         signature compatibility but is not thrown here.)
     */
    public String validateString(String name, boolean isRequired) throws ValidateException {
        String value = get(name);

        if (value == null) {
            if (isRequired)
                throw new ConfigException(String.format("The OAuth configuration option %s value must be non-null", name));
            else
                return null;
        }

        value = value.trim();

        if (value.isEmpty()) {
            if (isRequired)
                throw new ConfigException(String.format("The OAuth configuration option %s value must not contain only whitespace", name));
            else
                return null;
        }

        return value;
    }

    /**
     * Looks up the named configuration value, preferring the SASL-mechanism-prefixed
     * key (when a prefix was configured) over the plain key.
     *
     * @param name Plain (unprefixed) configuration key
     *
     * @return Configured value, or <code>null</code> if absent under both keys
     */
    @SuppressWarnings("unchecked")
    public <T> T get(String name) {
        // Guard against a null prefix: unguarded string concatenation would otherwise
        // probe the literal key "null<name>" via ("null" + name).
        if (prefix != null) {
            T value = (T) configs.get(prefix + name);

            if (value != null)
                return value;
        }

        return (T) configs.get(name);
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/FileTokenRetriever.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.IOException;
import java.nio.file.Path;
import org.apache.kafka.common.utils.Utils;
/**
* <code>FileTokenRetriever</code> is an {@link AccessTokenRetriever} that will load the contents,
* interpreting them as a JWT access key in the serialized form.
*
* @see AccessTokenRetriever
*/
public class FileTokenRetriever implements AccessTokenRetriever {

    // Location of the file whose contents are interpreted as a serialized JWT.
    private final Path accessTokenFile;

    // Cached file contents; populated by init() and returned by retrieve().
    private String accessToken;

    public FileTokenRetriever(Path accessTokenFile) {
        this.accessTokenFile = accessTokenFile;
    }

    /**
     * Reads the access token file into memory. Must be invoked before
     * {@link #retrieve()} is called.
     *
     * @throws IOException if the file cannot be read
     */
    @Override
    public void init() throws IOException {
        accessToken = Utils.readFileAsString(accessTokenFile.toFile().getPath());
    }

    /**
     * Returns the access token previously loaded by {@link #init()}.
     *
     * @return Serialized JWT contents of the file
     * @throws IllegalStateException if {@link #init()} has not been called
     */
    @Override
    public String retrieve() throws IOException {
        if (accessToken != null)
            return accessToken;

        throw new IllegalStateException("Access token is null; please call init() first");
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/HttpAccessTokenRetriever.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.net.HttpURLConnection;
import java.net.URL;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Collections;
import java.util.HashSet;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import javax.net.ssl.HttpsURLConnection;
import javax.net.ssl.SSLSocketFactory;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginCallbackHandler;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <code>HttpAccessTokenRetriever</code> is an {@link AccessTokenRetriever} that will
* communicate with an OAuth/OIDC provider directly via HTTP to post client credentials
* ({@link OAuthBearerLoginCallbackHandler#CLIENT_ID_CONFIG}/{@link OAuthBearerLoginCallbackHandler#CLIENT_SECRET_CONFIG})
* to a publicized token endpoint URL
* ({@link SaslConfigs#SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL}).
*
* @see AccessTokenRetriever
* @see OAuthBearerLoginCallbackHandler#CLIENT_ID_CONFIG
* @see OAuthBearerLoginCallbackHandler#CLIENT_SECRET_CONFIG
* @see OAuthBearerLoginCallbackHandler#SCOPE_CONFIG
* @see SaslConfigs#SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL
*/
public class HttpAccessTokenRetriever implements AccessTokenRetriever {

    private static final Logger log = LoggerFactory.getLogger(HttpAccessTokenRetriever.class);

    private static final Set<Integer> UNRETRYABLE_HTTP_CODES;

    // Maximum number of characters of a response body included in an error message.
    private static final int MAX_RESPONSE_BODY_LENGTH = 1000;

    public static final String AUTHORIZATION_HEADER = "Authorization";

    static {
        // This does not have to be an exhaustive list. There are other HTTP codes that
        // are defined in different RFCs (e.g. https://datatracker.ietf.org/doc/html/rfc6585)
        // that we won't worry about yet. The worst case if a status code is missing from
        // this set is that the request will be retried.
        UNRETRYABLE_HTTP_CODES = new HashSet<>();
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_BAD_REQUEST);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_UNAUTHORIZED);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_PAYMENT_REQUIRED);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_FORBIDDEN);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_NOT_FOUND);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_BAD_METHOD);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_NOT_ACCEPTABLE);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_PROXY_AUTH);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_CONFLICT);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_GONE);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_LENGTH_REQUIRED);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_PRECON_FAILED);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_ENTITY_TOO_LARGE);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_REQ_TOO_LONG);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_UNSUPPORTED_TYPE);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_NOT_IMPLEMENTED);
        UNRETRYABLE_HTTP_CODES.add(HttpURLConnection.HTTP_VERSION);
    }

    private final String clientId;

    private final String clientSecret;

    // Optional OAuth scope; may be null or blank, in which case it is omitted from the request.
    private final String scope;

    // Optional; when non-null it is installed on HTTPS connections before the request is made.
    private final SSLSocketFactory sslSocketFactory;

    private final String tokenEndpointUrl;

    private final long loginRetryBackoffMs;

    private final long loginRetryBackoffMaxMs;

    // Optional connect/read timeouts; null means use the JDK defaults.
    private final Integer loginConnectTimeoutMs;

    private final Integer loginReadTimeoutMs;

    public HttpAccessTokenRetriever(String clientId,
        String clientSecret,
        String scope,
        SSLSocketFactory sslSocketFactory,
        String tokenEndpointUrl,
        long loginRetryBackoffMs,
        long loginRetryBackoffMaxMs,
        Integer loginConnectTimeoutMs,
        Integer loginReadTimeoutMs) {
        this.clientId = Objects.requireNonNull(clientId);
        this.clientSecret = Objects.requireNonNull(clientSecret);
        this.scope = scope;
        this.sslSocketFactory = sslSocketFactory;
        this.tokenEndpointUrl = Objects.requireNonNull(tokenEndpointUrl);
        this.loginRetryBackoffMs = loginRetryBackoffMs;
        this.loginRetryBackoffMaxMs = loginRetryBackoffMaxMs;
        this.loginConnectTimeoutMs = loginConnectTimeoutMs;
        this.loginReadTimeoutMs = loginReadTimeoutMs;
    }

    /**
     * Retrieves a JWT access token in its serialized three-part form. The implementation
     * is free to determine how it should be retrieved but should not perform validation
     * on the result.
     *
     * <b>Note</b>: This is a blocking function and callers should be aware that the
     * implementation communicates over a network. The facility in the
     * {@link javax.security.auth.spi.LoginModule} from which this is ultimately called
     * does not provide an asynchronous approach.
     *
     * @return Non-<code>null</code> JWT access token string
     *
     * @throws IOException Thrown on errors related to IO during retrieval
     */
    @Override
    public String retrieve() throws IOException {
        String authorizationHeader = formatAuthorizationHeader(clientId, clientSecret);
        String requestBody = formatRequestBody(scope);
        Retry<String> retry = new Retry<>(loginRetryBackoffMs, loginRetryBackoffMaxMs);
        Map<String, String> headers = Collections.singletonMap(AUTHORIZATION_HEADER, authorizationHeader);

        String responseBody;

        try {
            // Each attempt opens a fresh connection; IOExceptions are wrapped so that the
            // retry logic can distinguish them and unwrap them afterward.
            responseBody = retry.execute(() -> {
                HttpURLConnection con = null;

                try {
                    con = (HttpURLConnection) new URL(tokenEndpointUrl).openConnection();

                    if (sslSocketFactory != null && con instanceof HttpsURLConnection)
                        ((HttpsURLConnection) con).setSSLSocketFactory(sslSocketFactory);

                    return post(con, headers, requestBody, loginConnectTimeoutMs, loginReadTimeoutMs);
                } catch (IOException e) {
                    throw new ExecutionException(e);
                } finally {
                    if (con != null)
                        con.disconnect();
                }
            });
        } catch (ExecutionException e) {
            if (e.getCause() instanceof IOException)
                throw (IOException) e.getCause();
            else
                throw new KafkaException(e.getCause());
        }

        return parseAccessToken(responseBody);
    }

    /**
     * Performs a single HTTP POST of the given request body and returns the response body.
     *
     * @throws IOException          on transport errors or retryable HTTP error codes
     * @throws UnretryableException on HTTP error codes known to be non-transient
     */
    public static String post(HttpURLConnection con,
        Map<String, String> headers,
        String requestBody,
        Integer connectTimeoutMs,
        Integer readTimeoutMs)
        throws IOException, UnretryableException {
        handleInput(con, headers, requestBody, connectTimeoutMs, readTimeoutMs);
        return handleOutput(con);
    }

    /**
     * Configures the connection (method, headers, timeouts), connects, and writes the
     * request body, if any.
     */
    private static void handleInput(HttpURLConnection con,
        Map<String, String> headers,
        String requestBody,
        Integer connectTimeoutMs,
        Integer readTimeoutMs)
        throws IOException, UnretryableException {
        log.debug("handleInput - starting post for {}", con.getURL());
        con.setRequestMethod("POST");
        con.setRequestProperty("Accept", "application/json");

        if (headers != null) {
            for (Map.Entry<String, String> header : headers.entrySet())
                con.setRequestProperty(header.getKey(), header.getValue());
        }

        con.setRequestProperty("Cache-Control", "no-cache");

        // Encode the body once so the Content-Length header and the bytes actually
        // written agree. Using requestBody.length() (a char count) would under-count
        // for any non-ASCII characters since the body is transmitted as UTF-8.
        byte[] requestBodyBytes = null;

        if (requestBody != null) {
            requestBodyBytes = requestBody.getBytes(StandardCharsets.UTF_8);
            con.setRequestProperty("Content-Length", String.valueOf(requestBodyBytes.length));
            con.setDoOutput(true);
        }

        con.setUseCaches(false);

        if (connectTimeoutMs != null)
            con.setConnectTimeout(connectTimeoutMs);

        if (readTimeoutMs != null)
            con.setReadTimeout(readTimeoutMs);

        log.debug("handleInput - preparing to connect to {}", con.getURL());
        con.connect();

        if (requestBodyBytes != null) {
            try (OutputStream os = con.getOutputStream()) {
                ByteArrayInputStream is = new ByteArrayInputStream(requestBodyBytes);
                log.debug("handleInput - preparing to write request body to {}", con.getURL());
                copy(is, os);
            }
        }
    }

    /**
     * Reads the response (or error response) from the connection and either returns the
     * body or raises an appropriate exception based on the HTTP status code.
     */
    static String handleOutput(final HttpURLConnection con) throws IOException {
        int responseCode = con.getResponseCode();
        log.debug("handleOutput - responseCode: {}", responseCode);

        // NOTE: the contents of the response should not be logged so that we don't leak any
        // sensitive data.
        String responseBody = null;

        // NOTE: It is OK to log the error response body and/or its formatted version as
        // per the OAuth spec, it doesn't include sensitive information.
        // See https://www.ietf.org/rfc/rfc6749.txt, section 5.2
        String errorResponseBody = null;

        try (InputStream is = con.getInputStream()) {
            ByteArrayOutputStream os = new ByteArrayOutputStream();
            log.debug("handleOutput - preparing to read response body from {}", con.getURL());
            copy(is, os);
            responseBody = os.toString(StandardCharsets.UTF_8.name());
        } catch (Exception e) {
            // there still can be useful error response from the servers, lets get it
            try (InputStream is = con.getErrorStream()) {
                ByteArrayOutputStream os = new ByteArrayOutputStream();
                log.debug("handleOutput - preparing to read error response body from {}", con.getURL());
                copy(is, os);
                errorResponseBody = os.toString(StandardCharsets.UTF_8.name());
            } catch (Exception e2) {
                log.warn("handleOutput - error retrieving error information", e2);
            }
            log.warn("handleOutput - error retrieving data", e);
        }

        if (responseCode == HttpURLConnection.HTTP_OK || responseCode == HttpURLConnection.HTTP_CREATED) {
            log.debug("handleOutput - responseCode: {}, error response: {}", responseCode,
                errorResponseBody);

            if (responseBody == null || responseBody.isEmpty())
                throw new IOException(String.format("The token endpoint response was unexpectedly empty despite response code %s from %s and error message %s",
                    responseCode, con.getURL(), formatErrorMessage(errorResponseBody)));

            return responseBody;
        } else {
            log.warn("handleOutput - error response code: {}, error response body: {}", responseCode,
                formatErrorMessage(errorResponseBody));

            if (UNRETRYABLE_HTTP_CODES.contains(responseCode)) {
                // We know that this is a non-transient error, so let's not keep retrying the
                // request unnecessarily.
                throw new UnretryableException(new IOException(String.format("The response code %s and error response %s was encountered reading the token endpoint response; will not attempt further retries",
                    responseCode, formatErrorMessage(errorResponseBody))));
            } else {
                // We don't know if this is a transient (retryable) error or not, so let's assume
                // it is.
                throw new IOException(String.format("The unexpected response code %s and error message %s was encountered reading the token endpoint response",
                    responseCode, formatErrorMessage(errorResponseBody)));
            }
        }
    }

    /** Copies all bytes from the input stream to the output stream. */
    static void copy(InputStream is, OutputStream os) throws IOException {
        byte[] buf = new byte[4096];
        int b;

        while ((b = is.read(buf)) != -1)
            os.write(buf, 0, b);
    }

    /**
     * Formats an OAuth error response body into a terse human-readable string, handling
     * both the RFC 6749 {@code error}/{@code error_description} shape and the
     * {@code errorCode}/{@code errorSummary} variant.
     */
    static String formatErrorMessage(String errorResponseBody) {
        // See https://www.ietf.org/rfc/rfc6749.txt, section 5.2 for the format
        // of this error message.
        if (errorResponseBody == null || errorResponseBody.trim().equals("")) {
            return "{}";
        }

        ObjectMapper mapper = new ObjectMapper();

        try {
            JsonNode rootNode = mapper.readTree(errorResponseBody);

            if (!rootNode.at("/error").isMissingNode()) {
                return String.format("{%s - %s}", rootNode.at("/error"), rootNode.at("/error_description"));
            } else if (!rootNode.at("/errorCode").isMissingNode()) {
                return String.format("{%s - %s}", rootNode.at("/errorCode"), rootNode.at("/errorSummary"));
            } else {
                return errorResponseBody;
            }
        } catch (Exception e) {
            log.warn("Error parsing error response", e);
        }

        return String.format("{%s}", errorResponseBody);
    }

    /**
     * Extracts the {@code access_token} attribute from the token endpoint's JSON response.
     *
     * @throws IOException if the response cannot be parsed or contains no access token
     */
    static String parseAccessToken(String responseBody) throws IOException {
        ObjectMapper mapper = new ObjectMapper();
        JsonNode rootNode = mapper.readTree(responseBody);
        JsonNode accessTokenNode = rootNode.at("/access_token");

        // JsonNode.at() returns a "missing" node--never null--when the JSON pointer
        // doesn't resolve, so isMissingNode() must be checked as well. Checking only
        // for null would let an absent access_token slip through to sanitizeString(),
        // which would then fail with a misleading IllegalArgumentException.
        if (accessTokenNode == null || accessTokenNode.isMissingNode()) {
            // Only grab the first N characters so that if the response body is huge, we don't
            // blow up.
            String snippet = responseBody;

            if (snippet.length() > MAX_RESPONSE_BODY_LENGTH) {
                int actualLength = responseBody.length();
                String s = responseBody.substring(0, MAX_RESPONSE_BODY_LENGTH);
                snippet = String.format("%s (trimmed to first %s characters out of %s total)", s, MAX_RESPONSE_BODY_LENGTH, actualLength);
            }

            throw new IOException(String.format("The token endpoint response did not contain an access_token value. Response: (%s)", snippet));
        }

        return sanitizeString("the token endpoint response's access_token JSON attribute", accessTokenNode.textValue());
    }

    /**
     * Builds the HTTP Basic {@code Authorization} header value from the client credentials.
     */
    static String formatAuthorizationHeader(String clientId, String clientSecret) {
        clientId = sanitizeString("the token endpoint request client ID parameter", clientId);
        clientSecret = sanitizeString("the token endpoint request client secret parameter", clientSecret);

        String s = String.format("%s:%s", clientId, clientSecret);
        // Per RFC-7617, we need to use the *non-URL safe* base64 encoder. See KAFKA-14496.
        String encoded = Base64.getEncoder().encodeToString(Utils.utf8(s));
        return String.format("Basic %s", encoded);
    }

    /**
     * Builds the {@code application/x-www-form-urlencoded} request body for the
     * client-credentials grant, appending the (URL-encoded) scope when present.
     */
    static String formatRequestBody(String scope) throws IOException {
        try {
            StringBuilder requestParameters = new StringBuilder();
            requestParameters.append("grant_type=client_credentials");

            if (scope != null && !scope.trim().isEmpty()) {
                scope = scope.trim();
                String encodedScope = URLEncoder.encode(scope, StandardCharsets.UTF_8.name());
                requestParameters.append("&scope=").append(encodedScope);
            }

            return requestParameters.toString();
        } catch (UnsupportedEncodingException e) {
            // The world has gone crazy!
            // Preserve the original exception as the cause rather than discarding it.
            throw new IOException(String.format("Encoding %s not supported", StandardCharsets.UTF_8.name()), e);
        }
    }

    /** Validates that the named value is non-null and non-blank, returning it trimmed. */
    private static String sanitizeString(String name, String value) {
        if (value == null)
            throw new IllegalArgumentException(String.format("The value for %s must be non-null", name));

        if (value.isEmpty())
            throw new IllegalArgumentException(String.format("The value for %s must be non-empty", name));

        value = value.trim();

        if (value.isEmpty())
            throw new IllegalArgumentException(String.format("The value for %s must not contain only whitespace", name));

        return value;
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/Initable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.IOException;
/**
 * {@code Initable} marks a component (e.g. a token retriever or key resolver) that may
 * need one-time setup before use. The default implementation is a no-op, so implementers
 * only override {@link #init()} when they actually have setup work to do.
 */
public interface Initable {
/**
 * Lifecycle method to perform any one-time initialization of the retriever. This must
 * be performed by the caller to ensure the correct state before methods are invoked.
 *
 * @throws IOException Thrown on errors related to IO during initialization
 */
default void init() throws IOException {
// This method left intentionally blank.
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/JaasOptionsUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.net.URL;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.net.ssl.SSLSocketFactory;
import javax.security.auth.login.AppConfigurationEntry;
import org.apache.kafka.common.config.AbstractConfig;
import org.apache.kafka.common.config.ConfigDef;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.network.Mode;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule;
import org.apache.kafka.common.security.ssl.DefaultSslEngineFactory;
import org.apache.kafka.common.security.ssl.SslFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <code>JaasOptionsUtils</code> is a utility class to perform logic for the JAAS options and
* is separated out here for easier, more direct testing.
*/
/**
 * <code>JaasOptionsUtils</code> is a utility class to perform logic for the JAAS options and
 * is separated out here for easier, more direct testing.
 */
public class JaasOptionsUtils {

    private static final Logger log = LoggerFactory.getLogger(JaasOptionsUtils.class);

    // The raw JAAS module options for this login configuration.
    private final Map<String, Object> options;

    public JaasOptionsUtils(Map<String, Object> options) {
        this.options = options;
    }

    /**
     * Extracts the module options from the single expected JAAS configuration entry.
     *
     * @param saslMechanism     Must be the OAUTHBEARER mechanism
     * @param jaasConfigEntries Must contain exactly one non-null entry
     * @return Unmodifiable view of the entry's options
     */
    public static Map<String, Object> getOptions(String saslMechanism, List<AppConfigurationEntry> jaasConfigEntries) {
        if (!OAuthBearerLoginModule.OAUTHBEARER_MECHANISM.equals(saslMechanism))
            throw new IllegalArgumentException(String.format("Unexpected SASL mechanism: %s", saslMechanism));

        if (Objects.requireNonNull(jaasConfigEntries).size() != 1 || jaasConfigEntries.get(0) == null)
            throw new IllegalArgumentException(String.format("Must supply exactly 1 non-null JAAS mechanism configuration (size was %d)", jaasConfigEntries.size()));

        return Collections.unmodifiableMap(jaasConfigEntries.get(0).getOptions());
    }

    /** An SSL socket factory is only needed when the endpoint is reached over HTTPS. */
    public boolean shouldCreateSSLSocketFactory(URL url) {
        return url.getProtocol().equalsIgnoreCase("https");
    }

    /** Extracts the client-side SSL configuration (the "ssl."-prefixed options). */
    public Map<String, ?> getSslClientConfig() {
        ConfigDef sslConfigDef = new ConfigDef();
        sslConfigDef.withClientSslSupport();
        AbstractConfig sslClientConfig = new AbstractConfig(sslConfigDef, options, "ssl");
        return sslClientConfig.values();
    }

    /** Builds an {@link SSLSocketFactory} from the client-side SSL options. */
    public SSLSocketFactory createSSLSocketFactory() {
        Map<String, ?> sslClientConfig = getSslClientConfig();
        SslFactory sslFactory = new SslFactory(Mode.CLIENT);
        sslFactory.configure(sslClientConfig);
        SSLSocketFactory socketFactory = ((DefaultSslEngineFactory) sslFactory.sslEngineFactory()).sslContext().getSocketFactory();
        log.debug("Created SSLSocketFactory: {}", sslClientConfig);
        return socketFactory;
    }

    /**
     * Looks up and validates the given option as a required, non-blank string.
     *
     * @throws ValidateException if the value is missing or blank
     */
    public String validateString(String name) throws ValidateException {
        return validateString(name, true);
    }

    /**
     * Looks up the given option and validates that it is a usable string.
     *
     * @param name       Option name
     * @param isRequired Whether a missing/blank value is an error or simply yields {@code null}
     * @return Trimmed value, or {@code null} when absent/blank and not required
     */
    public String validateString(String name, boolean isRequired) throws ValidateException {
        String raw = (String) options.get(name);

        if (raw == null) {
            if (!isRequired)
                return null;

            throw new ConfigException(String.format("The OAuth configuration option %s value must be non-null", name));
        }

        String trimmed = raw.trim();

        if (trimmed.isEmpty()) {
            if (!isRequired)
                return null;

            throw new ConfigException(String.format("The OAuth configuration option %s value must not contain only whitespace", name));
        }

        return trimmed;
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/JwksFileVerificationKeyResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.IOException;
import java.nio.file.Path;
import java.security.Key;
import java.util.List;
import org.apache.kafka.common.utils.Utils;
import org.jose4j.jwk.JsonWebKeySet;
import org.jose4j.jws.JsonWebSignature;
import org.jose4j.jwx.JsonWebStructure;
import org.jose4j.keys.resolvers.JwksVerificationKeyResolver;
import org.jose4j.keys.resolvers.VerificationKeyResolver;
import org.jose4j.lang.JoseException;
import org.jose4j.lang.UnresolvableKeyException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <code>JwksFileVerificationKeyResolver</code> is a {@link VerificationKeyResolver} implementation
* that will load the JWKS from the given file system directory.
*
* A <a href="https://datatracker.ietf.org/doc/html/rfc7517#section-5">JWKS (JSON Web Key Set)</a>
* is a JSON document provided by the OAuth/OIDC provider that lists the keys used to sign the JWTs
* it issues.
*
* Here is a sample JWKS JSON document:
*
* <pre>
* {
* "keys": [
* {
* "kty": "RSA",
* "alg": "RS256",
* "kid": "abc123",
* "use": "sig",
* "e": "AQAB",
* "n": "..."
* },
* {
* "kty": "RSA",
* "alg": "RS256",
* "kid": "def456",
* "use": "sig",
* "e": "AQAB",
* "n": "..."
* }
* ]
* }
* </pre>
*
* Without going into too much detail, the array of keys enumerates the key data that the provider
* is using to sign the JWT. The key ID (<code>kid</code>) is referenced by the JWT's header in
* order to match up the JWT's signing key with the key in the JWKS. During the validation step of
* the broker, the jose4j OAuth library will use the contents of the appropriate key in the JWKS
* to validate the signature.
*
* Given that the JWKS is referenced by the JWT, the JWKS must be made available by the
* OAuth/OIDC provider so that a JWT can be validated.
*
* @see org.apache.kafka.common.config.SaslConfigs#SASL_OAUTHBEARER_TOKEN_ENDPOINT_URL
* @see VerificationKeyResolver
*/
/**
 * <code>JwksFileVerificationKeyResolver</code> is a {@link VerificationKeyResolver} that
 * loads a <a href="https://datatracker.ietf.org/doc/html/rfc7517#section-5">JWKS (JSON Web
 * Key Set)</a> document from a local file and delegates key resolution to a
 * {@link JwksVerificationKeyResolver} built from its keys. The JWKS lists the keys the
 * OAuth/OIDC provider uses to sign JWTs; a JWT's <code>kid</code> header selects the
 * matching key during signature validation.
 *
 * @see VerificationKeyResolver
 */
public class JwksFileVerificationKeyResolver implements CloseableVerificationKeyResolver {

    private static final Logger log = LoggerFactory.getLogger(JwksFileVerificationKeyResolver.class);

    // File system location of the JWKS JSON document.
    private final Path jwksFile;

    // Resolver constructed from the file's keys; null until init() succeeds.
    private VerificationKeyResolver delegate;

    public JwksFileVerificationKeyResolver(Path jwksFile) {
        this.jwksFile = jwksFile;
    }

    /**
     * Reads and parses the JWKS file and builds the delegate resolver.
     *
     * @throws IOException if the file cannot be read or its contents are not a valid JWKS
     */
    @Override
    public void init() throws IOException {
        log.debug("Starting creation of new VerificationKeyResolver from {}", jwksFile);
        String json = Utils.readFileAsString(jwksFile.toFile().getPath());

        JsonWebKeySet jwks;

        try {
            jwks = new JsonWebKeySet(json);
        } catch (JoseException e) {
            throw new IOException(e);
        }

        delegate = new JwksVerificationKeyResolver(jwks.getJsonWebKeys());
    }

    /**
     * Resolves the verification key for the given signature via the delegate built in
     * {@link #init()}.
     *
     * @throws UnresolvableKeyException if {@link #init()} has not been called or the
     *                                  delegate cannot find a matching key
     */
    @Override
    public Key resolveKey(JsonWebSignature jws, List<JsonWebStructure> nestingContext) throws UnresolvableKeyException {
        if (delegate == null)
            throw new UnresolvableKeyException("VerificationKeyResolver delegate is null; please call init() first");

        return delegate.resolveKey(jws, nestingContext);
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/LoginAccessTokenValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import static org.apache.kafka.common.config.SaslConfigs.DEFAULT_SASL_OAUTHBEARER_SCOPE_CLAIM_NAME;
import static org.apache.kafka.common.config.SaslConfigs.DEFAULT_SASL_OAUTHBEARER_SUB_CLAIM_NAME;
import java.util.Collection;
import java.util.Collections;
import java.util.Map;
import java.util.Set;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
import org.apache.kafka.common.security.oauthbearer.internals.unsecured.OAuthBearerIllegalTokenException;
import org.apache.kafka.common.security.oauthbearer.internals.unsecured.OAuthBearerUnsecuredJws;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* LoginAccessTokenValidator is an implementation of {@link AccessTokenValidator} that is used
* by the client to perform some rudimentary validation of the JWT access token that is received
* as part of the response from posting the client credentials to the OAuth/OIDC provider's
* token endpoint.
*
* The validation steps performed are:
*
* <ol>
* <li>
* Basic structural validation of the <code>b64token</code> value as defined in
* <a href="https://tools.ietf.org/html/rfc6750#section-2.1">RFC 6750 Section 2.1</a>
* </li>
* <li>Basic conversion of the token into an in-memory map</li>
* <li>Presence of scope, <code>exp</code>, subject, and <code>iat</code> claims</li>
* </ol>
*/
/**
 * <code>LoginAccessTokenValidator</code> is an {@link AccessTokenValidator} used on the
 * client side to perform lightweight validation of the JWT access token returned by the
 * OAuth/OIDC provider's token endpoint: basic structural checks on the
 * <code>b64token</code> (per <a href="https://tools.ietf.org/html/rfc6750#section-2.1">RFC
 * 6750 Section 2.1</a>), conversion of the payload into a map, and presence checks on the
 * scope, <code>exp</code>, subject, and <code>iat</code> claims.
 */
public class LoginAccessTokenValidator implements AccessTokenValidator {

    private static final Logger log = LoggerFactory.getLogger(LoginAccessTokenValidator.class);

    public static final String EXPIRATION_CLAIM_NAME = "exp";

    public static final String ISSUED_AT_CLAIM_NAME = "iat";

    // Effective claim names, after applying any caller-supplied overrides.
    private final String scopeClaimName;

    private final String subClaimName;

    /**
     * Creates a new LoginAccessTokenValidator that will be used by the client for lightweight
     * validation of the JWT.
     *
     * @param scopeClaimName Name of the scope claim to use; must be non-<code>null</code>
     * @param subClaimName   Name of the subject claim to use; must be non-<code>null</code>
     */
    public LoginAccessTokenValidator(String scopeClaimName, String subClaimName) {
        this.scopeClaimName = ClaimValidationUtils.validateClaimNameOverride(DEFAULT_SASL_OAUTHBEARER_SCOPE_CLAIM_NAME, scopeClaimName);
        this.subClaimName = ClaimValidationUtils.validateClaimNameOverride(DEFAULT_SASL_OAUTHBEARER_SUB_CLAIM_NAME, subClaimName);
    }

    /**
     * Accepts an OAuth JWT access token in base-64 encoded format, validates, and returns an
     * OAuthBearerToken.
     *
     * @param accessToken Non-<code>null</code> JWT access token
     * @return {@link OAuthBearerToken}
     * @throws ValidateException Thrown on errors performing validation of given token
     */
    @SuppressWarnings("unchecked")
    public OAuthBearerToken validate(String accessToken) throws ValidateException {
        SerializedJwt jwt = new SerializedJwt(accessToken);
        Map<String, Object> claims;

        try {
            claims = OAuthBearerUnsecuredJws.toMap(jwt.getPayload());
        } catch (OAuthBearerIllegalTokenException e) {
            throw new ValidateException(String.format("Could not validate the access token: %s", e.getMessage()), e);
        }

        // The scope claim may be a single string or a collection of strings; normalize
        // either form to a collection, falling back to an empty set for anything else.
        Object rawScope = getClaim(claims, scopeClaimName);
        Collection<String> rawScopeCollection;

        if (rawScope instanceof String) {
            rawScopeCollection = Collections.singletonList((String) rawScope);
        } else if (rawScope instanceof Collection) {
            rawScopeCollection = (Collection<String>) rawScope;
        } else {
            rawScopeCollection = Collections.emptySet();
        }

        Number rawExpiration = (Number) getClaim(claims, EXPIRATION_CLAIM_NAME);
        String rawSubject = (String) getClaim(claims, subClaimName);
        Number rawIssuedAt = (Number) getClaim(claims, ISSUED_AT_CLAIM_NAME);

        Set<String> scopes = ClaimValidationUtils.validateScopes(scopeClaimName, rawScopeCollection);
        // exp/iat are expressed in seconds in the JWT; the token API uses milliseconds.
        long expiration = ClaimValidationUtils.validateExpiration(EXPIRATION_CLAIM_NAME,
            rawExpiration == null ? null : rawExpiration.longValue() * 1000L);
        String subject = ClaimValidationUtils.validateSubject(subClaimName, rawSubject);
        Long issuedAt = ClaimValidationUtils.validateIssuedAt(ISSUED_AT_CLAIM_NAME,
            rawIssuedAt == null ? null : rawIssuedAt.longValue() * 1000L);

        return new BasicOAuthBearerToken(accessToken, scopes, expiration, subject, issuedAt);
    }

    /** Reads a single claim from the payload, logging the name and value at debug level. */
    private Object getClaim(Map<String, Object> claims, String claimName) {
        Object value = claims.get(claimName);
        log.debug("getClaim - {}: {}", claimName, value);
        return value;
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/RefreshingHttpsJwks.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.Closeable;
import java.io.IOException;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import org.apache.kafka.common.utils.Time;
import org.jose4j.jwk.HttpsJwks;
import org.jose4j.jwk.JsonWebKey;
import org.jose4j.lang.JoseException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Implementation of {@link HttpsJwks} that will periodically refresh the JWKS cache to reduce or
* even prevent HTTP/HTTPS traffic in the hot path of validation. It is assumed that it's
* possible to receive a JWT that contains a <code>kid</code> that points to yet-unknown JWK,
* thus requiring a connection to the OAuth/OIDC provider to be made. Hopefully, in practice,
* keys are made available for some amount of time before they're used within JWTs.
*
* This instance is created and provided to the
* {@link org.jose4j.keys.resolvers.HttpsJwksVerificationKeyResolver} that is used when using
* an HTTP-/HTTPS-based {@link org.jose4j.keys.resolvers.VerificationKeyResolver}, which is then
* provided to the {@link ValidatorAccessTokenValidator} to use in validating the signature of
* a JWT.
*
* @see org.jose4j.keys.resolvers.HttpsJwksVerificationKeyResolver
* @see org.jose4j.keys.resolvers.VerificationKeyResolver
* @see ValidatorAccessTokenValidator
*/
public final class RefreshingHttpsJwks implements Initable, Closeable {

    private static final Logger log = LoggerFactory.getLogger(RefreshingHttpsJwks.class);

    /** Maximum number of recently-unresolved key IDs kept in {@link #missingKeyIds}. */
    private static final int MISSING_KEY_ID_CACHE_MAX_ENTRIES = 16;

    /** Window during which repeated expedited-refresh requests for the same key ID are suppressed. */
    static final long MISSING_KEY_ID_CACHE_IN_FLIGHT_MS = 60000;

    /** Key IDs longer than this are assumed to come from a malformed JWT and are not resolved or cached. */
    static final int MISSING_KEY_ID_MAX_KEY_LENGTH = 1000;

    private static final int SHUTDOWN_TIMEOUT = 10;

    private static final TimeUnit SHUTDOWN_TIME_UNIT = TimeUnit.SECONDS;

    /**
     * {@link HttpsJwks} does the actual work of contacting the OAuth/OIDC endpoint to get the
     * JWKS. In some cases, the call to {@link HttpsJwks#getJsonWebKeys()} will trigger a call
     * to {@link HttpsJwks#refresh()} which will block the current thread in network I/O. We cache
     * the JWKS ourselves (see {@link #jsonWebKeys}) to avoid the network I/O.
     *
     * We want to be very careful where we use the {@link HttpsJwks} instance so that we don't
     * perform any operation (directly or indirectly) that could cause blocking. This is because
     * the JWKS logic is part of the larger authentication logic which operates on Kafka's network
     * thread. It's OK to execute {@link HttpsJwks#getJsonWebKeys()} (which calls
     * {@link HttpsJwks#refresh()}) from within {@link #init()} as that method is called only at
     * startup, and we can afford the blocking hit there.
     */
    private final HttpsJwks httpsJwks;

    /** Single-threaded scheduler used for both the periodic and the expedited refresh tasks. */
    private final ScheduledExecutorService executorService;

    private final Time time;

    private final long refreshMs;

    private final long refreshRetryBackoffMs;

    private final long refreshRetryBackoffMaxMs;

    /**
     * Protects {@link #missingKeyIds} and {@link #jsonWebKeys}.
     */
    private final ReadWriteLock refreshLock = new ReentrantReadWriteLock();

    private final Map<String, Long> missingKeyIds;

    /**
     * Flag to prevent concurrent refresh invocations.
     */
    private final AtomicBoolean refreshInProgressFlag = new AtomicBoolean(false);

    /**
     * As mentioned in the comments for {@link #httpsJwks}, we cache the JWKS ourselves so that
     * we can return the list immediately without any network I/O. They are only cached within
     * calls to {@link #refresh()}.
     */
    private List<JsonWebKey> jsonWebKeys;

    private boolean isInitialized;

    /**
     * Creates a <code>RefreshingHttpsJwks</code> that will be used by the
     * {@link RefreshingHttpsJwksVerificationKeyResolver} to resolve new key IDs in JWTs.
     *
     * @param time                     {@link Time} instance
     * @param httpsJwks                {@link HttpsJwks} instance from which to retrieve the JWKS
     *                                 based on the OAuth/OIDC standard
     * @param refreshMs                The number of milliseconds between refresh passes to connect
     *                                 to the OAuth/OIDC JWKS endpoint to retrieve the latest set
     * @param refreshRetryBackoffMs    Time for delay after initial failed attempt to retrieve JWKS
     * @param refreshRetryBackoffMaxMs Maximum time to retrieve JWKS
     *
     * @throws IllegalArgumentException if {@code refreshMs} is not positive
     */
    public RefreshingHttpsJwks(Time time,
                               HttpsJwks httpsJwks,
                               long refreshMs,
                               long refreshRetryBackoffMs,
                               long refreshRetryBackoffMaxMs) {
        if (refreshMs <= 0)
            throw new IllegalArgumentException("JWKS validation key refresh configuration value retryWaitMs value must be positive");

        this.httpsJwks = httpsJwks;
        this.time = time;
        this.refreshMs = refreshMs;
        this.refreshRetryBackoffMs = refreshRetryBackoffMs;
        this.refreshRetryBackoffMaxMs = refreshRetryBackoffMaxMs;
        this.executorService = Executors.newSingleThreadScheduledExecutor();
        // Access-ordered LRU map bounded to MISSING_KEY_ID_CACHE_MAX_ENTRIES entries; eldest
        // entries are evicted automatically so an attacker cycling kid values can't grow it.
        this.missingKeyIds = new LinkedHashMap<String, Long>(MISSING_KEY_ID_CACHE_MAX_ENTRIES, .75f, true) {
            @Override
            protected boolean removeEldestEntry(Map.Entry<String, Long> eldest) {
                return this.size() > MISSING_KEY_ID_CACHE_MAX_ENTRIES;
            }
        };
    }

    /**
     * Performs the initial (blocking) JWKS fetch and schedules the periodic background refresh.
     * Must be called exactly once, before {@link #getJsonWebKeys()} is used.
     *
     * @throws IOException if the initial JWKS retrieval fails
     */
    @Override
    public void init() throws IOException {
        try {
            log.debug("init started");

            List<JsonWebKey> localJWKs;

            try {
                localJWKs = httpsJwks.getJsonWebKeys();
            } catch (JoseException e) {
                throw new IOException("Could not refresh JWKS", e);
            }

            try {
                refreshLock.writeLock().lock();
                jsonWebKeys = Collections.unmodifiableList(localJWKs);
            } finally {
                refreshLock.writeLock().unlock();
            }

            // Since we just grabbed the keys (which will have invoked a HttpsJwks.refresh()
            // internally), we can delay our first invocation by refreshMs.
            //
            // Note: we refer to this as a _scheduled_ refresh.
            executorService.scheduleAtFixedRate(this::refresh,
                refreshMs,
                refreshMs,
                TimeUnit.MILLISECONDS);

            log.info("JWKS validation key refresh thread started with a refresh interval of {} ms", refreshMs);
        } finally {
            isInitialized = true;

            log.debug("init completed");
        }
    }

    /**
     * Shuts down the background refresh thread, waiting up to {@link #SHUTDOWN_TIMEOUT}
     * {@link #SHUTDOWN_TIME_UNIT} for it to terminate.
     */
    @Override
    public void close() {
        try {
            log.debug("close started");

            try {
                log.debug("JWKS validation key refresh thread shutting down");
                executorService.shutdown();

                if (!executorService.awaitTermination(SHUTDOWN_TIMEOUT, SHUTDOWN_TIME_UNIT)) {
                    log.warn("JWKS validation key refresh thread termination did not end after {} {}",
                        SHUTDOWN_TIMEOUT, SHUTDOWN_TIME_UNIT);
                }
            } catch (InterruptedException e) {
                // Restore the interrupt status so callers further up the stack can observe
                // and react to the interruption; swallowing it silently hides the signal.
                Thread.currentThread().interrupt();
                log.warn("JWKS validation key refresh thread error during close", e);
            }
        } finally {
            log.debug("close completed");
        }
    }

    /**
     * Our implementation avoids the blocking call within {@link HttpsJwks#refresh()} that is
     * sometimes called internal to {@link HttpsJwks#getJsonWebKeys()}. We want to avoid any
     * blocking I/O as this code is running in the authentication path on the Kafka network thread.
     *
     * The list may be stale up to {@link #refreshMs}.
     *
     * Note: the declared {@link JoseException}/{@link IOException} are retained for caller
     * compatibility; this implementation only reads the local cache and does not perform I/O.
     *
     * @return {@link List} of {@link JsonWebKey} instances
     *
     * @throws JoseException Thrown if a problem is encountered parsing the JSON content into JWKs
     * @throws IOException   Thrown if a problem is encountered making the HTTP request
     */
    public List<JsonWebKey> getJsonWebKeys() throws JoseException, IOException {
        if (!isInitialized)
            throw new IllegalStateException("Please call init() first");

        try {
            refreshLock.readLock().lock();
            return jsonWebKeys;
        } finally {
            refreshLock.readLock().unlock();
        }
    }

    /** Returns the JWKS endpoint location of the underlying {@link HttpsJwks}. */
    public String getLocation() {
        return httpsJwks.getLocation();
    }

    /**
     * <p>
     * <code>refresh</code> is an internal method that will refresh the JWKS cache and is
     * invoked in one of two ways:
     *
     * <ol>
     *     <li>Scheduled</li>
     *     <li>Expedited</li>
     * </ol>
     * </p>
     *
     * <p>
     * The <i>scheduled</i> refresh is scheduled in {@link #init()} and runs every
     * {@link #refreshMs} milliseconds. An <i>expedited</i> refresh is performed when an
     * incoming JWT refers to a key ID that isn't in our JWKS cache ({@link #jsonWebKeys})
     * and we try to perform a refresh sooner than the next scheduled refresh.
     * </p>
     */
    private void refresh() {
        // Both the scheduled and expedited paths funnel through here; the CAS ensures that at
        // most one refresh performs network I/O at a time and the others back off immediately.
        if (!refreshInProgressFlag.compareAndSet(false, true)) {
            log.debug("OAuth JWKS refresh is already in progress; ignoring concurrent refresh");
            return;
        }

        try {
            log.info("OAuth JWKS refresh of {} starting", httpsJwks.getLocation());

            Retry<List<JsonWebKey>> retry = new Retry<>(refreshRetryBackoffMs, refreshRetryBackoffMaxMs);

            List<JsonWebKey> localJWKs = retry.execute(() -> {
                try {
                    log.debug("JWKS validation key calling refresh of {} starting", httpsJwks.getLocation());
                    // Call the *actual* refresh implementation that will more than likely issue
                    // HTTP(S) calls over the network.
                    httpsJwks.refresh();
                    List<JsonWebKey> jwks = httpsJwks.getJsonWebKeys();
                    log.debug("JWKS validation key refresh of {} complete", httpsJwks.getLocation());
                    return jwks;
                } catch (Exception e) {
                    throw new ExecutionException(e);
                }
            });

            try {
                refreshLock.writeLock().lock();

                // Any key ID that is now present in the JWKS is no longer "missing", so drop it
                // from the negative cache before publishing the new key list.
                for (JsonWebKey jwk : localJWKs)
                    missingKeyIds.remove(jwk.getKeyId());

                jsonWebKeys = Collections.unmodifiableList(localJWKs);
            } finally {
                refreshLock.writeLock().unlock();
            }

            log.info("OAuth JWKS refresh of {} complete", httpsJwks.getLocation());
        } catch (ExecutionException e) {
            // Keep serving the previous (possibly stale) keys rather than failing validation
            // outright; the next scheduled refresh will try again.
            log.warn("OAuth JWKS refresh of {} encountered an error; not updating local JWKS cache", httpsJwks.getLocation(), e);
        } finally {
            refreshInProgressFlag.set(false);
        }
    }

    /**
     * <p>
     * <code>maybeExpediteRefresh</code> is a public method that will trigger a refresh of
     * the JWKS cache if all of the following conditions are met:
     *
     * <ul>
     *     <li>The given <code>keyId</code> parameter is non-<code>null</code> and no longer
     *     than {@link #MISSING_KEY_ID_MAX_KEY_LENGTH} characters</li>
     *     <li>The key isn't in the process of being expedited already</li>
     * </ul>
     *
     * <p>
     * This <i>expedited</i> refresh is scheduled immediately.
     * </p>
     *
     * @param keyId JWT key ID; may be <code>null</code> if the JWT carried no kid header
     * @return <code>true</code> if an expedited refresh was scheduled, <code>false</code> otherwise
     */
    public boolean maybeExpediteRefresh(String keyId) {
        if (keyId == null) {
            // A JWT without a kid header can never match a newly-published key, so there is
            // nothing to expedite. This also guards against an NPE from length() below.
            return false;
        }

        if (keyId.length() > MISSING_KEY_ID_MAX_KEY_LENGTH) {
            // Although there's no limit on the length of the key ID, they're generally
            // "reasonably" short. If we have a very long key ID length, we're going to assume
            // the JWT is malformed, and we will not actually try to resolve the key.
            //
            // In this case, let's prevent blowing out our memory in two ways:
            //
            //     1. Don't try to resolve the key as the large ID will sit in our cache
            //     2. Report the issue in the logs but include only the first N characters
            int actualLength = keyId.length();
            String s = keyId.substring(0, MISSING_KEY_ID_MAX_KEY_LENGTH);
            String snippet = String.format("%s (trimmed to first %s characters out of %s total)", s, MISSING_KEY_ID_MAX_KEY_LENGTH, actualLength);
            log.warn("Key ID {} was too long to cache", snippet);
            return false;
        } else {
            try {
                refreshLock.writeLock().lock();

                Long nextCheckTime = missingKeyIds.get(keyId);
                long currTime = time.milliseconds();
                log.debug("For key ID {}, nextCheckTime: {}, currTime: {}", keyId, nextCheckTime, currTime);

                if (nextCheckTime == null || nextCheckTime <= currTime) {
                    // If there's no entry in the missing key ID cache for the incoming key ID,
                    // or it has expired, schedule a refresh ASAP.
                    nextCheckTime = currTime + MISSING_KEY_ID_CACHE_IN_FLIGHT_MS;
                    missingKeyIds.put(keyId, nextCheckTime);
                    executorService.schedule(this::refresh, 0, TimeUnit.MILLISECONDS);
                    return true;
                } else {
                    return false;
                }
            } finally {
                refreshLock.writeLock().unlock();
            }
        }
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/RefreshingHttpsJwksVerificationKeyResolver.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.io.IOException;
import java.security.Key;
import java.util.List;
import org.jose4j.jwk.HttpsJwks;
import org.jose4j.jwk.JsonWebKey;
import org.jose4j.jwk.VerificationJwkSelector;
import org.jose4j.jws.JsonWebSignature;
import org.jose4j.jwx.JsonWebStructure;
import org.jose4j.keys.resolvers.VerificationKeyResolver;
import org.jose4j.lang.JoseException;
import org.jose4j.lang.UnresolvableKeyException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* <code>RefreshingHttpsJwksVerificationKeyResolver</code> is a
* {@link VerificationKeyResolver} implementation that will periodically refresh the
* JWKS using its {@link HttpsJwks} instance.
*
* A <a href="https://datatracker.ietf.org/doc/html/rfc7517#section-5">JWKS (JSON Web Key Set)</a>
* is a JSON document provided by the OAuth/OIDC provider that lists the keys used to sign the JWTs
* it issues.
*
* Here is a sample JWKS JSON document:
*
* <pre>
* {
* "keys": [
* {
* "kty": "RSA",
* "alg": "RS256",
* "kid": "abc123",
* "use": "sig",
* "e": "AQAB",
* "n": "..."
* },
* {
* "kty": "RSA",
* "alg": "RS256",
* "kid": "def456",
* "use": "sig",
* "e": "AQAB",
* "n": "..."
* }
* ]
* }
* </pre>
*
* Without going into too much detail, the array of keys enumerates the key data that the provider
* is using to sign the JWT. The key ID (<code>kid</code>) is referenced by the JWT's header in
* order to match up the JWT's signing key with the key in the JWKS. During the validation step of
* the broker, the jose4j OAuth library will use the contents of the appropriate key in the JWKS
* to validate the signature.
*
* Given that the JWKS is referenced by the JWT, the JWKS must be made available by the
* OAuth/OIDC provider so that a JWT can be validated.
*
* @see CloseableVerificationKeyResolver
* @see VerificationKeyResolver
* @see RefreshingHttpsJwks
* @see HttpsJwks
*/
public class RefreshingHttpsJwksVerificationKeyResolver implements CloseableVerificationKeyResolver {

    private static final Logger log = LoggerFactory.getLogger(RefreshingHttpsJwksVerificationKeyResolver.class);

    private final RefreshingHttpsJwks refreshingHttpsJwks;

    private final VerificationJwkSelector verificationJwkSelector;

    private boolean isInitialized;

    /**
     * Creates a resolver backed by the given refreshing JWKS cache.
     *
     * @param refreshingHttpsJwks JWKS cache used to look up candidate verification keys
     */
    public RefreshingHttpsJwksVerificationKeyResolver(RefreshingHttpsJwks refreshingHttpsJwks) {
        this.refreshingHttpsJwks = refreshingHttpsJwks;
        this.verificationJwkSelector = new VerificationJwkSelector();
    }

    /**
     * Initializes the underlying {@link RefreshingHttpsJwks}; must be called before
     * {@link #resolveKey(JsonWebSignature, List)}.
     *
     * @throws IOException if the initial JWKS retrieval fails
     */
    @Override
    public void init() throws IOException {
        try {
            log.debug("init started");

            refreshingHttpsJwks.init();
        } finally {
            isInitialized = true;

            log.debug("init completed");
        }
    }

    @Override
    public void close() {
        try {
            log.debug("close started");

            refreshingHttpsJwks.close();
        } finally {
            log.debug("close completed");
        }
    }

    /**
     * Resolves the verification key for the given JWS by matching its header against the cached
     * JWKS. If no key matches, an expedited JWKS refresh may be requested so that a subsequent
     * attempt can succeed once the provider publishes the key.
     *
     * @throws UnresolvableKeyException if no suitable key is found or the JWKS lookup fails
     */
    @Override
    public Key resolveKey(JsonWebSignature jws, List<JsonWebStructure> nestingContext) throws UnresolvableKeyException {
        if (!isInitialized)
            throw new IllegalStateException("Please call init() first");

        List<JsonWebKey> jwks;
        JsonWebKey jwk;

        // Keep only the fallible lookup/selection inside the try. In the original code the
        // "no suitable key" UnresolvableKeyException was thrown inside this try and — because
        // UnresolvableKeyException extends JoseException — was caught by the method's own
        // catch clause and re-wrapped with a misleading "unexpected exception" message.
        try {
            jwks = refreshingHttpsJwks.getJsonWebKeys();
            jwk = verificationJwkSelector.select(jws, jwks);
        } catch (JoseException | IOException e) {
            StringBuilder sb = new StringBuilder();
            sb.append("Unable to find a suitable verification key for JWS w/ header ").append(jws.getHeaders().getFullHeaderAsJsonString());
            sb.append(" due to an unexpected exception (").append(e).append(") while obtaining or using keys from JWKS endpoint at ").append(
                refreshingHttpsJwks.getLocation());
            throw new UnresolvableKeyException(sb.toString(), e);
        }

        if (jwk != null)
            return jwk.getKey();

        // No cached key matched; ask the cache to refresh ahead of schedule in case the
        // provider has published a new key since our last fetch.
        String keyId = jws.getKeyIdHeaderValue();

        if (refreshingHttpsJwks.maybeExpediteRefresh(keyId))
            log.debug("Refreshing JWKs from {} as no suitable verification key for JWS w/ header {} was found in {}", refreshingHttpsJwks.getLocation(), jws.getHeaders().getFullHeaderAsJsonString(), jwks);

        StringBuilder sb = new StringBuilder();
        sb.append("Unable to find a suitable verification key for JWS w/ header ").append(jws.getHeaders().getFullHeaderAsJsonString());
        sb.append(" from JWKs ").append(jwks).append(" obtained from ").append(
            refreshingHttpsJwks.getLocation());
        throw new UnresolvableKeyException(sb.toString());
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/Retry.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.util.concurrent.ExecutionException;
import org.apache.kafka.common.utils.Time;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Retry encapsulates the mechanism to perform a retry and then exponential
* backoff using provided wait times between attempts.
*
* @param <R> Result type
*/
public class Retry<R> {

    private static final Logger log = LoggerFactory.getLogger(Retry.class);

    private final Time time;

    private final long retryBackoffMs;

    private final long retryBackoffMaxMs;

    /**
     * Creates a retry helper that uses the system clock.
     *
     * @param retryBackoffMs    initial wait between attempts, in milliseconds; doubled per retry
     * @param retryBackoffMaxMs total time budget across all attempts, in milliseconds
     */
    public Retry(long retryBackoffMs, long retryBackoffMaxMs) {
        this(Time.SYSTEM, retryBackoffMs, retryBackoffMaxMs);
    }

    /**
     * Creates a retry helper with an injectable clock (useful for testing).
     *
     * @param time              clock used for deadlines and sleeping
     * @param retryBackoffMs    initial wait between attempts, in milliseconds; doubled per retry
     * @param retryBackoffMaxMs total time budget across all attempts, in milliseconds
     *
     * @throws IllegalArgumentException if either backoff value is negative, or the maximum is
     *                                  smaller than the initial backoff
     */
    public Retry(Time time, long retryBackoffMs, long retryBackoffMaxMs) {
        this.time = time;
        this.retryBackoffMs = retryBackoffMs;
        this.retryBackoffMaxMs = retryBackoffMaxMs;

        if (this.retryBackoffMs < 0)
            throw new IllegalArgumentException(String.format("retryBackoffMs value (%d) must be non-negative", retryBackoffMs));

        if (this.retryBackoffMaxMs < 0)
            throw new IllegalArgumentException(String.format("retryBackoffMaxMs value (%d) must be non-negative", retryBackoffMaxMs));

        if (this.retryBackoffMaxMs < this.retryBackoffMs)
            throw new IllegalArgumentException(String.format("retryBackoffMaxMs value (%d) is less than retryBackoffMs value (%d)", retryBackoffMaxMs, retryBackoffMs));
    }

    /**
     * Invokes the given operation until it succeeds, the overall time budget is exhausted, or an
     * {@link UnretryableException} is raised. The wait between attempts grows exponentially
     * (initial backoff doubled on each retry) but is clamped so it never extends past the deadline.
     *
     * @param retryable operation to invoke
     * @return the result of the first successful attempt
     * @throws ExecutionException the first error encountered, if no attempt succeeded
     */
    public R execute(Retryable<R> retryable) throws ExecutionException {
        long deadlineMs = time.milliseconds() + retryBackoffMaxMs;
        int attempt = 0;
        ExecutionException failure = null;

        while (time.milliseconds() <= deadlineMs) {
            attempt++;

            try {
                return retryable.call();
            } catch (UnretryableException e) {
                // This error was deemed not worth retrying; remember it and stop immediately.
                if (failure == null)
                    failure = new ExecutionException(e);

                break;
            } catch (ExecutionException e) {
                log.warn("Error during retry attempt {}", attempt, e);

                if (failure == null)
                    failure = e;

                // Exponential backoff, clamped so we never sleep beyond the deadline.
                long sleepMs = Math.min(retryBackoffMs * (long) Math.pow(2, attempt - 1),
                    deadlineMs - time.milliseconds());

                if (sleepMs <= 0)
                    break;

                log.warn(String.format("Attempt %d to make call resulted in an error; sleeping %d ms before retrying",
                    attempt, sleepMs), e);
                time.sleep(sleepMs);
            }
        }

        if (failure == null)
            // Should be unreachable: the loop either returns a result or records a failure.
            failure = new ExecutionException(new IllegalStateException("Exhausted all retry attempts but no attempt returned value or encountered exception"));

        throw failure;
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/Retryable.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import java.util.concurrent.ExecutionException;
/**
* Simple interface to abstract out the call that is made so that it can be retried.
*
* @param <R> Result type
*
* @see Retry
* @see UnretryableException
*/
@FunctionalInterface
public interface Retryable<R> {

    /**
     * Perform the operation and return the data from the response.
     *
     * @return Return response data, formatted in the given data type
     *
     * @throws ExecutionException   Thrown on errors connecting, writing, reading, timeouts, etc.
     *                              that can likely be tried again
     * @throws UnretryableException Thrown on errors that we can determine should not be tried again
     */
    R call() throws ExecutionException, UnretryableException;

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/SerializedJwt.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
/**
* SerializedJwt provides a modicum of structure and validation around a JWT's serialized form by
* splitting and making the three sections (header, payload, and signature) available to the user.
*/
public class SerializedJwt {

    private final String token;

    private final String header;

    private final String payload;

    private final String signature;

    /**
     * Splits the serialized JWT into its three dot-separated sections, validating that each
     * one is present and non-blank.
     *
     * @param token serialized JWT; surrounding whitespace is ignored
     *
     * @throws ValidateException if the token is null/empty, does not contain exactly three
     *                           dot-separated sections, or any section is blank
     */
    public SerializedJwt(String token) {
        if (token == null)
            token = "";
        else
            token = token.trim();

        if (token.isEmpty())
            throw new ValidateException("Empty JWT provided; expected three sections (header, payload, and signature)");

        String[] splits = token.split("\\.");

        if (splits.length != 3)
            // NOTE(review): this message embeds the full token; confirm that callers never
            // surface it anywhere a live credential could leak (logs, client responses).
            throw new ValidateException(String.format("Malformed JWT provided (%s); expected three sections (header, payload, and signature), but %s sections provided",
                token, splits.length));

        // token was already trimmed above; the previous re-trim here was redundant.
        this.token = token;
        this.header = validateSection(splits[0], "header");
        this.payload = validateSection(splits[1], "payload");
        this.signature = validateSection(splits[2], "signature");
    }

    /**
     * Returns the entire base 64-encoded JWT.
     *
     * @return JWT
     */
    public String getToken() {
        return token;
    }

    /**
     * Returns the first section--the JWT header--in its base 64-encoded form.
     *
     * @return Header section of the JWT
     */
    public String getHeader() {
        return header;
    }

    /**
     * Returns the second section--the JWT payload--in its base 64-encoded form.
     *
     * @return Payload section of the JWT
     */
    public String getPayload() {
        return payload;
    }

    /**
     * Returns the third section--the JWT signature--in its base 64-encoded form.
     *
     * @return Signature section of the JWT
     */
    public String getSignature() {
        return signature;
    }

    /**
     * Trims the given section and ensures it is non-empty.
     *
     * @param section     raw section text from the split token
     * @param sectionName human-readable name used in the error message
     * @return the trimmed section
     * @throws ValidateException if the section is blank
     */
    private String validateSection(String section, String sectionName) throws ValidateException {
        section = section.trim();

        if (section.isEmpty())
            throw new ValidateException(String.format(
                "Malformed JWT provided; expected at least three sections (header, payload, and signature), but %s section missing",
                sectionName));

        return section;
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/UnretryableException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import org.apache.kafka.common.KafkaException;
/**
 * UnretryableException indicates a permanent error: retrying the operation that produced it is
 * not expected to succeed. {@link Retry#execute(Retryable)} stops retrying immediately when an
 * attempt throws this exception, preserving it as the cause of the final failure.
 *
 * @see Retry
 * @see Retryable
 */
public class UnretryableException extends KafkaException {

    /**
     * @param message human-readable description of the permanent failure
     */
    public UnretryableException(String message) {
        super(message);
    }

    /**
     * @param cause underlying error that was determined to be permanent
     */
    public UnretryableException(Throwable cause) {
        super(cause);
    }

    /**
     * @param message human-readable description of the permanent failure
     * @param cause   underlying error that was determined to be permanent
     */
    public UnretryableException(String message, Throwable cause) {
        super(message, cause);
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/ValidateException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import javax.security.auth.callback.Callback;
import org.apache.kafka.common.KafkaException;
/**
* ValidateException is thrown in cases where a JWT access token cannot be determined to be
* valid for one reason or another. It is intended to be used when errors arise within the
* processing of a {@link javax.security.auth.callback.CallbackHandler#handle(Callback[])}.
* This error, however, is not thrown from that method directly.
*
* @see AccessTokenValidator#validate(String)
*/
public class ValidateException extends KafkaException {

    /**
     * @param message human-readable description of why the access token failed validation
     */
    public ValidateException(String message) {
        super(message);
    }

    /**
     * @param cause underlying error that invalidated the token (e.g. a parse or structural failure)
     */
    public ValidateException(Throwable cause) {
        super(cause);
    }

    /**
     * @param message human-readable description of why the access token failed validation
     * @param cause   underlying error that invalidated the token
     */
    public ValidateException(String message, Throwable cause) {
        super(message, cause);
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/ValidatorAccessTokenValidator.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import static org.jose4j.jwa.AlgorithmConstraints.DISALLOW_NONE;
import java.util.Collection;
import java.util.Collections;
import java.util.Set;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
import org.jose4j.jwt.JwtClaims;
import org.jose4j.jwt.MalformedClaimException;
import org.jose4j.jwt.NumericDate;
import org.jose4j.jwt.ReservedClaimNames;
import org.jose4j.jwt.consumer.InvalidJwtException;
import org.jose4j.jwt.consumer.JwtConsumer;
import org.jose4j.jwt.consumer.JwtConsumerBuilder;
import org.jose4j.jwt.consumer.JwtContext;
import org.jose4j.keys.resolvers.VerificationKeyResolver;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* ValidatorAccessTokenValidator is an implementation of {@link AccessTokenValidator} that is used
* by the broker to perform more extensive validation of the JWT access token that is received
* from the client, but ultimately from posting the client credentials to the OAuth/OIDC provider's
* token endpoint.
*
* The validation steps performed (primary by the jose4j library) are:
*
* <ol>
* <li>
* Basic structural validation of the <code>b64token</code> value as defined in
* <a href="https://tools.ietf.org/html/rfc6750#section-2.1">RFC 6750 Section 2.1</a>
* </li>
* <li>Basic conversion of the token into an in-memory data structure</li>
* <li>
* Presence of scope, <code>exp</code>, subject, <code>iss</code>, and
* <code>iat</code> claims
* </li>
* <li>
* Signature matching validation against the <code>kid</code> and those provided by
* the OAuth/OIDC provider's JWKS
* </li>
* </ol>
*/
public class ValidatorAccessTokenValidator implements AccessTokenValidator {

    private static final Logger log = LoggerFactory.getLogger(ValidatorAccessTokenValidator.class);

    /** jose4j consumer that enforces algorithm, signature, issuer/audience, exp, and iat checks. */
    private final JwtConsumer jwtConsumer;

    /** Name of the JWT claim that carries the token's scope value(s). */
    private final String scopeClaimName;

    /** Name of the JWT claim that carries the token's subject/principal. */
    private final String subClaimName;

    /**
     * Creates a new ValidatorAccessTokenValidator that will be used by the broker for more
     * thorough validation of the JWT.
     *
     * @param clockSkew               Optional allowance (in seconds) for clock differences
     *                                between the OAuth/OIDC identity provider and the broker;
     *                                <code>null</code> means the two clocks are assumed to be
     *                                very closely synchronized
     * @param expectedAudiences       Optional set of audiences; when non-empty, the JWT's
     *                                <code>aud</code> claim must <b>exactly</b> match one of
     *                                these values or validation fails. <code>null</code>
     *                                disables the audience check.
     * @param expectedIssuer          Optional issuer; when set, the JWT's <code>iss</code>
     *                                claim must match it <b>exactly</b> or validation fails.
     *                                <code>null</code> disables the issuer check.
     * @param verificationKeyResolver jose4j-based {@link VerificationKeyResolver} used to
     *                                verify the signature against the header and payload
     * @param scopeClaimName          Name of the scope claim to use; must be non-<code>null</code>
     * @param subClaimName            Name of the subject claim to use; must be non-<code>null</code>
     *
     * @see JwtConsumerBuilder
     * @see JwtConsumer
     * @see VerificationKeyResolver
     */
    public ValidatorAccessTokenValidator(Integer clockSkew,
        Set<String> expectedAudiences,
        String expectedIssuer,
        VerificationKeyResolver verificationKeyResolver,
        String scopeClaimName,
        String subClaimName) {
        JwtConsumerBuilder builder = new JwtConsumerBuilder();

        if (clockSkew != null)
            builder.setAllowedClockSkewInSeconds(clockSkew);

        if (expectedAudiences != null && !expectedAudiences.isEmpty())
            builder.setExpectedAudience(expectedAudiences.toArray(new String[0]));

        if (expectedIssuer != null)
            builder.setExpectedIssuer(expectedIssuer);

        // "none" is always disallowed so an unsigned token can never pass validation.
        this.jwtConsumer = builder
            .setJwsAlgorithmConstraints(DISALLOW_NONE)
            .setRequireExpirationTime()
            .setRequireIssuedAt()
            .setVerificationKeyResolver(verificationKeyResolver)
            .build();
        this.scopeClaimName = scopeClaimName;
        this.subClaimName = subClaimName;
    }

    /**
     * Accepts an OAuth JWT access token in base-64 encoded format, validates, and returns an
     * OAuthBearerToken.
     *
     * @param accessToken Non-<code>null</code> JWT access token
     * @return {@link OAuthBearerToken}
     * @throws ValidateException Thrown on errors performing validation of given token
     */
    @SuppressWarnings("unchecked")
    public OAuthBearerToken validate(String accessToken) throws ValidateException {
        SerializedJwt serializedJwt = new SerializedJwt(accessToken);

        JwtContext context;

        try {
            context = jwtConsumer.process(serializedJwt.getToken());
        } catch (InvalidJwtException e) {
            throw new ValidateException(String.format("Could not validate the access token: %s", e.getMessage()), e);
        }

        JwtClaims jwtClaims = context.getJwtClaims();

        // The scope claim may legally be either a single string or a list of strings;
        // normalize both shapes into a collection before validation.
        Object rawScope = getClaim(() -> jwtClaims.getClaimValue(scopeClaimName), scopeClaimName);
        Collection<String> scopeCollection;

        if (rawScope instanceof String) {
            scopeCollection = Collections.singletonList((String) rawScope);
        } else if (rawScope instanceof Collection) {
            scopeCollection = (Collection<String>) rawScope;
        } else {
            scopeCollection = Collections.emptySet();
        }

        NumericDate rawExpiration = getClaim(jwtClaims::getExpirationTime, ReservedClaimNames.EXPIRATION_TIME);
        String rawSub = getClaim(() -> jwtClaims.getStringClaimValue(subClaimName), subClaimName);
        NumericDate rawIssuedAt = getClaim(jwtClaims::getIssuedAt, ReservedClaimNames.ISSUED_AT);

        Set<String> scopes = ClaimValidationUtils.validateScopes(scopeClaimName, scopeCollection);
        long expiration = ClaimValidationUtils.validateExpiration(ReservedClaimNames.EXPIRATION_TIME,
            rawExpiration == null ? null : rawExpiration.getValueInMillis());
        String sub = ClaimValidationUtils.validateSubject(subClaimName, rawSub);
        Long issuedAt = ClaimValidationUtils.validateIssuedAt(ReservedClaimNames.ISSUED_AT,
            rawIssuedAt == null ? null : rawIssuedAt.getValueInMillis());

        return new BasicOAuthBearerToken(accessToken,
            scopes,
            expiration,
            sub,
            issuedAt);
    }

    /**
     * Extracts a single claim value, converting jose4j's checked
     * {@link MalformedClaimException} into our {@link ValidateException}.
     */
    private <T> T getClaim(ClaimSupplier<T> supplier, String claimName) throws ValidateException {
        try {
            T value = supplier.get();
            log.debug("getClaim - {}: {}", claimName, value);
            return value;
        } catch (MalformedClaimException e) {
            throw new ValidateException(String.format("Could not extract the '%s' claim from the access token", claimName), e);
        }
    }

    /** Supplier of a claim value that may fail with {@link MalformedClaimException}. */
    @FunctionalInterface
    public interface ClaimSupplier<T> {

        T get() throws MalformedClaimException;

    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/secured/VerificationKeyResolverFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.secured;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_REFRESH_MS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MAX_MS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MS;
import static org.apache.kafka.common.config.SaslConfigs.SASL_OAUTHBEARER_JWKS_ENDPOINT_URL;
import java.net.URL;
import java.nio.file.Path;
import java.util.Locale;
import java.util.Map;
import javax.net.ssl.SSLSocketFactory;
import org.apache.kafka.common.utils.Time;
import org.jose4j.http.Get;
import org.jose4j.jwk.HttpsJwks;
public class VerificationKeyResolverFactory {
/**
* Create an {@link AccessTokenRetriever} from the given
* {@link org.apache.kafka.common.config.SaslConfigs}.
*
* <b>Note</b>: the returned <code>CloseableVerificationKeyResolver</code> is not
* initialized here and must be done by the caller.
*
* Primarily exposed here for unit testing.
*
* @param configs SASL configuration
*
* @return Non-<code>null</code> {@link CloseableVerificationKeyResolver}
*/
public static CloseableVerificationKeyResolver create(Map<String, ?> configs,
Map<String, Object> jaasConfig) {
return create(configs, null, jaasConfig);
}
public static CloseableVerificationKeyResolver create(Map<String, ?> configs,
String saslMechanism,
Map<String, Object> jaasConfig) {
ConfigurationUtils cu = new ConfigurationUtils(configs, saslMechanism);
URL jwksEndpointUrl = cu.validateUrl(SASL_OAUTHBEARER_JWKS_ENDPOINT_URL);
if (jwksEndpointUrl.getProtocol().toLowerCase(Locale.ROOT).equals("file")) {
Path p = cu.validateFile(SASL_OAUTHBEARER_JWKS_ENDPOINT_URL);
return new JwksFileVerificationKeyResolver(p);
} else {
long refreshIntervalMs = cu.validateLong(SASL_OAUTHBEARER_JWKS_ENDPOINT_REFRESH_MS, true, 0L);
JaasOptionsUtils jou = new JaasOptionsUtils(jaasConfig);
SSLSocketFactory sslSocketFactory = null;
if (jou.shouldCreateSSLSocketFactory(jwksEndpointUrl))
sslSocketFactory = jou.createSSLSocketFactory();
HttpsJwks httpsJwks = new HttpsJwks(jwksEndpointUrl.toString());
httpsJwks.setDefaultCacheDuration(refreshIntervalMs);
if (sslSocketFactory != null) {
Get get = new Get();
get.setSslSocketFactory(sslSocketFactory);
httpsJwks.setSimpleHttpGet(get);
}
RefreshingHttpsJwks refreshingHttpsJwks = new RefreshingHttpsJwks(Time.SYSTEM,
httpsJwks,
refreshIntervalMs,
cu.validateLong(SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MS),
cu.validateLong(SASL_OAUTHBEARER_JWKS_ENDPOINT_RETRY_BACKOFF_MAX_MS));
return new RefreshingHttpsJwksVerificationKeyResolver(refreshingHttpsJwks);
}
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerConfigException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import org.apache.kafka.common.KafkaException;
/**
* Exception thrown when there is a problem with the configuration (an invalid
* option in a JAAS config, for example).
*/
public class OAuthBearerConfigException extends KafkaException {

    private static final long serialVersionUID = -8056105648062343518L;

    /**
     * Constructor
     *
     * @param s the detail message describing the configuration problem
     */
    public OAuthBearerConfigException(String s) {
        super(s);
    }

    /**
     * Constructor
     *
     * @param message the detail message describing the configuration problem
     * @param cause   the underlying cause of the configuration problem
     */
    public OAuthBearerConfigException(String message, Throwable cause) {
        super(message, cause);
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerIllegalTokenException.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import java.util.Objects;
import org.apache.kafka.common.KafkaException;
/**
* Exception thrown when token validation fails due to a problem with the token
* itself (as opposed to a missing remote resource or a configuration problem)
*/
public class OAuthBearerIllegalTokenException extends KafkaException {

    private static final long serialVersionUID = -5275276640051316350L;

    // The failed validation result that triggered this exception; never null,
    // and reason.success() is guaranteed to be false (enforced by the constructor).
    private final OAuthBearerValidationResult reason;

    /**
     * Constructor
     *
     * @param reason
     *            the mandatory reason for the validation failure; it must indicate
     *            failure
     * @throws IllegalArgumentException
     *             if the given reason indicates success rather than failure
     */
    public OAuthBearerIllegalTokenException(OAuthBearerValidationResult reason) {
        // The super(...) call must come first, so the null check is folded into it;
        // the success/failure invariant is then checked before the field is assigned.
        super(Objects.requireNonNull(reason).failureDescription());
        if (reason.success())
            throw new IllegalArgumentException("The reason indicates success; it must instead indicate failure");
        this.reason = reason;
    }

    /**
     * Return the (always non-null) reason for the validation failure
     *
     * @return the reason for the validation failure
     */
    public OAuthBearerValidationResult reason() {
        return reason;
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerScopeUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Objects;
import java.util.regex.Pattern;
/**
* Utility class for help dealing with
* <a href="https://tools.ietf.org/html/rfc6749#section-3.3">Access Token
* Scopes</a>
*/
public class OAuthBearerScopeUtils {

    // Legal scope-item characters per RFC 6749 Section 3.3:
    // %x21 / %x23-5B / %x5D-7E (i.e. printable ASCII except space, '"', and '\').
    private static final Pattern INDIVIDUAL_SCOPE_ITEM_PATTERN = Pattern.compile("[\\x23-\\x5B\\x5D-\\x7E\\x21]+");

    /**
     * Return true if the given value meets the definition of a valid scope item as
     * per <a href="https://tools.ietf.org/html/rfc6749#section-3.3">RFC 6749
     * Section 3.3</a>, otherwise false
     *
     * @param scopeItem
     *            the mandatory scope item to check for validity
     * @return true if the given value meets the definition of a valid scope item,
     *         otherwise false
     */
    public static boolean isValidScopeItem(String scopeItem) {
        Objects.requireNonNull(scopeItem);
        return INDIVIDUAL_SCOPE_ITEM_PATTERN.matcher(scopeItem).matches();
    }

    /**
     * Convert a space-delimited list of scope values (for example,
     * <code>"scope1 scope2"</code>) to a List containing the individual elements
     * (<code>"scope1"</code> and <code>"scope2"</code>)
     *
     * @param spaceDelimitedScope
     *            the mandatory (but possibly empty) space-delimited scope values,
     *            each of which must be valid according to
     *            {@link #isValidScopeItem(String)}
     * @return the (unmodifiable, possibly empty) list of the given space-delimited values
     * @throws OAuthBearerConfigException
     *             if any of the individual scope values are malformed/illegal
     */
    public static List<String> parseScope(String spaceDelimitedScope) throws OAuthBearerConfigException {
        String[] individualItems = Objects.requireNonNull(spaceDelimitedScope).split(" ");
        List<String> parsed = new ArrayList<>(individualItems.length);

        for (String item : individualItems) {
            // Consecutive delimiters yield empty strings from split(); skip them.
            if (item.isEmpty())
                continue;

            if (!isValidScopeItem(item))
                throw new OAuthBearerConfigException(String.format("Invalid scope value: %s", item));

            parsed.add(item);
        }

        return Collections.unmodifiableList(parsed);
    }

    private OAuthBearerScopeUtils() {
        // utility class; not instantiable
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerUnsecuredJws.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Objects;
import java.util.Set;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
import org.apache.kafka.common.utils.Utils;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.JsonNodeType;
/**
* A simple unsecured JWS implementation. The '{@code nbf}' claim is ignored if
* it is given because the related logic is not required for Kafka testing and
* development purposes.
*
* @see <a href="https://tools.ietf.org/html/rfc7515">RFC 7515</a>
*/
public class OAuthBearerUnsecuredJws implements OAuthBearerToken {

    // The original compact serialization, returned verbatim by value().
    private final String compactSerialization;

    // The 3 dot-separated sections of the compact serialization (header, claims, signature).
    private final List<String> splits;

    // Decoded JOSE header as an unmodifiable map.
    private final Map<String, Object> header;

    // Claim name holding the principal; non-null, non-blank (validated in constructor).
    private final String principalClaimName;

    // Claim name holding the scope; non-null, non-blank (validated in constructor).
    private final String scopeClaimName;

    // Decoded JWT claim set as an unmodifiable map.
    private final Map<String, Object> claims;

    // Scope derived from the scope claim; unmodifiable, possibly empty.
    private final Set<String> scope;

    // Token expiration in milliseconds since the epoch (from the mandatory 'exp' claim).
    private final long lifetime;

    // Principal name extracted from the principal claim; never blank.
    private final String principalName;

    // Token start time in milliseconds since the epoch (from 'iat'), or null if absent.
    private final Long startTimeMs;

    /**
     * Constructor with the given principal and scope claim names
     *
     * @param compactSerialization
     *            the compact serialization to parse as an unsecured JWS
     * @param principalClaimName
     *            the required principal claim name
     * @param scopeClaimName
     *            the required scope claim name
     * @throws OAuthBearerIllegalTokenException
     *             if the compact serialization is not a valid unsecured JWS
     *             (meaning it did not have 3 dot-separated Base64URL sections
     *             without an empty digital signature; or the header or claims
     *             either are not valid Base 64 URL encoded values or are not JSON
     *             after decoding; or the mandatory '{@code alg}' header value is
     *             not "{@code none}")
     */
    public OAuthBearerUnsecuredJws(String compactSerialization, String principalClaimName, String scopeClaimName)
        throws OAuthBearerIllegalTokenException {
        this.compactSerialization = Objects.requireNonNull(compactSerialization);
        // An empty middle section ("..") would mean a missing claims part; reject early.
        if (compactSerialization.contains(".."))
            throw new OAuthBearerIllegalTokenException(
                OAuthBearerValidationResult.newFailure("Malformed compact serialization contains '..'"));
        this.splits = extractCompactSerializationSplits();
        this.header = toMap(splits().get(0));
        String claimsSplit = splits.get(1);
        this.claims = toMap(claimsSplit);
        // An unsecured JWS must explicitly declare alg=none and carry no signature.
        String alg = Objects.requireNonNull(header().get("alg"), "JWS header must have an Algorithm value").toString();
        if (!"none".equals(alg))
            throw new OAuthBearerIllegalTokenException(
                OAuthBearerValidationResult.newFailure("Unsecured JWS must have 'none' for an algorithm"));
        String digitalSignatureSplit = splits.get(2);
        if (!digitalSignatureSplit.isEmpty())
            throw new OAuthBearerIllegalTokenException(
                OAuthBearerValidationResult.newFailure("Unsecured JWS must not contain a digital signature"));
        this.principalClaimName = Objects.requireNonNull(principalClaimName).trim();
        if (this.principalClaimName.isEmpty())
            throw new IllegalArgumentException("Must specify a non-blank principal claim name");
        this.scopeClaimName = Objects.requireNonNull(scopeClaimName).trim();
        if (this.scopeClaimName.isEmpty())
            throw new IllegalArgumentException("Must specify a non-blank scope claim name");
        this.scope = calculateScope();
        Number expirationTimeSeconds = expirationTime();
        if (expirationTimeSeconds == null)
            throw new OAuthBearerIllegalTokenException(
                OAuthBearerValidationResult.newFailure("No expiration time in JWT"));
        lifetime = convertClaimTimeInSecondsToMs(expirationTimeSeconds);
        String principalName = claim(this.principalClaimName, String.class);
        if (Utils.isBlank(principalName))
            throw new OAuthBearerIllegalTokenException(OAuthBearerValidationResult
                .newFailure("No principal name in JWT claim: " + this.principalClaimName));
        this.principalName = principalName;
        this.startTimeMs = calculateStartTimeMs();
    }

    @Override
    public String value() {
        return compactSerialization;
    }

    /**
     * Return the 3 or 5 dot-separated sections of the JWT compact serialization
     *
     * @return the 3 or 5 dot-separated sections of the JWT compact serialization
     */
    public List<String> splits() {
        return splits;
    }

    /**
     * Return the JOSE Header as a {@code Map}
     *
     * @return the JOSE header
     */
    public Map<String, Object> header() {
        return header;
    }

    @Override
    public String principalName() {
        return principalName;
    }

    @Override
    public Long startTimeMs() {
        return startTimeMs;
    }

    @Override
    public long lifetimeMs() {
        return lifetime;
    }

    @Override
    public Set<String> scope() throws OAuthBearerIllegalTokenException {
        return scope;
    }

    /**
     * Return the JWT Claim Set as a {@code Map}
     *
     * @return the (always non-null but possibly empty) claims
     */
    public Map<String, Object> claims() {
        return claims;
    }

    /**
     * Return the (always non-null/non-empty) principal claim name
     *
     * @return the (always non-null/non-empty) principal claim name
     */
    public String principalClaimName() {
        return principalClaimName;
    }

    /**
     * Return the (always non-null/non-empty) scope claim name
     *
     * @return the (always non-null/non-empty) scope claim name
     */
    public String scopeClaimName() {
        return scopeClaimName;
    }

    /**
     * Indicate if the claim exists and is the given type
     *
     * @param claimName
     *            the mandatory JWT claim name
     * @param type
     *            the mandatory type, which should either be String.class,
     *            Number.class, or List.class
     * @return true if the claim exists and is the given type, otherwise false
     */
    public boolean isClaimType(String claimName, Class<?> type) {
        Object value = rawClaim(claimName);
        Objects.requireNonNull(type);
        if (value == null)
            return false;
        if (type == String.class && value instanceof String)
            return true;
        if (type == Number.class && value instanceof Number)
            return true;
        return type == List.class && value instanceof List;
    }

    /**
     * Extract a claim of the given type
     *
     * @param claimName
     *            the mandatory JWT claim name
     * @param type
     *            the mandatory type, which must either be String.class,
     *            Number.class, or List.class
     * @return the claim if it exists, otherwise null
     * @throws OAuthBearerIllegalTokenException
     *             if the claim exists but is not the given type
     */
    public <T> T claim(String claimName, Class<T> type) throws OAuthBearerIllegalTokenException {
        Object value = rawClaim(claimName);
        try {
            // A null value casts successfully to any type, so an absent claim returns null.
            return Objects.requireNonNull(type).cast(value);
        } catch (ClassCastException e) {
            throw new OAuthBearerIllegalTokenException(
                OAuthBearerValidationResult.newFailure(String.format("The '%s' claim was not of type %s: %s",
                    claimName, type.getSimpleName(), value.getClass().getSimpleName())));
        }
    }

    /**
     * Extract a claim in its raw form
     *
     * @param claimName
     *            the mandatory JWT claim name
     * @return the raw claim value, if it exists, otherwise null
     */
    public Object rawClaim(String claimName) {
        return claims().get(Objects.requireNonNull(claimName));
    }

    /**
     * Return the
     * <a href="https://tools.ietf.org/html/rfc7519#section-4.1.4">Expiration
     * Time</a> claim
     *
     * @return the <a href=
     *         "https://tools.ietf.org/html/rfc7519#section-4.1.4">Expiration
     *         Time</a> claim if available, otherwise null
     * @throws OAuthBearerIllegalTokenException
     *             if the claim value is the incorrect type
     */
    public Number expirationTime() throws OAuthBearerIllegalTokenException {
        return claim("exp", Number.class);
    }

    /**
     * Return the <a href="https://tools.ietf.org/html/rfc7519#section-4.1.6">Issued
     * At</a> claim
     *
     * @return the
     *         <a href= "https://tools.ietf.org/html/rfc7519#section-4.1.6">Issued
     *         At</a> claim if available, otherwise null
     * @throws OAuthBearerIllegalTokenException
     *             if the claim value is the incorrect type
     */
    public Number issuedAt() throws OAuthBearerIllegalTokenException {
        return claim("iat", Number.class);
    }

    /**
     * Return the
     * <a href="https://tools.ietf.org/html/rfc7519#section-4.1.2">Subject</a> claim
     *
     * @return the <a href=
     *         "https://tools.ietf.org/html/rfc7519#section-4.1.2">Subject</a> claim
     *         if available, otherwise null
     * @throws OAuthBearerIllegalTokenException
     *             if the claim value is the incorrect type
     */
    public String subject() throws OAuthBearerIllegalTokenException {
        return claim("sub", String.class);
    }

    /**
     * Decode the given Base64URL-encoded value, parse the resulting JSON as a JSON
     * object, and return the map of member names to their values (each value being
     * represented as either a String, a Number, or a List of Strings).
     *
     * @param split
     *            the value to decode and parse
     * @return the map of JSON member names to their String, Number, or String List
     *         value
     * @throws OAuthBearerIllegalTokenException
     *             if the given Base64URL-encoded value cannot be decoded or parsed
     */
    public static Map<String, Object> toMap(String split) throws OAuthBearerIllegalTokenException {
        Map<String, Object> retval = new HashMap<>();
        try {
            // NOTE(review): the surrounding docs say the sections are Base64URL-encoded,
            // but Base64.getDecoder() uses the standard alphabet and rejects '-'/'_';
            // verify whether Base64.getUrlDecoder() is intended here.
            byte[] decode = Base64.getDecoder().decode(split);
            JsonNode jsonNode = new ObjectMapper().readTree(decode);
            if (jsonNode == null)
                throw new OAuthBearerIllegalTokenException(OAuthBearerValidationResult.newFailure("malformed JSON"));
            for (Iterator<Entry<String, JsonNode>> iterator = jsonNode.fields(); iterator.hasNext();) {
                Entry<String, JsonNode> entry = iterator.next();
                retval.put(entry.getKey(), convert(entry.getValue()));
            }
            return Collections.unmodifiableMap(retval);
        } catch (IllegalArgumentException e) {
            // potentially thrown by java.util.Base64.Decoder implementations
            throw new OAuthBearerIllegalTokenException(
                OAuthBearerValidationResult.newFailure("malformed Base64 URL encoded value"));
        } catch (IOException e) {
            throw new OAuthBearerIllegalTokenException(OAuthBearerValidationResult.newFailure("malformed JSON"));
        }
    }

    // Split the compact serialization on '.'; String.split drops a trailing empty
    // string, so a serialization ending with '.' (empty signature section) gets the
    // empty final element re-added before the 3-section check.
    private List<String> extractCompactSerializationSplits() {
        List<String> tmpSplits = new ArrayList<>(Arrays.asList(compactSerialization.split("\\.")));
        if (compactSerialization.endsWith("."))
            tmpSplits.add("");
        if (tmpSplits.size() != 3)
            throw new OAuthBearerIllegalTokenException(OAuthBearerValidationResult.newFailure(
                "Unsecured JWS compact serializations must have 3 dot-separated Base64URL-encoded values"));
        return Collections.unmodifiableList(tmpSplits);
    }

    // Convert a JSON node to String, Number, or List<String>: arrays become lists of
    // the elements' text; numbers keep their numeric value; everything else becomes text.
    private static Object convert(JsonNode value) {
        if (value.isArray()) {
            List<String> retvalList = new ArrayList<>();
            for (JsonNode arrayElement : value)
                retvalList.add(arrayElement.asText());
            return retvalList;
        }
        return value.getNodeType() == JsonNodeType.NUMBER ? value.numberValue() : value.asText();
    }

    // The 'iat' claim is optional, so this may return null (unlike 'exp').
    private Long calculateStartTimeMs() throws OAuthBearerIllegalTokenException {
        Number issuedAtSeconds = claim("iat", Number.class);
        return issuedAtSeconds == null ? null : convertClaimTimeInSecondsToMs(issuedAtSeconds);
    }

    // Claim times are in (possibly fractional) seconds; convert to whole milliseconds.
    private static long convertClaimTimeInSecondsToMs(Number claimValue) {
        return Math.round(claimValue.doubleValue() * 1000);
    }

    // Build the scope set from the scope claim, which may be a single string or a
    // list of strings; blank entries are dropped and values are trimmed.
    private Set<String> calculateScope() {
        String scopeClaimName = scopeClaimName();
        if (isClaimType(scopeClaimName, String.class)) {
            String scopeClaimValue = claim(scopeClaimName, String.class);
            if (Utils.isBlank(scopeClaimValue))
                return Collections.emptySet();
            else {
                Set<String> retval = new HashSet<>();
                retval.add(scopeClaimValue.trim());
                return Collections.unmodifiableSet(retval);
            }
        }
        List<?> scopeClaimValue = claim(scopeClaimName, List.class);
        if (scopeClaimValue == null || scopeClaimValue.isEmpty())
            return Collections.emptySet();
        @SuppressWarnings("unchecked")
        List<String> stringList = (List<String>) scopeClaimValue;
        Set<String> retval = new HashSet<>();
        for (String scope : stringList) {
            if (!Utils.isBlank(scope)) {
                retval.add(scope.trim());
            }
        }
        return Collections.unmodifiableSet(retval);
    }

}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerUnsecuredLoginCallbackHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.Base64;
import java.util.Base64.Encoder;
import java.util.Collections;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.sasl.SaslException;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler;
import org.apache.kafka.common.security.auth.SaslExtensionsCallback;
import org.apache.kafka.common.security.auth.SaslExtensions;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerTokenCallback;
import org.apache.kafka.common.security.oauthbearer.internals.OAuthBearerClientInitialResponse;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@code CallbackHandler} that recognizes {@link OAuthBearerTokenCallback}
* to return an unsecured OAuth 2 bearer token and {@link SaslExtensionsCallback} to return SASL extensions
* <p>
* Claims and their values on the returned token can be specified using
* {@code unsecuredLoginStringClaim_<claimname>},
* {@code unsecuredLoginNumberClaim_<claimname>}, and
* {@code unsecuredLoginListClaim_<claimname>} options. The first character of
* the value is taken as the delimiter for list claims. You may define any claim
* name and value except '{@code iat}' and '{@code exp}', both of which are
* calculated automatically.
 * <p>
* You can also add custom unsecured SASL extensions using
* {@code unsecuredLoginExtension_<extensionname>}. Extension keys and values are subject to regex validation.
* The extension key must also not be equal to the reserved key {@link OAuthBearerClientInitialResponse#AUTH_KEY}
* <p>
* This implementation also accepts the following options:
* <ul>
* <li>{@code unsecuredLoginPrincipalClaimName} set to a custom claim name if
* you wish the name of the String claim holding the principal name to be
* something other than '{@code sub}'.</li>
* <li>{@code unsecuredLoginLifetimeSeconds} set to an integer value if the
* token expiration is to be set to something other than the default value of
* 3600 seconds (which is 1 hour). The '{@code exp}' claim reflects the
* expiration time.</li>
* <li>{@code unsecuredLoginScopeClaimName} set to a custom claim name if you
* wish the name of the String or String List claim holding any token scope to
* be something other than '{@code scope}'</li>
* </ul>
* For example:
*
* <pre>
* KafkaClient {
* org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule Required
* unsecuredLoginStringClaim_sub="thePrincipalName"
* unsecuredLoginListClaim_scope="|scopeValue1|scopeValue2"
* unsecuredLoginLifetimeSeconds="60"
* unsecuredLoginExtension_traceId="123";
* };
* </pre>
*
* This class is the default when the SASL mechanism is OAUTHBEARER and no value
* is explicitly set via either the {@code sasl.login.callback.handler.class}
* client configuration property or the
* {@code listener.name.sasl_[plaintext|ssl].oauthbearer.sasl.login.callback.handler.class}
* broker configuration property.
*/
public class OAuthBearerUnsecuredLoginCallbackHandler implements AuthenticateCallbackHandler {
    private final Logger log = LoggerFactory.getLogger(OAuthBearerUnsecuredLoginCallbackHandler.class);
    private static final String OPTION_PREFIX = "unsecuredLogin";
    private static final String PRINCIPAL_CLAIM_NAME_OPTION = OPTION_PREFIX + "PrincipalClaimName";
    private static final String LIFETIME_SECONDS_OPTION = OPTION_PREFIX + "LifetimeSeconds";
    private static final String SCOPE_CLAIM_NAME_OPTION = OPTION_PREFIX + "ScopeClaimName";
    // "iat" and "exp" are always computed by this handler and may never be set explicitly
    private static final Set<String> RESERVED_CLAIMS = Collections
            .unmodifiableSet(new HashSet<>(Arrays.asList("iat", "exp")));
    private static final String DEFAULT_PRINCIPAL_CLAIM_NAME = "sub";
    private static final String DEFAULT_LIFETIME_SECONDS_ONE_HOUR = "3600";
    private static final String DEFAULT_SCOPE_CLAIM_NAME = "scope";
    private static final String STRING_CLAIM_PREFIX = OPTION_PREFIX + "StringClaim_";
    private static final String NUMBER_CLAIM_PREFIX = OPTION_PREFIX + "NumberClaim_";
    private static final String LIST_CLAIM_PREFIX = OPTION_PREFIX + "ListClaim_";
    private static final String EXTENSION_PREFIX = OPTION_PREFIX + "Extension_";
    private static final String QUOTE = "\"";
    private Time time = Time.SYSTEM;
    // immutable view of the JAAS module options; non-null once configure() has run
    private Map<String, String> moduleOptions = null;
    private boolean configured = false;
    // compiled once; both are LITERAL (non-regex) patterns matching a single character
    private static final Pattern DOUBLEQUOTE = Pattern.compile("\"", Pattern.LITERAL);
    private static final Pattern BACKSLASH = Pattern.compile("\\", Pattern.LITERAL);

    /**
     * For testing
     *
     * @param time
     *            the mandatory time to set
     */
    void time(Time time) {
        this.time = Objects.requireNonNull(time);
    }

    /**
     * Return true if this instance has been configured, otherwise false
     *
     * @return true if this instance has been configured, otherwise false
     */
    public boolean configured() {
        return configured;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void configure(Map<String, ?> configs, String saslMechanism, List<AppConfigurationEntry> jaasConfigEntries) {
        if (!OAuthBearerLoginModule.OAUTHBEARER_MECHANISM.equals(saslMechanism))
            throw new IllegalArgumentException(String.format("Unexpected SASL mechanism: %s", saslMechanism));
        if (Objects.requireNonNull(jaasConfigEntries).size() != 1 || jaasConfigEntries.get(0) == null)
            throw new IllegalArgumentException(
                    String.format("Must supply exactly 1 non-null JAAS mechanism configuration (size was %d)",
                            jaasConfigEntries.size()));
        this.moduleOptions = Collections.unmodifiableMap((Map<String, String>) jaasConfigEntries.get(0).getOptions());
        configured = true;
    }

    /**
     * Handle {@link OAuthBearerTokenCallback} (mint an unsecured JWS token from the
     * JAAS options) and {@link SaslExtensionsCallback} (collect configured SASL
     * extensions). Any configuration problem is surfaced as an {@link IOException}.
     *
     * @throws IOException                  if token/extension creation fails
     * @throws UnsupportedCallbackException for any unrecognized callback type
     */
    @Override
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
        if (!configured())
            throw new IllegalStateException("Callback handler not configured");
        for (Callback callback : callbacks) {
            if (callback instanceof OAuthBearerTokenCallback)
                try {
                    handleTokenCallback((OAuthBearerTokenCallback) callback);
                } catch (KafkaException e) {
                    throw new IOException(e.getMessage(), e);
                }
            else if (callback instanceof SaslExtensionsCallback)
                try {
                    handleExtensionsCallback((SaslExtensionsCallback) callback);
                } catch (KafkaException e) {
                    throw new IOException(e.getMessage(), e);
                }
            else
                throw new UnsupportedCallbackException(callback);
        }
    }

    @Override
    public void close() {
        // empty
    }

    /**
     * Construct an unsecured (alg=none) JWS from the configured claim options and
     * hand it to the callback. A missing/empty options map yields a null token so
     * the login can still be used server-side.
     */
    private void handleTokenCallback(OAuthBearerTokenCallback callback) {
        if (callback.token() != null)
            throw new IllegalArgumentException("Callback had a token already");
        if (moduleOptions.isEmpty()) {
            log.debug("Token not provided, this login cannot be used to establish client connections");
            callback.token(null);
            return;
        }
        // if every option is an extension there is nothing to build a token from
        if (moduleOptions.keySet().stream().allMatch(name -> name.startsWith(EXTENSION_PREFIX))) {
            throw new OAuthBearerConfigException("Extensions provided in login context without a token");
        }
        String principalClaimNameValue = optionValue(PRINCIPAL_CLAIM_NAME_OPTION);
        String principalClaimName = Utils.isBlank(principalClaimNameValue) ? DEFAULT_PRINCIPAL_CLAIM_NAME : principalClaimNameValue.trim();
        String scopeClaimNameValue = optionValue(SCOPE_CLAIM_NAME_OPTION);
        String scopeClaimName = Utils.isBlank(scopeClaimNameValue) ? DEFAULT_SCOPE_CLAIM_NAME : scopeClaimNameValue.trim();
        String headerJson = "{" + claimOrHeaderJsonText("alg", "none") + "}";
        String lifetimeSecondsValueToUse = optionValue(LIFETIME_SECONDS_OPTION, DEFAULT_LIFETIME_SECONDS_ONE_HOUR);
        String claimsJson;
        try {
            // "iat" is in (potentially fractional) seconds since the epoch
            claimsJson = String.format("{%s,%s%s}", expClaimText(Long.parseLong(lifetimeSecondsValueToUse)),
                    claimOrHeaderJsonText("iat", time.milliseconds() / 1000.0),
                    commaPrependedStringNumberAndListClaimsJsonText());
        } catch (NumberFormatException e) {
            throw new OAuthBearerConfigException(e.getMessage());
        }
        try {
            Encoder urlEncoderNoPadding = Base64.getUrlEncoder().withoutPadding();
            // unsecured JWS: base64url(header).base64url(claims). with an empty signature
            OAuthBearerUnsecuredJws jws = new OAuthBearerUnsecuredJws(
                    String.format("%s.%s.",
                            urlEncoderNoPadding.encodeToString(headerJson.getBytes(StandardCharsets.UTF_8)),
                            urlEncoderNoPadding.encodeToString(claimsJson.getBytes(StandardCharsets.UTF_8))),
                    principalClaimName, scopeClaimName);
            log.info("Retrieved token with principal {}", jws.principalName());
            callback.token(jws);
        } catch (OAuthBearerIllegalTokenException e) {
            // occurs if the principal claim doesn't exist or has an empty value
            throw new OAuthBearerConfigException(e.getMessage(), e);
        }
    }

    /**
     * Add and validate all the configured extensions.
     * Token keys, apart from passing regex validation, must not be equal to the reserved key {@link OAuthBearerClientInitialResponse#AUTH_KEY}
     */
    private void handleExtensionsCallback(SaslExtensionsCallback callback) {
        Map<String, String> extensions = new HashMap<>();
        for (Map.Entry<String, String> configEntry : this.moduleOptions.entrySet()) {
            String key = configEntry.getKey();
            if (!key.startsWith(EXTENSION_PREFIX))
                continue;
            extensions.put(key.substring(EXTENSION_PREFIX.length()), configEntry.getValue());
        }
        SaslExtensions saslExtensions = new SaslExtensions(extensions);
        try {
            OAuthBearerClientInitialResponse.validateExtensions(saslExtensions);
        } catch (SaslException e) {
            throw new ConfigException(e.getMessage());
        }
        callback.extensions(saslExtensions);
    }

    /**
     * Render every configured String/Number/List claim as comma-prepended JSON
     * member text (ready to append inside an existing JSON object).
     *
     * @throws OAuthBearerConfigException if a reserved claim name is configured
     */
    private String commaPrependedStringNumberAndListClaimsJsonText() throws OAuthBearerConfigException {
        StringBuilder sb = new StringBuilder();
        for (String key : moduleOptions.keySet()) {
            if (key.startsWith(STRING_CLAIM_PREFIX) && key.length() > STRING_CLAIM_PREFIX.length())
                sb.append(',').append(claimOrHeaderJsonText(
                        confirmNotReservedClaimName(key.substring(STRING_CLAIM_PREFIX.length())), optionValue(key)));
            else if (key.startsWith(NUMBER_CLAIM_PREFIX) && key.length() > NUMBER_CLAIM_PREFIX.length())
                sb.append(',')
                        .append(claimOrHeaderJsonText(
                                confirmNotReservedClaimName(key.substring(NUMBER_CLAIM_PREFIX.length())),
                                Double.valueOf(optionValue(key))));
            else if (key.startsWith(LIST_CLAIM_PREFIX) && key.length() > LIST_CLAIM_PREFIX.length())
                sb.append(',')
                        .append(claimOrHeaderJsonArrayText(
                                confirmNotReservedClaimName(key.substring(LIST_CLAIM_PREFIX.length())),
                                listJsonText(optionValue(key))));
        }
        return sb.toString();
    }

    /** Reject the automatically-computed claim names ("iat", "exp"). */
    private String confirmNotReservedClaimName(String claimName) throws OAuthBearerConfigException {
        if (RESERVED_CLAIMS.contains(claimName))
            throw new OAuthBearerConfigException(String.format("Cannot explicitly set the '%s' claim", claimName));
        return claimName;
    }

    /**
     * Render a delimited list option value as a JSON array of escaped strings.
     * The first character of the value is the delimiter; a value with no content
     * after the delimiter yields an empty array.
     */
    private String listJsonText(String value) {
        if (value.length() <= 1)
            return "[]";
        String delimiter;
        String unescapedDelimiterChar = value.substring(0, 1);
        switch (unescapedDelimiterChar) {
            case "\\":
            case ".":
            case "[":
            case "(":
            case "{":
            case "|":
            case "^":
            case "$":
                // regex metacharacters must be escaped before being used with split()
                delimiter = "\\" + unescapedDelimiterChar;
                break;
            default:
                delimiter = unescapedDelimiterChar;
                break;
        }
        String listText = value.substring(1);
        String[] elements = listText.split(delimiter);
        StringBuilder sb = new StringBuilder();
        for (String element : elements) {
            sb.append(sb.length() == 0 ? '[' : ',');
            sb.append('"').append(escape(element)).append('"');
        }
        // split() drops trailing/leading/adjacent empty tokens; preserve one empty element
        if (listText.startsWith(unescapedDelimiterChar) || listText.endsWith(unescapedDelimiterChar)
                || listText.contains(unescapedDelimiterChar + unescapedDelimiterChar))
            sb.append(",\"\"");
        return sb.append(']').toString();
    }

    private String optionValue(String key) {
        return optionValue(key, null);
    }

    private String optionValue(String key, String defaultValue) {
        String explicitValue = option(key);
        return explicitValue != null ? explicitValue : defaultValue;
    }

    private String option(String key) {
        if (!configured)
            throw new IllegalStateException("Callback handler not configured");
        return moduleOptions.get(Objects.requireNonNull(key));
    }

    private String claimOrHeaderJsonText(String claimName, Number claimValue) {
        return QUOTE + escape(claimName) + QUOTE + ":" + claimValue;
    }

    private String claimOrHeaderJsonText(String claimName, String claimValue) {
        return QUOTE + escape(claimName) + QUOTE + ":" + QUOTE + escape(claimValue) + QUOTE;
    }

    private String claimOrHeaderJsonArrayText(String claimName, String escapedClaimValue) {
        if (!escapedClaimValue.startsWith("[") || !escapedClaimValue.endsWith("]"))
            throw new IllegalArgumentException(String.format("Illegal JSON array: %s", escapedClaimValue));
        return QUOTE + escape(claimName) + QUOTE + ":" + escapedClaimValue;
    }

    /**
     * JSON-escape a string value. Backslashes MUST be escaped before double
     * quotes; the previous quote-first order re-escaped the backslash introduced
     * for the quote (turning {@code "} into {@code \\"}), which is malformed JSON.
     */
    private String escape(String jsonStringValue) {
        String backslashesEscaped = BACKSLASH.matcher(jsonStringValue)
                .replaceAll(Matcher.quoteReplacement("\\\\"));
        return DOUBLEQUOTE.matcher(backslashesEscaped).replaceAll(Matcher.quoteReplacement("\\\""));
    }

    /** "exp" claim member text: current time plus the configured lifetime, in seconds. */
    private String expClaimText(long lifetimeSeconds) {
        return claimOrHeaderJsonText("exp", time.milliseconds() / 1000.0 + lifetimeSeconds);
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerUnsecuredValidatorCallbackHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.AppConfigurationEntry;
import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerExtensionsValidatorCallback;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerValidatorCallback;
import org.apache.kafka.common.utils.Time;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* A {@code CallbackHandler} that recognizes
* {@link OAuthBearerValidatorCallback} and validates an unsecured OAuth 2
* bearer token. It requires there to be an <code>"exp" (Expiration Time)</code>
* claim of type Number. If <code>"iat" (Issued At)</code> or
* <code>"nbf" (Not Before)</code> claims are present each must be a number that
* precedes the Expiration Time claim, and if both are present the Not Before
* claim must not precede the Issued At claim. It also accepts the following
* options, none of which are required:
* <ul>
* <li>{@code unsecuredValidatorPrincipalClaimName} set to a non-empty value if
* you wish a particular String claim holding a principal name to be checked for
* existence; the default is to check for the existence of the '{@code sub}'
* claim</li>
* <li>{@code unsecuredValidatorScopeClaimName} set to a custom claim name if
* you wish the name of the String or String List claim holding any token scope
* to be something other than '{@code scope}'</li>
* <li>{@code unsecuredValidatorRequiredScope} set to a space-delimited list of
* scope values if you wish the String/String List claim holding the token scope
* to be checked to make sure it contains certain values</li>
* <li>{@code unsecuredValidatorAllowableClockSkewMs} set to a positive integer
* value if you wish to allow up to some number of positive milliseconds of
* clock skew (the default is 0)</li>
 * </ul>
* For example:
*
* <pre>
* KafkaServer {
* org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginModule Required
* unsecuredLoginStringClaim_sub="thePrincipalName"
* unsecuredLoginListClaim_scope=",KAFKA_BROKER,LOGIN_TO_KAFKA"
* unsecuredValidatorRequiredScope="LOGIN_TO_KAFKA"
* unsecuredValidatorAllowableClockSkewMs="3000";
* };
* </pre>
* It also recognizes {@link OAuthBearerExtensionsValidatorCallback} and validates every extension passed to it.
*
* This class is the default when the SASL mechanism is OAUTHBEARER and no value
* is explicitly set via the
* {@code listener.name.sasl_[plaintext|ssl].oauthbearer.sasl.server.callback.handler.class}
* broker configuration property.
* It is worth noting that this class is not suitable for production use due to the use of unsecured JWT tokens and
* validation of every given extension.
*/
public class OAuthBearerUnsecuredValidatorCallbackHandler implements AuthenticateCallbackHandler {
    private static final Logger log = LoggerFactory.getLogger(OAuthBearerUnsecuredValidatorCallbackHandler.class);
    private static final String OPTION_PREFIX = "unsecuredValidator";
    private static final String PRINCIPAL_CLAIM_NAME_OPTION = OPTION_PREFIX + "PrincipalClaimName";
    private static final String SCOPE_CLAIM_NAME_OPTION = OPTION_PREFIX + "ScopeClaimName";
    private static final String REQUIRED_SCOPE_OPTION = OPTION_PREFIX + "RequiredScope";
    private static final String ALLOWABLE_CLOCK_SKEW_MILLIS_OPTION = OPTION_PREFIX + "AllowableClockSkewMs";
    // clock used for token time validation; replaceable for testing via time(Time)
    private Time time = Time.SYSTEM;
    // immutable view of the JAAS module options; non-null once configured
    private Map<String, String> jaasModuleOptions = null;
    private boolean initialized = false;

    /**
     * For testing
     *
     * @param time
     *            the mandatory time to set
     */
    void time(Time time) {
        this.time = Objects.requireNonNull(time);
    }

    /**
     * Return true if this instance has been configured, otherwise false
     *
     * @return true if this instance has been configured, otherwise false
     */
    public boolean configured() {
        return initialized;
    }

    @SuppressWarnings("unchecked")
    @Override
    public void configure(Map<String, ?> configs, String saslMechanism, List<AppConfigurationEntry> jaasConfigEntries) {
        if (!OAuthBearerLoginModule.OAUTHBEARER_MECHANISM.equals(saslMechanism))
            throw new IllegalArgumentException(String.format("Unexpected SASL mechanism: %s", saslMechanism));
        Objects.requireNonNull(jaasConfigEntries);
        if (jaasConfigEntries.size() != 1 || jaasConfigEntries.get(0) == null)
            throw new IllegalArgumentException(
                    String.format("Must supply exactly 1 non-null JAAS mechanism configuration (size was %d)",
                            jaasConfigEntries.size()));
        this.jaasModuleOptions = Collections
                .unmodifiableMap((Map<String, String>) jaasConfigEntries.get(0).getOptions());
        this.initialized = true;
    }

    /**
     * Validate an {@link OAuthBearerValidatorCallback} token (reporting failures
     * through the callback's error mechanism) and accept every extension presented
     * via {@link OAuthBearerExtensionsValidatorCallback} without inspection.
     *
     * @throws UnsupportedCallbackException for any unrecognized callback type
     */
    @Override
    public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
        if (!configured())
            throw new IllegalStateException("Callback handler not configured");
        for (Callback callback : callbacks) {
            if (callback instanceof OAuthBearerValidatorCallback) {
                validateTokenCallback((OAuthBearerValidatorCallback) callback);
            } else if (callback instanceof OAuthBearerExtensionsValidatorCallback) {
                // unsecured mode: every extension is considered valid as-is
                OAuthBearerExtensionsValidatorCallback extensionsCallback =
                        (OAuthBearerExtensionsValidatorCallback) callback;
                for (String extensionName : extensionsCallback.inputExtensions().map().keySet())
                    extensionsCallback.valid(extensionName);
            } else {
                throw new UnsupportedCallbackException(callback);
            }
        }
    }

    @Override
    public void close() {
        // empty
    }

    /**
     * Run the full validation sequence for the callback's token, converting any
     * {@link OAuthBearerIllegalTokenException} into the callback's error form
     * ("insufficient_scope" when a failure scope is present, else "invalid_token").
     */
    private void validateTokenCallback(OAuthBearerValidatorCallback callback) {
        try {
            validateToken(callback);
        } catch (OAuthBearerIllegalTokenException e) {
            OAuthBearerValidationResult reason = e.reason();
            String failedScope = reason.failureScope();
            callback.error(failedScope != null ? "insufficient_scope" : "invalid_token",
                    failedScope, reason.failureOpenIdConfig());
        }
    }

    /**
     * Parse and validate the token: principal claim existence/type, issued-at,
     * expiration, time consistency and required scope, in that order.
     */
    private void validateToken(OAuthBearerValidatorCallback callback) {
        String tokenValue = callback.tokenValue();
        if (tokenValue == null)
            throw new IllegalArgumentException("Callback missing required token value");
        // resolve all configuration first so config errors surface before token parsing
        String principalClaimName = principalClaimName();
        String scopeClaimName = scopeClaimName();
        List<String> requiredScope = requiredScope();
        int allowableClockSkewMs = allowableClockSkewMs();
        OAuthBearerUnsecuredJws unsecuredJwt = new OAuthBearerUnsecuredJws(tokenValue, principalClaimName,
                scopeClaimName);
        long now = time.milliseconds();
        OAuthBearerValidationUtils
                .validateClaimForExistenceAndType(unsecuredJwt, true, principalClaimName, String.class)
                .throwExceptionIfFailed();
        OAuthBearerValidationUtils.validateIssuedAt(unsecuredJwt, false, now, allowableClockSkewMs)
                .throwExceptionIfFailed();
        OAuthBearerValidationUtils.validateExpirationTime(unsecuredJwt, now, allowableClockSkewMs)
                .throwExceptionIfFailed();
        OAuthBearerValidationUtils.validateTimeConsistency(unsecuredJwt).throwExceptionIfFailed();
        OAuthBearerValidationUtils.validateScope(unsecuredJwt, requiredScope).throwExceptionIfFailed();
        log.info("Successfully validated token with principal {}: {}", unsecuredJwt.principalName(),
                unsecuredJwt.claims());
        callback.token(unsecuredJwt);
    }

    /** Configured principal claim name, defaulting to "sub" when blank/absent. */
    private String principalClaimName() {
        String configuredName = option(PRINCIPAL_CLAIM_NAME_OPTION);
        return Utils.isBlank(configuredName) ? "sub" : configuredName.trim();
    }

    /** Configured scope claim name, defaulting to "scope" when blank/absent. */
    private String scopeClaimName() {
        String configuredName = option(SCOPE_CLAIM_NAME_OPTION);
        return Utils.isBlank(configuredName) ? "scope" : configuredName.trim();
    }

    /** Required scope elements parsed from the space-delimited option (empty when unset). */
    private List<String> requiredScope() {
        String spaceDelimitedScope = option(REQUIRED_SCOPE_OPTION);
        if (Utils.isBlank(spaceDelimitedScope))
            return Collections.emptyList();
        return OAuthBearerScopeUtils.parseScope(spaceDelimitedScope.trim());
    }

    /**
     * Configured allowable clock skew in milliseconds (default 0).
     *
     * @throws OAuthBearerConfigException if the value is non-numeric or negative
     */
    private int allowableClockSkewMs() {
        String rawValue = option(ALLOWABLE_CLOCK_SKEW_MILLIS_OPTION);
        int skewMs;
        try {
            skewMs = Utils.isBlank(rawValue) ? 0 : Integer.parseInt(rawValue.trim());
        } catch (NumberFormatException e) {
            throw new OAuthBearerConfigException(e.getMessage(), e);
        }
        if (skewMs < 0) {
            throw new OAuthBearerConfigException(
                    String.format("Allowable clock skew millis must not be negative: %s", rawValue));
        }
        return skewMs;
    }

    private String option(String key) {
        if (!initialized)
            throw new IllegalStateException("Callback handler not configured");
        return jaasModuleOptions.get(Objects.requireNonNull(key));
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerValidationResult.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import java.io.Serializable;
/**
* The result of some kind of token validation
*/
public class OAuthBearerValidationResult implements Serializable {
    private static final long serialVersionUID = 5774669940899777373L;
    // field names are part of the serialized form and must not change
    private final boolean success;
    private final String failureDescription;
    private final String failureScope;
    private final String failureOpenIdConfig;

    /**
     * Return an instance indicating success
     *
     * @return an instance indicating success
     */
    public static OAuthBearerValidationResult newSuccess() {
        return new OAuthBearerValidationResult(true, null, null, null);
    }

    /**
     * Return a new validation failure instance
     *
     * @param failureDescription
     *            optional description of the failure
     * @return a new validation failure instance
     */
    public static OAuthBearerValidationResult newFailure(String failureDescription) {
        return newFailure(failureDescription, null, null);
    }

    /**
     * Return a new validation failure instance
     *
     * @param failureDescription
     *            optional description of the failure
     * @param failureScope
     *            optional scope to be reported with the failure
     * @param failureOpenIdConfig
     *            optional OpenID Connect configuration to be reported with the
     *            failure
     * @return a new validation failure instance
     */
    public static OAuthBearerValidationResult newFailure(String failureDescription, String failureScope,
            String failureOpenIdConfig) {
        return new OAuthBearerValidationResult(false, failureDescription, failureScope, failureOpenIdConfig);
    }

    private OAuthBearerValidationResult(boolean success, String failureDescription, String failureScope,
            String failureOpenIdConfig) {
        // failure details only make sense on a failure result
        boolean failureDetailWithSuccess = success && (failureScope != null || failureOpenIdConfig != null);
        if (failureDetailWithSuccess)
            throw new IllegalArgumentException("success was indicated but failure scope/OpenIdConfig were provided");
        this.success = success;
        this.failureDescription = failureDescription;
        this.failureScope = failureScope;
        this.failureOpenIdConfig = failureOpenIdConfig;
    }

    /**
     * Return true if this instance indicates success, otherwise false
     *
     * @return true if this instance indicates success, otherwise false
     */
    public boolean success() {
        return this.success;
    }

    /**
     * Return the (potentially null) descriptive message for the failure
     *
     * @return the (potentially null) descriptive message for the failure
     */
    public String failureDescription() {
        return this.failureDescription;
    }

    /**
     * Return the (potentially null) scope to be reported with the failure
     *
     * @return the (potentially null) scope to be reported with the failure
     */
    public String failureScope() {
        return this.failureScope;
    }

    /**
     * Return the (potentially null) OpenID Connect configuration to be reported
     * with the failure
     *
     * @return the (potentially null) OpenID Connect configuration to be reported
     *         with the failure
     */
    public String failureOpenIdConfig() {
        return this.failureOpenIdConfig;
    }

    /**
     * Raise an exception if this instance indicates failure, otherwise do nothing
     *
     * @throws OAuthBearerIllegalTokenException
     *             if this instance indicates failure
     */
    public void throwExceptionIfFailed() throws OAuthBearerIllegalTokenException {
        if (success())
            return;
        throw new OAuthBearerIllegalTokenException(this);
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/internals/unsecured/OAuthBearerValidationUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.internals.unsecured;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.kafka.common.security.oauthbearer.OAuthBearerToken;
public class OAuthBearerValidationUtils {
/**
* Validate the given claim for existence and type. It can be required to exist
* in the given claims, and if it exists it must be one of the types indicated
*
* @param jwt
* the mandatory JWT to which the validation will be applied
* @param required
* true if the claim is required to exist
* @param claimName
* the required claim name identifying the claim to be checked
* @param allowedTypes
* one or more of {@code String.class}, {@code Number.class}, and
* {@code List.class} identifying the type(s) that the claim value is
* allowed to be if it exists
* @return the result of the validation
*/
public static OAuthBearerValidationResult validateClaimForExistenceAndType(OAuthBearerUnsecuredJws jwt,
        boolean required, String claimName, Class<?>... allowedTypes) {
    // both arguments are mandatory; NPE here mirrors the documented contract
    Object claimValue = Objects.requireNonNull(jwt).rawClaim(Objects.requireNonNull(claimName));
    if (claimValue == null) {
        if (required)
            return OAuthBearerValidationResult.newFailure(String.format("Required claim missing: %s", claimName));
        return OAuthBearerValidationResult.newSuccess();
    }
    // the claim exists: succeed as soon as one allowed type matches
    for (Class<?> candidateType : allowedTypes) {
        if (candidateType != null && candidateType.isAssignableFrom(claimValue.getClass()))
            return OAuthBearerValidationResult.newSuccess();
    }
    return OAuthBearerValidationResult.newFailure(String.format("The %s claim had the incorrect type: %s",
            claimName, claimValue.getClass().getSimpleName()));
}
/**
* Validate the 'iat' (Issued At) claim. It can be required to exist in the
* given claims, and if it exists it must be a (potentially fractional) number
* of seconds since the epoch defining when the JWT was issued; it is a
* validation error if the Issued At time is after the time at which the check
* is being done (plus any allowable clock skew).
*
* @param jwt
* the mandatory JWT to which the validation will be applied
* @param required
* true if the claim is required to exist
* @param whenCheckTimeMs
* the time relative to which the validation is to occur
* @param allowableClockSkewMs
* non-negative number to take into account some potential clock skew
* @return the result of the validation
* @throws OAuthBearerConfigException
* if the given allowable clock skew is negative
*/
public static OAuthBearerValidationResult validateIssuedAt(OAuthBearerUnsecuredJws jwt, boolean required,
        long whenCheckTimeMs, int allowableClockSkewMs) throws OAuthBearerConfigException {
    Objects.requireNonNull(jwt);
    Number issuedAt;
    try {
        issuedAt = jwt.issuedAt();
    } catch (OAuthBearerIllegalTokenException e) {
        // the claim existed but was malformed; surface the parse failure directly
        return e.reason();
    }
    if (issuedAt == null)
        return doesNotExistResult(required, "iat");
    // "iat" is (potentially fractional) seconds since the epoch
    double issuedAtSeconds = issuedAt.doubleValue();
    long latestAcceptableMs = whenCheckTimeMs + confirmNonNegative(allowableClockSkewMs);
    if (1000 * issuedAtSeconds > latestAcceptableMs)
        return OAuthBearerValidationResult.newFailure(String.format(
                "The Issued At value (%f seconds) was after the indicated time (%d ms) plus allowable clock skew (%d ms)",
                issuedAtSeconds, whenCheckTimeMs, allowableClockSkewMs));
    return OAuthBearerValidationResult.newSuccess();
}
/**
* Validate the 'exp' (Expiration Time) claim. It must exist and it must be a
* (potentially fractional) number of seconds defining the point at which the
* JWT expires. It is a validation error if the time at which the check is being
* done (minus any allowable clock skew) is on or after the Expiration Time
* time.
*
* @param jwt
* the mandatory JWT to which the validation will be applied
* @param whenCheckTimeMs
* the time relative to which the validation is to occur
* @param allowableClockSkewMs
* non-negative number to take into account some potential clock skew
* @return the result of the validation
* @throws OAuthBearerConfigException
* if the given allowable clock skew is negative
*/
public static OAuthBearerValidationResult validateExpirationTime(OAuthBearerUnsecuredJws jwt, long whenCheckTimeMs,
        int allowableClockSkewMs) throws OAuthBearerConfigException {
    Objects.requireNonNull(jwt);
    Number expirationTime;
    try {
        expirationTime = jwt.expirationTime();
    } catch (OAuthBearerIllegalTokenException e) {
        // the claim existed but was malformed; surface the parse failure directly
        return e.reason();
    }
    if (expirationTime == null)
        return doesNotExistResult(true, "exp"); // "exp" is always required
    // "exp" is (potentially fractional) seconds since the epoch
    double expirationSeconds = expirationTime.doubleValue();
    long earliestAcceptableMs = whenCheckTimeMs - confirmNonNegative(allowableClockSkewMs);
    if (earliestAcceptableMs >= 1000 * expirationSeconds)
        return OAuthBearerValidationResult.newFailure(String.format(
                "The indicated time (%d ms) minus allowable clock skew (%d ms) was on or after the Expiration Time value (%f seconds)",
                whenCheckTimeMs, allowableClockSkewMs, expirationSeconds));
    return OAuthBearerValidationResult.newSuccess();
}
/**
 * Validate the 'iat' (Issued At) and 'exp' (Expiration Time) claims for
 * internal consistency. The following must be true if both claims exist:
 *
 * <pre>
 * exp &gt; iat
 * </pre>
 *
 * @param jwt
 *            the mandatory JWT to which the validation will be applied
 * @return the result of the validation
 */
public static OAuthBearerValidationResult validateTimeConsistency(OAuthBearerUnsecuredJws jwt) {
    Number issuedAtClaim;
    Number expirationClaim;
    try {
        issuedAtClaim = Objects.requireNonNull(jwt).issuedAt();
        expirationClaim = jwt.expirationTime();
    } catch (OAuthBearerIllegalTokenException e) {
        return e.reason();
    }
    // Consistency can only be checked when both claims are present.
    if (expirationClaim == null || issuedAtClaim == null)
        return OAuthBearerValidationResult.newSuccess();
    double expirationSeconds = expirationClaim.doubleValue();
    double issuedAtSeconds = issuedAtClaim.doubleValue();
    // Fail only when exp <= iat (NaN comparisons are false and therefore pass).
    if (expirationSeconds <= issuedAtSeconds)
        return OAuthBearerValidationResult.newFailure(
                String.format("The Expiration Time time (%f seconds) was not after the Issued At time (%f seconds)",
                        expirationSeconds, issuedAtSeconds));
    return OAuthBearerValidationResult.newSuccess();
}
/**
 * Validate the given token's scope against the required scope. Every required
 * scope element (if any) must exist in the provided token's scope for the
 * validation to succeed.
 *
 * @param token
 *            the mandatory token whose scope is to be validated
 * @param requiredScope
 *            the optional required scope against which the given token's scope
 *            will be validated
 * @return the result of the validation
 */
public static OAuthBearerValidationResult validateScope(OAuthBearerToken token, List<String> requiredScope) {
    final Set<String> tokenScope = token.scope();
    // No required scope means there is nothing to check.
    if (requiredScope == null || requiredScope.isEmpty())
        return OAuthBearerValidationResult.newSuccess();
    for (String requiredScopeElement : requiredScope) {
        if (!tokenScope.contains(requiredScopeElement))
            // Fixed typo in the error message: "mising" -> "missing".
            return OAuthBearerValidationResult.newFailure(String.format(
                    "The provided scope (%s) was missing a required scope (%s). All required scope elements: %s",
                    String.valueOf(tokenScope), requiredScopeElement, requiredScope),
                    requiredScope.toString(), null);
    }
    return OAuthBearerValidationResult.newSuccess();
}
/**
 * Returns the given clock skew unchanged when it is non-negative.
 *
 * @throws OAuthBearerConfigException if the value is negative
 */
private static int confirmNonNegative(int allowableClockSkewMs) throws OAuthBearerConfigException {
    if (allowableClockSkewMs >= 0)
        return allowableClockSkewMs;
    throw new OAuthBearerConfigException(
            String.format("Allowable clock skew must not be negative: %d", allowableClockSkewMs));
}
/**
 * Result for a missing claim: failure when the claim is required, success otherwise.
 */
private static OAuthBearerValidationResult doesNotExistResult(boolean required, String claimName) {
    if (!required)
        return OAuthBearerValidationResult.newSuccess();
    return OAuthBearerValidationResult.newFailure(String.format("Required claim missing: %s", claimName));
}
private OAuthBearerValidationUtils() {
    // Static utility class; prevent instantiation.
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/secured/OAuthBearerLoginCallbackHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.secured;
/**
 * A deprecated placeholder that subclasses the relocated implementation so existing
 * configurations referencing this class name keep working.
 *
 * @deprecated Use {@link org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginCallbackHandler} instead.
 */
@Deprecated
public class OAuthBearerLoginCallbackHandler extends org.apache.kafka.common.security.oauthbearer.OAuthBearerLoginCallbackHandler {
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/secured/OAuthBearerValidatorCallbackHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.oauthbearer.secured;
/**
 * A deprecated placeholder that subclasses the relocated implementation so existing
 * configurations referencing this class name keep working.
 *
 * @deprecated Use {@link org.apache.kafka.common.security.oauthbearer.OAuthBearerValidatorCallbackHandler} instead.
 */
@Deprecated
public class OAuthBearerValidatorCallbackHandler extends org.apache.kafka.common.security.oauthbearer.OAuthBearerValidatorCallbackHandler {
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/oauthbearer/secured/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* This package is deprecated.
* See {@link org.apache.kafka.common.security.oauthbearer}
*/
package org.apache.kafka.common.security.oauthbearer.secured; |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain/PlainAuthenticateCallback.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.plain;
import javax.security.auth.callback.Callback;
/**
 * Authentication callback for SASL/PLAIN authentication. Callback handler must
 * set authenticated flag to true if the client provided password in the callback
 * matches the expected password.
 */
public class PlainAuthenticateCallback implements Callback {
    // The client-supplied password to be verified (held as char[], not String).
    private final char[] password;
    // Verification outcome; set by the server-side callback handler.
    private boolean authenticated;

    /**
     * Creates a callback with the password provided by the client
     * @param password The password provided by the client during SASL/PLAIN authentication
     */
    public PlainAuthenticateCallback(char[] password) {
        this.password = password;
    }

    /**
     * Returns the password provided by the client during SASL/PLAIN authentication
     */
    public char[] password() {
        return password;
    }

    /**
     * Returns true if client password matches expected password, false otherwise.
     * This state is set by the server-side callback handler.
     */
    public boolean authenticated() {
        return this.authenticated;
    }

    /**
     * Sets the authenticated state. This is set by the server-side callback handler
     * by matching the client provided password with the expected password.
     *
     * @param authenticated true indicates successful authentication
     */
    public void authenticated(boolean authenticated) {
        this.authenticated = authenticated;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain/PlainLoginModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.plain;
import org.apache.kafka.common.security.plain.internals.PlainSaslServerProvider;
import java.util.Map;
import javax.security.auth.Subject;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.spi.LoginModule;
/**
 * JAAS login module for SASL/PLAIN. Copies the "username" option to the Subject's
 * public credentials and the "password" option to its private credentials; the
 * SASL client/server implementations read them from there.
 */
public class PlainLoginModule implements LoginModule {

    private static final String USERNAME_CONFIG = "username";
    private static final String PASSWORD_CONFIG = "password";

    static {
        // Register the PLAIN SaslServer factory once, when this class is first loaded.
        PlainSaslServerProvider.initialize();
    }

    @Override
    public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState, Map<String, ?> options) {
        String configuredUsername = (String) options.get(USERNAME_CONFIG);
        if (configuredUsername != null) {
            subject.getPublicCredentials().add(configuredUsername);
        }
        String configuredPassword = (String) options.get(PASSWORD_CONFIG);
        if (configuredPassword != null) {
            subject.getPrivateCredentials().add(configuredPassword);
        }
    }

    @Override
    public boolean login() {
        // No interactive login phase; credentials were stored in initialize().
        return true;
    }

    @Override
    public boolean commit() {
        return true;
    }

    @Override
    public boolean abort() {
        return false;
    }

    @Override
    public boolean logout() {
        return true;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Provides implementation to use plaintext credentials authentication for securing Kafka clusters.
*/
package org.apache.kafka.common.security.plain; |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain/internals/PlainSaslServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.plain.internals;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.sasl.Sasl;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
import javax.security.sasl.SaslServerFactory;
import org.apache.kafka.common.errors.SaslAuthenticationException;
import org.apache.kafka.common.security.plain.PlainAuthenticateCallback;
/**
* Simple SaslServer implementation for SASL/PLAIN. In order to make this implementation
* fully pluggable, authentication of username/password is fully contained within the
* server implementation.
* <p>
* Valid users with passwords are specified in the Jaas configuration file. Each user
* is specified with user_<username> as key and <password> as value. This is consistent
* with Zookeeper Digest-MD5 implementation.
* <p>
* To avoid storing clear passwords on disk or to integrate with external authentication
* servers in production systems, this module can be replaced with a different implementation.
*
*/
public class PlainSaslServer implements SaslServer {

    public static final String PLAIN_MECHANISM = "PLAIN";

    // Server-side handler; expected to process a NameCallback followed by a
    // PlainAuthenticateCallback (see evaluateResponse).
    private final CallbackHandler callbackHandler;
    // True once a client response has been successfully authenticated.
    private boolean complete;
    // Authorization id established by a successful exchange; always the authenticated username.
    private String authorizationId;

    public PlainSaslServer(CallbackHandler callbackHandler) {
        this.callbackHandler = callbackHandler;
    }

    /**
     * @throws SaslAuthenticationException if username/password combination is invalid or if the requested
     *         authorization id is not the same as username.
     * <p>
     * <b>Note:</b> This method may throw {@link SaslAuthenticationException} to provide custom error messages
     * to clients. But care should be taken to avoid including any information in the exception message that
     * should not be leaked to unauthenticated clients. It may be safer to throw {@link SaslException} in
     * some cases so that a standard error message is returned to clients.
     * </p>
     */
    @Override
    public byte[] evaluateResponse(byte[] responseBytes) throws SaslAuthenticationException {
        /*
         * Message format (from https://tools.ietf.org/html/rfc4616):
         *
         * message   = [authzid] UTF8NUL authcid UTF8NUL passwd
         * authcid   = 1*SAFE ; MUST accept up to 255 octets
         * authzid   = 1*SAFE ; MUST accept up to 255 octets
         * passwd    = 1*SAFE ; MUST accept up to 255 octets
         * UTF8NUL   = %x00 ; UTF-8 encoded NUL character
         *
         * SAFE      = UTF1 / UTF2 / UTF3 / UTF4
         *                ;; any UTF-8 encoded Unicode character except NUL
         */
        String response = new String(responseBytes, StandardCharsets.UTF_8);
        // extractTokens guarantees exactly 3 tokens: [authzid, authcid, passwd].
        List<String> tokens = extractTokens(response);
        String authorizationIdFromClient = tokens.get(0);
        String username = tokens.get(1);
        String password = tokens.get(2);
        if (username.isEmpty()) {
            throw new SaslAuthenticationException("Authentication failed: username not specified");
        }
        if (password.isEmpty()) {
            throw new SaslAuthenticationException("Authentication failed: password not specified");
        }
        // NameCallback must precede PlainAuthenticateCallback so the handler knows the
        // user before it verifies the password.
        NameCallback nameCallback = new NameCallback("username", username);
        PlainAuthenticateCallback authenticateCallback = new PlainAuthenticateCallback(password.toCharArray());
        try {
            callbackHandler.handle(new Callback[]{nameCallback, authenticateCallback});
        } catch (Throwable e) {
            // Deliberately broad so any handler failure maps to an authentication failure.
            // NOTE(review): also catches Errors — confirm this is intentional.
            throw new SaslAuthenticationException("Authentication failed: credentials for user could not be verified", e);
        }
        if (!authenticateCallback.authenticated())
            throw new SaslAuthenticationException("Authentication failed: Invalid username or password");
        // Only self-authorization is supported: an explicit authzid must equal the username.
        if (!authorizationIdFromClient.isEmpty() && !authorizationIdFromClient.equals(username))
            throw new SaslAuthenticationException("Authentication failed: Client requested an authorization id that is different from username");
        this.authorizationId = username;
        complete = true;
        // PLAIN is a single-round-trip mechanism; an empty challenge completes the exchange.
        return new byte[0];
    }

    /**
     * Splits the RFC 4616 response on NUL separators. The loop runs up to 4 times so a
     * response containing too many separators yields a 4th token and fails the size check.
     */
    private List<String> extractTokens(String string) {
        List<String> tokens = new ArrayList<>();
        int startIndex = 0;
        for (int i = 0; i < 4; ++i) {
            int endIndex = string.indexOf("\u0000", startIndex);
            if (endIndex == -1) {
                tokens.add(string.substring(startIndex));
                break;
            }
            tokens.add(string.substring(startIndex, endIndex));
            startIndex = endIndex + 1;
        }
        if (tokens.size() != 3)
            throw new SaslAuthenticationException("Invalid SASL/PLAIN response: expected 3 tokens, got " +
                tokens.size());
        return tokens;
    }

    @Override
    public String getAuthorizationID() {
        if (!complete)
            throw new IllegalStateException("Authentication exchange has not completed");
        return authorizationId;
    }

    @Override
    public String getMechanismName() {
        return PLAIN_MECHANISM;
    }

    @Override
    public Object getNegotiatedProperty(String propName) {
        if (!complete)
            throw new IllegalStateException("Authentication exchange has not completed");
        // No properties are negotiated by this mechanism.
        return null;
    }

    @Override
    public boolean isComplete() {
        return complete;
    }

    // PLAIN negotiates no security layer, so wrap/unwrap simply copy the requested range.
    @Override
    public byte[] unwrap(byte[] incoming, int offset, int len) {
        if (!complete)
            throw new IllegalStateException("Authentication exchange has not completed");
        return Arrays.copyOfRange(incoming, offset, offset + len);
    }

    @Override
    public byte[] wrap(byte[] outgoing, int offset, int len) {
        if (!complete)
            throw new IllegalStateException("Authentication exchange has not completed");
        return Arrays.copyOfRange(outgoing, offset, offset + len);
    }

    @Override
    public void dispose() {
        // Nothing to release.
    }

    /**
     * Factory registered with the JCA provider; creates a PlainSaslServer for the
     * PLAIN mechanism only.
     */
    public static class PlainSaslServerFactory implements SaslServerFactory {

        @Override
        public SaslServer createSaslServer(String mechanism, String protocol, String serverName, Map<String, ?> props, CallbackHandler cbh)
            throws SaslException {
            if (!PLAIN_MECHANISM.equals(mechanism))
                throw new SaslException(String.format("Mechanism \'%s\' is not supported. Only PLAIN is supported.", mechanism));
            return new PlainSaslServer(cbh);
        }

        @Override
        public String[] getMechanismNames(Map<String, ?> props) {
            if (props == null) return new String[]{PLAIN_MECHANISM};
            // Honor the SASL "no plaintext" policy: advertise nothing when it is requested.
            String noPlainText = (String) props.get(Sasl.POLICY_NOPLAINTEXT);
            if ("true".equals(noPlainText))
                return new String[]{};
            else
                return new String[]{PLAIN_MECHANISM};
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain/internals/PlainSaslServerProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.plain.internals;
import java.security.Provider;
import java.security.Security;
import org.apache.kafka.common.security.plain.internals.PlainSaslServer.PlainSaslServerFactory;
/**
 * Security provider that registers the SASL/PLAIN server factory so it can be
 * discovered through the standard javax.security.sasl mechanism lookup.
 */
public class PlainSaslServerProvider extends Provider {

    private static final long serialVersionUID = 1L;

    // Suppression presumably covers the Provider(String, double, String) constructor,
    // which is deprecated in newer JDKs — confirm against the target JDK.
    @SuppressWarnings("deprecation")
    protected PlainSaslServerProvider() {
        super("Simple SASL/PLAIN Server Provider", 1.0, "Simple SASL/PLAIN Server Provider for Kafka");
        // Register the factory under the standard "SaslServerFactory.<mechanism>" key.
        put("SaslServerFactory." + PlainSaslServer.PLAIN_MECHANISM, PlainSaslServerFactory.class.getName());
    }

    /** Installs this provider into the JVM-wide security provider list. */
    public static void initialize() {
        Security.addProvider(new PlainSaslServerProvider());
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/plain/internals/PlainServerCallbackHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.plain.internals;
import org.apache.kafka.common.security.JaasContext;
import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.security.plain.PlainAuthenticateCallback;
import org.apache.kafka.common.security.plain.PlainLoginModule;
import org.apache.kafka.common.utils.Utils;
import java.io.IOException;
import java.util.List;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.AppConfigurationEntry;
/**
 * Default server-side callback handler for SASL/PLAIN. Expected passwords are read
 * from the JAAS configuration, where each user appears as an option named
 * "user_&lt;username&gt;" whose value is the password.
 */
public class PlainServerCallbackHandler implements AuthenticateCallbackHandler {

    private static final String JAAS_USER_PREFIX = "user_";

    private List<AppConfigurationEntry> jaasConfigEntries;

    @Override
    public void configure(Map<String, ?> configs, String mechanism, List<AppConfigurationEntry> jaasConfigEntries) {
        this.jaasConfigEntries = jaasConfigEntries;
    }

    @Override
    public void handle(Callback[] callbacks) throws IOException, UnsupportedCallbackException {
        // The NameCallback is expected before the PlainAuthenticateCallback, so the
        // username is known by the time the password must be verified.
        String username = null;
        for (Callback callback : callbacks) {
            if (callback instanceof NameCallback) {
                username = ((NameCallback) callback).getDefaultName();
            } else if (callback instanceof PlainAuthenticateCallback) {
                PlainAuthenticateCallback authCallback = (PlainAuthenticateCallback) callback;
                authCallback.authenticated(authenticate(username, authCallback.password()));
            } else {
                throw new UnsupportedCallbackException(callback);
            }
        }
    }

    /**
     * Verifies the password for the given user against the JAAS-configured value
     * using a constant-time comparison. Returns false when the user is unknown.
     */
    protected boolean authenticate(String username, char[] password) throws IOException {
        if (username == null)
            return false;
        String expectedPassword = JaasContext.configEntryOption(jaasConfigEntries,
                JAAS_USER_PREFIX + username,
                PlainLoginModule.class.getName());
        if (expectedPassword == null)
            return false;
        return Utils.isEqualConstantTime(password, expectedPassword.toCharArray());
    }

    @Override
    public void close() throws KafkaException {
        // Nothing to release.
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/ScramCredential.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram;
/**
 * SCRAM credential class that encapsulates the credential data persisted for each user that is
 * accessible to the server. See <a href="https://tools.ietf.org/html/rfc5802#section-5">RFC rfc5802</a>
 * for details.
 */
public class ScramCredential {

    private final byte[] salt;
    private final byte[] storedKey;
    private final byte[] serverKey;
    private final int iterations;

    /**
     * Constructs a new credential.
     *
     * @param salt       salt used when processing this credential
     * @param storedKey  stored key derived from the client password
     * @param serverKey  server key derived from the client password
     * @param iterations iteration count used by the SCRAM algorithm
     */
    public ScramCredential(byte[] salt, byte[] storedKey, byte[] serverKey, int iterations) {
        this.salt = salt;
        this.storedKey = storedKey;
        this.serverKey = serverKey;
        this.iterations = iterations;
    }

    /**
     * Returns the salt used to process this credential using the SCRAM algorithm.
     */
    public byte[] salt() {
        return this.salt;
    }

    /**
     * Stored key computed from the client password using the SCRAM algorithm.
     */
    public byte[] storedKey() {
        return this.storedKey;
    }

    /**
     * Server key computed from the client password using the SCRAM algorithm.
     */
    public byte[] serverKey() {
        return this.serverKey;
    }

    /**
     * Number of iterations used to process this credential using the SCRAM algorithm.
     */
    public int iterations() {
        return this.iterations;
    }
}
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/ScramCredentialCallback.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram;
import javax.security.auth.callback.Callback;
/**
 * Callback used for SCRAM mechanisms.
 */
public class ScramCredentialCallback implements Callback {

    private ScramCredential scramCredential;

    /**
     * Returns the SCRAM credential if one has been set on this instance, otherwise null.
     */
    public ScramCredential scramCredential() {
        return this.scramCredential;
    }

    /**
     * Sets the SCRAM credential for this instance.
     *
     * @param scramCredential the credential looked up for the authenticating user
     */
    public void scramCredential(ScramCredential scramCredential) {
        this.scramCredential = scramCredential;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/ScramExtensionsCallback.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram;
import javax.security.auth.callback.Callback;
import java.util.Collections;
import java.util.Map;
/**
 * Optional callback used for SCRAM mechanisms if any extensions need to be set
 * in the SASL/SCRAM exchange.
 */
public class ScramExtensionsCallback implements Callback {

    // Extension name/value pairs sent in the initial client SCRAM message.
    private Map<String, String> extensions = Collections.emptyMap();

    /**
     * Returns the map of extension names and values that are sent by the client to
     * the server in the initial client SCRAM authentication message.
     * The default is an empty unmodifiable map.
     */
    public Map<String, String> extensions() {
        return this.extensions;
    }

    /**
     * Sets the SCRAM extensions on this callback. Maps passed in should be unmodifiable.
     *
     * @param extensions the extension name/value pairs to send
     */
    public void extensions(Map<String, String> extensions) {
        this.extensions = extensions;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/ScramLoginModule.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram;
import org.apache.kafka.common.security.scram.internals.ScramSaslClientProvider;
import org.apache.kafka.common.security.scram.internals.ScramSaslServerProvider;
import java.util.Collections;
import java.util.Map;
import javax.security.auth.Subject;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.spi.LoginModule;
/**
 * JAAS login module for SASL/SCRAM. Copies the "username" option to the Subject's
 * public credentials and the "password" option to its private credentials; when the
 * "tokenauth" option is "true", also publishes a singleton extensions map so the
 * SCRAM client advertises delegation-token authentication.
 */
public class ScramLoginModule implements LoginModule {

    private static final String USERNAME_CONFIG = "username";
    private static final String PASSWORD_CONFIG = "password";
    public static final String TOKEN_AUTH_CONFIG = "tokenauth";

    static {
        // Register the SCRAM SASL client and server factories once, on class load.
        ScramSaslClientProvider.initialize();
        ScramSaslServerProvider.initialize();
    }

    @Override
    public void initialize(Subject subject, CallbackHandler callbackHandler, Map<String, ?> sharedState, Map<String, ?> options) {
        String username = (String) options.get(USERNAME_CONFIG);
        if (username != null)
            subject.getPublicCredentials().add(username);
        String password = (String) options.get(PASSWORD_CONFIG);
        if (password != null)
            subject.getPrivateCredentials().add(password);
        // Use a primitive boolean: the original boxed Boolean caused needless autoboxing.
        boolean useTokenAuthentication = "true".equalsIgnoreCase((String) options.get(TOKEN_AUTH_CONFIG));
        if (useTokenAuthentication) {
            Map<String, String> scramExtensions = Collections.singletonMap(TOKEN_AUTH_CONFIG, "true");
            subject.getPublicCredentials().add(scramExtensions);
        }
    }

    @Override
    public boolean login() {
        // No interactive login phase; credentials were stored in initialize().
        return true;
    }

    @Override
    public boolean logout() {
        return true;
    }

    @Override
    public boolean commit() {
        return true;
    }

    @Override
    public boolean abort() {
        return false;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Provides an adapter for using the Salted Challenge Response Authentication Mechanism (SCRAM) to secure Kafka clusters.
*/
package org.apache.kafka.common.security.scram; |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramCredentialUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.util.Base64;
import java.util.Collection;
import java.util.Properties;
import org.apache.kafka.common.security.authenticator.CredentialCache;
import org.apache.kafka.common.security.scram.ScramCredential;
/**
* SCRAM Credential persistence utility functions. Implements format conversion used
* for the credential store implemented in Kafka. Credentials are persisted as a comma-separated
* String of key-value pairs:
* <pre>
* salt=<i>salt</i>,stored_key=<i>stored_key</i>,server_key=<i>server_key</i>,iterations=<i>iterations</i>
* </pre>
*
*/
/**
 * Utility for converting SCRAM credentials to and from their persisted text form:
 * a comma-separated list of {@code key=value} pairs with keys
 * {@code salt}, {@code stored_key}, {@code server_key} (base64-encoded) and
 * {@code iterations} (decimal).
 */
public final class ScramCredentialUtils {
    private static final String SALT = "salt";
    private static final String STORED_KEY = "stored_key";
    private static final String SERVER_KEY = "server_key";
    private static final String ITERATIONS = "iterations";

    private ScramCredentialUtils() {}

    /**
     * Serializes {@code credential} into the persisted text form described above.
     */
    public static String credentialToString(ScramCredential credential) {
        Base64.Encoder encoder = Base64.getEncoder();
        return SALT + '=' + encoder.encodeToString(credential.salt())
                + ',' + STORED_KEY + '=' + encoder.encodeToString(credential.storedKey())
                + ',' + SERVER_KEY + '=' + encoder.encodeToString(credential.serverKey())
                + ',' + ITERATIONS + '=' + credential.iterations();
    }

    /**
     * Parses the persisted text form back into a {@link ScramCredential}.
     *
     * @throws IllegalArgumentException if {@code str} does not contain exactly the
     *         four expected keys, or a pair is malformed
     */
    public static ScramCredential credentialFromString(String str) {
        Properties props = toProps(str);
        boolean valid = props.size() == 4
                && props.containsKey(SALT)
                && props.containsKey(STORED_KEY)
                && props.containsKey(SERVER_KEY)
                && props.containsKey(ITERATIONS);
        if (!valid)
            throw new IllegalArgumentException("Credentials not valid: " + str);
        Base64.Decoder decoder = Base64.getDecoder();
        return new ScramCredential(
                decoder.decode(props.getProperty(SALT)),
                decoder.decode(props.getProperty(STORED_KEY)),
                decoder.decode(props.getProperty(SERVER_KEY)),
                Integer.parseInt(props.getProperty(ITERATIONS)));
    }

    /**
     * Splits a comma-separated {@code key=value} list into a Properties map.
     * A token with no '=' or with an empty key is rejected.
     */
    private static Properties toProps(String str) {
        Properties props = new Properties();
        for (String token : str.split(",")) {
            int eq = token.indexOf('=');
            if (eq <= 0)
                throw new IllegalArgumentException("Credentials not valid: " + str);
            props.put(token.substring(0, eq), token.substring(eq + 1));
        }
        return props;
    }

    /**
     * Registers a credential cache for each supported SCRAM mechanism name
     * present in {@code mechanisms}.
     */
    public static void createCache(CredentialCache cache, Collection<String> mechanisms) {
        for (String mechanismName : ScramMechanism.mechanismNames()) {
            if (mechanisms.contains(mechanismName))
                cache.createCache(mechanismName, ScramCredential.class);
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramExtensions.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import org.apache.kafka.common.security.auth.SaslExtensions;
import org.apache.kafka.common.security.scram.ScramLoginModule;
import org.apache.kafka.common.utils.Utils;
import java.util.Collections;
import java.util.Map;
/**
 * SCRAM SASL extensions: the optional attribute/value pairs carried in the SCRAM
 * client-first message. A thin wrapper over the generic {@link SaslExtensions} map.
 */
public class ScramExtensions extends SaslExtensions {
// Empty extension set.
public ScramExtensions() {
this(Collections.emptyMap());
}
// Parses a wire-format extension string of the form "key1=value1,key2=value2".
public ScramExtensions(String extensions) {
this(Utils.parseMap(extensions, "=", ","));
}
// Wraps the given extension map directly.
public ScramExtensions(Map<String, String> extensionMap) {
super(extensionMap);
}
// True when the "tokenauth" extension is present and equals "true" (case-insensitive),
// i.e. the client authenticates with a delegation token instead of a username/password.
public boolean tokenAuthenticated() {
return Boolean.parseBoolean(map().get(ScramLoginModule.TOKEN_AUTH_CONFIG));
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramFormatter.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.math.BigInteger;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.security.SecureRandom;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.crypto.Mac;
import javax.crypto.spec.SecretKeySpec;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.security.scram.ScramCredential;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ClientFinalMessage;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ClientFirstMessage;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ServerFirstMessage;
/**
* Scram message salt and hash functions defined in <a href="https://tools.ietf.org/html/rfc5802">RFC 5802</a>.
*/
public class ScramFormatter {
// Precompiled literal patterns for RFC 5802 saslname escaping: '=' <-> "=3D", ',' <-> "=2C".
private static final Pattern EQUAL = Pattern.compile("=", Pattern.LITERAL);
private static final Pattern COMMA = Pattern.compile(",", Pattern.LITERAL);
private static final Pattern EQUAL_TWO_C = Pattern.compile("=2C", Pattern.LITERAL);
private static final Pattern EQUAL_THREE_D = Pattern.compile("=3D", Pattern.LITERAL);
// NOTE(review): a single MessageDigest/Mac pair is shared across all instance methods,
// so an instance is presumably not safe for concurrent use — confirm callers are single-threaded.
private final MessageDigest messageDigest;
private final Mac mac;
private final SecureRandom random;
// Builds a formatter using the hash (H) and HMAC algorithms of the given SCRAM mechanism.
// Throws NoSuchAlgorithmException if the JCA provider lacks either algorithm.
public ScramFormatter(ScramMechanism mechanism) throws NoSuchAlgorithmException {
this.messageDigest = MessageDigest.getInstance(mechanism.hashAlgorithm());
this.mac = Mac.getInstance(mechanism.macAlgorithm());
this.random = new SecureRandom();
}
// RFC 5802 HMAC(key, str): keyed MAC over bytes using this mechanism's HMAC algorithm.
public byte[] hmac(byte[] key, byte[] bytes) throws InvalidKeyException {
mac.init(new SecretKeySpec(key, mac.getAlgorithm()));
return mac.doFinal(bytes);
}
// RFC 5802 H(str): one-shot digest (digest() resets the shared MessageDigest).
public byte[] hash(byte[] str) {
return messageDigest.digest(str);
}
// Byte-wise XOR of two equal-length arrays; used for proofs/signatures and Hi().
public static byte[] xor(byte[] first, byte[] second) {
if (first.length != second.length)
throw new IllegalArgumentException("Argument arrays must be of the same length");
byte[] result = new byte[first.length];
for (int i = 0; i < result.length; i++)
result[i] = (byte) (first[i] ^ second[i]);
return result;
}
// RFC 5802 Hi(str, salt, i): iterated, salted HMAC (the PBKDF2-style key stretch).
// U1 = HMAC(str, salt || INT(1)); Un = HMAC(str, Un-1); result = U1 XOR ... XOR Ui.
public byte[] hi(byte[] str, byte[] salt, int iterations) throws InvalidKeyException {
mac.init(new SecretKeySpec(str, mac.getAlgorithm()));
mac.update(salt);
byte[] u1 = mac.doFinal(new byte[]{0, 0, 0, 1});
byte[] prev = u1;
byte[] result = u1;
for (int i = 2; i <= iterations; i++) {
byte[] ui = hmac(str, prev);
result = xor(result, ui);
prev = ui;
}
return result;
}
// RFC 5802 Normalize(str) — implemented here as plain UTF-8 encoding (no SASLprep).
public static byte[] normalize(String str) {
return toBytes(str);
}
// SaltedPassword := Hi(Normalize(password), salt, i)
public byte[] saltedPassword(String password, byte[] salt, int iterations) throws InvalidKeyException {
return hi(normalize(password), salt, iterations);
}
// ClientKey := HMAC(SaltedPassword, "Client Key")
public byte[] clientKey(byte[] saltedPassword) throws InvalidKeyException {
return hmac(saltedPassword, toBytes("Client Key"));
}
// StoredKey := H(ClientKey)
public byte[] storedKey(byte[] clientKey) {
return hash(clientKey);
}
// Escapes a username into an RFC 5802 saslname: '=' -> "=3D" first, then ',' -> "=2C".
public static String saslName(String username) {
String replace1 = EQUAL.matcher(username).replaceAll(Matcher.quoteReplacement("=3D"));
return COMMA.matcher(replace1).replaceAll(Matcher.quoteReplacement("=2C"));
}
// Reverses saslName(): "=2C" -> ',' and "=3D" -> '='. Any other '=' left after
// stripping "=3D" sequences means the input was not a valid escaped saslname.
public static String username(String saslName) {
String username = EQUAL_TWO_C.matcher(saslName).replaceAll(Matcher.quoteReplacement(","));
if (EQUAL_THREE_D.matcher(username).replaceAll(Matcher.quoteReplacement("")).indexOf('=') >= 0) {
throw new IllegalArgumentException("Invalid username: " + saslName);
}
return EQUAL_THREE_D.matcher(username).replaceAll(Matcher.quoteReplacement("="));
}
// AuthMessage := client-first-message-bare + "," + server-first-message + "," +
// client-final-message-without-proof (RFC 5802).
public static String authMessage(String clientFirstMessageBare, String serverFirstMessage, String clientFinalMessageWithoutProof) {
return clientFirstMessageBare + "," + serverFirstMessage + "," + clientFinalMessageWithoutProof;
}
// ClientSignature := HMAC(StoredKey, AuthMessage)
public byte[] clientSignature(byte[] storedKey, ClientFirstMessage clientFirstMessage, ServerFirstMessage serverFirstMessage, ClientFinalMessage clientFinalMessage) throws InvalidKeyException {
byte[] authMessage = authMessage(clientFirstMessage, serverFirstMessage, clientFinalMessage);
return hmac(storedKey, authMessage);
}
// ClientProof := ClientKey XOR ClientSignature — what the client sends in its final message.
public byte[] clientProof(byte[] saltedPassword, ClientFirstMessage clientFirstMessage, ServerFirstMessage serverFirstMessage, ClientFinalMessage clientFinalMessage) throws InvalidKeyException {
byte[] clientKey = clientKey(saltedPassword);
byte[] storedKey = hash(clientKey);
byte[] clientSignature = hmac(storedKey, authMessage(clientFirstMessage, serverFirstMessage, clientFinalMessage));
return xor(clientKey, clientSignature);
}
// UTF-8 bytes of the AuthMessage assembled from the three exchanged messages.
private byte[] authMessage(ClientFirstMessage clientFirstMessage, ServerFirstMessage serverFirstMessage, ClientFinalMessage clientFinalMessage) {
return toBytes(authMessage(clientFirstMessage.clientFirstMessageBare(),
serverFirstMessage.toMessage(),
clientFinalMessage.clientFinalMessageWithoutProof()));
}
// Server-side recovery: StoredKey := H(ClientSignature XOR ClientProof).
public byte[] storedKey(byte[] clientSignature, byte[] clientProof) {
return hash(xor(clientSignature, clientProof));
}
// ServerKey := HMAC(SaltedPassword, "Server Key")
public byte[] serverKey(byte[] saltedPassword) throws InvalidKeyException {
return hmac(saltedPassword, toBytes("Server Key"));
}
// ServerSignature := HMAC(ServerKey, AuthMessage) — proves the server knows the credential.
public byte[] serverSignature(byte[] serverKey, ClientFirstMessage clientFirstMessage, ServerFirstMessage serverFirstMessage, ClientFinalMessage clientFinalMessage) throws InvalidKeyException {
byte[] authMessage = authMessage(clientFirstMessage, serverFirstMessage, clientFinalMessage);
return hmac(serverKey, authMessage);
}
// Random printable nonce string from this instance's SecureRandom.
public String secureRandomString() {
return secureRandomString(random);
}
// 130 random bits rendered in base-36 (Character.MAX_RADIX); length may vary slightly.
public static String secureRandomString(SecureRandom random) {
return new BigInteger(130, random).toString(Character.MAX_RADIX);
}
// Random salt bytes (UTF-8 bytes of a random string).
public byte[] secureRandomBytes() {
return secureRandomBytes(random);
}
public static byte[] secureRandomBytes(SecureRandom random) {
return toBytes(secureRandomString(random));
}
// UTF-8 encoding helper used throughout.
public static byte[] toBytes(String str) {
return str.getBytes(StandardCharsets.UTF_8);
}
// Generates a complete credential from a password: random salt, then the salted-password path.
public ScramCredential generateCredential(String password, int iterations) {
try {
byte[] salt = secureRandomBytes();
byte[] saltedPassword = saltedPassword(password, salt, iterations);
return generateCredential(salt, saltedPassword, iterations);
} catch (InvalidKeyException e) {
throw new KafkaException("Could not create credential", e);
}
}
// Derives the stored key and server key from an already-salted password and packages
// them with the salt and iteration count.
public ScramCredential generateCredential(byte[] salt, byte[] saltedPassword, int iterations) {
try {
byte[] clientKey = clientKey(saltedPassword);
byte[] storedKey = storedKey(clientKey);
byte[] serverKey = serverKey(saltedPassword);
return new ScramCredential(salt, storedKey, serverKey, iterations);
} catch (InvalidKeyException e) {
throw new KafkaException("Could not create credential", e);
}
}
}
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramMechanism.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
/*
* This code is duplicated in org.apache.kafka.clients.admin.ScramMechanism.
* The type field in both files must match and must not change. The type field
* is used both for passing ScramCredentialUpsertion and for the internal
* UserScramCredentialRecord. Do not change the type field.
*/
public enum ScramMechanism {
    SCRAM_SHA_256((byte) 1, "SHA-256", "HmacSHA256", 4096, 16384),
    SCRAM_SHA_512((byte) 2, "SHA-512", "HmacSHA512", 4096, 16384);

    // Wire-level type indicator shared with org.apache.kafka.clients.admin.ScramMechanism
    // and persisted in UserScramCredentialRecord — it must never change (see note above).
    private final byte type;
    private final String mechanismName;
    private final String hashAlgorithm;
    private final String macAlgorithm;
    private final int minIterations;
    private final int maxIterations;

    /** Index of every supported mechanism keyed by its SASL name, e.g. "SCRAM-SHA-512". */
    private static final Map<String, ScramMechanism> MECHANISMS_MAP;

    static {
        Map<String, ScramMechanism> byName = new HashMap<>();
        for (ScramMechanism mechanism : values())
            byName.put(mechanism.mechanismName, mechanism);
        MECHANISMS_MAP = Collections.unmodifiableMap(byName);
    }

    ScramMechanism(
        byte typeId,
        String hashName,
        String macName,
        int minIters,
        int maxIters
    ) {
        this.type = typeId;
        // The SASL mechanism name is derived from the hash algorithm name.
        this.mechanismName = "SCRAM-" + hashName;
        this.hashAlgorithm = hashName;
        this.macAlgorithm = macName;
        this.minIterations = minIters;
        this.maxIterations = maxIters;
    }

    /** @return the SASL mechanism name, e.g. "SCRAM-SHA-256" */
    public final String mechanismName() {
        return this.mechanismName;
    }

    /** @return the JCA MessageDigest algorithm name, e.g. "SHA-256" */
    public String hashAlgorithm() {
        return this.hashAlgorithm;
    }

    /** @return the JCA Mac algorithm name, e.g. "HmacSHA256" */
    public String macAlgorithm() {
        return this.macAlgorithm;
    }

    /** @return the smallest iteration count this mechanism accepts */
    public int minIterations() {
        return this.minIterations;
    }

    /** @return the largest iteration count this mechanism accepts */
    public int maxIterations() {
        return this.maxIterations;
    }

    /** @return the mechanism with the given SASL name, or {@code null} if unknown */
    public static ScramMechanism forMechanismName(String mechanismName) {
        return MECHANISMS_MAP.get(mechanismName);
    }

    /** @return the SASL names of all supported SCRAM mechanisms */
    public static Collection<String> mechanismNames() {
        return MECHANISMS_MAP.keySet();
    }

    /** @return whether the given SASL mechanism name denotes a supported SCRAM mechanism */
    public static boolean isScram(String mechanismName) {
        return MECHANISMS_MAP.containsKey(mechanismName);
    }

    /**
     *
     * @return the type indicator for this SASL SCRAM mechanism
     */
    public byte type() {
        return this.type;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramMessages.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import org.apache.kafka.common.utils.Utils;
import java.nio.charset.StandardCharsets;
import java.util.Base64;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import javax.security.sasl.SaslException;
/**
* SCRAM request/response message creation and parsing based on
* <a href="https://tools.ietf.org/html/rfc5802">RFC 5802</a>
*
*/
public class ScramMessages {
// Shared grammar fragments (regex source strings) taken from the RFC 5802 ABNF;
// the four message classes below compose them into full wire-format patterns.
static abstract class AbstractScramMessage {
static final String ALPHA = "[A-Za-z]+";
static final String VALUE_SAFE = "[\\x01-\\x7F&&[^=,]]+";
static final String VALUE = "[\\x01-\\x7F&&[^,]]+";
static final String PRINTABLE = "[\\x21-\\x7E&&[^,]]+";
static final String SASLNAME = "(?:[\\x01-\\x7F&&[^=,]]|=2C|=3D)+";
static final String BASE64_CHAR = "[a-zA-Z0-9/+]";
static final String BASE64 = String.format("(?:%s{4})*(?:%s{3}=|%s{2}==)?", BASE64_CHAR, BASE64_CHAR, BASE64_CHAR);
static final String RESERVED = String.format("(m=%s,)?", VALUE);
static final String EXTENSIONS = String.format("(,%s=%s)*", ALPHA, VALUE);
// Renders this message in its RFC 5802 textual wire format.
abstract String toMessage();
// Wire bytes are always the UTF-8 encoding of the textual form.
public byte[] toBytes() {
return toMessage().getBytes(StandardCharsets.UTF_8);
}
// Decodes received wire bytes back to the textual form for regex parsing.
protected String toMessage(byte[] messageBytes) {
return new String(messageBytes, StandardCharsets.UTF_8);
}
}
/**
 * Format:
 * gs2-header [reserved-mext ","] username "," nonce ["," extensions]
 * Limitations:
 * Only gs2-header "n" is supported.
 * Extensions are ignored.
 *
 */
public static class ClientFirstMessage extends AbstractScramMessage {
private static final Pattern PATTERN = Pattern.compile(String.format(
"n,(a=(?<authzid>%s))?,%sn=(?<saslname>%s),r=(?<nonce>%s)(?<extensions>%s)",
SASLNAME,
RESERVED,
SASLNAME,
PRINTABLE,
EXTENSIONS));
private final String saslName;
private final String nonce;
private final String authorizationId;
private final ScramExtensions extensions;
// Parses a received client-first message; throws SaslException on any grammar violation.
public ClientFirstMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM client first message format: " + message);
// The optional gs2 authzid ("a=...") is normalized to the empty string when absent.
String authzid = matcher.group("authzid");
this.authorizationId = authzid != null ? authzid : "";
this.saslName = matcher.group("saslname");
this.nonce = matcher.group("nonce");
// The extensions group captures a leading comma when present; strip it before parsing.
String extString = matcher.group("extensions");
this.extensions = extString.startsWith(",") ? new ScramExtensions(extString.substring(1)) : new ScramExtensions();
}
// Builds an outgoing client-first message; saslName must already be escaped
// (see ScramFormatter.saslName).
public ClientFirstMessage(String saslName, String nonce, Map<String, String> extensions) {
this.saslName = saslName;
this.nonce = nonce;
this.extensions = new ScramExtensions(extensions);
this.authorizationId = ""; // Optional authzid not specified in gs2-header
}
public String saslName() {
return saslName;
}
public String nonce() {
return nonce;
}
public String authorizationId() {
return authorizationId;
}
// gs2 header with channel binding "n" (none) and the optional authzid.
public String gs2Header() {
return "n," + authorizationId + ",";
}
public ScramExtensions extensions() {
return extensions;
}
// The "client-first-message-bare" portion used when computing the AuthMessage.
public String clientFirstMessageBare() {
String extensionStr = Utils.mkString(extensions.map(), "", "", "=", ",");
if (extensionStr.isEmpty())
return String.format("n=%s,r=%s", saslName, nonce);
else
return String.format("n=%s,r=%s,%s", saslName, nonce, extensionStr);
}
String toMessage() {
return gs2Header() + clientFirstMessageBare();
}
}
/**
 * Format:
 * [reserved-mext ","] nonce "," salt "," iteration-count ["," extensions]
 * Limitations:
 * Extensions are ignored.
 *
 */
public static class ServerFirstMessage extends AbstractScramMessage {
private static final Pattern PATTERN = Pattern.compile(String.format(
"%sr=(?<nonce>%s),s=(?<salt>%s),i=(?<iterations>[0-9]+)%s",
RESERVED,
PRINTABLE,
BASE64,
EXTENSIONS));
private final String nonce;
private final byte[] salt;
private final int iterations;
// Parses a received server-first message; rejects non-positive iteration counts.
public ServerFirstMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM server first message format: " + message);
try {
this.iterations = Integer.parseInt(matcher.group("iterations"));
if (this.iterations <= 0)
throw new SaslException("Invalid SCRAM server first message format: invalid iterations " + iterations);
} catch (NumberFormatException e) {
// The regex limits the group to digits, so this catches only overflow of int.
throw new SaslException("Invalid SCRAM server first message format: invalid iterations", e);
}
this.nonce = matcher.group("nonce");
String salt = matcher.group("salt");
this.salt = Base64.getDecoder().decode(salt);
}
// Builds an outgoing server-first message; the combined nonce is client nonce + server nonce.
public ServerFirstMessage(String clientNonce, String serverNonce, byte[] salt, int iterations) {
this.nonce = clientNonce + serverNonce;
this.salt = salt;
this.iterations = iterations;
}
public String nonce() {
return nonce;
}
public byte[] salt() {
return salt;
}
public int iterations() {
return iterations;
}
String toMessage() {
return String.format("r=%s,s=%s,i=%d", nonce, Base64.getEncoder().encodeToString(salt), iterations);
}
}
/**
 * Format:
 * channel-binding "," nonce ["," extensions]"," proof
 * Limitations:
 * Extensions are ignored.
 *
 */
public static class ClientFinalMessage extends AbstractScramMessage {
private static final Pattern PATTERN = Pattern.compile(String.format(
"c=(?<channel>%s),r=(?<nonce>%s)%s,p=(?<proof>%s)",
BASE64,
PRINTABLE,
EXTENSIONS,
BASE64));
private final byte[] channelBinding;
private final String nonce;
// Mutable: the proof is computed and set after the message skeleton is created,
// because it is derived from the message-without-proof itself.
private byte[] proof;
public ClientFinalMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM client final message format: " + message);
this.channelBinding = Base64.getDecoder().decode(matcher.group("channel"));
this.nonce = matcher.group("nonce");
this.proof = Base64.getDecoder().decode(matcher.group("proof"));
}
// Builds an outgoing client-final message; call proof(byte[]) before toMessage()/toBytes().
public ClientFinalMessage(byte[] channelBinding, String nonce) {
this.channelBinding = channelBinding;
this.nonce = nonce;
}
public byte[] channelBinding() {
return channelBinding;
}
public String nonce() {
return nonce;
}
public byte[] proof() {
return proof;
}
public void proof(byte[] proof) {
this.proof = proof;
}
// The "client-final-message-without-proof" portion used when computing the AuthMessage.
public String clientFinalMessageWithoutProof() {
return String.format("c=%s,r=%s",
Base64.getEncoder().encodeToString(channelBinding),
nonce);
}
String toMessage() {
return String.format("%s,p=%s",
clientFinalMessageWithoutProof(),
Base64.getEncoder().encodeToString(proof));
}
}
/**
 * Format:
 * ("e=" server-error-value | "v=" base64_server_signature) ["," extensions]
 * Limitations:
 * Extensions are ignored.
 *
 */
public static class ServerFinalMessage extends AbstractScramMessage {
private static final Pattern PATTERN = Pattern.compile(String.format(
"(?:e=(?<error>%s))|(?:v=(?<signature>%s))%s",
VALUE_SAFE,
BASE64,
EXTENSIONS));
// Exactly one of error/serverSignature is non-null: error on failure, signature on success.
private final String error;
private final byte[] serverSignature;
public ServerFinalMessage(byte[] messageBytes) throws SaslException {
String message = toMessage(messageBytes);
Matcher matcher = PATTERN.matcher(message);
if (!matcher.matches())
throw new SaslException("Invalid SCRAM server final message format: " + message);
String error = null;
try {
error = matcher.group("error");
} catch (IllegalArgumentException e) {
// ignore
// NOTE(review): the "error" group always exists in PATTERN, so group() should
// never throw here; this catch looks defensive/dead — confirm before removing.
}
if (error == null) {
this.serverSignature = Base64.getDecoder().decode(matcher.group("signature"));
this.error = null;
} else {
this.serverSignature = null;
this.error = error;
}
}
// Builds an outgoing server-final message; pass a non-null error for a failure reply.
public ServerFinalMessage(String error, byte[] serverSignature) {
this.error = error;
this.serverSignature = serverSignature;
}
public String error() {
return error;
}
public byte[] serverSignature() {
return serverSignature;
}
String toMessage() {
if (error != null)
return "e=" + error;
else
return "v=" + Base64.getEncoder().encodeToString(serverSignature);
}
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramSaslClient.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.nio.charset.StandardCharsets;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.PasswordCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.sasl.SaslClient;
import javax.security.sasl.SaslClientFactory;
import javax.security.sasl.SaslException;
import org.apache.kafka.common.errors.IllegalSaslStateException;
import org.apache.kafka.common.security.scram.ScramExtensionsCallback;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ClientFinalMessage;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ServerFinalMessage;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ServerFirstMessage;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* SaslClient implementation for SASL/SCRAM.
* <p>
* This implementation expects a login module that populates username as
* the Subject's public credential and password as the private credential.
*
* @see <a href="https://tools.ietf.org/html/rfc5802">RFC 5802</a>
*
*/
public class ScramSaslClient implements SaslClient {
private static final Logger log = LoggerFactory.getLogger(ScramSaslClient.class);
// Client-side states of the SCRAM exchange, in the order they normally occur;
// FAILED is terminal and entered on any SaslException.
enum State {
SEND_CLIENT_FIRST_MESSAGE,
RECEIVE_SERVER_FIRST_MESSAGE,
RECEIVE_SERVER_FINAL_MESSAGE,
COMPLETE,
FAILED
}
// The SCRAM mechanism (hash/HMAC algorithms, iteration bounds) negotiated for this client.
private final ScramMechanism mechanism;
// Supplies username, password and optional SCRAM extensions via SASL callbacks.
private final CallbackHandler callbackHandler;
// Crypto helper implementing the RFC 5802 hash/HMAC/Hi primitives for this mechanism.
private final ScramFormatter formatter;
// Random nonce generated for the client-first message; the server must echo it as a prefix.
private String clientNonce;
private State state;
// Hi(password, salt, iterations); retained between exchange steps to verify the
// server signature in the final message.
private byte[] saltedPassword;
// Messages exchanged so far, retained because the AuthMessage is built from all three.
private ScramMessages.ClientFirstMessage clientFirstMessage;
private ScramMessages.ServerFirstMessage serverFirstMessage;
private ScramMessages.ClientFinalMessage clientFinalMessage;
// Creates a client for the given mechanism, in the initial state.
// Throws NoSuchAlgorithmException if the mechanism's hash or HMAC algorithm is unavailable.
public ScramSaslClient(ScramMechanism mechanism, CallbackHandler cbh) throws NoSuchAlgorithmException {
this.mechanism = mechanism;
this.callbackHandler = cbh;
this.formatter = new ScramFormatter(mechanism);
setState(State.SEND_CLIENT_FIRST_MESSAGE);
}
// Returns the SASL mechanism name, e.g. "SCRAM-SHA-256".
@Override
public String getMechanismName() {
return mechanism.mechanismName();
}
// SCRAM is client-first: the client sends its first message before any server challenge.
@Override
public boolean hasInitialResponse() {
return true;
}
/**
 * Runs one step of the RFC 5802 SCRAM exchange, keyed off the current {@link State}:
 * <ol>
 *   <li>SEND_CLIENT_FIRST_MESSAGE: obtain username/extensions via callbacks and emit
 *       the client-first message (the challenge must be empty).</li>
 *   <li>RECEIVE_SERVER_FIRST_MESSAGE: validate the server nonce and iteration count,
 *       obtain the password via callback and emit the client-final message.</li>
 *   <li>RECEIVE_SERVER_FINAL_MESSAGE: verify the server signature (or surface the
 *       server-reported error) and complete.</li>
 * </ol>
 * Any {@link SaslException} moves the client to the terminal FAILED state before
 * being rethrown.
 */
@Override
public byte[] evaluateChallenge(byte[] challenge) throws SaslException {
    try {
        switch (state) {
            case SEND_CLIENT_FIRST_MESSAGE:
                if (challenge != null && challenge.length != 0)
                    throw new SaslException("Expected empty challenge");
                clientNonce = formatter.secureRandomString();
                NameCallback nameCallback = new NameCallback("Name:");
                ScramExtensionsCallback extensionsCallback = new ScramExtensionsCallback();
                try {
                    callbackHandler.handle(new Callback[]{nameCallback});
                    try {
                        callbackHandler.handle(new Callback[]{extensionsCallback});
                    } catch (UnsupportedCallbackException e) {
                        // Extensions are optional; proceed without them.
                        log.debug("Extensions callback is not supported by client callback handler {}, no extensions will be added",
                                callbackHandler);
                    }
                } catch (Throwable e) {
                    throw new SaslException("User name or extensions could not be obtained", e);
                }
                String username = nameCallback.getName();
                String saslName = ScramFormatter.saslName(username);
                Map<String, String> extensions = extensionsCallback.extensions();
                this.clientFirstMessage = new ScramMessages.ClientFirstMessage(saslName, clientNonce, extensions);
                setState(State.RECEIVE_SERVER_FIRST_MESSAGE);
                return clientFirstMessage.toBytes();
            case RECEIVE_SERVER_FIRST_MESSAGE:
                this.serverFirstMessage = new ServerFirstMessage(challenge);
                // The combined nonce must start with our nonce; otherwise this reply
                // does not belong to our exchange.
                if (!serverFirstMessage.nonce().startsWith(clientNonce))
                    throw new SaslException("Invalid server nonce: does not start with client nonce");
                if (serverFirstMessage.iterations() < mechanism.minIterations())
                    throw new SaslException("Requested iterations " + serverFirstMessage.iterations() + " is less than the minimum " + mechanism.minIterations() + " for " + mechanism);
                PasswordCallback passwordCallback = new PasswordCallback("Password:", false);
                try {
                    callbackHandler.handle(new Callback[]{passwordCallback});
                } catch (Throwable e) {
                    // Fix: this previously reported "User name could not be obtained"
                    // (copy-paste from the name-callback handler above), which misled
                    // users debugging password-callback failures.
                    throw new SaslException("Password could not be obtained", e);
                }
                this.clientFinalMessage = handleServerFirstMessage(passwordCallback.getPassword());
                setState(State.RECEIVE_SERVER_FINAL_MESSAGE);
                return clientFinalMessage.toBytes();
            case RECEIVE_SERVER_FINAL_MESSAGE:
                ServerFinalMessage serverFinalMessage = new ServerFinalMessage(challenge);
                if (serverFinalMessage.error() != null)
                    throw new SaslException("Sasl authentication using " + mechanism + " failed with error: " + serverFinalMessage.error());
                handleServerFinalMessage(serverFinalMessage.serverSignature());
                setState(State.COMPLETE);
                // No further response is sent once the server signature verifies.
                return null;
            default:
                throw new IllegalSaslStateException("Unexpected challenge in Sasl client state " + state);
        }
    } catch (SaslException e) {
        setState(State.FAILED);
        throw e;
    }
}
// The exchange is complete only after the server's signature has been verified.
@Override
public boolean isComplete() {
return state == State.COMPLETE;
}
@Override
public byte[] unwrap(byte[] incoming, int offset, int len) {
    // SCRAM negotiates no security layer, so unwrap is a plain copy of the requested range.
    if (isComplete())
        return Arrays.copyOfRange(incoming, offset, offset + len);
    throw new IllegalStateException("Authentication exchange has not completed");
}
@Override
public byte[] wrap(byte[] outgoing, int offset, int len) {
    // No security layer is negotiated; wrapping simply copies the requested range.
    if (isComplete())
        return Arrays.copyOfRange(outgoing, offset, offset + len);
    throw new IllegalStateException("Authentication exchange has not completed");
}
@Override
public Object getNegotiatedProperty(String propName) {
    // This client negotiates no properties; the only requirement is that the
    // exchange must be complete before the query is legal.
    if (isComplete())
        return null;
    throw new IllegalStateException("Authentication exchange has not completed");
}
@Override
public void dispose() {
    // No sensitive state or resources are retained here, so there is nothing to release.
}
// Transitions the client state machine, logging the transition for troubleshooting.
private void setState(State state) {
    log.debug("Setting SASL/{} client state to {}", mechanism, state);
    this.state = state;
}
/**
 * Derives the salted password from the user's password and the server-supplied
 * salt/iteration count, then builds the client-final message carrying the client proof.
 *
 * @param password the password obtained from the callback handler
 * @return the client-final message with its proof populated
 * @throws SaslException if the proof could not be computed
 */
private ClientFinalMessage handleServerFirstMessage(char[] password) throws SaslException {
    try {
        // Normalize first, then salt with the server-chosen salt and iteration count.
        byte[] normalizedPassword = ScramFormatter.normalize(new String(password));
        this.saltedPassword = formatter.hi(normalizedPassword, serverFirstMessage.salt(), serverFirstMessage.iterations());
        // "n,," is the GS2 header sent in the client-first message and must be echoed here.
        ClientFinalMessage finalMessage = new ClientFinalMessage("n,,".getBytes(StandardCharsets.UTF_8), serverFirstMessage.nonce());
        finalMessage.proof(formatter.clientProof(saltedPassword, clientFirstMessage, serverFirstMessage, finalMessage));
        return finalMessage;
    } catch (InvalidKeyException e) {
        throw new SaslException("Client final message could not be created", e);
    }
}
/**
 * Verifies the server's signature from the server-final message, proving the server
 * also knows the credential.
 *
 * @param signature the server signature received from the server
 * @throws SaslException if the signature does not match the expected value
 */
private void handleServerFinalMessage(byte[] signature) throws SaslException {
    try {
        byte[] serverKey = formatter.serverKey(saltedPassword);
        byte[] expected = formatter.serverSignature(serverKey, clientFirstMessage, serverFirstMessage, clientFinalMessage);
        // MessageDigest.isEqual performs a time-constant comparison.
        if (!MessageDigest.isEqual(signature, expected))
            throw new SaslException("Invalid server signature in server final message");
    } catch (InvalidKeyException e) {
        throw new SaslException("Sasl server signature verification failed", e);
    }
}
/** SASL client factory that produces {@code ScramSaslClient} instances for SCRAM mechanisms. */
public static class ScramSaslClientFactory implements SaslClientFactory {

    /**
     * Creates a client for the first requested mechanism that is a supported SCRAM mechanism.
     *
     * @throws SaslException if no requested mechanism is supported, or the mechanism's
     *         hash algorithm is unavailable in this JRE
     */
    @Override
    public SaslClient createSaslClient(String[] mechanisms,
                                       String authorizationId,
                                       String protocol,
                                       String serverName,
                                       Map<String, ?> props,
                                       CallbackHandler cbh) throws SaslException {
        ScramMechanism selected = null;
        for (String candidate : mechanisms) {
            selected = ScramMechanism.forMechanismName(candidate);
            if (selected != null)
                break;
        }
        if (selected == null) {
            throw new SaslException(String.format("Requested mechanisms '%s' not supported. Supported mechanisms are '%s'.",
                Arrays.asList(mechanisms), ScramMechanism.mechanismNames()));
        }
        try {
            return new ScramSaslClient(selected, cbh);
        } catch (NoSuchAlgorithmException e) {
            throw new SaslException("Hash algorithm not supported for mechanism " + selected, e);
        }
    }

    @Override
    public String[] getMechanismNames(Map<String, ?> props) {
        // Advertise every SCRAM mechanism known to this implementation.
        Collection<String> names = ScramMechanism.mechanismNames();
        return names.toArray(new String[0]);
    }
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramSaslClientProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.security.Provider;
import java.security.Security;
import org.apache.kafka.common.security.scram.internals.ScramSaslClient.ScramSaslClientFactory;
/**
 * JCA security provider that registers {@code ScramSaslClientFactory} for every
 * supported SCRAM mechanism. Call {@link #initialize()} once to install it.
 */
public class ScramSaslClientProvider extends Provider {
    private static final long serialVersionUID = 1L;

    @SuppressWarnings("deprecation")
    protected ScramSaslClientProvider() {
        super("SASL/SCRAM Client Provider", 1.0, "SASL/SCRAM Client Provider for Kafka");
        // One SaslClientFactory registration per SCRAM mechanism (e.g. SCRAM-SHA-256).
        for (ScramMechanism mech : ScramMechanism.values()) {
            put("SaslClientFactory." + mech.mechanismName(), ScramSaslClientFactory.class.getName());
        }
    }

    /** Installs this provider with the JVM-wide {@link Security} registry. */
    public static void initialize() {
        Security.addProvider(new ScramSaslClientProvider());
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramSaslServer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.security.InvalidKeyException;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Map;
import java.util.Set;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.CallbackHandler;
import javax.security.auth.callback.NameCallback;
import javax.security.sasl.SaslException;
import javax.security.sasl.SaslServer;
import javax.security.sasl.SaslServerFactory;
import org.apache.kafka.common.errors.AuthenticationException;
import org.apache.kafka.common.errors.IllegalSaslStateException;
import org.apache.kafka.common.errors.SaslAuthenticationException;
import org.apache.kafka.common.security.authenticator.SaslInternalConfigs;
import org.apache.kafka.common.security.scram.ScramCredential;
import org.apache.kafka.common.security.scram.ScramCredentialCallback;
import org.apache.kafka.common.security.scram.ScramLoginModule;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ClientFinalMessage;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ClientFirstMessage;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ServerFinalMessage;
import org.apache.kafka.common.security.scram.internals.ScramMessages.ServerFirstMessage;
import org.apache.kafka.common.security.token.delegation.internals.DelegationTokenCredentialCallback;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* SaslServer implementation for SASL/SCRAM. This server is configured with a callback
* handler for integration with a credential manager. Kafka brokers provide callbacks
* based on a Zookeeper-based password store.
*
* @see <a href="https://tools.ietf.org/html/rfc5802">RFC 5802</a>
*/
public class ScramSaslServer implements SaslServer {
    private static final Logger log = LoggerFactory.getLogger(ScramSaslServer.class);
    // Extensions this server understands; anything else sent by the client is ignored.
    private static final Set<String> SUPPORTED_EXTENSIONS = Utils.mkSet(ScramLoginModule.TOKEN_AUTH_CONFIG);

    // Server-side SCRAM exchange state machine: client-first -> client-final -> complete,
    // with FAILED as the terminal error state.
    enum State {
        RECEIVE_CLIENT_FIRST_MESSAGE,
        RECEIVE_CLIENT_FINAL_MESSAGE,
        COMPLETE,
        FAILED
    }

    private final ScramMechanism mechanism;
    private final ScramFormatter formatter;
    private final CallbackHandler callbackHandler;
    private State state;
    // Fields below hold per-exchange state; credentials are cleared once the
    // exchange completes or fails (see clearCredentials()).
    private String username;
    private ClientFirstMessage clientFirstMessage;
    private ServerFirstMessage serverFirstMessage;
    private ScramExtensions scramExtensions;
    private ScramCredential scramCredential;
    private String authorizationId;
    private Long tokenExpiryTimestamp;

    // Note: props is accepted for the SaslServerFactory contract but not used here.
    public ScramSaslServer(ScramMechanism mechanism, Map<String, ?> props, CallbackHandler callbackHandler) throws NoSuchAlgorithmException {
        this.mechanism = mechanism;
        this.formatter = new ScramFormatter(mechanism);
        this.callbackHandler = callbackHandler;
        setState(State.RECEIVE_CLIENT_FIRST_MESSAGE);
    }

    /**
     * @throws SaslAuthenticationException if the requested authorization id is not the same as username.
     * <p>
     * <b>Note:</b> This method may throw {@link SaslAuthenticationException} to provide custom error messages
     * to clients. But care should be taken to avoid including any information in the exception message that
     * should not be leaked to unauthenticated clients. It may be safer to throw {@link SaslException} in
     * most cases so that a standard error message is returned to clients.
     * </p>
     */
    @Override
    public byte[] evaluateResponse(byte[] response) throws SaslException, SaslAuthenticationException {
        try {
            switch (state) {
                case RECEIVE_CLIENT_FIRST_MESSAGE:
                    // Parse client-first, look up the credential (password or delegation token)
                    // via the callback handler, and reply with server-first (salt + iterations + nonce).
                    this.clientFirstMessage = new ClientFirstMessage(response);
                    this.scramExtensions = clientFirstMessage.extensions();
                    if (!SUPPORTED_EXTENSIONS.containsAll(scramExtensions.map().keySet())) {
                        log.debug("Unsupported extensions will be ignored, supported {}, provided {}",
                            SUPPORTED_EXTENSIONS, scramExtensions.map().keySet());
                    }
                    String serverNonce = formatter.secureRandomString();
                    try {
                        String saslName = clientFirstMessage.saslName();
                        this.username = ScramFormatter.username(saslName);
                        NameCallback nameCallback = new NameCallback("username", username);
                        ScramCredentialCallback credentialCallback;
                        if (scramExtensions.tokenAuthenticated()) {
                            // Delegation-token path: the token owner becomes the authorization id.
                            DelegationTokenCredentialCallback tokenCallback = new DelegationTokenCredentialCallback();
                            credentialCallback = tokenCallback;
                            callbackHandler.handle(new Callback[]{nameCallback, tokenCallback});
                            if (tokenCallback.tokenOwner() == null)
                                throw new SaslException("Token Authentication failed: Invalid tokenId : " + username);
                            this.authorizationId = tokenCallback.tokenOwner();
                            this.tokenExpiryTimestamp = tokenCallback.tokenExpiryTimestamp();
                        } else {
                            // Password path: the authenticated username is the authorization id.
                            credentialCallback = new ScramCredentialCallback();
                            callbackHandler.handle(new Callback[]{nameCallback, credentialCallback});
                            this.authorizationId = username;
                            this.tokenExpiryTimestamp = null;
                        }
                        this.scramCredential = credentialCallback.scramCredential();
                        if (scramCredential == null)
                            throw new SaslException("Authentication failed: Invalid user credentials");
                        // A client-supplied authorization id must match the authenticated username.
                        String authorizationIdFromClient = clientFirstMessage.authorizationId();
                        if (!authorizationIdFromClient.isEmpty() && !authorizationIdFromClient.equals(username))
                            throw new SaslAuthenticationException("Authentication failed: Client requested an authorization id that is different from username");
                        if (scramCredential.iterations() < mechanism.minIterations())
                            throw new SaslException("Iterations " + scramCredential.iterations() + " is less than the minimum " + mechanism.minIterations() + " for " + mechanism);
                        this.serverFirstMessage = new ServerFirstMessage(clientFirstMessage.nonce(),
                            serverNonce,
                            scramCredential.salt(),
                            scramCredential.iterations());
                        setState(State.RECEIVE_CLIENT_FINAL_MESSAGE);
                        return serverFirstMessage.toBytes();
                    } catch (SaslException | AuthenticationException e) {
                        throw e;
                    } catch (Throwable e) {
                        // Deliberately generic: avoid leaking details to unauthenticated clients.
                        throw new SaslException("Authentication failed: Credentials could not be obtained", e);
                    }
                case RECEIVE_CLIENT_FINAL_MESSAGE:
                    try {
                        // Verify the client proof, then send the server signature so the
                        // client can authenticate the server in turn.
                        ClientFinalMessage clientFinalMessage = new ClientFinalMessage(response);
                        verifyClientProof(clientFinalMessage);
                        byte[] serverKey = scramCredential.serverKey();
                        byte[] serverSignature = formatter.serverSignature(serverKey, clientFirstMessage, serverFirstMessage, clientFinalMessage);
                        ServerFinalMessage serverFinalMessage = new ServerFinalMessage(null, serverSignature);
                        // Drop credential material as soon as it is no longer needed.
                        clearCredentials();
                        setState(State.COMPLETE);
                        return serverFinalMessage.toBytes();
                    } catch (InvalidKeyException e) {
                        throw new SaslException("Authentication failed: Invalid client final message", e);
                    }
                default:
                    throw new IllegalSaslStateException("Unexpected challenge in Sasl server state " + state);
            }
        } catch (SaslException | AuthenticationException e) {
            clearCredentials();
            setState(State.FAILED);
            throw e;
        }
    }

    @Override
    public String getAuthorizationID() {
        if (!isComplete())
            throw new IllegalStateException("Authentication exchange has not completed");
        return authorizationId;
    }

    @Override
    public String getMechanismName() {
        return mechanism.mechanismName();
    }

    @Override
    public Object getNegotiatedProperty(String propName) {
        if (!isComplete())
            throw new IllegalStateException("Authentication exchange has not completed");
        if (SaslInternalConfigs.CREDENTIAL_LIFETIME_MS_SASL_NEGOTIATED_PROPERTY_KEY.equals(propName))
            return tokenExpiryTimestamp; // will be null if token not used
        if (SUPPORTED_EXTENSIONS.contains(propName))
            return scramExtensions.map().get(propName);
        else
            return null;
    }

    @Override
    public boolean isComplete() {
        return state == State.COMPLETE;
    }

    @Override
    public byte[] unwrap(byte[] incoming, int offset, int len) {
        // No security layer is negotiated; unwrap is a plain copy.
        if (!isComplete())
            throw new IllegalStateException("Authentication exchange has not completed");
        return Arrays.copyOfRange(incoming, offset, offset + len);
    }

    @Override
    public byte[] wrap(byte[] outgoing, int offset, int len) {
        // No security layer is negotiated; wrap is a plain copy.
        if (!isComplete())
            throw new IllegalStateException("Authentication exchange has not completed");
        return Arrays.copyOfRange(outgoing, offset, offset + len);
    }

    @Override
    public void dispose() {
        // Nothing to release; per-exchange credentials are cleared in evaluateResponse.
    }

    // Transitions the server state machine, logging the transition for troubleshooting.
    private void setState(State state) {
        log.debug("Setting SASL/{} server state to {}", mechanism, state);
        this.state = state;
    }

    // Checks the client proof: StoredKey recomputed from the proof must equal the
    // stored credential's StoredKey. MessageDigest.isEqual is a time-constant compare.
    private void verifyClientProof(ClientFinalMessage clientFinalMessage) throws SaslException {
        try {
            byte[] expectedStoredKey = scramCredential.storedKey();
            byte[] clientSignature = formatter.clientSignature(expectedStoredKey, clientFirstMessage, serverFirstMessage, clientFinalMessage);
            byte[] computedStoredKey = formatter.storedKey(clientSignature, clientFinalMessage.proof());
            if (!MessageDigest.isEqual(computedStoredKey, expectedStoredKey))
                throw new SaslException("Invalid client credentials");
        } catch (InvalidKeyException e) {
            throw new SaslException("Sasl client verification failed", e);
        }
    }

    // Releases credential material and exchange transcripts once they are no longer needed.
    private void clearCredentials() {
        scramCredential = null;
        clientFirstMessage = null;
        serverFirstMessage = null;
    }

    /** SASL server factory producing {@code ScramSaslServer} instances for SCRAM mechanisms. */
    public static class ScramSaslServerFactory implements SaslServerFactory {
        @Override
        public SaslServer createSaslServer(String mechanism, String protocol, String serverName, Map<String, ?> props, CallbackHandler cbh)
            throws SaslException {
            if (!ScramMechanism.isScram(mechanism)) {
                throw new SaslException(String.format("Requested mechanism '%s' is not supported. Supported mechanisms are '%s'.",
                    mechanism, ScramMechanism.mechanismNames()));
            }
            try {
                return new ScramSaslServer(ScramMechanism.forMechanismName(mechanism), props, cbh);
            } catch (NoSuchAlgorithmException e) {
                throw new SaslException("Hash algorithm not supported for mechanism " + mechanism, e);
            }
        }

        @Override
        public String[] getMechanismNames(Map<String, ?> props) {
            Collection<String> mechanisms = ScramMechanism.mechanismNames();
            return mechanisms.toArray(new String[0]);
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramSaslServerProvider.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.security.Provider;
import java.security.Security;
import org.apache.kafka.common.security.scram.internals.ScramSaslServer.ScramSaslServerFactory;
/**
 * JCA security provider that registers {@code ScramSaslServerFactory} for every
 * supported SCRAM mechanism. Call {@link #initialize()} once to install it.
 */
public class ScramSaslServerProvider extends Provider {
    private static final long serialVersionUID = 1L;

    @SuppressWarnings("deprecation")
    protected ScramSaslServerProvider() {
        super("SASL/SCRAM Server Provider", 1.0, "SASL/SCRAM Server Provider for Kafka");
        // One SaslServerFactory registration per SCRAM mechanism (e.g. SCRAM-SHA-256).
        for (ScramMechanism mech : ScramMechanism.values()) {
            put("SaslServerFactory." + mech.mechanismName(), ScramSaslServerFactory.class.getName());
        }
    }

    /** Installs this provider with the JVM-wide {@link Security} registry. */
    public static void initialize() {
        Security.addProvider(new ScramSaslServerProvider());
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/scram/internals/ScramServerCallbackHandler.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.scram.internals;
import java.util.List;
import java.util.Map;
import javax.security.auth.callback.Callback;
import javax.security.auth.callback.NameCallback;
import javax.security.auth.callback.UnsupportedCallbackException;
import javax.security.auth.login.AppConfigurationEntry;
import org.apache.kafka.common.security.auth.AuthenticateCallbackHandler;
import org.apache.kafka.common.security.authenticator.CredentialCache;
import org.apache.kafka.common.security.scram.ScramCredential;
import org.apache.kafka.common.security.scram.ScramCredentialCallback;
import org.apache.kafka.common.security.token.delegation.TokenInformation;
import org.apache.kafka.common.security.token.delegation.internals.DelegationTokenCache;
import org.apache.kafka.common.security.token.delegation.internals.DelegationTokenCredentialCallback;
/**
 * Broker-side callback handler that resolves SCRAM credentials either from the
 * credential cache (password authentication) or from the delegation token cache
 * (token authentication).
 */
public class ScramServerCallbackHandler implements AuthenticateCallbackHandler {
    private final CredentialCache.Cache<ScramCredential> credentialCache;
    private final DelegationTokenCache tokenCache;
    private String saslMechanism;

    public ScramServerCallbackHandler(CredentialCache.Cache<ScramCredential> credentialCache,
                                      DelegationTokenCache tokenCache) {
        this.credentialCache = credentialCache;
        this.tokenCache = tokenCache;
    }

    @Override
    public void configure(Map<String, ?> configs, String mechanism, List<AppConfigurationEntry> jaasConfigEntries) {
        // Only the mechanism is needed; it selects the credential set in the token cache.
        this.saslMechanism = mechanism;
    }

    @Override
    public void handle(Callback[] callbacks) throws UnsupportedCallbackException {
        // The NameCallback must appear before any credential callback in the array:
        // `username` is captured from it and then used for the credential lookups.
        // ScramSaslServer passes {nameCallback, credentialCallback} in that order.
        String username = null;
        for (Callback callback : callbacks) {
            if (callback instanceof NameCallback)
                username = ((NameCallback) callback).getDefaultName();
            else if (callback instanceof DelegationTokenCredentialCallback) {
                // Token path: `username` is actually the token id; resolve credential,
                // owner (used as the authorization id) and expiry from the token cache.
                DelegationTokenCredentialCallback tokenCallback = (DelegationTokenCredentialCallback) callback;
                tokenCallback.scramCredential(tokenCache.credential(saslMechanism, username));
                tokenCallback.tokenOwner(tokenCache.owner(username));
                TokenInformation tokenInfo = tokenCache.token(username);
                if (tokenInfo != null)
                    tokenCallback.tokenExpiryTimestamp(tokenInfo.expiryTimestamp());
            } else if (callback instanceof ScramCredentialCallback) {
                // Password path: look up the stored SCRAM credential for the user.
                // Note: instanceof order matters — DelegationTokenCredentialCallback is
                // checked first since it is handled differently from its supertype.
                ScramCredentialCallback sc = (ScramCredentialCallback) callback;
                sc.scramCredential(credentialCache.get(username));
            } else
                throw new UnsupportedCallbackException(callback);
        }
    }

    @Override
    public void close() {
        // No resources to release; caches are owned by the broker.
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/ssl/DefaultSslEngineFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.ssl;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.SslClientAuth;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.config.internals.BrokerSecurityConfigs;
import org.apache.kafka.common.config.types.Password;
import org.apache.kafka.common.errors.InvalidConfigurationException;
import org.apache.kafka.common.network.Mode;
import org.apache.kafka.common.security.auth.SslEngineFactory;
import org.apache.kafka.common.utils.SecurityUtils;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.security.GeneralSecurityException;
import java.security.Key;
import java.security.KeyFactory;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.SecureRandom;
import java.security.cert.Certificate;
import java.security.cert.CertificateFactory;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Base64;
import java.util.Collections;
import java.util.Date;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import javax.crypto.Cipher;
import javax.crypto.EncryptedPrivateKeyInfo;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.PBEKeySpec;
import javax.net.ssl.KeyManager;
import javax.net.ssl.KeyManagerFactory;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLParameters;
import javax.net.ssl.TrustManagerFactory;
public final class DefaultSslEngineFactory implements SslEngineFactory {
private static final Logger log = LoggerFactory.getLogger(DefaultSslEngineFactory.class);
public static final String PEM_TYPE = "PEM";
private Map<String, ?> configs;
private String protocol;
private String provider;
private String kmfAlgorithm;
private String tmfAlgorithm;
private SecurityStore keystore;
private SecurityStore truststore;
private String[] cipherSuites;
private String[] enabledProtocols;
private SecureRandom secureRandomImplementation;
private SSLContext sslContext;
private SslClientAuth sslClientAuth;
// Creates a client-mode engine; endpointIdentification (e.g. "https") enables
// hostname verification for the peer — see createSslEngine.
@Override
public SSLEngine createClientSslEngine(String peerHost, int peerPort, String endpointIdentification) {
    return createSslEngine(Mode.CLIENT, peerHost, peerPort, endpointIdentification);
}
// Creates a server-mode engine; endpoint identification is a client-only concept,
// so null is passed through to createSslEngine.
@Override
public SSLEngine createServerSslEngine(String peerHost, int peerPort) {
    return createSslEngine(Mode.SERVER, peerHost, peerPort, null);
}
/**
 * Reports whether this factory must be rebuilt: either the configuration changed,
 * or a file-backed key store / trust store was modified on disk since it was loaded.
 */
@Override
public boolean shouldBeRebuilt(Map<String, Object> nextConfigs) {
    return !nextConfigs.equals(configs)
            || (truststore != null && truststore.modified())
            || (keystore != null && keystore.modified());
}
// Delegates to the standard set of SSL configs that support dynamic reconfiguration.
@Override
public Set<String> reconfigurableConfigs() {
    return SslConfigs.RECONFIGURABLE_CONFIGS;
}
/** Returns the configured key store, or {@code null} when none was configured. */
@Override
public KeyStore keystore() {
    SecurityStore store = this.keystore;
    return store == null ? null : store.get();
}
/** Returns the configured trust store, or {@code null} when none was configured. */
@Override
public KeyStore truststore() {
    SecurityStore store = this.truststore;
    return store == null ? null : store.get();
}
// Reads all SSL settings from the config map, builds the key/trust stores and
// finally the SSLContext. Security providers are registered before any store is
// created so provider-supplied store types resolve correctly.
@SuppressWarnings("unchecked")
@Override
public void configure(Map<String, ?> configs) {
    this.configs = Collections.unmodifiableMap(configs);
    this.protocol = (String) configs.get(SslConfigs.SSL_PROTOCOL_CONFIG);
    this.provider = (String) configs.get(SslConfigs.SSL_PROVIDER_CONFIG);
    // Register any configured security providers before stores/contexts are built.
    SecurityUtils.addConfiguredSecurityProviders(this.configs);
    // Empty lists are normalized to null so createSslEngine keeps the JRE defaults.
    List<String> cipherSuitesList = (List<String>) configs.get(SslConfigs.SSL_CIPHER_SUITES_CONFIG);
    if (cipherSuitesList != null && !cipherSuitesList.isEmpty()) {
        this.cipherSuites = cipherSuitesList.toArray(new String[0]);
    } else {
        this.cipherSuites = null;
    }
    List<String> enabledProtocolsList = (List<String>) configs.get(SslConfigs.SSL_ENABLED_PROTOCOLS_CONFIG);
    if (enabledProtocolsList != null && !enabledProtocolsList.isEmpty()) {
        this.enabledProtocols = enabledProtocolsList.toArray(new String[0]);
    } else {
        this.enabledProtocols = null;
    }
    this.secureRandomImplementation = createSecureRandom((String)
        configs.get(SslConfigs.SSL_SECURE_RANDOM_IMPLEMENTATION_CONFIG));
    this.sslClientAuth = createSslClientAuth((String) configs.get(
        BrokerSecurityConfigs.SSL_CLIENT_AUTH_CONFIG));
    this.kmfAlgorithm = (String) configs.get(SslConfigs.SSL_KEYMANAGER_ALGORITHM_CONFIG);
    this.tmfAlgorithm = (String) configs.get(SslConfigs.SSL_TRUSTMANAGER_ALGORITHM_CONFIG);
    this.keystore = createKeystore((String) configs.get(SslConfigs.SSL_KEYSTORE_TYPE_CONFIG),
        (String) configs.get(SslConfigs.SSL_KEYSTORE_LOCATION_CONFIG),
        (Password) configs.get(SslConfigs.SSL_KEYSTORE_PASSWORD_CONFIG),
        (Password) configs.get(SslConfigs.SSL_KEY_PASSWORD_CONFIG),
        (Password) configs.get(SslConfigs.SSL_KEYSTORE_KEY_CONFIG),
        (Password) configs.get(SslConfigs.SSL_KEYSTORE_CERTIFICATE_CHAIN_CONFIG));
    this.truststore = createTruststore((String) configs.get(SslConfigs.SSL_TRUSTSTORE_TYPE_CONFIG),
        (String) configs.get(SslConfigs.SSL_TRUSTSTORE_LOCATION_CONFIG),
        (Password) configs.get(SslConfigs.SSL_TRUSTSTORE_PASSWORD_CONFIG),
        (Password) configs.get(SslConfigs.SSL_TRUSTSTORE_CERTIFICATES_CONFIG));
    this.sslContext = createSSLContext(keystore, truststore);
}
@Override
public void close() {
    // SSLContext holds no closeable resources; dropping the reference is sufficient.
    this.sslContext = null;
}
// For test only: exposes the built SSLContext so tests can inspect it directly.
public SSLContext sslContext() {
    return this.sslContext;
}
/**
 * Builds an {@link SSLEngine} from the shared {@link SSLContext} configured for the
 * given mode. Server engines apply the configured client-auth policy; client engines
 * enable endpoint identification (hostname verification) when an algorithm is given.
 *
 * @param mode whether the engine acts as SERVER or CLIENT
 * @param peerHost peer host hint passed to the context (used for session caching)
 * @param peerPort peer port hint passed to the context
 * @param endpointIdentification identification algorithm (e.g. "https"); client mode only
 */
private SSLEngine createSslEngine(Mode mode, String peerHost, int peerPort, String endpointIdentification) {
    SSLEngine sslEngine = sslContext.createSSLEngine(peerHost, peerPort);
    if (cipherSuites != null) sslEngine.setEnabledCipherSuites(cipherSuites);
    if (enabledProtocols != null) sslEngine.setEnabledProtocols(enabledProtocols);
    if (mode == Mode.SERVER) {
        // Fix: the original called setUseClientMode(false) twice (before and after
        // the switch); the redundant second call has been removed.
        sslEngine.setUseClientMode(false);
        switch (sslClientAuth) {
            case REQUIRED:
                sslEngine.setNeedClientAuth(true);
                break;
            case REQUESTED:
                sslEngine.setWantClientAuth(true);
                break;
            case NONE:
                break;
        }
    } else {
        sslEngine.setUseClientMode(true);
        SSLParameters sslParams = sslEngine.getSSLParameters();
        // SSLParameters#setEndpointIdentificationAlgorithm enables endpoint validation
        // only in client mode. Hence, validation is enabled only for clients.
        sslParams.setEndpointIdentificationAlgorithm(endpointIdentification);
        sslEngine.setSSLParameters(sslParams);
    }
    return sslEngine;
}
/**
 * Parses the ssl.client.auth config value; unrecognized values fall back to NONE
 * with a warning rather than failing.
 */
private static SslClientAuth createSslClientAuth(String key) {
    SslClientAuth parsed = SslClientAuth.forConfig(key);
    if (parsed == null) {
        String recognized = SslClientAuth.VALUES.stream()
                .map(Enum::name)
                .collect(Collectors.joining(", "));
        log.warn("Unrecognized client authentication configuration {}. Falling " +
            "back to NONE. Recognized client authentication configurations are {}.",
            key, recognized);
        return SslClientAuth.NONE;
    }
    return parsed;
}
// Resolves the configured SecureRandom implementation by algorithm name;
// null means "use the JRE default" (SSLContext.init accepts a null random).
private static SecureRandom createSecureRandom(String key) {
    if (key == null) {
        return null;
    }
    try {
        return SecureRandom.getInstance(key);
    } catch (GeneralSecurityException e) {
        // Unknown algorithm name is a configuration error, surfaced as KafkaException.
        throw new KafkaException(e);
    }
}
/**
 * Builds and initializes an {@link SSLContext} from the given key and trust stores,
 * honoring the configured protocol, provider and key/trust manager algorithms.
 * Any failure is wrapped in a {@link KafkaException}.
 */
private SSLContext createSSLContext(SecurityStore keystore, SecurityStore truststore) {
    try {
        SSLContext context = provider != null
                ? SSLContext.getInstance(protocol, provider)
                : SSLContext.getInstance(protocol);
        KeyManager[] keyManagers = null;
        if (keystore != null || kmfAlgorithm != null) {
            // Use the configured key manager algorithm, falling back to the JRE default.
            String keyManagerAlgorithm = this.kmfAlgorithm != null
                    ? this.kmfAlgorithm
                    : KeyManagerFactory.getDefaultAlgorithm();
            KeyManagerFactory kmf = KeyManagerFactory.getInstance(keyManagerAlgorithm);
            if (keystore != null)
                kmf.init(keystore.get(), keystore.keyPassword());
            else
                kmf.init(null, null);
            keyManagers = kmf.getKeyManagers();
        }
        String trustManagerAlgorithm = this.tmfAlgorithm != null
                ? this.tmfAlgorithm
                : TrustManagerFactory.getDefaultAlgorithm();
        TrustManagerFactory tmf = TrustManagerFactory.getInstance(trustManagerAlgorithm);
        KeyStore ts = truststore == null ? null : truststore.get();
        tmf.init(ts);
        context.init(keyManagers, tmf.getTrustManagers(), this.secureRandomImplementation);
        log.debug("Created SSL context with keystore {}, truststore {}, provider {}.",
            keystore, truststore, context.getProvider().getName());
        return context;
    } catch (Exception e) {
        throw new KafkaException(e);
    }
}
// Visibility to override for testing.
// Validates the mutually-exclusive key store configuration options and builds the
// appropriate store: an in-config PEM key+chain, a PEM file, a conventional
// file-based store (JKS/PKCS12/...), or null when no key store is configured.
protected SecurityStore createKeystore(String type, String path, Password password, Password keyPassword, Password privateKey, Password certificateChain) {
    if (privateKey != null) {
        // In-config PEM key: requires PEM type, a certificate chain, and must not be
        // combined with a file location or a store password.
        if (!PEM_TYPE.equals(type))
            throw new InvalidConfigurationException("SSL private key can be specified only for PEM, but key store type is " + type + ".");
        else if (certificateChain == null)
            throw new InvalidConfigurationException("SSL private key is specified, but certificate chain is not specified.");
        else if (path != null)
            throw new InvalidConfigurationException("Both SSL key store location and separate private key are specified.");
        else if (password != null)
            throw new InvalidConfigurationException("SSL key store password cannot be specified with PEM format, only key password may be specified.");
        else
            return new PemStore(certificateChain, privateKey, keyPassword);
    } else if (certificateChain != null) {
        throw new InvalidConfigurationException("SSL certificate chain is specified, but private key is not specified");
    } else if (PEM_TYPE.equals(type) && path != null) {
        // PEM key store file: store password is not applicable, only the key password.
        if (password != null)
            throw new InvalidConfigurationException("SSL key store password cannot be specified with PEM format, only key password may be specified");
        else
            return new FileBasedPemStore(path, keyPassword, true);
    } else if (path == null && password != null) {
        throw new InvalidConfigurationException("SSL key store is not specified, but key store password is specified.");
    } else if (path != null && password == null) {
        throw new InvalidConfigurationException("SSL key store is specified, but key store password is not specified.");
    } else if (path != null && password != null) {
        // Conventional file-based store: both location and password are required.
        return new FileBasedStore(type, path, password, keyPassword, true);
    } else
        return null; // path == null, clients may use this path with brokers that don't require client auth
}
// Builds the truststore from either inline PEM certificates, a PEM file, or a JKS/PKCS12 file.
// Returns null when no truststore is configured (JVM default trust managers are used instead).
private static SecurityStore createTruststore(String type, String path, Password password, Password trustStoreCerts) {
    if (trustStoreCerts != null) {
        // Inline PEM trust certificates are mutually exclusive with a file location and a password.
        if (!PEM_TYPE.equals(type))
            throw new InvalidConfigurationException("SSL trust store certs can be specified only for PEM, but trust store type is " + type + ".");
        if (path != null)
            throw new InvalidConfigurationException("Both SSL trust store location and separate trust certificates are specified.");
        if (password != null)
            throw new InvalidConfigurationException("SSL trust store password cannot be specified for PEM format.");
        return new PemStore(trustStoreCerts);
    }
    if (PEM_TYPE.equals(type) && path != null) {
        if (password != null)
            throw new InvalidConfigurationException("SSL trust store password cannot be specified for PEM format.");
        return new FileBasedPemStore(path, null, false);
    }
    if (path == null && password != null)
        throw new InvalidConfigurationException("SSL trust store is not specified, but trust store password is specified.");
    if (path != null)
        return new FileBasedStore(type, path, password, null, false);
    return null;
}
/**
 * Abstraction over the supported key/trust store sources (JKS/PKCS12 file, PEM file, inline PEM),
 * so the SSL context creation code can treat them uniformly.
 */
interface SecurityStore {
    // The loaded store; never null for a constructed SecurityStore.
    KeyStore get();
    // Password protecting the private key entries, or null if none applies.
    char[] keyPassword();
    // Whether the backing file changed since this store was loaded (always false for inline PEM).
    boolean modified();
}
// package access for testing
/**
 * A {@link SecurityStore} backed by a JKS/PKCS12 keystore or truststore file. The file's
 * modification time is captured at construction so {@link #modified()} can detect updates
 * for dynamic reconfiguration.
 */
static class FileBasedStore implements SecurityStore {
    private final String type;
    protected final String path;
    private final Password password;
    protected final Password keyPassword;
    // Modification time of the store file at load time; null if it could not be read.
    private final Long fileLastModifiedMs;
    private final KeyStore keyStore;

    FileBasedStore(String type, String path, Password password, Password keyPassword, boolean isKeyStore) {
        Objects.requireNonNull(type, "type must not be null");
        this.type = type;
        this.path = path;
        this.password = password;
        this.keyPassword = keyPassword;
        fileLastModifiedMs = lastModifiedMs(path);
        this.keyStore = load(isKeyStore);
    }

    @Override
    public KeyStore get() {
        return keyStore;
    }

    @Override
    public char[] keyPassword() {
        // Fall back to the store password when no separate key password is configured.
        Password passwd = keyPassword != null ? keyPassword : password;
        return passwd == null ? null : passwd.value().toCharArray();
    }

    /**
     * Loads this keystore
     * @return the keystore
     * @throws KafkaException if the file could not be read or if the keystore could not be loaded
     *   using the specified configs (e.g. if the password or keystore type is invalid)
     */
    protected KeyStore load(boolean isKeyStore) {
        try (InputStream in = Files.newInputStream(Paths.get(path))) {
            KeyStore ks = KeyStore.getInstance(type);
            // If a password is not set access to the truststore is still available, but integrity checking is disabled.
            char[] passwordChars = password != null ? password.value().toCharArray() : null;
            ks.load(in, passwordChars);
            return ks;
        } catch (GeneralSecurityException | IOException e) {
            throw new KafkaException("Failed to load SSL keystore " + path + " of type " + type, e);
        }
    }

    // Returns the file's mtime in millis, or null (with an error log) if it cannot be read.
    private Long lastModifiedMs(String path) {
        try {
            return Files.getLastModifiedTime(Paths.get(path)).toMillis();
        } catch (IOException e) {
            log.error("Modification time of key store could not be obtained: " + path, e);
            return null;
        }
    }

    @Override // fixed: annotation was missing on this interface method implementation
    public boolean modified() {
        Long modifiedMs = lastModifiedMs(path);
        // If the current mtime is unreadable we conservatively report "not modified".
        return modifiedMs != null && !Objects.equals(modifiedMs, this.fileLastModifiedMs);
    }

    @Override
    public String toString() {
        return "SecurityStore(" +
            "path=" + path +
            ", modificationTime=" + (fileLastModifiedMs == null ? null : new Date(fileLastModifiedMs)) + ")";
    }
}
/**
 * A file-backed PEM store. Reuses {@link FileBasedStore}'s change detection but loads the
 * file contents through {@link PemStore} instead of {@code KeyStore.load}.
 */
static class FileBasedPemStore extends FileBasedStore {
    FileBasedPemStore(String path, Password keyPassword, boolean isKeyStore) {
        // PEM files never carry a store password; only a key password may apply.
        super(PEM_TYPE, path, null, keyPassword, isKeyStore);
    }

    @Override
    protected KeyStore load(boolean isKeyStore) {
        try {
            Password pemContents = new Password(Utils.readFileAsString(path));
            PemStore pemStore;
            if (isKeyStore) {
                // A PEM keystore file contains both the private key and the certificate chain.
                pemStore = new PemStore(pemContents, pemContents, keyPassword);
            } else {
                pemStore = new PemStore(pemContents);
            }
            return pemStore.keyStore;
        } catch (Exception e) {
            throw new InvalidConfigurationException("Failed to load PEM SSL keystore " + path, e);
        }
    }
}
static class PemStore implements SecurityStore {
private static final PemParser CERTIFICATE_PARSER = new PemParser("CERTIFICATE");
private static final PemParser PRIVATE_KEY_PARSER = new PemParser("PRIVATE KEY");
private static final List<KeyFactory> KEY_FACTORIES = Arrays.asList(
keyFactory("RSA"),
keyFactory("DSA"),
keyFactory("EC")
);
private final char[] keyPassword;
private final KeyStore keyStore;
PemStore(Password certificateChain, Password privateKey, Password keyPassword) {
this.keyPassword = keyPassword == null ? null : keyPassword.value().toCharArray();
keyStore = createKeyStoreFromPem(privateKey.value(), certificateChain.value(), this.keyPassword);
}
PemStore(Password trustStoreCerts) {
this.keyPassword = null;
keyStore = createTrustStoreFromPem(trustStoreCerts.value());
}
@Override
public KeyStore get() {
return keyStore;
}
@Override
public char[] keyPassword() {
return keyPassword;
}
@Override
public boolean modified() {
return false;
}
private KeyStore createKeyStoreFromPem(String privateKeyPem, String certChainPem, char[] keyPassword) {
try {
KeyStore ks = KeyStore.getInstance("PKCS12");
ks.load(null, null);
Key key = privateKey(privateKeyPem, keyPassword);
Certificate[] certChain = certs(certChainPem);
ks.setKeyEntry("kafka", key, keyPassword, certChain);
return ks;
} catch (Exception e) {
throw new InvalidConfigurationException("Invalid PEM keystore configs", e);
}
}
private KeyStore createTrustStoreFromPem(String trustedCertsPem) {
try {
KeyStore ts = KeyStore.getInstance("PKCS12");
ts.load(null, null);
Certificate[] certs = certs(trustedCertsPem);
for (int i = 0; i < certs.length; i++) {
ts.setCertificateEntry("kafka" + i, certs[i]);
}
return ts;
} catch (InvalidConfigurationException e) {
throw e;
} catch (Exception e) {
throw new InvalidConfigurationException("Invalid PEM truststore configs", e);
}
}
private Certificate[] certs(String pem) throws GeneralSecurityException {
List<byte[]> certEntries = CERTIFICATE_PARSER.pemEntries(pem);
if (certEntries.isEmpty())
throw new InvalidConfigurationException("At least one certificate expected, but none found");
Certificate[] certs = new Certificate[certEntries.size()];
for (int i = 0; i < certs.length; i++) {
certs[i] = CertificateFactory.getInstance("X.509")
.generateCertificate(new ByteArrayInputStream(certEntries.get(i)));
}
return certs;
}
private PrivateKey privateKey(String pem, char[] keyPassword) throws Exception {
List<byte[]> keyEntries = PRIVATE_KEY_PARSER.pemEntries(pem);
if (keyEntries.isEmpty())
throw new InvalidConfigurationException("Private key not provided");
if (keyEntries.size() != 1)
throw new InvalidConfigurationException("Expected one private key, but found " + keyEntries.size());
byte[] keyBytes = keyEntries.get(0);
PKCS8EncodedKeySpec keySpec;
if (keyPassword == null) {
keySpec = new PKCS8EncodedKeySpec(keyBytes);
} else {
EncryptedPrivateKeyInfo keyInfo = new EncryptedPrivateKeyInfo(keyBytes);
String algorithm = keyInfo.getAlgName();
SecretKeyFactory keyFactory = SecretKeyFactory.getInstance(algorithm);
SecretKey pbeKey = keyFactory.generateSecret(new PBEKeySpec(keyPassword));
Cipher cipher = Cipher.getInstance(algorithm);
cipher.init(Cipher.DECRYPT_MODE, pbeKey, keyInfo.getAlgParameters());
keySpec = keyInfo.getKeySpec(cipher);
}
InvalidKeySpecException firstException = null;
for (KeyFactory factory : KEY_FACTORIES) {
try {
return factory.generatePrivate(keySpec);
} catch (InvalidKeySpecException e) {
if (firstException == null)
firstException = e;
}
}
throw new InvalidConfigurationException("Private key could not be loaded", firstException);
}
private static KeyFactory keyFactory(String algorithm) {
try {
return KeyFactory.getInstance(algorithm);
} catch (Exception e) {
throw new InvalidConfigurationException("Could not create key factory for algorithm " + algorithm, e);
}
}
}
/**
 * Extracts the base64 payload of certificate or private key entries from PEM text, e.g.
 * <pre>
 * -----BEGIN CERTIFICATE-----
 * Base64 cert
 * -----END CERTIFICATE-----
 * </pre>
 * Arbitrary data may precede the BEGIN header, so all matching entries within the text
 * are collected rather than requiring the text to start with an entry.
 */
static class PemParser {
    private final String name;
    private final Pattern pattern;

    PemParser(String name) {
        this.name = name;
        // The entry name may be wrapped across whitespace in the file.
        String nameIgnoreSpace = name.replace(" ", "\\s+");
        // Delimiter lines: lenient about the number of dashes and trailing whitespace.
        String begin = String.format("-+%s\\s*.*%s[^-]*-+\\s+", "BEGIN", nameIgnoreSpace);
        String end = String.format("-+%s\\s*.*%s[^-]*-+\\s+", "END", nameIgnoreSpace);
        // Optional "Key: value" header lines (e.g. Proc-Type) before the base64 body.
        String headers = String.format("(?:%s)*", "\\s*[^\\r\\n]*:[^\\r\\n]*[\\r\\n]+");
        // Group 1 captures the (whitespace-interleaved) base64 payload.
        String base64 = "([a-zA-Z0-9/+=\\s]*)";
        pattern = Pattern.compile(begin + headers + base64 + end);
    }

    private List<byte[]> pemEntries(String pem) {
        // Append a newline so a value whose final END line lacks one still matches.
        Matcher matcher = pattern.matcher(pem + "\n");
        List<byte[]> decoded = new ArrayList<>();
        while (matcher.find()) {
            String payload = matcher.group(1).replaceAll("\\s", "");
            decoded.add(Base64.getDecoder().decode(payload));
        }
        if (decoded.isEmpty())
            throw new InvalidConfigurationException("No matching " + name + " entries in PEM file");
        return decoded;
    }
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/ssl/SslFactory.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.ssl;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.Reconfigurable;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.config.SslConfigs;
import org.apache.kafka.common.config.internals.BrokerSecurityConfigs;
import org.apache.kafka.common.network.Mode;
import org.apache.kafka.common.security.auth.SslEngineFactory;
import org.apache.kafka.common.utils.Utils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.net.ssl.SSLEngine;
import javax.net.ssl.SSLEngineResult;
import javax.net.ssl.SSLException;
import java.io.Closeable;
import java.net.InetSocketAddress;
import java.net.Socket;
import java.nio.ByteBuffer;
import java.security.cert.Certificate;
import java.security.cert.X509Certificate;
import java.security.GeneralSecurityException;
import java.security.KeyStore;
import java.security.Principal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.Enumeration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;
import java.util.Set;
import java.util.HashSet;
/**
 * Creates and (dynamically) reconfigures SSL engines via a pluggable {@link SslEngineFactory}.
 * Instances must be {@link #configure(Map) configured} exactly once before use.
 */
public class SslFactory implements Reconfigurable, Closeable {
    private static final Logger log = LoggerFactory.getLogger(SslFactory.class);
    // Whether engines are created for client or server handshaking.
    private final Mode mode;
    // Optional override for ssl.client.auth; null means use the configured value.
    private final String clientAuthConfigOverride;
    // When true, a client/server handshake using the configured stores is performed at
    // (re)configuration time to validate that the keystore is trusted by the truststore.
    private final boolean keystoreVerifiableUsingTruststore;
    private String endpointIdentification;
    private SslEngineFactory sslEngineFactory;
    // Snapshot of the configs used to build sslEngineFactory; base for reconfiguration merges.
    private Map<String, Object> sslEngineFactoryConfig;

    public SslFactory(Mode mode) {
        this(mode, null, false);
    }

    /**
     * Create an SslFactory.
     *
     * @param mode Whether to use client or server mode.
     * @param clientAuthConfigOverride The value to override ssl.client.auth with, or null
     *   if we don't want to override it.
     * @param keystoreVerifiableUsingTruststore True if we should require the keystore to be verifiable
     *   using the truststore.
     */
    public SslFactory(Mode mode,
                      String clientAuthConfigOverride,
                      boolean keystoreVerifiableUsingTruststore) {
        this.mode = mode;
        this.clientAuthConfigOverride = clientAuthConfigOverride;
        this.keystoreVerifiableUsingTruststore = keystoreVerifiableUsingTruststore;
    }
/**
 * One-time configuration: instantiates the engine factory and, if requested, validates
 * that a client engine built from these settings can handshake with a server engine
 * built from the same settings.
 *
 * @throws IllegalStateException if already configured
 * @throws ConfigException if keystore/truststore validation fails
 */
@SuppressWarnings("unchecked")
@Override
public void configure(Map<String, ?> configs) throws KafkaException {
    if (sslEngineFactory != null) {
        throw new IllegalStateException("SslFactory was already configured.");
    }
    this.endpointIdentification = (String) configs.get(SslConfigs.SSL_ENDPOINT_IDENTIFICATION_ALGORITHM_CONFIG);
    // The input map must be a mutable RecordingMap in production.
    // NOTE(review): the cast (not a copy) means the override below mutates the CALLER's map.
    // This appears deliberate (so config-access recording sees the override) — confirm before changing.
    Map<String, Object> nextConfigs = (Map<String, Object>) configs;
    if (clientAuthConfigOverride != null) {
        nextConfigs.put(BrokerSecurityConfigs.SSL_CLIENT_AUTH_CONFIG, clientAuthConfigOverride);
    }
    SslEngineFactory builder = instantiateSslEngineFactory(nextConfigs);
    if (keystoreVerifiableUsingTruststore) {
        try {
            // Validate the factory against itself: client engine must trust server engine.
            SslEngineValidator.validate(builder, builder);
        } catch (Exception e) {
            throw new ConfigException("A client SSLEngine created with the provided settings " +
                "can't connect to a server SSLEngine created with those settings.", e);
        }
    }
    this.sslEngineFactory = builder;
}
@Override
public Set<String> reconfigurableConfigs() {
    // Delegates to the engine factory, so pluggable factories control what is reconfigurable.
    return sslEngineFactory.reconfigurableConfigs();
}

@Override
public void validateReconfiguration(Map<String, ?> newConfigs) {
    // Building a new factory performs all validation; the result is discarded.
    createNewSslEngineFactory(newConfigs);
}

@Override
public void reconfigure(Map<String, ?> newConfigs) throws KafkaException {
    SslEngineFactory newSslEngineFactory = createNewSslEngineFactory(newConfigs);
    // Reference comparison: createNewSslEngineFactory returns the existing instance
    // when no rebuild is needed, in which case there is nothing to swap or close.
    if (newSslEngineFactory != this.sslEngineFactory) {
        Utils.closeQuietly(this.sslEngineFactory, "close stale ssl engine factory");
        this.sslEngineFactory = newSslEngineFactory;
        log.info("Created new {} SSL engine builder with keystore {} truststore {}", mode,
            newSslEngineFactory.keystore(), newSslEngineFactory.truststore());
    }
}
// Creates and configures the engine factory: either the class named by
// ssl.engine.factory.class or the built-in default. Also snapshots the configs used,
// which later serves as the base map for reconfiguration.
private SslEngineFactory instantiateSslEngineFactory(Map<String, Object> configs) {
    @SuppressWarnings("unchecked")
    Class<? extends SslEngineFactory> factoryClass =
        (Class<? extends SslEngineFactory>) configs.get(SslConfigs.SSL_ENGINE_FACTORY_CLASS_CONFIG);
    SslEngineFactory factory = factoryClass == null
        ? new DefaultSslEngineFactory()
        : Utils.newInstance(factoryClass);
    factory.configure(configs);
    this.sslEngineFactoryConfig = configs;
    return factory;
}
/**
 * Builds a candidate engine factory for a dynamic config update and validates it against
 * the current one. Returns the EXISTING factory unchanged when the update does not require
 * a rebuild; otherwise returns the validated new factory (caller swaps and closes the old).
 *
 * @throws ConfigException if validation of the new configs fails
 */
private SslEngineFactory createNewSslEngineFactory(Map<String, ?> newConfigs) {
    if (sslEngineFactory == null) {
        throw new IllegalStateException("SslFactory has not been configured.");
    }
    // Start from the last-used configs and overlay only the reconfigurable keys.
    Map<String, Object> nextConfigs = new HashMap<>(sslEngineFactoryConfig);
    copyMapEntries(nextConfigs, newConfigs, reconfigurableConfigs());
    if (clientAuthConfigOverride != null) {
        nextConfigs.put(BrokerSecurityConfigs.SSL_CLIENT_AUTH_CONFIG, clientAuthConfigOverride);
    }
    if (!sslEngineFactory.shouldBeRebuilt(nextConfigs)) {
        return sslEngineFactory;
    }
    try {
        SslEngineFactory newSslEngineFactory = instantiateSslEngineFactory(nextConfigs);
        // A keystore may be replaced dynamically but never added to or removed from a listener.
        if (sslEngineFactory.keystore() == null) {
            if (newSslEngineFactory.keystore() != null) {
                throw new ConfigException("Cannot add SSL keystore to an existing listener for " +
                    "which no keystore was configured.");
            }
        } else {
            if (newSslEngineFactory.keystore() == null) {
                throw new ConfigException("Cannot remove the SSL keystore from an existing listener for " +
                    "which a keystore was configured.");
            }
            // The replacement keystore must present the same identities (DNs / SANs).
            CertificateEntries.ensureCompatible(newSslEngineFactory.keystore(), sslEngineFactory.keystore());
        }
        if (sslEngineFactory.truststore() == null && newSslEngineFactory.truststore() != null) {
            throw new ConfigException("Cannot add SSL truststore to an existing listener for which no " +
                "truststore was configured.");
        }
        if (keystoreVerifiableUsingTruststore) {
            if (sslEngineFactory.truststore() != null || sslEngineFactory.keystore() != null) {
                // Cross-validate old and new factories in both client/server directions.
                SslEngineValidator.validate(sslEngineFactory, newSslEngineFactory);
            }
        }
        return newSslEngineFactory;
    } catch (Exception e) {
        log.debug("Validation of dynamic config update of SSLFactory failed.", e);
        // ConfigException offers no cause constructor here, so the cause is folded into the message.
        throw new ConfigException("Validation of dynamic config update of SSLFactory failed: " + e);
    }
}
/**
 * Creates an SSL engine for the given connected socket, deriving the peer host without
 * a reverse DNS lookup (see {@link #peerHost(Socket)}).
 */
public SSLEngine createSslEngine(Socket socket) {
    return createSslEngine(peerHost(socket), socket.getPort());
}

/**
 * Prefer `createSslEngine(Socket)` if a `Socket` instance is available. If using this overload,
 * avoid reverse DNS resolution in the computation of `peerHost`.
 *
 * @throws IllegalStateException if this factory has not been configured yet
 */
public SSLEngine createSslEngine(String peerHost, int peerPort) {
    if (sslEngineFactory == null) {
        throw new IllegalStateException("SslFactory has not been configured.");
    }
    // Endpoint identification only applies on the client side of the handshake.
    if (mode == Mode.SERVER) {
        return sslEngineFactory.createServerSslEngine(peerHost, peerPort);
    } else {
        return sslEngineFactory.createClientSslEngine(peerHost, peerPort, endpointIdentification);
    }
}
/**
 * Returns host/IP address of remote host without reverse DNS lookup to be used as the host
 * for creating SSL engine. This is used as a hint for session reuse strategy and also for
 * hostname verification of server hostnames.
 * <p>
 * Scenarios:
 * <ul>
 *   <li>Server-side
 *   <ul>
 *     <li>Server accepts connection from a client. Server knows only client IP
 *     address. We want to avoid reverse DNS lookup of the client IP address since the server
 *     does not verify or use client hostname. The IP address can be used directly.</li>
 *   </ul>
 *   </li>
 *   <li>Client-side
 *   <ul>
 *     <li>Client connects to server using hostname. No lookup is necessary
 *     and the hostname should be used to create the SSL engine. This hostname is validated
 *     against the hostname in SubjectAltName (dns) or CommonName in the certificate if
 *     hostname verification is enabled. Authentication fails if hostname does not match.</li>
 *     <li>Client connects to server using IP address, but certificate contains only
 *     SubjectAltName (dns). Use of reverse DNS lookup to determine hostname introduces
 *     a security vulnerability since authentication would be reliant on a secure DNS.
 *     Hence hostname verification should fail in this case.</li>
 *     <li>Client connects to server using IP address and certificate contains
 *     SubjectAltName (ipaddress). This could be used when Kafka is on a private network.
 *     If reverse DNS lookup is used, authentication would succeed using IP address if lookup
 *     fails and IP address is used, but authentication would fail if lookup succeeds and
 *     dns name is used. For consistency and to avoid dependency on a potentially insecure
 *     DNS, reverse DNS lookup should be avoided and the IP address specified by the client for
 *     connection should be used to create the SSL engine.</li>
 *   </ul></li>
 * </ul>
 */
private String peerHost(Socket socket) {
    // InetSocketAddress.getHostString returns the literal address/hostname without
    // triggering a reverse DNS lookup (port 0 is a placeholder; only the host is used).
    return new InetSocketAddress(socket.getInetAddress(), 0).getHostString();
}

// Exposes the current engine factory (e.g. for metrics or tests); may be null before configure().
public SslEngineFactory sslEngineFactory() {
    return sslEngineFactory;
}
/**
 * Copies the entries whose keys appear in {@code keySet} from {@code srcMap} into
 * {@code destMap}. Keys absent from {@code srcMap} are skipped entirely.
 *
 * @param destMap The map to copy entries into.
 * @param srcMap  The map to copy entries from.
 * @param keySet  Only entries with these keys will be copied.
 * @param <K>     The map key type.
 * @param <V>     The map value type.
 */
private static <K, V> void copyMapEntries(Map<K, V> destMap,
                                          Map<K, ? extends V> srcMap,
                                          Set<K> keySet) {
    for (K key : keySet)
        copyMapEntry(destMap, srcMap, key);
}

/**
 * Copies a single entry from {@code srcMap} into {@code destMap} if the key is present.
 *
 * @param destMap The map to copy entries into.
 * @param srcMap  The map to copy entries from.
 * @param key     The entry with this key will be copied.
 * @param <K>     The map key type.
 * @param <V>     The map value type.
 */
private static <K, V> void copyMapEntry(Map<K, V> destMap,
                                        Map<K, ? extends V> srcMap,
                                        K key) {
    // containsKey (rather than a null check on get) so explicitly-null values are copied too.
    if (srcMap.containsKey(key))
        destMap.put(key, srcMap.get(key));
}

@Override
public void close() {
    Utils.closeQuietly(sslEngineFactory, "close engine factory");
}
/**
 * Identity view of a keystore entry (subject DN + subject alternative names), used to
 * verify that a dynamically updated keystore presents the same identities as the old one.
 */
static class CertificateEntries {
    private final String alias;
    private final Principal subjectPrincipal;
    private final Set<List<?>> subjectAltNames;

    /** Collects the identity of every X.509 certificate entry in the keystore, in alias order. */
    static List<CertificateEntries> create(KeyStore keystore) throws GeneralSecurityException {
        List<CertificateEntries> result = new ArrayList<>();
        for (Enumeration<String> aliases = keystore.aliases(); aliases.hasMoreElements(); ) {
            String alias = aliases.nextElement();
            Certificate certificate = keystore.getCertificate(alias);
            // Non-X.509 entries (if any) carry no DN/SAN identity and are skipped.
            if (certificate instanceof X509Certificate)
                result.add(new CertificateEntries(alias, (X509Certificate) certificate));
        }
        return result;
    }

    /**
     * Verifies that the new keystore's entries match the old one pairwise: same entry count,
     * same subject DN, and SANs that are a superset of the old entry's SANs.
     *
     * @throws ConfigException if any pair of entries is incompatible
     */
    static void ensureCompatible(KeyStore newKeystore, KeyStore oldKeystore) throws GeneralSecurityException {
        List<CertificateEntries> newEntries = CertificateEntries.create(newKeystore);
        List<CertificateEntries> oldEntries = CertificateEntries.create(oldKeystore);
        if (newEntries.size() != oldEntries.size()) {
            throw new ConfigException(String.format("Keystore entries do not match, existing store contains %d entries, new store contains %d entries",
                oldEntries.size(), newEntries.size()));
        }
        for (int i = 0; i < newEntries.size(); i++) {
            CertificateEntries newEntry = newEntries.get(i);
            CertificateEntries oldEntry = oldEntries.get(i);
            Principal newPrincipal = newEntry.subjectPrincipal;
            Principal oldPrincipal = oldEntry.subjectPrincipal;
            // Compare principal objects to compare canonical names (e.g. to ignore leading/trailing whitespaces).
            // Canonical names may differ if the tags of a field changes from one with a printable string representation
            // to one without or vice-versa due to optional conversion to hex representation based on the tag. So we
            // also compare Principal.getName which compares the RFC2253 name. If either matches, allow dynamic update.
            boolean principalsMatch = Objects.equals(newPrincipal, oldPrincipal)
                || newPrincipal.getName().equalsIgnoreCase(oldPrincipal.getName());
            if (!principalsMatch) {
                throw new ConfigException(String.format("Keystore DistinguishedName does not match: " +
                    " existing={alias=%s, DN=%s}, new={alias=%s, DN=%s}",
                    oldEntry.alias, oldEntry.subjectPrincipal, newEntry.alias, newEntry.subjectPrincipal));
            }
            if (!newEntry.subjectAltNames.containsAll(oldEntry.subjectAltNames)) {
                throw new ConfigException(String.format("Keystore SubjectAltNames do not match: " +
                    " existing={alias=%s, SAN=%s}, new={alias=%s, SAN=%s}",
                    oldEntry.alias, oldEntry.subjectAltNames, newEntry.alias, newEntry.subjectAltNames));
            }
        }
    }

    CertificateEntries(String alias, X509Certificate cert) throws GeneralSecurityException {
        this.alias = alias;
        this.subjectPrincipal = cert.getSubjectX500Principal();
        Collection<List<?>> altNames = cert.getSubjectAlternativeNames();
        // Copy into a set: SAN order is irrelevant for the compatibility comparison.
        this.subjectAltNames = altNames != null ? new HashSet<>(altNames) : Collections.emptySet();
    }

    @Override
    public int hashCode() {
        return Objects.hash(subjectPrincipal, subjectAltNames);
    }

    @Override
    public boolean equals(Object obj) {
        if (!(obj instanceof CertificateEntries))
            return false;
        CertificateEntries other = (CertificateEntries) obj;
        return Objects.equals(subjectPrincipal, other.subjectPrincipal)
            && Objects.equals(subjectAltNames, other.subjectAltNames);
    }

    @Override
    public String toString() {
        return "subjectPrincipal=" + subjectPrincipal +
            ", subjectAltNames=" + subjectAltNames;
    }
}
/**
 * Validator used to verify dynamic update of keystore used in inter-broker communication.
 * The validator checks that a successful handshake can be performed using the keystore and
 * truststore configured on this SslFactory.
 */
private static class SslEngineValidator {
    private static final ByteBuffer EMPTY_BUF = ByteBuffer.allocate(0);
    private final SSLEngine sslEngine;
    // Result of the most recent wrap/unwrap; consulted in the NOT_HANDSHAKING terminal check.
    private SSLEngineResult handshakeResult;
    // Decrypted application data produced by unwrap (handshake produces none, but space is required).
    private ByteBuffer appBuffer;
    // Encrypted data wrapped by this engine, to be consumed (unwrapped) by the peer validator.
    private ByteBuffer netBuffer;

    // Handshakes both directions: old-as-server vs new-as-client, then new-as-server vs old-as-client.
    static void validate(SslEngineFactory oldEngineBuilder,
                         SslEngineFactory newEngineBuilder) throws SSLException {
        // NOTE(review): the two-engine validate() below names its parameters (clientEngine, serverEngine)
        // but is called here with (server, client). Harmless since the handshake pumping is symmetric,
        // but the naming is misleading — confirm before relying on parameter roles.
        validate(createSslEngineForValidation(oldEngineBuilder, Mode.SERVER),
            createSslEngineForValidation(newEngineBuilder, Mode.CLIENT));
        validate(createSslEngineForValidation(newEngineBuilder, Mode.SERVER),
            createSslEngineForValidation(oldEngineBuilder, Mode.CLIENT));
    }

    private static SSLEngine createSslEngineForValidation(SslEngineFactory sslEngineFactory, Mode mode) {
        // Use empty hostname, disable hostname verification
        if (mode == Mode.SERVER) {
            return sslEngineFactory.createServerSslEngine("", 0);
        } else {
            return sslEngineFactory.createClientSslEngine("", 0, "");
        }
    }

    // Pumps both engines in-memory until the handshake completes (or an SSLException is thrown).
    static void validate(SSLEngine clientEngine, SSLEngine serverEngine) throws SSLException {
        SslEngineValidator clientValidator = new SslEngineValidator(clientEngine);
        SslEngineValidator serverValidator = new SslEngineValidator(serverEngine);
        try {
            clientValidator.beginHandshake();
            serverValidator.beginHandshake();
            while (!serverValidator.complete() || !clientValidator.complete()) {
                clientValidator.handshake(serverValidator);
                serverValidator.handshake(clientValidator);
            }
        } finally {
            clientValidator.close();
            serverValidator.close();
        }
    }

    private SslEngineValidator(SSLEngine engine) {
        this.sslEngine = engine;
        appBuffer = ByteBuffer.allocate(sslEngine.getSession().getApplicationBufferSize());
        netBuffer = ByteBuffer.allocate(sslEngine.getSession().getPacketBufferSize());
    }

    void beginHandshake() throws SSLException {
        sslEngine.beginHandshake();
    }

    /**
     * Advances this engine's side of the handshake as far as possible, exchanging data
     * through the peer validator's netBuffer. Returns when this engine must wait for the
     * peer (wrapped data not yet consumed, or no peer data available to unwrap).
     */
    void handshake(SslEngineValidator peerValidator) throws SSLException {
        SSLEngineResult.HandshakeStatus handshakeStatus = sslEngine.getHandshakeStatus();
        while (true) {
            switch (handshakeStatus) {
                case NEED_WRAP:
                    if (netBuffer.position() != 0) // Wait for peer to consume previously wrapped data
                        return;
                    handshakeResult = sslEngine.wrap(EMPTY_BUF, netBuffer);
                    switch (handshakeResult.getStatus()) {
                        case OK: break;
                        case BUFFER_OVERFLOW:
                            // Grow netBuffer to the current packet size; compact/flip preserve any
                            // pending data across the reallocation.
                            netBuffer.compact();
                            netBuffer = Utils.ensureCapacity(netBuffer, sslEngine.getSession().getPacketBufferSize());
                            netBuffer.flip();
                            break;
                        case BUFFER_UNDERFLOW:
                        case CLOSED:
                        default:
                            throw new SSLException("Unexpected handshake status: " + handshakeResult.getStatus());
                    }
                    return;
                case NEED_UNWRAP:
                    if (peerValidator.netBuffer.position() == 0) // no data to unwrap, return to process peer
                        return;
                    peerValidator.netBuffer.flip(); // unwrap the data from peer
                    handshakeResult = sslEngine.unwrap(peerValidator.netBuffer, appBuffer);
                    peerValidator.netBuffer.compact();
                    handshakeStatus = handshakeResult.getHandshakeStatus();
                    switch (handshakeResult.getStatus()) {
                        case OK: break;
                        case BUFFER_OVERFLOW:
                            // Not enough room for decrypted data; grow and retry on next iteration.
                            appBuffer = Utils.ensureCapacity(appBuffer, sslEngine.getSession().getApplicationBufferSize());
                            break;
                        case BUFFER_UNDERFLOW:
                            // Incomplete packet from peer; grow netBuffer and wait for more data.
                            netBuffer = Utils.ensureCapacity(netBuffer, sslEngine.getSession().getPacketBufferSize());
                            break;
                        case CLOSED:
                        default:
                            throw new SSLException("Unexpected handshake status: " + handshakeResult.getStatus());
                    }
                    break;
                case NEED_TASK:
                    // Delegated tasks are run inline; validation is synchronous by design.
                    sslEngine.getDelegatedTask().run();
                    handshakeStatus = sslEngine.getHandshakeStatus();
                    break;
                case FINISHED:
                    return;
                case NOT_HANDSHAKING:
                    // NOTE(review): handshakeResult may be null if no wrap/unwrap occurred before
                    // reaching NOT_HANDSHAKING — presumably unreachable after beginHandshake(); confirm.
                    if (handshakeResult.getHandshakeStatus() != SSLEngineResult.HandshakeStatus.FINISHED)
                        throw new SSLException("Did not finish handshake");
                    return;
                default:
                    throw new IllegalStateException("Unexpected handshake status " + handshakeStatus);
            }
        }
    }

    boolean complete() {
        return sslEngine.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.FINISHED ||
            sslEngine.getHandshakeStatus() == SSLEngineResult.HandshakeStatus.NOT_HANDSHAKING;
    }

    void close() {
        sslEngine.closeOutbound();
        try {
            sslEngine.closeInbound();
        } catch (Exception e) {
            // ignore: inbound close routinely fails when the peer did not send close_notify
        }
    }
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/ssl/SslPrincipalMapper.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.ssl;
import java.io.IOException;
import java.util.List;
import java.util.ArrayList;
import java.util.Locale;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.apache.kafka.common.config.internals.BrokerSecurityConfigs.DEFAULT_SSL_PRINCIPAL_MAPPING_RULES;
/**
 * Maps an X.500 distinguished name from an SSL certificate to a Kafka principal name,
 * using the rules configured via {@code ssl.principal.mapping.rules}. Rules are applied
 * in order; the first rule that matches produces the principal.
 */
public class SslPrincipalMapper {
    // A single rule: "DEFAULT", or "RULE:<pattern>/<replacement>/[LU]", or an empty rule.
    private static final String RULE_PATTERN = "(DEFAULT)|RULE:((\\\\.|[^\\\\/])*)/((\\\\.|[^\\\\/])*)/([LU]?).*?|(.*?)";
    // Rules are comma-separated; each whole rule is captured as group 1.
    private static final Pattern RULE_SPLITTER = Pattern.compile("\\s*(" + RULE_PATTERN + ")\\s*(,\\s*|$)");
    private static final Pattern RULE_PARSER = Pattern.compile(RULE_PATTERN);

    private final List<Rule> rules;

    public SslPrincipalMapper(String sslPrincipalMappingRules) {
        this.rules = parseRules(splitRules(sslPrincipalMappingRules));
    }

    public static SslPrincipalMapper fromRules(String sslPrincipalMappingRules) {
        return new SslPrincipalMapper(sslPrincipalMappingRules);
    }

    // Splits the comma-separated rule string into individual rule strings; null input
    // falls back to the broker default mapping rules.
    private static List<String> splitRules(String sslPrincipalMappingRules) {
        if (sslPrincipalMappingRules == null) {
            sslPrincipalMappingRules = DEFAULT_SSL_PRINCIPAL_MAPPING_RULES;
        }
        List<String> result = new ArrayList<>();
        Matcher matcher = RULE_SPLITTER.matcher(sslPrincipalMappingRules.trim());
        while (matcher.find()) {
            result.add(matcher.group(1));
        }
        return result;
    }

    /**
     * Parses each rule string into a {@link Rule}.
     *
     * @throws IllegalArgumentException if a rule does not match the expected syntax in full
     */
    private static List<Rule> parseRules(List<String> rules) {
        List<Rule> result = new ArrayList<>();
        for (String rule : rules) {
            Matcher matcher = RULE_PARSER.matcher(rule);
            if (!matcher.lookingAt()) {
                throw new IllegalArgumentException("Invalid rule: " + rule);
            }
            if (rule.length() != matcher.end()) {
                throw new IllegalArgumentException("Invalid rule: `" + rule + "`, unmatched substring: `" + rule.substring(matcher.end()) + "`");
            }
            // empty rules are ignored
            if (matcher.group(1) != null) {
                result.add(new Rule());
            } else if (matcher.group(2) != null) {
                result.add(new Rule(matcher.group(2),
                    matcher.group(4),
                    "L".equals(matcher.group(6)),
                    "U".equals(matcher.group(6))));
            }
        }
        return result;
    }

    /**
     * Applies the rules in order and returns the first non-null mapping result.
     *
     * @throws NoMatchingRule if no configured rule matches the distinguished name
     */
    public String getName(String distinguishedName) throws IOException {
        for (Rule r : rules) {
            String principalName = r.apply(distinguishedName);
            if (principalName != null) {
                return principalName;
            }
        }
        throw new NoMatchingRule("No rules apply to " + distinguishedName + ", rules " + rules);
    }

    @Override
    public String toString() {
        return "SslPrincipalMapper(rules = " + rules + ")";
    }

    public static class NoMatchingRule extends IOException {
        NoMatchingRule(String msg) {
            super(msg);
        }
    }

    private static class Rule {
        private static final Pattern BACK_REFERENCE_PATTERN = Pattern.compile("\\$(\\d+)");

        private final boolean isDefault;
        private final Pattern pattern;
        private final String replacement;
        private final boolean toLowerCase;
        private final boolean toUpperCase;

        // The DEFAULT rule: passes the distinguished name through unchanged.
        Rule() {
            isDefault = true;
            pattern = null;
            replacement = null;
            toLowerCase = false;
            toUpperCase = false;
        }

        Rule(String pattern, String replacement, boolean toLowerCase, boolean toUpperCase) {
            isDefault = false;
            this.pattern = pattern == null ? null : Pattern.compile(pattern);
            this.replacement = replacement;
            this.toLowerCase = toLowerCase;
            this.toUpperCase = toUpperCase;
        }

        /**
         * Applies this rule to the distinguished name. Returns the mapped principal, or null
         * if the pattern does not match the full name (so the next rule is tried).
         */
        String apply(String distinguishedName) {
            if (isDefault) {
                return distinguishedName;
            }
            String result = null;
            final Matcher m = pattern.matcher(distinguishedName);
            if (m.matches()) {
                // Reuse the existing matcher (Matcher.replaceAll resets it first) instead of
                // re-compiling the pattern via String.replaceAll — same result, less work.
                result = m.replaceAll(escapeLiteralBackReferences(replacement, m.groupCount()));
            }
            if (toLowerCase && result != null) {
                result = result.toLowerCase(Locale.ENGLISH);
            } else if (toUpperCase && result != null) { // fixed: was non-short-circuit '&'
                result = result.toUpperCase(Locale.ENGLISH);
            }
            return result;
        }

        //If we find a back reference that is not valid, then we will treat it as a literal string. For example, if we have 3 capturing
        //groups and the Replacement Value has the value is "$1@$4", then we want to treat the $4 as a literal "$4", rather
        //than attempting to use it as a back reference.
        //This method was taken from Apache Nifi project : org.apache.nifi.authorization.util.IdentityMappingUtil
        private String escapeLiteralBackReferences(final String unescaped, final int numCapturingGroups) {
            if (numCapturingGroups == 0) {
                return unescaped;
            }
            String value = unescaped;
            final Matcher backRefMatcher = BACK_REFERENCE_PATTERN.matcher(value);
            while (backRefMatcher.find()) {
                final String backRefNum = backRefMatcher.group(1);
                if (backRefNum.startsWith("0")) {
                    continue;
                }
                int backRefIndex = Integer.parseInt(backRefNum);
                // if we have a replacement value like $123, and we have less than 123 capturing groups, then
                // we want to truncate the 3 and use capturing group 12; if we have less than 12 capturing groups,
                // then we want to truncate the 2 and use capturing group 1; if we don't have a capturing group then
                // we want to truncate the 1 and get 0.
                while (backRefIndex > numCapturingGroups && backRefIndex >= 10) {
                    backRefIndex /= 10;
                }
                if (backRefIndex > numCapturingGroups) {
                    // Escape the '$' so the regex engine treats the reference as literal text.
                    final StringBuilder sb = new StringBuilder(value.length() + 1);
                    final int groupStart = backRefMatcher.start(1);
                    sb.append(value.substring(0, groupStart - 1));
                    sb.append("\\");
                    sb.append(value.substring(groupStart - 1));
                    value = sb.toString();
                }
            }
            return value;
        }

        @Override
        public String toString() {
            StringBuilder buf = new StringBuilder();
            if (isDefault) {
                buf.append("DEFAULT");
            } else {
                buf.append("RULE:");
                if (pattern != null) {
                    buf.append(pattern);
                }
                if (replacement != null) {
                    buf.append("/");
                    buf.append(replacement);
                }
                if (toLowerCase) {
                    buf.append("/L");
                } else if (toUpperCase) {
                    buf.append("/U");
                }
            }
            return buf.toString();
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/ssl/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Provides utilities for using SSL encryption for networked connections.
* <strong>This package is not a supported Kafka API; the implementation may change without warning between minor or patch releases.</strong>
*/
package org.apache.kafka.common.security.ssl; |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token/delegation/DelegationToken.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.token.delegation;
import org.apache.kafka.common.annotation.InterfaceStability;
import java.security.MessageDigest;
import java.util.Arrays;
import java.util.Base64;
import java.util.Objects;
/**
* A class representing a delegation token.
*
*/
@InterfaceStability.Evolving
public class DelegationToken {
    // Both fields are set once in the constructor and never reassigned, so they are final.
    private final TokenInformation tokenInformation;
    private final byte[] hmac;

    public DelegationToken(TokenInformation tokenInformation, byte[] hmac) {
        this.tokenInformation = tokenInformation;
        this.hmac = hmac;
    }

    /** Returns the token metadata. */
    public TokenInformation tokenInfo() {
        return tokenInformation;
    }

    /** Returns the HMAC bytes (the internal array, not a copy). */
    public byte[] hmac() {
        return hmac;
    }

    /** Returns the HMAC encoded as a base64 string. */
    public String hmacAsBase64String() {
        return Base64.getEncoder().encodeToString(hmac);
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        DelegationToken token = (DelegationToken) o;
        // MessageDigest.isEqual compares the HMACs in constant time, avoiding timing leaks.
        return Objects.equals(tokenInformation, token.tokenInformation) && MessageDigest.isEqual(hmac, token.hmac);
    }

    @Override
    public int hashCode() {
        int result = tokenInformation != null ? tokenInformation.hashCode() : 0;
        result = 31 * result + Arrays.hashCode(hmac);
        return result;
    }

    @Override
    public String toString() {
        // The HMAC is a secret, so it is deliberately masked in the string form.
        return "DelegationToken{" +
                "tokenInformation=" + tokenInformation +
                ", hmac=[*******]" +
                '}';
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token/delegation/TokenInformation.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.token.delegation;
import org.apache.kafka.common.annotation.InterfaceStability;
import org.apache.kafka.common.security.auth.KafkaPrincipal;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Objects;
/**
* A class representing a delegation token details.
*
*/
@InterfaceStability.Evolving
public class TokenInformation {
    private final KafkaPrincipal owner;
    private final KafkaPrincipal tokenRequester;
    private final Collection<KafkaPrincipal> renewers;
    private final long issueTimestamp;
    private final long maxTimestamp;
    // Mutable: updated via setExpiryTimestamp() when a token is renewed.
    private long expiryTimestamp;
    private final String tokenId;

    /** Convenience constructor for the common case where the owner is also the token requester. */
    public TokenInformation(String tokenId, KafkaPrincipal owner,
                            Collection<KafkaPrincipal> renewers, long issueTimestamp, long maxTimestamp, long expiryTimestamp) {
        this(tokenId, owner, owner, renewers, issueTimestamp, maxTimestamp, expiryTimestamp);
    }

    public TokenInformation(String tokenId, KafkaPrincipal owner, KafkaPrincipal tokenRequester,
                            Collection<KafkaPrincipal> renewers, long issueTimestamp, long maxTimestamp, long expiryTimestamp) {
        this.tokenId = tokenId;
        this.owner = owner;
        this.tokenRequester = tokenRequester;
        this.renewers = renewers;
        this.issueTimestamp = issueTimestamp;
        this.maxTimestamp = maxTimestamp;
        this.expiryTimestamp = expiryTimestamp;
    }

    public KafkaPrincipal owner() {
        return owner;
    }

    public KafkaPrincipal tokenRequester() {
        return tokenRequester;
    }

    public String ownerAsString() {
        return owner.toString();
    }

    /** Returns the renewers collection as supplied at construction (not a copy). */
    public Collection<KafkaPrincipal> renewers() {
        return renewers;
    }

    /** Returns the string form of each renewer principal. */
    public Collection<String> renewersAsString() {
        Collection<String> renewerList = new ArrayList<>();
        for (KafkaPrincipal renewer : renewers) {
            renewerList.add(renewer.toString());
        }
        return renewerList;
    }

    public long issueTimestamp() {
        return issueTimestamp;
    }

    public long expiryTimestamp() {
        return expiryTimestamp;
    }

    public void setExpiryTimestamp(long expiryTimestamp) {
        this.expiryTimestamp = expiryTimestamp;
    }

    public String tokenId() {
        return tokenId;
    }

    public long maxTimestamp() {
        return maxTimestamp;
    }

    /** Returns true if the principal is the owner, the token requester, or one of the renewers. */
    public boolean ownerOrRenewer(KafkaPrincipal principal) {
        return owner.equals(principal) || tokenRequester.equals(principal) || renewers.contains(principal);
    }

    @Override
    public String toString() {
        return "TokenInformation{" +
                "owner=" + owner +
                ", tokenRequester=" + tokenRequester +
                ", renewers=" + renewers +
                ", issueTimestamp=" + issueTimestamp +
                ", maxTimestamp=" + maxTimestamp +
                ", expiryTimestamp=" + expiryTimestamp +
                ", tokenId='" + tokenId + '\'' +
                '}';
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) {
            return true;
        }
        if (o == null || getClass() != o.getClass()) {
            return false;
        }
        TokenInformation that = (TokenInformation) o;
        // expiryTimestamp is mutable and intentionally excluded from equality.
        return issueTimestamp == that.issueTimestamp &&
                maxTimestamp == that.maxTimestamp &&
                Objects.equals(owner, that.owner) &&
                Objects.equals(tokenRequester, that.tokenRequester) &&
                Objects.equals(renewers, that.renewers) &&
                Objects.equals(tokenId, that.tokenId);
    }

    @Override
    public int hashCode() {
        // Fixed: expiryTimestamp was previously included here although equals() excludes it,
        // which violated the equals/hashCode contract (equal objects could hash differently)
        // and made the hash unstable while an instance was stored in a hash-based collection.
        return Objects.hash(owner, tokenRequester, renewers, issueTimestamp, maxTimestamp, tokenId);
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token/delegation/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Provides mechanism for delegating authorization to a distinct Principal for securing Kafka clusters.
*/
package org.apache.kafka.common.security.token.delegation; |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token/delegation | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token/delegation/internals/DelegationTokenCache.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.token.delegation.internals;
import org.apache.kafka.common.security.authenticator.CredentialCache;
import org.apache.kafka.common.security.scram.ScramCredential;
import org.apache.kafka.common.security.scram.internals.ScramCredentialUtils;
import org.apache.kafka.common.security.scram.internals.ScramMechanism;
import org.apache.kafka.common.security.token.delegation.DelegationToken;
import org.apache.kafka.common.security.token.delegation.TokenInformation;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
public class DelegationTokenCache {
private CredentialCache credentialCache = new CredentialCache();
//Cache to hold all the tokens
private Map<String, TokenInformation> tokenCache = new ConcurrentHashMap<>();
//Cache to hold hmac->tokenId mapping. This is required for renew, expire requests
private Map<String, String> hmacTokenIdCache = new ConcurrentHashMap<>();
//Cache to hold tokenId->hmac mapping. This is required for removing entry from hmacTokenIdCache using tokenId.
private Map<String, String> tokenIdHmacCache = new ConcurrentHashMap<>();
public DelegationTokenCache(Collection<String> scramMechanisms) {
//Create caches for scramMechanisms
ScramCredentialUtils.createCache(credentialCache, scramMechanisms);
}
public ScramCredential credential(String mechanism, String tokenId) {
CredentialCache.Cache<ScramCredential> cache = credentialCache.cache(mechanism, ScramCredential.class);
return cache == null ? null : cache.get(tokenId);
}
public String owner(String tokenId) {
TokenInformation tokenInfo = tokenCache.get(tokenId);
return tokenInfo == null ? null : tokenInfo.owner().getName();
}
public void updateCache(DelegationToken token, Map<String, ScramCredential> scramCredentialMap) {
//Update TokenCache
String tokenId = token.tokenInfo().tokenId();
addToken(tokenId, token.tokenInfo());
String hmac = token.hmacAsBase64String();
//Update Scram Credentials
updateCredentials(tokenId, scramCredentialMap);
//Update hmac-id cache
hmacTokenIdCache.put(hmac, tokenId);
tokenIdHmacCache.put(tokenId, hmac);
}
public void removeCache(String tokenId) {
removeToken(tokenId);
updateCredentials(tokenId, new HashMap<>());
}
public String tokenIdForHmac(String base64hmac) {
return hmacTokenIdCache.get(base64hmac);
}
public TokenInformation tokenForHmac(String base64hmac) {
String tokenId = hmacTokenIdCache.get(base64hmac);
return tokenId == null ? null : tokenCache.get(tokenId);
}
public TokenInformation addToken(String tokenId, TokenInformation tokenInfo) {
return tokenCache.put(tokenId, tokenInfo);
}
public void removeToken(String tokenId) {
TokenInformation tokenInfo = tokenCache.remove(tokenId);
if (tokenInfo != null) {
String hmac = tokenIdHmacCache.remove(tokenInfo.tokenId());
if (hmac != null) {
hmacTokenIdCache.remove(hmac);
}
}
}
public Collection<TokenInformation> tokens() {
return tokenCache.values();
}
public TokenInformation token(String tokenId) {
return tokenCache.get(tokenId);
}
public CredentialCache.Cache<ScramCredential> credentialCache(String mechanism) {
return credentialCache.cache(mechanism, ScramCredential.class);
}
private void updateCredentials(String tokenId, Map<String, ScramCredential> scramCredentialMap) {
for (String mechanism : ScramMechanism.mechanismNames()) {
CredentialCache.Cache<ScramCredential> cache = credentialCache.cache(mechanism, ScramCredential.class);
if (cache != null) {
ScramCredential credential = scramCredentialMap.get(mechanism);
if (credential == null) {
cache.remove(tokenId);
} else {
cache.put(tokenId, credential);
}
}
}
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token/delegation | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/security/token/delegation/internals/DelegationTokenCredentialCallback.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.security.token.delegation.internals;
import org.apache.kafka.common.security.scram.ScramCredentialCallback;
public class DelegationTokenCredentialCallback extends ScramCredentialCallback {
private String tokenOwner;
private Long tokenExpiryTimestamp;
public void tokenOwner(String tokenOwner) {
this.tokenOwner = tokenOwner;
}
public String tokenOwner() {
return tokenOwner;
}
public void tokenExpiryTimestamp(Long tokenExpiryTimestamp) {
this.tokenExpiryTimestamp = tokenExpiryTimestamp;
}
public Long tokenExpiryTimestamp() {
return tokenExpiryTimestamp;
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/BooleanDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
/**
 * Deserializes a single byte into a {@link Boolean}: 0x01 is true, 0x00 is false.
 */
public class BooleanDeserializer implements Deserializer<Boolean> {
    private static final byte TRUE = 0x01;
    private static final byte FALSE = 0x00;

    @Override
    public Boolean deserialize(final String topic, final byte[] data) {
        if (data == null) {
            return null;
        }
        if (data.length != 1) {
            throw new SerializationException("Size of data received by BooleanDeserializer is not 1");
        }
        // Any byte other than the two defined encodings is rejected.
        switch (data[0]) {
            case TRUE:
                return Boolean.TRUE;
            case FALSE:
                return Boolean.FALSE;
            default:
                throw new SerializationException("Unexpected byte received by BooleanDeserializer: " + data[0]);
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/BooleanSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
/**
 * Serializes a {@link Boolean} into a single byte: 0x01 for true, 0x00 for false.
 */
public class BooleanSerializer implements Serializer<Boolean> {
    private static final byte TRUE = 0x01;
    private static final byte FALSE = 0x00;

    @Override
    public byte[] serialize(final String topic, final Boolean data) {
        if (data == null) {
            return null;
        }
        final byte encoded = data ? TRUE : FALSE;
        return new byte[] {encoded};
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ByteArrayDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class ByteArrayDeserializer implements Deserializer<byte[]> {
    /**
     * Returns the received bytes unchanged (identity deserialization).
     *
     * @param topic topic associated with the data (ignored)
     * @param data serialized bytes; may be null
     * @return the same array instance, or null if {@code data} is null
     */
    @Override
    public byte[] deserialize(String topic, byte[] data) {
        return data;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ByteArraySerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class ByteArraySerializer implements Serializer<byte[]> {
    /**
     * Returns the given bytes unchanged (identity serialization).
     *
     * @param topic topic associated with the data (ignored)
     * @param data bytes to serialize; may be null
     * @return the same array instance, or null if {@code data} is null
     */
    @Override
    public byte[] serialize(String topic, byte[] data) {
        return data;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ByteBufferDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import java.nio.ByteBuffer;
public class ByteBufferDeserializer implements Deserializer<ByteBuffer> {
public ByteBuffer deserialize(String topic, byte[] data) {
if (data == null)
return null;
return ByteBuffer.wrap(data);
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ByteBufferSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.utils.Utils;
import java.nio.ByteBuffer;
/**
* Do not need to flip before call <i>serialize(String, ByteBuffer)</i>. For example:
*
* <blockquote>
* <pre>
* ByteBufferSerializer serializer = ...; // Create Serializer
* ByteBuffer buffer = ...; // Allocate ByteBuffer
* buffer.put(data); // Put data into buffer, do not need to flip
* serializer.serialize(topic, buffer); // Serialize buffer
* </pre>
* </blockquote>
*/
public class ByteBufferSerializer implements Serializer<ByteBuffer> {
    /**
     * Serializes a ByteBuffer to its byte contents. Per the class javadoc, callers must
     * NOT flip the buffer before calling; this method flips it when a copy is needed.
     *
     * @param topic topic associated with the data (ignored)
     * @param data the buffer to serialize; may be null
     * @return the buffer's bytes, or null if {@code data} is null
     */
    @Override
    public byte[] serialize(String topic, ByteBuffer data) {
        if (data == null) {
            return null;
        }
        // Fast path: the buffer exposes a backing array with no offset and the remaining
        // bytes span the whole array, so the array can be returned directly without copying.
        if (data.hasArray()) {
            final byte[] arr = data.array();
            if (data.arrayOffset() == 0 && arr.length == data.remaining()) {
                return arr;
            }
        }
        // Otherwise flip so the bytes written so far become readable, then copy them out.
        // NOTE: this mutates the caller's buffer position/limit (see class javadoc).
        data.flip();
        return Utils.toArray(data);
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/BytesDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.utils.Bytes;
public class BytesDeserializer implements Deserializer<Bytes> {
public Bytes deserialize(String topic, byte[] data) {
if (data == null)
return null;
return new Bytes(data);
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/BytesSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.utils.Bytes;
public class BytesSerializer implements Serializer<Bytes> {
public byte[] serialize(String topic, Bytes data) {
if (data == null)
return null;
return data.get();
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/Deserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.header.Headers;
import java.io.Closeable;
import java.util.Map;
/**
* An interface for converting bytes to objects.
*
* A class that implements this interface is expected to have a constructor with no parameters.
* <p>
* Implement {@link org.apache.kafka.common.ClusterResourceListener} to receive cluster metadata once it's available. Please see the class documentation for ClusterResourceListener for more information.
*
* @param <T> Type to be deserialized into.
*/
public interface Deserializer<T> extends Closeable {
    /**
     * Configure this class.
     * @param configs configs in key/value pairs
     * @param isKey whether the deserializer is used for record keys ({@code true}) or values ({@code false})
     */
    default void configure(Map<String, ?> configs, boolean isKey) {
        // no-op by default; implementations override when they need configuration
    }
    /**
     * Deserialize a record value from a byte array into a value or object.
     * @param topic topic associated with the data
     * @param data serialized bytes; may be null; implementations are recommended to handle null by returning a value or null rather than throwing an exception.
     * @return deserialized typed data; may be null
     */
    T deserialize(String topic, byte[] data);
    /**
     * Deserialize a record value from a byte array into a value or object.
     * @param topic topic associated with the data
     * @param headers headers associated with the record; may be empty.
     * @param data serialized bytes; may be null; implementations are recommended to handle null by returning a value or null rather than throwing an exception.
     * @return deserialized typed data; may be null
     */
    default T deserialize(String topic, Headers headers, byte[] data) {
        // By default headers are ignored and the two-argument overload is used.
        return deserialize(topic, data);
    }
    /**
     * Close this deserializer.
     * <p>
     * This method must be idempotent as it may be called multiple times.
     */
    @Override
    default void close() {
        // no-op by default; implementations override to release resources
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/DoubleDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
/**
 * Deserializes 8 big-endian bytes into a {@link Double}.
 */
public class DoubleDeserializer implements Deserializer<Double> {
    @Override
    public Double deserialize(String topic, byte[] data) {
        if (data == null)
            return null;
        if (data.length != 8) {
            throw new SerializationException("Size of data received by Deserializer is not 8");
        }
        // Reassemble the 8 bytes, most significant first, into the double's long bit pattern.
        long bits = 0L;
        for (int i = 0; i < 8; i++) {
            bits = (bits << 8) | (data[i] & 0xFFL);
        }
        return Double.longBitsToDouble(bits);
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/DoubleSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class DoubleSerializer implements Serializer<Double> {
@Override
public byte[] serialize(String topic, Double data) {
if (data == null)
return null;
long bits = Double.doubleToLongBits(data);
return new byte[] {
(byte) (bits >>> 56),
(byte) (bits >>> 48),
(byte) (bits >>> 40),
(byte) (bits >>> 32),
(byte) (bits >>> 24),
(byte) (bits >>> 16),
(byte) (bits >>> 8),
(byte) bits
};
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/FloatDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
public class FloatDeserializer implements Deserializer<Float> {
@Override
public Float deserialize(final String topic, final byte[] data) {
if (data == null)
return null;
if (data.length != 4) {
throw new SerializationException("Size of data received by Deserializer is not 4");
}
int value = 0;
for (byte b : data) {
value <<= 8;
value |= b & 0xFF;
}
return Float.intBitsToFloat(value);
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/FloatSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class FloatSerializer implements Serializer<Float> {
@Override
public byte[] serialize(final String topic, final Float data) {
if (data == null)
return null;
long bits = Float.floatToRawIntBits(data);
return new byte[] {
(byte) (bits >>> 24),
(byte) (bits >>> 16),
(byte) (bits >>> 8),
(byte) bits
};
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/IntegerDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
public class IntegerDeserializer implements Deserializer<Integer> {
public Integer deserialize(String topic, byte[] data) {
if (data == null)
return null;
if (data.length != 4) {
throw new SerializationException("Size of data received by IntegerDeserializer is not 4");
}
int value = 0;
for (byte b : data) {
value <<= 8;
value |= b & 0xFF;
}
return value;
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/IntegerSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class IntegerSerializer implements Serializer<Integer> {
public byte[] serialize(String topic, Integer data) {
if (data == null)
return null;
return new byte[] {
(byte) (data >>> 24),
(byte) (data >>> 16),
(byte) (data >>> 8),
data.byteValue()
};
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ListDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import static org.apache.kafka.common.serialization.Serdes.ListSerde.SerializationStrategy;
import static org.apache.kafka.common.utils.Utils.mkEntry;
import static org.apache.kafka.common.utils.Utils.mkMap;
import java.io.ByteArrayInputStream;
import java.io.DataInputStream;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.errors.SerializationException;
import org.apache.kafka.common.serialization.Serdes.ListSerde;
import org.apache.kafka.common.utils.Utils;
/**
 * A {@link Deserializer} for {@code List<Inner>} payloads written by {@code ListSerializer}.
 * <p>
 * The wire format (as consumed here) is: a one-byte serialization-strategy flag; for the
 * CONSTANT_SIZE strategy a null-index list (a count followed by the indexes of null entries);
 * the number of entries; then the entries themselves, each prefixed with its byte length in
 * the VARIABLE_SIZE strategy.
 */
public class ListDeserializer<Inner> implements Deserializer<List<Inner>> {
    final Logger log = LoggerFactory.getLogger(ListDeserializer.class);
    // Deserializers whose payloads always have a fixed byte length. Lists of these types are
    // written without per-entry length prefixes (CONSTANT_SIZE), so the entry size must be
    // looked up here instead of read from the stream.
    private static final Map<Class<? extends Deserializer<?>>, Integer> FIXED_LENGTH_DESERIALIZERS = mkMap(
        mkEntry(ShortDeserializer.class, Short.BYTES),
        mkEntry(IntegerDeserializer.class, Integer.BYTES),
        mkEntry(FloatDeserializer.class, Float.BYTES),
        mkEntry(LongDeserializer.class, Long.BYTES),
        mkEntry(DoubleDeserializer.class, Double.BYTES),
        mkEntry(UUIDDeserializer.class, 36)
    );
    private Deserializer<Inner> inner;
    private Class<?> listClass;
    // Fixed entry size in bytes, or null when the inner deserializer is variable-length.
    private Integer primitiveSize;
    public ListDeserializer() {}
    /**
     * @param listClass concrete {@link List} implementation to instantiate when deserializing
     * @param inner deserializer for the individual entries
     * @throws IllegalArgumentException if either argument is null
     */
    public <L extends List<Inner>> ListDeserializer(Class<L> listClass, Deserializer<Inner> inner) {
        if (listClass == null || inner == null) {
            log.error("Could not construct ListDeserializer as not all required parameters were present -- listClass: {}, inner: {}", listClass, inner);
            throw new IllegalArgumentException("ListDeserializer requires both \"listClass\" and \"innerDeserializer\" parameters to be provided during initialization");
        }
        this.listClass = listClass;
        this.inner = inner;
        this.primitiveSize = FIXED_LENGTH_DESERIALIZERS.get(inner.getClass());
    }
    public Deserializer<Inner> innerDeserializer() {
        return inner;
    }
    /**
     * Configure the list class and inner deserializer from config, for instances created via
     * the default constructor.
     *
     * @throws ConfigException if this instance was already initialized via the non-default
     *         constructor, or if required config entries are missing or invalid
     */
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        if (listClass != null || inner != null) {
            log.error("Could not configure ListDeserializer as some parameters were already set -- listClass: {}, inner: {}", listClass, inner);
            throw new ConfigException("List deserializer was already initialized using a non-default constructor");
        }
        configureListClass(configs, isKey);
        configureInnerSerde(configs, isKey);
    }
    // Resolve the target List implementation from a config entry that may hold either a
    // class name or a Class object.
    private void configureListClass(Map<String, ?> configs, boolean isKey) {
        String listTypePropertyName = isKey ? CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_TYPE_CLASS : CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_TYPE_CLASS;
        final Object listClassOrName = configs.get(listTypePropertyName);
        if (listClassOrName == null) {
            throw new ConfigException("Not able to determine the list class because it was neither passed via the constructor nor set in the config.");
        }
        try {
            if (listClassOrName instanceof String) {
                listClass = Utils.loadClass((String) listClassOrName, Object.class);
            } else if (listClassOrName instanceof Class) {
                listClass = (Class<?>) listClassOrName;
            } else {
                throw new KafkaException("Could not determine the list class instance using \"" + listTypePropertyName + "\" property.");
            }
        } catch (final ClassNotFoundException e) {
            throw new ConfigException(listTypePropertyName, listClassOrName, "Deserializer's list class \"" + listClassOrName + "\" could not be found.");
        }
    }
    // Resolve the inner serde from config, configure it, and cache its fixed length (if any).
    @SuppressWarnings("unchecked")
    private void configureInnerSerde(Map<String, ?> configs, boolean isKey) {
        String innerSerdePropertyName = isKey ? CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS : CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_INNER_CLASS;
        final Object innerSerdeClassOrName = configs.get(innerSerdePropertyName);
        if (innerSerdeClassOrName == null) {
            throw new ConfigException("Not able to determine the inner serde class because it was neither passed via the constructor nor set in the config.");
        }
        try {
            if (innerSerdeClassOrName instanceof String) {
                inner = Utils.newInstance((String) innerSerdeClassOrName, Serde.class).deserializer();
            } else if (innerSerdeClassOrName instanceof Class) {
                inner = (Deserializer<Inner>) ((Serde) Utils.newInstance((Class) innerSerdeClassOrName)).deserializer();
            } else {
                throw new KafkaException("Could not determine the inner serde class instance using \"" + innerSerdePropertyName + "\" property.");
            }
            inner.configure(configs, isKey);
            primitiveSize = FIXED_LENGTH_DESERIALIZERS.get(inner.getClass());
        } catch (final ClassNotFoundException e) {
            throw new ConfigException(innerSerdePropertyName, innerSerdeClassOrName, "Deserializer's inner serde class \"" + innerSerdeClassOrName + "\" could not be found.");
        }
    }
    // Instantiate the list, preferring an (int capacity) constructor and falling back to the
    // no-arg constructor when the implementation has none.
    @SuppressWarnings("unchecked")
    private List<Inner> createListInstance(int listSize) {
        try {
            Constructor<List<Inner>> listConstructor;
            try {
                listConstructor = (Constructor<List<Inner>>) listClass.getConstructor(Integer.TYPE);
                return listConstructor.newInstance(listSize);
            } catch (NoSuchMethodException e) {
                listConstructor = (Constructor<List<Inner>>) listClass.getConstructor();
                return listConstructor.newInstance();
            }
        } catch (InstantiationException | IllegalAccessException | NoSuchMethodException |
            IllegalArgumentException | InvocationTargetException e) {
            log.error("Failed to construct list due to ", e);
            throw new KafkaException("Could not construct a list instance of \"" + listClass.getCanonicalName() + "\"", e);
        }
    }
    // Map the on-wire flag byte back to a SerializationStrategy, rejecting unknown values.
    private SerializationStrategy parseSerializationStrategyFlag(final int serializationStrategyFlag) throws IOException {
        if (serializationStrategyFlag < 0 || serializationStrategyFlag >= SerializationStrategy.VALUES.length) {
            throw new SerializationException("Invalid serialization strategy flag value");
        }
        return SerializationStrategy.VALUES[serializationStrategyFlag];
    }
    // Read the null-index list: a count followed by that many entry indexes.
    private List<Integer> deserializeNullIndexList(final DataInputStream dis) throws IOException {
        int nullIndexListSize = dis.readInt();
        List<Integer> nullIndexList = new ArrayList<>(nullIndexListSize);
        while (nullIndexListSize != 0) {
            nullIndexList.add(dis.readInt());
            nullIndexListSize--;
        }
        return nullIndexList;
    }
    /**
     * Deserialize a byte array produced by {@code ListSerializer} back into a list.
     *
     * @param topic topic associated with the data; forwarded to the inner deserializer
     * @param data serialized bytes; may be null
     * @return the deserialized list, or null if {@code data} is null
     * @throws SerializationException if the payload is truncated or structurally invalid
     */
    @Override
    public List<Inner> deserialize(String topic, byte[] data) {
        if (data == null) {
            return null;
        }
        try (final DataInputStream dis = new DataInputStream(new ByteArrayInputStream(data))) {
            SerializationStrategy serStrategy = parseSerializationStrategyFlag(dis.readByte());
            List<Integer> nullIndexList = null;
            if (serStrategy == SerializationStrategy.CONSTANT_SIZE) {
                // In CONSTANT_SIZE strategy, indexes of null entries are decoded from a null index list
                nullIndexList = deserializeNullIndexList(dis);
            }
            final int size = dis.readInt();
            List<Inner> deserializedList = createListInstance(size);
            for (int i = 0; i < size; i++) {
                int entrySize = serStrategy == SerializationStrategy.CONSTANT_SIZE ? primitiveSize : dis.readInt();
                if (entrySize == ListSerde.NULL_ENTRY_VALUE || (nullIndexList != null && nullIndexList.contains(i))) {
                    deserializedList.add(null);
                    continue;
                }
                if (entrySize < 0) {
                    // Guard against corrupted input: any negative length other than the null
                    // sentinel would otherwise surface as a NegativeArraySizeException.
                    throw new SerializationException("Invalid entry size " + entrySize + " in serialized list");
                }
                byte[] payload = new byte[entrySize];
                // InputStream.read(byte[]) may legally return fewer bytes than requested, so a
                // single call with an "== -1" check could silently accept a truncated,
                // zero-padded payload. Read in a loop until the buffer is full.
                int offset = 0;
                while (offset < entrySize) {
                    final int bytesRead = dis.read(payload, offset, entrySize - offset);
                    if (bytesRead == -1) {
                        log.error("Ran out of bytes in serialized list");
                        log.trace("Deserialized list so far: {}", deserializedList); // avoid logging actual data above TRACE level since it may contain sensitive information
                        throw new SerializationException("End of the stream was reached prematurely");
                    }
                    offset += bytesRead;
                }
                deserializedList.add(inner.deserialize(topic, payload));
            }
            return deserializedList;
        } catch (IOException e) {
            throw new KafkaException("Unable to deserialize into a List", e);
        }
    }
    @Override
    public void close() {
        if (inner != null) {
            inner.close();
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ListSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import java.util.ArrayList;
import java.util.Iterator;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.common.KafkaException;
import org.apache.kafka.common.config.ConfigException;
import org.apache.kafka.common.utils.Utils;
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Arrays;
import java.util.List;
import java.util.Map;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import static org.apache.kafka.common.serialization.Serdes.ListSerde.SerializationStrategy;
/**
 * A {@link Serializer} for {@code List<Inner>} that delegates each entry to an inner serializer.
 * <p>
 * The wire format written here is: a one-byte serialization-strategy flag; for the CONSTANT_SIZE
 * strategy a null-index list (a count followed by the indexes of null entries); the number of
 * entries; then the entries themselves, each prefixed with its byte length in the VARIABLE_SIZE
 * strategy (null entries are encoded as a NULL_ENTRY_VALUE length).
 */
public class ListSerializer<Inner> implements Serializer<List<Inner>> {
    final Logger log = LoggerFactory.getLogger(ListSerializer.class);
    // Serializers whose output always has a fixed byte length; lists of these can be written
    // with the CONSTANT_SIZE strategy, omitting per-entry length prefixes.
    private static final List<Class<? extends Serializer<?>>> FIXED_LENGTH_SERIALIZERS = Arrays.asList(
        ShortSerializer.class,
        IntegerSerializer.class,
        FloatSerializer.class,
        LongSerializer.class,
        DoubleSerializer.class,
        UUIDSerializer.class);
    private Serializer<Inner> inner;
    private SerializationStrategy serStrategy;
    public ListSerializer() {}
    /**
     * @param inner serializer for the individual entries; must not be null
     * @throws IllegalArgumentException if {@code inner} is null
     */
    public ListSerializer(Serializer<Inner> inner) {
        if (inner == null) {
            throw new IllegalArgumentException("ListSerializer requires \"serializer\" parameter to be provided during initialization");
        }
        this.inner = inner;
        this.serStrategy = FIXED_LENGTH_SERIALIZERS.contains(inner.getClass()) ? SerializationStrategy.CONSTANT_SIZE : SerializationStrategy.VARIABLE_SIZE;
    }
    public Serializer<Inner> getInnerSerializer() {
        return inner;
    }
    /**
     * Configure the inner serializer from config, for instances created via the default
     * constructor. The inner serde may be given either as a class name or a Class object.
     *
     * @throws ConfigException if this instance was already initialized via the non-default
     *         constructor, or if the required config entry is missing or invalid
     */
    @SuppressWarnings("unchecked")
    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        if (inner != null) {
            log.error("Could not configure ListSerializer as the parameter has already been set -- inner: {}", inner);
            throw new ConfigException("List serializer was already initialized using a non-default constructor");
        }
        final String innerSerdePropertyName = isKey ? CommonClientConfigs.DEFAULT_LIST_KEY_SERDE_INNER_CLASS : CommonClientConfigs.DEFAULT_LIST_VALUE_SERDE_INNER_CLASS;
        final Object innerSerdeClassOrName = configs.get(innerSerdePropertyName);
        if (innerSerdeClassOrName == null) {
            throw new ConfigException("Not able to determine the serializer class because it was neither passed via the constructor nor set in the config.");
        }
        try {
            if (innerSerdeClassOrName instanceof String) {
                inner = Utils.newInstance((String) innerSerdeClassOrName, Serde.class).serializer();
            } else if (innerSerdeClassOrName instanceof Class) {
                inner = (Serializer<Inner>) ((Serde) Utils.newInstance((Class) innerSerdeClassOrName)).serializer();
            } else {
                throw new KafkaException("Could not create a serializer class instance using \"" + innerSerdePropertyName + "\" property.");
            }
            inner.configure(configs, isKey);
            serStrategy = FIXED_LENGTH_SERIALIZERS.contains(inner.getClass()) ? SerializationStrategy.CONSTANT_SIZE : SerializationStrategy.VARIABLE_SIZE;
        } catch (final ClassNotFoundException e) {
            throw new ConfigException(innerSerdePropertyName, innerSerdeClassOrName, "Serializer class " + innerSerdeClassOrName + " could not be found.");
        }
    }
    // Write the null-index list: the count of null entries followed by their list indexes.
    private void serializeNullIndexList(final DataOutputStream out, List<Inner> data) throws IOException {
        int i = 0;
        List<Integer> nullIndexList = new ArrayList<>();
        for (Iterator<Inner> it = data.listIterator(); it.hasNext(); i++) {
            if (it.next() == null) {
                nullIndexList.add(i);
            }
        }
        out.writeInt(nullIndexList.size());
        for (int nullIndex : nullIndexList) {
            out.writeInt(nullIndex);
        }
    }
    /**
     * Serialize a list into the byte format described on the class javadoc.
     *
     * @param topic topic associated with the data; forwarded to the inner serializer
     * @param data the list to serialize; may be null
     * @return the serialized bytes, or null if {@code data} is null
     */
    @Override
    public byte[] serialize(String topic, List<Inner> data) {
        if (data == null) {
            return null;
        }
        try (final ByteArrayOutputStream baos = new ByteArrayOutputStream();
            final DataOutputStream out = new DataOutputStream(baos)) {
            out.writeByte(serStrategy.ordinal()); // write serialization strategy flag
            if (serStrategy == SerializationStrategy.CONSTANT_SIZE) {
                // In CONSTANT_SIZE strategy, indexes of null entries are encoded in a null index list
                serializeNullIndexList(out, data);
            }
            final int size = data.size();
            out.writeInt(size);
            for (Inner entry : data) {
                if (entry == null) {
                    // CONSTANT_SIZE writes nothing for nulls: they are recoverable from the
                    // null-index list; VARIABLE_SIZE marks them with the sentinel length.
                    if (serStrategy == SerializationStrategy.VARIABLE_SIZE) {
                        out.writeInt(Serdes.ListSerde.NULL_ENTRY_VALUE);
                    }
                } else {
                    final byte[] bytes = inner.serialize(topic, entry);
                    if (serStrategy == SerializationStrategy.VARIABLE_SIZE) {
                        out.writeInt(bytes.length);
                    }
                    out.write(bytes);
                }
            }
            return baos.toByteArray();
        } catch (IOException e) {
            log.error("Failed to serialize list due to", e);
            log.trace("List that could not be serialized: {}", data); // avoid logging actual data above TRACE level since it may contain sensitive information
            throw new KafkaException("Failed to serialize List", e);
        }
    }
    @Override
    public void close() {
        if (inner != null) {
            inner.close();
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/LongDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
public class LongDeserializer implements Deserializer<Long> {
public Long deserialize(String topic, byte[] data) {
if (data == null)
return null;
if (data.length != 8) {
throw new SerializationException("Size of data received by LongDeserializer is not 8");
}
long value = 0;
for (byte b : data) {
value <<= 8;
value |= b & 0xFF;
}
return value;
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/LongSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class LongSerializer implements Serializer<Long> {
public byte[] serialize(String topic, Long data) {
if (data == null)
return null;
return new byte[] {
(byte) (data >>> 56),
(byte) (data >>> 48),
(byte) (data >>> 40),
(byte) (data >>> 32),
(byte) (data >>> 24),
(byte) (data >>> 16),
(byte) (data >>> 8),
data.byteValue()
};
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/Serde.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import java.io.Closeable;
import java.util.Map;
/**
* The interface for wrapping a serializer and deserializer for the given data type.
*
* @param <T> Type to be serialized from and deserialized into.
*
* A class that implements this interface is expected to have a constructor with no parameter.
*/
public interface Serde<T> extends Closeable {
    /**
     * Configure this class, which will configure the underlying serializer and deserializer.
     *
     * @param configs configs in key/value pairs
     * @param isKey whether is for key or value
     */
    default void configure(Map<String, ?> configs, boolean isKey) {
        // intentionally left blank
    }
    /**
     * Close this serde class, which will close the underlying serializer and deserializer.
     * <p>
     * This method has to be idempotent because it might be called multiple times.
     */
    @Override
    default void close() {
        // intentionally left blank
    }
    /**
     * @return the serializer half of this serde
     */
    Serializer<T> serializer();
    /**
     * @return the deserializer half of this serde
     */
    Deserializer<T> deserializer();
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/Serdes.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.utils.Bytes;
import java.nio.ByteBuffer;
import java.util.List;
import java.util.Map;
import java.util.UUID;
/**
* Factory for creating serializers / deserializers.
*/
public class Serdes {
static public class WrapperSerde<T> implements Serde<T> {
final private Serializer<T> serializer;
final private Deserializer<T> deserializer;
public WrapperSerde(Serializer<T> serializer, Deserializer<T> deserializer) {
this.serializer = serializer;
this.deserializer = deserializer;
}
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
serializer.configure(configs, isKey);
deserializer.configure(configs, isKey);
}
@Override
public void close() {
serializer.close();
deserializer.close();
}
@Override
public Serializer<T> serializer() {
return serializer;
}
@Override
public Deserializer<T> deserializer() {
return deserializer;
}
}
static public final class VoidSerde extends WrapperSerde<Void> {
public VoidSerde() {
super(new VoidSerializer(), new VoidDeserializer());
}
}
static public final class LongSerde extends WrapperSerde<Long> {
public LongSerde() {
super(new LongSerializer(), new LongDeserializer());
}
}
static public final class IntegerSerde extends WrapperSerde<Integer> {
public IntegerSerde() {
super(new IntegerSerializer(), new IntegerDeserializer());
}
}
static public final class ShortSerde extends WrapperSerde<Short> {
public ShortSerde() {
super(new ShortSerializer(), new ShortDeserializer());
}
}
static public final class FloatSerde extends WrapperSerde<Float> {
public FloatSerde() {
super(new FloatSerializer(), new FloatDeserializer());
}
}
static public final class DoubleSerde extends WrapperSerde<Double> {
public DoubleSerde() {
super(new DoubleSerializer(), new DoubleDeserializer());
}
}
static public final class StringSerde extends WrapperSerde<String> {
public StringSerde() {
super(new StringSerializer(), new StringDeserializer());
}
}
static public final class ByteBufferSerde extends WrapperSerde<ByteBuffer> {
public ByteBufferSerde() {
super(new ByteBufferSerializer(), new ByteBufferDeserializer());
}
}
static public final class BytesSerde extends WrapperSerde<Bytes> {
public BytesSerde() {
super(new BytesSerializer(), new BytesDeserializer());
}
}
static public final class ByteArraySerde extends WrapperSerde<byte[]> {
public ByteArraySerde() {
super(new ByteArraySerializer(), new ByteArrayDeserializer());
}
}
static public final class UUIDSerde extends WrapperSerde<UUID> {
public UUIDSerde() {
super(new UUIDSerializer(), new UUIDDeserializer());
}
}
static public final class BooleanSerde extends WrapperSerde<Boolean> {
public BooleanSerde() {
super(new BooleanSerializer(), new BooleanDeserializer());
}
}
static public final class ListSerde<Inner> extends WrapperSerde<List<Inner>> {
final static int NULL_ENTRY_VALUE = -1;
enum SerializationStrategy {
CONSTANT_SIZE,
VARIABLE_SIZE;
public static final SerializationStrategy[] VALUES = SerializationStrategy.values();
}
public ListSerde() {
super(new ListSerializer<>(), new ListDeserializer<>());
}
public <L extends List<Inner>> ListSerde(Class<L> listClass, Serde<Inner> serde) {
super(new ListSerializer<>(serde.serializer()), new ListDeserializer<>(listClass, serde.deserializer()));
}
}
@SuppressWarnings("unchecked")
static public <T> Serde<T> serdeFrom(Class<T> type) {
if (String.class.isAssignableFrom(type)) {
return (Serde<T>) String();
}
if (Short.class.isAssignableFrom(type)) {
return (Serde<T>) Short();
}
if (Integer.class.isAssignableFrom(type)) {
return (Serde<T>) Integer();
}
if (Long.class.isAssignableFrom(type)) {
return (Serde<T>) Long();
}
if (Float.class.isAssignableFrom(type)) {
return (Serde<T>) Float();
}
if (Double.class.isAssignableFrom(type)) {
return (Serde<T>) Double();
}
if (byte[].class.isAssignableFrom(type)) {
return (Serde<T>) ByteArray();
}
if (ByteBuffer.class.isAssignableFrom(type)) {
return (Serde<T>) ByteBuffer();
}
if (Bytes.class.isAssignableFrom(type)) {
return (Serde<T>) Bytes();
}
if (UUID.class.isAssignableFrom(type)) {
return (Serde<T>) UUID();
}
if (Boolean.class.isAssignableFrom(type)) {
return (Serde<T>) Boolean();
}
// TODO: we can also serializes objects of type T using generic Java serialization by default
throw new IllegalArgumentException("Unknown class for built-in serializer. Supported types are: " +
"String, Short, Integer, Long, Float, Double, ByteArray, ByteBuffer, Bytes, UUID, Boolean");
}
/**
 * Construct a serde object from separate serializer and deserializer.
 *
 * @param serializer must not be null
 * @param deserializer must not be null
 * @return a serde wrapping the two components
 * @throws IllegalArgumentException if either argument is null
 */
public static <T> Serde<T> serdeFrom(final Serializer<T> serializer, final Deserializer<T> deserializer) {
    if (serializer == null) {
        throw new IllegalArgumentException("serializer must not be null");
    }
    if (deserializer == null) {
        throw new IllegalArgumentException("deserializer must not be null");
    }
    return new WrapperSerde<>(serializer, deserializer);
}
/**
 * A serde for nullable {@code Long} type.
 */
public static Serde<Long> Long() {
    return new LongSerde();
}
/**
 * A serde for nullable {@code Integer} type.
 */
public static Serde<Integer> Integer() {
    return new IntegerSerde();
}
/**
 * A serde for nullable {@code Short} type.
 */
public static Serde<Short> Short() {
    return new ShortSerde();
}
/**
 * A serde for nullable {@code Float} type.
 */
public static Serde<Float> Float() {
    return new FloatSerde();
}
/**
 * A serde for nullable {@code Double} type.
 */
public static Serde<Double> Double() {
    return new DoubleSerde();
}
/**
 * A serde for nullable {@code String} type.
 */
public static Serde<String> String() {
    return new StringSerde();
}
/**
 * A serde for nullable {@code ByteBuffer} type.
 */
public static Serde<ByteBuffer> ByteBuffer() {
    return new ByteBufferSerde();
}
/**
 * A serde for nullable {@code Bytes} type.
 */
public static Serde<Bytes> Bytes() {
    return new BytesSerde();
}
/**
 * A serde for nullable {@code UUID} type.
 */
public static Serde<UUID> UUID() {
    return new UUIDSerde();
}
/**
 * A serde for nullable {@code Boolean} type.
 */
public static Serde<Boolean> Boolean() {
    return new BooleanSerde();
}
/**
 * A serde for nullable {@code byte[]} type.
 */
public static Serde<byte[]> ByteArray() {
    return new ByteArraySerde();
}
/**
 * A serde for {@code Void} type.
 */
public static Serde<Void> Void() {
    return new VoidSerde();
}
/**
 * A serde for {@code List} type.
 *
 * @param listClass the concrete list class used when deserializing
 * @param innerSerde the serde for list elements
 */
public static <L extends List<Inner>, Inner> Serde<List<Inner>> ListSerde(Class<L> listClass, Serde<Inner> innerSerde) {
    return new ListSerde<>(listClass, innerSerde);
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/Serializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.header.Headers;
import java.io.Closeable;
import java.util.Map;
/**
* An interface for converting objects to bytes.
*
* A class that implements this interface is expected to have a constructor with no parameter.
* <p>
* Implement {@link org.apache.kafka.common.ClusterResourceListener} to receive cluster metadata once it's available. Please see the class documentation for ClusterResourceListener for more information.
*
* @param <T> Type to be serialized from.
*/
public interface Serializer<T> extends Closeable {
    /**
     * Configure this class.
     * <p>
     * The default implementation is a no-op; override it when the serializer needs settings.
     *
     * @param configs configs in key/value pairs
     * @param isKey whether is for key or value
     */
    default void configure(Map<String, ?> configs, boolean isKey) {
        // intentionally left blank
    }
    /**
     * Convert {@code data} into a byte array.
     *
     * @param topic topic associated with data
     * @param data typed data
     * @return serialized bytes
     */
    byte[] serialize(String topic, T data);
    /**
     * Convert {@code data} into a byte array.
     * <p>
     * The default implementation ignores {@code headers} and delegates to
     * {@link #serialize(String, Object)}.
     *
     * @param topic topic associated with data
     * @param headers headers associated with the record
     * @param data typed data
     * @return serialized bytes
     */
    default byte[] serialize(String topic, Headers headers, T data) {
        return serialize(topic, data);
    }
    /**
     * Close this serializer.
     * <p>
     * This method must be idempotent as it may be called multiple times.
     */
    @Override
    default void close() {
        // intentionally left blank
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ShortDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
public class ShortDeserializer implements Deserializer<Short> {
public Short deserialize(String topic, byte[] data) {
if (data == null)
return null;
if (data.length != 2) {
throw new SerializationException("Size of data received by ShortDeserializer is not 2");
}
short value = 0;
for (byte b : data) {
value <<= 8;
value |= b & 0xFF;
}
return value;
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/ShortSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class ShortSerializer implements Serializer<Short> {
public byte[] serialize(String topic, Short data) {
if (data == null)
return null;
return new byte[] {
(byte) (data >>> 8),
data.byteValue()
};
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/StringDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
* String encoding defaults to UTF8 and can be customized by setting the property key.deserializer.encoding,
* value.deserializer.encoding or deserializer.encoding. The first two take precedence over the last.
*/
public class StringDeserializer implements Deserializer<String> {
private String encoding = StandardCharsets.UTF_8.name();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
String propertyName = isKey ? "key.deserializer.encoding" : "value.deserializer.encoding";
Object encodingValue = configs.get(propertyName);
if (encodingValue == null)
encodingValue = configs.get("deserializer.encoding");
if (encodingValue instanceof String)
encoding = (String) encodingValue;
}
@Override
public String deserialize(String topic, byte[] data) {
try {
if (data == null)
return null;
else
return new String(data, encoding);
} catch (UnsupportedEncodingException e) {
throw new SerializationException("Error when deserializing byte[] to string due to unsupported encoding " + encoding);
}
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/StringSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
/**
* String encoding defaults to UTF8 and can be customized by setting the property key.serializer.encoding,
* value.serializer.encoding or serializer.encoding. The first two take precedence over the last.
*/
public class StringSerializer implements Serializer<String> {
private String encoding = StandardCharsets.UTF_8.name();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
String propertyName = isKey ? "key.serializer.encoding" : "value.serializer.encoding";
Object encodingValue = configs.get(propertyName);
if (encodingValue == null)
encodingValue = configs.get("serializer.encoding");
if (encodingValue instanceof String)
encoding = (String) encodingValue;
}
@Override
public byte[] serialize(String topic, String data) {
try {
if (data == null)
return null;
else
return data.getBytes(encoding);
} catch (UnsupportedEncodingException e) {
throw new SerializationException("Error when serializing string to byte[] due to unsupported encoding " + encoding);
}
}
} |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/UUIDDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
/**
 * We are converting the byte array to String before deserializing to UUID. String encoding defaults to UTF8 and can be customized by setting
 * the property key.deserializer.encoding, value.deserializer.encoding or deserializer.encoding. The first two take precedence over the last.
 */
public class UUIDDeserializer implements Deserializer<UUID> {
    private String encoding = StandardCharsets.UTF_8.name();

    @Override
    public void configure(Map<String, ?> configs, boolean isKey) {
        // The key/value-specific property wins; fall back to the generic one.
        String specificKey = isKey ? "key.deserializer.encoding" : "value.deserializer.encoding";
        Object configured = configs.get(specificKey);
        if (configured == null) {
            configured = configs.get("deserializer.encoding");
        }
        if (configured instanceof String) {
            encoding = (String) configured;
        }
    }

    /**
     * Decode the byte array as a string in the configured encoding, then parse it as a UUID.
     */
    @Override
    public UUID deserialize(String topic, byte[] data) {
        if (data == null) {
            return null;
        }
        try {
            return UUID.fromString(new String(data, encoding));
        } catch (UnsupportedEncodingException e) {
            throw new SerializationException("Error when deserializing byte[] to UUID due to unsupported encoding " + encoding, e);
        } catch (IllegalArgumentException e) {
            throw new SerializationException("Error parsing data into UUID", e);
        }
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/UUIDSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
import org.apache.kafka.common.errors.SerializationException;
import java.io.UnsupportedEncodingException;
import java.nio.charset.StandardCharsets;
import java.util.Map;
import java.util.UUID;
/**
* We are converting UUID to String before serializing. String encoding defaults to UTF8 and can be customized by setting
* the property key.deserializer.encoding, value.deserializer.encoding or deserializer.encoding. The first two take precedence over the last.
*/
public class UUIDSerializer implements Serializer<UUID> {
private String encoding = StandardCharsets.UTF_8.name();
@Override
public void configure(Map<String, ?> configs, boolean isKey) {
String propertyName = isKey ? "key.serializer.encoding" : "value.serializer.encoding";
Object encodingValue = configs.get(propertyName);
if (encodingValue == null)
encodingValue = configs.get("serializer.encoding");
if (encodingValue instanceof String)
encoding = (String) encodingValue;
}
@Override
public byte[] serialize(String topic, UUID data) {
try {
if (data == null)
return null;
else
return data.toString().getBytes(encoding);
} catch (UnsupportedEncodingException e) {
throw new SerializationException("Error when serializing UUID to byte[] due to unsupported encoding " + encoding);
}
}
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/VoidDeserializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class VoidDeserializer implements Deserializer<Void> {
    /**
     * Deserialize a void record: the only valid payload is null.
     */
    @Override
    public Void deserialize(String topic, byte[] data) {
        // A non-null payload violates the void-record contract.
        if (data == null) {
            return null;
        }
        throw new IllegalArgumentException("Data should be null for a VoidDeserializer.");
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/VoidSerializer.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.kafka.common.serialization;
public class VoidSerializer implements Serializer<Void> {
    /**
     * Serialize a {@code Void} value; there is no payload, so this always yields null.
     *
     * @param topic topic associated with the record (ignored)
     * @param data always null for the {@code Void} type
     * @return null
     */
    @Override
    public byte[] serialize(String topic, Void data) {
        return null;
    }
}
|
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/serialization/package-info.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Provides interface and some implementations of serialization/deserialization routines for various objects.
*/
package org.apache.kafka.common.serialization; |
0 | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common | java-sources/ai/superstream/kafka-clients/3.5.118/org/apache/kafka/common/superstream/Consts.java | package org.apache.kafka.common.superstream;
import java.util.ArrayList;
import java.util.List;
/**
 * Constants shared by the Superstream Kafka client integration: internal NATS
 * subject names, configuration keys, environment variable names and defaults.
 */
public class Consts {
    // Constant holder — not meant to be instantiated.
    private Consts() {
    }

    public static final String sdkVersion = "3.5.118";

    // Internal subjects used for client registration and updates.
    public static final String clientReconnectionUpdateSubject = "internal_tasks.clientReconnectionUpdate";
    public static final String clientTypeUpdateSubject = "internal.clientTypeUpdate";
    public static final String clientConfigUpdateSubject = "internal.clientConfigUpdate";
    public static final String clientRegisterSubject = "internal.registerClient";
    public static final String originalSerializer = "original.serializer";
    public static final String originalDeserializer = "original.deserializer";
    public static final String superstreamDefaultToken = "no-auth";
    public static final String superstreamErrorSubject = "internal.clientErrors";
    // %s placeholders are filled in with client/account identifiers at runtime.
    public static final String superstreamUpdatesSubject = "internal.updates.%s";
    public static final String superstreamClientsUpdateSubject = "internal_tasks.clientsUpdate.%s.%s";
    public static final String superstreamLearningSubject = "internal.schema.learnSchema.%s";
    public static final String superstreamRegisterSchemaSubject = "internal_tasks.schema.registerSchema.%s";
    public static final String superstreamInternalUsername = "superstream_internal";
    public static final String superstreamGetSchemaSubject = "internal.schema.getSchema.%s";
    public static final Integer superstreamDefaultLearningFactor = 20;

    // Client-side configuration property keys.
    public static final String superstreamLearningFactorKey = "superstream.learning.factor";
    public static final String superstreamTagsKey = "superstream.tags";
    public static final String superstreamHostKey = "superstream.host";
    public static final String superstreamTokenKey = "superstream.token";
    public static final String superstreamReductionEnabledKey = "superstream.reduction.enabled";
    public static final String superstreamCompressionEnabledKey = "superstream.compression.enabled";
    public static final String superstreamConnectionKey = "superstream.connection";
    public static final String superstreamInnerConsumerKey = "superstream.inner.consumer";
    public static final String superstreamMetadataTopic = "superstream.metadata";
    public static final String clientStartSubject = "internal.startClient.%s";

    // Client kinds.
    public static final String PRODUCER = "producer";
    public static final String CONSUMER = "consumer";
    public static final String ADMIN = "admin";
    public static final String[] CLIENT_TYPES_LIST = {PRODUCER, CONSUMER, ADMIN};

    public static final String OPTIMIZED_CONFIGURATION_KEY = "optimized_configuration";
    public static final String START_KEY = "start";
    public static final String ERROR_KEY = "error";

    // Timeouts and polling intervals, in milliseconds.
    public static final long MAX_TIME_WAIT_CAN_START = 10 * 60 * 1000;
    public static final long WAIT_INTERVAL_CAN_START = 3000;
    public static final long WAIT_INTERVAL_SUPERSTREAM_CONFIG = 30;
    public static final long TIMEOUT_SUPERSTREAM_CONFIG_DEFAULT = 3000;

    // Environment variable names.
    public static final String SUPERSTREAM_RESPONSE_TIMEOUT_ENV_VAR = "SUPERSTREAM_RESPONSE_TIMEOUT";
    public static final String SUPERSTREAM_DEBUG_ENV_VAR_ENV_VAR = "SUPERSTREAM_DEBUG";
    public static final String SUPERSTREAM_REDUCTION_ENABLED_ENV_VAR = "SUPERSTREAM_REDUCTION_ENABLED";
    public static final String SUPERSTREAM_TAGS_ENV_VAR = "SUPERSTREAM_TAGS";
    public static final String SUPERSTREAM_LEARNING_FACTOR_ENV_VAR = "SUPERSTREAM_LEARNING_FACTOR";
    public static final String SUPERSTREAM_TOKEN_ENV_VAR = "SUPERSTREAM_TOKEN";
    public static final String SUPERSTREAM_HOST_ENV_VAR = "SUPERSTREAM_HOST";
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.