| code (string, lengths 73 to 34.1k) | label (string, 1 class) |
|---|---|
void setSICoreConnection(final SICoreConnection connection) {
if (TRACE.isEntryEnabled()) {
SibTr.entry(this, TRACE, "setSICoreConnection", connection);
}
_coreConnection = connection;
if (TRACE.isEntryEnabled()) {
SibTr.exit(this, TRACE, "setSICoreConnection"); //412795
}
}
|
java
|
@Override
@Trivial
public ZipFile open() throws IOException {
String methodName = "open";
synchronized( zipFileLock ) {
if ( zipFile == null ) {
debug(methodName, "Opening");
if ( zipFileReaper == null ) {
zipFile = ZipFileUtils.openZipFile(file); // throws IOException
} else {
zipFile = zipFileReaper.open(path);
}
}
openCount++;
debug(methodName, "Opened");
return zipFile;
}
}
|
java
|
@Override
@Trivial
public InputStream getInputStream(ZipFile useZipFile, ZipEntry zipEntry) throws IOException {
String methodName = "getInputStream";
String entryName = zipEntry.getName();
if ( zipEntry.isDirectory() ) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) {
debug(methodName, "Entry [ " + entryName + " ] [ null ] (Not using cache: Directory entry)");
}
return null;
}
long entrySize = zipEntry.getSize();
if ( entrySize == 0 ) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) {
debug(methodName, "Entry [ " + entryName + " ] [ empty stream ] (Not using cache: Empty entry)");
}
return EMPTY_STREAM;
}
boolean doNotCache;
String doNotCacheReason;
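// Cache policy (per the checks below): only the manifest and .class resources are
// cached, and only when the entry cache is enabled and the entry fits under the size limit.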
if ( zipEntries == null ) { // No entry cache.
doNotCache = true;
doNotCacheReason = "Do not cache: Entry cache disabled";
} else if ( entrySize > ZipCachingProperties.ZIP_CACHE_ENTRY_LIMIT) { // Too big for the cache
doNotCache = true;
doNotCacheReason = "Do not cache: Too big";
} else if ( entryName.equals("META-INF/MANIFEST.MF") ) {
doNotCache = false;
doNotCacheReason = "Cache META-INF/MANIFEST.MF";
} else if ( entryName.endsWith(".class") ) {
doNotCache = false;
doNotCacheReason = "Cache .class resources";
} else {
doNotCache = true;
doNotCacheReason = "Do not cache: Not manifest or class resource";
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled() ) {
debug(methodName, "Entry [ " + entryName + " ] [ non-null ] [ " + doNotCacheReason + " ]");
}
if ( doNotCache ) {
return useZipFile.getInputStream(zipEntry); // throws IOException
}
// The ":::" separators might seem to allow non-unique cache keys. Duplicate
// keys *are not* possible because the CRC and last-modified values are numeric.
// Duplicate keys would only be possible if the CRC or last-modified values, when
// converted to strings, could contain ":::" character sequences.
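// For example (illustrative names): "com/example/Foo.class:::1234567890:::1600000000000"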
String entryCacheKey =
entryName +
":::" + Long.toString( zipEntry.getCrc() ) +
":::" + Long.toString( getLastModified() );
// Note that only the individual gets and puts are protected.
//
// That means that simultaneous get misses are possible, which
// will result in double reads and double puts.
//
// That is unfortunate, but is harmless.
//
// The simultaneous puts are allowed because they should be very
// rare.
//
// They are allowed because blocking entry gets while waiting for
// reads could create large delays.
byte[] entryBytes;
synchronized( zipEntriesLock ) {
entryBytes = zipEntries.get(entryCacheKey);
}
if ( entryBytes == null ) {
InputStream inputStream = useZipFile.getInputStream(zipEntry); // throws IOException
try {
entryBytes = read(inputStream, (int) entrySize, entryName); // throws IOException
} finally {
inputStream.close(); // throws IOException
}
synchronized( zipEntriesLock ) {
zipEntries.put(entryCacheKey, entryBytes);
}
}
return new ByteArrayInputStream(entryBytes);
}
|
java
|
@Trivial
private static byte[] read(InputStream inputStream, int expectedRead, String name) throws IOException {
byte[] bytes = new byte[expectedRead];
int remainingRead = expectedRead;
int totalRead = 0;
while ( remainingRead > 0 ) {
int nextRead = inputStream.read(bytes, totalRead, remainingRead); // throws IOException
if ( nextRead <= 0 ) {
// 'nextRead == 0' should only ever happen if 'remainingRead == 0', which ought
// never to be the case here. Treat a '0' return value as an error.
//
// 'nextRead == -1' means the end of input was reached.
throw new IOException(
"Read only [ " + Integer.valueOf(totalRead) + " ]" +
" of expected [ " + Integer.valueOf(expectedRead) + " ] bytes" +
" from [ " + name + " ]");
} else {
remainingRead -= nextRead;
totalRead += nextRead;
}
}
return bytes;
}
|
java
|
private String findVersion() {
WlpInformation wlp = _asset.getWlpInformation();
if (wlp == null) {
return null;
}
Collection<AppliesToFilterInfo> filterInfo = wlp.getAppliesToFilterInfo();
if (filterInfo == null) {
return null;
}
for (AppliesToFilterInfo filter : filterInfo) {
if (filter.getMinVersion() != null) {
return filter.getMinVersion().getValue();
}
}
return null;
}
|
java
|
private void addVersionDisplayString() {
WlpInformation wlp = _asset.getWlpInformation();
JavaSEVersionRequirements reqs = wlp.getJavaSEVersionRequirements();
if (reqs == null) {
return;
}
String minVersion = reqs.getMinVersion();
// Null means no requirements specified which is fine
if (minVersion == null) {
return;
}
String minJava11 = "Java SE 11";
String minJava8 = "Java SE 8, Java SE 11";
String minJava7 = "Java SE 7, Java SE 8, Java SE 11";
String minJava6 = "Java SE 6, Java SE 7, Java SE 8, Java SE 11";
// TODO: Temporary special case for jdbc-4.3 (the first feature to require Java >8)
// Once all builds are upgraded to Java 11+, we can remove this workaround
if ("jdbc-4.3".equals(wlp.getLowerCaseShortName())) {
reqs.setVersionDisplayString(minJava11);
return;
}
// The min version should have been validated when the ESA was constructed
// so checking for the version string should be safe
if (minVersion.equals("1.6.0")) {
reqs.setVersionDisplayString(minJava6);
return;
}
if (minVersion.equals("1.7.0")) {
reqs.setVersionDisplayString(minJava7);
return;
}
if (minVersion.equals("1.8.0")) {
reqs.setVersionDisplayString(minJava8);
return;
}
if (minVersion.startsWith("9.") ||
minVersion.startsWith("10.") ||
minVersion.startsWith("11.")) {
// If a feature requires a min of Java 9/10/11, state Java 11 is required because
// Liberty does not officially support Java 9 or 10
reqs.setVersionDisplayString(minJava11);
return;
}
// The min version string has been generated/validated incorrectly
// Can't recover from this, it is a bug in EsaUploader
throw new AssertionError("Unrecognized java version: " + minVersion);
}
|
java
|
private void removeRequireFeatureWithToleratesIfExists(String feature) {
Collection<RequireFeatureWithTolerates> rfwt = _asset.getWlpInformation().getRequireFeatureWithTolerates();
if (rfwt != null) {
for (RequireFeatureWithTolerates toCheck : rfwt) {
if (toCheck.getFeature().equals(feature)) {
rfwt.remove(toCheck);
return;
}
}
}
}
|
java
|
private void copyRequireFeatureToRequireFeatureWithTolerates() {
Collection<RequireFeatureWithTolerates> rfwt = _asset.getWlpInformation().getRequireFeatureWithTolerates();
if (rfwt != null) {
// Both fields (with and without tolerates) should exist, as
// rfwt should not be created unless the other field is created first.
// No need to copy, as the two fields should always be in sync
return;
}
Collection<String> requireFeature = _asset.getWlpInformation().getRequireFeature();
if (requireFeature == null) {
// Neither field exists, no need to copy
return;
}
// We have the requireFeature field but not rfwt, so copy info into
// the new field (rfwt).
Collection<RequireFeatureWithTolerates> newOne = new HashSet<RequireFeatureWithTolerates>();
for (String feature : requireFeature) {
RequireFeatureWithTolerates newFeature = new RequireFeatureWithTolerates();
newFeature.setFeature(feature);
newFeature.setTolerates(Collections.<String> emptyList());
newOne.add(newFeature);
}
_asset.getWlpInformation().setRequireFeatureWithTolerates(newOne);
}
|
java
|
public static boolean isClassVetoed(Class<?> type) {
if (type.isAnnotationPresent(Vetoed.class)) {
return true;
}
return isPackageVetoed(type.getPackage());
}
|
java
|
private Map<String, String> populateCommonAuthzHeaderParams() {
Map<String, String> parameters = new HashMap<String, String>();
parameters.put(TwitterConstants.PARAM_OAUTH_CONSUMER_KEY, consumerKey);
parameters.put(TwitterConstants.PARAM_OAUTH_NONCE, Utils.generateNonce());
parameters.put(TwitterConstants.PARAM_OAUTH_SIGNATURE_METHOD, DEFAULT_SIGNATURE_ALGORITHM);
parameters.put(TwitterConstants.PARAM_OAUTH_TIMESTAMP, Utils.getCurrentTimestamp());
parameters.put(TwitterConstants.PARAM_OAUTH_VERSION, DEFAULT_OAUTH_VERSION);
return parameters;
}
|
java
|
private String signAndCreateAuthzHeader(String endpointUrl, Map<String, String> parameters) {
String signature = computeSignature(requestMethod, endpointUrl, parameters);
parameters.put(TwitterConstants.PARAM_OAUTH_SIGNATURE, signature);
String authzHeaderString = createAuthorizationHeaderString(parameters);
return authzHeaderString;
}
|
java
|
public Map<String, Object> populateJsonResponse(String responseBody) throws JoseException {
if (responseBody == null || responseBody.isEmpty()) {
return null;
}
return JsonUtil.parseJson(responseBody);
}
|
java
|
@FFDCIgnore(SocialLoginException.class)
@Sensitive
public Map<String, Object> executeRequest(SocialLoginConfig config, String requestMethod, String authzHeaderString, String url, String endpointType, String verifierValue) {
if (endpointType == null) {
endpointType = TwitterConstants.TWITTER_ENDPOINT_REQUEST_TOKEN;
if (tc.isDebugEnabled()) {
Tr.debug(tc, "A Twitter endpoint path was not found; defaulting to using " + endpointType + " as the Twitter endpoint path.");
}
}
try {
SocialUtil.validateEndpointWithQuery(url);
} catch (SocialLoginException e) {
return createErrorResponse(e);
}
StringBuilder uri = new StringBuilder(url);
if (endpointType.equals(TwitterConstants.TWITTER_ENDPOINT_VERIFY_CREDENTIALS)) {
// Include the include_email and skip_status parameters for these endpoint requests
uri.append("?").append(TwitterConstants.PARAM_INCLUDE_EMAIL).append("=").append(TwitterConstants.INCLUDE_EMAIL).append("&").append(TwitterConstants.PARAM_SKIP_STATUS).append("=").append(TwitterConstants.SKIP_STATUS);
}
try {
Map<String, Object> result = getEndpointResponse(config, uri.toString(), requestMethod, authzHeaderString, endpointType, verifierValue);
String responseContent = httpUtil.extractTokensFromResponse(result);
return evaluateRequestResponse(responseContent, endpointType);
} catch (SocialLoginException e) {
return createErrorResponse("TWITTER_EXCEPTION_EXECUTING_REQUEST", new Object[] { url, e.getLocalizedMessage() });
}
}
|
java
|
@FFDCIgnore(IllegalStateException.class)
private void updateMonitorService() {
if ( !coveringPaths.isEmpty() ) {
if ( service == null ) {
try {
// If we are shutting down, we want to generate the exception quickly.
BundleContext bundleContext = getContainerFactoryHolder().getBundleContext();
// throws 'IllegalStateException'
setServiceProperties();
service = bundleContext.registerService(FileMonitor.class, this, serviceProperties);
// See comments on 'loadZipEntries' for why the entries must be loaded now.
loadZipEntries();
} catch ( IllegalStateException e ) {
// Ignore; the framework is shutting down.
}
} else {
// Do nothing: There is already a service registration.
}
} else {
if ( service != null ) {
try {
service.unregister();
} catch ( IllegalStateException e ) {
// Ignore; framework is shutting down.
}
service = null;
} else {
// Do nothing: There is already no service registration.
}
}
}
|
java
|
private void updateEnclosingMonitor() {
if ( !coveringPaths.isEmpty() ) {
if ( !listenerRegistered ) {
// This container is not yet registered to the enclosing container.
// Register this container.
ArtifactContainer enclosingRootContainer = entryInEnclosingContainer.getRoot();
// The path to register is the path of the enclosing entry.
ArtifactNotification enclosingNotification = new DefaultArtifactNotification(
enclosingRootContainer,
Collections.singleton( entryInEnclosingContainer.getPath() ) );
ArtifactNotifier enclosingRootNotifier = enclosingRootContainer.getArtifactNotifier();
// The enclosing container generally will accept the registration
// request. Just in case it doesn't, set the registration flag
// based on the registration result.
listenerRegistered = enclosingRootNotifier.registerForNotifications(enclosingNotification, this);
// The result is that any change to the enclosing container reaches
// this notifier through 'notifyEntryChange'.
// See comments on 'loadZipEntries' for why the entries must be loaded now.
loadZipEntries();
} else {
// Do nothing: The enclosing entry was already registered
// to the enclosing notifier.
}
} else {
if ( listenerRegistered ) {
// There are no listener registrations active on this container.
// Remove the registration of this listener.
//
// This listener should be registered exactly once to the enclosing
// container: Removing all registrations of this listener should remove
// that one registration, with no additional, unwanted registration changes.
ArtifactContainer enclosingRootContainer = entryInEnclosingContainer.getRoot();
ArtifactNotifier enclosingNotifier = enclosingRootContainer.getArtifactNotifier();
enclosingNotifier.removeListener(this);
// Clear the flag so that a later update can re-register this listener.
listenerRegistered = false;
} else {
// Do nothing: The enclosing monitor already did not have a registration
// for this container.
}
}
}
|
java
|
private boolean registerListener(String newPath, ArtifactListenerSelector newListener) {
boolean updatedCoveringPaths = addCoveringPath(newPath);
Collection<ArtifactListenerSelector> listenersForPath = listeners.get(newPath);
if ( listenersForPath == null ) {
// Each listeners collection is expected to be small.
listenersForPath = new LinkedList<ArtifactListenerSelector>();
listeners.put(newPath, listenersForPath);
}
listenersForPath.add(newListener);
return ( updatedCoveringPaths );
}
|
java
|
private boolean addCoveringPath(String newPath) {
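// Answers true if newPath was added as a new covering path, that is, it was not
// already covered by an equal or ancestor path. Any existing paths that newPath
// covers are removed, so the collection holds only the outermost paths.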
int newLen = newPath.length();
Iterator<String> useCoveringPaths = coveringPaths.iterator();
boolean isCovered = false;
boolean isCovering = false;
while ( !isCovered && useCoveringPaths.hasNext() ) {
String coveringPath = useCoveringPaths.next();
int coveringLen = coveringPath.length();
if ( coveringLen < newLen ) {
if ( isCovering ) {
continue; // Can't be covered.
} else {
if ( newPath.regionMatches(0, coveringPath, 0, coveringLen) ) {
if ( newPath.charAt(coveringLen) == '/' ) {
isCovered = true; // Covered: "covering/child" vs "covering"
break; // No need to continue: Can't be any additional relationships to find.
} else {
continue; // Dissimilar: "coveringX" vs "covering"
}
} else {
continue; // Dissimilar: "coverXngX" vs "covering"
}
}
} else if ( coveringLen == newLen ) {
if ( isCovering ) {
continue; // Can't be covered
} else {
if ( newPath.regionMatches(0, coveringPath, 0, coveringLen) ) {
isCovered = true; // Covered: "covering" vs "covering"
break; // No need to continue: Can't be any additional relationships to find.
} else {
continue; // "covering" vs "coverXng"
}
}
} else { // coveringLen > newLen
if ( newPath.regionMatches(0, coveringPath, 0, newLen) ) {
if ( coveringPath.charAt(newLen) == '/' ) {
isCovering = true;
useCoveringPaths.remove(); // Covering: "covering" vs "covering/child"
continue; // Look for other independent children: "covering/child1" and "covering/child2"
} else {
continue; // Dissimilar: "covering" vs "coveringX"
}
} else {
continue; // Dissimilar: "covering" vs "coverXngX"
}
}
}
if ( !isCovered ) {
coveringPaths.add(newPath);
}
return !isCovered;
}
|
java
|
@Trivial
private String validateNotification(Collection<?> added, Collection<?> removed, Collection<?> updated) {
boolean isAddition = !added.isEmpty();
boolean isRemoval = !removed.isEmpty();
boolean isUpdate = !updated.isEmpty();
if ( !isAddition && !isRemoval && !isUpdate ) {
// Should never occur:
// Completely null changes are detected and cause an early return
// before reaching the validation method.
return "null";
} else if ( isAddition ) {
return "Addition of [ " + added.toString() + " ]";
} else if ( isUpdate && isRemoval ) {
return "Update of [ " + updated.toString() + " ]" +
" with removal of [ " + removed.toString() + " ]";
} else {
return null;
}
}
|
java
|
private void notifyAllListeners(boolean isUpdate, String filter) {
// Can't reuse the registered paths collection across the loop
// because the listener notification can do processing in a new
// thread. Reusing the registered paths could cause a collision
// between the listener thread with this notification processing
// thread.
// TODO: Should the notification step be separated from the listener detection step?
// That might create large data structures, but would prevent the listeners
// from being locked during a possibly extensive steps of the listeners handling
// their notifications.
// TODO: See the comment, below. The notification step has been separated from the
// listener collection step.
List<QueuedNotification> notifications = null;
synchronized( listenersLock ) {
for ( Map.Entry<String, Collection<ArtifactListenerSelector>> listenersEntry : listeners.entrySet() ) {
List<String> a_registeredPaths = new ArrayList<String>();
collectRegisteredPaths( listenersEntry.getKey(), a_registeredPaths );
if ( a_registeredPaths.isEmpty() ) {
continue;
}
ArtifactNotification registeredPaths =
new DefaultArtifactNotification(rootContainer, a_registeredPaths);
for ( ArtifactListenerSelector listener : listenersEntry.getValue() ) {
if ( notifications == null ) {
notifications = new ArrayList<QueuedNotification>( listenersEntry.getValue().size() );
}
QueuedNotification notification = new QueuedNotification(isUpdate, registeredPaths, listener, filter);
notifications.add(notification);
// parm1: additions, parm2: removals, parm3: updates
// if ( isUpdate ) {
// listener.notifyEntryChange(emptyNotification, emptyNotification, registeredPaths);
// } else {
// listener.notifyEntryChange(emptyNotification, registeredPaths, emptyNotification);
// }
}
}
}
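// Fire the queued notifications outside of the listeners lock so that listener
// processing cannot block registration changes.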
if ( notifications != null ) {
for ( QueuedNotification notification : notifications ) {
notification.fire();
}
}
}
|
java
|
public AuthenticationService getAuthenticationService(SecurityService securityService) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.entry(tc, CLASS_NAME + "getAuthenticationService", securityService);
}
if(_authenticationService == null) {
if (securityService != null)
_authenticationService = securityService
.getAuthenticationService();
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.exit(tc, CLASS_NAME + "getAuthenticationService", _authenticationService);
}
return _authenticationService;
}
|
java
|
protected Subject login() {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.entry(tc, CLASS_NAME + "login");
}
Subject subject = null;
try {
/*
* We can only do authentication if the AuthenticationService is
* available. If it is not present we cannot do any
* authentication and hence we return null, which means
* authentication failed.
*/
if (_authenticationService != null) {
subject = _authenticationService.authenticate(MESSAGING_JASS_ENTRY_NAME,
_authenticationData, _partialSubject);
}
} catch (AuthenticationException ae) {
// No FFDC Required. We will throw exception if the subject is Null later
if(TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
SibTr.debug(tc, "EXCEPTION_OCCURED_DURING_AUTHENTICATION_MSE1001");
SibTr.exception(tc, ae);
}
} finally {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) {
SibTr.exit(tc, CLASS_NAME + "login");
}
}
return subject;
}
|
java
|
private void rejectHandshake(Conversation conversation, int requestNumber, String rejectedField) {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "rejectHandshake",
new Object[] { conversation, requestNumber, rejectedField });
SIConnectionLostException exception = new SIConnectionLostException(
nls.getFormattedMessage("INVALID_PROP_SICO8012", null, null)
);
FFDCFilter.processException(exception,
CLASS_NAME + ".rejectHandshake",
CommsConstants.COMMONSERVERRECEIVELISTENER_HSREJCT_01,
this);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
SibTr.debug(this, tc, "Invalid handshake type received - rejecting field:",
rejectedField);
StaticCATHelper.sendExceptionToClient(exception,
CommsConstants.COMMONSERVERRECEIVELISTENER_HSREJCT_01,
conversation,
requestNumber);
// At this point we really don't want anything more to do with this client - so close the connection
closeConnection(conversation);
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "rejectHandshake");
}
|
java
|
void register(CloudantService svc, ConcurrentMap<ClientKey, Object> clients) {
registrations.put(svc, clients);
}
|
java
|
@FFDCIgnore(NoSuchMethodException.class)
private void setRRSTransactional() {
try {
ivRRSTransactional = (Boolean) activationSpec.getClass().getMethod("getRRSTransactional").invoke(activationSpec);
} catch (NoSuchMethodException x) {
ivRRSTransactional = false;
} catch (Exception x) {
ivRRSTransactional = x == null; // always false - avoid a FindBugs warning by using the value of x in some trivial way
}
}
|
java
|
@Override
public void setJCAVersion(int majorJCAVer, int minorJCAVer) {
majorJCAVersion = majorJCAVer;
minorJCAVersion = minorJCAVer;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "MessageEndpointFactoryImpl.setJCAVersionJCA: Version " + majorJCAVersion + "." + minorJCAVersion + " is set");
}
}
|
java
|
private void setup(BeanMetaData bmd)
{
if (!ivSetup)
{
int slotSize = bmd.container.getEJBRuntime().getMetaDataSlotSize(MethodMetaData.class);
for (int i = 0; i < capacity; ++i)
{
EJBMethodInfoImpl methodInfo = bmd.createEJBMethodInfoImpl(slotSize);
methodInfo.initializeInstanceData(null, null, null, null, null, false);
elements[i] = methodInfo;
}
ivSetup = true;
}
}
|
java
|
public final void done(EJBMethodInfoImpl mi)
{
//d151861
if (orig || (mi == null) || (topOfStack == 0))
{
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "In orig mode returning:" + " orig: " + orig +
" top: " + topOfStack + " mi: " + mi);
orig = true;
elements = null;//d166651
}
//d151861
else
{
--topOfStack;
if (topOfStack < capacity)
{
//d156621
if (mi != (elements[topOfStack]))
{
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "EJBMethodInfoStack::done called with wrong " +
"TopOfStack value: " + mi + "!=" + (elements[topOfStack]));
orig = true;
elements = null;//d166651
}
else
elements[topOfStack].initializeInstanceData(null, null, null, // 199625
null, null, false);//d162441
//d156621
}
}
}
|
java
|
final public EJBMethodInfoImpl get(String methodSignature,
String methodNameOnly,
EJSWrapperBase wrapper,
MethodInterface methodInterface, // d164221
TransactionAttribute txAttr) // 199625
{
EJBMethodInfoImpl retVal = null;
BeanMetaData bmd = wrapper.bmd;
setup(bmd);// delay initting array so we can get slot count
//d151861
if ((topOfStack < 0) || orig)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
Tr.debug(tc, "EJBMethodInfoStack::get called with neg TopOfStack " +
"or in orig mode:" + topOfStack + " orig: " + orig);
orig = true;
elements = null;//d166651
retVal = bmd.createEJBMethodInfoImpl(bmd.container.getEJBRuntime().getMetaDataSlotSize(MethodMetaData.class));
}//d151861
else
{
if (topOfStack < elements.length)
{
retVal = elements[topOfStack++];
}
else
{
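// Beyond the preallocated capacity: keep counting the logical depth, but hand
// out a fresh (non-pooled) method info rather than growing the array.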
++topOfStack;
retVal = bmd.createEJBMethodInfoImpl(bmd.container.getEJBRuntime().getMetaDataSlotSize(MethodMetaData.class));
}
}
retVal.initializeInstanceData(null, methodNameOnly, bmd, methodInterface, txAttr, false);
return retVal;
}
|
java
|
Class<?> loadClass(String name) throws ClassNotFoundException {
// First, try to find the class by name.
ServiceReference<DeserializationClassProvider> provider = classProviders.getReference(name);
if (provider != null) {
return loadClass(provider, name);
}
// Next, try to find the class by package.
int index = name.lastIndexOf('.');
if (index != -1) {
String pkg = name.substring(0, index);
provider = packageProviders.getReference(pkg);
if (provider != null) {
return loadClass(provider, name);
}
}
return null;
}
|
java
|
public void begin() throws ResourceException {
if (tc.isEntryEnabled())
Tr.entry(this, tc, "begin", ivMC);
// if the MC marked Stale, it means the user requested a purge pool with an immediate option
// so don't allow any work on the mc
if (ivMC._mcStale) {
if (tc.isDebugEnabled())
Tr.debug(this, tc, "MC is stale");
throw new DataStoreAdapterException("INVALID_CONNECTION", AdapterUtil.staleX(), WSRdbSpiLocalTransactionImpl.class);
}
if (dsConfig.get().enableMultithreadedAccessDetection)
ivMC.detectMultithreadedAccess();
if (!ivMC.isTransactional()) { // do nothing if no enlistment
if (tc.isEntryEnabled())
Tr.exit(this, tc, "begin", "no-op. Enlistment disabled");
return;
}
if (tc.isDebugEnabled()) {
String cId = null;
try {
cId = ivMC.mcf.getCorrelator(ivMC);
} catch (SQLException x) {
// will just log the exception here and ignore it since it's in trace
Tr.debug(
tc,
"got an exception trying to get the correlator, exception is: ",
x);
}
if (cId != null) {
StringBuffer stbuf = new StringBuffer(200);
stbuf.append("Correlator: DB2, ID: ");
stbuf.append(cId);
stbuf.append(" Transaction : ");
stbuf.append(this);
stbuf.append(" BEGIN");
Tr.debug(this, tc, stbuf.toString());
}
}
ResourceException re;
// Remove synchronization.
re = ivStateManager.isValid(WSStateManager.LT_BEGIN);
if (re == null) {
//Note the MC handles all notification of connection error event and such on setAutoCommit
// also, it sets it only when necessary
try {
if (ivMC.getAutoCommit())
ivMC.setAutoCommit(false);
} catch (SQLException sqle) {
FFDCFilter.processException(
sqle,
currClass.getName() + ".begin",
"126",
this);
throw new DataStoreAdapterException(
"DSA_ERROR",
sqle,
currClass);
}
//Note this exception is not caught - This is because
// 1) it is of type ResourceException so it can be thrown from the method
// 2) if isValid is okay, this should never fail. If isValid is not okay
// an exception is thrown from there
// We already validated the state in this sync block, so just set it.
ivStateManager.transtate = WSStateManager.LOCAL_TRANSACTION_ACTIVE;
} else
{
// state change was not valid
LocalTransactionException local_tran_excep =
new LocalTransactionException(re.getMessage());
DataStoreAdapterException dsae = new DataStoreAdapterException(
"WS_INTERNAL_ERROR",
local_tran_excep,
currClass,
"Cannot start SPI local transaction.",
"",
local_tran_excep.getMessage());
// Use FFDC to log the possible components list.
FFDCFilter.processException(
dsae,
"com.ibm.ws.rsadapter.spi.WSRdbSpiLocalTransactionImpl.begin",
"127",
this,
new Object[] { "Possible components: WebSphere J2C Implementation" });
if (tc.isEntryEnabled())
Tr.exit(this, tc, "begin", "Exception");
throw dsae;
}
if (tc.isEventEnabled())
Tr.event(
tc,
"SpiLocalTransaction started. ManagedConnection state is "
+ ivMC.getTransactionStateAsString());
if (tc.isEntryEnabled())
Tr.exit(this, tc, "begin");
}
|
java
|
public void commit() throws ResourceException {
if (tc.isEntryEnabled())
Tr.entry(this, tc, "commit", ivMC);
// if the MC marked Stale, it means the user requested a purge pool with an immediate option
// so don't allow any work on the mc
if (ivMC._mcStale) {
if (tc.isDebugEnabled())
Tr.debug(this, tc, "MC is stale");
throw new DataStoreAdapterException("INVALID_CONNECTION", AdapterUtil.staleX(), WSRdbSpiLocalTransactionImpl.class);
}
if (dsConfig.get().enableMultithreadedAccessDetection)
ivMC.detectMultithreadedAccess();
if (!ivMC.isTransactional()) { // do nothing if no enlistment
if (tc.isEntryEnabled())
Tr.exit(this, tc, "commit", "no-op. Enlistment disabled");
return;
}
if (tc.isDebugEnabled()) {
String cId = null;
try {
cId = ivMC.mcf.getCorrelator(ivMC);
} catch (SQLException x) {
// will just log the exception here and ignore it since it's in trace
Tr.debug(
tc,
"got an exception trying to get the correlator, exception is: ",
x);
}
if (cId != null) {
StringBuffer stbuf = new StringBuffer(200);
stbuf.append("Correlator: DB2, ID: ");
stbuf.append(cId);
stbuf.append(" Transaction : ");
stbuf.append(this);
stbuf.append(" COMMIT");
Tr.debug(this, tc, stbuf.toString());
}
}
ResourceException re = ivStateManager.isValid(WSStateManager.LT_COMMIT);
if (re == null)
try
{
// If no work was done during the transaction, the autoCommit value may still
// be on. In this case, just no-op, since some drivers like ConnectJDBC 3.1
// don't allow commit/rollback when autoCommit is on.
// here the autocommit is always false, so we can call commit.
ivConnection.commit();
//Note this exception is not caught - This is because
// 1) it is of type ResourceException so it can be thrown from the method
// 2) if isValid is okay, this should never fail. If isValid is not okay
// an exception is thrown from there
// Already validated the state in this sync block, so just set it.
ivStateManager.transtate = WSStateManager.NO_TRANSACTION_ACTIVE;
}
catch (SQLException se) {
FFDCFilter.processException(
se,
"com.ibm.ws.rsadapter.spi.WSRdbSpiLocalTransactionImpl.commit",
"139",
this);
ResourceException x = AdapterUtil.translateSQLException(se, ivMC, true, currClass);
if (tc.isEntryEnabled())
Tr.exit(this, tc, "commit", "Exception");
throw x;
}
else
{
// state change was not valid
LocalTransactionException local_tran_excep =
new LocalTransactionException(re.getMessage());
DataStoreAdapterException ds = new DataStoreAdapterException(
"WS_INTERNAL_ERROR",
local_tran_excep,
currClass,
"Cannot commit SPI local transaction.",
"",
local_tran_excep.getMessage());
if (tc.isEntryEnabled())
Tr.exit(this, tc, "commit", "Exception");
throw ds;
}
// Reset so we can deferred enlist in a future global transaction.
ivMC.wasLazilyEnlistedInGlobalTran = false;
if (tc.isEventEnabled())
Tr.event(
tc,
"SPILocalTransaction committed. ManagedConnection state is "
+ ivMC.getTransactionStateAsString());
if (tc.isEntryEnabled())
Tr.exit(this, tc, "commit");
}
|
java
|
public void rollback() throws ResourceException {
if (tc.isEntryEnabled())
Tr.entry(this, tc, "rollback", ivMC);
// if the MC marked Stale, it means the user requested a purge pool with an immediate option
// so don't allow any work on the mc
if (ivMC._mcStale) {
if (tc.isDebugEnabled())
Tr.debug(this, tc, "MC is stale");
throw new DataStoreAdapterException("INVALID_CONNECTION", AdapterUtil.staleX(), WSRdbSpiLocalTransactionImpl.class);
}
if (!ivMC.isTransactional()) { // do nothing if no enlistment
if (tc.isEntryEnabled())
Tr.exit(this, tc, "rollback", "no-op. Enlistment disabled");
return;
}
ResourceException re;
if (tc.isDebugEnabled()) {
String cId = null;
try {
cId = ivMC.mcf.getCorrelator(ivMC);
} catch (SQLException x) {
// will just log the exception here and ignore it since it's in trace
Tr.debug(
tc,
"got an exception trying to get the correlator, exception is: ",
x);
}
if (cId != null) {
StringBuffer stbuf = new StringBuffer(200);
stbuf.append("Correlator: DB2, ID: ");
stbuf.append(cId);
stbuf.append(" Transaction : ");
stbuf.append(this);
stbuf.append("ROLLBACK");
Tr.debug(this, tc, stbuf.toString());
}
}
re = ivStateManager.isValid(WSStateManager.LT_ROLLBACK);
if (re == null)
try
{
// If no work was done during the transaction, the autoCommit value may still
// be on. In this case, just no-op, since some drivers like ConnectJDBC 3.1
// don't allow commit/rollback when autoCommit is on.
// here the autocommit is always false, so we can call rollback.
ivConnection.rollback();
//Note this exception is not caught - This is because
// 1) it is of type ResourceException so it can be thrown from the method
// 2) if isValid is okay, this should never fail. If isValid is not okay
// an exception is thrown from there
// Already validated the state in this sync block, so just set it.
ivStateManager.transtate = WSStateManager.NO_TRANSACTION_ACTIVE;
}
catch (SQLException se) {
if (!ivMC.isAborted())
FFDCFilter.processException(se, getClass().getName(), "192", this);
ResourceException resX = AdapterUtil.translateSQLException(se, ivMC, true, currClass);
if (tc.isEntryEnabled())
Tr.exit(this, tc, "rollback", "Exception");
throw resX;
}
else
{
// state change was not valid
LocalTransactionException local_tran_excep =
new LocalTransactionException(re.getMessage());
DataStoreAdapterException dsae = new DataStoreAdapterException(
"WS_INTERNAL_ERROR",
local_tran_excep,
currClass,
"Cannot rollback SPI local transaction.",
"",
local_tran_excep.getMessage());
// Use FFDC to log the possible components list.
FFDCFilter.processException(
dsae,
"com.ibm.ws.rsadapter.spi.WSRdbSpiLocalTransactionImpl.rollback",
"291",
this,
new Object[] { " Possible components: WebSphere J2C Implementation" });
if (tc.isEntryEnabled())
Tr.exit(this, tc, "rollback", "Exception");
throw dsae;
}
// Reset so we can deferred enlist in a future global transaction.
ivMC.wasLazilyEnlistedInGlobalTran = false;
if (tc.isEventEnabled())
Tr.event(
tc,
"SpiLocalTransaction rolled back. ManagedConnection state is "
+ ivMC.getTransactionStateAsString());
if (tc.isEntryEnabled())
Tr.exit(this, tc, "rollback");
}
|
java
|
private void serializeRealObject() throws ObjectFailedToSerializeException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "serializeRealObject");
if (hasRealObject) {
// If the realObject isn't null, we need to serialize it & set it into the message
if (realObject != null) {
try {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
ObjectOutputStream oos = new ObjectOutputStream(baos);
// Write the real object into a byte array
oos.writeObject(realObject);
// Store the bytes in the payload
getPayload().setField(JmsObjectBodyAccess.BODY_DATA_VALUE, baos.toByteArray());
// Set the flag, create a SoftReference to the Object & null out the strong reference
// so the object can be GCd if necessary
hasSerializedRealObject = true;
softRefToRealObject = new SoftReference<Serializable>(realObject);
realObject = null;
}
catch (IOException ioe) {
FFDCFilter.processException(ioe, "com.ibm.ws.sib.mfp.impl.JsJmsObjectMessageImpl.serializeRealObject", "296");
// Wrap the exception, giving the object's class name, and throw.
String objectClassName = realObject.getClass().getName();
throw new ObjectFailedToSerializeException(ioe
, objectClassName);
}
}
// If the realObject is null, we just set the field in the message & can now claim to have serialized it.
else {
// Real object is null
getPayload().setField(JmsObjectBodyAccess.BODY_DATA_VALUE, null);
// We have not actually serialized anything, but the object data is in the payload
hasSerializedRealObject = true;
}
}
// Any length calculation will need to be redone as the message now 'owns' the payload value
clearCachedLengths();
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "serializeRealObject");
}
|
java
|
private Serializable deserializeToRealObject() throws IOException, ClassNotFoundException {
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(this, tc, "deserializeToRealObject");
Serializable obj = null;
ObjectInputStream ois = null;
byte[] bytes = getDataFromPayload();
if (bytes != null) {
try {
ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
// Get the classloader, which may be the standard classloader or may be an application
// classloader provided by WebSphere
ClassLoader cl = AccessController.doPrivileged(
new PrivilegedAction<ClassLoader>() {
@Override
public ClassLoader run() {
return Thread.currentThread().getContextClassLoader();
}
});
ois = new DeserializationObjectInputStream(bais, cl);
// Deserialize the object and set the local variables appropriately
obj = (Serializable) ois.readObject();
hasRealObject = true;
hasSerializedRealObject = true;
softRefToRealObject = new SoftReference<Serializable>(obj);
} catch (IOException ioe) {
FFDCFilter.processException(ioe, "com.ibm.ws.sib.mfp.impl.JsJmsObjectMessageImpl.deserializeToRealObject", "340");
throw ioe;
} catch (ClassNotFoundException cnfe) {
FFDCFilter.processException(cnfe, "com.ibm.ws.sib.mfp.impl.JsJmsObjectMessageImpl.deserializeToRealObject", "345");
throw cnfe;
} finally {
try {
if (ois != null) {
ois.close();
}
} catch (IOException ex) {
// No FFDC code needed
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
SibTr.debug(this, tc, "Exception closing the ObjectInputStream", ex);
}
}
}
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(this, tc, "deserializeToRealObject", (obj == null ? "null" : obj.getClass()));
return obj;
}
|
java
|
public SICoreConnection getConnection() throws SISessionUnavailableException
{
if (TraceComponent.isAnyTracingEnabled() && CoreSPIProducerSession.tc.isEntryEnabled())
{
SibTr.entry(CoreSPIProducerSession.tc, "getConnection", this);
SibTr.exit(CoreSPIProducerSession.tc, "getConnection", _conn);
}
checkNotClosed();
return _conn;
}
|
java
|
void disableDiscriminatorAccessCheckAtSend(String discriminatorAtCreate)
{
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.entry(tc, "disableDiscriminatorAccessCheckAtSend");
_checkDiscriminatorAccessAtSend = false;
this._discriminatorAtCreate = discriminatorAtCreate;
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
SibTr.exit(tc, "disableDiscriminatorAccessCheckAtSend");
}
|
java
|
public void addEntry(TimerWorkItem addItem, long curTime) {
// this routine assumes the slot is not full
this.mostRecentlyAccessedTime = curTime;
this.lastEntryIndex++;
this.entries[lastEntryIndex] = addItem;
}
|
java
|
@Generated(value = "com.ibm.jtc.jax.tools.xjc.Driver", date = "2014-06-11T05:49:00-04:00", comments = "JAXB RI v2.2.3-11/28/2011 06:21 AM(foreman)-")
public List<Flow> getFlows() {
if (flows == null) {
flows = new ArrayList<Flow>();
}
return this.flows;
}
|
java
|
public JMFMessage decode(JSchema schema, byte[] contents, int offset, int length)
throws JMFMessageCorruptionException {
return new JSMessageImpl(schema, contents, offset, length, true);
}
|
java
|
protected String read(SocketChannel sc) throws IOException {
sc.read(buffer);
buffer.flip();
decoder.decode(buffer, charBuffer, true);
charBuffer.flip();
String result = charBuffer.toString();
// Clear out buffers
buffer.clear();
charBuffer.clear();
decoder.reset();
return result;
}
|
java
|
protected void write(SocketChannel sc, String s) throws IOException {
sc.write(encoder.encode(CharBuffer.wrap(s)));
}
|
java
|
@Test
public void MPJwtBadMPConfigAsEnvVars_GoodMpJwtConfigSpecifiedInServerXml() throws Exception {
resourceServer.reconfigureServerUsingExpandedConfiguration(_testName, "rs_server_AltConfigNotInApp_goodServerXmlConfig.xml");
standardTestFlow(resourceServer, MpJwtFatConstants.NO_MP_CONFIG_IN_APP_ROOT_CONTEXT,
MpJwtFatConstants.NO_MP_CONFIG_IN_APP_APP, MpJwtFatConstants.MPJWT_APP_CLASS_NO_MP_CONFIG_IN_APP);
}
|
java
|
@Test
public void MPJwtBadMPConfigAsEnvVars_MpJwtConfigNotSpecifiedInServerXml() throws Exception {
standardTestFlow(resourceServer, MpJwtFatConstants.NO_MP_CONFIG_IN_APP_ROOT_CONTEXT,
MpJwtFatConstants.NO_MP_CONFIG_IN_APP_APP, MpJwtFatConstants.MPJWT_APP_CLASS_NO_MP_CONFIG_IN_APP,
setBadIssuerExpectations(resourceServer));
}
|
java
|
private boolean doRead(int amountToRead) throws IOException{
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "doRead, Current buffer, " + _buffer + ", reading from the TCP Channel, readLine : " + _isReadLine);
}
try {
if(_tcpChannelCallback != null && !_isReadLine){
//async read logic
return immediateRead(amountToRead);
} else {
return syncRead(amountToRead);
}
} catch (IOException e) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "doRead, we encountered an exception during the read : " + e);
}
if(_error != null){
return false;
}
_error = e;
throw e;
}
}
|
java
|
private boolean syncRead(int amountToRead) throws IOException{
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "syncRead, Executing a synchronous read");
}
// Allocate the buffer and set it on the TCP Channel
setAndAllocateBuffer(amountToRead);
try{
long bytesRead = _tcpContext.getReadInterface().read(1, WCCustomProperties31.UPGRADE_READ_TIMEOUT);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "syncRead, Completed the read, " + bytesRead);
}
if(bytesRead > 0){
//Get the buffer from the TCP Channel after we have told them to read.
_buffer = _tcpContext.getReadInterface().getBuffer();
//We don't need to check for null first as we know we will always get the buffer we just set
configurePostReadBuffer();
// record the new amount of data read from the channel
_totalBytesRead += _buffer.remaining();
return true;
}
return false;
}catch (IOException e){
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "syncRead, We encountered an exception during the read : " + e);
}
_error = e;
throw e;
}
}
|
java
|
private boolean immediateRead(int amountToRead){
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "immediateRead, Executing a read");
}
if(amountToRead > 1){
//Allocate a new temp buffer, then set the position to 0 and limit to the amount we want to read
//Copy in the current this.buffer as it should only have one byte in it
WsByteBuffer tempBuffer = allocateBuffer(amountToRead);
tempBuffer.position(0);
tempBuffer.limit(amountToRead);
tempBuffer.put(_buffer);
tempBuffer.position(1);
_buffer.release();
_buffer = tempBuffer;
tempBuffer = null;
_tcpContext.getReadInterface().setBuffer(_buffer);
long bytesRead = 0;
try{
bytesRead = _tcpContext.getReadInterface().read(0, WCCustomProperties31.UPGRADE_READ_TIMEOUT);
} catch (IOException readException){
//If we encounter an exception here we need to return the 1 byte that we already have.
//Return true immediately and the next read will catch the exception and propagate it properly
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "immediateRead, The read encountered an exception. " + readException);
Tr.debug(tc, "immediateRead, Return with our one byte");
}
configurePostReadBuffer();
return true;
}
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "immediateRead, Complete, " + bytesRead);
}
//Get the buffer from the TCP Channel after we have told them to read.
_buffer = _tcpContext.getReadInterface().getBuffer();
//We don't need to check for null first as we know we will always get the buffer we just set
configurePostReadBuffer();
// record the new amount of data read from the channel
_totalBytesRead += _buffer.remaining();
}
//We will return true here in all circumstances because we always have 1 byte read from the isReady call or the initial read of the connection
return true;
}
|
java
|
public int read() throws IOException {
validate();
int rc = -1;
if(doRead(1)){
rc = _buffer.get() & 0x000000FF;
}
_buffer.release();
_buffer = null;
return rc;
}
|
java
|
public int read(byte[] output, int offset, int length) throws IOException {
int size = -1;
validate();
if (0 == length) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "read(byte[],int,int), Target length was 0");
}
return length;
}
if(doRead(length)){
size = _buffer.limit() - _buffer.position();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled())
{
Tr.debug(tc, "(byte[],int,int) Filling byte array, size --> " + size);
}
_buffer.get(output, offset, size);
}
_buffer.release();
_buffer = null;
return size;
}
|
java
|
private void setAndAllocateBuffer(int sizeToAllocate) {
if(_buffer == null){
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "setAndAllocateBuffer, Buffer is null, size to allocate is : " + sizeToAllocate);
}
_buffer = allocateBuffer(sizeToAllocate);
}
configurePreReadBuffer(sizeToAllocate);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "setAndAllocateBuffer, Setting the buffer : " + _buffer );
}
_tcpContext.getReadInterface().setBuffer(_buffer);
}
|
java
|
private void validate() throws IOException {
if (null != _error) {
throw _error;
}
if(!_isReadLine && !_isReady){
//If there is no data available then isReady will have returned false and this throws an IllegalStateException
if (TraceComponent.isAnyTracingEnabled() && tc.isErrorEnabled())
Tr.error(tc, "read.failed.isReady.false");
throw new IllegalStateException(Tr.formatMessage(tc, "read.failed.isReady.false"));
}
}
|
java
|
public void setupReadListener(ReadListener readListenerl, SRTUpgradeInputStream31 srtUpgradeStream){
if(readListenerl == null){
if (TraceComponent.isAnyTracingEnabled() && tc.isErrorEnabled())
Tr.error(tc, "readlistener.is.null");
throw new NullPointerException(Tr.formatMessage(tc, "readlistener.is.null"));
}
if(_rl != null){
if (TraceComponent.isAnyTracingEnabled() && tc.isErrorEnabled())
Tr.error(tc, "readlistener.already.started");
throw new IllegalStateException(Tr.formatMessage(tc, "readlistener.already.started"));
}
//Save off the current Thread data by creating the ThreadContextManager. Then pass it into the callback
ThreadContextManager tcm = new ThreadContextManager();
_tcpChannelCallback = new UpgradeReadCallback(readListenerl, this, tcm, srtUpgradeStream);
_rl = readListenerl;
_isReady = false;
_upConn.getVirtualConnection().getStateMap().put(TransportConstants.UPGRADED_LISTENER, "true");
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "setupReadListener, Starting the initial read");
}
initialRead();
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "setupReadListener, ReadListener set : " + _rl);
}
}
|
java
|
public void initialRead(){
_isInitialRead = true;
if(_buffer != null){
_buffer.release();
_buffer = null;
}
setAndAllocateBuffer(1);
configurePreReadBuffer(1);
//This is the first read of the ReadListener, which means we force the read to go async
//We won't get an actual response from this read as it will always come back on another thread
_tcpContext.getReadInterface().setBuffer(_buffer);
_tcpContext.getReadInterface().read(1, _tcpChannelCallback, true, WCCustomProperties31.UPGRADE_READ_TIMEOUT);
}
|
java
|
public void configurePostInitialReadBuffer(){
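// The initial async read has completed on another thread: clear the initial/first
// read flags, pick up the filled buffer from the TCP read interface, and prepare it for reading.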
_isInitialRead = false;
_isFirstRead = false;
_buffer = _tcpContext.getReadInterface().getBuffer();
configurePostReadBuffer();
}
|
java
|
public Boolean close() {
_isClosing = true;
boolean closeResult = true;
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "close, Initial read outstanding : " + _isInitialRead);
}
if(_isInitialRead){
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "close, Cancelling any outstanding read");
}
_tcpContext.getReadInterface().read(1, _tcpChannelCallback, false, TCPReadRequestContext.IMMED_TIMEOUT);
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "close, Call to cancel complete");
}
//This seems strange, but during the timeout processing _isInitialRead will be set to false.
//If it's false we don't want to wait, since the timeout was handled in line.
//If it's true we will want to wait until the timeout has been processed.
if(_isInitialRead){
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "close, Timeout has been called, waiting for it to complete");
}
closeResult = true;
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "close, No read outstanding, no reason to call cancel");
}
closeResult = false;
}
} else {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "close, No read outstanding, no reason to call cancel");
}
closeResult = false;
}
if(_rl != null){
if(!this.isAlldataReadCalled()) {
try {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "close, We are now closed, calling the ReadListener onAllDataRead");
}
this.setAlldataReadCalled(true);
_rl.onAllDataRead();
} catch (IOException ioe) {
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()){
Tr.debug(tc, "close, Encountered an exception while calling onAllDAtaRead : " + ioe);
}
}
}
}
return closeResult;
}
|
java
|
public synchronized int getDurableSubscriptions()
{
if (tc.isEntryEnabled())
SibTr.entry(tc, "getDurableSubscriptions");
if (tc.isEntryEnabled())
SibTr.exit(
tc,
"getDurableSubscriptions",
new Integer(durableSubscriptions));
return durableSubscriptions;
}
|
java
|
public synchronized int getNonDurableSubscriptions()
{
if (tc.isEntryEnabled())
SibTr.entry(tc, "getNonDurableSubscriptions");
if (tc.isEntryEnabled())
SibTr.exit(
tc,
"getNonDurableSubscriptions",
new Integer(nonDurableSubscriptions));
return nonDurableSubscriptions;
}
|
java
|
public synchronized int getTotalSubscriptions()
{
if (tc.isEntryEnabled())
SibTr.entry(tc, "getTotalSubscriptions");
int totalSubscriptions = durableSubscriptions + nonDurableSubscriptions;
if (tc.isEntryEnabled())
SibTr.exit(
tc,
"getTotalSubscriptions",
new Integer(totalSubscriptions));
return totalSubscriptions;
}
|
java
|
void balance(
NodeStack stack,
GBSNode q)
{
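/* Re-balance the tree after inserting node q. The stack records the search   */
/* path; balance factors are adjusted from the balance point down to q, and a */
/* rotation is performed at the balance point if it has become unbalanced.    */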
GBSNode p;
int bpidx = stack.balancePointIndex();
int x = bpidx;
GBSNode bpoint = stack.node(x);
GBSNode bfather = stack.node(x-1);
/* Adjust balance factors in intervening nodes */
if (bpoint.leftChild() == stack.node(x+1))
p = bpoint.leftChild();
else
p = bpoint.rightChild();
x++;
while (p != q)
{
if (p.leftChild() == stack.node(x+1))
{ /* We followed and added to left path */
p.setBalance(-1); /* It is now left heavy */
p = p.leftChild();
}
else /* We followed and added to right path */
{
p.setBalance(1); /* It is now right heavy */
p = p.rightChild();
}
x++;
}
/* Adjust the balance factor at the balance point. */
/* Re-balance if necessary. */
if (bpoint.leftChild() == stack.node(bpidx+1))
{ /* Added to left side */
int bpb = bpoint.balance();
switch (bpb)
{
case 0:
bpoint.setBalance(-1);
break;
case 1:
bpoint.clearBalance();
break;
case -1:
rotateLeft(bfather, bpoint);
break;
default:
String zzz1 = "Help1 !, bpb = " + bpb;
throw new RuntimeException(zzz1);
}
}
else /* Added to right side */
{
int bpb = bpoint.balance();
switch (bpb)
{
case 0:
bpoint.setBalance(1);
break;
case -1:
bpoint.clearBalance();
break;
case 1:
rotateRight(bfather, bpoint);
break;
default:
String zzz2 = "Help2 !, bpb = " + bpb;
throw new RuntimeException(zzz2);
}
}
}
|
java
|
private void rotateLeft(
GBSNode bfather,
GBSNode bpoint)
{
GBSNode bson = bpoint.leftChild();
if (bson.balance() == -1) /* Single LL rotation */
{
bpoint.setLeftChild(bson.rightChild());
bson.setRightChild(bpoint);
if (bfather.rightChild() == bpoint)
bfather.setRightChild(bson);
else
bfather.setLeftChild(bson);
bpoint.clearBalance();
bson.clearBalance();
}
else /* Double LR rotation */
{
GBSNode blift = bson.rightChild();
bson.setRightChild(blift.leftChild());
blift.setLeftChild(bson);
bpoint.setLeftChild(blift.rightChild());
blift.setRightChild(bpoint);
if (bfather.rightChild() == bpoint)
bfather.setRightChild(blift);
else
bfather.setLeftChild(blift);
bpoint.setBalance(newBalance2[blift.balance()+1]);
bson.setBalance( newBalance1[blift.balance()+1]);
blift.clearBalance();
}
}
|
java
|
public Entry getPrevious()
{
checkEntryParent();
Entry entry = null;
if(!isFirst())
{
entry = previous;
}
return entry;
}
|
java
|
@Override
public boolean analyzeJar(Analyzer analyzer) throws Exception {
try {
if (scanAgain) {
//this will only have an effect on the first scan, because subsequent scanAgain
//will use the errorMarker to decide if to scan again
resetErrorMarker();
List<String> newlyAddedPackages = new ArrayList<String>();
System.out.println("ImportlessPackager plugin: iteration " + iteration);
// set up exclude filter
setupFilters(analyzer);
// collect dependency packages
Set<PackageRef> importedPackages = collectDependencies(analyzer);
Jar outputJar = analyzer.getJar();
//loop through the referred packages
for (PackageRef ref : importedPackages) {
String packageName = ref.getFQN();
System.out.println("Seeking package " + packageName);
boolean foundPackage = false;
// locate the package in the classpath and add it to the export
for (Jar src : analyzer.getClasspath()) {
if (src.getPackages().contains(packageName)) {
foundPackage = true;
System.out.println("Found matching pkg " + packageName + " from " +src.getSource().getAbsolutePath());
//only want to include the package itself, not any sub-packages
//can provide an Instruction to bnd for this, but it gets a bit weird:
//match the package name plus a / since everything in the package will
//have the package path followed by a separator.
//We use [/|/] to match the single / because bnd needs something to serve
//as a wildcard to enable regex matching (otherwise it just does an equals match)
//we can't use the other wildcards e.g. * ? because bnd processes them into
//.?, .* to do an any character match
//If we don't want any sub package content we can't allow any more slashes
//so use [^/]+ (i.e. not / 1 or more times)
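//For example, a package whose path is "com/example/util" produces the
//instruction "com/example/util[/|/][^/]+" (package name here is illustrative)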
outputJar.addAll(src, new Instruction(ref.getPath() + "[/|/][^/]+"));
newlyAddedPackages.add(packageName);
//we don't break the outer loop in case of split packages (ick!)
//since we might find some additional content for this package in another jar
}
}
if (!foundPackage) {
//we couldn't find the package on the classpath, fail the build
//with a helpful message
String errorMsg = "Package " + packageName + " not found for inclusion in jar. Is the package available on the projects classpath?";
error(errorMsg);
}
}
//add all the newly added packages to our global list so we don't check them again
if (newlyAddedPackages.isEmpty()) {
//no new packages, no further scanning required
scanAgain = false;
} else {
addedPackages.addAll(newlyAddedPackages);
iteration++;
if (iteration > LAST_ITERATION) {
//there are new packages but we've run out of iterations, fail the build
error("Maximum number of plugin iterations reached, but there were still new packages to analyze. Consider adding more iterations.");
}
}
//don't bother scanning again if we've already had an error
if (scanAgain && errorMarker.exists())
scanAgain = false;
}
} catch(Exception ex) {
ex.printStackTrace();
System.out.println(ex.getMessage());
}
//tell bnd to reanalyze the classpath if we are going to scan again
return scanAgain;
}
|
java
|
private void collectClassDependencies (Clazz classInstance, Analyzer analyzer) throws Exception {
// retrieve the imports from the known class path
Set<TypeRef> importedClasses = classInstance.parseClassFile();
for (TypeRef importedClass:importedClasses) {
if (canBeSkipped(importedClass)) // validate the import
continue;
// find the class in the classpath
Clazz classInstanceImported = analyzer.findClass(importedClass);
if (classInstanceImported == null)
error( "Referenced class " + importedClass.getFQN() + " not found for inclusion in jar. It is imported by " + classInstance.getAbsolutePath());
// update the imports map
importedReferencedTypes.add(importedClass);
allReferencedTypes.add(importedClass);
// collect dependencies introduced by the imports (skip classes that were not found, to avoid an NPE)
if (classInstanceImported != null)
collectClassDependencies(classInstanceImported, analyzer);
}
}
|
java
|
private boolean canBeSkipped (TypeRef importedClass) {
// skip known imported classes and the ones in JRE
if (allReferencedTypes.contains(importedClass) || importedReferencedTypes.contains(importedClass) || importedClass.isJava())
return true;
// Skip the imported classes which are excluded
String classPackage = importedClass.getPackageRef().getFQN();
for (String excludePrefix: excludePrefixes) { // skip by the package prefix
if (classPackage.startsWith(excludePrefix))
return true;
}
if (excludes.contains(classPackage)) // skip by the full package name
return true;
return classPackage.length()<2; // special situation for the primitive array
}
|
java
|
private Set<PackageRef> collectPackageDependencies() {
Set<PackageRef> referencedPackages = new HashSet<PackageRef> ();
for (TypeRef newReferencedType:importedReferencedTypes) {
PackageRef packageRef = newReferencedType.getPackageRef();
if (referencedPackages.contains(packageRef)) // package already known
continue;
referencedPackages.add(packageRef);
System.out.println("Add package: " + packageRef.getFQN());
}
return referencedPackages;
}
|
java
|
public boolean addMetatypeAd(MetatypeAd metatypeAd) {
if (this.metatypeAds == null)
this.metatypeAds = new LinkedList<MetatypeAd>();
for (MetatypeAd ad : metatypeAds)
if (ad.getID().equals(metatypeAd.getID()))
return false;
this.metatypeAds.add(metatypeAd);
return true;
}
|
java
|
public synchronized void prepareSocket() throws IOException {
if (!prepared) {
final long fd = getFileDescriptor();
if (fd == INVALID_SOCKET) {
throw new AsyncException(AsyncProperties.aio_handle_unavailable);
}
channelIdentifier = provider.prepare2(fd, asyncChannelGroup.getCompletionPort());
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "prepareSocket - socket prepared, fd = " + fd
+ " channel id: " + channelIdentifier + " "
+ ", local: " + channel.socket().getLocalSocketAddress()
+ ", remote: " + channel.socket().getRemoteSocketAddress());
}
long callid = 0; // init to zero, reset when IO requested
readIOCB = (CompletionKey) AsyncLibrary.completionKeyPool.get();
if (readIOCB != null) {
// initialize the IOCB from the pool
readIOCB.initializePoolEntry(channelIdentifier, callid);
writeIOCB = (CompletionKey) AsyncLibrary.completionKeyPool.get();
if (writeIOCB != null) {
// initialize the IOCB from the pool
writeIOCB.initializePoolEntry(channelIdentifier, callid);
} else {
writeIOCB = new CompletionKey(channelIdentifier, callid, defaultBufferCount);
}
} else {
readIOCB = new CompletionKey(channelIdentifier, callid, defaultBufferCount);
writeIOCB = new CompletionKey(channelIdentifier, callid, defaultBufferCount);
}
provider.initializeIOCB(readIOCB);
provider.initializeIOCB(writeIOCB);
prepared = true;
}
}
|
java
|
public String getParameterClassName(String attributeName, JspCoreContext context) throws JspCoreException {
String parameterClassName = null;
if (parameterClassNameMap == null) {
parameterClassNameMap = new HashMap();
}
parameterClassName = (String)parameterClassNameMap.get(attributeName);
if (parameterClassName == null) {
TagAttributeInfo[] attributeInfos = ti.getAttributes();
for (int i = 0; i < attributeInfos.length; i++) {
if (attributeInfos[i].getName().equals(attributeName)) {
//PK69319
if(attributeInfos[i].isFragment()){
parameterClassName = "javax.servlet.jsp.tagext.JspFragment";
} else{
parameterClassName = attributeInfos[i].getTypeName();
}
parameterClassNameMap.put(attributeName, parameterClassName);
break;
}
}
}
return (parameterClassName);
}
|
java
|
public ReturnCode rollback() {
while (!history.isEmpty()) {
final Action action = (Action) history.pop();
final ReturnCode ret = action.execute();
if (ret.getCode() != 0) {
return ret;
}
}
return ReturnCode.OK;
}
|
java
|
public void updateState(SSLContext context, SSLEngine engine, SSLEngineResult result, WsByteBuffer decNetBuf, int position, int limit) {
this.sslContext = context;
this.sslEngine = engine;
this.sslEngineResult = result;
this.decryptedNetBuffer = decNetBuf;
this.netBufferPosition = position;
this.netBufferLimit = limit;
}
|
java
|
private void setXMLBeanInterface(String homeInterfaceName, String interfaceName) // F743-32443
throws InjectionException
{
// If a home or business interface was specified in XML, then set that as
// the injection type. Both may be null if the XML just provides an
// override of an annotation to add <ejb-link>... in which case the
// injection class type will be set when the annotation is processed.
// For performance, there is no need to load these classes until first
// accessed, which might be never if there is a binding file, so just the
// name will be set here. getInjectionClassType() is then overridden to load
// the class when needed. Prior to EJB 3.0 these interfaces were completely
// ignored, so this behavior allows EJB 2.1 apps to continue to function
// even if they had garbage in their deployment descriptor. d739281
if (homeInterfaceName != null && homeInterfaceName.length() != 0) // d668376
{
ivHomeInterface = true;
setInjectionClassTypeName(homeInterfaceName);
if (isValidationLoggable())
{
loadClass(homeInterfaceName, isValidationFailable());
loadClass(interfaceName, isValidationFailable());
}
}
else if (interfaceName != null && interfaceName.length() != 0) // d668376
{
ivBeanInterface = true;
setInjectionClassTypeName(interfaceName);
if (isValidationLoggable())
{
loadClass(interfaceName, isValidationFailable());
}
}
}
|
java
|
private void setBindingName() // d681743
throws InjectionException
{
Map<String, String> ejbRefBindings = ivNameSpaceConfig.getEJBRefBindings();
if (ejbRefBindings != null)
{
ivBindingName = ejbRefBindings.get(getJndiName());
if (ivBindingName != null && ivBindingName.equals(""))
{
ivBindingName = null;
Tr.warning(tc, "EJB_BOUND_TO_EMPTY_STRING_CWNEN0025W");
if (isValidationFailable()) // fail if enabled F743-14449
{
InjectionConfigurationException icex = new InjectionConfigurationException
("The " + getJndiName() + " EJB reference in the " + ivModule +
" module of the " + ivApplication + " application has been" +
" bound to the empty string in the global Java Naming and Directory Interface (JNDI) namespace.");
if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled())
Tr.exit(tc, "resolve : " + icex);
throw icex;
}
}
}
}
|
java
|
@Override
public void addInjectionTarget(Member member)
throws InjectionException
{
// If the beanName attribute was found in the constructor or merge
// method, then save the class of where it was located.
if (ivBeanName != null && ivBeanNameClass == null) {
ivBeanNameClass = member.getDeclaringClass();
}
super.addInjectionTarget(member);
}
|
java
|
@Override
public void visitInsn(int opcode) {
if (opcode == ATHROW && !enabledListeners.isEmpty()) {
String key = createKey();
ProbeImpl probe = getProbe(key);
long probeId = probe.getIdentifier();
setProbeInProgress(true);
visitInsn(DUP); // throwable throwable
visitLdcInsn(Long.valueOf(probeId)); // throwable throwable long1 long2
visitInsn(DUP2_X1); // throwable long1 long2 throwable long1 long2
visitInsn(POP2); // throwable long1 long2 throwable
if (isStatic()) {
visitInsn(ACONST_NULL); // throwable long1 long2 throwable this
} else {
visitVarInsn(ALOAD, 0); // throwable long1 long2 throwable this
}
visitInsn(SWAP); // throwable long1 long2 this throwable
visitInsn(ACONST_NULL); // throwable long1 long2 this throwable null
visitInsn(SWAP); // throwable long1 long2 this null throwable
visitFireProbeInvocation(); // throwable
setProbeInProgress(false);
setProbeListeners(probe, enabledListeners);
}
super.visitInsn(opcode);
}
|
java
|
public void addData(int index, byte[] data) throws InternalLogException
{
if (tc.isEntryEnabled()) Tr.entry(tc, "addData",new java.lang.Object[] {new Integer(index), RLSUtils.toHexString(data,RLSUtils.MAX_DISPLAY_BYTES), this});
// If the parent recovery log instance has experienced a serious internal error then prevent
// this operation from executing.
if (_recLog.failed())
{
if (tc.isEntryEnabled()) Tr.exit(tc, "addData",this);
throw new InternalLogException(null);
}
// we use an index value of 0 to indicate that it is a singledata RUsection
// so adjust now ... if (!_singleData) or if(index != 0)
if (index > 0) index--;
// list items may be added in any order, so it may be necessary to (temporarily) pad the list
final int currentSize = _writtenData.size();
if (index == currentSize)
_writtenData.add(/*index,*/ data);
else if (index < currentSize)
{
if (tc.isDebugEnabled()) Tr.debug(tc, "NMTEST: Replacing item (expect trace 'null') at index: " + index, _writtenData.get(index));
_writtenData.set(index, data);
}
else // index > currentSize
{
if (tc.isDebugEnabled()) Tr.debug(tc, "NMTEST: Adding null elements: " + (index-currentSize));
while (index-- > currentSize)
_writtenData.add(null);
_writtenData.add(data);
}
// set lastdata. This method is called during recovery and we shouldn't get asked for
// any data until all log records are read. So set lastdata to be the item at the current size
// of the array. Items may be added in random order, so lastitem will be correct when
// all items have been added
_lastDataItem = (byte[]) _writtenData.get(_writtenData.size() -1);
if (tc.isEntryEnabled()) Tr.exit(tc, "addData");
}
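// Worked example (added for clarity, not part of the original source): indices arrive
// 1-based for a multi-data section and may be out of order. Starting from an empty list,
//   addData(3, d3) -> index adjusted to 2, list padded with nulls -> [null, null, d3]
//   addData(1, d1) -> index adjusted to 0, replaces the null      -> [d1,   null, d3]
//   addData(2, d2) -> index adjusted to 1, replaces the null      -> [d1,   d2,   d3]
// after which _lastDataItem refers to the entry at the end of the list (d3).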
|
java
|
public int identity()
{
if (tc.isEntryEnabled()) Tr.entry(tc, "identity",this);
if (tc.isEntryEnabled()) Tr.exit(tc, "identity",new Integer(_identity));
return _identity;
}
|
java
|
public static int decode(WsByteBuffer headerBlock, int N) {
// if (!headerBlock.hasRemaining()) {
// throw new HeaderFieldDecodingException("No length to decode");
// }
int I = HpackUtils.getLSB(headerBlock.get(), N);
if (I < HpackUtils.ipow(2, N) - 1) {
return I;
} else {
int M = 0;
boolean done = false;
byte b;
while (!done) {
// If there are no further elements, this is an invalid HeaderBlock.
// If this decode method is called, there should always be header
// key value bytes after the integer representation.
// if (!headerBlock.hasRemaining()) {
// throw new HeaderFieldDecodingException("");
// }
b = headerBlock.get();
I = I + ((b) & 127) * HpackUtils.ipow(2, M);
M = M + 7;
if ((b & 128) == 0)
done = true;
}
return I;
}
}
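// A minimal standalone sketch (added for illustration, not part of the original source)
// of the same HPACK prefix-integer decoding from RFC 7541, Section 5.1, written against
// a plain byte array so it does not depend on the WsByteBuffer/HpackUtils types used
// above; the method name below is hypothetical.
static int decodePrefixInteger(byte[] block, int n) {
    int pos = 0;
    int value = block[pos++] & ((1 << n) - 1);   // low N bits of the first byte
    if (value < (1 << n) - 1) {
        return value;                            // the value fit entirely in the prefix
    }
    int shift = 0;
    int b;
    do {
        b = block[pos++] & 0xFF;
        value += (b & 0x7F) << shift;            // 7 payload bits per continuation byte
        shift += 7;
    } while ((b & 0x80) != 0);                   // high bit set means more bytes follow
    return value;
}
// Example from RFC 7541: 1337 with a 5-bit prefix encodes as 0x1F 0x9A 0x0A, so
// decodePrefixInteger(new byte[] { 0x1F, (byte) 0x9A, 0x0A }, 5) returns 1337.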
|
java
|
static protected int getPadBits(
int bitString)
{
int val = 0;
for (int i = 3; i >= 0; i--)
{
//
// this may look a little odd, but if it isn't done like this pre jdk1.2
// JVM's break!
//
if (i != 0)
{
if ((bitString >> (i * 8)) != 0)
{
val = (bitString >> (i * 8)) & 0xFF;
break;
}
}
else
{
if (bitString != 0)
{
val = bitString & 0xFF;
break;
}
}
}
if (val == 0)
{
return 7;
}
int bits = 1;
while (((val <<= 1) & 0xFF) != 0)
{
bits++;
}
return 8 - bits;
}
|
java
|
static protected byte[] getBytes(int bitString)
{
int bytes = 4;
for (int i = 3; i >= 1; i--)
{
if ((bitString & (0xFF << (i * 8))) != 0)
{
break;
}
bytes--;
}
byte[] result = new byte[bytes];
for (int i = 0; i < bytes; i++)
{
result[i] = (byte) ((bitString >> (i * 8)) & 0xFF);
}
return result;
}
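// Worked example (added for illustration, not part of the original source) showing how the
// two helpers above cooperate when a DER BIT STRING is built from a named-bits integer:
//   getBytes(0x80)     -> { (byte) 0x80 }              // unused high-order zero bytes trimmed
//   getPadBits(0x80)   -> 7                            // only bit 7 of the last data byte is used
//   getBytes(0x0189)   -> { (byte) 0x89, (byte) 0x01 } // the int is emitted low byte first
//   getPadBits(0x0189) -> 0                            // bit 0 of the most significant non-zero
//                                                      // byte (0x01) is set, so no pad bits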
|
java
|
public static DERBitString getInstance(
Object obj)
{
if (obj == null || obj instanceof DERBitString)
{
return (DERBitString)obj;
}
if (obj instanceof ASN1OctetString)
{
byte[] bytes = ((ASN1OctetString)obj).getOctets();
int padBits = bytes[0];
byte[] data = new byte[bytes.length - 1];
System.arraycopy(bytes, 1, data, 0, bytes.length - 1);
return new DERBitString(data, padBits);
}
if (obj instanceof ASN1TaggedObject)
{
return getInstance(((ASN1TaggedObject)obj).getObject());
}
throw new IllegalArgumentException("illegal object in getInstance: " + obj.getClass().getName());
}
|
java
|
private AuthenticationResult handleBasicAuth(String inRealm, HttpServletRequest req, HttpServletResponse res) {
AuthenticationResult result = null;
String hdrValue = req.getHeader(BASIC_AUTH_HEADER_NAME);
if (hdrValue == null || !hdrValue.startsWith("Basic ")) {
result = new AuthenticationResult(AuthResult.SEND_401, inRealm, AuditEvent.CRED_TYPE_BASIC, null, AuditEvent.OUTCOME_CHALLENGE);
return result;
}
// Parse the username & password from the header.
String encoding = req.getHeader("Authorization-Encoding");
hdrValue = decodeBasicAuth(hdrValue.substring(6), encoding);
int idx = hdrValue.indexOf(':');
if (idx < 0) {
result = new AuthenticationResult(AuthResult.SEND_401, inRealm, AuditEvent.CRED_TYPE_BASIC, null, AuditEvent.OUTCOME_CHALLENGE);
return result;
}
String username = hdrValue.substring(0, idx);
String password = hdrValue.substring(idx + 1);
return basicAuthenticate(inRealm, username, password, req, res);
}
|
java
|
protected String getBasicAuthRealmName(WebRequest webRequest) {
SecurityMetadata securityMetadata = webRequest.getSecurityMetadata();
if (securityMetadata != null) {
LoginConfiguration loginConfig = securityMetadata.getLoginConfiguration();
if (loginConfig != null && loginConfig.getRealmName() != null) {
return loginConfig.getRealmName();
}
if (config.getDisplayAuthenticationRealm()) {
return userRegistry.getRealm();
}
}
String realm = "defaultRealm";
return realm;
}
|
java
|
@Sensitive
protected String decodeBasicAuth(String data, String encoding) {
String output = "";
byte decodedByte[] = null;
decodedByte = Base64Coder.base64DecodeString(data);
if (decodedByte != null && decodedByte.length > 0) {
boolean decoded = false;
if (encoding != null) {
try {
output = new String(decodedByte, encoding);
decoded = true;
} catch (Exception e) {
// fall back not to use encoding..
if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) {
Tr.debug(tc, "An exception is caught using the encoder: " + encoding + ". The exception is: " + e.getMessage());
}
}
}
if (!decoded) {
output = new String(decodedByte);
}
}
return output;
}
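// A compact sketch (added for illustration, not part of the original source) of the same
// Basic-credential decoding using only JDK classes: java.util.Base64 stands in for the
// internal Base64Coder, and the helper name is hypothetical.
static String[] decodeBasicCredentials(String headerValue, String charsetName) {
    if (headerValue == null || !headerValue.startsWith("Basic ")) {
        return null;                                            // not a Basic Authorization header
    }
    byte[] decoded = java.util.Base64.getDecoder().decode(headerValue.substring(6));
    String pair;
    try {
        pair = new String(decoded, charsetName != null ? charsetName : "UTF-8");
    } catch (java.io.UnsupportedEncodingException e) {
        pair = new String(decoded);                             // fall back to the platform default
    }
    int idx = pair.indexOf(':');
    if (idx < 0) {
        return null;                                            // malformed credentials
    }
    return new String[] { pair.substring(0, idx), pair.substring(idx + 1) };
}
// Example: decodeBasicCredentials("Basic dXNlcjpwYXNz", null) returns { "user", "pass" }.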
|
java
|
public static boolean isUninstallable(Set<IFixInfo> installedFixes, IFixInfo fixToBeUninstalled) {
if (Boolean.valueOf(System.getenv(S_DISABLE)).booleanValue()) {
return true;
}
if (fixToBeUninstalled != null) {
for (IFixInfo fix : installedFixes) {
if (!(fixToBeUninstalled.getId().equals(fix.getId())) && !confirmNoFileConflicts(fixToBeUninstalled.getUpdates().getFiles(), fix.getUpdates().getFiles())) {
if (!isSupersededBy(fix.getResolves().getProblems(), fixToBeUninstalled.getResolves().getProblems())) {
return false;
}
}
}
return true;
}
return false;
}
|
java
|
public boolean isUninstallable(UninstallAsset uninstallAsset, Set<IFixInfo> installedFixes, List<UninstallAsset> uninstallAssets) {
if (Boolean.valueOf(System.getenv(S_DISABLE)).booleanValue()) {
return true;
}
IFixInfo fixToBeUninstalled = uninstallAsset.getIFixInfo();
for (IFixInfo fix : installedFixes) {
if (!(fixToBeUninstalled.getId().equals(fix.getId()))) {
if ((!confirmNoFileConflicts(fixToBeUninstalled.getUpdates().getFiles(), fix.getUpdates().getFiles())) &&
(!isSupersededBy(fix.getResolves().getProblems(), fixToBeUninstalled.getResolves().getProblems())))
if (!isToBeUninstalled(fix.getId(), uninstallAssets))
return false;
}
}
return true;
}
|
java
|
public static ArrayList<String> fixRequiredByFeature(String fixApar, Map<String, ProvisioningFeatureDefinition> installedFeatures) {
ArrayList<String> dependencies = new ArrayList<String>();
for (ProvisioningFeatureDefinition fd : installedFeatures.values()) {
String requireFixes = fd.getHeader("IBM-Require-Fix");
if (requireFixes != null && requireFixes.length() > 0) {
String[] apars = requireFixes.split(";");
for (String apar : apars) {
if (apar.trim().equals(fixApar.trim())) {
dependencies.add(apar);
}
}
}
}
if (dependencies.isEmpty())
return null;
return dependencies;
}
|
java
|
public List<UninstallAsset> determineOrder(List<UninstallAsset> list) {
if (list != null) {
List<FixDependencyComparator> fixCompareList = new ArrayList<FixDependencyComparator>();
// Wrap each asset's fix information in a dependency comparator
for (UninstallAsset asset : list) {
fixCompareList.add(new FixDependencyComparator(asset.getIFixInfo()));
}
// Sort the fix list by dependency order
Collections.sort(fixCompareList, new FixDependencyComparator());
List<UninstallAsset> newList = new ArrayList<UninstallAsset>();
for (FixDependencyComparator f : fixCompareList) {
newList.add(new UninstallAsset(f.getIfixInfo()));
}
return newList;
}
return list;
}
|
java
|
private static boolean isSupersededBy(List<Problem> apars1, List<Problem> apars2) {
boolean result = true;
// Now iterate over the current list of problems, and see if the incoming IFixInfo contains all of the problems from this IfixInfo.
// If it does then return true, to indicate that this IFixInfo object has been superseded.
for (Iterator<Problem> iter1 = apars1.iterator(); iter1.hasNext();) {
boolean currAparMatch = false;
Problem currApar1 = iter1.next();
for (Iterator<Problem> iter2 = apars2.iterator(); iter2.hasNext();) {
Problem currApar2 = iter2.next();
if (currApar1.getDisplayId().equals(currApar2.getDisplayId())) {
currAparMatch = true;
}
}
if (!currAparMatch)
result = false;
}
return result;
}
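// Illustration (added, not part of the original source) of the argument order: if ifix A
// resolves APARs { "PI11111", "PI22222" } and ifix B resolves { "PI11111", "PI22222", "PI33333" },
// then isSupersededBy(A's problems, B's problems) is true because every problem fixed by A is
// also fixed by B, while isSupersededBy(B's problems, A's problems) is false since "PI33333"
// has no matching display id in A.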
|
java
|
private static boolean confirmNoFileConflicts(Set<UpdatedFile> updatedFiles1, Set<UpdatedFile> updatedFiles2) {
for (Iterator<UpdatedFile> iter1 = updatedFiles1.iterator(); iter1.hasNext();) {
UpdatedFile currFile1 = iter1.next();
for (Iterator<UpdatedFile> iter2 = updatedFiles2.iterator(); iter2.hasNext();) {
UpdatedFile currFile2 = iter2.next();
if (currFile1.getId().equals(currFile2.getId())) {
return false;
}
}
}
return true;
}
|
java
|
@Override
public void close(boolean deleteProgressFile) {
final String methodName = "close()";
traceDebug(methodName, "cacheName=" + this.cacheName + " deleteProgressFile=" + deleteProgressFile);
if (deleteProgressFile) { // 316654
// remove the InProgress (dummy) file when close
deleteInProgressFile();
}
try {
htod.close();
} catch (Throwable t) {
com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.cache.CacheOnDisk.close", "574", this);
traceDebug(methodName, "cacheName=" + this.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t));
}
}
|
java
|
@Override
public int writeAuxiliaryDepTables() {
int returnCode = htod.writeAuxiliaryDepTables();
if (returnCode == HTODDynacache.DISK_EXCEPTION) {
stopOnError(this.htod.diskCacheException);
} else {
updatePropertyFile();
}
return returnCode;
}
|
java
|
private void readLastScanFile() {
final String methodName = "readLastScanFile()";
final File f = new File(lastScanFileName);
traceDebug(methodName, "cacheName=" + this.cacheName);
if (f.exists()) {
final CacheOnDisk cod = this;
AccessController.doPrivileged(new PrivilegedAction() {
@Override
public Object run() {
FileInputStream fis = null;
ObjectInputStream ois = null;
try {
fis = new FileInputStream(f);
ois = new ObjectInputStream(fis);
cod.lastScanTime = ois.readLong();
} catch (Throwable t) {
com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.cache.CacheOnDisk.readLastScanFile", "611", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t));
} finally {
try {
if (ois != null) {
ois.close();
}
if (fis != null) {
fis.close();
}
} catch (Throwable t) {
com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.cache.CacheOnDisk.readLastScanFile", "622", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t));
}
}
return null;
}
});
}
}
|
java
|
protected void updateLastScanFile() {
final String methodName = "updateLastScanFile()";
final File f = new File(lastScanFileName);
final CacheOnDisk cod = this;
traceDebug(methodName, "cacheName=" + this.cacheName);
AccessController.doPrivileged(new PrivilegedAction() {
@Override
public Object run() {
FileOutputStream fos = null;
ObjectOutputStream oos = null;
try {
fos = new FileOutputStream(f);
oos = new ObjectOutputStream(fos);
oos.writeLong(System.currentTimeMillis());
} catch (Throwable t) {
com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.cache.CacheOnDisk.updateLastScanFile", "650", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t));
} finally {
try {
if (oos != null) {
oos.close();
}
if (fos != null) {
fos.close();
}
} catch (Throwable t) {
com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.cache.CacheOnDisk.updateLastScanFile", "661", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t));
}
}
return null;
}
});
}
|
java
|
private void deletePropertyFile() {
final String methodName = "deletePropertyFile()";
final File f = new File(htodPropertyFileName);
final CacheOnDisk cod = this;
traceDebug(methodName, "cacheName=" + this.cacheName);
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
try {
f.delete();
} catch (Throwable t) {
com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.cache.CacheOnDisk.deletePropertyFile", "883", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t));
}
return null;
}
});
}
|
java
|
public void deleteDiskCacheFiles() {
final String methodName = "deleteDiskCacheFiles()";
final File f = new File(swapDirPath);
final CacheOnDisk cod = this;
traceDebug(methodName, "cacheName=" + this.cacheName);
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
// delete files
File fl[] = f.listFiles();
for (int i = 0; i < fl.length; i++) {
try {
fl[i].delete();
} catch (Throwable t) {
com.ibm.ws.ffdc.FFDCFilter.processException(t, "com.ibm.ws.cache.CacheOnDisk.deleteDiskCacheFiles", "908", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t));
}
}
return null;
}
});
}
|
java
|
protected ValueSet readAndDeleteInvalidationFile() {
final String methodName = "readAndDeleteInvalidationFile()";
final File f = new File(invalidationFileName);
final CacheOnDisk cod = this;
this.valueSet = new ValueSet(1);
if (f.exists()) {
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
FileInputStream fis = null;
ObjectInputStream ois = null;
try {
fis = new FileInputStream(f);
ois = new ObjectInputStream(fis);
int size = ois.readInt();
cod.valueSet = new ValueSet(size);
for (int i = 0; i < size; i++) {
cod.valueSet.add(ois.readObject());
}
} catch (Throwable t1) {
com.ibm.ws.ffdc.FFDCFilter.processException(t1, "com.ibm.ws.cache.CacheOnDisk.readAndDeleteInvalidationFile", "1056", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t1));
} finally {
try {
if (ois != null) {
ois.close();
}
if (fis != null) {
fis.close();
}
f.delete();
} catch (Throwable t2) {
com.ibm.ws.ffdc.FFDCFilter
.processException(t2, "com.ibm.ws.cache.CacheOnDisk.readAndDeleteInvalidationFile", "1068", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t2));
}
}
return null;
}
});
}
traceDebug(methodName, "cacheName=" + this.cacheName + " " + invalidationFileName + " valueSet=" + valueSet.size());
return this.valueSet;
}
|
java
|
protected void createInvalidationFile() {
final String methodName = "createInvalidationFile()";
final File f = new File(invalidationFileName);
final CacheOnDisk cod = this;
traceDebug(methodName, "cacheName=" + this.cacheName + " valueSet=" + cod.valueSet.size());
AccessController.doPrivileged(new PrivilegedAction() {
public Object run() {
FileOutputStream fos = null;
ObjectOutputStream oos = null;
try {
fos = new FileOutputStream(f);
oos = new ObjectOutputStream(fos);
oos.writeInt(cod.valueSet.size());
Iterator it = valueSet.iterator();
while (it.hasNext()) {
Object entryId = it.next();
oos.writeObject(entryId);
}
} catch (Throwable t1) {
com.ibm.ws.ffdc.FFDCFilter.processException(t1, "com.ibm.ws.cache.CacheOnDisk.createInvalidationFile", "1106", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t1));
} finally {
try {
oos.close();
fos.close();
} catch (Throwable t2) {
com.ibm.ws.ffdc.FFDCFilter.processException(t2, "com.ibm.ws.cache.CacheOnDisk.createInvalidationFile", "1113", cod);
traceDebug(methodName, "cacheName=" + cod.cacheName + "\nException: " + ExceptionUtility.getStackTrace(t2));
}
}
return null;
}
});
}
|
java
|
public void alarm(final Object alarmContext) {
final String methodName = "alarm()";
synchronized (this) {
if (!stopping && !this.htod.invalidationBuffer.isDiskClearInProgress()) {
this.htod.invalidationBuffer.invokeBackgroundInvalidation(HTODInvalidationBuffer.SCAN);
} else if (stopping) {
traceDebug(methodName, "cacheName=" + this.cacheName + " abort disk cleanup because the server is stopping.");
} else {
if (cleanupFrequency == 0) {
sleepTime = calculateSleepTime();
}
traceDebug(methodName, "cacheName=" + this.cacheName + " disk clear is in progress - skip disk scan and set alarm sleepTime="
+ sleepTime);
Scheduler.createNonDeferrable(sleepTime, alarmContext, new Runnable() {
@Override
public void run() {
alarm(alarmContext);
}
});
}
}
}
|
java
|
public void clearDiskCache() {
if (htod.clearDiskCache() == HTODDynacache.DISK_EXCEPTION) {
stopOnError(this.htod.diskCacheException);
} else {
updateLastScanFile();
updatePropertyFile();
createInProgressFile();
}
}
|
java
|
public int writeCacheEntry(CacheEntry ce) { // @A5C
int returnCode = htod.writeCacheEntry(ce);
if (returnCode == HTODDynacache.DISK_EXCEPTION) {
stopOnError(this.htod.diskCacheException);
}
return returnCode;
}
|
java
|
public CacheEntry readCacheEntry(Object id) { // SKS-O
Result result = htod.readCacheEntry(id);
if (result.returnCode == HTODDynacache.DISK_EXCEPTION) {
stopOnError(result.diskException);
this.htod.returnToResultPool(result);
return null;
}
CacheEntry cacheEntry = (CacheEntry) result.data;
this.htod.returnToResultPool(result);
return cacheEntry;
}
|
java
|